repo_name | path | copies | size | content | license
---|---|---|---|---|---|
ApptuitAI/xcollector | collectors/lib/hadoop_http.py | 1 | 3440 | #!/usr/bin/env python
# This file is part of tcollector.
# Copyright (C) 2011-2013 The tcollector Authors.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details. You should have received a copy
# of the GNU Lesser General Public License along with this program. If not,
# see <http://www.gnu.org/licenses/>.
try:
import httplib as httplib
except ImportError:
import http.client as httplib
try:
import json
except ImportError:
json = None
try:
from collections import OrderedDict # New in Python 2.7
except ImportError:
from ordereddict import OrderedDict # Can be easy_install'ed for <= 2.6
from collectors.lib.utils import is_numeric
EXCLUDED_KEYS = (
"Name",
"name"
)
class HadoopHttp(object):
def __init__(self, service, daemon, host, port, uri="/jmx"):
self.service = service
self.daemon = daemon
self.port = port
self.host = host
self.uri = uri
self.server = httplib.HTTPConnection(self.host, self.port)
self.server.auto_open = True
def request(self):
try:
self.server.request('GET', self.uri)
resp = self.server.getresponse().read()
except Exception:
resp = '{}'
finally:
self.server.close()
return json.loads(resp)
def poll(self):
"""
Get metrics from the http server's /jmx page, and transform them into normalized tuples
@return: array of tuples ([u'Context', u'Array'], u'metricName', value)
"""
json_arr = self.request().get('beans', [])
kept = []
for bean in json_arr:
if (not bean['name']) or (not "name=" in bean['name']):
continue
# split the name string
context = bean['name'].split("name=")[1].split(",sub=")
# Deduplicate while keeping only the first occurrence of each element
context = list(OrderedDict.fromkeys(context).keys())
# lower case and replace spaces.
context = [c.lower().replace(" ", "_") for c in context]
# don't want to include the service or daemon twice
context = [c for c in context if c != self.service and c != self.daemon]
for key, value in bean.items():
if key in EXCLUDED_KEYS:
continue
if not is_numeric(value):
continue
kept.append((context, key, value))
return kept
def emit_metric(self, context, current_time, metric_name, value, tag_dict=None):
if not tag_dict:
print("%s.%s.%s.%s %d %d" % (
self.service, self.daemon, ".".join(context), metric_name, current_time, value))
else:
tag_string = " ".join([k + "=" + v for k, v in tag_dict.items()])
print("%s.%s.%s.%s %d %d %s" % \
(self.service, self.daemon, ".".join(context), metric_name, current_time, value, tag_string))
def emit(self):
pass
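# A minimal usage sketch, assuming a Hadoop daemon exposing JMX on
# localhost:50075 (hypothetical host/port): poll() returns
# (context, metric_name, value) tuples, which emit_metric() prints in
# tcollector's "metric timestamp value" line format.
if __name__ == "__main__":
    import time
    collector = HadoopHttp("hadoop", "datanode", "localhost", 50075)
    now = int(time.time())
    for context, metric_name, value in collector.poll():
        collector.emit_metric(context, now, metric_name, value)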
| lgpl-3.0 |
mzizzi/ansible | lib/ansible/modules/files/xattr.py | 41 | 6746 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: xattr
version_added: "1.3"
short_description: set/retrieve extended attributes
description:
- Manages filesystem user defined extended attributes, requires that they are enabled
on the target filesystem and that the setfattr/getfattr utilities are present.
options:
path:
required: true
default: None
aliases: ['name']
description:
- The full path of the file/object to get the facts of.
- Before 2.3 this option was only usable as I(name).
key:
required: false
default: None
description:
- The name of a specific Extended attribute key to set/retrieve
value:
required: false
default: None
description:
- The value to set the named name/key to, it automatically sets the C(state) to 'set'
state:
required: false
default: read
choices: [ 'read', 'present', 'all', 'keys', 'absent' ]
description:
- Defines which operation to perform.
C(read) retrieves the current value for a C(key) (default)
C(present) sets C(name) to C(value), default if value is set
C(all) dumps all data
C(keys) retrieves all keys
C(absent) deletes the key
follow:
required: false
default: yes
choices: [ 'yes', 'no' ]
description:
- if yes, dereferences symlinks and sets/gets attributes on symlink target,
otherwise acts on symlink itself.
notes:
- As of Ansible 2.3, the I(name) option has been changed to I(path) as default, but I(name) still works as well.
author: "Brian Coca (@bcoca)"
'''
EXAMPLES = '''
# Obtain the extended attributes of /etc/foo.conf
- xattr:
path: /etc/foo.conf
# Sets the key 'foo' to value 'bar'
- xattr:
path: /etc/foo.conf
key: user.foo
value: bar
# Removes the key 'foo'
- xattr:
path: /etc/foo.conf
key: user.foo
state: absent
'''
import operator
import re
import os
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
def get_xattr_keys(module,path,follow):
cmd = [ module.get_bin_path('getfattr', True) ]
# prevents warning and not sure why it's not default
cmd.append('--absolute-names')
if not follow:
cmd.append('-h')
cmd.append(path)
return _run_xattr(module,cmd)
def get_xattr(module,path,key,follow):
cmd = [ module.get_bin_path('getfattr', True) ]
# prevents warning and not sure why it's not default
cmd.append('--absolute-names')
if not follow:
cmd.append('-h')
if key is None:
cmd.append('-d')
else:
cmd.append('-n %s' % key)
cmd.append(path)
return _run_xattr(module,cmd,False)
def set_xattr(module,path,key,value,follow):
cmd = [ module.get_bin_path('setfattr', True) ]
if not follow:
cmd.append('-h')
cmd.append('-n %s' % key)
cmd.append('-v %s' % value)
cmd.append(path)
return _run_xattr(module,cmd)
def rm_xattr(module,path,key,follow):
cmd = [ module.get_bin_path('setfattr', True) ]
if not follow:
cmd.append('-h')
cmd.append('-x %s' % key)
cmd.append(path)
return _run_xattr(module,cmd,False)
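# _run_xattr (below) parses getfattr/setfattr output of the form (illustrative):
#     # file: /etc/foo.conf
#     user.foo="bar"
# Comment lines starting with '#' and blank lines are skipped, 'key=value'
# lines are split on '=' with surrounding quotes stripped from the value, and
# any other non-empty line is stored as a key with an empty value.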
def _run_xattr(module,cmd,check_rc=True):
try:
(rc, out, err) = module.run_command(' '.join(cmd), check_rc=check_rc)
except Exception:
e = get_exception()
module.fail_json(msg="%s!" % e.strerror)
#result = {'raw': out}
result = {}
for line in out.splitlines():
if re.match("^#", line) or line == "":
pass
elif re.search('=', line):
(key, val) = line.split("=")
result[key] = val.strip('"')
else:
result[line] = ''
return result
def main():
module = AnsibleModule(
argument_spec = dict(
path = dict(required=True, aliases=['name'], type='path'),
key = dict(required=False, default=None, type='str'),
value = dict(required=False, default=None, type='str'),
state = dict(required=False, default='read', choices=[ 'read', 'present', 'all', 'keys', 'absent' ], type='str'),
follow = dict(required=False, type='bool', default=True),
),
supports_check_mode=True,
)
path = module.params.get('path')
key = module.params.get('key')
value = module.params.get('value')
state = module.params.get('state')
follow = module.params.get('follow')
if not os.path.exists(path):
module.fail_json(msg="path not found or not accessible!")
changed=False
msg = ""
res = {}
if key is None and state in ['present','absent']:
module.fail_json(msg="%s needs a key parameter" % state)
# All xattr must begin in user namespace
if key is not None and not re.match(r'^user\.', key):
key = 'user.%s' % key
if (state == 'present' or value is not None):
current=get_xattr(module,path,key,follow)
if current is None or not key in current or value != current[key]:
if not module.check_mode:
res = set_xattr(module,path,key,value,follow)
changed=True
res=current
msg="%s set to %s" % (key, value)
elif state == 'absent':
current=get_xattr(module,path,key,follow)
if current is not None and key in current:
if not module.check_mode:
res = rm_xattr(module,path,key,follow)
changed=True
res=current
msg="%s removed" % (key)
elif state == 'keys':
res=get_xattr_keys(module,path,follow)
msg="returning all keys"
elif state == 'all':
res=get_xattr(module,path,None,follow)
msg="dumping all"
else:
res=get_xattr(module,path,key,follow)
msg="returning %s" % key
module.exit_json(changed=changed, msg=msg, xattr=res)
if __name__ == '__main__':
main()
| gpl-3.0 |
zerkh/GroundHog | groundhog/layers/basic.py | 19 | 22520 | """
Parent classes describing a layer, model or operator
"""
__docformat__ = 'restructuredtext en'
__authors__ = ("Razvan Pascanu "
"KyungHyun Cho "
"Caglar Gulcehre ")
__contact__ = "Razvan Pascanu <r.pascanu@gmail>"
import numpy
import copy
import cPickle as pkl
import logging
import theano
import theano.tensor as TT
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from groundhog.utils import utils
from groundhog.utils.utils import id_generator
logger = logging.getLogger(__name__)
class Container(object):
"""
Root class. It contains some properties one would expect from any
derived class.
"""
def __init__(self):
self.floatX = theano.config.floatX
# Parameters of the model (shared variables)
self.params = []
# Factor scaling the gradient of the cost with respect to some
# parameter
self.params_grad_scale = []
# List of shared variables holding the noise used when applying
# weight noise
self.noise_params = []
# List of functions that compute the shape of a parameter
self.noise_params_shape_fn = []
# Updates that the model require at each step
self.updates = []
# Additional gradients that need to be added to the gradients of the
# cost of a model with respect to the parameters
self.additional_gradients = []
# Theano variables representing the inputs required to compute the
# value of the container
self.inputs = []
# Schedules for updating shared variables involved in the
# computation of the container
self.schedules = []
# Additional properties that can be computed beside the output of
# the container
self.properties = []
def tensor_from_layer(self, arg):
"""
Grab the theano tensor representing the computation of this
layer/operator iff `arg` is a layer
"""
if isinstance(arg, Container):
return arg.out
else:
return arg
def add_schedule(self, sched):
"""
Add a new schedule to the list of schedules
"""
self.schedules += [sched]
def add_schedules(self, scheds):
"""
Add a list of schedules to the current list of schedules
"""
self.schedules += scheds
def tensor_from_layer(self, arg, collect_params=True):
"""
Grab the theano tensor representing the computation of this
layer/operator iff `arg` is a layer.
:type collect_params: bool
:param collect_params: Flag. If true, also collect the parameters
and inputs of the layer `arg` and make them parameters and inputs
needed to compute the current layer/operator
"""
if not collect_params:
if isinstance(arg, Container):
return arg.out
else:
return arg
if isinstance(arg, Container):
self.merge_params(arg)
return arg.out
elif isinstance(arg, theano.gof.Variable):
inps = [x for x in theano.gof.graph.inputs([arg])
if not isinstance(x, (TT.Constant, theano.compile.SharedVariable))]
self.add_inputs(inps)
return arg
else:
return arg
def add_inputs(self, inps):
"""
Add to the current list of inputs the tensors in the `inps` list
"""
if not isinstance(inps, (list, tuple)):
inps = [inps]
for inp in inps:
if inp not in self.inputs:
self.inputs = self.inputs + [inp]
def merge_params(self, model):
"""
Add to the current properties of the container (params, schedules,
etc.) those of the layer/operator/model `model`.
"""
new_params_grad_scale = [ps for ps, param in zip(model.params_grad_scale,
model.params)
if param not in self.params]
new_params = [param for param in model.params if param not in self.params]
assert len(new_params_grad_scale) == len(new_params)
new_noise_params_shape_fn = [shape_fn
for shape_fn,noise_param in zip(model.noise_params_shape_fn,
model.noise_params)
if noise_param not in self.noise_params]
new_noise_params = [noise_param
for noise_param in model.noise_params
if noise_param not in self.noise_params]
new_inputs =[inp for inp in model.inputs if inp not in self.inputs]
new_schedules =[schedule for schedule in model.schedules
if schedule not in self.schedules]
new_updates = [update for update in model.updates
if update not in self.updates]
new_additional_gradients = [additional_gradient
for additional_gradient in model.additional_gradients
if additional_gradient not in self.additional_gradients]
new_properties = [prop for prop in model.properties
if prop not in self.properties]
self.params += new_params
self.params_grad_scale += new_params_grad_scale
assert len(self.params) == len(self.params_grad_scale)
if hasattr(self, 'grads'):
self.grads += [ 0 for param in new_params]
self.noise_params += new_noise_params
self.noise_params_shape_fn += new_noise_params_shape_fn
self.inputs += new_inputs
self.schedules += new_schedules
self.updates += new_updates
self.additional_gradients += new_additional_gradients
self.properties += new_properties
def save(self, filename):
"""
Save the model to file `filename`
"""
vals = dict([(x.name, x.get_value()) for x in self.params])
numpy.savez(filename, **vals)
def load(self, filename):
"""
Load the model.
"""
vals = numpy.load(filename)
for p in self.params:
if p.name in vals:
logger.debug('Loading {} of {}'.format(p.name, p.get_value(borrow=True).shape))
if p.get_value().shape != vals[p.name].shape:
raise Exception("Shape mismatch: {} != {} for {}"
.format(p.get_value().shape, vals[p.name].shape, p.name))
p.set_value(vals[p.name])
else:
# FIXME: do not stop loading even if there's a parameter value missing
#raise Exception("No parameter {} given".format(p.name))
logger.error( "No parameter {} given: default initialization used".format(p.name))
unknown = set(vals.keys()) - {p.name for p in self.params}
if len(unknown):
logger.error("Unknown parameters {} given".format(unknown))
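# A brief usage sketch (illustrative file name): save() writes one array per
# named parameter to an .npz archive and load() restores them by name, logging
# an error for any missing or unknown parameter instead of aborting.
#
#     model.save('params.npz')
#     model.load('params.npz')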
class Layer(Container):
"""
Parent class for Layers.
A layer is a segment of a computational pipeline. It is different from a
model in the sense that it does not necessarily have a cost or gradients
defined, neither does it respect the interface expected from the
trainers.
"""
def __init__(self, n_in=0, n_out=0, rng=None, name=None):
super(Layer, self).__init__()
if name:
self.name = name
else:
self.name = 'unknown_'+ id_generator(4)
self.rng = rng
self.n_in = n_in
self.n_out = n_out
self.n_hid = n_out
self.floatX = theano.config.floatX
def reshape(self, shape):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
new_obj.out = new_obj.out.reshape(shape)
return new_obj
shape = property(lambda self: self.out.shape)
def __str__(self):
return self.name
def __add__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other_var = new_obj.tensor_from_layer(other)
new_obj.out = new_obj.out + other_var
# Summing cost layers:
if hasattr(new_obj, 'grads') and hasattr(other, 'grads'):
for param, grad_param in zip(other.params, other.grads):
pos = new_obj.params.index(param)
new_obj.grads[pos] += grad_param
elif hasattr(new_obj, 'grads') and \
isinstance(other, theano.gof.Variable) and \
other.ndim == 0:
other_grads = TT.grad(other, new_obj.params,
disconnected_inputs='ignore')
new_obj.grads = [x + y for x,y in zip(new_obj.grads,
other_grads)]
elif hasattr(new_obj, 'grads'):
raise ValueError('I do not know how to compute the gradients'
' of the added term' + str(other) + '. Call'
' train on it if it is an output layer')
return new_obj
def __sub__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other_var = new_obj.tensor_from_layer(other)
new_obj.out = new_obj.out - other_var
if hasattr(new_obj, 'grads') and hasattr(other, 'grads'):
for param, grad_param in zip(other.params, other.grads):
pos = new_obj.params.index(param)
new_obj.grads[pos] -= grad_param
elif hasattr(new_obj, 'grads') and \
isinstance(other, theano.gof.Variable) and \
other.ndim == 0:
other_grads = TT.grad(other, new_obj.params,
disconnected_inputs='ignore')
new_obj.grads = [x - y for x,y in zip(new_obj.grads,
other_grads)]
elif hasattr(new_obj, 'grads'):
raise ValueError('I do not know how to compute the gradients'
' of the subtracted term' + str(other) + '. Call'
' train on it if it is an output layer')
return new_obj
def __mul__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other_var = self.tensor_from_layer(other)
if hasattr(new_obj, 'grads') and hasattr(other, 'grads'):
new_obj.grads = [ x * other_var for x in new_obj.grads]
for param, grad_param in zip(other.params, other.grads):
pos = new_obj.params.index(param)
new_obj.grads[pos] += new_obj.out * grad_param
elif hasattr(new_obj, 'grads') and \
isinstance(other, theano.gof.Variable) and \
other.ndim == 0:
new_obj.grads = [ x * other_var for x in new_obj.grads]
other_grads = TT.grad(other, new_obj.params,
disconnected_inputs='ignore')
new_obj.grads = [x + new_obj.cost * y
for x,y in zip(new_obj.grads,
other_grads)]
elif hasattr(new_obj, 'grads'):
raise ValueError('I do not know how to compute the gradients'
' of the subtracted term' + str(other) + '. Call'
' train on it if it is an output layer')
new_obj.out = new_obj.out * other_var
return new_obj
def __div__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other_var = new_obj.tensor_from_layer(other)
if hasattr(new_obj, 'grads') and hasattr(other, 'grads'):
new_obj.grads = [ x * other_var for x in new_obj.grads]
for param, grad_param in zip(other.params, other.grads):
pos = new_obj.params.index(param)
new_obj.grads[pos] -= new_obj.out * grad_param
new_obj.grads = [ x / (other_var**2) for x in new_obj.grads]
elif hasattr(new_obj, 'grads') and \
isinstance(other, theano.gof.Variable) and \
other.ndim == 0:
new_obj.grads = [ x * other_var for x in new_obj.grads]
other_grads = TT.grad(other, new_obj.params,
disconnected_inputs='ignore')
new_obj.grads = [(x - new_obj.cost * y)/ (other_var**2)
for x,y in zip(new_obj.grads,
other_grads)]
elif hasattr(new_obj, 'grads'):
raise ValueError('I do not know how to compute the gradients'
' of the subtracted term' + str(other) + '. Call'
' train on it if it is an output layer')
new_obj.out = new_obj.out / other_var
return new_obj
def __abs__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
new_obj.out = abs(new_obj.out)
if hasattr(new_obj, 'grads'):
new_obj.grads = [TT.sgn(new_obj.out) * x for x in new_obj.grads]
return new_obj
def __pow__(self, power):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
power = self.tensor_from_layer(power)
new_obj.out = new_obj.out**power
if hasattr(new_obj, 'grads'):
raise NotImplementedError
return new_obj
def __lt__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other = self.tensor_from_layer(other)
new_obj.out = new_obj.out.__lt__(other)
if hasattr(new_obj, 'grads'):
raise NotImplementedError
return new_obj
def __le__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other = self.tensor_from_layer(other)
new_obj.out = new_obj.out.__le__(other)
if hasattr(new_obj, 'grads'):
raise NotImplementedError
return new_obj
def __gt__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other = self.tensor_from_layer(other)
new_obj.out = new_obj.out.__gt__(other)
if hasattr(new_obj, 'grads'):
raise NotImplementedError
return new_obj
def __ge__(self, other):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
other = self.tensor_from_layer(other)
new_obj.out = new_obj.out.__ge__(other)
if hasattr(new_obj, 'grads'):
raise NotImplementedError
return new_obj
def __getitem__(self, pos):
assert hasattr(self, 'out'), 'all layers need a default output'
new_obj = utils.copy(self)
pos = self.tensor_from_layer(pos)
new_obj.out = new_obj.out.__getitem__(pos)
if hasattr(new_obj, 'grads'):
raise NotImplementedError
return new_obj
def _as_TensorVariable(self):
print ('WARNING: you might lose track of parameters or inputs '\
'because layer ' + self.name +' is being converted to a '\
'theano variable')
return self.out
def validate(self, **kwargs):
"""
Recompute the cost error (without the gradients)
It only works for output layers !
"""
if not hasattr(self, 'get_cost'):
raise TypeError('Non-output layer does not support this method')
new_obj = utils.copy(self)
try:
o_args, o_kwargs = new_obj.prev_args
except:
o_args, o_kwargs = ([], {})
kwargs = dict([(k, new_obj.tensor_from_layer(v)) for k,v in kwargs.items()])
for (k,v) in kwargs.items():
o_kwargs[k] = v
new_obj.prev_args = (o_args, o_kwargs)
new_obj.get_cost(*o_args, **o_kwargs)
return new_obj
def train(self, **kwargs):
"""
Compute the cost and gradients of the current layer with respect to
its parameters.
! Only works for output layers
"""
if not hasattr(self, 'get_grads'):
raise TypeError('Non-output layer does not support this method')
new_obj = utils.copy(self)
try:
o_args, o_kwargs = new_obj.prev_args
except:
o_args, o_kwargs = ([], {})
kwargs = dict([(k, new_obj.tensor_from_layer(v)) for k,v in kwargs.items()])
for (k,v) in kwargs.items():
o_kwargs[k] = v
new_obj.prev_args = (o_args, o_kwargs)
new_obj.get_grads(*o_args, **o_kwargs)
return new_obj
def get_sample(self, **kwargs):
"""
Get a sample from the current model.
! Only works for output layers
"""
if not hasattr(self, 'get_cost'):
raise TypeError('Non-output layer does not support this method')
new_obj = utils.copy(self)
try:
o_args, o_kwargs = new_obj.prev_args
except:
o_args, o_kwargs = ([], {})
kwargs = dict([(k, new_obj.tensor_from_layer(v)) for k,v in kwargs.items()])
for (k,v) in kwargs.items():
o_kwargs[k] = v
new_obj.prev_args = (o_args, o_kwargs)
sample = new_obj.compute_sample(*o_args, **o_kwargs)
return sample
def __call__(self, *args, **kwargs):
"""
Compose this layer with the inputs provided
"""
if 'one_step' in kwargs and kwargs['one_step']:
del kwargs['one_step']
args = [self.tensor_from_layer(arg, False) for arg in args]
kwargs = dict([(k, self.tensor_from_layer(v, False))
for k,v in kwargs.items()])
if hasattr(self, 'step_fprop'):
return self.step_fprop(*args, **kwargs)
else:
return self.fprop(*args, **kwargs)
new_obj = utils.copy(self)
args = [new_obj.tensor_from_layer(arg) for arg in args]
kwargs = dict([(k, new_obj.tensor_from_layer(v)) for k,v in kwargs.items()])
if 'do' in kwargs:
kind = kwargs['do']
del kwargs['do']
else:
kind = 'fprop'
if 'one_step' in kwargs:
del kwargs['one_step']
new_obj.prev_args = (args, kwargs)
if kind == 'fprop':
new_obj.fprop(*args, **kwargs)
elif kind == 'eval':
new_obj.get_cost(*args, **kwargs)
elif kind == 'train':
new_obj.get_grads(*args, **kwargs)
elif kind == 'run':
return new_obj.run(*args, **kwargs)
return new_obj
def _init_params(self):
raise NotImplementedError
def fprop(self, state_below, state_before=None, state_after=None):
raise NotImplementedError
class Model(Container):
"""
Model class. It respects the interface expected by the trainer.
"""
def __init__(self, output_layer,
sample_fn,
indx_word="/data/lisa/data/PennTreebankCorpus/dictionaries.npz",
indx_word_src=None,
rng =None):
super(Model, self).__init__()
if rng is None:
rng = numpy.random.RandomState(123)
assert hasattr(output_layer,'grads'), \
'The model needs to have gradients defined'
self.rng = rng
self.trng = RandomStreams(rng.randint(1000)+1)
self.sample_fn = sample_fn
self.indx_word = indx_word
self.indx_word_src = indx_word_src
self.param_grads = output_layer.grads
self.params = output_layer.params
self.updates = output_layer.updates
self.noise_params = output_layer.noise_params
self.noise_params_shape_fn = output_layer.noise_params_shape_fn
self.inputs = output_layer.inputs
self.params_grad_scale = output_layer.params_grad_scale
self.train_cost = output_layer.cost
self.out = output_layer.out
self.schedules = output_layer.schedules
self.output_layer = output_layer
self.properties = output_layer.properties
self._get_samples = output_layer._get_samples
def get_schedules(self):
return self.schedules
def validate(self, data_iterator):
raise NotImplementedError
def clone(self, **new_inputs):
new_obj = utils.copy(self)
# Reorder inputs
assert len(new_obj.inputs) == len(new_inputs.items())
pairs = [(x, new_inputs[x.name]) for x in new_obj.inputs]
new_obj.inputs = new_inputs.values()
new_obj.out = theano.clone(new_obj.out, replace=pairs)
if hasattr(new_obj, 'cost'):
new_obj.cost = theano.clone(new_obj.cost, replace=pairs)
if hasattr(new_obj, 'grads'):
new_obj.grads = theano.clone(new_obj.grads, replace=pairs)
if hasattr(new_obj, 'sample'):
new_obj.sample = theano.clone(new_obj.sample, replace=pairs)
return new_obj
def __call__(self, *args, **kwargs):
return self.run(*args, **kwargs)
class Operator(Layer):
def __init__(self,
apply_operator=None,
n_in=0,
n_out = 0):
super(Operator, self).__init__(n_in, n_out, rng=None)
self.apply_operator = apply_operator
def __call__(self, *args, **kwargs):
rval = self.apply_operator(*args, **kwargs)
if 'one_step' in kwargs and kwargs['one_step']:
return rval
self.params = rval.params
self.noise_params = rval.noise_params
self.noise_params_shape_fn = rval.noise_params_shape_fn
self.params_grad_scale = rval.params_grad_scale
self.inputs = rval.inputs
self.schedules = rval.schedules
return rval
def on(self, *args, **kwargs):
# Experimental
if not hasattr(self, 'run_fn'):
self.run_fn = theano.function(self.inputs, self.out)
return self.run_fn(*args, **kwargs)
| bsd-3-clause |
spookylukey/django-autocomplete-light | autocomplete_light/autocomplete/model.py | 1 | 4383 | from django.db.models import Q
__all__ = ('AutocompleteModel', )
class AutocompleteModel(object):
"""
Autocomplete which considers choices as a queryset.
choices
A queryset.
limit_choices
Maximum number of choices to display.
search_fields
Fields to search in, configurable like on ModelAdmin.search_fields.
split_words
If True, AutocompleteModel splits the search query into words and
returns all objects that contain each of the words, case insensitive,
where each word must be in at least one of search_fields.
If 'or', AutocompleteModel does the same but returns all objects that
contain **any** of the words.
order_by
If set, it will be used to order choices. It can be a single field name
or an iterable (ie. list, tuple).
"""
limit_choices = 20
choices = None
search_fields = None
split_words = False
order_by = None
def choice_value(self, choice):
"""
Return the pk of the choice by default.
"""
return choice.pk
def choice_label(self, choice):
"""
Return the unicode representation of the choice by default.
"""
return unicode(choice)
def order_choices(self, choices):
"""
Order choices using `order_by` option if it is set.
"""
if self.order_by is None:
return choices
if isinstance(self.order_by, basestring):
return choices.order_by(self.order_by)
return choices.order_by(*self.order_by)
def choices_for_values(self):
"""
Return ordered choices whose pk is in self.values.
"""
assert self.choices is not None, 'choices should be a queryset'
return self.order_choices(self.choices.filter(
pk__in=self.values or []))
def choices_for_request(self):
"""
Return a queryset based on `choices` using options `split_words`,
`search_fields` and `limit_choices`. Refer to the class-level
documentation for documentation on each of these options.
"""
assert self.choices is not None, 'choices should be a queryset'
assert self.search_fields, 'autocomplete.search_fields must be set'
q = self.request.GET.get('q', '')
exclude = self.request.GET.getlist('exclude')
conditions = self._choices_for_request_conditions(q,
self.search_fields)
return self.order_choices(self.choices.filter(
conditions).exclude(pk__in=exclude))[0:self.limit_choices]
def _construct_search(self, field_name):
"""
Using a field name optionally prefixed by `^`, `=`, `@`, return a
case-insensitive filter condition name usable as a queryset `filter()`
keyword argument.
"""
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
def _choices_for_request_conditions(self, q, search_fields):
"""
Return a `Q` object usable by `filter()` based on a list of fields to
search in `search_fields` for string `q`.
It uses options `split_words` and `search_fields`. Refer to the
class-level documentation for documentation on each of these options.
"""
conditions = Q()
if self.split_words:
for word in q.strip().split():
word_conditions = Q()
for search_field in search_fields:
word_conditions |= Q(**{
self._construct_search(search_field): word})
if self.split_words == 'or':
conditions |= word_conditions
else:
conditions &= word_conditions
else:
for search_field in search_fields:
conditions |= Q(**{self._construct_search(search_field): q})
return conditions
def validate_values(self):
"""
Return True if all values were found in `choices`.
"""
return len(self.choices_for_values()) == len(self.values)
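# A minimal subclass sketch (illustrative only, assuming a hypothetical
# `Person` model): search first/last name fields word by word, then order and
# cap the resulting queryset.
#
#     class PersonAutocomplete(AutocompleteModel):
#         choices = Person.objects.all()
#         search_fields = ('^first_name', 'last_name')
#         split_words = True
#         order_by = ('last_name', 'first_name')
#         limit_choices = 10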
| mit |
jimcunderwood/MissionPlanner | Lib/unittest/case.py | 42 | 43140 | """Test case implementation"""
import collections
import sys
import functools
import difflib
import pprint
import re
import warnings
from . import result
from .util import (
strclass, safe_repr, unorderable_list_difference,
_count_diff_all_purpose, _count_diff_hashable
)
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestCase.skipTest() or one of the skipping decorators
instead of raising this directly.
"""
pass
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
super(_ExpectedFailure, self).__init__()
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
pass
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not (isinstance(test_item, type) and issubclass(test_item, TestCase)):
@functools.wraps(test_item)
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
def expectedFailure(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception:
raise _ExpectedFailure(sys.exc_info())
raise _UnexpectedSuccess
return wrapper
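# A brief usage sketch (illustrative only): the decorators above are applied to
# test methods or whole TestCase classes, e.g.
#
#     class MyTests(TestCase):
#         @skipUnless(sys.platform.startswith("linux"), "requires Linux")
#         def test_linux_only(self):
#             ...
#
#         @expectedFailure
#         def test_known_bug(self):
#             self.assertEqual(1, 2)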
class _AssertRaisesContext(object):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __init__(self, expected, test_case, expected_regexp=None):
self.expected = expected
self.failureException = test_case.failureException
self.expected_regexp = expected_regexp
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise self.failureException(
"{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
self.exception = exc_value # store for later retrieval
if self.expected_regexp is None:
return True
expected_regexp = self.expected_regexp
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
return True
class TestCase(object):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
"""
# This attribute determines which exception will be raised when
# the instance's assertion methods fail; test methods raising this
# exception will be deemed to have 'failed' rather than 'errored'
failureException = AssertionError
# This attribute determines whether long messages (including repr of
# objects used in assert methods) will be printed on failure in *addition*
# to any explicit message passed.
longMessage = False
# This attribute sets the maximum length of a diff in failure messages
# by assert methods using difflib. It is looked up as an instance attribute
# so can be configured by individual tests if required.
maxDiff = 80*8
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._resultForDoCleanups = None
try:
testMethod = getattr(self, methodName)
except AttributeError:
raise ValueError("no such test method in %s: %s" %
(self.__class__, methodName))
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = {}
self.addTypeEqualityFunc(dict, self.assertDictEqual)
self.addTypeEqualityFunc(list, self.assertListEqual)
self.addTypeEqualityFunc(tuple, self.assertTupleEqual)
self.addTypeEqualityFunc(set, self.assertSetEqual)
self.addTypeEqualityFunc(frozenset, self.assertSetEqual)
self.addTypeEqualityFunc(unicode, self.assertMultiLineEqual)
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
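# A hedged usage sketch (hypothetical Point type): a subclass can register a
# custom comparator in setUp(), e.g.
#     self.addTypeEqualityFunc(Point, self.assertPointEqual)
# after which assertEqual(p1, p2) dispatches to assertPointEqual whenever both
# arguments are Point instances.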
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
@classmethod
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(self)
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
self._resultForDoCleanups = result
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why)
finally:
result.stopTest(self)
return
try:
success = False
try:
self.setUp()
except SkipTest as e:
self._addSkip(result, str(e))
except KeyboardInterrupt:
raise
except:
result.addError(self, sys.exc_info())
else:
try:
testMethod()
except KeyboardInterrupt:
raise
except self.failureException:
result.addFailure(self, sys.exc_info())
except _ExpectedFailure as e:
addExpectedFailure = getattr(result, 'addExpectedFailure', None)
if addExpectedFailure is not None:
addExpectedFailure(self, e.exc_info)
else:
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
except _UnexpectedSuccess:
addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
if addUnexpectedSuccess is not None:
addUnexpectedSuccess(self)
else:
warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failures",
RuntimeWarning)
result.addFailure(self, sys.exc_info())
except SkipTest as e:
self._addSkip(result, str(e))
except:
result.addError(self, sys.exc_info())
else:
success = True
try:
self.tearDown()
except KeyboardInterrupt:
raise
except:
result.addError(self, sys.exc_info())
success = False
cleanUpSuccess = self.doCleanups()
success = success and cleanUpSuccess
if success:
result.addSuccess(self)
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
result = self._resultForDoCleanups
ok = True
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
try:
function(*args, **kwargs)
except KeyboardInterrupt:
raise
except:
ok = False
result.addError(self, sys.exc_info())
return ok
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"""Check that the expression is false."""
if expr:
msg = self._formatMessage(msg, "%s is not false" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Check that the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not true" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
# don't switch to '{}' formatting in Python 2.X
# it changes the way unicode input is handled
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
thrown, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
context = _AssertRaisesContext(excClass, self)
if callableObj is None:
return context
with context:
callableObj(*args, **kwargs)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '=='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
(default 7) and comparing to zero, or by comparing that the
difference between the two objects is more than the given delta.
Note that decimal places (from zero) are usually not the same
as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertNotAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
(default 7) and comparing to zero, or by comparing that the
between the two objects is less than the given delta.
Note that decimal places (from zero) are usually not the same
as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
# Synonyms for assertion methods
# The plurals are undocumented. Keep them that way to discourage use.
# Do not add more. Do not remove.
# Going through a deprecation cycle on these would annoy many people.
assertEquals = assertEqual
assertNotEquals = assertNotEqual
assertAlmostEquals = assertAlmostEqual
assertNotAlmostEquals = assertNotAlmostEqual
assert_ = assertTrue
# These fail* assertion method names are pending deprecation and will
# be a DeprecationWarning in 3.2; http://bugs.python.org/issue2578
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
'Please use {0} instead.'.format(original_func.__name__),
PendingDeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
failUnlessEqual = _deprecate(assertEqual)
failIfEqual = _deprecate(assertNotEqual)
failUnlessAlmostEqual = _deprecate(assertAlmostEqual)
failIfAlmostEqual = _deprecate(assertNotAlmostEqual)
failUnless = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
def assertSequenceEqual(self, seq1, seq2, msg=None, seq_type=None):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
seq1_repr = safe_repr(seq1)
seq2_repr = safe_repr(seq2)
if len(seq1_repr) > 30:
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in xrange(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
(i, item1, item2))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, seq1[len2]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
differing += ('First extra element %d:\n%s\n' %
(len1, seq2[len1]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
assertSetEqual uses ducktyping to support different types of sets, and
is optimized for sets specifically (parameters must support a
difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1),
safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertIsInstance(d1, dict, 'First argument is not a dictionary')
self.assertIsInstance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, expected, actual, msg=None):
"""Checks whether actual is a superset of expected."""
missing = []
mismatched = []
for key, value in expected.iteritems():
if key not in actual:
missing.append(key)
elif value != actual[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(actual[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
"""An unordered sequence specific comparison. It asserts that
actual_seq and expected_seq have the same element counts.
Equivalent to::
self.assertEqual(Counter(iter(actual_seq)),
Counter(iter(expected_seq)))
Asserts that each element has the same count in both sequences.
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
first_seq, second_seq = list(actual_seq), list(expected_seq)
with warnings.catch_warnings():
if sys.py3kwarning:
# Silence Py3k warning raised during the sorting
for _msg in ["(code|dict|type) inequality comparisons",
"builtin_function_or_method order comparisons",
"comparing unequal types"]:
warnings.filterwarnings("ignore", _msg, DeprecationWarning)
try:
first = collections.Counter(first_seq)
second = collections.Counter(second_seq)
except TypeError:
# Handle case with unhashable elements
differences = _count_diff_all_purpose(first_seq, second_seq)
else:
if first == second:
return
differences = _count_diff_hashable(first_seq, second_seq)
if differences:
standardMsg = 'Element counts were not equal:\n'
lines = ['First has %d, Second has %d: %r' % diff for diff in differences]
diffMsg = '\n'.join(lines)
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertIsInstance(first, basestring,
'First argument is not a string')
self.assertIsInstance(second, basestring,
'Second argument is not a string')
if first != second:
firstlines = first.splitlines(True)
secondlines = second.splitlines(True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
standardMsg = '%s != %s' % (safe_repr(first, True),
safe_repr(second, True))
diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regexp.
Args:
expected_exception: Exception class expected to be raised.
expected_regexp: Regexp (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertRaisesContext(expected_exception, self, expected_regexp)
if callable_obj is None:
return context
with context:
callable_obj(*args, **kwargs)
def assertRegexpMatches(self, text, expected_regexp, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(text):
msg = msg or "Regexp didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
raise self.failureException(msg)
def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regexp, basestring):
unexpected_regexp = re.compile(unexpected_regexp)
match = unexpected_regexp.search(text)
if match:
msg = msg or "Regexp matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regexp.pattern,
text)
raise self.failureException(msg)
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s tec=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
| gpl-3.0 |
cortedeltimo/SickRage | lib/tzlocal/win32.py | 11 | 3157 | try:
import _winreg as winreg
except ImportError:
import winreg
from tzlocal.windows_tz import win_tz
import pytz
_cache_tz = None
def valuestodict(key):
"""Convert a registry key's values to a dictionary."""
    result = {}  # avoid shadowing the built-in dict type
    size = winreg.QueryInfoKey(key)[1]
    for i in range(size):
        data = winreg.EnumValue(key, i)
        result[data[0]] = data[1]
    return result
def get_localzone_name():
# Windows is special. It has unique time zone names (in several
# meanings of the word) available, but unfortunately, they can be
# translated to the language of the operating system, so we need to
# do a backwards lookup, by going through all time zones and see which
# one matches.
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
localtz = winreg.OpenKey(handle, TZLOCALKEYNAME)
keyvalues = valuestodict(localtz)
localtz.Close()
if 'TimeZoneKeyName' in keyvalues:
# Windows 7 (and Vista?)
# For some reason this returns a string with loads of NUL bytes at
# least on some systems. I don't know if this is a bug somewhere, I
# just work around it.
tzkeyname = keyvalues['TimeZoneKeyName'].split('\x00', 1)[0]
else:
# Windows 2000 or XP
# This is the localized name:
tzwin = keyvalues['StandardName']
# Open the list of timezones to look up the real name:
TZKEYNAME = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
tzkey = winreg.OpenKey(handle, TZKEYNAME)
# Now, match this value to Time Zone information
tzkeyname = None
for i in range(winreg.QueryInfoKey(tzkey)[0]):
subkey = winreg.EnumKey(tzkey, i)
sub = winreg.OpenKey(tzkey, subkey)
data = valuestodict(sub)
sub.Close()
try:
if data['Std'] == tzwin:
tzkeyname = subkey
break
except KeyError:
# This timezone didn't have proper configuration.
# Ignore it.
pass
tzkey.Close()
handle.Close()
if tzkeyname is None:
raise LookupError('Can not find Windows timezone configuration')
timezone = win_tz.get(tzkeyname)
if timezone is None:
# Nope, that didn't work. Try adding "Standard Time",
# it seems to work a lot of times:
timezone = win_tz.get(tzkeyname + " Standard Time")
# Return what we have.
if timezone is None:
raise pytz.UnknownTimeZoneError('Can not find timezone ' + tzkeyname)
return timezone
def get_localzone():
"""Returns the zoneinfo-based tzinfo object that matches the Windows-configured timezone."""
global _cache_tz
if _cache_tz is None:
_cache_tz = pytz.timezone(get_localzone_name())
return _cache_tz
def reload_localzone():
"""Reload the cached localzone. You need to call this if the timezone has changed."""
global _cache_tz
_cache_tz = pytz.timezone(get_localzone_name())
return _cache_tz
| gpl-3.0 |
pferreir/indico | indico/modules/attachments/__init__.py | 4 | 2521 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import session
from indico.core import signals
from indico.core.logger import Logger
from indico.modules.attachments.logging import connect_log_signals
from indico.modules.attachments.models.attachments import Attachment
from indico.modules.attachments.models.folders import AttachmentFolder
from indico.modules.attachments.util import can_manage_attachments
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
__all__ = ('logger', 'Attachment', 'AttachmentFolder')
logger = Logger.get('attachments')
connect_log_signals()
@signals.users.merged.connect
def _merge_users(target, source, **kwargs):
from indico.modules.attachments.models.attachments import Attachment, AttachmentFile
from indico.modules.attachments.models.principals import AttachmentFolderPrincipal, AttachmentPrincipal
Attachment.query.filter_by(user_id=source.id).update({Attachment.user_id: target.id})
AttachmentFile.query.filter_by(user_id=source.id).update({AttachmentFile.user_id: target.id})
AttachmentPrincipal.merge_users(target, source, 'attachment')
AttachmentFolderPrincipal.merge_users(target, source, 'folder')
@signals.menu.items.connect_via('event-management-sidemenu')
def _extend_event_management_menu(sender, event, **kwargs):
if not can_manage_attachments(event, session.user):
return
return SideMenuItem('attachments', _('Materials'), url_for('attachments.management', event), 80,
section='organization')
@signals.event_management.management_url.connect
def _get_event_management_url(event, **kwargs):
if can_manage_attachments(event, session.user):
return url_for('attachments.management', event)
@signals.menu.items.connect_via('category-management-sidemenu')
def _extend_category_management_menu(sender, category, **kwargs):
return SideMenuItem('attachments', _('Materials'), url_for('attachments.management', category), icon='upload')
@signals.event_management.get_cloners.connect
def _get_attachment_cloner(sender, **kwargs):
from indico.modules.attachments.clone import AttachmentCloner
return AttachmentCloner
@signals.import_tasks.connect
def _import_tasks(sender, **kwargs):
import indico.modules.attachments.tasks # noqa: F401
| mit |
mozman/ezdxf | tests/test_06_math/test_606_convexhull.py | 1 | 5800 | # Author: mozman
# Purpose: test convex_hull_2d
# Created: 28.02.2010
# License: MIT License
import pytest
from ezdxf.math import convex_hull_2d
from io import StringIO
def import_asc_coords(file_obj):
""" Import allplan asc-format point file.
    returns: a dictionary mapping point names to (x, y, z) tuples
"""
points = dict()
lines = file_obj.readlines()
for line in lines:
name, x, y, z, code = line.split()
        try:
            # float() is what can raise ValueError here; doing the conversions inside
            # the try block lets malformed lines be skipped as intended.
            points[name] = (float(x), float(y), float(z))
        except ValueError:
            pass
return points
def test_convex_hull_raises():
with pytest.raises(ValueError):
_ = convex_hull_2d([])
with pytest.raises(ValueError):
_ = convex_hull_2d([(0., 0.), (0., 0.)])
with pytest.raises(ValueError):
_ = convex_hull_2d([(0., 0.), (0., 0.), (0., 0.), (0., 0.)])
def test_convex_hull_set1():
set1 = import_asc_coords(StringIO(cx_set1))
hull = convex_hull_2d(set1.values())
result_keys = ["3", "18", "19", "1", "7", "8", "2", "16", "17"]
for result, result_key in zip(hull, result_keys):
assert result == set1[result_key]
def test_convex_hull_set2():
set2 = import_asc_coords(StringIO(cx_set2))
hull = convex_hull_2d(set2.values())
result_keys = ["1", "2", "8", "15"]
for result, result_key in zip(hull, result_keys):
assert result == set2[result_key]
def test_convex_hull_set3():
set3 = import_asc_coords(StringIO(cx_set3))
hull = convex_hull_2d(set3.values())
result_keys = ["1", "7", "13"]
for result, result_key in zip(hull, result_keys):
assert result == set3[result_key]
# inline data files
cx_set1 = """ 1 4.000 8.000 0.000 0
2 9.000 3.000 0.000 0
3 1.000 4.000 0.000 0
4 5.000 5.000 0.000 0
5 3.000 7.000 0.000 0
6 2.000 5.000 0.000 0
7 7.000 7.000 0.000 0
8 9.000 5.000 0.000 0
9 5.000 3.000 0.000 0
10 6.000 6.000 0.000 0
11 3.000 6.000 0.000 0
12 7.000 4.000 0.000 0
13 8.000 3.000 0.000 0
14 6.000 2.000 0.000 0
15 4.000 2.000 0.000 0
16 7.000 1.000 0.000 0
17 4.000 1.000 0.000 0
18 1.000 5.000 0.000 0
19 2.000 7.000 0.000 0
"""
cx_set2 = """ 1 -7.000 1.000 0.000 0
2 -7.000 8.000 0.000 0
3 4.000 5.000 0.000 0
4 -6.128 4.000 0.000 0
5 -3.605 6.559 0.000 0
6 -3.672 4.781 0.000 0
7 1.645 4.680 0.000 0
8 5.000 8.000 0.000 0
9 1.543 6.356 0.000 0
10 -2.335 4.341 0.000 0
11 -3.757 2.648 0.000 0
12 2.559 2.000 0.000 0
13 -2.673 5.442 0.000 0
14 3.203 6.627 0.000 0
15 5.000 1.000 0.000 0
16 -2.910 1.000 0.000 0
17 -2.555 8.000 0.000 0
18 5.000 5.000 0.000 0
19 -7.000 6.000 0.000 0
20 -4.841 3.410 0.000 0
21 -5.349 6.221 0.000 0
22 0.375 4.460 0.000 0
"""
cx_set3 = """ 1 -8.000 8.000 0.000 0
2 -3.244 6.443 0.000 0
3 1.646 6.443 0.000 0
4 -6.000 8.000 0.000 0
5 2.000 8.000 0.000 0
6 5.000 8.000 0.000 0
7 7.000 8.000 0.000 0
8 4.000 6.000 0.000 0
9 -2.566 5.406 0.000 0
10 1.688 5.406 0.000 0
11 3.500 4.500 0.000 0
12 1.442 2.442 0.000 0
13 0.000 1.000 0.000 0
14 -1.936 4.488 0.000 0
15 0.383 4.521 0.000 0
16 1.445 3.480 0.000 0
17 -0.484 2.483 0.000 0
18 -1.936 3.632 0.000 0
19 -3.429 4.000 0.000 0
20 -5.714 6.000 0.000 0
21 -7.315 7.401 0.000 0
22 -4.915 7.506 0.000 0
23 -5.335 6.531 0.000 0
24 -2.287 7.452 0.000 0
"""
| mit |
kaustubh-kabra/modified-xen | tools/python/xen/xm/setenforce.py | 43 | 2560 | #============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Author: Machon Gregory <[email protected]>
#============================================================================
"""Modify the current mode of the Flask XSM module.
"""
import sys
from xen.xm.opts import OptionError
from xen.xm import main as xm_main
from xen.xm.main import server
from xen.util import xsconstants
def help():
return """
Usage: xm setenforce [ Enforcing | Permissive | 1 | 0 ]
Modifies the current mode of the Flask XSM module to be permissive or
enforcing. Using Enforcing or 1 will put the Flask module in enforcing
mode. Using Permissive or 0 will put the Flask module in permissive
mode."""
def setenforce(mode):
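    """Translate mode into 0/1 and switch the Flask XSM module to that state,
    using either the Xen API or the legacy Xend interface depending on the
    configured server type."""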
if len(mode) == 1 and ( mode == "0" or mode == "1" ):
val = int(mode)
elif mode.lower() == "enforcing":
val = 1
elif mode.lower() == "permissive":
val = 0
else:
raise OptionError("%s is an unsupported mode" % mode)
if xm_main.serverType == xm_main.SERVER_XEN_API:
if xsconstants.XS_POLICY_FLASK != \
int(server.xenapi.XSPolicy.get_xstype()):
raise OptionError("Unsupported policy type")
ret = server.xenapi.XSPolicy.setenforce(val)
else:
if server.xend.security.on() != xsconstants.XS_POLICY_FLASK:
raise OptionError("Unsupported policy type")
ret = server.xend.security.setenforce(val)
def main(argv):
if len(argv) != 2:
raise OptionError("Invalid arguments")
if "-?" in argv:
help()
return
mode = argv[1];
setenforce(mode)
if __name__ == '__main__':
try:
main(sys.argv)
except Exception, e:
sys.stderr.write('Error: %s\n' % str(e))
sys.exit(-1)
| gpl-2.0 |
acfogarty/espressopp | src/tools/vmd.py | 7 | 5181 | # Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import time
import socket
import select
import struct
import espressopp
IMD_HANDSHAKE = 4
IMDVERSION = 2
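# IMD protocol message types (0-9), listed in the order used on the wire by
# VMD's IMD implementation; the positions below define their numeric values.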
IMD_DISCONNECT, \
IMD_ENERGIES, \
IMD_FCOORDS, \
IMD_GO, \
IMD_HANDSHAKE, \
IMD_KILL, \
IMD_MDCOMM, \
IMD_PAUSE, \
IMD_TRATE, \
IMD_IOERROR = range(10)
def handshake(initsock):
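    """Accept the incoming VMD connection on initsock and perform the IMD handshake.
    Returns the connected socket on success, or 0 if VMD did not answer with IMD_GO."""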
print "before"
(sock, sock_port) = initsock.accept()
print "after"
header = struct.Struct('!II')
msg = header.pack(IMD_HANDSHAKE, IMDVERSION)
sock.send(msg)
res = []
cnt = 100
while len(res) == 0 and cnt > 0:
res = select.select([sock],[],[],0)[0]
cnt -= 1
time.sleep(0.1)
if len(res) == 1:
msg = struct.unpack('!II',sock.recv(8))
if msg[0] == IMD_GO:
print "VMD sent IMD_GO"
return sock
else:
print "unexpected answer from VMD"
return 0
else:
print "VMD did not answer."
return 0
def drain_socket(sock):
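    """Read and discard any pending data VMD has already queued on the socket."""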
res = select.select([sock],[],[],0)[0]
while len(res) > 0:
buf = sock.recv(8)
bufl = len(buf)
if bufl == 0:
break
# msg = struct.unpack('B'*bufl,buf)
res = select.select([sock],[],[],0)[0]
return
def connect(system, molsize=10, pqrfile=False, vmd_path='vmd'):
"""Connects to the VMD.
:param espressopp.system system: The system object.
:param int molsize: The optional size of the molecule.
:param bool pqrfile: If set to True then the pqr vmd.pqr file will be used otherwise (default)
the vmd.pdb file will be used.
:param str vmd_path: The path to the executable of vmd, by default it is set to 'vmd'.
:returns: Socket to the VMD.
"""
espressopp.tools.psfwrite("vmd.psf", system, molsize=molsize, maxdist=system.bc.boxL[0]/2)
if pqrfile==True:
espressopp.tools.pqrwrite("vmd.pqr", system, molsize=molsize)
else:
espressopp.tools.pdbwrite("vmd.pdb", system, molsize=molsize)
initsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
hostname = socket.gethostname()
port = 10000
while port < 65000:
try:
initsock.bind((hostname, port))
initsock.listen(1)
break
except:
port += 1
if port == 65000:
print "no free port for vmd socket found."
return initsock
vmdfile = open("vmd.tcl","w")
if pqrfile == True:
vmdfile.write("mol load psf vmd.psf pqr vmd.pqr\n")
else:
vmdfile.write("mol load psf vmd.psf pdb vmd.pdb\n")
vmdfile.write("logfile vmd.log\n")
vmdfile.write("rotate stop\n")
vmdfile.write("logfile off\n")
# vmdfile.write("mol modstyle 0 0 CPK 0.500000 0.500000 8.000000 6.000000\n")
vmdfile.write("mol modstyle 0 0 VDW 0.4 20\n")
vmdfile.write("mol modcolor 0 0 SegName\n")
vmdfile.write("color Segname {T000} 3\n")
#vmdfile.write("mol delrep 0 top\n")
#vmdfile.write("mol representation CPK 0.500000 0.500000 8.000000 6.000000\n")
#vmdfile.write("mol color SegName\n")
#vmdfile.write("mol selection {segname T000}\n")
#vmdfile.write("mol material Opaque\n")
#vmdfile.write("mol addrep top\n")
#vmdfile.write("mol selupdate 0 top 0\n")
#vmdfile.write("mol colupdate 0 top 0\n")
#vmdfile.write("mol scaleminmax top 0 0.000000 0.000000\n")
#vmdfile.write("mol smoothrep top 0 0\n")
#vmdfile.write("mol drawframes top 0 {now}\n")
#vmdfile.write("color Segname {T000} red\n")
#vmdfile.write("color Display {Background} silver\n")
st = "imd connect %s %i\n" % (hostname, port)
vmdfile.write(st)
vmdfile.write("imd transfer 1\n")
vmdfile.write("imd keep 1\n")
vmdfile.close()
subprocess.Popen([vmd_path, '-e', 'vmd.tcl'])
sock = handshake(initsock)
if (sock != 0):
time.sleep(0.25)
drain_socket(sock)
return sock
def imd_positions(system, sock, folded=True):
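    """Pack the current particle coordinates into an IMD_FCOORDS message and send it to VMD.
    If folded is True the folded positions are sent, otherwise the unfolded ones."""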
maxParticleID = int(espressopp.analysis.MaxPID(system).compute())
count = 0
pid = 0
coords = struct.pack('')
while pid <= maxParticleID:
if system.storage.particleExists(pid):
particle = system.storage.getParticle(pid)
if folded:
p = particle.pos
else:
p = system.bc.getUnfoldedPosition(particle.pos, particle.imageBox)
coords += struct.pack('!fff', p[0], p[1], p[2])
count += 1
pid += 1
else:
pid += 1
header = struct.pack('!II',IMD_FCOORDS,count)
msg = header + coords
sock.send(msg)
| gpl-3.0 |
gorgorom/p2pool-gor-alts | SOAPpy/Client.py | 289 | 19821 | from __future__ import nested_scopes
"""
################################################################################
#
# SOAPpy - Cayce Ullman ([email protected])
# Brian Matthews ([email protected])
# Gregory Warnes ([email protected])
# Christopher Blunck ([email protected])
#
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: Client.py 1496 2010-03-04 23:46:17Z pooryorick $'
from version import __version__
#import xml.sax
import urllib
from types import *
import re
import base64
import socket, httplib
from httplib import HTTPConnection, HTTP
import Cookie
# SOAPpy modules
from Errors import *
from Config import Config
from Parser import parseSOAPRPC
from SOAPBuilder import buildSOAP
from Utilities import *
from Types import faultType, simplify
################################################################################
# Client
################################################################################
def SOAPUserAgent():
return "SOAPpy " + __version__ + " (pywebsvcs.sf.net)"
class SOAPAddress:
def __init__(self, url, config = Config):
proto, uri = urllib.splittype(url)
# apply some defaults
if uri[0:2] != '//':
if proto != None:
uri = proto + ':' + uri
uri = '//' + uri
proto = 'http'
host, path = urllib.splithost(uri)
try:
int(host)
host = 'localhost:' + host
except:
pass
if not path:
path = '/'
if proto not in ('http', 'https', 'httpg'):
raise IOError, "unsupported SOAP protocol"
if proto == 'httpg' and not config.GSIclient:
raise AttributeError, \
"GSI client not supported by this Python installation"
if proto == 'https' and not config.SSLclient:
raise AttributeError, \
"SSL client not supported by this Python installation"
self.user,host = urllib.splituser(host)
self.proto = proto
self.host = host
self.path = path
def __str__(self):
return "%(proto)s://%(host)s%(path)s" % self.__dict__
__repr__ = __str__
class SOAPTimeoutError(socket.timeout):
'''This exception is raised when a timeout occurs in SOAP operations'''
pass
class HTTPConnectionWithTimeout(HTTPConnection):
'''Extend HTTPConnection for timeout support'''
def __init__(self, host, port=None, strict=None, timeout=None):
HTTPConnection.__init__(self, host, port, strict)
self._timeout = timeout
def connect(self):
HTTPConnection.connect(self)
if self.sock and self._timeout:
self.sock.settimeout(self._timeout)
class HTTPWithTimeout(HTTP):
_connection_class = HTTPConnectionWithTimeout
    ## this __init__ copied from httplib.HTTP class
def __init__(self, host='', port=None, strict=None, timeout=None):
"Provide a default host, since the superclass requires one."
# some joker passed 0 explicitly, meaning default port
if port == 0:
port = None
# Note that we may pass an empty string as the host; this will throw
# an error when we attempt to connect. Presumably, the client code
# will call connect before then, with a proper host.
self._setup(self._connection_class(host, port, strict, timeout))
class HTTPTransport:
def __init__(self):
self.cookies = Cookie.SimpleCookie();
def getNS(self, original_namespace, data):
"""Extract the (possibly extended) namespace from the returned
SOAP message."""
if type(original_namespace) == StringType:
pattern="xmlns:\w+=['\"](" + original_namespace + "[^'\"]*)['\"]"
match = re.search(pattern, data)
if match:
return match.group(1)
else:
return original_namespace
else:
return original_namespace
def __addcookies(self, r):
'''Add cookies from self.cookies to request r
'''
for cname, morsel in self.cookies.items():
attrs = []
value = morsel.get('version', '')
if value != '' and value != '0':
attrs.append('$Version=%s' % value)
attrs.append('%s=%s' % (cname, morsel.coded_value))
value = morsel.get('path')
if value:
attrs.append('$Path=%s' % value)
value = morsel.get('domain')
if value:
attrs.append('$Domain=%s' % value)
r.putheader('Cookie', "; ".join(attrs))
def call(self, addr, data, namespace, soapaction = None, encoding = None,
http_proxy = None, config = Config, timeout=None):
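        """POST the SOAP payload `data` to `addr` and return a tuple of the
        response body and the (possibly extended) namespace."""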
if not isinstance(addr, SOAPAddress):
addr = SOAPAddress(addr, config)
# Build a request
if http_proxy:
real_addr = http_proxy
real_path = addr.proto + "://" + addr.host + addr.path
else:
real_addr = addr.host
real_path = addr.path
if addr.proto == 'httpg':
from pyGlobus.io import GSIHTTP
r = GSIHTTP(real_addr, tcpAttr = config.tcpAttr)
elif addr.proto == 'https':
r = httplib.HTTPS(real_addr, key_file=config.SSL.key_file, cert_file=config.SSL.cert_file)
else:
r = HTTPWithTimeout(real_addr, timeout=timeout)
r.putrequest("POST", real_path)
r.putheader("Host", addr.host)
r.putheader("User-agent", SOAPUserAgent())
t = 'text/xml';
if encoding != None:
t += '; charset=%s' % encoding
r.putheader("Content-type", t)
r.putheader("Content-length", str(len(data)))
self.__addcookies(r);
# if user is not a user:passwd format
# we'll receive a failure from the server. . .I guess (??)
if addr.user != None:
val = base64.encodestring(addr.user)
r.putheader('Authorization','Basic ' + val.replace('\012',''))
# This fixes sending either "" or "None"
if soapaction == None or len(soapaction) == 0:
r.putheader("SOAPAction", "")
else:
r.putheader("SOAPAction", '"%s"' % soapaction)
if config.dumpHeadersOut:
s = 'Outgoing HTTP headers'
debugHeader(s)
print "POST %s %s" % (real_path, r._http_vsn_str)
print "Host:", addr.host
print "User-agent: SOAPpy " + __version__ + " (http://pywebsvcs.sf.net)"
print "Content-type:", t
print "Content-length:", len(data)
print 'SOAPAction: "%s"' % soapaction
debugFooter(s)
r.endheaders()
if config.dumpSOAPOut:
s = 'Outgoing SOAP'
debugHeader(s)
print data,
if data[-1] != '\n':
print
debugFooter(s)
# send the payload
r.send(data)
# read response line
code, msg, headers = r.getreply()
self.cookies = Cookie.SimpleCookie();
if headers:
content_type = headers.get("content-type","text/xml")
content_length = headers.get("Content-length")
for cookie in headers.getallmatchingheaders("Set-Cookie"):
self.cookies.load(cookie);
else:
content_type=None
content_length=None
        # work around OC4J bug which does '<len>, <len>' for some reason
if content_length:
comma=content_length.find(',')
if comma>0:
content_length = content_length[:comma]
# attempt to extract integer message size
try:
message_len = int(content_length)
except:
message_len = -1
if message_len < 0:
# Content-Length missing or invalid; just read the whole socket
# This won't work with HTTP/1.1 chunked encoding
data = r.getfile().read()
message_len = len(data)
else:
data = r.getfile().read(message_len)
if(config.debug):
print "code=",code
print "msg=", msg
print "headers=", headers
print "content-type=", content_type
print "data=", data
if config.dumpHeadersIn:
s = 'Incoming HTTP headers'
debugHeader(s)
if headers.headers:
print "HTTP/1.? %d %s" % (code, msg)
print "\n".join(map (lambda x: x.strip(), headers.headers))
else:
print "HTTP/0.9 %d %s" % (code, msg)
debugFooter(s)
def startswith(string, val):
return string[0:len(val)] == val
if code == 500 and not \
( startswith(content_type, "text/xml") and message_len > 0 ):
raise HTTPError(code, msg)
if config.dumpSOAPIn:
s = 'Incoming SOAP'
debugHeader(s)
print data,
if (len(data)>0) and (data[-1] != '\n'):
print
debugFooter(s)
if code not in (200, 500):
raise HTTPError(code, msg)
# get the new namespace
if namespace is None:
new_ns = None
else:
new_ns = self.getNS(namespace, data)
# return response payload
return data, new_ns
################################################################################
# SOAP Proxy
################################################################################
class SOAPProxy:
def __init__(self, proxy, namespace = None, soapaction = None,
header = None, methodattrs = None, transport = HTTPTransport,
encoding = 'UTF-8', throw_faults = 1, unwrap_results = None,
http_proxy=None, config = Config, noroot = 0,
simplify_objects=None, timeout=None):
# Test the encoding, raising an exception if it's not known
if encoding != None:
''.encode(encoding)
# get default values for unwrap_results and simplify_objects
# from config
if unwrap_results is None:
self.unwrap_results=config.unwrap_results
else:
self.unwrap_results=unwrap_results
if simplify_objects is None:
self.simplify_objects=config.simplify_objects
else:
self.simplify_objects=simplify_objects
self.proxy = SOAPAddress(proxy, config)
self.namespace = namespace
self.soapaction = soapaction
self.header = header
self.methodattrs = methodattrs
self.transport = transport()
self.encoding = encoding
self.throw_faults = throw_faults
self.http_proxy = http_proxy
self.config = config
self.noroot = noroot
self.timeout = timeout
# GSI Additions
if hasattr(config, "channel_mode") and \
hasattr(config, "delegation_mode"):
self.channel_mode = config.channel_mode
self.delegation_mode = config.delegation_mode
#end GSI Additions
def invoke(self, method, args):
return self.__call(method, args, {})
def __call(self, name, args, kw, ns = None, sa = None, hd = None,
ma = None):
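        """Build the SOAP envelope for the named method, send it through the
        transport, and return the parsed (and optionally unwrapped/simplified)
        response."""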
ns = ns or self.namespace
ma = ma or self.methodattrs
if sa: # Get soapaction
if type(sa) == TupleType:
sa = sa[0]
else:
if self.soapaction:
sa = self.soapaction
else:
sa = name
if hd: # Get header
if type(hd) == TupleType:
hd = hd[0]
else:
hd = self.header
hd = hd or self.header
if ma: # Get methodattrs
if type(ma) == TupleType: ma = ma[0]
else:
ma = self.methodattrs
ma = ma or self.methodattrs
m = buildSOAP(args = args, kw = kw, method = name, namespace = ns,
header = hd, methodattrs = ma, encoding = self.encoding,
config = self.config, noroot = self.noroot)
call_retry = 0
try:
r, self.namespace = self.transport.call(self.proxy, m, ns, sa,
encoding = self.encoding,
http_proxy = self.http_proxy,
config = self.config,
timeout = self.timeout)
except socket.timeout:
raise SOAPTimeoutError
except Exception, ex:
#
# Call failed.
#
# See if we have a fault handling vector installed in our
# config. If we do, invoke it. If it returns a true value,
# retry the call.
#
# In any circumstance other than the fault handler returning
# true, reraise the exception. This keeps the semantics of this
# code the same as without the faultHandler code.
#
if hasattr(self.config, "faultHandler"):
if callable(self.config.faultHandler):
call_retry = self.config.faultHandler(self.proxy, ex)
if not call_retry:
raise
else:
raise
else:
raise
if call_retry:
try:
r, self.namespace = self.transport.call(self.proxy, m, ns, sa,
encoding = self.encoding,
http_proxy = self.http_proxy,
config = self.config,
timeout = self.timeout)
except socket.timeout:
raise SOAPTimeoutError
p, attrs = parseSOAPRPC(r, attrs = 1)
try:
throw_struct = self.throw_faults and \
isinstance (p, faultType)
except:
throw_struct = 0
if throw_struct:
if Config.debug:
print p
raise p
        # If unwrap_results=1 and there is only one element in the struct,
        # SOAPProxy will assume that this element is the result
        # and return it rather than the struct containing it.
        # Otherwise SOAPProxy will return the struct with all the
# elements as attributes.
if self.unwrap_results:
try:
count = 0
for i in p.__dict__.keys():
if i[0] != "_": # don't count the private stuff
count += 1
t = getattr(p, i)
if count == 1: # Only one piece of data, bubble it up
p = t
except:
pass
        # Automatically simplify SOAP complex types into the
# corresponding python types. (structType --> dict,
# arrayType --> array, etc.)
if self.simplify_objects:
p = simplify(p)
if self.config.returnAllAttrs:
return p, attrs
return p
def _callWithBody(self, body):
return self.__call(None, body, {})
def __getattr__(self, name): # hook to catch method calls
if name in ( '__del__', '__getinitargs__', '__getnewargs__',
'__getstate__', '__setstate__', '__reduce__', '__reduce_ex__'):
raise AttributeError, name
return self.__Method(self.__call, name, config = self.config)
# To handle attribute weirdness
class __Method:
# Some magic to bind a SOAP method to an RPC server.
# Supports "nested" methods (e.g. examples.getStateName) -- concept
# borrowed from xmlrpc/soaplib -- www.pythonware.com
# Altered (improved?) to let you inline namespaces on a per call
# basis ala SOAP::LITE -- www.soaplite.com
def __init__(self, call, name, ns = None, sa = None, hd = None,
ma = None, config = Config):
self.__call = call
self.__name = name
self.__ns = ns
self.__sa = sa
self.__hd = hd
self.__ma = ma
self.__config = config
return
def __call__(self, *args, **kw):
if self.__name[0] == "_":
if self.__name in ["__repr__","__str__"]:
return self.__repr__()
else:
return self.__f_call(*args, **kw)
else:
return self.__r_call(*args, **kw)
def __getattr__(self, name):
if name == '__del__':
raise AttributeError, name
if self.__name[0] == "_":
# Don't nest method if it is a directive
return self.__class__(self.__call, name, self.__ns,
self.__sa, self.__hd, self.__ma)
return self.__class__(self.__call, "%s.%s" % (self.__name, name),
self.__ns, self.__sa, self.__hd, self.__ma)
def __f_call(self, *args, **kw):
if self.__name == "_ns": self.__ns = args
elif self.__name == "_sa": self.__sa = args
elif self.__name == "_hd": self.__hd = args
elif self.__name == "_ma": self.__ma = args
return self
def __r_call(self, *args, **kw):
return self.__call(self.__name, args, kw, self.__ns, self.__sa,
self.__hd, self.__ma)
def __repr__(self):
return "<%s at %d>" % (self.__class__, id(self))
| gpl-3.0 |
vipulroxx/sympy | sympy/physics/quantum/circuitplot.py | 58 | 12941 | """Matplotlib based plotting of quantum circuits.
Todo:
* Optimize printing of large circuits.
* Get this to work with single gates.
* Do a better job checking the form of circuits to make sure it is a Mul of
Gates.
* Get multi-target gates plotting.
* Get initial and final states to plot.
* Get measurements to plot. Might need to rethink measurement as a gate
issue.
* Get scale and figsize to be handled in a better way.
* Write some tests/examples!
"""
from __future__ import print_function, division
from sympy import Mul
from sympy.core.compatibility import u, range
from sympy.external import import_module
from sympy.physics.quantum.gate import Gate, OneQubitGate, CGate, CGateS
from sympy.core.core import BasicMeta
from sympy.core.assumptions import ManagedProperties
__all__ = [
'CircuitPlot',
'circuit_plot',
'labeller',
'Mz',
'Mx',
'CreateOneQubitGate',
'CreateCGate',
]
np = import_module('numpy')
matplotlib = import_module(
'matplotlib', __import__kwargs={'fromlist': ['pyplot']},
catch=(RuntimeError,)) # This is raised in environments that have no display.
if not np or not matplotlib:
class CircuitPlot(object):
def __init__(*args, **kwargs):
raise ImportError('numpy or matplotlib not available.')
def circuit_plot(*args, **kwargs):
raise ImportError('numpy or matplotlib not available.')
else:
pyplot = matplotlib.pyplot
Line2D = matplotlib.lines.Line2D
Circle = matplotlib.patches.Circle
#from matplotlib import rc
#rc('text',usetex=True)
class CircuitPlot(object):
"""A class for managing a circuit plot."""
scale = 1.0
fontsize = 20.0
linewidth = 1.0
control_radius = 0.05
not_radius = 0.15
swap_delta = 0.05
labels = []
inits = {}
label_buffer = 0.5
def __init__(self, c, nqubits, **kwargs):
self.circuit = c
self.ngates = len(self.circuit.args)
self.nqubits = nqubits
self.update(kwargs)
self._create_grid()
self._create_figure()
self._plot_wires()
self._plot_gates()
self._finish()
def update(self, kwargs):
"""Load the kwargs into the instance dict."""
self.__dict__.update(kwargs)
def _create_grid(self):
"""Create the grid of wires."""
scale = self.scale
wire_grid = np.arange(0.0, self.nqubits*scale, scale, dtype=float)
gate_grid = np.arange(0.0, self.ngates*scale, scale, dtype=float)
self._wire_grid = wire_grid
self._gate_grid = gate_grid
def _create_figure(self):
"""Create the main matplotlib figure."""
self._figure = pyplot.figure(
figsize=(self.ngates*self.scale, self.nqubits*self.scale),
facecolor='w',
edgecolor='w'
)
ax = self._figure.add_subplot(
1, 1, 1,
frameon=True
)
ax.set_axis_off()
offset = 0.5*self.scale
ax.set_xlim(self._gate_grid[0] - offset, self._gate_grid[-1] + offset)
ax.set_ylim(self._wire_grid[0] - offset, self._wire_grid[-1] + offset)
ax.set_aspect('equal')
self._axes = ax
def _plot_wires(self):
"""Plot the wires of the circuit diagram."""
xstart = self._gate_grid[0]
xstop = self._gate_grid[-1]
xdata = (xstart - self.scale, xstop + self.scale)
for i in range(self.nqubits):
ydata = (self._wire_grid[i], self._wire_grid[i])
line = Line2D(
xdata, ydata,
color='k',
lw=self.linewidth
)
self._axes.add_line(line)
if self.labels:
init_label_buffer = 0
if self.inits.get(self.labels[i]): init_label_buffer = 0.25
self._axes.text(
xdata[0]-self.label_buffer-init_label_buffer,ydata[0],
render_label(self.labels[i],self.inits),
size=self.fontsize,
color='k',ha='center',va='center')
self._plot_measured_wires()
def _plot_measured_wires(self):
ismeasured = self._measurements()
xstop = self._gate_grid[-1]
dy = 0.04 # amount to shift wires when doubled
# Plot doubled wires after they are measured
for im in ismeasured:
xdata = (self._gate_grid[ismeasured[im]],xstop+self.scale)
ydata = (self._wire_grid[im]+dy,self._wire_grid[im]+dy)
line = Line2D(
xdata, ydata,
color='k',
lw=self.linewidth
)
self._axes.add_line(line)
# Also double any controlled lines off these wires
for i,g in enumerate(self._gates()):
if isinstance(g, CGate) or isinstance(g, CGateS):
wires = g.controls + g.targets
for wire in wires:
if wire in ismeasured and \
self._gate_grid[i] > self._gate_grid[ismeasured[wire]]:
ydata = min(wires), max(wires)
xdata = self._gate_grid[i]-dy, self._gate_grid[i]-dy
line = Line2D(
xdata, ydata,
color='k',
lw=self.linewidth
)
self._axes.add_line(line)
def _gates(self):
"""Create a list of all gates in the circuit plot."""
gates = []
if isinstance(self.circuit, Mul):
for g in reversed(self.circuit.args):
if isinstance(g, Gate):
gates.append(g)
elif isinstance(self.circuit, Gate):
gates.append(self.circuit)
return gates
def _plot_gates(self):
"""Iterate through the gates and plot each of them."""
for i, gate in enumerate(self._gates()):
gate.plot_gate(self, i)
def _measurements(self):
"""Return a dict {i:j} where i is the index of the wire that has
been measured, and j is the gate where the wire is measured.
"""
ismeasured = {}
for i,g in enumerate(self._gates()):
if getattr(g,'measurement',False):
for target in g.targets:
if target in ismeasured:
if ismeasured[target] > i:
ismeasured[target] = i
else:
ismeasured[target] = i
return ismeasured
def _finish(self):
# Disable clipping to make panning work well for large circuits.
for o in self._figure.findobj():
o.set_clip_on(False)
def one_qubit_box(self, t, gate_idx, wire_idx):
"""Draw a box for a single qubit gate."""
x = self._gate_grid[gate_idx]
y = self._wire_grid[wire_idx]
self._axes.text(
x, y, t,
color='k',
ha='center',
va='center',
bbox=dict(ec='k', fc='w', fill=True, lw=self.linewidth),
size=self.fontsize
)
def two_qubit_box(self, t, gate_idx, wire_idx):
"""Draw a box for a two qubit gate. Doesn't work yet.
"""
x = self._gate_grid[gate_idx]
y = self._wire_grid[wire_idx]+0.5
print(self._gate_grid)
print(self._wire_grid)
obj = self._axes.text(
x, y, t,
color='k',
ha='center',
va='center',
bbox=dict(ec='k', fc='w', fill=True, lw=self.linewidth),
size=self.fontsize
)
def control_line(self, gate_idx, min_wire, max_wire):
"""Draw a vertical control line."""
xdata = (self._gate_grid[gate_idx], self._gate_grid[gate_idx])
ydata = (self._wire_grid[min_wire], self._wire_grid[max_wire])
line = Line2D(
xdata, ydata,
color='k',
lw=self.linewidth
)
self._axes.add_line(line)
def control_point(self, gate_idx, wire_idx):
"""Draw a control point."""
x = self._gate_grid[gate_idx]
y = self._wire_grid[wire_idx]
radius = self.control_radius
c = Circle(
(x, y),
radius*self.scale,
ec='k',
fc='k',
fill=True,
lw=self.linewidth
)
self._axes.add_patch(c)
def not_point(self, gate_idx, wire_idx):
"""Draw a NOT gates as the circle with plus in the middle."""
x = self._gate_grid[gate_idx]
y = self._wire_grid[wire_idx]
radius = self.not_radius
c = Circle(
(x, y),
radius,
ec='k',
fc='w',
fill=False,
lw=self.linewidth
)
self._axes.add_patch(c)
l = Line2D(
(x, x), (y - radius, y + radius),
color='k',
lw=self.linewidth
)
self._axes.add_line(l)
def swap_point(self, gate_idx, wire_idx):
"""Draw a swap point as a cross."""
x = self._gate_grid[gate_idx]
y = self._wire_grid[wire_idx]
d = self.swap_delta
l1 = Line2D(
(x - d, x + d),
(y - d, y + d),
color='k',
lw=self.linewidth
)
l2 = Line2D(
(x - d, x + d),
(y + d, y - d),
color='k',
lw=self.linewidth
)
self._axes.add_line(l1)
self._axes.add_line(l2)
def circuit_plot(c, nqubits, **kwargs):
"""Draw the circuit diagram for the circuit with nqubits.
Parameters
==========
c : circuit
The circuit to plot. Should be a product of Gate instances.
nqubits : int
The number of qubits to include in the circuit. Must be at least
        as big as the largest ``min_qubits`` of the gates.
"""
return CircuitPlot(c, nqubits, **kwargs)
def render_label(label, inits={}):
"""Slightly more flexible way to render labels.
>>> from sympy.physics.quantum.circuitplot import render_label
>>> render_label('q0')
'$|q0\\\\rangle$'
>>> render_label('q0', {'q0':'0'})
'$|q0\\\\rangle=|0\\\\rangle$'
"""
init = inits.get(label)
if init:
return r'$|%s\rangle=|%s\rangle$' % (label, init)
return r'$|%s\rangle$' % label
def labeller(n, symbol='q'):
"""Autogenerate labels for wires of quantum circuits.
Parameters
==========
n : int
number of qubits in the circuit
symbol : string
A character string to precede all gate labels. E.g. 'q_0', 'q_1', etc.
>>> from sympy.physics.quantum.circuitplot import labeller
>>> labeller(2)
['q_1', 'q_0']
>>> labeller(3,'j')
['j_2', 'j_1', 'j_0']
"""
return ['%s_%d' % (symbol,n-i-1) for i in range(n)]
class Mz(OneQubitGate):
"""Mock-up of a z measurement gate.
This is in circuitplot rather than gate.py because it's not a real
gate, it just draws one.
"""
measurement = True
gate_name='Mz'
gate_name_latex=u('M_z')
class Mx(OneQubitGate):
"""Mock-up of an x measurement gate.
This is in circuitplot rather than gate.py because it's not a real
gate, it just draws one.
"""
measurement = True
gate_name='Mx'
gate_name_latex=u('M_x')
class CreateOneQubitGate(ManagedProperties):
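    """Metaclass helper that dynamically creates a named OneQubitGate subclass
    with the given gate name and LaTeX label."""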
def __new__(mcl, name, latexname=None):
if not latexname:
latexname = name
return BasicMeta.__new__(mcl, name + "Gate", (OneQubitGate,),
{'gate_name': name, 'gate_name_latex': latexname})
def CreateCGate(name, latexname=None):
"""Use a lexical closure to make a controlled gate.
"""
if not latexname:
latexname = name
onequbitgate = CreateOneQubitGate(name, latexname)
def ControlledGate(ctrls,target):
return CGate(tuple(ctrls),onequbitgate(target))
return ControlledGate
| bsd-3-clause |
ShassAro/ShassAro | Bl_project/blVirtualEnv/lib/python2.7/site-packages/rest_framework/validators.py | 8 | 9383 | """
We perform uniqueness checks explicitly on the serializer class, rather
than using Django's `.full_clean()`.
This gives us better separation of concerns, allows us to use single-step
object creation, and makes it possible to switch between using the implicit
`ModelSerializer` class and an equivalent explicit `Serializer` class.
"""
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from rest_framework.compat import unicode_to_repr
from rest_framework.exceptions import ValidationError
from rest_framework.utils.representation import smart_repr
class UniqueValidator:
"""
Validator that corresponds to `unique=True` on a model field.
Should be applied to an individual field on the serializer.
"""
message = _('This field must be unique.')
def __init__(self, queryset, message=None):
self.queryset = queryset
self.serializer_field = None
self.message = message or self.message
def set_context(self, serializer_field):
"""
This hook is called by the serializer instance,
prior to the validation call being made.
"""
# Determine the underlying model field name. This may not be the
# same as the serializer field name if `source=<>` is set.
self.field_name = serializer_field.source_attrs[0]
# Determine the existing instance, if this is an update operation.
self.instance = getattr(serializer_field.parent, 'instance', None)
def filter_queryset(self, value, queryset):
"""
Filter the queryset to all instances matching the given attribute.
"""
filter_kwargs = {self.field_name: value}
return queryset.filter(**filter_kwargs)
def exclude_current_instance(self, queryset):
"""
If an instance is being updated, then do not include
that instance itself as a uniqueness conflict.
"""
if self.instance is not None:
return queryset.exclude(pk=self.instance.pk)
return queryset
def __call__(self, value):
queryset = self.queryset
queryset = self.filter_queryset(value, queryset)
queryset = self.exclude_current_instance(queryset)
if queryset.exists():
raise ValidationError(self.message)
def __repr__(self):
return unicode_to_repr('<%s(queryset=%s)>' % (
self.__class__.__name__,
smart_repr(self.queryset)
))
class UniqueTogetherValidator:
"""
Validator that corresponds to `unique_together = (...)` on a model class.
Should be applied to the serializer class, not to an individual field.
"""
message = _('The fields {field_names} must make a unique set.')
missing_message = _('This field is required.')
def __init__(self, queryset, fields, message=None):
self.queryset = queryset
self.fields = fields
self.serializer_field = None
self.message = message or self.message
def set_context(self, serializer):
"""
This hook is called by the serializer instance,
prior to the validation call being made.
"""
# Determine the existing instance, if this is an update operation.
self.instance = getattr(serializer, 'instance', None)
def enforce_required_fields(self, attrs):
"""
The `UniqueTogetherValidator` always forces an implied 'required'
state on the fields it applies to.
"""
if self.instance is not None:
return
missing = dict([
(field_name, self.missing_message)
for field_name in self.fields
if field_name not in attrs
])
if missing:
raise ValidationError(missing)
def filter_queryset(self, attrs, queryset):
"""
Filter the queryset to all instances matching the given attributes.
"""
# If this is an update, then any unprovided field should
        # have its value set based on the existing instance attribute.
if self.instance is not None:
for field_name in self.fields:
if field_name not in attrs:
attrs[field_name] = getattr(self.instance, field_name)
# Determine the filter keyword arguments and filter the queryset.
filter_kwargs = dict([
(field_name, attrs[field_name])
for field_name in self.fields
])
return queryset.filter(**filter_kwargs)
def exclude_current_instance(self, attrs, queryset):
"""
If an instance is being updated, then do not include
that instance itself as a uniqueness conflict.
"""
if self.instance is not None:
return queryset.exclude(pk=self.instance.pk)
return queryset
def __call__(self, attrs):
self.enforce_required_fields(attrs)
queryset = self.queryset
queryset = self.filter_queryset(attrs, queryset)
queryset = self.exclude_current_instance(attrs, queryset)
if queryset.exists():
field_names = ', '.join(self.fields)
raise ValidationError(self.message.format(field_names=field_names))
def __repr__(self):
return unicode_to_repr('<%s(queryset=%s, fields=%s)>' % (
self.__class__.__name__,
smart_repr(self.queryset),
smart_repr(self.fields)
))
class BaseUniqueForValidator:
message = None
missing_message = _('This field is required.')
def __init__(self, queryset, field, date_field, message=None):
self.queryset = queryset
self.field = field
self.date_field = date_field
self.message = message or self.message
def set_context(self, serializer):
"""
This hook is called by the serializer instance,
prior to the validation call being made.
"""
# Determine the underlying model field names. These may not be the
# same as the serializer field names if `source=<>` is set.
self.field_name = serializer.fields[self.field].source_attrs[0]
self.date_field_name = serializer.fields[self.date_field].source_attrs[0]
# Determine the existing instance, if this is an update operation.
self.instance = getattr(serializer, 'instance', None)
def enforce_required_fields(self, attrs):
"""
The `UniqueFor<Range>Validator` classes always force an implied
'required' state on the fields they are applied to.
"""
missing = dict([
(field_name, self.missing_message)
for field_name in [self.field, self.date_field]
if field_name not in attrs
])
if missing:
raise ValidationError(missing)
def filter_queryset(self, attrs, queryset):
raise NotImplementedError('`filter_queryset` must be implemented.')
def exclude_current_instance(self, attrs, queryset):
"""
If an instance is being updated, then do not include
that instance itself as a uniqueness conflict.
"""
if self.instance is not None:
return queryset.exclude(pk=self.instance.pk)
return queryset
def __call__(self, attrs):
self.enforce_required_fields(attrs)
queryset = self.queryset
queryset = self.filter_queryset(attrs, queryset)
queryset = self.exclude_current_instance(attrs, queryset)
if queryset.exists():
message = self.message.format(date_field=self.date_field)
raise ValidationError({self.field: message})
def __repr__(self):
return unicode_to_repr('<%s(queryset=%s, field=%s, date_field=%s)>' % (
self.__class__.__name__,
smart_repr(self.queryset),
smart_repr(self.field),
smart_repr(self.date_field)
))
class UniqueForDateValidator(BaseUniqueForValidator):
message = _('This field must be unique for the "{date_field}" date.')
def filter_queryset(self, attrs, queryset):
value = attrs[self.field]
date = attrs[self.date_field]
filter_kwargs = {}
filter_kwargs[self.field_name] = value
filter_kwargs['%s__day' % self.date_field_name] = date.day
filter_kwargs['%s__month' % self.date_field_name] = date.month
filter_kwargs['%s__year' % self.date_field_name] = date.year
return queryset.filter(**filter_kwargs)
class UniqueForMonthValidator(BaseUniqueForValidator):
message = _('This field must be unique for the "{date_field}" month.')
def filter_queryset(self, attrs, queryset):
value = attrs[self.field]
date = attrs[self.date_field]
filter_kwargs = {}
filter_kwargs[self.field_name] = value
filter_kwargs['%s__month' % self.date_field_name] = date.month
return queryset.filter(**filter_kwargs)
class UniqueForYearValidator(BaseUniqueForValidator):
message = _('This field must be unique for the "{date_field}" year.')
def filter_queryset(self, attrs, queryset):
value = attrs[self.field]
date = attrs[self.date_field]
filter_kwargs = {}
filter_kwargs[self.field_name] = value
filter_kwargs['%s__year' % self.date_field_name] = date.year
return queryset.filter(**filter_kwargs)
| gpl-2.0 |
aleisalem/Aion | tools/visualizeFeatureVectors.py | 1 | 5900 | #!/usr/bin/python
from Aion.data_generation.reconstruction.Numerical import *
from Aion.data_inference.visualization.visualizeData import *
from Aion.utils.graphics import *
from Aion.utils.data import *
import pickledb
import glob, sys, time, os, argparse, hashlib
def defineArguments():
parser = argparse.ArgumentParser(prog="visualizeFeatureVectors.py", description="A tool to reduce the dimensionality of data points and visualize them in 2- or 3-D.")
parser.add_argument("-p", "--datasetpath", help="The directory containing the feature vectors", required=True)
parser.add_argument("-t", "--datasettype", help="The type of the feature vectors to load: indicates the type of experiment and the file extensions", required=True, choices=["static", "dynamic"])
parser.add_argument("-a", "--algorithm", help="The dimensionality reduction algorithm to use", required=False, default="tsne", choices=["tsne", "pca"])
parser.add_argument("-d", "--dimensionality", help="The target dimensionality to which the feature vectors are projected", required=False, default="2", choices=["2", "3"])
parser.add_argument("-s", "--figuresize", help="The size of the Plotly figure", required=False, default="(1024, 1024)")
return parser
def main():
try:
argumentParser = defineArguments()
arguments = argumentParser.parse_args()
prettyPrint("Welcome to the \"Aion\"'s experiment I")
# Check the existence of the dataset directories
if not os.path.exists("%s/malware/" % arguments.datasetpath) or not os.path.exists("%s/goodware/" % arguments.datasetpath):
prettyPrint("Could not find malware or goodware directories under \"%s\". Exiting" % arguments.datasetpath, "warning")
return False
# Retrieve the data
fileExtension = "static" if arguments.datasettype == "static" else "num"
allFiles = glob.glob("%s/malware/*.%s" % (arguments.datasetpath, fileExtension)) + glob.glob("%s/goodware/*.%s" % (arguments.datasetpath, fileExtension))
if len(allFiles) < 1:
prettyPrint("Could not retrieve any \".%s\" files from the dataset directory \"%s\". Exiting" % (fileExtension, arguments.datasetpath), "warning")
return False
prettyPrint("Successfully retrieved %s \".%s\" files from the dataset directory \"%s\"" % (len(allFiles), fileExtension, arguments.datasetpath))
# Load the data
X, y = [], []
appNames = []
hashesDB = pickledb.load(getHashesDBPath(), False) # Open the hashes key-value store
prettyPrint("Attempting to load feature vectors")
for f in allFiles:
featureVector = loadNumericalFeatures(f)
if len(featureVector) < 1:
continue
else:
# Retrieve app name from path
appKey = f[f.rfind('/')+1:].replace(".%s" % fileExtension, "").lower()
appName = hashesDB.get(appKey)
if appName == None:
appKey = appKey + ".apk"
appName = hashesDB.get(hashlib.sha256(appKey).hexdigest())
if appName == None:
appName = f[f.rfind("/")+1:f.rfind(".")]
if f.lower().find("malware") != -1:
y.append(1)
else:
y.append(0)
X.append(featureVector)
appNames.append(appName)
if verboseON():
prettyPrint("App \"%s\" matched to package name \"%s\"" % (f, appName), "debug")
if len(X) < 1:
prettyPrint("Could not load any numerical feature vectors. Exiting", "warning")
return False
prettyPrint("Successfully retrieved and parsed %s numerical feature vectors" % len(X))
# Perform visualization
if arguments.datasettype == "static":
# Retrieve different types of features
X_basic = [x[:6] for x in X]
X_perm = [x[6:10] for x in X]
X_api = [x[10:] for x in X]
# Reduce and visualize features
figureTitle = "Combined static features in %sD" % arguments.dimensionality
prettyPrint("Visualizing combined static features in %sD" % arguments.dimensionality)
reduceAndVisualize(X, y, int(arguments.dimensionality), arguments.algorithm, eval(arguments.figuresize), figureTitle, appNames=appNames)
figureTitle = "Basic static features in %sD" % arguments.dimensionality
prettyPrint("Visualizing basic features in %sD" % arguments.dimensionality)
reduceAndVisualize(X_basic, y, int(arguments.dimensionality), arguments.algorithm, eval(arguments.figuresize), figureTitle, appNames=appNames)
figureTitle = "Permission-based static features in %sD" % arguments.dimensionality
prettyPrint("Visualizing permission-based features in %sD" % arguments.dimensionality)
reduceAndVisualize(X_perm, y, int(arguments.dimensionality), arguments.algorithm, eval(arguments.figuresize), figureTitle, appNames=appNames)
figureTitle = "API static features in %sD" % arguments.dimensionality
prettyPrint("Visualizing API call features in %sD" % arguments.dimensionality)
reduceAndVisualize(X_api, y, int(arguments.dimensionality), arguments.algorithm, eval(arguments.figuresize), figureTitle, appNames=appNames)
else:
figureTitle = "Dynamic Introspy features in %sD" % arguments.dimensionality
            reduceAndVisualize(X, y, int(arguments.dimensionality), arguments.algorithm, eval(arguments.figuresize), figureTitle, appNames=appNames)
except Exception as e:
prettyPrintError(e)
return False
return True
if __name__ == "__main__":
main()
| gpl-3.0 |
nostradamIQ/webapp | services/twitter/server.py | 6 | 1750 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# serves the twitter files in redis via REST API on URL:8088
from cgi import parse_qs
import requests
import datetime
import os
import os.path
import redis
import json
REDIS = redis.Redis()
# Format:
# HOUR = HH (str)
# DATE = DD-MM-YYYY (str)
# ARRAY = key for keywords Dict that contains filterwords for twitter stream object (str)
# tweets_ARRAY_HOUR_DATE.geojson -> geoJSON object to be read by Cesium
# stats_ARRAY_HOUR_DATE -> {"All_Tweets_seen":countAll, "Location_Tweets_seen":countLoc, "All_Tweets_Intervall":countAll_intervall, "Location_Tweets_Intervall":countLoc_intervall}
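# Example key (hypothetical values): tweets_disasters_14_04-09-2015.geojson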
def app(environ, start_response):
start_response('200 OK', [('Content-Type', 'application/json'), ('Access-Control-Allow-Origin', '*')])
parameters = parse_qs(environ.get('QUERY_STRING', ''))
filename = "No filename given"
if 'file' in parameters:
filename = parameters['file'][0]
response = REDIS.get(filename)
if response != None:
return response
else:
# retrieve geoJSON file: # ONLY IF WE DON'T DELETE THEM!
if os.path.isfile(filename):
with open(filename, 'r') as responseFile:
                    response = responseFile.read()  # serve the raw geoJSON text, matching the Redis branch
if response != None:
return response
print "ERROR: FILE NOT FOUND!\n"
return ""
print "ERROR: REQUEST COULD NOT BE MADE!\n"
return ""
if __name__ == '__main__':
from wsgiref.simple_server import make_server
url = ''
port = 8088
srv = make_server(url, port, app)
print "Twitter-File-Server listening on {0}:{1}\n".format(url, port)
srv.serve_forever() | gpl-3.0 |
chinmaygarde/mojo | tools/android/findbugs_plugin/test/run_findbugs_plugin_tests.py | 63 | 3602 | #!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is used to test the findbugs plugin; it calls
# build/android/pylib/utils/findbugs.py to analyze the classes in the
# org.chromium.tools.findbugs.plugin package, and expects to get the same
# issues as those in expected_result.txt.
#
# Useful command line:
# --rebaseline to generate the expected_result.txt; please make sure you don't
# remove the expected results of existing tests.
import argparse
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', '..', '..',
'build', 'android')))
from pylib import constants
from pylib.utils import findbugs
_EXPECTED_WARNINGS = set([
findbugs.FindBugsWarning(
bug_type='CHROMIUM_SYNCHRONIZED_THIS',
start_line=15,
end_line=15,
file_name='SimpleSynchronizedThis.java',
message=(
"Shouldn't use synchronized(this)",
'In class org.chromium.tools.findbugs.plugin.'
+ 'SimpleSynchronizedThis',
'In method org.chromium.tools.findbugs.plugin.'
+ 'SimpleSynchronizedThis.synchronizedThis()',
'At SimpleSynchronizedThis.java:[line 15]',
)),
findbugs.FindBugsWarning(
bug_type='CHROMIUM_SYNCHRONIZED_METHOD',
start_line=14,
end_line=14,
file_name='SimpleSynchronizedStaticMethod.java',
message=(
"Shouldn't use synchronized method",
'In class org.chromium.tools.findbugs.plugin.'
+ 'SimpleSynchronizedStaticMethod',
'In method org.chromium.tools.findbugs.plugin.'
+ 'SimpleSynchronizedStaticMethod.synchronizedStaticMethod()',
'At SimpleSynchronizedStaticMethod.java:[line 14]',
)),
findbugs.FindBugsWarning(
bug_type='CHROMIUM_SYNCHRONIZED_METHOD',
start_line=15,
end_line=15,
file_name='SimpleSynchronizedMethod.java',
message=(
"Shouldn't use synchronized method",
'In class org.chromium.tools.findbugs.plugin.'
+ 'SimpleSynchronizedMethod',
'In method org.chromium.tools.findbugs.plugin.'
+ 'SimpleSynchronizedMethod.synchronizedMethod()',
'At SimpleSynchronizedMethod.java:[line 15]',
)),
])
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
'-l', '--release-build', action='store_true', dest='release',
help='Run the release build of the findbugs plugin test.')
args = parser.parse_args()
test_jar_path = os.path.join(
constants.GetOutDirectory(
'Release' if args.release else 'Debug'),
'lib.java', 'findbugs_plugin_test.jar')
findbugs_command, findbugs_warnings = findbugs.Run(
None, 'org.chromium.tools.findbugs.plugin.*', None, None, None,
[test_jar_path])
missing_warnings = _EXPECTED_WARNINGS.difference(findbugs_warnings)
if missing_warnings:
print 'Missing warnings:'
for w in missing_warnings:
print '%s' % str(w)
unexpected_warnings = findbugs_warnings.difference(_EXPECTED_WARNINGS)
if unexpected_warnings:
print 'Unexpected warnings:'
for w in unexpected_warnings:
print '%s' % str(w)
return len(unexpected_warnings) + len(missing_warnings)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
BurtBiel/azure-cli | src/command_modules/azure-cli-network/azure/cli/command_modules/network/mgmt_nic/lib/models/template_link.py | 1 | 1664 | #---------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#---------------------------------------------------------------------------------------------
#pylint: skip-file
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TemplateLink(Model):
"""
Entity representing the reference to the template.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar uri: URI referencing the template. Default value:
"https://azuresdkci.blob.core.windows.net/templatehost/CreateNic_2016-08-01/azuredeploy.json"
.
:vartype uri: str
:param content_version: If included it must match the ContentVersion in
the template.
:type content_version: str
"""
_validation = {
'uri': {'required': True, 'constant': True},
}
_attribute_map = {
'uri': {'key': 'uri', 'type': 'str'},
'content_version': {'key': 'contentVersion', 'type': 'str'},
}
uri = "https://azuresdkci.blob.core.windows.net/templatehost/CreateNic_2016-08-01/azuredeploy.json"
def __init__(self, content_version=None):
self.content_version = content_version
| mit |
memtoko/django | tests/auth_tests/urls.py | 37 | 4594 | from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.urls import urlpatterns
from django.contrib.messages.api import info
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import RequestContext, Template
from django.views.decorators.cache import never_cache
class CustomRequestAuthenticationForm(AuthenticationForm):
def __init__(self, request, *args, **kwargs):
assert isinstance(request, HttpRequest)
super(CustomRequestAuthenticationForm, self).__init__(request, *args, **kwargs)
@never_cache
def remote_user_auth_view(request):
"Dummy view for remote user tests"
t = Template("Username is {{ user }}.")
c = RequestContext(request, {})
return HttpResponse(t.render(c))
def auth_processor_no_attr_access(request):
render(request, 'context_processors/auth_attrs_no_access.html')
# *After* rendering, we check whether the session was accessed
return render(request,
'context_processors/auth_attrs_test_access.html',
{'session_accessed': request.session.accessed})
def auth_processor_attr_access(request):
render(request, 'context_processors/auth_attrs_access.html')
return render(request,
'context_processors/auth_attrs_test_access.html',
{'session_accessed': request.session.accessed})
def auth_processor_user(request):
return render(request, 'context_processors/auth_attrs_user.html')
def auth_processor_perms(request):
return render(request, 'context_processors/auth_attrs_perms.html')
def auth_processor_perm_in_perms(request):
return render(request, 'context_processors/auth_attrs_perm_in_perms.html')
def auth_processor_messages(request):
info(request, "Message 1")
return render(request, 'context_processors/auth_attrs_messages.html')
def userpage(request):
pass
def custom_request_auth_login(request):
return views.login(request, authentication_form=CustomRequestAuthenticationForm)
# special urls for auth test cases
urlpatterns += [
url(r'^logout/custom_query/$', views.logout, dict(redirect_field_name='follow')),
url(r'^logout/next_page/$', views.logout, dict(next_page='/somewhere/')),
url(r'^logout/next_page/named/$', views.logout, dict(next_page='password_reset')),
url(r'^remote_user/$', remote_user_auth_view),
url(r'^password_reset_from_email/$', views.password_reset, dict(from_email='[email protected]')),
url(r'^password_reset/custom_redirect/$', views.password_reset, dict(post_reset_redirect='/custom/')),
url(r'^password_reset/custom_redirect/named/$', views.password_reset, dict(post_reset_redirect='password_reset')),
url(r'^password_reset/html_email_template/$', views.password_reset,
dict(html_email_template_name='registration/html_password_reset_email.html')),
url(r'^reset/custom/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm,
dict(post_reset_redirect='/custom/')),
url(r'^reset/custom/named/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm,
dict(post_reset_redirect='password_reset')),
url(r'^password_change/custom/$', views.password_change, dict(post_change_redirect='/custom/')),
url(r'^password_change/custom/named/$', views.password_change, dict(post_change_redirect='password_reset')),
url(r'^admin_password_reset/$', views.password_reset, dict(is_admin_site=True)),
url(r'^login_required/$', login_required(views.password_reset)),
url(r'^login_required_login_url/$', login_required(views.password_reset, login_url='/somewhere/')),
url(r'^auth_processor_no_attr_access/$', auth_processor_no_attr_access),
url(r'^auth_processor_attr_access/$', auth_processor_attr_access),
url(r'^auth_processor_user/$', auth_processor_user),
url(r'^auth_processor_perms/$', auth_processor_perms),
url(r'^auth_processor_perm_in_perms/$', auth_processor_perm_in_perms),
url(r'^auth_processor_messages/$', auth_processor_messages),
url(r'^custom_request_auth_login/$', custom_request_auth_login),
url(r'^userpage/(.+)/$', userpage, name="userpage"),
# This line is only required to render the password reset with is_admin=True
url(r'^admin/', include(admin.site.urls)),
]
| bsd-3-clause |
nrc/servo | tests/wpt/css-tests/tools/wptserve/tests/functional/base.py | 293 | 1831 | import base64
import logging
import os
import unittest
import urllib
import urllib2
import urlparse
import wptserve
logging.basicConfig()
here = os.path.split(__file__)[0]
doc_root = os.path.join(here, "docroot")
class Request(urllib2.Request):
def __init__(self, *args, **kwargs):
urllib2.Request.__init__(self, *args, **kwargs)
self.method = "GET"
def get_method(self):
return self.method
def add_data(self, data):
if hasattr(data, "iteritems"):
data = urllib.urlencode(data)
print data
self.add_header("Content-Length", str(len(data)))
urllib2.Request.add_data(self, data)
class TestUsingServer(unittest.TestCase):
def setUp(self):
self.server = wptserve.server.WebTestHttpd(host="localhost",
port=0,
use_ssl=False,
certificate=None,
doc_root=doc_root)
self.server.start(False)
def tearDown(self):
self.server.stop()
def abs_url(self, path, query=None):
return urlparse.urlunsplit(("http", "%s:%i" % (self.server.host, self.server.port), path, query, None))
def request(self, path, query=None, method="GET", headers=None, body=None, auth=None):
req = Request(self.abs_url(path, query))
req.method = method
if headers is None:
headers = {}
for name, value in headers.iteritems():
req.add_header(name, value)
if body is not None:
req.add_data(body)
if auth is not None:
req.add_header("Authorization", "Basic %s" % base64.encodestring('%s:%s' % auth))
return urllib2.urlopen(req)
| mpl-2.0 |
eranchetz/nupic | tests/unit/nupic/data/generators/anomalyzer_test.py | 35 | 14938 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Tests for the anomalyzer."""
import csv
from mock import MagicMock, patch
from StringIO import StringIO
import unittest2 as unittest
from nupic.data.file_record_stream import FileRecordStream
from nupic.data.generators import anomalyzer
class AnomalyzerTest(unittest.TestCase):
"""Tests for the anomalyzer."""
def setUp(self):
self.sampleInput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 2:00:00.0,1\n"
"2011-09-04 2:05:00.0,2\n"
"2011-09-04 2:10:00.0,3\n"
"2011-09-04 2:15:00.0,4\n"
"2011-09-04 2:20:00.0,5\n"
"2011-09-04 2:25:00.0,6")
def testAddBeginning(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,9\n"
"2011-09-04 02:05:00.000000,10\n"
"2011-09-04 02:10:00.000000,3\n"
"2011-09-04 02:15:00.000000,4\n"
"2011-09-04 02:20:00.000000,5\n"
"2011-09-04 02:25:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.add(inputFile, outputFile, 1, 0, 1, 8)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testAddMiddle(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,10\n"
"2011-09-04 02:10:00.000000,11\n"
"2011-09-04 02:15:00.000000,4\n"
"2011-09-04 02:20:00.000000,5\n"
"2011-09-04 02:25:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.add(inputFile, outputFile, 1, 1, 2, 8)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testAddEnd(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,2\n"
"2011-09-04 02:10:00.000000,3\n"
"2011-09-04 02:15:00.000000,4\n"
"2011-09-04 02:20:00.000000,13\n"
"2011-09-04 02:25:00.000000,14\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.add(inputFile, outputFile, 1, 4, 5, 8)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testAddSingle(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,10\n"
"2011-09-04 02:10:00.000000,3\n"
"2011-09-04 02:15:00.000000,4\n"
"2011-09-04 02:20:00.000000,5\n"
"2011-09-04 02:25:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.add(inputFile, outputFile, 1, 1, 1, 8)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testAddAll(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,9\n"
"2011-09-04 02:05:00.000000,10\n"
"2011-09-04 02:10:00.000000,11\n"
"2011-09-04 02:15:00.000000,12\n"
"2011-09-04 02:20:00.000000,13\n"
"2011-09-04 02:25:00.000000,14\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.add(inputFile, outputFile, 1, 0, 5, 8)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testScale(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,16\n"
"2011-09-04 02:10:00.000000,24\n"
"2011-09-04 02:15:00.000000,4\n"
"2011-09-04 02:20:00.000000,5\n"
"2011-09-04 02:25:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.scale(inputFile, outputFile, 1, 1, 2, 8)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testCopyAllImplicit(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,2\n"
"2011-09-04 02:10:00.000000,3\n"
"2011-09-04 02:15:00.000000,4\n"
"2011-09-04 02:20:00.000000,5\n"
"2011-09-04 02:25:00.000000,6\n"
"2011-09-04 02:30:00.000000,1\n"
"2011-09-04 02:35:00.000000,2\n"
"2011-09-04 02:40:00.000000,3\n"
"2011-09-04 02:45:00.000000,4\n"
"2011-09-04 02:50:00.000000,5\n"
"2011-09-04 02:55:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.copy(inputFile, outputFile, 0, 5)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testCopyAllExplicit(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,2\n"
"2011-09-04 02:10:00.000000,3\n"
"2011-09-04 02:15:00.000000,4\n"
"2011-09-04 02:20:00.000000,5\n"
"2011-09-04 02:25:00.000000,6\n"
"2011-09-04 02:30:00.000000,1\n"
"2011-09-04 02:35:00.000000,2\n"
"2011-09-04 02:40:00.000000,3\n"
"2011-09-04 02:45:00.000000,4\n"
"2011-09-04 02:50:00.000000,5\n"
"2011-09-04 02:55:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.copy(inputFile, outputFile, 0, 5, 6)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testCopyBeginning(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,2\n"
"2011-09-04 02:10:00.000000,1\n"
"2011-09-04 02:15:00.000000,2\n"
"2011-09-04 02:20:00.000000,3\n"
"2011-09-04 02:25:00.000000,4\n"
"2011-09-04 02:30:00.000000,5\n"
"2011-09-04 02:35:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.copy(inputFile, outputFile, 0, 1, 0)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testCopyOneRow(self):
expectedOutput = ("Timestamp,Value\n"
"datetime,int\n"
"T,\n"
"2011-09-04 02:00:00.000000,1\n"
"2011-09-04 02:05:00.000000,2\n"
"2011-09-04 02:10:00.000000,2\n"
"2011-09-04 02:15:00.000000,3\n"
"2011-09-04 02:20:00.000000,4\n"
"2011-09-04 02:25:00.000000,5\n"
"2011-09-04 02:30:00.000000,6\n")
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.copy(inputFile, outputFile, 1, 1, 1)
result = output.getvalue()
result = result.replace("\r\n", "\n")
result = result.replace("\r", "\n")
self.assertSequenceEqual(expectedOutput, result)
def testSample(self):
mockInput = MagicMock(return_value=StringIO(self.sampleInput))
output = StringIO()
mockOutput = MagicMock(return_value=output)
with patch("__builtin__.open", mockInput):
inputFile = FileRecordStream("input_path")
with patch("__builtin__.open", mockOutput):
outputFile = FileRecordStream("output_path",
fields=inputFile.getFields(),
write=True)
anomalyzer.sample(inputFile, outputFile, 1)
result = StringIO(output.getvalue())
result.next()
result.next()
result.next()
reader = csv.reader(result)
_, value = reader.next()
self.assertIn(int(value), (1, 2, 3, 4, 5, 6))
self.assertRaises(StopIteration, result.next)
if __name__ == "__main__":
unittest.main()
| agpl-3.0 |
artmusic0/theano-learning.part02 | cPickle_test/cPickle_out-v3.py | 1 | 2430 | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 07 11:30:17 2015
@author: AkishinoShiame
"""
import cPickle, gzip, theano.tensor
### file output code below
try:
out1 = open("CR_data-train_set.pic.txt","w")
out2 = open("CR_data-train_set.lab.txt","w")
out3 = open("CR_data-valid_set.pic.txt","w")
out4 = open("CR_data-valid_set.lab.txt","w")
out5 = open("CR_data-test_set.pic.txt","w")
out6 = open("CR_data-test_set.lab.txt","w")
except IOError:
print ("file open error")
### file output code above
# Load the dataset
f = gzip.open('mnist.pkl.gz', 'rb')
train_set, valid_set, test_set = cPickle.load(f)
###### above is original pkl read file
##### below test separate file to pic and label
train_set_pic , train_set_lab = train_set
valid_set_pic , valid_set_lab = valid_set
test_set_pic , test_set_lab = test_set
##### above test separate file to pic and label
out1.write("train set picture\n")
temp1 = train_set_pic[0]
#output train_set picture
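# NOTE: range(1) here (and in the valid/test loops below) writes only the first feature vector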
for i in range(1):
    for j in range(len(temp1)):
out1.write(str(train_set_pic[i][j]))
out1.write(",")
#output train_set label
out2.write("train set label\n")
for i in range(len(train_set_lab)):
out2.write(str(train_set_lab[i]))
out2.write(",")
print ("training set :", train_set_pic)
print ("training set lab :", train_set_lab)
temp2 = valid_set_pic[0]
out3.write("valid set picture\n")
#output valid_set picture
for i in range(1):
    for j in range(len(temp2)):
out3.write(str(valid_set_pic[i][j]))
out3.write(",")
#output valid_set label
out4.write("valid set label\n")
for i in range(len(valid_set_lab)):
out4.write(str(valid_set_lab[i]),)
out4.write(",")
print ("valid set :", valid_set_pic)
print ("valid set lab :", valid_set_lab)
temp3 = test_set_pic[0]
#output test_set picture
out5.write("test set\n")
for i in range(1):
    for j in range(len(temp3)):
out5.write(str(test_set_pic[i][j]))
out5.write(",")
#output test_set label
#out6.write(str(test_set_lab.eval()))
for i in range(len(test_set_lab)):
out6.write(str(test_set_lab[i]))
out6.write(",")
print type(test_set_lab)
print ("test set :", test_set_pic)
print ("test set lab :", test_set_lab)
### file output code below
out1.close()
out2.close()
out3.close()
out4.close()
out5.close()
out6.close()
### file output code above
###### below is original pkl read file
f.close() | gpl-3.0 |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/shutil.py | 15 | 39249 | """Utility functions for copying and archiving files and directory trees.
XXX The functions here don't copy the resource fork or other metadata on Mac.
"""
import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno
import tarfile
try:
import bz2
del bz2
_BZ2_SUPPORTED = True
except ImportError:
_BZ2_SUPPORTED = False
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"copytree", "move", "rmtree", "Error", "SpecialFileError",
"ExecError", "make_archive", "get_archive_formats",
"register_archive_format", "unregister_archive_format",
"get_unpack_formats", "register_unpack_format",
"unregister_unpack_format", "unpack_archive",
"ignore_patterns", "chown", "which", "get_terminal_size",
"SameFileError"]
# disk_usage is added later, if available on the platform
class Error(OSError):
pass
class SameFileError(Error):
"""Raised when source and destination are the same file."""
class SpecialFileError(OSError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
class ExecError(OSError):
"""Raised when a command could not be executed"""
class ReadError(OSError):
"""Raised when an archive cannot be read"""
class RegistryError(Exception):
"""Raised when a registry operation with the archiving
and unpacking registeries fails"""
def copyfileobj(fsrc, fdst, length=16*1024):
"""copy data from file-like object fsrc to file-like object fdst"""
while 1:
buf = fsrc.read(length)
if not buf:
break
fdst.write(buf)
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path, 'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def copyfile(src, dst, *, follow_symlinks=True):
"""Copy data from src to dst.
If follow_symlinks is not set and src is a symbolic link, a new
symlink will be created instead of copying the file it points to.
"""
if _samefile(src, dst):
raise SameFileError("{!r} and {!r} are the same file".format(src, dst))
for fn in [src, dst]:
try:
st = os.stat(fn)
except OSError:
# File most likely does not exist
pass
else:
# XXX What about other special files? (sockets, devices...)
if stat.S_ISFIFO(st.st_mode):
raise SpecialFileError("`%s` is a named pipe" % fn)
if not follow_symlinks and os.path.islink(src):
os.symlink(os.readlink(src), dst)
else:
with open(src, 'rb') as fsrc:
with open(dst, 'wb') as fdst:
copyfileobj(fsrc, fdst)
return dst
def copymode(src, dst, *, follow_symlinks=True):
"""Copy mode bits from src to dst.
If follow_symlinks is not set, symlinks aren't followed if and only
if both `src` and `dst` are symlinks. If `lchmod` isn't available
(e.g. Linux) this method does nothing.
"""
if not follow_symlinks and os.path.islink(src) and os.path.islink(dst):
if hasattr(os, 'lchmod'):
stat_func, chmod_func = os.lstat, os.lchmod
else:
return
elif hasattr(os, 'chmod'):
stat_func, chmod_func = os.stat, os.chmod
else:
return
st = stat_func(src)
chmod_func(dst, stat.S_IMODE(st.st_mode))
if hasattr(os, 'listxattr'):
def _copyxattr(src, dst, *, follow_symlinks=True):
"""Copy extended filesystem attributes from `src` to `dst`.
Overwrite existing attributes.
If `follow_symlinks` is false, symlinks won't be followed.
"""
try:
names = os.listxattr(src, follow_symlinks=follow_symlinks)
except OSError as e:
if e.errno not in (errno.ENOTSUP, errno.ENODATA):
raise
return
for name in names:
try:
value = os.getxattr(src, name, follow_symlinks=follow_symlinks)
os.setxattr(dst, name, value, follow_symlinks=follow_symlinks)
except OSError as e:
if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA):
raise
else:
def _copyxattr(*args, **kwargs):
pass
def copystat(src, dst, *, follow_symlinks=True):
"""Copy all stat info (mode bits, atime, mtime, flags) from src to dst.
If the optional flag `follow_symlinks` is not set, symlinks aren't followed if and
only if both `src` and `dst` are symlinks.
"""
def _nop(*args, ns=None, follow_symlinks=None):
pass
# follow symlinks (aka don't not follow symlinks)
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
if follow:
# use the real function if it exists
def lookup(name):
return getattr(os, name, _nop)
else:
# use the real function only if it exists
# *and* it supports follow_symlinks
def lookup(name):
fn = getattr(os, name, _nop)
if fn in os.supports_follow_symlinks:
return fn
return _nop
st = lookup("stat")(src, follow_symlinks=follow)
mode = stat.S_IMODE(st.st_mode)
lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns),
follow_symlinks=follow)
try:
lookup("chmod")(dst, mode, follow_symlinks=follow)
except NotImplementedError:
# if we got a NotImplementedError, it's because
# * follow_symlinks=False,
# * lchown() is unavailable, and
# * either
# * fchownat() is unavailable or
# * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW.
# (it returned ENOSUP.)
# therefore we're out of options--we simply cannot chown the
# symlink. give up, suppress the error.
# (which is what shutil always did in this circumstance.)
pass
if hasattr(st, 'st_flags'):
try:
lookup("chflags")(dst, st.st_flags, follow_symlinks=follow)
except OSError as why:
for err in 'EOPNOTSUPP', 'ENOTSUP':
if hasattr(errno, err) and why.errno == getattr(errno, err):
break
else:
raise
_copyxattr(src, dst, follow_symlinks=follow)
def copy(src, dst, *, follow_symlinks=True):
"""Copy data and mode bits ("cp src dst"). Return the file's destination.
The destination may be a directory.
If follow_symlinks is false, symlinks won't be followed. This
resembles GNU's "cp -P src dst".
If source and destination are the same file, a SameFileError will be
raised.
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst, follow_symlinks=follow_symlinks)
copymode(src, dst, follow_symlinks=follow_symlinks)
return dst
def copy2(src, dst, *, follow_symlinks=True):
"""Copy data and all stat info ("cp -p src dst"). Return the file's
destination."
The destination may be a directory.
If follow_symlinks is false, symlinks won't be followed. This
resembles GNU's "cp -P src dst".
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst, follow_symlinks=follow_symlinks)
copystat(src, dst, follow_symlinks=follow_symlinks)
return dst
def ignore_patterns(*patterns):
"""Function that can be used as copytree() ignore parameter.
Patterns is a sequence of glob-style patterns
that are used to exclude files"""
def _ignore_patterns(path, names):
ignored_names = []
for pattern in patterns:
ignored_names.extend(fnmatch.filter(names, pattern))
return set(ignored_names)
return _ignore_patterns
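# Typical use: copytree(src, dst, ignore=ignore_patterns('*.pyc', 'tmp*'))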
def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
ignore_dangling_symlinks=False):
"""Recursively copy a directory tree.
The destination directory must not already exist.
If exception(s) occur, an Error is raised with a list of reasons.
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied. If the file pointed by the symlink doesn't
exist, an exception will be added in the list of errors raised in
an Error exception at the end of the copy process.
You can set the optional ignore_dangling_symlinks flag to true if you
want to silence this exception. Notice that this has no effect on
platforms that don't support os.symlink.
The optional ignore argument is a callable. If given, it
is called with the `src` parameter, which is the directory
being visited by copytree(), and `names` which is the list of
`src` contents, as returned by os.listdir():
callable(src, names) -> ignored_names
Since copytree() is called recursively, the callable will be
called once for each directory that is copied. It returns a
list of names relative to the `src` directory that should
not be copied.
The optional copy_function argument is a callable that will be used
to copy each file. It will be called with the source path and the
destination path as arguments. By default, copy2() is used, but any
function that supports the same signature (like copy()) can be used.
"""
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
os.makedirs(dst)
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if os.path.islink(srcname):
linkto = os.readlink(srcname)
if symlinks:
# We can't just leave it to `copy_function` because legacy
# code with a custom `copy_function` may rely on copytree
# doing the right thing.
os.symlink(linkto, dstname)
copystat(srcname, dstname, follow_symlinks=not symlinks)
else:
# ignore dangling symlink if the flag is on
if not os.path.exists(linkto) and ignore_dangling_symlinks:
continue
# otherwise let the copy occurs. copy2 will raise an error
copy_function(srcname, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore, copy_function)
else:
# Will raise a SpecialFileError for unsupported file types
copy_function(srcname, dstname)
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error as err:
errors.extend(err.args[0])
except OSError as why:
errors.append((srcname, dstname, str(why)))
try:
copystat(src, dst)
except OSError as why:
# Copying file access times may fail on Windows
if getattr(why, 'winerror', None) is None:
errors.append((src, dst, str(why)))
if errors:
raise Error(errors)
return dst
# version vulnerable to race conditions
def _rmtree_unsafe(path, onerror):
try:
if os.path.islink(path):
# symlinks to directories are forbidden, see bug #1669
raise OSError("Cannot call rmtree on a symbolic link")
except OSError:
onerror(os.path.islink, path, sys.exc_info())
# can't continue even if onerror hook returns
return
names = []
try:
names = os.listdir(path)
except OSError:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except OSError:
mode = 0
if stat.S_ISDIR(mode):
_rmtree_unsafe(fullname, onerror)
else:
try:
os.unlink(fullname)
except OSError:
onerror(os.unlink, fullname, sys.exc_info())
try:
os.rmdir(path)
except OSError:
onerror(os.rmdir, path, sys.exc_info())
# Version using fd-based APIs to protect against races
def _rmtree_safe_fd(topfd, path, onerror):
names = []
try:
names = os.listdir(topfd)
except OSError as err:
err.filename = path
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
orig_st = os.stat(name, dir_fd=topfd, follow_symlinks=False)
mode = orig_st.st_mode
except OSError:
mode = 0
if stat.S_ISDIR(mode):
try:
dirfd = os.open(name, os.O_RDONLY, dir_fd=topfd)
except OSError:
onerror(os.open, fullname, sys.exc_info())
else:
try:
if os.path.samestat(orig_st, os.fstat(dirfd)):
_rmtree_safe_fd(dirfd, fullname, onerror)
try:
os.rmdir(name, dir_fd=topfd)
except OSError:
onerror(os.rmdir, fullname, sys.exc_info())
else:
try:
# This can only happen if someone replaces
# a directory with a symlink after the call to
# stat.S_ISDIR above.
raise OSError("Cannot call rmtree on a symbolic "
"link")
except OSError:
onerror(os.path.islink, fullname, sys.exc_info())
finally:
os.close(dirfd)
else:
try:
os.unlink(name, dir_fd=topfd)
except OSError:
onerror(os.unlink, fullname, sys.exc_info())
_use_fd_functions = ({os.open, os.stat, os.unlink, os.rmdir} <=
os.supports_dir_fd and
os.listdir in os.supports_fd and
os.stat in os.supports_follow_symlinks)
def rmtree(path, ignore_errors=False, onerror=None):
"""Recursively delete a directory tree.
If ignore_errors is set, errors are ignored; otherwise, if onerror
is set, it is called to handle the error with arguments (func,
path, exc_info) where func is platform and implementation dependent;
path is the argument to that function that caused it to fail; and
exc_info is a tuple returned by sys.exc_info(). If ignore_errors
is false and onerror is None, an exception is raised.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
if _use_fd_functions:
# While the unsafe rmtree works fine on bytes, the fd based does not.
if isinstance(path, bytes):
path = os.fsdecode(path)
# Note: To guard against symlink races, we use the standard
# lstat()/open()/fstat() trick.
try:
orig_st = os.lstat(path)
except Exception:
onerror(os.lstat, path, sys.exc_info())
return
try:
fd = os.open(path, os.O_RDONLY)
except Exception:
onerror(os.lstat, path, sys.exc_info())
return
try:
if os.path.samestat(orig_st, os.fstat(fd)):
_rmtree_safe_fd(fd, path, onerror)
try:
os.rmdir(path)
except OSError:
onerror(os.rmdir, path, sys.exc_info())
else:
try:
# symlinks to directories are forbidden, see bug #1669
raise OSError("Cannot call rmtree on a symbolic link")
except OSError:
onerror(os.path.islink, path, sys.exc_info())
finally:
os.close(fd)
else:
return _rmtree_unsafe(path, onerror)
# Allow introspection of whether or not the hardening against symlink
# attacks is supported on the current platform
rmtree.avoids_symlink_attacks = _use_fd_functions
def _basename(path):
# A basename() variant which first strips the trailing slash, if present.
# Thus we always get the last component of the path, even for directories.
sep = os.path.sep + (os.path.altsep or '')
return os.path.basename(path.rstrip(sep))
def move(src, dst):
"""Recursively move a file or directory to another location. This is
similar to the Unix "mv" command. Return the file or directory's
destination.
If the destination is a directory or a symlink to a directory, the source
is moved inside the directory. The destination path must not already
exist.
If the destination already exists but is not a directory, it may be
overwritten depending on os.rename() semantics.
If the destination is on our current filesystem, then rename() is used.
Otherwise, src is copied to the destination and then removed. Symlinks are
recreated under the new name if os.rename() fails because of cross
filesystem renames.
A lot more could be done here... A look at a mv.c shows a lot of
the issues this implementation glosses over.
"""
real_dst = dst
if os.path.isdir(dst):
if _samefile(src, dst):
# We might be on a case insensitive filesystem,
# perform the rename anyway.
os.rename(src, dst)
return
real_dst = os.path.join(dst, _basename(src))
if os.path.exists(real_dst):
raise Error("Destination path '%s' already exists" % real_dst)
try:
os.rename(src, real_dst)
except OSError:
if os.path.islink(src):
linkto = os.readlink(src)
os.symlink(linkto, real_dst)
os.unlink(src)
elif os.path.isdir(src):
if _destinsrc(src, dst):
raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
copytree(src, real_dst, symlinks=True)
rmtree(src)
else:
copy2(src, real_dst)
os.unlink(src)
return real_dst
def _destinsrc(src, dst):
src = abspath(src)
dst = abspath(dst)
if not src.endswith(os.path.sep):
src += os.path.sep
if not dst.endswith(os.path.sep):
dst += os.path.sep
return dst.startswith(src)
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
owner=None, group=None, logger=None):
"""Create a (possibly compressed) tar file from all the files under
'base_dir'.
'compress' must be "gzip" (the default), "bzip2", or None.
'owner' and 'group' can be used to define an owner and a group for the
archive that is being built. If not provided, the current owner and group
will be used.
The output tar file will be named 'base_name' + ".tar", possibly plus
the appropriate compression extension (".gz", or ".bz2").
Returns the output filename.
"""
tar_compression = {'gzip': 'gz', None: ''}
compress_ext = {'gzip': '.gz'}
if _BZ2_SUPPORTED:
tar_compression['bzip2'] = 'bz2'
compress_ext['bzip2'] = '.bz2'
# flags for compression program, each element of list will be an argument
if compress is not None and compress not in compress_ext:
raise ValueError("bad value for 'compress', or compression format not "
"supported : {0}".format(compress))
archive_name = base_name + '.tar' + compress_ext.get(compress, '')
archive_dir = os.path.dirname(archive_name)
if archive_dir and not os.path.exists(archive_dir):
if logger is not None:
logger.info("creating %s", archive_dir)
if not dry_run:
os.makedirs(archive_dir)
# creating the tarball
if logger is not None:
logger.info('Creating tar archive')
uid = _get_uid(owner)
gid = _get_gid(group)
def _set_uid_gid(tarinfo):
if gid is not None:
tarinfo.gid = gid
tarinfo.gname = group
if uid is not None:
tarinfo.uid = uid
tarinfo.uname = owner
return tarinfo
if not dry_run:
tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
try:
tar.add(base_dir, filter=_set_uid_gid)
finally:
tar.close()
return archive_name
def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
# XXX see if we want to keep an external call here
if verbose:
zipoptions = "-r"
else:
zipoptions = "-rq"
from distutils.errors import DistutilsExecError
from distutils.spawn import spawn
try:
spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
except DistutilsExecError:
# XXX really should distinguish between "couldn't find
# external 'zip' command" and "zip failed".
raise ExecError("unable to create zip file '%s': "
"could neither import the 'zipfile' module nor "
"find a standalone zip utility") % zip_filename
def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
"""Create a zip file from all the files under 'base_dir'.
The output zip file will be named 'base_name' + ".zip". Uses either the
"zipfile" Python module (if available) or the InfoZIP "zip" utility
(if installed and found on the default search path). If neither tool is
available, raises ExecError. Returns the name of the output zip
file.
"""
zip_filename = base_name + ".zip"
archive_dir = os.path.dirname(base_name)
if archive_dir and not os.path.exists(archive_dir):
if logger is not None:
logger.info("creating %s", archive_dir)
if not dry_run:
os.makedirs(archive_dir)
# If zipfile module is not available, try spawning an external 'zip'
# command.
try:
import zipfile
except ImportError:
zipfile = None
if zipfile is None:
_call_external_zip(base_dir, zip_filename, verbose, dry_run)
else:
if logger is not None:
logger.info("creating '%s' and adding '%s' to it",
zip_filename, base_dir)
if not dry_run:
with zipfile.ZipFile(zip_filename, "w",
compression=zipfile.ZIP_DEFLATED) as zf:
for dirpath, dirnames, filenames in os.walk(base_dir):
for name in filenames:
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
zf.write(path, path)
if logger is not None:
logger.info("adding '%s'", path)
return zip_filename
_ARCHIVE_FORMATS = {
'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
'zip': (_make_zipfile, [], "ZIP file")
}
if _BZ2_SUPPORTED:
_ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
"bzip2'ed tar-file")
def get_archive_formats():
"""Returns a list of supported formats for archiving and unarchiving.
Each element of the returned sequence is a tuple (name, description)
"""
formats = [(name, registry[2]) for name, registry in
_ARCHIVE_FORMATS.items()]
formats.sort()
return formats
def register_archive_format(name, function, extra_args=None, description=''):
"""Registers an archive format.
name is the name of the format. function is the callable that will be
used to create archives. If provided, extra_args is a sequence of
(name, value) tuples that will be passed as arguments to the callable.
description can be provided to describe the format, and will be returned
by the get_archive_formats() function.
"""
if extra_args is None:
extra_args = []
if not callable(function):
raise TypeError('The %s object is not callable' % function)
if not isinstance(extra_args, (tuple, list)):
raise TypeError('extra_args needs to be a sequence')
for element in extra_args:
if not isinstance(element, (tuple, list)) or len(element) !=2:
raise TypeError('extra_args elements are : (arg_name, value)')
_ARCHIVE_FORMATS[name] = (function, extra_args, description)
def unregister_archive_format(name):
del _ARCHIVE_FORMATS[name]
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
dry_run=0, owner=None, group=None, logger=None):
"""Create an archive file (eg. zip or tar).
'base_name' is the name of the file to create, minus any format-specific
extension; 'format' is the archive format: one of "zip", "tar", "bztar"
or "gztar".
'root_dir' is a directory that will be the root directory of the
archive; ie. we typically chdir into 'root_dir' before creating the
archive. 'base_dir' is the directory where we start archiving from;
ie. 'base_dir' will be the common prefix of all files and
directories in the archive. 'root_dir' and 'base_dir' both default
to the current directory. Returns the name of the archive file.
'owner' and 'group' are used when creating a tar archive. By default,
uses the current owner and group.
"""
save_cwd = os.getcwd()
if root_dir is not None:
if logger is not None:
logger.debug("changing into '%s'", root_dir)
base_name = os.path.abspath(base_name)
if not dry_run:
os.chdir(root_dir)
if base_dir is None:
base_dir = os.curdir
kwargs = {'dry_run': dry_run, 'logger': logger}
try:
format_info = _ARCHIVE_FORMATS[format]
except KeyError:
raise ValueError("unknown archive format '%s'" % format)
func = format_info[0]
for arg, val in format_info[1]:
kwargs[arg] = val
if format != 'zip':
kwargs['owner'] = owner
kwargs['group'] = group
try:
filename = func(base_name, base_dir, **kwargs)
finally:
if root_dir is not None:
if logger is not None:
logger.debug("changing back to '%s'", save_cwd)
os.chdir(save_cwd)
return filename
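# Typical use: make_archive('backup', 'gztar', root_dir='/tmp/data')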
def get_unpack_formats():
"""Returns a list of supported formats for unpacking.
Each element of the returned sequence is a tuple
(name, extensions, description)
"""
formats = [(name, info[0], info[3]) for name, info in
_UNPACK_FORMATS.items()]
formats.sort()
return formats
def _check_unpack_options(extensions, function, extra_args):
"""Checks what gets registered as an unpacker."""
# first make sure no other unpacker is registered for this extension
existing_extensions = {}
for name, info in _UNPACK_FORMATS.items():
for ext in info[0]:
existing_extensions[ext] = name
for extension in extensions:
if extension in existing_extensions:
msg = '%s is already registered for "%s"'
raise RegistryError(msg % (extension,
existing_extensions[extension]))
if not callable(function):
raise TypeError('The registered function must be a callable')
def register_unpack_format(name, extensions, function, extra_args=None,
description=''):
"""Registers an unpack format.
`name` is the name of the format. `extensions` is a list of extensions
corresponding to the format.
`function` is the callable that will be
used to unpack archives. The callable will receive archives to unpack.
If it's unable to handle an archive, it needs to raise a ReadError
exception.
If provided, `extra_args` is a sequence of
(name, value) tuples that will be passed as arguments to the callable.
description can be provided to describe the format, and will be returned
by the get_unpack_formats() function.
"""
if extra_args is None:
extra_args = []
_check_unpack_options(extensions, function, extra_args)
_UNPACK_FORMATS[name] = extensions, function, extra_args, description
def unregister_unpack_format(name):
"""Removes the pack format from the registery."""
del _UNPACK_FORMATS[name]
def _ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
def _unpack_zipfile(filename, extract_dir):
"""Unpack zip `filename` to `extract_dir`
"""
try:
import zipfile
except ImportError:
raise ReadError('zlib not supported, cannot unpack this archive.')
if not zipfile.is_zipfile(filename):
raise ReadError("%s is not a zip file" % filename)
zip = zipfile.ZipFile(filename)
try:
for info in zip.infolist():
name = info.filename
# don't extract absolute paths or ones with .. in them
if name.startswith('/') or '..' in name:
continue
target = os.path.join(extract_dir, *name.split('/'))
if not target:
continue
_ensure_directory(target)
if not name.endswith('/'):
# file
data = zip.read(info.filename)
f = open(target, 'wb')
try:
f.write(data)
finally:
f.close()
del data
finally:
zip.close()
def _unpack_tarfile(filename, extract_dir):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
"""
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise ReadError(
"%s is not a compressed or uncompressed tar file" % filename)
try:
tarobj.extractall(extract_dir)
finally:
tarobj.close()
_UNPACK_FORMATS = {
'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
}
if _BZ2_SUPPORTED:
_UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
"bzip2'ed tar-file")
def _find_unpack_format(filename):
for name, info in _UNPACK_FORMATS.items():
for extension in info[0]:
if filename.endswith(extension):
return name
return None
def unpack_archive(filename, extract_dir=None, format=None):
"""Unpack an archive.
`filename` is the name of the archive.
`extract_dir` is the name of the target directory, where the archive
is unpacked. If not provided, the current working directory is used.
`format` is the archive format: one of "zip", "tar", or "gztar". Or any
other registered format. If not provided, unpack_archive will use the
filename extension and see if an unpacker was registered for that
extension.
In case none is found, a ValueError is raised.
"""
if extract_dir is None:
extract_dir = os.getcwd()
if format is not None:
try:
format_info = _UNPACK_FORMATS[format]
except KeyError:
raise ValueError("Unknown unpack format '{0}'".format(format))
func = format_info[1]
func(filename, extract_dir, **dict(format_info[2]))
else:
# we need to look at the registered unpackers supported extensions
format = _find_unpack_format(filename)
if format is None:
raise ReadError("Unknown archive format '{0}'".format(filename))
func = _UNPACK_FORMATS[format][1]
kwargs = dict(_UNPACK_FORMATS[format][2])
func(filename, extract_dir, **kwargs)
if hasattr(os, 'statvfs'):
__all__.append('disk_usage')
_ntuple_diskusage = collections.namedtuple('usage', 'total used free')
def disk_usage(path):
"""Return disk usage statistics about the given path.
Returned value is a named tuple with attributes 'total', 'used' and
'free', which are the amount of total, used and free space, in bytes.
"""
st = os.statvfs(path)
free = st.f_bavail * st.f_frsize
total = st.f_blocks * st.f_frsize
used = (st.f_blocks - st.f_bfree) * st.f_frsize
return _ntuple_diskusage(total, used, free)
elif os.name == 'nt':
import nt
__all__.append('disk_usage')
_ntuple_diskusage = collections.namedtuple('usage', 'total used free')
def disk_usage(path):
"""Return disk usage statistics about the given path.
        Returned value is a named tuple with attributes 'total', 'used' and
'free', which are the amount of total, used and free space, in bytes.
"""
total, free = nt._getdiskusage(path)
used = total - free
return _ntuple_diskusage(total, used, free)
def chown(path, user=None, group=None):
"""Change owner user and group of the given path.
user and group can be the uid/gid or the user/group names, and in that case,
they are converted to their respective uid/gid.
"""
if user is None and group is None:
raise ValueError("user and/or group must be set")
_user = user
_group = group
# -1 means don't change it
if user is None:
_user = -1
# user can either be an int (the uid) or a string (the system username)
elif isinstance(user, str):
_user = _get_uid(user)
if _user is None:
raise LookupError("no such user: {!r}".format(user))
if group is None:
_group = -1
elif not isinstance(group, int):
_group = _get_gid(group)
if _group is None:
raise LookupError("no such group: {!r}".format(group))
os.chown(path, _user, _group)
def get_terminal_size(fallback=(80, 24)):
"""Get the size of the terminal window.
For each of the two dimensions, the environment variable, COLUMNS
and LINES respectively, is checked. If the variable is defined and
the value is a positive integer, it is used.
When COLUMNS or LINES is not defined, which is the common case,
the terminal connected to sys.__stdout__ is queried
by invoking os.get_terminal_size.
If the terminal size cannot be successfully queried, either because
the system doesn't support querying, or because we are not
connected to a terminal, the value given in fallback parameter
is used. Fallback defaults to (80, 24) which is the default
size used by many terminal emulators.
The value returned is a named tuple of type os.terminal_size.
"""
# columns, lines are the working values
try:
columns = int(os.environ['COLUMNS'])
except (KeyError, ValueError):
columns = 0
try:
lines = int(os.environ['LINES'])
except (KeyError, ValueError):
lines = 0
# only query if necessary
if columns <= 0 or lines <= 0:
try:
size = os.get_terminal_size(sys.__stdout__.fileno())
except (NameError, OSError):
size = os.terminal_size(fallback)
if columns <= 0:
columns = size.columns
if lines <= 0:
lines = size.lines
return os.terminal_size((columns, lines))
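# Typical use: columns, lines = get_terminal_size()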
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
"""Given a command, mode, and a PATH string, return the path which
conforms to the given mode on the PATH, or None if there is no such
file.
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
of os.environ.get("PATH"), or can be overridden with a custom search
path.
"""
# Check that a given file can be accessed with the correct mode.
# Additionally check that `file` is not a directory, as on Windows
# directories pass the os.access check.
def _access_check(fn, mode):
return (os.path.exists(fn) and os.access(fn, mode)
and not os.path.isdir(fn))
# If we're given a path with a directory part, look it up directly rather
# than referring to PATH directories. This includes checking relative to the
# current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd, mode):
return cmd
return None
if path is None:
path = os.environ.get("PATH", os.defpath)
if not path:
return None
path = path.split(os.pathsep)
if sys.platform == "win32":
# The current directory takes precedence on Windows.
if not os.curdir in path:
path.insert(0, os.curdir)
# PATHEXT is necessary to check on Windows.
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
# See if the given file matches any of the expected path extensions.
# This will allow us to short circuit when given "python.exe".
# If it does match, only test that one, otherwise we have to try
# others.
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
files = [cmd]
else:
files = [cmd + ext for ext in pathext]
else:
# On other platforms you don't have things like PATHEXT to tell you
# what file suffixes are executable, so just pass on cmd as-is.
files = [cmd]
seen = set()
for dir in path:
normdir = os.path.normcase(dir)
if not normdir in seen:
seen.add(normdir)
for thefile in files:
name = os.path.join(dir, thefile)
if _access_check(name, mode):
return name
return None
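# Illustrative usage: which("python") returns the first executable named
# "python" found on PATH (e.g. "/usr/bin/python"), or None if there is none.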
| gpl-2.0 |
Griffen8280/PandoraPi_Website | wificonfig.py | 1 | 1775 | #!/usr/bin/python
#import Libraries
import cgi
import cgitb
import time
import thread
import subprocess
# Create instance of FieldStorage
form = cgi.FieldStorage()
# Get data from fields
wifif = form.getvalue('wifi')
wifif = '"%s"'%wifif
passwordf = form.getvalue('password')
passwordf = '"%s"'%passwordf
sudo = '/usr/bin/sudo'
modconfig = '/var/www/html/modconfig.py'
config_file = '/etc/wpa_supplicant/wpa_supplicant.conf'
userv = 'ssid'
passwordv = 'psk'
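#Illustrative command assembled below (the SSID value is a placeholder):
#  /usr/bin/sudo /var/www/html/modconfig.py /etc/wpa_supplicant/wpa_supplicant.conf ssid "MyNetwork"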
#Use modconfig to update the user information
def updateuser():
print ("Updating the SSID")
command = "%s %s %s %s %s" % (sudo, modconfig, config_file, userv, wifif)
print (command)
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
process.wait()
#Use modconfig to update the password information
def updatepassword():
print ("Updating the PSK")
command = "%s %s %s %s %s" % (sudo, modconfig, config_file, passwordv, passwordf)
print (command)
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
process.wait()
#Restart to take effect
def restart():
command = "/usr/bin/sudo /sbin/reboot"
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
#create a progress spinner that prints a dot every second
def spinner():
while True:
print ('.')
time.sleep(1)
#create main method
if __name__ == '__main__':
thread.start_new_thread(spinner, ())
updateuser()
time.sleep(2)
thread.start_new_thread(spinner, ())
updatepassword()
time.sleep(2)
restart()
#HTML Example
#<form action="/cgi-bin/wificonfig.py" method="get">
#WiFi SSID: <input type="text" name="wifi"> <br />
#Password: <input type="text" name="password" />
#<input type="submit" value="Submit" />
#</form>
| gpl-3.0 |
Vassy/odoo | addons/calendar/__openerp__.py | 33 | 2108 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Calendar',
'version': '1.0',
'depends': ['base', 'mail', 'base_action_rule', 'web_calendar'],
'summary': 'Personal & Shared Calendar',
'description': """
This is a full-featured calendar system.
========================================
It supports:
------------
- Calendar of events
- Recurring events
If you need to manage your meetings, you should install the CRM module.
""",
'author': 'OpenERP SA',
'category': 'Hidden/Dependency',
'website': 'http://www.openerp.com',
'demo': ['calendar_demo.xml'],
'data': [
'security/ir.model.access.csv',
'security/calendar_security.xml',
'calendar_view.xml',
'contacts_view.xml',
'calendar_data.xml',
'views/calendar.xml',
],
'qweb': ['static/src/xml/*.xml'],
'test': [
'test/calendar_test.yml',
'test/test_calendar_recurrent_event_case2.yml'
],
'installable': True,
'application': True,
'auto_install': False,
'images': ['images/calendar1.jpeg', 'images/calendar2.jpeg', 'images/calendar3.jpeg', 'images/calendar4.jpeg'],
}
| agpl-3.0 |
allanino/nupic | tests/unit/nupic/data/inference_shifter_test.py | 35 | 5836 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Unit tests for InferenceShifter."""
from nupic.data.inference_shifter import InferenceShifter
from nupic.frameworks.opf.opfutils import InferenceElement, ModelResult
from nupic.support.unittesthelpers.testcasebase import (TestCaseBase,
unittest)
class TestInferenceShifter(TestCaseBase):
def _shiftAndCheck(self, inferences, expectedOutput):
inferenceShifter = InferenceShifter()
for inference, expected in zip(inferences, expectedOutput):
inputResult = ModelResult(inferences=inference)
outputResult = inferenceShifter.shift(inputResult)
self.assertEqual(outputResult.inferences, expected)
def testNoShift(self):
for element in (InferenceElement.anomalyScore,
InferenceElement.classification,
InferenceElement.classConfidences):
inferences = [
{element: 1},
{element: 2},
{element: 3},
]
expectedOutput = [
{element: 1},
{element: 2},
{element: 3},
]
self._shiftAndCheck(inferences, expectedOutput)
def testNoShiftMultipleValues(self):
for element in (InferenceElement.anomalyScore,
InferenceElement.classification,
InferenceElement.classConfidences):
inferences = [
{element: [1, 2, 3]},
{element: [4, 5, 6]},
{element: [5, 6, 7]},
]
expectedOutput = [
{element: [1, 2, 3]},
{element: [4, 5, 6]},
{element: [5, 6, 7]},
]
self._shiftAndCheck(inferences, expectedOutput)
def testSingleShift(self):
for element in (InferenceElement.prediction,
InferenceElement.encodings):
inferences = [
{element: 1},
{element: 2},
{element: 3},
]
expectedOutput = [
{element: None},
{element: 1},
{element: 2},
]
self._shiftAndCheck(inferences, expectedOutput)
def testSingleShiftMultipleValues(self):
for element in (InferenceElement.prediction,
InferenceElement.encodings):
inferences = [
{element: [1, 2, 3]},
{element: [4, 5, 6]},
{element: [5, 6, 7]},
]
expectedOutput = [
{element: [None, None, None]},
{element: [1, 2, 3]},
{element: [4, 5, 6]},
]
self._shiftAndCheck(inferences, expectedOutput)
def testMultiStepShift(self):
for element in (InferenceElement.multiStepPredictions,
InferenceElement.multiStepBestPredictions):
inferences = [
{element: {2: 1}},
{element: {2: 2}},
{element: {2: 3}},
{element: {2: 4}},
]
expectedOutput = [
{element: {2: None}},
{element: {2: None}},
{element: {2: 1}},
{element: {2: 2}},
]
self._shiftAndCheck(inferences, expectedOutput)
def testMultiStepShiftMultipleValues(self):
for element in (InferenceElement.multiStepPredictions,
InferenceElement.multiStepBestPredictions):
inferences = [
{element: {2: [1, 11]}},
{element: {2: [2, 12]}},
{element: {2: [3, 13]}},
{element: {2: [4, 14]}},
]
expectedOutput = [
{element: {2: None}},
{element: {2: None}},
{element: {2: [1, 11]}},
{element: {2: [2, 12]}},
]
self._shiftAndCheck(inferences, expectedOutput)
def testDifferentMultiStepsShift(self):
for element in (InferenceElement.multiStepPredictions,
InferenceElement.multiStepBestPredictions):
inferences = [
{element: {2: 1, 3: 5}},
{element: {2: 2, 3: 6}},
{element: {2: 3, 3: 7}},
{element: {2: 4, 3: 8}},
]
expectedOutput = [
{element: {2: None, 3: None}},
{element: {2: None, 3: None}},
{element: {2: 1, 3: None}},
{element: {2: 2, 3: 5}},
]
self._shiftAndCheck(inferences, expectedOutput)
def testDifferentMultiStepsShiftMultipleValues(self):
for element in (InferenceElement.multiStepPredictions,
InferenceElement.multiStepBestPredictions):
inferences = [
{element: {2: [1, 11], 3: [5, 15]}},
{element: {2: [2, 12], 3: [6, 16]}},
{element: {2: [3, 13], 3: [7, 17]}},
{element: {2: [4, 14], 3: [8, 18]}},
]
expectedOutput = [
{element: {2: None, 3: None}},
{element: {2: None, 3: None}},
{element: {2: [1, 11], 3: None}},
{element: {2: [2, 12], 3: [5, 15]}},
]
self._shiftAndCheck(inferences, expectedOutput)
if __name__ == '__main__':
unittest.main()
| agpl-3.0 |
edx/lettuce | tests/integration/lib/Django-1.2.5/tests/regressiontests/m2m_through_regress/tests.py | 38 | 5744 | try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.core import management
from django.contrib.auth.models import User
from django.test import TestCase
from models import (Person, Group, Membership, UserMembership,
Car, Driver, CarDriver)
class M2MThroughTestCase(TestCase):
def test_everything(self):
bob = Person.objects.create(name="Bob")
jim = Person.objects.create(name="Jim")
rock = Group.objects.create(name="Rock")
roll = Group.objects.create(name="Roll")
frank = User.objects.create_user("frank", "[email protected]", "password")
jane = User.objects.create_user("jane", "[email protected]", "password")
Membership.objects.create(person=bob, group=rock)
Membership.objects.create(person=bob, group=roll)
Membership.objects.create(person=jim, group=rock)
self.assertQuerysetEqual(
bob.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
]
)
self.assertQuerysetEqual(
roll.members.all(), [
"<Person: Bob>",
]
)
self.assertRaises(AttributeError, setattr, bob, "group_set", [])
self.assertRaises(AttributeError, setattr, roll, "members", [])
self.assertRaises(AttributeError, rock.members.create, name="Anne")
self.assertRaises(AttributeError, bob.group_set.create, name="Funk")
UserMembership.objects.create(user=frank, group=rock)
UserMembership.objects.create(user=frank, group=roll)
UserMembership.objects.create(user=jane, group=rock)
self.assertQuerysetEqual(
frank.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
]
)
self.assertQuerysetEqual(
roll.user_members.all(), [
"<User: frank>",
]
)
def test_serialization(self):
"m2m-through models aren't serialized as m2m fields. Refs #8134"
p = Person.objects.create(name="Bob")
g = Group.objects.create(name="Roll")
m = Membership.objects.create(person=p, group=g)
pks = {"p_pk": p.pk, "g_pk": g.pk, "m_pk": m.pk}
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertEqual(out.getvalue().strip(), """[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": 100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": "Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]""" % pks)
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="xml",
indent=2, stdout=out)
self.assertEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="%(m_pk)s" model="m2m_through_regress.membership">
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
<field type="IntegerField" name="price">100</field>
</object>
<object pk="%(p_pk)s" model="m2m_through_regress.person">
<field type="CharField" name="name">Bob</field>
</object>
<object pk="%(g_pk)s" model="m2m_through_regress.group">
<field type="CharField" name="name">Roll</field>
</object>
</django-objects>
""".strip() % pks)
def test_join_trimming(self):
"Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
bob = Person.objects.create(name="Bob")
jim = Person.objects.create(name="Jim")
rock = Group.objects.create(name="Rock")
roll = Group.objects.create(name="Roll")
Membership.objects.create(person=bob, group=rock)
Membership.objects.create(person=jim, group=rock, price=50)
Membership.objects.create(person=bob, group=roll, price=50)
self.assertQuerysetEqual(
rock.members.filter(membership__price=50), [
"<Person: Jim>",
]
)
self.assertQuerysetEqual(
bob.group_set.filter(membership__price=50), [
"<Group: Roll>",
]
)
class ToFieldThroughTests(TestCase):
def setUp(self):
self.car = Car.objects.create(make="Toyota")
self.driver = Driver.objects.create(name="Ryan Briscoe")
CarDriver.objects.create(car=self.car, driver=self.driver)
def test_to_field(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
def test_to_field_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
class ThroughLoadDataTestCase(TestCase):
fixtures = ["m2m_through"]
def test_sequence_creation(self):
"Check that sequences on an m2m_through are created for the through model, not a phantom auto-generated m2m table. Refs #11107"
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertEqual(out.getvalue().strip(), """[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user": 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, "model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]""")
| gpl-3.0 |
ruffoa/Qhacks2016 | Sigma-Securities/src/sentiment/text_extract.py | 1 | 1390 |
# Load the AlchemyAPI module code.
import AlchemyAPI
# Create an AlchemyAPI object.
alchemyObj = AlchemyAPI.AlchemyAPI()
# Load the API key from disk.
alchemyObj.loadAPIKey("api_key.txt");
'''
# Extract a title from a web URL.
result = alchemyObj.URLGetTitle("http://www.techcrunch.com/");
print result
'''
# Extract page text from a web URL (ignoring navigation links, ads, etc.).
result = alchemyObj.URLGetText("http://www.reuters.com/article/2012/11/30/us-china-apple-iphone-idUSBRE8AT06G20121130?type=companyNews");
print result
'''
# Extract raw page text from a web URL (including navigation links, ads, etc.).
result = alchemyObj.URLGetRawText("http://www.techcrunch.com/");
print result
# Load a HTML document to analyze.
htmlFileHandle = open("data/example.html", 'r')
htmlFile = htmlFileHandle.read()
htmlFileHandle.close()
# Extract a title from a HTML document.
result = alchemyObj.HTMLGetTitle(htmlFile, "http://www.test.com/");
print result
# Extract page text from a HTML document (ignoring navigation links, ads, etc.).
result = alchemyObj.HTMLGetText(htmlFile, "http://www.reuters.com/article/2012/11/30/us-china-apple-iphone-idUSBRE8AT06G20121130?type=companyNews");
print result
# Extract raw page text from a HTML document (including navigation links, ads, etc.).
result = alchemyObj.HTMLGetRawText(htmlFile, "http://www.test.com/");
print result
'''
| mit |
minhphung171093/GreenERP | openerp/addons/hr_timesheet/report/hr_timesheet_report.py | 10 | 2157 |
from openerp import tools, models, fields
from openerp.addons.decimal_precision import decimal_precision as dp
class hr_timesheet_report(models.Model):
_name = "hr.timesheet.report"
_description = "Timesheet"
_auto = False
_rec_name = "date"
date = fields.Date('Date', readonly=True)
product_id = fields.Many2one('product.product', 'Product', readonly=True)
user_id = fields.Many2one('res.users', 'User', readonly=True)
account_id = fields.Many2one('account.analytic.account', 'Analytic Account', readonly=True)
company_id = fields.Many2one('res.company', 'Company', readonly=True)
    cost = fields.Float('Cost', readonly=True, digits=dp.get_precision('Account'))
quantity = fields.Float('Time', readonly=True)
def _select(self):
select_str = """
SELECT min(aal.id) as id,
aal.date as date,
sum(aal.amount) as cost,
sum(aal.unit_amount) as quantity,
aal.account_id as account_id,
aal.product_id as product_id,
aal.user_id as user_id,
aal.company_id as company_id,
aal.currency_id as currency_id
"""
return select_str
def _from(self):
from_str = """
FROM account_analytic_line as aal
"""
return from_str
def _group_by(self):
group_by_str = """
GROUP BY aal.date,
aal.account_id,
aal.product_id,
aal.user_id,
aal.company_id,
aal.currency_id
"""
return group_by_str
def _where(self):
where_str = """
WHERE aal.is_timesheet IS TRUE
"""
return where_str
def init(self, cr):
# self._table = hr_timesheet_report
tools.drop_view_if_exists(cr, self._table)
cr.execute("""CREATE or REPLACE VIEW %s as (
%s
%s
%s
%s
)""" % (self._table, self._select(), self._from(), self._where(), self._group_by()))
| gpl-3.0 |
konklone/shaaaaaaaaaaaaa | fabfile.py | 3 | 1652 | import time
from fabric.api import run, execute, env
environment = "production"
env.use_ssh_config = True
env.hosts = ["shaaaaa"]
branch = "master"
repo = "[email protected]:konklone/shaaaaaaaaaaaaa.git"
username = "shaaaaa"
home = "/home/%s/%s" % (username, username)
shared_path = "%s/shared" % home
versions_path = "%s/versions" % home
version_path = "%s/%s" % (versions_path, time.strftime("%Y%m%d%H%M%S"))
current_path = "%s/current" % home
logs = "/home/%s" % username
keep = 5
def checkout():
run('git clone -q -b %s %s %s' % (branch, repo, version_path))
def dependencies():
run('cd %s && npm install' % version_path)
# TODO: why did I do this? (cp instead of ln)
def make_current():
# run('rm -f %s && ln -s %s %s' % (current_path, version_path, current_path))
run('rm -rf %s && cp -r %s %s' % (current_path, version_path, current_path))
def cleanup():
versions = run("ls -x %s" % versions_path).split()
destroy = versions[:-keep]
for version in destroy:
command = "rm -rf %s/%s" % (versions_path, version)
run(command)
## can be run on their own
def start():
# run("cd %s && NODE_ENV=%s forever -l %s/forever.log -a start app.js -p 3000" % (current_path, environment, logs))
run(("cd %s && " +
"NODE_ENV=%s forever -l %s/forever.log -a start app.js 3000 && " +
"NODE_ENV=%s forever -l %s/forever.log -a start app.js 3001") %
(current_path, environment, logs, environment, logs)
)
def stop():
run("forever stop app.js")
def restart():
run("forever restart app.js")
def deploy():
execute(checkout)
execute(dependencies)
execute(make_current)
execute(restart)
execute(cleanup)
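# Illustrative invocation (assumes Fabric 1.x and the "shaaaaa" SSH host alias):
#   fab deploy   # checkout -> dependencies -> make_current -> restart -> cleanup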
| bsd-3-clause |
jkyeung/XlsxWriter | dev/docs/source/conf.py | 1 | 9926 | # -*- coding: utf-8 -*-
#
# XlsxWriter documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 28 00:12:14 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'XlsxWriter'
copyright = u'2013-2016, John McNamara'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.8.7'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/2/': None}
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'default'
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {
# # 'nosidebar': True,
# 'sidebarbgcolor': '#F2F2F2',
# 'relbarbgcolor': '#9CB640',
# 'linkcolor': '#9CB640',
# 'sidebarlinkcolor': '#9CB640',
# 'footerbgcolor': '#FFFFFF',
# 'footertextcolor': '#9CB640',
# 'headtextcolor': '#9CB640',
# 'codebgcolor': '#FFFFFF',
# }
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "XlsxWriter Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = '_images/logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'XlsxWriterdoc'
# Remove permalinks.
html_add_permalinks = ""
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '11pt',
# Additional stuff for the LaTeX preamble.
'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'XlsxWriter.tex', u'Creating Excel files with Python and XlsxWriter',
u'John McNamara', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = '_images/logo.png'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'xlsxwriter', u'XlsxWriter Documentation',
[u'John McNamara'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'XlsxWriter', u'XlsxWriter Documentation',
u'John McNamara', 'XlsxWriter', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'XlsxWriter'
epub_author = u'John McNamara'
epub_publisher = u'John McNamara'
epub_copyright = u'2013-2016, John McNamara'
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
# epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
| bsd-2-clause |
chrisfranzen/django | django/contrib/flatpages/tests/forms.py | 113 | 4014 | from __future__ import unicode_literals
from django.conf import settings
from django.contrib.flatpages.forms import FlatpageForm
from django.contrib.flatpages.models import FlatPage
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import translation
@override_settings(SITE_ID=1)
class FlatpageAdminFormTests(TestCase):
fixtures = ['example_site']
def setUp(self):
self.form_data = {
'title': "A test page",
'content': "This is a test",
'sites': [settings.SITE_ID],
}
def test_flatpage_admin_form_url_validation(self):
"The flatpage admin form correctly validates urls"
self.assertTrue(FlatpageForm(data=dict(url='/new_flatpage/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.special~chars/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.very_special~chars-here/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a space/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a % char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ! char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a & char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ? char/', **self.form_data)).is_valid())
def test_flatpage_requires_leading_slash(self):
form = FlatpageForm(data=dict(url='no_leading_slash/', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a leading slash."])
@override_settings(APPEND_SLASH=True,
MIDDLEWARE_CLASSES=('django.middleware.common.CommonMiddleware',))
def test_flatpage_requires_trailing_slash_with_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a trailing slash."])
@override_settings(APPEND_SLASH=False,
MIDDLEWARE_CLASSES=('django.middleware.common.CommonMiddleware',))
def test_flatpage_doesnt_requires_trailing_slash_without_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
self.assertTrue(form.is_valid())
def test_flatpage_admin_form_url_uniqueness_validation(self):
"The flatpage admin form correctly enforces url uniqueness among flatpages of the same site"
data = dict(url='/myflatpage1/', **self.form_data)
FlatpageForm(data=data).save()
f = FlatpageForm(data=data)
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'__all__': ['Flatpage with url /myflatpage1/ already exists for site example.com']})
def test_flatpage_admin_form_edit(self):
"""
Existing flatpages can be edited in the admin form without triggering
the url-uniqueness validation.
"""
existing = FlatPage.objects.create(
url="/myflatpage1/", title="Some page", content="The content")
existing.sites.add(settings.SITE_ID)
data = dict(url='/myflatpage1/', **self.form_data)
f = FlatpageForm(data=data, instance=existing)
self.assertTrue(f.is_valid(), f.errors)
updated = f.save()
self.assertEqual(updated.title, "A test page")
def test_flatpage_nosites(self):
data = dict(url='/myflatpage1/', **self.form_data)
data.update({'sites': ''})
f = FlatpageForm(data=data)
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'sites': [translation.ugettext('This field is required.')]})
| bsd-3-clause |
jart/tensorflow | tensorflow/python/kernel_tests/distributions/kullback_leibler_test.py | 31 | 5251 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for distributions KL mechanism."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.ops.distributions import normal
from tensorflow.python.platform import test
# pylint: disable=protected-access
_DIVERGENCES = kullback_leibler._DIVERGENCES
_registered_kl = kullback_leibler._registered_kl
# pylint: enable=protected-access
class KLTest(test.TestCase):
def testRegistration(self):
class MyDist(normal.Normal):
pass
# Register KL to a lambda that spits out the name parameter
@kullback_leibler.RegisterKL(MyDist, MyDist)
def _kl(a, b, name=None): # pylint: disable=unused-argument,unused-variable
return name
a = MyDist(loc=0.0, scale=1.0)
self.assertEqual("OK", kullback_leibler.kl_divergence(a, a, name="OK"))
def testDomainErrorExceptions(self):
class MyDistException(normal.Normal):
pass
# Register KL to a lambda that spits out the name parameter
@kullback_leibler.RegisterKL(MyDistException, MyDistException)
# pylint: disable=unused-argument,unused-variable
def _kl(a, b, name=None):
return array_ops.identity([float("nan")])
# pylint: disable=unused-argument,unused-variable
with self.test_session():
a = MyDistException(loc=0.0, scale=1.0, allow_nan_stats=False)
kl = kullback_leibler.kl_divergence(a, a, allow_nan_stats=False)
with self.assertRaisesOpError(
"KL calculation between .* and .* returned NaN values"):
kl.eval()
with self.assertRaisesOpError(
"KL calculation between .* and .* returned NaN values"):
a.kl_divergence(a).eval()
a = MyDistException(loc=0.0, scale=1.0, allow_nan_stats=True)
kl_ok = kullback_leibler.kl_divergence(a, a)
self.assertAllEqual([float("nan")], kl_ok.eval())
self_kl_ok = a.kl_divergence(a)
self.assertAllEqual([float("nan")], self_kl_ok.eval())
cross_ok = a.cross_entropy(a)
self.assertAllEqual([float("nan")], cross_ok.eval())
def testRegistrationFailures(self):
class MyDist(normal.Normal):
pass
with self.assertRaisesRegexp(TypeError, "must be callable"):
kullback_leibler.RegisterKL(MyDist, MyDist)("blah")
# First registration is OK
kullback_leibler.RegisterKL(MyDist, MyDist)(lambda a, b: None)
# Second registration fails
with self.assertRaisesRegexp(ValueError, "has already been registered"):
kullback_leibler.RegisterKL(MyDist, MyDist)(lambda a, b: None)
def testExactRegistrationsAllMatch(self):
for (k, v) in _DIVERGENCES.items():
self.assertEqual(v, _registered_kl(*k))
def _testIndirectRegistration(self, fn):
class Sub1(normal.Normal):
def entropy(self):
return ""
class Sub2(normal.Normal):
def entropy(self):
return ""
class Sub11(Sub1):
def entropy(self):
return ""
# pylint: disable=unused-argument,unused-variable
@kullback_leibler.RegisterKL(Sub1, Sub1)
def _kl11(a, b, name=None):
return "sub1-1"
@kullback_leibler.RegisterKL(Sub1, Sub2)
def _kl12(a, b, name=None):
return "sub1-2"
@kullback_leibler.RegisterKL(Sub2, Sub1)
def _kl21(a, b, name=None):
return "sub2-1"
    # pylint: enable=unused-argument,unused-variable
sub1 = Sub1(loc=0.0, scale=1.0)
sub2 = Sub2(loc=0.0, scale=1.0)
sub11 = Sub11(loc=0.0, scale=1.0)
self.assertEqual("sub1-1", fn(sub1, sub1))
self.assertEqual("sub1-2", fn(sub1, sub2))
self.assertEqual("sub2-1", fn(sub2, sub1))
self.assertEqual("sub1-1", fn(sub11, sub11))
self.assertEqual("sub1-1", fn(sub11, sub1))
self.assertEqual("sub1-2", fn(sub11, sub2))
self.assertEqual("sub1-1", fn(sub11, sub1))
self.assertEqual("sub1-2", fn(sub11, sub2))
self.assertEqual("sub2-1", fn(sub2, sub11))
self.assertEqual("sub1-1", fn(sub1, sub11))
def testIndirectRegistrationKLFun(self):
self._testIndirectRegistration(kullback_leibler.kl_divergence)
def testIndirectRegistrationKLSelf(self):
self._testIndirectRegistration(
lambda p, q: p.kl_divergence(q))
def testIndirectRegistrationCrossEntropy(self):
self._testIndirectRegistration(
lambda p, q: p.cross_entropy(q))
def testFunctionCrossEntropy(self):
self._testIndirectRegistration(kullback_leibler.cross_entropy)
if __name__ == "__main__":
test.main()
| apache-2.0 |
srvg/ansible | lib/ansible/plugins/become/runas.py | 29 | 2455 | # -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
name: runas
short_description: Run As user
description:
- This become plugins allows your remote/login user to execute commands as another user via the windows runas facility.
author: ansible (@core)
version_added: "2.8"
options:
become_user:
description: User you 'become' to execute the task
ini:
- section: privilege_escalation
key: become_user
- section: runas_become_plugin
key: user
vars:
- name: ansible_become_user
- name: ansible_runas_user
env:
- name: ANSIBLE_BECOME_USER
- name: ANSIBLE_RUNAS_USER
required: True
become_flags:
description: Options to pass to runas, a space delimited list of k=v pairs
default: ''
ini:
- section: privilege_escalation
key: become_flags
- section: runas_become_plugin
key: flags
vars:
- name: ansible_become_flags
- name: ansible_runas_flags
env:
- name: ANSIBLE_BECOME_FLAGS
- name: ANSIBLE_RUNAS_FLAGS
become_pass:
description: password
ini:
- section: runas_become_plugin
key: password
vars:
- name: ansible_become_password
- name: ansible_become_pass
- name: ansible_runas_pass
env:
- name: ANSIBLE_BECOME_PASS
- name: ANSIBLE_RUNAS_PASS
notes:
- runas is really implemented in the powershell module handler and as such can only be used with winrm connections.
- This plugin ignores the 'become_exe' setting as it uses an API and not an executable.
- The Secondary Logon service (seclogon) must be running to use runas
"""
from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase):
name = 'runas'
def build_become_command(self, cmd, shell):
# runas is implemented inside the winrm connection plugin
return cmd
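# Illustrative task usage (requires a winrm connection, see notes above):
#   - name: run a command as another user
#     win_command: whoami
#     become: yes
#     become_method: runas
#     become_user: Administrator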
| gpl-3.0 |
stewartpark/django | django/utils/regex_helper.py | 432 | 12673 | """
Functions for reversing a regular expression (used in reverse URL resolving).
Used internally by Django and not intended for external use.
This is not, and is not intended to be, a complete reg-exp decompiler. It
should be good enough for a large class of URLS, however.
"""
from __future__ import unicode_literals
from django.utils import six
from django.utils.six.moves import zip
# Mapping of an escape character to a representative of that class. So, e.g.,
# "\w" is replaced by "x" in a reverse URL. A value of None means to ignore
# this sequence. Any missing key is mapped to itself.
ESCAPE_MAPPINGS = {
"A": None,
"b": None,
"B": None,
"d": "0",
"D": "x",
"s": " ",
"S": "x",
"w": "x",
"W": "!",
"Z": None,
}
class Choice(list):
"""
Used to represent multiple possibilities at this point in a pattern string.
We use a distinguished type, rather than a list, so that the usage in the
code is clear.
"""
class Group(list):
"""
Used to represent a capturing group in the pattern string.
"""
class NonCapture(list):
"""
Used to represent a non-capturing group in the pattern string.
"""
def normalize(pattern):
"""
Given a reg-exp pattern, normalizes it to an iterable of forms that
suffice for reverse matching. This does the following:
(1) For any repeating sections, keeps the minimum number of occurrences
permitted (this means zero for optional groups).
(2) If an optional group includes parameters, include one occurrence of
that group (along with the zero occurrence case from step (1)).
(3) Select the first (essentially an arbitrary) element from any character
class. Select an arbitrary character for any unordered class (e.g. '.'
or '\w') in the pattern.
(4) Ignore comments, look-ahead and look-behind assertions, and any of the
reg-exp flags that won't change what we construct ("iLmsu"). "(?x)" is
an error, however.
(5) Raise an error on any disjunctive ('|') constructs.
Django's URLs for forward resolving are either all positional arguments or
all keyword arguments. That is assumed here, as well. Although reverse
resolving can be done using positional args when keyword args are
specified, the two cannot be mixed in the same reverse() call.
"""
# Do a linear scan to work out the special features of this pattern. The
# idea is that we scan once here and collect all the information we need to
# make future decisions.
result = []
non_capturing_groups = []
consume_next = True
pattern_iter = next_char(iter(pattern))
num_args = 0
# A "while" loop is used here because later on we need to be able to peek
# at the next character and possibly go around without consuming another
# one at the top of the loop.
try:
ch, escaped = next(pattern_iter)
except StopIteration:
return [('', [])]
try:
while True:
if escaped:
result.append(ch)
elif ch == '.':
# Replace "any character" with an arbitrary representative.
result.append(".")
elif ch == '|':
                # FIXME: One day we should do this, but not in 1.0.
raise NotImplementedError('Awaiting Implementation')
elif ch == "^":
pass
elif ch == '$':
break
elif ch == ')':
# This can only be the end of a non-capturing group, since all
# other unescaped parentheses are handled by the grouping
# section later (and the full group is handled there).
#
# We regroup everything inside the capturing group so that it
# can be quantified, if necessary.
start = non_capturing_groups.pop()
inner = NonCapture(result[start:])
result = result[:start] + [inner]
elif ch == '[':
# Replace ranges with the first character in the range.
ch, escaped = next(pattern_iter)
result.append(ch)
ch, escaped = next(pattern_iter)
while escaped or ch != ']':
ch, escaped = next(pattern_iter)
elif ch == '(':
# Some kind of group.
ch, escaped = next(pattern_iter)
if ch != '?' or escaped:
# A positional group
name = "_%d" % num_args
num_args += 1
result.append(Group((("%%(%s)s" % name), name)))
walk_to_end(ch, pattern_iter)
else:
ch, escaped = next(pattern_iter)
if ch in "iLmsu#!=<":
# All of these are ignorable. Walk to the end of the
# group.
walk_to_end(ch, pattern_iter)
elif ch == ':':
# Non-capturing group
non_capturing_groups.append(len(result))
elif ch != 'P':
# Anything else, other than a named group, is something
# we cannot reverse.
raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
else:
ch, escaped = next(pattern_iter)
if ch not in ('<', '='):
raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
                        # We are in a named capturing group. Extract the name and
# then skip to the end.
if ch == '<':
terminal_char = '>'
# We are in a named backreference.
else:
terminal_char = ')'
name = []
ch, escaped = next(pattern_iter)
while ch != terminal_char:
name.append(ch)
ch, escaped = next(pattern_iter)
param = ''.join(name)
# Named backreferences have already consumed the
# parenthesis.
if terminal_char != ')':
result.append(Group((("%%(%s)s" % param), param)))
walk_to_end(ch, pattern_iter)
else:
result.append(Group((("%%(%s)s" % param), None)))
elif ch in "*?+{":
# Quantifiers affect the previous item in the result list.
count, ch = get_quantifier(ch, pattern_iter)
if ch:
                    # We had to look ahead, but it wasn't needed to compute the
# quantifier, so use this character next time around the
# main loop.
consume_next = False
if count == 0:
if contains(result[-1], Group):
# If we are quantifying a capturing group (or
# something containing such a group) and the minimum is
# zero, we must also handle the case of one occurrence
# being present. All the quantifiers (except {0,0},
# which we conveniently ignore) that have a 0 minimum
# also allow a single occurrence.
result[-1] = Choice([None, result[-1]])
else:
result.pop()
elif count > 1:
result.extend([result[-1]] * (count - 1))
else:
# Anything else is a literal.
result.append(ch)
if consume_next:
ch, escaped = next(pattern_iter)
else:
consume_next = True
except StopIteration:
pass
except NotImplementedError:
# A case of using the disjunctive form. No results for you!
return [('', [])]
return list(zip(*flatten_result(result)))
def next_char(input_iter):
"""
    An iterator that yields the next character from "input_iter", respecting
escape sequences. An escaped character is replaced by a representative of
its class (e.g. \w -> "x"). If the escaped character is one that is
skipped, it is not returned (the next character is returned instead).
Yields the next character, along with a boolean indicating whether it is a
raw (unescaped) character or not.
"""
for ch in input_iter:
if ch != '\\':
yield ch, False
continue
ch = next(input_iter)
representative = ESCAPE_MAPPINGS.get(ch, ch)
if representative is None:
continue
yield representative, True
def walk_to_end(ch, input_iter):
"""
The iterator is currently inside a capturing group. We want to walk to the
close of this group, skipping over any nested groups and handling escaped
parentheses correctly.
"""
if ch == '(':
nesting = 1
else:
nesting = 0
for ch, escaped in input_iter:
if escaped:
continue
elif ch == '(':
nesting += 1
elif ch == ')':
if not nesting:
return
nesting -= 1
def get_quantifier(ch, input_iter):
"""
Parse a quantifier from the input, where "ch" is the first character in the
quantifier.
Returns the minimum number of occurrences permitted by the quantifier and
either None or the next character from the input_iter if the next character
is not part of the quantifier.
"""
if ch in '*?+':
try:
ch2, escaped = next(input_iter)
except StopIteration:
ch2 = None
if ch2 == '?':
ch2 = None
if ch == '+':
return 1, ch2
return 0, ch2
quant = []
while ch != '}':
ch, escaped = next(input_iter)
quant.append(ch)
quant = quant[:-1]
values = ''.join(quant).split(',')
# Consume the trailing '?', if necessary.
try:
ch, escaped = next(input_iter)
except StopIteration:
ch = None
if ch == '?':
ch = None
return int(values[0]), ch
def contains(source, inst):
"""
Returns True if the "source" contains an instance of "inst". False,
otherwise.
"""
if isinstance(source, inst):
return True
if isinstance(source, NonCapture):
for elt in source:
if contains(elt, inst):
return True
return False
def flatten_result(source):
"""
Turns the given source sequence into a list of reg-exp possibilities and
their arguments. Returns a list of strings and a list of argument lists.
Each of the two lists will be of the same length.
"""
if source is None:
return [''], [[]]
if isinstance(source, Group):
if source[1] is None:
params = []
else:
params = [source[1]]
return [source[0]], [params]
result = ['']
result_args = [[]]
pos = last = 0
for pos, elt in enumerate(source):
if isinstance(elt, six.string_types):
continue
piece = ''.join(source[last:pos])
if isinstance(elt, Group):
piece += elt[0]
param = elt[1]
else:
param = None
last = pos + 1
for i in range(len(result)):
result[i] += piece
if param:
result_args[i].append(param)
if isinstance(elt, (Choice, NonCapture)):
if isinstance(elt, NonCapture):
elt = [elt]
inner_result, inner_args = [], []
for item in elt:
res, args = flatten_result(item)
inner_result.extend(res)
inner_args.extend(args)
new_result = []
new_args = []
for item, args in zip(result, result_args):
for i_item, i_args in zip(inner_result, inner_args):
new_result.append(item + i_item)
new_args.append(args[:] + i_args)
result = new_result
result_args = new_args
if pos >= last:
piece = ''.join(source[last:])
for i in range(len(result)):
result[i] += piece
return result, result_args
| bsd-3-clause |
CruiseDevice/coala | coalib/bearlib/aspects/meta.py | 10 | 3296 | import re
from inspect import getmembers, signature
from coala_utils.decorators import generate_repr
from .base import aspectbase
from .docs import Documentation
from .exceptions import AspectTypeError
from .taste import Taste
class aspectclass(type):
"""
Metaclass for aspectclasses.
Root aspectclass is :class:`coalib.bearlib.aspects.Root`.
"""
def __init__(cls, clsname, bases, clsattrs):
"""
Initializes the ``.subaspects`` dict on new aspectclasses.
"""
cls.subaspects = {}
@property
def tastes(cls):
"""
Get a dictionary of all taste names mapped to their
:class:`coalib.bearlib.aspects.Taste` instances.
"""
if cls.parent:
return dict(cls.parent.tastes, **cls._tastes)
return dict(cls._tastes)
def subaspect(cls, subcls):
"""
The sub-aspectclass decorator.
See :class:`coalib.bearlib.aspects.Root` for description
and usage.
"""
aspectname = subcls.__name__
sub_qualname = '%s.%s' % (cls.__qualname__, aspectname)
docs = getattr(subcls, 'docs', None)
aspectdocs = Documentation(subcls.__doc__, **{
attr: getattr(docs, attr, '') for attr in
list(signature(Documentation).parameters.keys())[1:]})
# search for tastes in the sub-aspectclass
subtastes = {}
for name, member in getmembers(subcls):
if isinstance(member, Taste):
# tell the taste its own name
member.name = name
# tell its owner name
member.aspect_name = sub_qualname
subtastes[name] = member
class Sub(subcls, aspectbase, metaclass=aspectclass):
__module__ = subcls.__module__
parent = cls
docs = aspectdocs
_tastes = subtastes
members = sorted(Sub.tastes)
if members:
Sub = generate_repr(*members)(Sub)
Sub.__name__ = aspectname
Sub.__qualname__ = sub_qualname
cls.subaspects[aspectname] = Sub
setattr(cls, aspectname, Sub)
return Sub
def __repr__(cls):
return '<%s %s>' % (type(cls).__name__, repr(cls.__qualname__))
def isaspect(item):
"""
This function checks whether or not an object is an ``aspectclass`` or an
instance of ``aspectclass``
"""
return isinstance(item, (aspectclass, aspectbase))
def assert_aspect(item):
"""
This function raises ``AspectTypeError`` when an object is not an
``aspectclass`` or an instance of ``aspectclass``
"""
if not isaspect(item):
raise AspectTypeError(item)
return item
def issubaspect(subaspect, aspect):
"""
This function checks whether or not ``subaspect`` is a subaspect of
``aspect``.
"""
subaspect = assert_aspect(subaspect)
aspect = assert_aspect(aspect)
aspect_qualname = (aspect.__qualname__ if isinstance(
aspect, aspectclass) else type(aspect).__qualname__)
subaspect_qualname = (subaspect.__qualname__ if isinstance(
subaspect, aspectclass) else type(subaspect).__qualname__)
    return re.match(aspect_qualname + r'(\.|$)', subaspect_qualname) is not None
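# Illustrative usage (assuming the usual aspect tree is defined on Root):
#   issubaspect(Root.Redundancy.Clone, Root.Redundancy)  # True
#   issubaspect(Root.Redundancy, Root.Redundancy.Clone)  # False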
| agpl-3.0 |
ArteliaTelemac/PostTelemac | PostTelemac/meshlayerlibs/pyqtgraph/parametertree/parameterTypes.py | 2 | 25391 | from ..Qt import QtCore, QtGui
from ..python2_3 import asUnicode
from .Parameter import Parameter, registerParameterType
from .ParameterItem import ParameterItem
from ..widgets.SpinBox import SpinBox
from ..widgets.ColorButton import ColorButton
from ..colormap import ColorMap
from .. import pixmaps as pixmaps
from .. import functions as fn
import os, sys
from ..pgcollections import OrderedDict
class WidgetParameterItem(ParameterItem):
"""
ParameterTree item with:
* label in second column for displaying value
* simple widget for editing value (displayed instead of label when item is selected)
* button that resets value to default
========================== =============================================================
**Registered Types:**
int Displays a :class:`SpinBox <pyqtgraph.SpinBox>` in integer
mode.
float Displays a :class:`SpinBox <pyqtgraph.SpinBox>`.
bool Displays a QCheckBox
str Displays a QLineEdit
color Displays a :class:`ColorButton <pyqtgraph.ColorButton>`
colormap Displays a :class:`GradientWidget <pyqtgraph.GradientWidget>`
========================== =============================================================
This class can be subclassed by overriding makeWidget() to provide a custom widget.
"""
def __init__(self, param, depth):
ParameterItem.__init__(self, param, depth)
self.hideWidget = True ## hide edit widget, replace with label when not selected
## set this to False to keep the editor widget always visible
## build widget into column 1 with a display label and default button.
w = self.makeWidget()
self.widget = w
self.eventProxy = EventProxy(w, self.widgetEventFilter)
self.defaultBtn = QtGui.QPushButton()
self.defaultBtn.setAutoDefault(False)
self.defaultBtn.setFixedWidth(20)
self.defaultBtn.setFixedHeight(20)
modDir = os.path.dirname(__file__)
self.defaultBtn.setIcon(QtGui.QIcon(pixmaps.getPixmap('default')))
self.defaultBtn.clicked.connect(self.defaultClicked)
self.displayLabel = QtGui.QLabel()
layout = QtGui.QHBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(2)
layout.addWidget(w)
layout.addWidget(self.displayLabel)
layout.addWidget(self.defaultBtn)
self.layoutWidget = QtGui.QWidget()
self.layoutWidget.setLayout(layout)
if w.sigChanged is not None:
w.sigChanged.connect(self.widgetValueChanged)
if hasattr(w, 'sigChanging'):
w.sigChanging.connect(self.widgetValueChanging)
## update value shown in widget.
opts = self.param.opts
if opts.get('value', None) is not None:
self.valueChanged(self, opts['value'], force=True)
else:
## no starting value was given; use whatever the widget has
self.widgetValueChanged()
self.updateDefaultBtn()
self.optsChanged(self.param, self.param.opts)
def makeWidget(self):
"""
Return a single widget that should be placed in the second tree column.
The widget must be given three attributes:
========== ============================================================
sigChanged a signal that is emitted when the widget's value is changed
value a function that returns the value
setValue a function that sets the value
========== ============================================================
This is a good function to override in subclasses.
"""
opts = self.param.opts
t = opts['type']
if t in ('int', 'float'):
defs = {
'value': 0, 'min': None, 'max': None,
'step': 1.0, 'dec': False,
'siPrefix': False, 'suffix': '', 'decimals': 3,
}
if t == 'int':
defs['int'] = True
defs['minStep'] = 1.0
defs['format'] = '{value:d}'
for k in defs:
if k in opts:
defs[k] = opts[k]
if 'limits' in opts:
defs['min'], defs['max'] = opts['limits']
w = SpinBox()
w.setOpts(**defs)
w.sigChanged = w.sigValueChanged
w.sigChanging = w.sigValueChanging
elif t == 'bool':
w = QtGui.QCheckBox()
w.sigChanged = w.toggled
w.value = w.isChecked
w.setValue = w.setChecked
w.setEnabled(not opts.get('readonly', False))
self.hideWidget = False
elif t == 'str':
w = QtGui.QLineEdit()
w.setStyleSheet('border: 0px')
w.sigChanged = w.editingFinished
w.value = lambda: asUnicode(w.text())
w.setValue = lambda v: w.setText(asUnicode(v))
w.sigChanging = w.textChanged
elif t == 'color':
w = ColorButton()
w.sigChanged = w.sigColorChanged
w.sigChanging = w.sigColorChanging
w.value = w.color
w.setValue = w.setColor
self.hideWidget = False
w.setFlat(True)
w.setEnabled(not opts.get('readonly', False))
elif t == 'colormap':
from ..widgets.GradientWidget import GradientWidget ## need this here to avoid import loop
w = GradientWidget(orientation='bottom')
w.sigChanged = w.sigGradientChangeFinished
w.sigChanging = w.sigGradientChanged
w.value = w.colorMap
w.setValue = w.setColorMap
self.hideWidget = False
else:
raise Exception("Unknown type '%s'" % asUnicode(t))
return w
def widgetEventFilter(self, obj, ev):
## filter widget's events
## catch TAB to change focus
## catch focusOut to hide editor
if ev.type() == ev.KeyPress:
if ev.key() == QtCore.Qt.Key_Tab:
self.focusNext(forward=True)
return True ## don't let anyone else see this event
elif ev.key() == QtCore.Qt.Key_Backtab:
self.focusNext(forward=False)
return True ## don't let anyone else see this event
return False
def setFocus(self):
self.showEditor()
def isFocusable(self):
return self.param.writable()
def valueChanged(self, param, val, force=False):
## called when the parameter's value has changed
ParameterItem.valueChanged(self, param, val)
self.widget.sigChanged.disconnect(self.widgetValueChanged)
try:
if force or val != self.widget.value():
self.widget.setValue(val)
self.updateDisplayLabel(val) ## always make sure label is updated, even if values match!
finally:
self.widget.sigChanged.connect(self.widgetValueChanged)
self.updateDefaultBtn()
def updateDefaultBtn(self):
## enable/disable default btn
self.defaultBtn.setEnabled(not self.param.valueIsDefault() and self.param.writable())
# hide / show
self.defaultBtn.setVisible(not self.param.readonly())
def updateDisplayLabel(self, value=None):
"""Update the display label to reflect the value of the parameter."""
if value is None:
value = self.param.value()
opts = self.param.opts
if isinstance(self.widget, QtGui.QAbstractSpinBox):
text = asUnicode(self.widget.lineEdit().text())
elif isinstance(self.widget, QtGui.QComboBox):
text = self.widget.currentText()
else:
text = asUnicode(value)
self.displayLabel.setText(text)
def widgetValueChanged(self):
## called when the widget's value has been changed by the user
val = self.widget.value()
newVal = self.param.setValue(val)
def widgetValueChanging(self, *args):
"""
Called when the widget's value is changing, but not finalized.
For example: editing text before pressing enter or changing focus.
"""
# This is a bit sketchy: assume the last argument of each signal is
# the value..
self.param.sigValueChanging.emit(self.param, args[-1])
def selected(self, sel):
"""Called when this item has been selected (sel=True) OR deselected (sel=False)"""
ParameterItem.selected(self, sel)
if self.widget is None:
return
if sel and self.param.writable():
self.showEditor()
elif self.hideWidget:
self.hideEditor()
def showEditor(self):
self.widget.show()
self.displayLabel.hide()
self.widget.setFocus(QtCore.Qt.OtherFocusReason)
if isinstance(self.widget, SpinBox):
self.widget.selectNumber() # select the numerical portion of the text for quick editing
def hideEditor(self):
self.widget.hide()
self.displayLabel.show()
def limitsChanged(self, param, limits):
"""Called when the parameter's limits have changed"""
ParameterItem.limitsChanged(self, param, limits)
t = self.param.opts['type']
if t == 'int' or t == 'float':
self.widget.setOpts(bounds=limits)
else:
return ## don't know what to do with any other types..
def defaultChanged(self, param, value):
self.updateDefaultBtn()
def treeWidgetChanged(self):
"""Called when this item is added or removed from a tree."""
ParameterItem.treeWidgetChanged(self)
## add all widgets for this item into the tree
if self.widget is not None:
tree = self.treeWidget()
if tree is None:
return
tree.setItemWidget(self, 1, self.layoutWidget)
self.displayLabel.hide()
self.selected(False)
def defaultClicked(self):
self.param.setToDefault()
def optsChanged(self, param, opts):
"""Called when any options are changed that are not
name, value, default, or limits"""
ParameterItem.optsChanged(self, param, opts)
if 'readonly' in opts:
self.updateDefaultBtn()
if isinstance(self.widget, (QtGui.QCheckBox,ColorButton)):
self.widget.setEnabled(not opts['readonly'])
if 'tip' in opts:
self.widget.setToolTip(opts['tip'])
## If widget is a SpinBox, pass options straight through
if isinstance(self.widget, SpinBox):
# send only options supported by spinbox
sbOpts = {}
if 'units' in opts and 'suffix' not in opts:
sbOpts['suffix'] = opts['units']
for k,v in opts.items():
if k in self.widget.opts:
sbOpts[k] = v
self.widget.setOpts(**sbOpts)
self.updateDisplayLabel()
class EventProxy(QtCore.QObject):
def __init__(self, qobj, callback):
QtCore.QObject.__init__(self)
self.callback = callback
qobj.installEventFilter(self)
def eventFilter(self, obj, ev):
return self.callback(obj, ev)
class SimpleParameter(Parameter):
itemClass = WidgetParameterItem
def __init__(self, *args, **kargs):
Parameter.__init__(self, *args, **kargs)
## override a few methods for color parameters
if self.opts['type'] == 'color':
self.value = self.colorValue
self.saveState = self.saveColorState
def setValue(self, value, blockSignal=None):
if self.opts['type'] == 'int':
value = int(value)
Parameter.setValue(self, value, blockSignal)
def colorValue(self):
return fn.mkColor(Parameter.value(self))
def saveColorState(self, *args, **kwds):
state = Parameter.saveState(self, *args, **kwds)
state['value'] = fn.colorTuple(self.value())
return state
def _interpretValue(self, v):
fn = {
'int': int,
'float': float,
'bool': bool,
'str': asUnicode,
'color': self._interpColor,
'colormap': self._interpColormap,
}[self.opts['type']]
return fn(v)
def _interpColor(self, v):
return fn.mkColor(v)
def _interpColormap(self, v):
if not isinstance(v, ColorMap):
raise TypeError("Cannot set colormap parameter from object %r" % v)
return v
registerParameterType('int', SimpleParameter, override=True)
registerParameterType('float', SimpleParameter, override=True)
registerParameterType('bool', SimpleParameter, override=True)
registerParameterType('str', SimpleParameter, override=True)
registerParameterType('color', SimpleParameter, override=True)
registerParameterType('colormap', SimpleParameter, override=True)
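# --- Illustrative usage sketch (editorial addition, not part of the original module).
# The option names below ('value', 'limits', 'step') follow the defaults handled in
# WidgetParameterItem.makeWidget() above; treat this as a non-authoritative example.
def _simple_parameter_example():
    """Create a few of the simple parameter types registered above."""
    count = Parameter.create(name='count', type='int', value=10, limits=(0, 100))
    ratio = Parameter.create(name='ratio', type='float', value=0.5, step=0.1)
    flag = Parameter.create(name='enabled', type='bool', value=True)
    count.setValue(20)  ## emits sigValueChanged on the parameter
    return count, ratio, flag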
class GroupParameterItem(ParameterItem):
"""
Group parameters are used mainly as a generic parent item that holds (and groups!) a set
of child parameters. It also provides a simple mechanism for displaying a button or combo
that can be used to add new parameters to the group.
"""
def __init__(self, param, depth):
ParameterItem.__init__(self, param, depth)
self.updateDepth(depth)
self.addItem = None
if 'addText' in param.opts:
addText = param.opts['addText']
if 'addList' in param.opts:
self.addWidget = QtGui.QComboBox()
self.addWidget.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents)
self.updateAddList()
self.addWidget.currentIndexChanged.connect(self.addChanged)
else:
self.addWidget = QtGui.QPushButton(addText)
self.addWidget.clicked.connect(self.addClicked)
w = QtGui.QWidget()
l = QtGui.QHBoxLayout()
l.setContentsMargins(0,0,0,0)
w.setLayout(l)
l.addWidget(self.addWidget)
l.addStretch()
self.addWidgetBox = w
self.addItem = QtGui.QTreeWidgetItem([])
self.addItem.setFlags(QtCore.Qt.ItemIsEnabled)
ParameterItem.addChild(self, self.addItem)
def updateDepth(self, depth):
## Change item's appearance based on its depth in the tree
## This allows highest-level groups to be displayed more prominently.
if depth == 0:
for c in [0,1]:
self.setBackground(c, QtGui.QBrush(QtGui.QColor(100,100,100)))
self.setForeground(c, QtGui.QBrush(QtGui.QColor(220,220,255)))
font = self.font(c)
font.setBold(True)
font.setPointSize(font.pointSize()+1)
self.setFont(c, font)
self.setSizeHint(0, QtCore.QSize(0, 25))
else:
for c in [0,1]:
self.setBackground(c, QtGui.QBrush(QtGui.QColor(220,220,220)))
self.setForeground(c, QtGui.QBrush(QtGui.QColor(50,50,50)))
font = self.font(c)
font.setBold(True)
#font.setPointSize(font.pointSize()+1)
self.setFont(c, font)
self.setSizeHint(0, QtCore.QSize(0, 20))
def addClicked(self):
"""Called when "add new" button is clicked
The parameter MUST have an 'addNew' method defined.
"""
self.param.addNew()
def addChanged(self):
"""Called when "add new" combo is changed
The parameter MUST have an 'addNew' method defined.
"""
if self.addWidget.currentIndex() == 0:
return
typ = asUnicode(self.addWidget.currentText())
self.param.addNew(typ)
self.addWidget.setCurrentIndex(0)
def treeWidgetChanged(self):
ParameterItem.treeWidgetChanged(self)
tw = self.treeWidget()
if tw is None:
return
tw.setFirstItemColumnSpanned(self, True)
if self.addItem is not None:
tw.setItemWidget(self.addItem, 0, self.addWidgetBox)
tw.setFirstItemColumnSpanned(self.addItem, True)
    def addChild(self, child):  ## make sure added children are actually inserted before the add btn
if self.addItem is not None:
ParameterItem.insertChild(self, self.childCount()-1, child)
else:
ParameterItem.addChild(self, child)
def optsChanged(self, param, opts):
ParameterItem.optsChanged(self, param, opts)
if 'addList' in opts:
self.updateAddList()
def updateAddList(self):
self.addWidget.blockSignals(True)
try:
self.addWidget.clear()
self.addWidget.addItem(self.param.opts['addText'])
for t in self.param.opts['addList']:
self.addWidget.addItem(t)
finally:
self.addWidget.blockSignals(False)
class GroupParameter(Parameter):
"""
Group parameters are used mainly as a generic parent item that holds (and groups!) a set
of child parameters.
It also provides a simple mechanism for displaying a button or combo
that can be used to add new parameters to the group. To enable this, the group
must be initialized with the 'addText' option (the text will be displayed on
a button which, when clicked, will cause addNew() to be called). If the 'addList'
option is specified as well, then a dropdown-list of addable items will be displayed
instead of a button.
"""
itemClass = GroupParameterItem
sigAddNew = QtCore.Signal(object, object) # self, type
def addNew(self, typ=None):
"""
This method is called when the user has requested to add a new item to the group.
By default, it emits ``sigAddNew(self, typ)``.
"""
self.sigAddNew.emit(self, typ)
def setAddList(self, vals):
"""Change the list of options available for the user to add to the group."""
self.setOpts(addList=vals)
registerParameterType('group', GroupParameter, override=True)
class ListParameterItem(WidgetParameterItem):
"""
    WidgetParameterItem subclass providing a comboBox that lets the user select from a list of options.
"""
def __init__(self, param, depth):
self.targetValue = None
WidgetParameterItem.__init__(self, param, depth)
def makeWidget(self):
opts = self.param.opts
t = opts['type']
w = QtGui.QComboBox()
w.setMaximumHeight(20) ## set to match height of spin box and line edit
w.sigChanged = w.currentIndexChanged
w.value = self.value
w.setValue = self.setValue
self.widget = w ## needs to be set before limits are changed
self.limitsChanged(self.param, self.param.opts['limits'])
if len(self.forward) > 0:
self.setValue(self.param.value())
return w
def value(self):
key = asUnicode(self.widget.currentText())
return self.forward.get(key, None)
def setValue(self, val):
self.targetValue = val
if val not in self.reverse[0]:
self.widget.setCurrentIndex(0)
else:
key = self.reverse[1][self.reverse[0].index(val)]
ind = self.widget.findText(key)
self.widget.setCurrentIndex(ind)
def limitsChanged(self, param, limits):
# set up forward / reverse mappings for name:value
if len(limits) == 0:
            limits = [''] ## Can never have an empty list--there is always at least a single blank item.
self.forward, self.reverse = ListParameter.mapping(limits)
try:
self.widget.blockSignals(True)
val = self.targetValue #asUnicode(self.widget.currentText())
self.widget.clear()
for k in self.forward:
self.widget.addItem(k)
if k == val:
self.widget.setCurrentIndex(self.widget.count()-1)
self.updateDisplayLabel()
finally:
self.widget.blockSignals(False)
class ListParameter(Parameter):
itemClass = ListParameterItem
def __init__(self, **opts):
self.forward = OrderedDict() ## {name: value, ...}
self.reverse = ([], []) ## ([value, ...], [name, ...])
# Parameter uses 'limits' option to define the set of allowed values
if 'values' in opts:
opts['limits'] = opts['values']
if opts.get('limits', None) is None:
opts['limits'] = []
Parameter.__init__(self, **opts)
self.setLimits(opts['limits'])
def setLimits(self, limits):
self.forward, self.reverse = self.mapping(limits)
Parameter.setLimits(self, limits)
if len(self.reverse[0]) > 0 and self.value() not in self.reverse[0]:
self.setValue(self.reverse[0][0])
@staticmethod
def mapping(limits):
# Return forward and reverse mapping objects given a limit specification
forward = OrderedDict() ## {name: value, ...}
reverse = ([], []) ## ([value, ...], [name, ...])
if isinstance(limits, dict):
for k, v in limits.items():
forward[k] = v
reverse[0].append(v)
reverse[1].append(k)
else:
for v in limits:
n = asUnicode(v)
forward[n] = v
reverse[0].append(v)
reverse[1].append(n)
return forward, reverse
registerParameterType('list', ListParameter, override=True)
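# Illustrative usage sketch (editorial addition, not part of the original module): a 'list'
# parameter maps display names to values through ListParameter.mapping(); 'values' may be
# supplied either as a dict (name -> value) or as a plain list.
def _list_parameter_example():
    """Create a list parameter whose combo box shows names but returns values."""
    mode = Parameter.create(name='mode', type='list',
                            values={'Fast': 1, 'Accurate': 2}, value=1)
    return mode.value()  ## -> 1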
class ActionParameterItem(ParameterItem):
def __init__(self, param, depth):
ParameterItem.__init__(self, param, depth)
self.layoutWidget = QtGui.QWidget()
self.layout = QtGui.QHBoxLayout()
self.layout.setContentsMargins(0, 0, 0, 0)
self.layoutWidget.setLayout(self.layout)
title = param.opts.get('title', None)
if title is None:
title = param.name()
self.button = QtGui.QPushButton(title)
#self.layout.addSpacing(100)
self.layout.addWidget(self.button)
self.layout.addStretch()
self.button.clicked.connect(self.buttonClicked)
param.sigNameChanged.connect(self.paramRenamed)
self.setText(0, '')
def treeWidgetChanged(self):
ParameterItem.treeWidgetChanged(self)
tree = self.treeWidget()
if tree is None:
return
tree.setFirstItemColumnSpanned(self, True)
tree.setItemWidget(self, 0, self.layoutWidget)
def paramRenamed(self, param, name):
self.button.setText(name)
def buttonClicked(self):
self.param.activate()
class ActionParameter(Parameter):
"""Used for displaying a button within the tree."""
itemClass = ActionParameterItem
sigActivated = QtCore.Signal(object)
def activate(self):
self.sigActivated.emit(self)
self.emitStateChanged('activated', None)
registerParameterType('action', ActionParameter, override=True)
class TextParameterItem(WidgetParameterItem):
def __init__(self, param, depth):
WidgetParameterItem.__init__(self, param, depth)
self.hideWidget = False
self.subItem = QtGui.QTreeWidgetItem()
self.addChild(self.subItem)
def treeWidgetChanged(self):
## TODO: fix so that superclass method can be called
## (WidgetParameter should just natively support this style)
#WidgetParameterItem.treeWidgetChanged(self)
tw = self.treeWidget()
if tw is None:
return
tw.setFirstItemColumnSpanned(self.subItem, True)
tw.setItemWidget(self.subItem, 0, self.textBox)
# for now, these are copied from ParameterItem.treeWidgetChanged
self.setHidden(not self.param.opts.get('visible', True))
self.setExpanded(self.param.opts.get('expanded', True))
def makeWidget(self):
self.textBox = QtGui.QTextEdit()
self.textBox.setMaximumHeight(100)
self.textBox.setReadOnly(self.param.opts.get('readonly', False))
self.textBox.value = lambda: str(self.textBox.toPlainText())
self.textBox.setValue = self.textBox.setPlainText
self.textBox.sigChanged = self.textBox.textChanged
return self.textBox
class TextParameter(Parameter):
"""Editable string; displayed as large text box in the tree."""
itemClass = TextParameterItem
registerParameterType('text', TextParameter, override=True)
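# Illustrative display sketch (editorial addition, not part of the original module): any
# parameter built from the types registered above can be shown in a ParameterTree.  The
# relative import path and a running QApplication are assumptions here.
def _parameter_tree_example(param):
    """Show a parameter (and its children) in a tree widget."""
    from .ParameterTree import ParameterTree
    tree = ParameterTree()
    tree.setParameters(param, showTop=True)
    tree.show()
    return tree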
| gpl-3.0 |
LiveZenLK/CeygateERP | addons/product/pricelist.py | 10 | 19402 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from itertools import chain
import time
from openerp import tools
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import UserError
from openerp import api, models, fields as Fields
#----------------------------------------------------------
# Price lists
#----------------------------------------------------------
class product_pricelist(osv.osv):
_name = "product.pricelist"
_description = "Pricelist"
_order = 'name'
_columns = {
'name': fields.char('Pricelist Name', required=True, translate=True),
'active': fields.boolean('Active', help="If unchecked, it will allow you to hide the pricelist without removing it."),
'item_ids': fields.one2many('product.pricelist.item', 'pricelist_id', 'Pricelist Items', copy=True),
'currency_id': fields.many2one('res.currency', 'Currency', required=True),
'company_id': fields.many2one('res.company', 'Company'),
}
def name_get(self, cr, uid, ids, context=None):
result= []
if not all(ids):
return result
for pl in self.browse(cr, uid, ids, context=context):
name = pl.name + ' ('+ pl.currency_id.name + ')'
result.append((pl.id,name))
return result
def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100):
if name and operator == '=' and not args:
# search on the name of the pricelist and its currency, opposite of name_get(),
# Used by the magic context filter in the product search view.
query_args = {'name': name, 'limit': limit, 'lang': (context or {}).get('lang') or 'en_US'}
query = """SELECT p.id
FROM ((
SELECT pr.id, pr.name
FROM product_pricelist pr JOIN
res_currency cur ON
(pr.currency_id = cur.id)
WHERE pr.name || ' (' || cur.name || ')' = %(name)s
)
UNION (
SELECT tr.res_id as id, tr.value as name
FROM ir_translation tr JOIN
product_pricelist pr ON (
pr.id = tr.res_id AND
tr.type = 'model' AND
tr.name = 'product.pricelist,name' AND
tr.lang = %(lang)s
) JOIN
res_currency cur ON
(pr.currency_id = cur.id)
WHERE tr.value || ' (' || cur.name || ')' = %(name)s
)
) p
ORDER BY p.name"""
if limit:
query += " LIMIT %(limit)s"
cr.execute(query, query_args)
ids = [r[0] for r in cr.fetchall()]
# regular search() to apply ACLs - may limit results below limit in some cases
ids = self.search(cr, uid, [('id', 'in', ids)], limit=limit, context=context)
if ids:
return self.name_get(cr, uid, ids, context)
return super(product_pricelist, self).name_search(
cr, uid, name, args, operator=operator, context=context, limit=limit)
def _get_currency(self, cr, uid, ctx):
comp = self.pool.get('res.users').browse(cr, uid, uid).company_id
if not comp:
comp_id = self.pool.get('res.company').search(cr, uid, [])[0]
comp = self.pool.get('res.company').browse(cr, uid, comp_id)
return comp.currency_id.id
_defaults = {
'active': lambda *a: 1,
"currency_id": _get_currency
}
def price_rule_get_multi(self, cr, uid, ids, products_by_qty_by_partner, context=None):
"""multi products 'price_get'.
@param ids:
@param products_by_qty:
@param partner:
@param context: {
'date': Date of the pricelist (%Y-%m-%d),}
@return: a dict of dict with product_id as key and a dict 'price by pricelist' as value
"""
if not ids:
ids = self.pool.get('product.pricelist').search(cr, uid, [], context=context)
results = {}
for pricelist in self.browse(cr, uid, ids, context=context):
subres = self._price_rule_get_multi(cr, uid, pricelist, products_by_qty_by_partner, context=context)
for product_id, price in subres.items():
results.setdefault(product_id, {})
results[product_id][pricelist.id] = price
return results
def _price_get_multi(self, cr, uid, pricelist, products_by_qty_by_partner, context=None):
return dict((key, price[0]) for key, price in self._price_rule_get_multi(cr, uid, pricelist, products_by_qty_by_partner, context=context).items())
def _price_rule_get_multi(self, cr, uid, pricelist, products_by_qty_by_partner, context=None):
context = context or {}
date = context.get('date') and context['date'][0:10] or time.strftime(DEFAULT_SERVER_DATE_FORMAT)
products = map(lambda x: x[0], products_by_qty_by_partner)
product_uom_obj = self.pool.get('product.uom')
if not products:
return {}
categ_ids = {}
for p in products:
categ = p.categ_id
while categ:
categ_ids[categ.id] = True
categ = categ.parent_id
categ_ids = categ_ids.keys()
is_product_template = products[0]._name == "product.template"
if is_product_template:
prod_tmpl_ids = [tmpl.id for tmpl in products]
# all variants of all products
prod_ids = [p.id for p in
list(chain.from_iterable([t.product_variant_ids for t in products]))]
else:
prod_ids = [product.id for product in products]
prod_tmpl_ids = [product.product_tmpl_id.id for product in products]
# Load all rules
cr.execute(
'SELECT i.id '
'FROM product_pricelist_item AS i '
'WHERE (product_tmpl_id IS NULL OR product_tmpl_id = any(%s))'
'AND (product_id IS NULL OR product_id = any(%s))'
'AND (categ_id IS NULL OR categ_id = any(%s)) '
'AND (pricelist_id = %s) '
'AND ((i.date_start IS NULL OR i.date_start<=%s) AND (i.date_end IS NULL OR i.date_end>=%s))'
'ORDER BY applied_on, min_quantity desc',
(prod_tmpl_ids, prod_ids, categ_ids, pricelist.id, date, date))
item_ids = [x[0] for x in cr.fetchall()]
items = self.pool.get('product.pricelist.item').browse(cr, uid, item_ids, context=context)
results = {}
for product, qty, partner in products_by_qty_by_partner:
results[product.id] = 0.0
suitable_rule = False
# Final unit price is computed according to `qty` in the `qty_uom_id` UoM.
# An intermediary unit price may be computed according to a different UoM, in
# which case the price_uom_id contains that UoM.
# The final price will be converted to match `qty_uom_id`.
qty_uom_id = context.get('uom') or product.uom_id.id
price_uom_id = product.uom_id.id
qty_in_product_uom = qty
if qty_uom_id != product.uom_id.id:
try:
qty_in_product_uom = product_uom_obj._compute_qty(
cr, uid, context['uom'], qty, product.uom_id.id)
except UserError:
# Ignored - incompatible UoM in context, use default product UoM
pass
            # if a public user tries to access the standard price from website sale, we need to call _price_get.
price = self.pool['product.template']._price_get(cr, uid, [product], 'list_price', context=context)[product.id]
price_uom_id = qty_uom_id
for rule in items:
if rule.min_quantity and qty_in_product_uom < rule.min_quantity:
continue
if is_product_template:
if rule.product_tmpl_id and product.id != rule.product_tmpl_id.id:
continue
if rule.product_id and \
(product.product_variant_count > 1 or product.product_variant_ids[0].id != rule.product_id.id):
                    # a product rule applies to a template only if the template has a single variant
continue
else:
if rule.product_tmpl_id and product.product_tmpl_id.id != rule.product_tmpl_id.id:
continue
if rule.product_id and product.id != rule.product_id.id:
continue
if rule.base == 'pricelist' and rule.base_pricelist_id:
price_tmp = self._price_get_multi(cr, uid, rule.base_pricelist_id, [(product, qty, partner)], context=context)[product.id]
ptype_src = rule.base_pricelist_id.currency_id.id
price = self.pool['res.currency'].compute(cr, uid, ptype_src, pricelist.currency_id.id, price_tmp, round=False, context=context)
else:
                # if the base option is the public price, take the sale price; otherwise take the product's cost price
# price_get returns the price in the context UoM, i.e. qty_uom_id
price = self.pool['product.template']._price_get(cr, uid, [product], rule.base, context=context)[product.id]
convert_to_price_uom = (lambda price: product_uom_obj._compute_price(
cr, uid, product.uom_id.id,
price, price_uom_id))
if price is not False:
if rule.compute_price == 'fixed':
price = convert_to_price_uom(rule.fixed_price)
elif rule.compute_price == 'percentage':
price = (price - (price * (rule.percent_price / 100))) or 0.0
else:
#complete formula
price_limit = price
price = (price - (price * (rule.price_discount / 100))) or 0.0
if rule.price_round:
price = tools.float_round(price, precision_rounding=rule.price_round)
if rule.price_surcharge:
price_surcharge = convert_to_price_uom(rule.price_surcharge)
price += price_surcharge
if rule.price_min_margin:
price_min_margin = convert_to_price_uom(rule.price_min_margin)
price = max(price, price_limit + price_min_margin)
if rule.price_max_margin:
price_max_margin = convert_to_price_uom(rule.price_max_margin)
price = min(price, price_limit + price_max_margin)
suitable_rule = rule
break
# Final price conversion into pricelist currency
if suitable_rule and suitable_rule.compute_price != 'fixed' and suitable_rule.base != 'pricelist':
user_company = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id
price = self.pool['res.currency'].compute(cr, uid, user_company.currency_id.id, pricelist.currency_id.id, price, context=context)
results[product.id] = (price, suitable_rule and suitable_rule.id or False)
return results
def price_get(self, cr, uid, ids, prod_id, qty, partner=None, context=None):
return dict((key, price[0]) for key, price in self.price_rule_get(cr, uid, ids, prod_id, qty, partner=partner, context=context).items())
def price_rule_get(self, cr, uid, ids, prod_id, qty, partner=None, context=None):
product = self.pool.get('product.product').browse(cr, uid, prod_id, context=context)
res_multi = self.price_rule_get_multi(cr, uid, ids, products_by_qty_by_partner=[(product, qty, partner)], context=context)
res = res_multi[prod_id]
return res
class product_pricelist_item(osv.osv):
_name = "product.pricelist.item"
_description = "Pricelist item"
_order = "applied_on, min_quantity desc"
def _check_recursion(self, cr, uid, ids, context=None):
for obj_list in self.browse(cr, uid, ids, context=context):
if obj_list.base == 'pricelist':
main_pricelist = obj_list.pricelist_id.id
other_pricelist = obj_list.base_pricelist_id.id
if main_pricelist == other_pricelist:
return False
return True
def _check_margin(self, cr, uid, ids, context=None):
for item in self.browse(cr, uid, ids, context=context):
if item.price_max_margin and item.price_min_margin and (item.price_min_margin > item.price_max_margin):
return False
return True
_columns = {
'product_tmpl_id': fields.many2one('product.template', 'Product Template', ondelete='cascade', help="Specify a template if this rule only applies to one product template. Keep empty otherwise."),
'product_id': fields.many2one('product.product', 'Product', ondelete='cascade', help="Specify a product if this rule only applies to one product. Keep empty otherwise."),
'categ_id': fields.many2one('product.category', 'Product Category', ondelete='cascade', help="Specify a product category if this rule only applies to products belonging to this category or its children categories. Keep empty otherwise."),
'min_quantity': fields.integer('Min. Quantity',
help="For the rule to apply, bought/sold quantity must be greater "
"than or equal to the minimum quantity specified in this field.\n"
"Expressed in the default unit of measure of the product."
),
'applied_on': fields.selection([('3_global', 'Global'),('2_product_category', ' Product Category'), ('1_product', 'Product'), ('0_product_variant', 'Product Variant')], string="Apply On", required=True,
help='Pricelist Item applicable on selected option'),
'sequence': fields.integer('Sequence', required=True, help="Gives the order in which the pricelist items will be checked. The evaluation gives highest priority to lowest sequence and stops as soon as a matching item is found."),
'base': fields.selection([('list_price', 'Public Price'), ('standard_price', 'Cost'), ('pricelist', 'Other Pricelist')], string="Based on", required=True,
help='Base price for computation. \n Public Price: The base price will be the Sale/public Price. \n Cost Price : The base price will be the cost price. \n Other Pricelist : Computation of the base price based on another Pricelist.'),
'base_pricelist_id': fields.many2one('product.pricelist', 'Other Pricelist'),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist'),
'price_surcharge': fields.float('Price Surcharge',
            digits_compute= dp.get_precision('Product Price'), help='Specify the fixed amount to add or subtract (if negative) to the amount calculated with the discount.'),
'price_discount': fields.float('Price Discount', digits=(16,2)),
'price_round': fields.float('Price Rounding',
digits_compute= dp.get_precision('Product Price'),
help="Sets the price so that it is a multiple of this value.\n" \
"Rounding is applied after the discount and before the surcharge.\n" \
"To have prices that end in 9.99, set rounding 10, surcharge -0.01" \
),
'price_min_margin': fields.float('Min. Price Margin',
digits_compute= dp.get_precision('Product Price'), help='Specify the minimum amount of margin over the base price.'),
'price_max_margin': fields.float('Max. Price Margin',
digits_compute= dp.get_precision('Product Price'), help='Specify the maximum amount of margin over the base price.'),
'company_id': fields.related('pricelist_id','company_id',type='many2one',
readonly=True, relation='res.company', string='Company', store=True),
'currency_id': fields.related('pricelist_id', 'currency_id', type='many2one',
readonly=True, relation='res.currency', string='Currency', store=True),
'date_start': fields.date('Start Date', help="Starting date for the pricelist item validation"),
        'date_end': fields.date('End Date', help="Ending date for the pricelist item validation"),
'compute_price': fields.selection([('fixed', 'Fix Price'), ('percentage', 'Percentage (discount)'), ('formula', 'Formula')], select=True, default='fixed'),
'fixed_price': fields.float('Fixed Price'),
'percent_price': fields.float('Percentage Price'),
}
_defaults = {
'base': 'list_price',
'min_quantity': 1,
'sequence': 5,
'price_discount': 0,
'applied_on': '3_global',
}
_constraints = [
(_check_recursion, 'Error! You cannot assign the Main Pricelist as Other Pricelist in PriceList Item!', ['base_pricelist_id']),
(_check_margin, 'Error! The minimum margin should be lower than the maximum margin.', ['price_min_margin', 'price_max_margin'])
]
class product_pricelist_item_new(models.Model):
_inherit = "product.pricelist.item"
@api.one
@api.depends('categ_id', 'product_tmpl_id', 'product_id', 'compute_price', 'fixed_price', \
'pricelist_id', 'percent_price', 'price_discount', 'price_surcharge')
def _get_pricelist_item_name_price(self):
if self.categ_id:
self.name = _("Category: %s") % (self.categ_id.name)
elif self.product_tmpl_id:
self.name = self.product_tmpl_id.name
elif self.product_id:
self.name = self.product_id.display_name.replace('[%s]' % self.product_id.code, '')
else:
self.name = _("All Products")
if self.compute_price == 'fixed':
self.price = ("%s %s") % (self.fixed_price, self.pricelist_id.currency_id.name)
elif self.compute_price == 'percentage':
self.price = _("%s %% discount") % (self.percent_price)
else:
self.price = _("%s %% discount and %s surcharge") % (abs(self.price_discount), self.price_surcharge)
#functional fields used for usability purposes
name = Fields.Char(compute='_get_pricelist_item_name_price', string='Name', multi='item_name_price', help="Explicit rule name for this pricelist line.")
price = Fields.Char(compute='_get_pricelist_item_name_price', string='Price', multi='item_name_price', help="Explicit rule name for this pricelist line.")
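def _example_resolve_price(pool, cr, uid, pricelist_id, product_id, qty, partner_id=None, context=None):
    """ Illustrative sketch (editorial addition, not part of the original module): resolve a
        unit price through a pricelist, mirroring how price_get() above is typically called
        from other server-side code. All argument names here are placeholders. """
    pricelist_obj = pool.get('product.pricelist')
    prices = pricelist_obj.price_get(cr, uid, [pricelist_id], product_id, qty,
                                     partner=partner_id, context=context)
    return prices[pricelist_id]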
| gpl-3.0 |
radzhome/AWS-ElasticBeanstalk-CLI | eb/linux/python3/scli/cli_parse.py | 4 | 8680 | #!/usr/bin/env python
# ==============================================================================
# Copyright 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use
# this file except in compliance with the License. A copy of the License is
# located at
#
# http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
# implied. See the License for the specific language governing permissions
# and limitations under the License.
#==============================================================================
import argparse
from contextlib import closing
import logging
from io import StringIO
from lib.utility import misc
from lib.elasticbeanstalk.model import EnvironmentTier
from scli.resources import CLISwitch, CLISwitchMsg, EBSCliAttr
from scli.constants import CommandType, ServiceDefault, ServiceRegionId, \
ParameterName, ParameterSource
from scli.parameter import Parameter
from scli.exception import ArgumentError
log = logging.getLogger('cli')
def _word_join(word_list, separator=''):
x = separator.join(map(misc.to_unicode, word_list))
return x
def command(string):
command = misc.to_unicode(string)
for item in CommandType:
if item.lower() == command.lower().strip():
return item
raise AttributeError(EBSCliAttr.InvalidCommand.format(command))
def subcommand(string):
subcommand = misc.to_unicode(string)
return subcommand
def _init_parser(parser):
commands = ', '.join(map(str.lower, CommandType))
parser.add_argument(CLISwitch[ParameterName.Command],
type=command,
metavar='COMMAND', help=commands)
parser.add_argument(CLISwitch[ParameterName.SubCommand],
type=subcommand, nargs='*',
default=None,
metavar='SUBCOMMAND', )
# AWS credential
parser.add_argument('-I', '--' + CLISwitch[ParameterName.AwsAccessKeyId],
dest=ParameterName.AwsAccessKeyId,
metavar='ACCESS_KEY_ID',
help=CLISwitchMsg[ParameterName.AwsAccessKeyId])
parser.add_argument('-S', '--' + CLISwitch[ParameterName.AwsSecretAccessKey],
dest=ParameterName.AwsSecretAccessKey,
metavar='SECRET_ACCESS_KEY',
help=CLISwitchMsg[ParameterName.AwsSecretAccessKey])
parser.add_argument('--' + CLISwitch[ParameterName.AwsCredentialFile],
dest=ParameterName.AwsCredentialFile,
metavar='FILE_PATH_NAME',
help=CLISwitchMsg[ParameterName.AwsCredentialFile])
# Application/environment
parser.add_argument('-t', '--' + CLISwitch[ParameterName.EnvironmentTier],
dest=ParameterName.EnvironmentTier,
metavar='ENVIRONMENT_TIER',
help=CLISwitchMsg[ParameterName.EnvironmentTier])
parser.add_argument('-s', '--' + CLISwitch[ParameterName.SolutionStack],
dest=ParameterName.SolutionStack, nargs='+',
metavar='',
help=CLISwitchMsg[ParameterName.SolutionStack])
parser.add_argument('-a', '--' + CLISwitch[ParameterName.ApplicationName],
dest=ParameterName.ApplicationName,
metavar='APPLICATION_NAME',
help=CLISwitchMsg[ParameterName.ApplicationName])
parser.add_argument('-l', '--' + CLISwitch[ParameterName.ApplicationVersionName],
dest=ParameterName.ApplicationVersionName,
metavar='VERSION_LABEL',
help=CLISwitchMsg[ParameterName.ApplicationVersionName])
parser.add_argument('-e', '--' + CLISwitch[ParameterName.EnvironmentName],
dest=ParameterName.EnvironmentName,
metavar='ENVIRONMENT_NAME',
help=CLISwitchMsg[ParameterName.EnvironmentName])
# Output
parser.add_argument('--' + CLISwitch[ParameterName.Verbose],
action='store_const', const=ServiceDefault.ENABLED,
dest=ParameterName.Verbose,
metavar='',
help=CLISwitchMsg[ParameterName.Verbose])
parser.add_argument('-f', '--' + CLISwitch[ParameterName.Force],
action='store_const', const=ServiceDefault.ENABLED,
dest=ParameterName.Force,
metavar='',
help=CLISwitchMsg[ParameterName.Force])
# Service
parser.add_argument('--' + CLISwitch[ParameterName.WaitForFinishTimeout], type=int,
dest=ParameterName.WaitForFinishTimeout,
metavar='TIMEOUT_IN_SEC',
help=str.format(CLISwitchMsg[ParameterName.WaitForFinishTimeout],
ServiceDefault.WAIT_TIMEOUT_IN_SEC))
parser.add_argument('--' + CLISwitch[ParameterName.Region],
dest=ParameterName.Region,
metavar='REGION',
help=CLISwitchMsg[ParameterName.Region])
parser.add_argument('--' + CLISwitch[ParameterName.ServiceEndpoint],
dest=ParameterName.ServiceEndpoint,
metavar='ENDPOINT',
help=CLISwitchMsg[ParameterName.ServiceEndpoint])
# SCli Helper switch
parser.add_argument('--version', action='version', version=EBSCliAttr.Version)
# List of non string parameters
NON_STRING_PARAMETERS = [ParameterName.EnvironmentTier]
def parse(parameter_pool, line=None):
''' Parse command arguments'''
parser = ArgumentParser(description=EBSCliAttr.Name,
usage=EBSCliAttr.Usage)
_init_parser(parser)
if line is not None:
args = vars(parser.parse_args(line.split()))
else:
args = vars(parser.parse_args())
    # Post processing
if args[ParameterName.EnvironmentTier] is not None:
tier_serialized = args[ParameterName.EnvironmentTier]
args[ParameterName.EnvironmentTier] = EnvironmentTier.from_serialized_string(tier_serialized)
if args[ParameterName.SolutionStack] is not None:
solution_stack = _word_join(args[ParameterName.SolutionStack], ' ')
args[ParameterName.SolutionStack] = solution_stack
if args[ParameterName.Region] is not None:
region_id = args[ParameterName.Region]
region = list(ServiceRegionId.keys())[list(ServiceRegionId.values()).index(region_id)]
args[ParameterName.Region] = region
# Store command line arguments into parameter pool
for arg, value in args.items():
arg = misc.to_unicode(arg, convert_none=False)
# Try to convert string/list-of-string parameters to unicode
if arg not in NON_STRING_PARAMETERS:
if isinstance(value, list):
value = [misc.to_unicode(item) for item in value]
else:
value = misc.to_unicode(value, convert_none=False)
if arg == CLISwitch[ParameterName.Command]:
parameter_pool.put(Parameter(ParameterName.Command,
value,
ParameterSource.CliArgument))
elif arg == CLISwitch[ParameterName.SubCommand]:
parameter_pool.put(Parameter(ParameterName.SubCommand,
value,
ParameterSource.CliArgument))
elif value is not None:
parameter_pool.put(Parameter(arg,
value,
ParameterSource.CliArgument))
log.info('Finished parsing command line arguments')
if log.isEnabledFor(logging.DEBUG):
log.debug('Received arguments: {0}'. \
format(misc.collection_to_string(parameter_pool.parameter_names)))
return args
class ArgumentParser(argparse.ArgumentParser):
'''Subclass of argparse.ArgumentParser to override behavior of error()'''
def error(self, error_message):
with closing(StringIO()) as usage:
self.print_usage(usage)
message = EBSCliAttr.ErrorMsg.format(error_message, usage.getvalue(), self.prog)
raise ArgumentError(message)
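def _example_parse(parameter_pool, line='init -e my-env -f'):
    """ Illustrative sketch (editorial addition, not part of the original module): drive
        parse() with a canned command line instead of sys.argv. The command word and option
        values are placeholders; valid commands come from CommandType and the switch strings
        from CLISwitch. """
    try:
        return parse(parameter_pool, line=line)
    except ArgumentError as ex:
        log.error('Invalid arguments: %s', ex)
        raise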
| apache-2.0 |
kubeflow/pipelines | samples/core/XGBoost/xgboost_sample.py | 1 | 3056 | import kfp
from kfp import components
chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml')
convert_csv_to_apache_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0d7d6f41c92bdc05c2825232afe2b47e5cb6c4b3/components/_converters/ApacheParquet/from_CSV/component.yaml')
xgboost_train_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml')
xgboost_predict_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/31939086d66d633732f75300ce69eb60e9fb0269/components/XGBoost/Predict/component.yaml')
xgboost_train_on_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0ae2f30ff24beeef1c64cc7c434f1f652c065192/components/XGBoost/Train/from_ApacheParquet/component.yaml')
xgboost_predict_on_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/31939086d66d633732f75300ce69eb60e9fb0269/components/XGBoost/Predict/from_ApacheParquet/component.yaml')
def xgboost_pipeline():
training_data_csv = chicago_taxi_dataset_op(
where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"',
select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total',
limit=10000,
).output
# Training and prediction on dataset in CSV format
model_trained_on_csv = xgboost_train_on_csv_op(
training_data=training_data_csv,
label_column=0,
objective='reg:squarederror',
num_iterations=200,
).outputs['model']
xgboost_predict_on_csv_op(
data=training_data_csv,
model=model_trained_on_csv,
label_column=0,
)
# Training and prediction on dataset in Apache Parquet format
training_data_parquet = convert_csv_to_apache_parquet_op(
training_data_csv
).output
model_trained_on_parquet = xgboost_train_on_parquet_op(
training_data=training_data_parquet,
label_column_name='tips',
objective='reg:squarederror',
num_iterations=200,
).outputs['model']
xgboost_predict_on_parquet_op(
data=training_data_parquet,
model=model_trained_on_parquet,
label_column_name='tips',
)
# Checking cross-format predictions
xgboost_predict_on_parquet_op(
data=training_data_parquet,
model=model_trained_on_csv,
label_column_name='tips',
)
xgboost_predict_on_csv_op(
data=training_data_csv,
model=model_trained_on_parquet,
label_column=0,
)
if __name__ == '__main__':
kfp_endpoint=None
kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(xgboost_pipeline, arguments={})
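    # Illustrative alternative (editorial addition, not part of the original sample): when no
    # KFP endpoint is reachable, the same pipeline function can be compiled to a package
    # instead of being submitted. The output filename below is an arbitrary placeholder.
    # kfp.compiler.Compiler().compile(xgboost_pipeline, package_path='xgboost_pipeline.yaml')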
| apache-2.0 |
vaginessa/python-gsmmodem | gsmmodem/exceptions.py | 13 | 4484 | """ Module defines exceptions used by gsmmodem """
class GsmModemException(Exception):
""" Base exception raised for error conditions when interacting with the GSM modem """
class TimeoutException(GsmModemException):
""" Raised when a write command times out """
def __init__(self, data=None):
""" @param data: Any data that was read was read before timeout occurred (if applicable) """
super(TimeoutException, self).__init__(data)
self.data = data
class InvalidStateException(GsmModemException):
""" Raised when an API method call is invoked on an object that is in an incorrect state """
class InterruptedException(InvalidStateException):
""" Raised when execution of an AT command is interrupt by a state change.
May contain another exception that was the cause of the interruption """
def __init__(self, message, cause=None):
""" @param cause: the exception that caused this interruption (usually a CmeError) """
super(InterruptedException, self).__init__(message)
self.cause = cause
class CommandError(GsmModemException):
""" Raised if the modem returns an error in response to an AT command
    May optionally include an error type (CME or CMS) and an error-specific code.
"""
_description = ''
def __init__(self, command=None, type=None, code=None):
self.command = command
self.type = type
self.code = code
if type != None and code != None:
super(CommandError, self).__init__('{0} {1}{2}'.format(type, code, ' ({0})'.format(self._description) if len(self._description) > 0 else ''))
elif command != None:
super(CommandError, self).__init__(command)
else:
super(CommandError, self).__init__()
class CmeError(CommandError):
""" ME error result code : +CME ERROR: <error>
Issued in response to an AT command
"""
def __new__(cls, *args, **kwargs):
# Return a specialized version of this class if possible
if len(args) >= 2:
code = args[1]
if code == 11:
return PinRequiredError(args[0])
elif code == 16:
return IncorrectPinError(args[0])
elif code == 12:
return PukRequiredError(args[0])
return super(CmeError, cls).__new__(cls, *args, **kwargs)
def __init__(self, command, code):
super(CmeError, self).__init__(command, 'CME', code)
class SecurityException(CmeError):
""" Security-related CME error """
def __init__(self, command, code):
super(SecurityException, self).__init__(command, code)
class PinRequiredError(SecurityException):
""" Raised if an operation failed because the SIM card's PIN has not been entered """
_description = 'SIM card PIN is required'
def __init__(self, command, code=11):
super(PinRequiredError, self).__init__(command, code)
class IncorrectPinError(SecurityException):
""" Raised if an incorrect PIN is entered """
_description = 'Incorrect PIN entered'
def __init__(self, command, code=16):
super(IncorrectPinError, self).__init__(command, code)
class PukRequiredError(SecurityException):
""" Raised an operation failed because the SIM card's PUK is required (SIM locked) """
_description = "PUK required (SIM locked)"
def __init__(self, command, code=12):
super(PukRequiredError, self).__init__(command, code)
class CmsError(CommandError):
""" Message service failure result code: +CMS ERROR : <er>
Issued in response to an AT command
"""
def __new__(cls, *args, **kwargs):
# Return a specialized version of this class if possible
if len(args) >= 2:
code = args[1]
if code == 330:
return SmscNumberUnknownError(args[0])
return super(CmsError, cls).__new__(cls, *args, **kwargs)
def __init__(self, command, code):
super(CmsError, self).__init__(command, 'CMS', code)
class SmscNumberUnknownError(CmsError):
""" Raised if the SMSC (service centre) address is missing when trying to send an SMS message """
_description = 'SMSC number not set'
def __init__(self, command, code=330):
super(SmscNumberUnknownError, self).__init__(command, code)
class EncodingError(GsmModemException):
""" Raised if a decoding- or encoding operation failed """
| lgpl-3.0 |
Edraak/edx-platform | common/djangoapps/config_models/admin.py | 26 | 7283 | """
Admin site models for managing :class:`.ConfigurationModel` subclasses
"""
from django.forms import models
from django.contrib import admin
from django.contrib.admin import ListFilter
from django.core.cache import caches, InvalidCacheBackendError
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
try:
cache = caches['configuration'] # pylint: disable=invalid-name
except InvalidCacheBackendError:
from django.core.cache import cache
# pylint: disable=protected-access
class ConfigurationModelAdmin(admin.ModelAdmin):
"""
:class:`~django.contrib.admin.ModelAdmin` for :class:`.ConfigurationModel` subclasses
"""
date_hierarchy = 'change_date'
def get_actions(self, request):
return {
'revert': (ConfigurationModelAdmin.revert, 'revert', _('Revert to the selected configuration'))
}
def get_list_display(self, request):
return self.model._meta.get_all_field_names()
# Don't allow deletion of configuration
def has_delete_permission(self, request, obj=None):
return False
# Make all fields read-only when editing an object
def get_readonly_fields(self, request, obj=None):
if obj: # editing an existing object
return self.model._meta.get_all_field_names()
return self.readonly_fields
def add_view(self, request, form_url='', extra_context=None):
# Prepopulate new configuration entries with the value of the current config
get = request.GET.copy()
get.update(models.model_to_dict(self.model.current()))
request.GET = get
return super(ConfigurationModelAdmin, self).add_view(request, form_url, extra_context)
# Hide the save buttons in the change view
def change_view(self, request, object_id, form_url='', extra_context=None):
extra_context = extra_context or {}
extra_context['readonly'] = True
return super(ConfigurationModelAdmin, self).change_view(
request,
object_id,
form_url,
extra_context=extra_context
)
def save_model(self, request, obj, form, change):
obj.changed_by = request.user
super(ConfigurationModelAdmin, self).save_model(request, obj, form, change)
cache.delete(obj.cache_key_name(*(getattr(obj, key_name) for key_name in obj.KEY_FIELDS)))
cache.delete(obj.key_values_cache_key_name())
def revert(self, request, queryset):
"""
Admin action to revert a configuration back to the selected value
"""
if queryset.count() != 1:
self.message_user(request, _("Please select a single configuration to revert to."))
return
target = queryset[0]
target.id = None
self.save_model(request, target, None, False)
self.message_user(request, _("Reverted configuration."))
return HttpResponseRedirect(
reverse(
'admin:{}_{}_change'.format(
self.model._meta.app_label,
self.model._meta.model_name,
),
args=(target.id,),
)
)
class ShowHistoryFilter(ListFilter):
"""
Admin change view filter to show only the most recent (i.e. the "current") row for each
unique key value.
"""
title = _('Status')
parameter_name = 'show_history'
def __init__(self, request, params, model, model_admin):
super(ShowHistoryFilter, self).__init__(request, params, model, model_admin)
if self.parameter_name in params:
value = params.pop(self.parameter_name)
self.used_parameters[self.parameter_name] = value
def has_output(self):
""" Should this filter be shown? """
return True
def choices(self, cl):
""" Returns choices ready to be output in the template. """
show_all = self.used_parameters.get(self.parameter_name) == "1"
return (
{
'display': _('Current Configuration'),
'selected': not show_all,
'query_string': cl.get_query_string({}, [self.parameter_name]),
},
{
'display': _('All (Show History)'),
'selected': show_all,
'query_string': cl.get_query_string({self.parameter_name: "1"}, []),
}
)
def queryset(self, request, queryset):
""" Filter the queryset. No-op since it's done by KeyedConfigurationModelAdmin """
return queryset
def expected_parameters(self):
""" List the query string params used by this filter """
return [self.parameter_name]
class KeyedConfigurationModelAdmin(ConfigurationModelAdmin):
"""
:class:`~django.contrib.admin.ModelAdmin` for :class:`.ConfigurationModel` subclasses that
use extra keys (i.e. they have KEY_FIELDS set).
"""
date_hierarchy = None
list_filter = (ShowHistoryFilter, )
def get_queryset(self, request):
"""
        Annotate the queryset with an 'is_active' property that's true iff that row is the most
recently added row for that particular set of KEY_FIELDS values.
Filter the queryset to show only is_active rows by default.
"""
if request.GET.get(ShowHistoryFilter.parameter_name) == '1':
queryset = self.model.objects.with_active_flag()
else:
# Show only the most recent row for each key.
queryset = self.model.objects.current_set()
ordering = self.get_ordering(request)
if ordering:
return queryset.order_by(*ordering)
return queryset
def get_list_display(self, request):
""" Add a link to each row for creating a new row using the chosen row as a template """
return self.model._meta.get_all_field_names() + ['edit_link']
def add_view(self, request, form_url='', extra_context=None):
# Prepopulate new configuration entries with the value of the current config, if given:
if 'source' in request.GET:
get = request.GET.copy()
source_id = int(get.pop('source')[0])
source = get_object_or_404(self.model, pk=source_id)
get.update(models.model_to_dict(source))
request.GET = get
# Call our grandparent's add_view, skipping the parent code
# because the parent code has a different way to prepopulate new configuration entries
# with the value of the latest config, which doesn't make sense for keyed models.
# pylint: disable=bad-super-call
return super(ConfigurationModelAdmin, self).add_view(request, form_url, extra_context)
def edit_link(self, inst):
""" Edit link for the change view """
if not inst.is_active:
return u'--'
update_url = reverse('admin:{}_{}_add'.format(self.model._meta.app_label, self.model._meta.model_name))
update_url += "?source={}".format(inst.pk)
return u'<a href="{}">{}</a>'.format(update_url, _('Update'))
edit_link.allow_tags = True
edit_link.short_description = _('Update')
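# Illustrative registration sketch (editorial addition, not part of this module). ``MyConfig``
# and ``MyKeyedConfig`` are hypothetical ConfigurationModel subclasses, shown only to
# demonstrate how the two admin classes above are typically wired up in an app's admin.py:
#
#     from config_models.models import ConfigurationModel
#
#     admin.site.register(MyConfig, ConfigurationModelAdmin)
#     admin.site.register(MyKeyedConfig, KeyedConfigurationModelAdmin)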
| agpl-3.0 |
fjbatresv/odoo | addons/document/report/__init__.py | 444 | 1068 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import document_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ARMmbed/yotta | yotta/lib/access_common.py | 3 | 12507 | # Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import tarfile
import logging
import os
import hashlib
import tempfile
import shutil
import functools
import random
import errno
# version, , represent versions and specifications, internal
from yotta.lib import version
# fsutils, , misc filesystem utils, internal
from yotta.lib import fsutils
# folders, , where yotta stores things, internal
from yotta.lib import folders
# Ordered JSON, , read & write json, internal
from yotta.lib import ordered_json
# settings, , load and save settings, internal
from yotta.lib import settings
logger = logging.getLogger('access')
cache_logger = logging.getLogger('cache')
class AccessException(Exception):
pass
class Unavailable(AccessException):
pass
class TargetUnavailable(Unavailable):
pass
class SpecificationNotMet(AccessException):
pass
class NotInCache(KeyError):
pass
class RemoteVersion(version.Version):
def __init__(self, version_string, url=None, name='unknown', friendly_source='unknown', friendly_version=None):
self.name = name
self.version_string = version_string
self.friendly_version = friendly_version or version_string
self.friendly_source = friendly_source
super(RemoteVersion, self).__init__(version_string, url)
def unpackInto(self, directory):
raise NotImplementedError
def __repr__(self):
return u'%s@%s from %s' % (self.name, self.friendly_version, self.friendly_source)
def __str__(self):
import sys
# in python 3 __str__ must return a string (i.e. unicode), in
# python 2, it must not return unicode, so:
if sys.version_info[0] >= 3:
return self.__unicode__()
else:
return self.__unicode__().encode('utf8')
def __unicode__(self):
return self.__repr__()
class RemoteComponent(object):
@classmethod
def createFromSource(cls, url, name=None):
raise NotImplementedError
def versionSpec(self):
raise NotImplementedError
def availableVersions(self):
raise NotImplementedError
def tipVersion(self):
raise NotImplementedError
@classmethod
def remoteType(cls):
raise NotImplementedError
_max_cached_modules = None
def getMaxCachedModules():
global _max_cached_modules
if _max_cached_modules is None:
_max_cached_modules = settings.get('maxCachedModules')
if _max_cached_modules is None:
# arbitrary default value
_max_cached_modules = 400
return _max_cached_modules
def _encodeCacheKey(cache_key):
import sys
# if we're under python 2, and cache_key is unicode (it will be, but check
    # to be defensive), encode it as ascii. This prevents coercion errors if
    # the username has unicode characters in it.
if sys.version_info[0] < 3 and isinstance(cache_key, unicode):
return cache_key.encode('ascii')
return cache_key
def pruneCache():
''' Prune the cache '''
cache_dir = folders.cacheDirectory()
def fullpath(f):
return os.path.join(cache_dir, f)
def getMTimeSafe(f):
# it's possible that another process removed the file before we stat
# it, handle this gracefully
try:
return os.stat(f).st_mtime
        except OSError:  # FileNotFoundError is unavailable under Python 2; OSError covers it
import time
return time.clock()
# ensure cache exists
fsutils.mkDirP(cache_dir)
max_cached_modules = getMaxCachedModules()
for f in sorted(
[f for f in os.listdir(cache_dir) if
os.path.isfile(fullpath(f)) and not f.endswith('.json') and not f.endswith('.locked')
],
key = lambda f: getMTimeSafe(fullpath(f)),
reverse = True
)[max_cached_modules:]:
cache_logger.debug('cleaning up cache file %s', f)
removeFromCache(f)
cache_logger.debug('cache pruned to %s items', max_cached_modules)
def sometimesPruneCache(p):
''' return decorator to prune cache after calling fn with a probability of p'''
def decorator(fn):
@functools.wraps(fn)
def wrapped(*args, **kwargs):
r = fn(*args, **kwargs)
if random.random() < p:
pruneCache()
return r
return wrapped
return decorator
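# Illustrative sketch (editorial addition, not part of the original module): applying the
# decorator above so that, on average, one call in twenty also prunes the module cache.
# The decorated function and the probability are placeholders.
#
#     @sometimesPruneCache(1.0/20)
#     def fetchAndCache(stream, cache_key):
#         _moveCachedFile(_downloadToCache(stream), cache_key)
#         return cache_key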
def unpackFrom(tar_file_path, to_directory):
# first unpack into a sibling directory of the specified directory, and
# then move it into place.
# we expect our tarballs to contain a single top-level directory. We strip
# off this name as we extract to minimise the path length
into_parent_dir = os.path.dirname(to_directory)
fsutils.mkDirP(into_parent_dir)
temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
try:
with tarfile.open(tar_file_path) as tf:
strip_dirname = ''
# get the extraction directory name from the first part of the
# extraction paths: it should be the same for all members of
# the archive
for m in tf.getmembers():
split_path = fsutils.fullySplitPath(m.name)
logger.debug('process member: %s %s', m.name, split_path)
if os.path.isabs(m.name) or '..' in split_path:
raise ValueError('archive uses invalid paths')
if not strip_dirname:
if len(split_path) != 1 or not len(split_path[0]):
raise ValueError('archive does not appear to contain a single module')
strip_dirname = split_path[0]
continue
else:
if split_path[0] != strip_dirname:
raise ValueError('archive does not appear to contain a single module')
m.name = os.path.join(*split_path[1:])
tf.extract(m, path=temp_directory)
# make sure the destination directory doesn't exist:
fsutils.rmRf(to_directory)
shutil.move(temp_directory, to_directory)
temp_directory = None
logger.debug('extraction complete %s', to_directory)
except IOError as e:
if e.errno != errno.ENOENT:
logger.error('failed to extract tarfile %s', e)
fsutils.rmF(tar_file_path)
raise
finally:
if temp_directory is not None:
# if anything has failed, cleanup
fsutils.rmRf(temp_directory)
def removeFromCache(cache_key):
f = os.path.join(folders.cacheDirectory(), cache_key)
try:
fsutils.rmF(f)
# remove any metadata too, if it exists
fsutils.rmF(f + '.json')
except OSError as e:
# if we failed to remove either file, then it might be because another
# instance of yotta is using it, so just skip it this time.
pass
def unpackFromCache(cache_key, to_directory):
''' If the specified cache key exists, unpack the tarball into the
specified directory, otherwise raise NotInCache (a KeyError subclass).
'''
if cache_key is None:
raise NotInCache('"None" is never in cache')
cache_key = _encodeCacheKey(cache_key)
cache_dir = folders.cacheDirectory()
fsutils.mkDirP(cache_dir)
path = os.path.join(cache_dir, cache_key)
logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
try:
unpackFrom(path, to_directory)
try:
shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json'))
except IOError as e:
if e.errno == errno.ENOENT:
pass
else:
raise
cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory)
return
except IOError as e:
if e.errno == errno.ENOENT:
cache_logger.debug('%s not in cache', cache_key)
raise NotInCache('not in cache')
except OSError as e:
if e.errno == errno.ENOTEMPTY:
            logger.error('directory %s was not empty: probably simultaneous invocation of yotta! It is likely that downloaded sources are corrupted.', to_directory)
else:
raise
def _downloadToCache(stream, hashinfo={}, origin_info=dict()):
    ''' Download the specified stream to a temporary file in the cache
    directory, and return a cache key that can be used to access/remove the file.
You should use either removeFromCache(cache_key) or _moveCachedFile to
move the downloaded file to a known key after downloading.
'''
hash_name = None
hash_value = None
m = None
if len(hashinfo):
# check for hashes in preferred order. Currently this is just sha256
# (which the registry uses). Initial investigations suggest that github
# doesn't return a header with the hash of the file being downloaded.
for h in ('sha256',):
if h in hashinfo:
hash_name = h
hash_value = hashinfo[h]
m = getattr(hashlib, h)()
break
if not hash_name:
logger.warning('could not find supported hash type in %s', hashinfo)
cache_dir = folders.cacheDirectory()
fsutils.mkDirP(cache_dir)
file_size = 0
(download_file, download_fname) = tempfile.mkstemp(dir=cache_dir, suffix='.locked')
with os.fdopen(download_file, 'wb') as f:
f.seek(0)
for chunk in stream.iter_content(4096):
f.write(chunk)
if hash_name:
m.update(chunk)
if hash_name:
calculated_hash = m.hexdigest()
logger.debug(
'calculated %s hash: %s check against: %s' % (
hash_name, calculated_hash, hash_value
)
)
if hash_value and (hash_value != calculated_hash):
raise Exception('Hash verification failed.')
file_size = f.tell()
logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
f.truncate()
extended_origin_info = {
'hash': hashinfo,
'size': file_size
}
extended_origin_info.update(origin_info)
ordered_json.dump(download_fname + '.json', extended_origin_info)
return os.path.basename(download_fname)
def _moveCachedFile(from_key, to_key):
''' Move a file atomically within the cache: used to make cached files
available at known keys, so they can be used by other processes.
'''
cache_dir = folders.cacheDirectory()
from_path = os.path.join(cache_dir, from_key)
to_path = os.path.join(cache_dir, to_key)
try:
os.rename(from_path, to_path)
# if moving the actual file was successful, then try to move the
# metadata:
os.rename(from_path+'.json', to_path+'.json')
except Exception as e:
# if the source doesn't exist, or the destination doesn't exist, remove
# the file instead.
# windows error 183 == file already exists
# (be careful not to use WindowsError on non-windows platforms as it
# isn't defined)
if (isinstance(e, OSError) and e.errno == errno.ENOENT) or \
(isinstance(e, getattr(__builtins__, "WindowsError", type(None))) and e.errno == 183):
fsutils.rmF(from_path)
else:
raise
@sometimesPruneCache(0.05)
def unpackTarballStream(stream, into_directory, hash={}, cache_key=None, origin_info=dict()):
    ''' Unpack a requests response stream that contains a tarball into a
    directory. If a cache_key is provided, the downloaded tarball is kept in
    the cache under that key, so future requests can try unpackFromCache()
    first, before making the request.
    '''
cache_key = _encodeCacheKey(cache_key)
# if the cache is disabled, then use a random cache key even if one was
# provided, so that the module is not persisted in the cache and its
# temporary download location is a random key:
if getMaxCachedModules() == 0:
cache_key = None
new_cache_key = _downloadToCache(stream, hash, origin_info)
unpackFromCache(new_cache_key, into_directory)
if cache_key is None:
# if we didn't provide a cache key, there's no point in storing the cache
removeFromCache(new_cache_key)
else:
# otherwise make this file available at the known cache key
_moveCachedFile(new_cache_key, cache_key)
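# Illustrative sketch (not part of the original module): the helpers above are
# normally combined as "try the cache first, download on a miss".  The URL,
# cache key and use of the requests library below are hypothetical examples.
#
#   import requests
#   key = 'example-cache-key'
#   try:
#       unpackFromCache(key, '/tmp/example-module')
#   except NotInCache:
#       response = requests.get('https://example.com/module.tgz', stream=True)
#       unpackTarballStream(response, '/tmp/example-module', cache_key=key)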
| apache-2.0 |
joksnet/youtube-dl | youtube_dl/extractor/newgrounds.py | 5 | 1287 | import json
import re
from .common import InfoExtractor
from ..utils import determine_ext
class NewgroundsIE(InfoExtractor):
_VALID_URL = r'(?:https?://)?(?:www\.)?newgrounds\.com/audio/listen/(?P<id>\d+)'
_TEST = {
u'url': u'http://www.newgrounds.com/audio/listen/549479',
u'file': u'549479.mp3',
u'md5': u'fe6033d297591288fa1c1f780386f07a',
u'info_dict': {
u"title": u"B7 - BusMode",
u"uploader": u"Burn7",
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
music_id = mobj.group('id')
webpage = self._download_webpage(url, music_id)
title = self._html_search_regex(r',"name":"([^"]+)",', webpage, u'music title')
uploader = self._html_search_regex(r',"artist":"([^"]+)",', webpage, u'music uploader')
music_url_json_string = self._html_search_regex(r'({"url":"[^"]+"),', webpage, u'music url') + '}'
music_url_json = json.loads(music_url_json_string)
music_url = music_url_json['url']
return {
'id': music_id,
'title': title,
'url': music_url,
'uploader': uploader,
'ext': determine_ext(music_url),
}
| unlicense |
flavour/tldrmp | modules/geopy/geocoders/dot_us.py | 46 | 3194 | import getpass
from urllib import urlencode
from urllib2 import urlopen
from geopy.geocoders.base import Geocoder
from geopy import util
import csv
class GeocoderDotUS(Geocoder):
def __init__(self, username=None, password=None, format_string='%s'):
if username and (password is None):
password = getpass.getpass(
"geocoder.us password for %r: " % username
)
self.format_string = format_string
self.username = username
self.__password = password
def get_url(self):
username = self.username
password = self.__password
if username and password:
auth = '%s@%s:' % (username, password)
resource = 'member/service/namedcsv'
else:
auth = ''
resource = 'service/namedcsv'
return 'http://%sgeocoder.us/%s' % (auth, resource)
def geocode(self, query, exactly_one=True):
if isinstance(query, unicode):
query = query.encode('utf-8')
query_str = self.format_string % query
page = urlopen("%s?%s" % (
self.get_url(),
urlencode({'address':query_str})
))
reader = csv.reader(page)
places = [r for r in reader]
        # This geocoder only returns the closest match, no matter what.
#
#if exactly_one and len(places) != 1:
# raise ValueError("Didn't find exactly one placemark! " \
# "(Found %d.)" % len(places))
#
#if exactly_one:
# return self._parse_result(places[0])
#else:
# return [self._parse_result(place) for place in places]
return self._parse_result(places[0])
@staticmethod
def _parse_result(result):
# turn x=y pairs ("lat=47.6", "long=-117.426") into dict key/value pairs:
place = dict(
            filter(lambda x: len(x)>1, # strip off bits that aren't pairs (i.e. the "geocoder modified" status string)
map(lambda x: x.split('=', 1), result) # split the key=val strings into (key, val) tuples
))
address = [
place.get('number', None),
place.get('prefix', None),
place.get('street', None),
place.get('type', None),
place.get('suffix', None)
]
city = place.get('city', None)
state = place.get('state', None)
zip_code = place.get('zip', None)
name = util.join_filter(", ", [
util.join_filter(" ", address),
city,
util.join_filter(" ", [state, zip_code])
])
latitude = place.get('lat', None)
longitude = place.get('long', None)
if latitude and longitude:
latlon = float(latitude), float(longitude)
else:
return None
# TODO use Point/Location object API in 0.95
#if latitude and longitude:
# point = Point(latitude, longitude)
#else:
# point = None
#return Location(name, point, dict(result))
return name, latlon
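# Illustrative sketch (not part of the original module): typical use of the
# geocoder defined above.  The address is an arbitrary example; geocoder.us
# credentials are optional and omitted here.
#
#   geocoder = GeocoderDotUS()
#   result = geocoder.geocode("1600 Pennsylvania Ave NW, Washington DC")
#   if result is not None:
#       name, (latitude, longitude) = result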
| mit |
j00bar/ansible | lib/ansible/modules/monitoring/stackdriver.py | 77 | 7327 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: stackdriver
short_description: Send code deploy and annotation events to stackdriver
description:
- Send code deploy and annotation events to Stackdriver
version_added: "1.6"
author: "Ben Whaley (@bwhaley)"
options:
key:
description:
- API key.
required: true
default: null
event:
description:
- The type of event to send, either annotation or deploy
choices: ['annotation', 'deploy']
required: false
default: null
revision_id:
description:
- The revision of the code that was deployed. Required for deploy events
required: false
default: null
deployed_by:
description:
- The person or robot responsible for deploying the code
required: false
default: "Ansible"
deployed_to:
description:
- "The environment code was deployed to. (ie: development, staging, production)"
required: false
default: null
repository:
description:
- The repository (or project) deployed
required: false
default: null
msg:
description:
- The contents of the annotation message, in plain text. Limited to 256 characters. Required for annotation.
required: false
default: null
annotated_by:
description:
- The person or robot who the annotation should be attributed to.
required: false
default: "Ansible"
level:
description:
- one of INFO/WARN/ERROR, defaults to INFO if not supplied. May affect display.
choices: ['INFO', 'WARN', 'ERROR']
required: false
default: 'INFO'
instance_id:
description:
- id of an EC2 instance that this event should be attached to, which will limit the contexts where this event is shown
required: false
default: null
event_epoch:
description:
- "Unix timestamp of where the event should appear in the timeline, defaults to now. Be careful with this."
required: false
default: null
'''
EXAMPLES = '''
- stackdriver:
key: AAAAAA
event: deploy
deployed_to: production
deployed_by: leeroyjenkins
repository: MyWebApp
revision_id: abcd123
- stackdriver:
key: AAAAAA
event: annotation
msg: Greetings from Ansible
annotated_by: leeroyjenkins
level: WARN
instance_id: i-abcd1234
'''
# ===========================================
# Stackdriver module specific support methods.
#
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.urls import fetch_url
def send_deploy_event(module, key, revision_id, deployed_by='Ansible', deployed_to=None, repository=None):
"""Send a deploy event to Stackdriver"""
deploy_api = "https://event-gateway.stackdriver.com/v1/deployevent"
params = {}
params['revision_id'] = revision_id
params['deployed_by'] = deployed_by
if deployed_to:
params['deployed_to'] = deployed_to
if repository:
params['repository'] = repository
return do_send_request(module, deploy_api, params, key)
def send_annotation_event(module, key, msg, annotated_by='Ansible', level=None, instance_id=None, event_epoch=None):
"""Send an annotation event to Stackdriver"""
annotation_api = "https://event-gateway.stackdriver.com/v1/annotationevent"
params = {}
params['message'] = msg
if annotated_by:
params['annotated_by'] = annotated_by
if level:
params['level'] = level
if instance_id:
params['instance_id'] = instance_id
if event_epoch:
params['event_epoch'] = event_epoch
return do_send_request(module, annotation_api, params, key)
def do_send_request(module, url, params, key):
data = json.dumps(params)
headers = {
'Content-Type': 'application/json',
'x-stackdriver-apikey': key
}
response, info = fetch_url(module, url, headers=headers, data=data, method='POST')
if info['status'] != 200:
module.fail_json(msg="Unable to send msg: %s" % info['msg'])
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
key=dict(required=True),
event=dict(required=True, choices=['deploy', 'annotation']),
msg=dict(),
revision_id=dict(),
annotated_by=dict(default='Ansible'),
level=dict(default='INFO', choices=['INFO', 'WARN', 'ERROR']),
instance_id=dict(),
event_epoch=dict(),
deployed_by=dict(default='Ansible'),
deployed_to=dict(),
repository=dict(),
),
supports_check_mode=True
)
key = module.params["key"]
event = module.params["event"]
# Annotation params
msg = module.params["msg"]
annotated_by = module.params["annotated_by"]
level = module.params["level"]
instance_id = module.params["instance_id"]
event_epoch = module.params["event_epoch"]
# Deploy params
revision_id = module.params["revision_id"]
deployed_by = module.params["deployed_by"]
deployed_to = module.params["deployed_to"]
repository = module.params["repository"]
##################################################################
# deploy requires revision_id
# annotation requires msg
# We verify these manually
##################################################################
if event == 'deploy':
if not revision_id:
module.fail_json(msg="revision_id required for deploy events")
try:
send_deploy_event(module, key, revision_id, deployed_by, deployed_to, repository)
except Exception:
e = get_exception()
module.fail_json(msg="unable to sent deploy event: %s" % e)
if event == 'annotation':
if not msg:
module.fail_json(msg="msg required for annotation events")
try:
send_annotation_event(module, key, msg, annotated_by, level, instance_id, event_epoch)
except Exception:
e = get_exception()
module.fail_json(msg="unable to sent annotation event: %s" % e)
changed = True
module.exit_json(changed=changed, deployed_by=deployed_by)
if __name__ == '__main__':
main()
| gpl-3.0 |
gmarkall/numba | numba/tests/test_import.py | 1 | 2535 | import unittest
import subprocess
import sys
from numba.tests.support import TestCase
class TestNumbaImport(TestCase):
"""
Test behaviour of importing Numba.
"""
def run_in_subproc(self, code, flags=None):
if flags is None:
flags = []
cmd = [sys.executable,] + flags + ["-c", code]
popen = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = popen.communicate()
if popen.returncode != 0:
msg = "process failed with code %s: stderr follows\n%s\n"
raise AssertionError(msg % (popen.returncode, err.decode()))
return out, err
def test_laziness(self):
"""
Importing top-level numba features should not import too many modules.
"""
# A heuristic set of modules that shouldn't be imported immediately
blacklist = ['cffi',
'distutils',
'numba.cuda',
'numba.cpython.mathimpl',
'numba.cpython.randomimpl',
'numba.tests',
'numba.core.typing.collections',
'numba.core.typing.listdecl',
'numba.core.typing.npdatetime',
]
# Sanity check the modules still exist...
for mod in blacklist:
if mod not in ('cffi',):
__import__(mod)
code = """if 1:
from numba import jit, vectorize
from numba.core import types
import sys
print(list(sys.modules))
"""
out, _ = self.run_in_subproc(code)
modlist = set(eval(out.strip()))
unexpected = set(blacklist) & set(modlist)
self.assertFalse(unexpected, "some modules unexpectedly imported")
def test_no_accidental_warnings(self):
# checks that importing Numba isn't accidentally triggering warnings due
# to e.g. deprecated use of import locations from Python's stdlib
code = "import numba"
# See: https://github.com/numba/numba/issues/6831
# bug in setuptools/packaging causing a deprecation warning
flags = ["-Werror", "-Wignore::DeprecationWarning:packaging.version:"]
self.run_in_subproc(code, flags)
def test_import_star(self):
# checks that "from numba import *" works.
code = "from numba import *"
self.run_in_subproc(code)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause |
YannThorimbert/PyWorld2D | saveload/io.py | 1 | 3224 | import pickle, os
import thorpy
from PyWorld2D.mapobjects.objects import MapObject
def ask_save(me):
choice = thorpy.launch_binary_choice("Do you want to save this map ?")
default_fn = me.get_fn().replace(".map","")
if choice:
fn = thorpy.get_user_text("Filename", default_fn, size=(me.W//2,40))
fn += ".map"
to_file(me, fn)
thorpy.functions.quit_menu_func()
def get_saved_files_button(root):
files = [fn for fn in os.listdir(root) if fn.endswith(".map")]
ddl = thorpy.DropDownListLauncher.make("Choose a file to load", "", files)
def unlaunch():
ddl.default_unlaunch()
thorpy.functions.quit_menu_func()
ddl.unlaunch_func = unlaunch
return ddl
def ask_load():
pass
################################################################################
def obj_to_file(obj, f):
for attr in obj.get_saved_attributes():
value = getattr(obj, attr)
pickle.dump(value, f)
def file_to_obj(f, obj):
for attr in obj.get_saved_attributes():
value = pickle.load(f)
setattr(obj, attr, value)
def to_file(me, fn):
print("Saving map to", fn)
tmp_name = me.map_initializer.name
me.map_initializer.name = fn.replace("_", " ")
with open(fn, "wb") as f:
obj_to_file(me.map_initializer, f) #store map properties
#save modified cells
print("dumping", len(me.modified_cells), "modified cells")
pickle.dump(len(me.modified_cells), f) #len(modified cells)
for x,y in me.modified_cells:
cell = me.lm.cells[x][y]
pickle.dump((x,y),f)
pickle.dump(cell.name,f) #cell name
#save modified objects
print("dumping", len(me.dynamic_objects), "dynamic objects")
pickle.dump(len(me.dynamic_objects), f) #len(dynamic_objects)
for obj in me.dynamic_objects:
pickle.dump(obj.get_cell_coord(), f) #coord
obj_to_file(obj, f) #dyn obj
me.map_initializer.name = tmp_name
def from_file_base(f):
"""Load map properties and re-generate the map"""
from editor.mapbuilding import MapInitializer
print("Loading map")
mi = MapInitializer("")
file_to_obj(f, mi)
me = mi.configure_map_editor()
return me
def from_file_cells(f, me):
"""Load cells and their logical content (names, properties, etc.)"""
print("Loading cells")
n = pickle.load(f) #len(modified cells)
for i in range(n):
x,y = pickle.load(f) #coord
name = pickle.load(f) #name
#
me.lm.cells[x][y].set_name(name)
def from_file_units(f, me):
"""Load units and their logical content (names, properties, etc.)"""
print("Loading units")
n = pickle.load(f) #len(dynamic_objects)
for i in range(n):
coord = pickle.load(f) #coord
a = {}
for attr_name in MapObject.get_saved_attributes():
a[attr_name] = pickle.load(f)
#
print("*** Loading unit", a["name"])
print(a)
obj = MapObject(me, fns=a["fns"], name=a["name"], factor=a["factor"],
relpos=a["relpos"], build=a["build"], new_type=a["new_type"])
obj_added = me.add_unit(coord, obj, a["quantity"])
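# Illustrative sketch (not part of the original module): a ".map" file written
# by to_file() is read back by calling the loaders above in the same order the
# data was written.  The filename is an arbitrary example.
#
#   with open("mysave.map", "rb") as f:
#       me = from_file_base(f)    # map properties -> rebuilt map editor
#       from_file_cells(f, me)    # modified cell names
#       from_file_units(f, me)    # dynamic objects / units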
| mit |
hezuoguang/ZGVL | WLServer/site-packages/django/core/signing.py | 110 | 6660 | """
Functions for creating and restoring url-safe signed JSON objects.
The format used looks like this:
>>> signing.dumps("hello")
'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk'
There are two components here, separated by a ':'. The first component is a
URLsafe base64 encoded JSON of the object passed to dumps(). The second
component is a base64 encoded hmac/SHA1 hash of "$first_component:$secret"
signing.loads(s) checks the signature and returns the deserialised object.
If the signature fails, a BadSignature exception is raised.
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk")
u'hello'
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified")
...
BadSignature: Signature failed: ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified
You can optionally compress the JSON prior to base64 encoding it to save
space, using the compress=True argument. This checks if compression actually
helps and only applies compression if the result is a shorter string:
>>> signing.dumps(range(1, 20), compress=True)
'.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ'
The fact that the string is compressed is signalled by the prefixed '.' at the
start of the base64 JSON.
There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'.
These functions make use of all of them.
"""
from __future__ import unicode_literals
import base64
import json
import time
import zlib
from django.conf import settings
from django.utils import baseconv
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.module_loading import import_by_path
class BadSignature(Exception):
"""
Signature does not match
"""
pass
class SignatureExpired(BadSignature):
"""
Signature timestamp is older than required max_age
"""
pass
def b64_encode(s):
return base64.urlsafe_b64encode(s).strip(b'=')
def b64_decode(s):
pad = b'=' * (-len(s) % 4)
return base64.urlsafe_b64decode(s + pad)
def base64_hmac(salt, value, key):
return b64_encode(salted_hmac(salt, value, key).digest())
def get_cookie_signer(salt='django.core.signing.get_cookie_signer'):
Signer = import_by_path(settings.SIGNING_BACKEND)
return Signer('django.http.cookies' + settings.SECRET_KEY, salt=salt)
class JSONSerializer(object):
"""
Simple wrapper around json to be used in signing.dumps and
signing.loads.
"""
def dumps(self, obj):
return json.dumps(obj, separators=(',', ':')).encode('latin-1')
def loads(self, data):
return json.loads(data.decode('latin-1'))
def dumps(obj, key=None, salt='django.core.signing', serializer=JSONSerializer, compress=False):
"""
Returns URL-safe, sha1 signed base64 compressed JSON string. If key is
None, settings.SECRET_KEY is used instead.
If compress is True (not the default) checks if compressing using zlib can
save some space. Prepends a '.' to signify compression. This is included
in the signature, to protect against zip bombs.
Salt can be used to namespace the hash, so that a signed string is
only valid for a given namespace. Leaving this at the default
value or re-using a salt value across different parts of your
application without good cause is a security risk.
The serializer is expected to return a bytestring.
"""
data = serializer().dumps(obj)
# Flag for if it's been compressed or not
is_compressed = False
if compress:
# Avoid zlib dependency unless compress is being used
compressed = zlib.compress(data)
if len(compressed) < (len(data) - 1):
data = compressed
is_compressed = True
base64d = b64_encode(data)
if is_compressed:
base64d = b'.' + base64d
return TimestampSigner(key, salt=salt).sign(base64d)
def loads(s, key=None, salt='django.core.signing', serializer=JSONSerializer, max_age=None):
"""
Reverse of dumps(), raises BadSignature if signature fails.
The serializer is expected to accept a bytestring.
"""
# TimestampSigner.unsign always returns unicode but base64 and zlib
# compression operate on bytes.
base64d = force_bytes(TimestampSigner(key, salt=salt).unsign(s, max_age=max_age))
decompress = False
if base64d[:1] == b'.':
# It's compressed; uncompress it first
base64d = base64d[1:]
decompress = True
data = b64_decode(base64d)
if decompress:
data = zlib.decompress(data)
return serializer().loads(data)
class Signer(object):
def __init__(self, key=None, sep=':', salt=None):
# Use of native strings in all versions of Python
self.sep = str(sep)
self.key = str(key or settings.SECRET_KEY)
self.salt = str(salt or
'%s.%s' % (self.__class__.__module__, self.__class__.__name__))
def signature(self, value):
signature = base64_hmac(self.salt + 'signer', value, self.key)
# Convert the signature from bytes to str only on Python 3
return force_str(signature)
def sign(self, value):
value = force_str(value)
return str('%s%s%s') % (value, self.sep, self.signature(value))
def unsign(self, signed_value):
signed_value = force_str(signed_value)
if not self.sep in signed_value:
raise BadSignature('No "%s" found in value' % self.sep)
value, sig = signed_value.rsplit(self.sep, 1)
if constant_time_compare(sig, self.signature(value)):
return force_text(value)
raise BadSignature('Signature "%s" does not match' % sig)
class TimestampSigner(Signer):
def timestamp(self):
return baseconv.base62.encode(int(time.time()))
def sign(self, value):
value = force_str(value)
value = str('%s%s%s') % (value, self.sep, self.timestamp())
return super(TimestampSigner, self).sign(value)
def unsign(self, value, max_age=None):
"""
Retrieve original value and check it wasn't signed more
than max_age seconds ago.
"""
result = super(TimestampSigner, self).unsign(value)
value, timestamp = result.rsplit(self.sep, 1)
timestamp = baseconv.base62.decode(timestamp)
if max_age is not None:
# Check timestamp is not older than max_age
age = time.time() - timestamp
if age > max_age:
raise SignatureExpired(
'Signature age %s > %s seconds' % (age, max_age))
return value
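# Illustrative sketch (not part of the original module): direct use of the
# Signer/TimestampSigner classes above.  The salt is an arbitrary example and
# settings.SECRET_KEY is assumed to be configured.
#
#   signer = TimestampSigner(salt='example.app')
#   token = signer.sign('hello')                  # 'hello:<timestamp>:<signature>'
#   value = signer.unsign(token, max_age=3600)    # SignatureExpired if too old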
| apache-2.0 |
yanirs/servo | tests/wpt/css-tests/tools/pywebsocket/src/test/testdata/handlers/origin_check_wsh.py | 499 | 1916 | # Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def web_socket_do_extra_handshake(request):
if request.ws_origin == 'http://example.com':
return
raise ValueError('Unacceptable origin: %r' % request.ws_origin)
def web_socket_transfer_data(request):
request.connection.write('origin_check_wsh.py is called for %s, %s' %
(request.ws_resource, request.ws_protocol))
# vi:sts=4 sw=4 et
| mpl-2.0 |
hefen1/chromium | third_party/tlslite/tlslite/utils/pem.py | 116 | 3587 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
from .compat import *
import binascii
#This code is shared with tackpy (somewhat), so I'd rather make minimal
#changes, and preserve the use of a2b_base64 throughout.
def dePem(s, name):
"""Decode a PEM string into a bytearray of its payload.
The input must contain an appropriate PEM prefix and postfix
based on the input name string, e.g. for name="CERTIFICATE":
-----BEGIN CERTIFICATE-----
MIIBXDCCAUSgAwIBAgIBADANBgkqhkiG9w0BAQUFADAPMQ0wCwYDVQQDEwRUQUNL
...
KoZIhvcNAQEFBQADAwA5kw==
-----END CERTIFICATE-----
The first such PEM block in the input will be found, and its
payload will be base64 decoded and returned.
"""
prefix = "-----BEGIN %s-----" % name
postfix = "-----END %s-----" % name
start = s.find(prefix)
if start == -1:
raise SyntaxError("Missing PEM prefix")
end = s.find(postfix, start+len(prefix))
if end == -1:
raise SyntaxError("Missing PEM postfix")
s = s[start+len("-----BEGIN %s-----" % name) : end]
retBytes = a2b_base64(s) # May raise SyntaxError
return retBytes
def dePemList(s, name):
"""Decode a sequence of PEM blocks into a list of bytearrays.
The input must contain any number of PEM blocks, each with the appropriate
PEM prefix and postfix based on the input name string, e.g. for
name="TACK BREAK SIG". Arbitrary text can appear between and before and
after the PEM blocks. For example:
" Created by TACK.py 0.9.3 Created at 2012-02-01T00:30:10Z -----BEGIN TACK
BREAK SIG-----
ATKhrz5C6JHJW8BF5fLVrnQss6JnWVyEaC0p89LNhKPswvcC9/s6+vWLd9snYTUv
YMEBdw69PUP8JB4AdqA3K6Ap0Fgd9SSTOECeAKOUAym8zcYaXUwpk0+WuPYa7Zmm
SkbOlK4ywqt+amhWbg9txSGUwFO5tWUHT3QrnRlE/e3PeNFXLx5Bckg= -----END TACK
BREAK SIG----- Created by TACK.py 0.9.3 Created at 2012-02-01T00:30:11Z
-----BEGIN TACK BREAK SIG-----
ATKhrz5C6JHJW8BF5fLVrnQss6JnWVyEaC0p89LNhKPswvcC9/s6+vWLd9snYTUv
YMEBdw69PUP8JB4AdqA3K6BVCWfcjN36lx6JwxmZQncS6sww7DecFO/qjSePCxwM
+kdDqX/9/183nmjx6bf0ewhPXkA0nVXsDYZaydN8rJU1GaMlnjcIYxY= -----END TACK
BREAK SIG----- "
    All such PEM blocks will be found, decoded, and returned in an ordered list
    of bytearrays, which may have zero elements if no PEM blocks are found.
"""
bList = []
prefix = "-----BEGIN %s-----" % name
postfix = "-----END %s-----" % name
while 1:
start = s.find(prefix)
if start == -1:
return bList
end = s.find(postfix, start+len(prefix))
if end == -1:
raise SyntaxError("Missing PEM postfix")
s2 = s[start+len(prefix) : end]
retBytes = a2b_base64(s2) # May raise SyntaxError
bList.append(retBytes)
s = s[end+len(postfix) : ]
def pem(b, name):
"""Encode a payload bytearray into a PEM string.
The input will be base64 encoded, then wrapped in a PEM prefix/postfix
based on the name string, e.g. for name="CERTIFICATE":
-----BEGIN CERTIFICATE-----
MIIBXDCCAUSgAwIBAgIBADANBgkqhkiG9w0BAQUFADAPMQ0wCwYDVQQDEwRUQUNL
...
KoZIhvcNAQEFBQADAwA5kw==
-----END CERTIFICATE-----
"""
s1 = b2a_base64(b)[:-1] # remove terminating \n
s2 = ""
while s1:
s2 += s1[:64] + "\n"
s1 = s1[64:]
s = ("-----BEGIN %s-----\n" % name) + s2 + \
("-----END %s-----\n" % name)
return s
def pemSniff(inStr, name):
searchStr = "-----BEGIN %s-----" % name
return searchStr in inStr
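# Illustrative sketch (not part of the original module): round-tripping a
# payload through pem()/dePem() as described in the docstrings above.  The
# payload bytes and block name are arbitrary examples.
#
#   payload = bytearray(b'\x01\x02\x03')
#   text = pem(payload, "CERTIFICATE")         # -----BEGIN CERTIFICATE----- ...
#   assert pemSniff(text, "CERTIFICATE")
#   assert dePem(text, "CERTIFICATE") == payload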
| bsd-3-clause |
SlimRoms/android_external_chromium_org | tools/telemetry/telemetry/timeline/thread.py | 8 | 9322 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import itertools
import telemetry.timeline.event_container as event_container
import telemetry.timeline.sample as tracing_sample
import telemetry.timeline.slice as tracing_slice
class Thread(event_container.TimelineEventContainer):
''' A Thread stores all the trace events collected for a particular
thread. We organize the synchronous slices on a thread by "subrows," where
subrow 0 has all the root slices, subrow 1 those nested 1 deep, and so on.
The asynchronous slices are stored in an AsyncSliceGroup object.
'''
def __init__(self, process, tid):
super(Thread, self).__init__('thread %s' % tid, parent=process)
self.tid = tid
self._async_slices = []
self._flow_events = []
self._samples = []
self._toplevel_slices = []
# State only valid during import.
self._open_slices = []
self._newly_added_slices = []
@property
def toplevel_slices(self):
return self._toplevel_slices
@property
def all_slices(self):
return list(self.IterAllSlices())
@property
def samples(self):
return self._samples
@property
def async_slices(self):
return self._async_slices
@property
def open_slice_count(self):
return len(self._open_slices)
def IterChildContainers(self):
return iter([])
def IterAllSlices(self):
for s in self._toplevel_slices:
yield s
for sub_slice in s.IterEventsInThisContainerRecrusively():
yield sub_slice
def IterAllSlicesInRange(self, start, end):
for s in self.IterAllSlices():
if s.start >= start and s.end <= end:
yield s
def IterAllSlicesOfName(self, name):
for s in self.IterAllSlices():
if s.name == name:
yield s
def IterAllAsyncSlices(self):
for async_slice in self._async_slices:
yield async_slice
for sub_slice in async_slice.IterEventsInThisContainerRecrusively():
yield sub_slice
def IterAllAsyncSlicesOfName(self, name):
for s in self.IterAllAsyncSlices():
if s.name == name:
yield s
def IterAllFlowEvents(self):
for flow_event in self._flow_events:
yield flow_event
def IterEventsInThisContainer(self):
return itertools.chain(
iter(self._newly_added_slices),
self.IterAllAsyncSlices(),
self.IterAllFlowEvents(),
self.IterAllSlices(),
iter(self._samples)
)
def AddSample(self, category, name, timestamp, args=None):
if len(self._samples) and timestamp < self._samples[-1].start:
raise ValueError(
'Samples must be added in increasing timestamp order')
sample = tracing_sample.Sample(self,
category, name, timestamp, args=args)
self._samples.append(sample)
def AddAsyncSlice(self, async_slice):
self._async_slices.append(async_slice)
def AddFlowEvent(self, flow_event):
self._flow_events.append(flow_event)
def BeginSlice(self, category, name, timestamp, thread_timestamp=None,
args=None):
"""Opens a new slice for the thread.
    Calls to beginSlice and endSlice must be made with
    monotonically non-decreasing timestamps.
* category: Category to which the slice belongs.
* name: Name of the slice to add.
    * timestamp: The timestamp of the slice, in milliseconds.
* thread_timestamp: Thread specific clock (scheduled) timestamp of the
slice, in milliseconds.
    * args: Arguments associated with the slice.
Returns newly opened slice
"""
if len(self._open_slices) > 0 and timestamp < self._open_slices[-1].start:
raise ValueError(
'Slices must be added in increasing timestamp order')
new_slice = tracing_slice.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
self._open_slices.append(new_slice)
new_slice.did_not_finish = True
self.PushSlice(new_slice)
return new_slice
def EndSlice(self, end_timestamp, end_thread_timestamp=None):
""" Ends the last begun slice in this group and pushes it onto the slice
array.
* end_timestamp: Timestamp when the slice ended in milliseconds
* end_thread_timestamp: Timestamp when the scheduled time of the slice ended
in milliseconds
returns completed slice.
"""
if not len(self._open_slices):
raise ValueError(
'EndSlice called without an open slice')
curr_slice = self._open_slices.pop()
if end_timestamp < curr_slice.start:
raise ValueError(
'Slice %s end time is before its start.' % curr_slice.name)
curr_slice.duration = end_timestamp - curr_slice.start
if end_thread_timestamp != None:
if curr_slice.thread_start == None:
raise ValueError(
'EndSlice with thread_timestamp called on open slice without ' +
'thread_timestamp')
curr_slice.thread_duration = (end_thread_timestamp -
curr_slice.thread_start)
curr_slice.did_not_finish = False
return curr_slice
def PushCompleteSlice(self, category, name, timestamp, duration,
thread_timestamp, thread_duration, args=None):
new_slice = tracing_slice.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
if duration == None:
new_slice.did_not_finish = True
else:
new_slice.duration = duration
new_slice.thread_duration = thread_duration
self.PushSlice(new_slice)
return new_slice
def PushSlice(self, new_slice):
self._newly_added_slices.append(new_slice)
return new_slice
def AutoCloseOpenSlices(self, max_timestamp, max_thread_timestamp):
for s in self._newly_added_slices:
if s.did_not_finish:
s.duration = max_timestamp - s.start
assert s.duration >= 0
if s.thread_start != None:
s.thread_duration = max_thread_timestamp - s.thread_start
assert s.thread_duration >= 0
self._open_slices = []
def IsTimestampValidForBeginOrEnd(self, timestamp):
if not len(self._open_slices):
return True
return timestamp >= self._open_slices[-1].start
def FinalizeImport(self):
self._BuildSliceSubRows()
def _BuildSliceSubRows(self):
'''This function works by walking through slices by start time.
The basic idea here is to insert each slice as deep into the subrow
list as it can go such that every subslice is fully contained by its
parent slice.
Visually, if we start with this:
0: [ a ]
1: [ b ]
2: [c][d]
To place this slice:
[e]
    We first check row 2's last item, [d]. [e] won't fit into [d] (they don't
    even intersect). So we go to row 1. That gives us [b], and [e] won't fit
into that either. So, we go to row 0 and its last slice, [a]. That can
completely contain [e], so that means we should add [e] as a subslice
of [a]. That puts it on row 1, yielding:
0: [ a ]
1: [ b ][e]
2: [c][d]
If we then get this slice:
[f]
We do the same deepest-to-shallowest walk of the subrows trying to fit
it. This time, it doesn't fit in any open slice. So, we simply append
it to row 0 (a root slice):
0: [ a ] [f]
1: [ b ][e]
'''
def CompareSlices(s1, s2):
if s1.start == s2.start:
# Break ties by having the slice with the greatest
# end timestamp come first.
return cmp(s2.end, s1.end)
return cmp(s1.start, s2.start)
assert len(self._toplevel_slices) == 0
if not len(self._newly_added_slices):
return
sorted_slices = sorted(self._newly_added_slices, cmp=CompareSlices)
root_slice = sorted_slices[0]
self._toplevel_slices.append(root_slice)
for s in sorted_slices[1:]:
if not self._AddSliceIfBounds(root_slice, s):
root_slice = s
self._toplevel_slices.append(root_slice)
self._newly_added_slices = []
def _AddSliceIfBounds(self, root, child):
''' Adds a child slice to a root slice its proper row.
Return False if the child slice is not in the bounds
of the root slice.
Because we know that the start time of child is >= the start time
of all other slices seen so far, we can just check the last slice
of each row for bounding.
'''
# The source trace data is in microseconds but we store it as milliseconds
# in floating-point. Since we can't represent micros as millis perfectly,
# two end=start+duration combos that should be the same will be slightly
# different. Round back to micros to ensure equality below.
child_end_micros = round(child.end * 1000)
root_end_micros = round(root.end * 1000)
if child.start >= root.start and child_end_micros <= root_end_micros:
if len(root.sub_slices) > 0:
if self._AddSliceIfBounds(root.sub_slices[-1], child):
return True
child.parent_slice = root
root.AddSubSlice(child)
return True
return False
| bsd-3-clause |
opendatagroup/cassius | trunk/cassius/utilities.py | 2 | 20034 | # Standard Python packages
import math
import numbers
import time
# Special dependencies
import numpy
# Cassius interdependencies
import mathtools
def unicode_number(x):
"""Convert a number to unicode, with appropriate substitutions."""
output = u"%g" % x
if output[0] == u"-":
output = u"\u2012" + output[1:]
index = output.find(u"e")
if index != -1:
uniout = unicode(output[:index]) + u"\u00d710"
saw_nonzero = False
for n in output[index+1:]:
if n == u"+": pass # uniout += u"\u207a"
elif n == u"-": uniout += u"\u207b"
elif n == u"0":
if saw_nonzero: uniout += u"\u2070"
elif n == u"1":
saw_nonzero = True
uniout += u"\u00b9"
elif n == u"2":
saw_nonzero = True
uniout += u"\u00b2"
elif n == u"3":
saw_nonzero = True
uniout += u"\u00b3"
elif u"4" <= n <= u"9":
saw_nonzero = True
if saw_nonzero: uniout += eval("u\"\\u%x\"" % (0x2070 + ord(n) - ord(u"0")))
else: uniout += n
if uniout[:2] == u"1\u00d7": uniout = uniout[2:]
return uniout
return output
def regular(step, start=0.):
"""Return a function that can be used to draw regular grid lines
or tick marks.
Arguments:
step (number): size of the spacing
start (number): starting value, indicating the offset
Returns:
The function, `f(low, high)` returned by `regular` maps
endpoints `low` and `high` to a numpy array of values
satisfying `step` and `start` between `low` and `high`.
Example::
>>> reg = regular(1., start=0.5)
>>> reg
<function regular(1, start=0.5) at 0x1e889b0>
>>> reg(0, 10)
array([ 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5])
"""
def output(low, high):
newstart = math.ceil((low - start)/step) * step + start
return numpy.arange(newstart, high, step, dtype=numpy.float)
output.func_name = "regular(%g, start=%g)" % (step, start)
return output
def _compute_majorticks(low, high, N, format):
eps = mathtools.epsilon * (high - low)
if N >= 0:
output = {}
x = low
for i in xrange(N):
if format == unicode_number and abs(x) < eps: label = u"0"
else: label = format(x)
output[x] = label
x += (high - low)/(N-1.)
return output
N = -N
counter = 0
granularity = 10**math.ceil(math.log10(max(abs(low), abs(high))))
lowN = math.ceil(1.*low / granularity)
highN = math.floor(1.*high / granularity)
while (lowN > highN):
countermod3 = counter % 3
if countermod3 == 0: granularity *= 0.5
elif countermod3 == 1: granularity *= 0.4
else: granularity *= 0.5
counter += 1
lowN = math.ceil(1.*low / granularity)
highN = math.floor(1.*high / granularity)
last_granularity = granularity
last_trial = None
while True:
trial = {}
for n in range(int(lowN), int(highN)+1):
x = n * granularity
if format == unicode_number and abs(x) < eps: label = u"0"
else: label = format(x)
trial[x] = label
if int(highN)+1 - int(lowN) >= N:
if last_trial == None:
v1, v2 = low, high
return {v1: format(v1), v2: format(v2)}
else:
low_in_ticks, high_in_ticks = False, False
for t in last_trial.keys():
if 1.*abs(t - low)/last_granularity < mathtools.epsilon: low_in_ticks = True
if 1.*abs(t - high)/last_granularity < mathtools.epsilon: high_in_ticks = True
lowN = 1.*low / last_granularity
highN = 1.*high / last_granularity
if abs(lowN - round(lowN)) < mathtools.epsilon and not low_in_ticks:
last_trial[low] = format(low)
if abs(highN - round(highN)) < mathtools.epsilon and not high_in_ticks:
last_trial[high] = format(high)
return last_trial
last_granularity = granularity
last_trial = trial
countermod3 = counter % 3
if countermod3 == 0: granularity *= 0.5
elif countermod3 == 1: granularity *= 0.4
else: granularity *= 0.5
counter += 1
lowN = math.ceil(1.*low / granularity)
highN = math.floor(1.*high / granularity)
def _compute_minorticks(low, high, major_ticks):
if len(major_ticks) < 2: major_ticks = {low: None, high: None}
major_ticks = major_ticks.keys()
major_ticks.sort()
granularities = []
for i in range(len(major_ticks)-1):
granularities.append(major_ticks[i+1] - major_ticks[i])
spacing = 10**(math.ceil(math.log10(min(granularities)) - 1))
output = {}
x = major_ticks[0] - math.ceil(1.*(major_ticks[0] - low) / spacing) * spacing
while x <= high:
if x >= low:
already_in_ticks = False
for t in major_ticks:
if abs(x-t) < mathtools.epsilon * (high - low): already_in_ticks = True
if not already_in_ticks: output[x] = None
x += spacing
return output
def _compute_logmajorticks(low, high, base, N, format):
if low >= high: raise ValueError, "low must be less than high"
if N == 1: raise ValueError, "N can be 0 or >1 to specify the exact number of ticks or negative to specify a maximum"
eps = mathtools.epsilon * (high - low)
if N >= 0:
output = {}
x = low
for i in xrange(N):
if format == unicode_number and abs(x) < eps: label = u"0"
else: label = format(x)
output[x] = label
x += (high - low)/(N-1.)
return output
N = -N
lowN = math.floor(math.log(low, base))
highN = math.ceil(math.log(high, base))
output = {}
for n in range(int(lowN), int(highN)+1):
x = base**n
label = format(x)
if low <= x <= high: output[x] = label
for i in range(1, len(output)):
keys = output.keys()
keys.sort()
keys = keys[::i]
values = map(lambda k: output[k], keys)
if len(values) <= N:
for k in output.keys():
if k not in keys:
output[k] = ""
break
if len(output) <= 2:
output2 = _compute_majorticks(low, high, N=-int(math.ceil(N/2.)), format=format)
lowest = min(output2)
for k in output:
if k < lowest: output2[k] = output[k]
output = output2
return output
def _compute_logminorticks(low, high, base):
if low >= high: raise ValueError, "low must be less than high"
lowN = math.floor(math.log(low, base))
highN = math.ceil(math.log(high, base))
output = {}
num_ticks = 0
for n in range(int(lowN), int(highN)+1):
x = base**n
if low <= x <= high: num_ticks += 1
for m in range(2, int(math.ceil(base))):
minix = m * x
if low <= minix <= high: output[minix] = None
if num_ticks <= 2: return {}
else: return output
def tickmarks(major=-10, minor=True, logbase=0, format=unicode_number):
"""Return a function that can be used to set standard tick marks.
Arguments:
major (number): exact number (if positive) or a maximum number of
"natural" values (multiples of 2 or 5) for the major (labeled) ticks
minor (bool): if True, also include minor (unlabeled) ticks
between the major ones
logbase (int): if 0, produce regular ticks; if positive, treat
as a base for logarithmic ticks
format (function or string): used to set labels of major ticks;
either a function mapping numbers to strings or a standard
format specifier (e.g. "%g", "%.2f", etc.)
Considerations:
To split a region into N equal-sized segments, ask for N+1
ticks.
Examples::
>>> ticks = tickmarks(minor=False, format="%g")
>>> ticks
<function tickmarks(major=-10, minor=False, logbase=0, format=%g) at 0x1b579b0>
# a function that can later be used to set tick-marks
>>> ticks(0., 10.)
{0.0: '0', 2.0: '2', 4.0: '4', 6.0: '6', 8.0: '8', 10.0: '10'}
>>> ticks = tickmarks(minor=False, logbase=10)
>>> ticks(10**7, 10**10)
{10000000: u'10\\u2077', 100000000: u'10\\u2078', 1000000000: u'10\\u2079', 10000000000: u'10\\xb9\\u2070'}
# the strings are unicode for 10^{7}, 10^{8}, 10^{9}, 10^{10}
>>> ticks = tickmarks(3, format="%g")
>>> ticks(0., 1.)
{0: '0', 0.5: '0.5', 0.2: None, 0.4: None, 1.0: '1', 0.3: None, 0.6: None, 0.1: None, 0.9: None, 0.7: None, 0.8: None}
# three major (labeled) tick-marks with minor tick-marks (labels=None) filling in the gaps
"""
if not callable(format):
tmp = format
format = lambda x: tmp % x
format.func_name = tmp
def linear_tickmarks(low, high):
if low >= high:
raise ValueError, "To compute tick-marks, 'low' must be lower than 'high'."
major_ticks = _compute_majorticks(low, high, major, format)
if minor:
minor_ticks = _compute_minorticks(low, high, major_ticks)
else:
minor_ticks = {}
minor_ticks.update(major_ticks)
return minor_ticks
def logarithmic_tickmarks(low, high):
if low >= high:
raise ValueError, "To compute tick-marks, 'low' must be lower than 'high'."
major_ticks = _compute_logmajorticks(low, high, logbase, major, format)
if minor:
minor_ticks = _compute_logminorticks(low, high, logbase)
else:
minor_ticks = {}
minor_ticks.update(major_ticks)
return minor_ticks
if logbase == 0: output = linear_tickmarks
else: output = logarithmic_tickmarks
output.func_name = "tickmarks(major=%d, minor=%s, logbase=%d, format=%s)" % (major, repr(minor), logbase, format.func_name)
return output
def calcrange(data, log=False):
"""Return the range (min, max) of a dataset, excluding any NANs."""
xmin, xmax = None, None
for x in data:
if not log or x > 0.:
if xmin is None or x < xmin: xmin = x
if xmax is None or x > xmax: xmax = x
if xmin is None and xmax is None:
if log:
return 0.1, 1.
else:
return 0., 1.
else:
return xmin, xmax
def calcrange_quartile(data, log=False):
"""Return the range (min, max) of a dataset, based on quartiles (stable against large numbers)."""
if not isinstance(data, numpy.ndarray):
data = numpy.array(data)
if log:
data = data[data > 0.]
if len(data) == 0:
if log: return 0.1, 1.
else: return 0., 1.
data = numpy.sort(data)
q1 = data[int(math.floor(0.25*len(data)))]
q3 = data[int(math.floor(0.75*len(data)))]
if log:
return q1 / (q3 - q1), q3 * (q3 - q1)
else:
return q1 - (q3 - q1), q3 + (q3 - q1)
def binning(data, low, high):
"""Return a number of bins for this dataset using the Freedman-Diaconis rule."""
if len(data) == 0: return 1
mask1 = (data >= low)
mask2 = (data < high)
mask3 = numpy.logical_and(mask1, mask2)
data = data[mask3]
if len(data) == 0: return 10
data.sort()
q1 = data[int(math.floor(0.25*len(data)))]
q3 = data[int(math.floor(0.75*len(data)))]
binwidth = 2. * (q3 - q1) / len(data)**(1./3.)
if binwidth > 0.:
return max(10, int(math.ceil((high - low)/binwidth)))
else:
return 10
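# Worked example (not part of the original module) of the Freedman-Diaconis
# rule used above: for 1000 points with q1 = 2.0 and q3 = 6.0 the bin width is
# 2*(6.0 - 2.0)/1000**(1/3.) = 0.8, so a [low, high) range of width 40 gives
# ceil(40/0.8) = 50 bins (with a floor of 10 bins).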
def binning_sturges(data, low, high):
    """Return a number of bins for this dataset using Sturges' rule: k = 1 + log2(n)."""
    if len(data) <= 1: return 1
    return int(math.ceil(math.log(len(data), 2) + 1))
def timesec(year=None, month=None, day=None, hour=None, min=None, sec=None):
"""Quickly obtain a number of seconds from the current time or a given time.
Arguments:
year (int): give a specific year; overrides current year
month (int): give a specific month; overrides current month
day (int): give a specific day of the month; overrides current day
hour (int): give a specific hour (24-hour clock); overrides current hour
min (int): give a specific minute; overrides current minute
sec (int): give a specific second; overrides current second
Returns:
Number of seconds since epoch (Jan 1, 1970) as a float with
fractional seconds. For the nearest number of seconds, round
the output.
"""
seconds, subsecs = divmod(time.time(), 1)
now = time.gmtime(int(seconds))
if year is None: year = now.tm_year
if month is None: month = now.tm_mon
if day is None: day = now.tm_mday
if hour is None: hour = now.tm_hour
if min is None: min = now.tm_min
if sec is None: sec = now.tm_sec
return time.mktime(time.struct_time((year, month, day, hour, min, sec, -1, -1, -1))) + subsecs
def fromtimestring(timestrings, format, subseconds=False, t0=0.):
"""Convert a time string or many time strings into a number(s) of seconds.
Arguments:
timestring (string or list of strings): time string(s) to be
converted
format (string): time formatting string (see `time
documentation
<http://docs.python.org/library/time.html#time.strftime>`_)
subseconds (bool): if True, interpret ".xxx" at the end of
the string as fractions of a second
t0 (number or time-string): the time from which to start
counting; zero is equivalent to Jan 1, 1970
Behavior:
If only one `timestring` is passed, the return value is a
single number; if a list of strings is passed, the return value
is a list of numbers.
Subseconds are _always_ at the end of the string, regardless of
where the seconds appear in the format (if at all).
"""
if isinstance(t0, (numbers.Number, numpy.number)) or format is None:
t0 = float(t0)
else:
if subseconds:
pytimestring, subsecs = t0.split(".")
subsecs = float("0." + subsecs)
else:
pytimestring, subsecs = t0, 0.
tmp = time.strptime(pytimestring, format)
tmp = [tmp.tm_year, tmp.tm_mon, tmp.tm_mday, tmp.tm_hour, tmp.tm_min, tmp.tm_sec, tmp.tm_wday, tmp.tm_yday, tmp.tm_isdst]
if format.find("%y") == -1 and format.find("%Y") == -1:
tmp[0] = 1970
tzoffset = 0
if format.find("%Z") == -1:
# if time.daylight:
# tzoffset = time.altzone
# else:
tzoffset = time.timezone
t0 = time.mktime(tuple(tmp)) - tzoffset + subsecs
single_value = False
if isinstance(timestrings, basestring):
single_value = True
timestrings = [timestrings]
output = numpy.empty(len(timestrings), dtype=numpy.float)
for i, timestring in enumerate(timestrings):
if format is None:
output[i] = float(timestring)
else:
if subseconds:
pytimestring, subsecs = timestring.split(".")
subsecs = float("0." + subsecs)
else:
pytimestring, subsecs = timestring, 0.
tmp = time.strptime(pytimestring, format)
tmp = [tmp.tm_year, tmp.tm_mon, tmp.tm_mday, tmp.tm_hour, tmp.tm_min, tmp.tm_sec, tmp.tm_wday, tmp.tm_yday, tmp.tm_isdst]
if format.find("%y") == -1 and format.find("%Y") == -1:
tmp[0] = 1970
tzoffset = 0
if format.find("%Z") == -1:
# if time.daylight:
# tzoffset = time.altzone
# else:
tzoffset = time.timezone
output[i] = time.mktime(tuple(tmp)) - tzoffset + subsecs - t0
if single_value: return output[0]
else: return output
def totimestring(timenumbers, format, subseconds=False, t0=0.):
"""Convert a number of seconds or a list of numbers into time string(s).
Arguments:
timenumbers (number or list of numbers): time(s) to be
converted
format (string): time formatting string (see `time
documentation
<http://docs.python.org/library/time.html#time.strftime>`_)
subseconds (bool): if True, append ".xxx" at the end of
the string as fractions of a second
t0 (number or time-string): the time from which to start
counting; zero is equivalent to Jan 1, 1970
Behavior:
If only one `timenumbers` is passed, the return value is a
single string; if a list of strings is passed, the return value
is a list of strings.
Subseconds are _always_ at the end of the string, regardless of
where the seconds appear in the format (if at all).
"""
if isinstance(t0, (numbers.Number, numpy.number)):
t0 = float(t0)
else:
if subseconds:
pytimestring, subsecs = t0.split(".")
subsecs = float("0." + subsecs)
else:
pytimestring, subsecs = t0, 0.
tmp = time.strptime(pytimestring, format)
tmp = [tmp.tm_year, tmp.tm_mon, tmp.tm_mday, tmp.tm_hour, tmp.tm_min, tmp.tm_sec, tmp.tm_wday, tmp.tm_yday, tmp.tm_isdst]
if format.find("%y") == -1 and format.find("%Y") == -1:
tmp[0] = 1970
tzoffset = 0
if format.find("%Z") == -1:
# if time.daylight:
# tzoffset = time.altzone
# else:
tzoffset = time.timezone
t0 = time.mktime(tuple(tmp)) - tzoffset + subsecs
single_value = False
if isinstance(timenumbers, (numbers.Number, numpy.number)):
single_value = True
timenumbers = [timenumbers]
output = []
for timenumber in timenumbers:
if subseconds:
subsecs, secs = math.modf(timenumber + t0)
ss = str(abs(subsecs))[2:]
if ss == "0":
output.append(time.strftime(format, time.gmtime(int(secs))))
else:
output.append("%s.%s" % (time.strftime(format, time.gmtime(int(secs))), ss))
else:
secs = round(timenumber + t0)
output.append(time.strftime(format, time.gmtime(int(secs))))
if single_value: return output[0]
else: return output
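# Illustrative sketch (not part of the original module): round-tripping between
# time strings and seconds with the two functions above.  The format string and
# date are arbitrary examples; with no %Z in the format, strings are treated as
# UTC.
#
#   t = fromtimestring("2011-07-04 12:00:00", "%Y-%m-%d %H:%M:%S")
#   totimestring(t, "%Y-%m-%d %H:%M:%S")    # -> "2011-07-04 12:00:00"
#   totimestring(t + 3600., "%H:%M:%S")     # -> "13:00:00"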
def timeticks(major, minor, format="%Y-%m-%d %H:%M:%S", subseconds=False, t0=0., start=None):
"""Set x tick-marks to temporally meaningful values.
Arguments:
major (number): number of seconds interval (may use combinations
of SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, or YEAR constants)
for major ticks (ticks with labels)
minor (number): same for minor ticks (shorter ticks without labels)
format (string): time format (see `time documentation
<http://docs.python.org/library/time.html#time.strftime>`_)
subseconds (bool): if True, interpret ".xxx" at the end of
the string as fractions of a second
t0 (number or time-string): the time from which to start
counting; zero is equivalent to Jan 1, 1970
start (number, string, or `None`): a time to set the offset
of the tick-marks (use `t0` if `None`)
Behavior:
A "month" is taken to be exactly 31 days and a "year" is
taken to be exactly 365 days. Week markers will only line
up with month markers at `start`.
"""
if start is None: start = t0
if isinstance(start, basestring): start = fromtimestring(start, format, subseconds, t0)
def timeticks(low, high):
newstart = math.ceil((low - start)/major) * major + start
return dict(map(lambda x: (x, totimestring(x, format, subseconds, t0)), numpy.arange(newstart, high, major, dtype=numpy.float)))
def timeminiticks(low, high):
newstart = math.ceil((low - start)/minor) * minor + start
return dict(map(lambda x: (x, None), numpy.arange(newstart, high, minor, dtype=numpy.float)))
return timeticks, timeminiticks
SECOND = 1.
MINUTE = 60.
HOUR = 60.*60.
DAY = 60.*60.*24.
WEEK = 60.*60.*24.*7.
MONTH = 60.*60.*24.*31.
YEAR = 60.*60.*24.*365.
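# Illustrative sketch (not part of the original module): using timeticks() with
# the interval constants above to label an axis in days, with minor ticks every
# six hours.  The format string is an arbitrary example.
#
#   major, minor = timeticks(DAY, 6*HOUR, format="%Y-%m-%d")
#   major(0., 3*DAY)   # {0.0: '1970-01-01', 86400.0: '1970-01-02', 172800.0: '1970-01-03'}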
| apache-2.0 |
ursine/Great68 | asm6502.py | 1 | 62272 | import re
class asm6502():
def __init__(self, debug=0):
# print "65C02 Assembler"
self.debuglevel = debug
self.text_of_lines = list() # of strings
self.lines = list() # parsed lines (symbol, opcode, addrmode, value
self.symbols = list() # of (name,line#) tuples
self.labeldict = dict()
self.labellist = list()
self.opcodelist = list()
self.opcodedict = dict()
self.addressmodes = dict()
self.addressmmodelist = list()
self.object_code = list() # 64 K entries to cover whole memory map
for i in xrange(0, 65536):
self.object_code.append(-1) # -1 indicate location not populated
self.littleendian = True # Use le and be directives to change this
self.genopcodelist() # generate the tables
self.build_opcode_map()
self.build_encoding_table()
# some handy lookups
self.decimal_digits = "0123456789"
self.hex_digits = "abcdefABCDEF0123456789"
self.octal_digits = "01234567"
self.letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"
self.allstuff = list()
self.line = 1
def clear_state(self):
self.text_of_lines = list() # of strings
        self.lines = list() # parsed lines (symbol, opcode, addrmode, value)
self.symbols = list() # of (name,line#) tuples
self.labeldict = dict()
self.labellist = list()
self.opcodelist = list()
self.opcodedict = dict()
self.addressmodes = dict()
self.addressmmodelist = list()
self.littleendian = True # Use le and be directives to change this
self.allstuff = list()
self.line = 1
def info(self, linenumber, text):
self.debug(1, "INFO: Line %d :%s" % (linenumber, text))
def warning(self, linenumber, linetext, text):
print "WARNING: Line %d :%s" % (linenumber, text)
print " " + linetext
def strip_comments(self, thestring):
self.debug(3, "string passed to strip_comments()=%s" % thestring)
position = thestring.find(';')
if (position == -1):
return (thestring, "")
else:
return (thestring[:position].rstrip(), thestring[position:].rstrip())
def debug(self, level=0, astring="No String Given"):
if (level > self.debuglevel):
pass
else:
print " DEBUG(%d):%s" % (level, astring)
# find a label at the front. Strip it and return the symbol
def strip_label(self, thestring, linenumber):
position = thestring.find(':')
if (position == -1):
return ("", thestring.strip())
else:
labelstr = thestring[:position].strip()
returnstr = thestring[position + 1:].strip()
position = labelstr.find(' ')
if (position == -1):
self.labeldict[labelstr] = linenumber
self.labellist.append((linenumber, labelstr))
self.debug(2, "Line %d Label %s found at line %d" % (linenumber, labelstr, linenumber))
return (labelstr, returnstr)
else:
labelstr = labelstr[:position]
self.warning(linenumber=linenumber, linetext=thestring,
text="More than one thing in the label field. Ignoring everything between the first space and the colon")
            self.labellist.append((linenumber, labelstr))
            self.labeldict[labelstr] = linenumber
self.info(linenumber, text="Label %s found at line %d" % (labelstr, linenumber))
return (labelstr, returnstr)
# Consider the next thing an opcode
# strip it and return the opcode with the remainder of the line
def strip_opcode(self, thestring, linenumber):
mystring = thestring.strip()
noopcode = False
noremainder = False
if len(mystring) == 0:
opcodestr = ""
remainderstr = ""
noopcode = True
noremainder = True
elif ' ' in mystring:
position = thestring.find(' ')
opcodestr = thestring[:position].strip()
remainderstr = thestring[position + 1:].strip()
noopcode = False
noremainder = False
else:
opcodestr = mystring
remainderstr = ""
noopcode = False
noremainder = True
if noopcode:
# print "no opcode or remainder"
return (("", ""))
else:
if noremainder:
# print "opcode %s but no remainder" % opcodestr
return ((opcodestr, ""))
else:
# print "opcode %s with remainder %s" % (opcodestr,remainderstr)
return ((opcodestr, remainderstr))
def check_opcode(self, opcode_in, linenumber):
opcode = opcode_in.lower()
if opcode == "":
self.debug(3, "check_opcode returning null")
return None
elif opcode in self.validopcodes:
self.opcodelist.append((linenumber, opcode))
self.debug(3, "check_opcode found %s in validopcodes" % opcode)
return opcode
elif opcode in self.validdirectives:
self.opcodelist.append((linenumber, opcode))
            self.debug(3, "check_opcode found %s in validdirectives" % opcode)
return opcode
else:
self.debug(3, "check_opcode could not find opcode %s " % opcode)
self.warning(linenumber=linenumber, linetext="", text="unknown opcode %s" % opcode)
return None
def identify_addressmodeformat(self, remainderstr, linenumber):
# remove all whitespace
thestring = remainderstr.replace(" ", "")
if (thestring == ""):
premode = "nothing"
value = ""
elif thestring[0] == "#":
# It's immediate
premode = "immediate"
value = thestring[1:]
elif (thestring == "a") or (thestring == "A"):
premode = "accumulator"
value = ""
elif re.search("""^\((.*),[xX]\)$""", thestring):
premode = "bracketedindexedx"
b = re.search("""^\((.*),[xX]\)$""", thestring)
value = b.group(1)
elif re.search("""^\((.*)\),[yY]$""", thestring):
premode = "bracketedcommay"
b = re.search("""^\((.*)\),[yY]$""", thestring)
value = b.group(1)
elif re.search("""^(.*),[xX]$""", thestring):
b = re.search("""^(.*),[xX]$""", thestring)
value = b.group(1)
premode = "numbercommax"
elif re.search("""^(.*),[yY]$""", thestring):
b = re.search("""^(.*),[yY]$""", thestring)
value = b.group(1)
premode = "numbercommay"
elif (thestring[0] == '$') or (thestring[0] == '@') \
or (thestring[0] == '%') \
or (thestring[0] == '&') \
or (thestring[0] in self.decimal_digits):
premode = "number"
value = thestring
        elif ((thestring[0] in self.letters) and ((thestring != "A") and (thestring != "a"))):
premode = "number"
value = thestring
elif (thestring[0] == "+") or (thestring[0] == "-"):
premode = "offset"
value = thestring
elif re.search("""^\((.*),[xX]\)$""", thestring):
premode = "bracketedindexedx"
b = re.search("""^\((.*),[xX]\)$""", thestring)
value = b.group(1)
elif re.search("""^\((.*)\),[yY]$""", thestring):
premode = "bracketedcommay"
b = re.search("""^\((.*)\),[yY]$""", thestring)
value = b.group(1)
elif re.search("""^\(.*\)$""", thestring):
premode = "bracketed"
value = thestring[1:-1]
elif thestring[0] in self.letters:
premode = "name"
value = thestring
else:
self.warning(linenumber, linetext=remainderstr, text="Can\'t make sense of address mode %s" % remainderstr)
premode = "nothing"
value = ""
self.debug(2, "premode = %s, value = %s" % (premode, value))
# We've classified the basic formats in premode
# some formats mean different things with different instructions
# E.G. a number is an offset with a branch but absolute with a load
# So we need to cross check the combinations of instruction with format
# to derive the actual address mode and whether or not it is allowed.
return (premode, value)
# Address mode format name applied
# implicit ~ "implicit"
# immediate #num ~ "immediate"
# accumulator A ~ "accumulator"
# absolute $2000 ~ "absolute"
# zero page $20 ~ "zeropage"
# absolute indexed x $5000,X ~ "absolutex"
# absolute indexed y $5000,y ~ "absolutey"
# zeropage indexed x $20,X ~ "zeropagex"
# zeropage indexed y $20,Y ~ "zeropagey"
# relative +10 (or label) ~ "relative"
# zeropage indexed indirect x ($20,X) ~ "zeropageindexedindirectx"
# zeropage indexed indirect y ($20),Y ~ "zeropageindexedindirecty"
# absolute indexed indirect ($5000,X) - only JMP ~ "absoluteindexedindirect"
# zeropage indirect ($20) ~ "zeropageindirect"
# absolute indirect ($5000) - only JMP ~ "absoluteindirect"
#
# names are numbers..
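    # Illustrative examples of the classification below (added for clarity; `n` is
    # a placeholder line number, and the results assume the opcode tables built in
    # genopcodelist()):
    #   identify_addressmode("lda", "immediate", "$10",   n) -> "immediate"
    #   identify_addressmode("lda", "number",    "$10",   n) -> "zeropage"
    #   identify_addressmode("lda", "number",    "$1234", n) -> "absolute"
    #   identify_addressmode("bne", "number",    "loop",  n) -> "relative"
    #   identify_addressmode("jmp", "bracketed", "$5000", n) -> "absoluteindirect"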
def identify_addressmode(self, opcode, premode, value, linenumber):
if (opcode in self.implicitopcodes) and (premode == "nothing"):
return "implicit"
if (opcode in self.immediateopcodes) and (premode == "immediate"):
return "immediate"
if (opcode in self.accumulatoropcodes) and (premode == "accumulator"):
return "accumulator"
if (opcode in self.accumulatoropcodes) and (premode == "nothing"):
return "accumulator"
if (opcode == "jmp"):
if (premode == "bracketed"):
return "absoluteindirect"
if (premode == "bracketedindexedx"):
return "absoluteindexedindirect"
if (premode == "number"):
return "absolute"
return "UNDECIDED"
if (opcode in self.zeropageopcodes) and (premode == "number") and (self.decode_value(value) != -1):
if (self.decode_value(value) < 256):
return "zeropage"
if (opcode in self.relativeopcodes) and ((premode == "number") or (premode == "offset")):
return "relative"
if (opcode in self.absoluteopcodes) and (premode == "number"):
return "absolute"
self.debug(3, "IDENTIFY_ADDRESSMODE for zeropagex opcode=%s premode=%s" % (opcode, premode))
if (opcode in self.zeropagexopcodes):
self.debug(3, "opcode was in zeropagexopcodes")
else:
self.debug(3, "opcode wasnt in zeropagexopcodes")
if (opcode in self.zeropagexopcodes) and (premode == "numbercommax"):
self.debug(3, "IDENTIFY_ADDRESSMODE (opcode was in self.zeropagexopcodes) and (premode was== numbercommax)")
self.debug(3, "IDENTIFY_ADDRESSMODE decoded value = 0x%x" % self.decode_value(value))
if (self.decode_value(value) < 256):
return "zeropagex"
if (opcode in self.zeropageyopcodes) and (premode == "numbercommay"):
if (self.decode_value(value) < 256):
return "zeropagey"
if (opcode in self.absolutexopcodes) and (premode == "numbercommax"):
return "absolutex"
if (opcode in self.absoluteyopcodes) and (premode == "numbercommay"):
return "absolutey"
if (opcode in self.zeropageyopcodes) and (premode == "numbercommay"):
return "zeropagey"
if (opcode in self.zeropageindexedindirectxopcodes) and (premode == "bracketedindexedx"):
return "zeropageindexedindirectx"
if (opcode in self.zeropageindexedindirectyopcodes) and (premode == "bracketedcommay"):
return "zeropageindexedindirecty"
if (opcode in self.zeropageindirectopcodes) and (premode == "bracketed"):
if (self.decode_value(value) < 256):
return "zeropageindirect"
self.debug(2, "INFO: GOT TO END OF IDENTIFY_ADDRESSMODE: Line %d opcode:%s premode:%s" % (
linenumber, opcode, premode))
return "UNDECIDED"
def decode_extraquadwords(self, linenumber, linetext, s):
newstring = "["
for c in s:
if c == "$":
newstring = newstring + "0x"
elif c == "@":
newstring = newstring + "0"
else:
newstring = newstring + c
newstring = newstring + "]"
thelist = eval(newstring)
newlist = list()
for i in thelist:
if type(i) == int:
a = i & 0x00ff
b = (((i & 0x000000000000ff00) >> 8) & 0x000000ff)
c = (((i & 0x0000000000ff0000) >> 16) & 0x000000ff)
d = (((i & 0x00000000ff000000) >> 24) & 0x000000ff)
e = (((i & 0x000000ff00000000) >> 32) & 0x000000ff)
f = (((i & 0x0000ff0000000000) >> 40) & 0x000000ff)
g = (((i & 0x00ff000000000000) >> 48) & 0x000000ff)
h = (((i & 0xff00000000000000) >> 56) & 0x000000ff)
if (self.littleendian == True):
newlist.append(a)
newlist.append(b)
newlist.append(c)
newlist.append(d)
newlist.append(e)
newlist.append(f)
newlist.append(g)
newlist.append(h)
else:
                    newlist.append(h)
newlist.append(g)
newlist.append(f)
newlist.append(e)
newlist.append(d)
newlist.append(c)
newlist.append(b)
newlist.append(a)
else:
self.warning(linenumber, linetext, "Can't parse word string %s" % newstring)
emptylist = list()
return emptylist
return newlist
def decode_extradoublewords(self, linenumber, linetext, s):
newstring = "["
for c in s:
if c == "$":
newstring = newstring + "0x"
elif c == "@":
newstring = newstring + "0"
else:
newstring = newstring + c
newstring = newstring + "]"
thelist = eval(newstring)
newlist = list()
for i in thelist:
if type(i) == int:
a = i & 0x00ff
b = (((i & 0x0000ff00) >> 8) & 0x000000ff)
c = (((i & 0x00ff0000) >> 16) & 0x000000ff)
d = (((i & 0xff000000) >> 24) & 0x000000ff)
if (self.littleendian == True):
newlist.append(a)
newlist.append(b)
newlist.append(c)
newlist.append(d)
else:
newlist.append(d)
newlist.append(c)
newlist.append(b)
newlist.append(a)
else:
self.warning(linenumber, linetext, "Can't parse word string %s" % newstring)
emptylist = list()
return emptylist
return newlist
# Just count the number of bytes without working out what they are
def count_extrabytes(self, opcode, operand):
count = len(operand.split(','))
if opcode == "db":
return count
elif opcode == "dw":
return count * 2
elif opcode == "ddw":
return count * 4
elif opcode == "dqw":
return count * 8
else:
return None
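    # For example (illustrative only):
    #   count_extrabytes("db", "$01,$02,$03") -> 3
    #   count_extrabytes("dw", "$1234,label") -> 4
    #   count_extrabytes("dqw", "$00")        -> 8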
def decode_extrawords(self, linenumber, linetext, s):
csl = s.split(',')
newlist = list()
for theword in csl:
if theword[0] == '&':
label = theword[1:]
value = self.symbols[label]
elif theword[0] == '$':
value = eval("0x" + theword[1:])
elif theword[0] == '@':
value = eval("0" + theword[1:])
else:
value = eval(theword)
if type(value) == int:
a = value & 0x00ff
b = (((value & 0xff00) >> 8) & 0x00ff)
if (self.littleendian == True):
newlist.append(a)
newlist.append(b)
else:
newlist.append(b)
newlist.append(a)
else:
                self.warning(linenumber, linetext, "Can't parse word string %s" % s)
emptylist = list()
return emptylist
return newlist
def decode_extrabytes(self, linenumber, linetext, s):
newstring = "["
for c in s:
if c == "$":
newstring = newstring + "0x"
elif c == "@":
newstring = newstring + "0"
else:
newstring = newstring + c
newstring = newstring + "]"
# Now parse the list
thelist = eval(newstring)
newlist = list()
for i in thelist:
if type(i) == int:
newlist.append(i)
else:
self.warning(linenumber, linetext, "Can't parse byte string %s" % newstring)
emptylist = list()
return emptylist
return newlist
def decode_value(self, s):
if (s[0] == '$'):
ns = int(s[1:], 16)
return ns
if (s[0] == '@'):
ns = int(s[1:], 8)
return ns
if (s[0] == '%'):
ns = int(s[1:], 2)
return ns
if (s[0] in self.decimal_digits):
ns = int(s)
return ns
return (-1)
# Address mode format name applied
# implicit ~ "implicit"
# immediate #num ~ "immediate"
# accumulator A ~ "accumulator"
# absolute $2000 ~ "absolute"
# zero page $20 ~ "zeropage"
# absolute indexed x $5000,X ~ "absolutex"
# absolute indexed y $5000,y ~ "absolutey"
# zeropage indexed x $20,X ~ "zeropagex"
# zeropage indexed y $20,Y ~ "zeropagey"
# relative +10 (or label) ~ "relative"
# zeropage indexed indirect x ($20,X) ~ "zeropageindexedindirectx"
# zeropage indexed indirect y ($20),Y ~ "zeropageindexedindirecty"
# absolute indexed indirect ($5000,X) - only JMP ~ "absoluteindexedindirect"
# zeropage indirect ($20) ~ "zeropageindirect"
# absolute indirect ($5000) - only JMP ~ "absoluteindirect"
def genopcodelist(self):
self.modeswithlowbytevalue = \
["immediate", "absolute", "zeropage", "absolutex", "absolutey", \
"zeropagex", "zeropagey", "zeropageindexedindirectx", "zeropageindexedindirecty" \
"absoluteindexedindirect", "zeropageindirect",
"absoluteindirect"]
self.modeswithhighbytevalue = \
["absolute", "absolutex", "absolutey", \
"absoluteindexedindirect", "absoluteindirect"]
self.validdirectives = \
["db", "dw", "ddw", "dqw", "str", "org", "le", "be"]
self.validopcodes = \
["adc", "and", "asl", "bcc", "bcs", "beq", "bit", "bmi", "bne", \
"bpl", "bra", "brk", "bvc", "bvs", "clc", "cld", "cli", "clv", \
"cmp", "cpx", "cpy", "dea", "dec", "dex", "dey", "eor", "inc", "ina", "inx", \
"iny", "jmp", "jsr", "lda", "ldx", "ldy", "lsr", "nop", "ora", \
"pha", "php", "phx", "phy", "pla", "plp", "plx", "ply", "rol", \
"ror", "rti", "rts", "sbc", "sec", "sed", "sei", "sta", "stx", \
"sty", "stz", "tax", "tay", "trb", "tsb", "tsx", "txa", "txs", \
"tya"]
self.implicitopcodes = \
["brk", "clc", "cld", "cli", "clv", "dex", "dey", "inx", "iny", "nop", \
"pha", "php", "phx", "phy", "pla", "plp", "plx", "ply", "rti", "rts", \
"sec", "sed", "sei", "tax", "tay", "trb", "tsb", "tsx", "txa", "txs", \
"tya"]
self.immediateopcodes = \
["adc", "and", "bit", "cmp", "cpx", "cpy", "eor", "lda", "ldx", \
"ldy", "ora", "sbc"]
self.accumulatoropcodes = \
["asl", "dea", "dec", "ina", "inc", "lsr", "rol", "ror"]
self.zeropageopcodes = \
["adc", "and", "asl", "bit", "cmp", "cpx", "cpy", "dec", "eor", "inc", \
"lda", "ldx", "ldy", "lsr", "ora", "rol", "ror", "sbc", "sta", "stx", \
"sty", "stz", "trb", "tsb"]
self.absoluteopcodes = \
["adc", "and", "asl", "bit", "cmp", "cpx", "cpy", "dec", "eor", "inc", \
"jmp", "jsr", "lda", "ldx", "ldy", "lsr", "ora", "rol", "ror", "sbc", \
"sta", "stx", "sty", "stz", "trb", "tsb"]
self.absolutexopcodes = \
["adc", "and", "asl", "bit", "cmp", "dec", "eor", "inc", \
"lda", "lsr", "ora", "rol", "ror", "sbc", \
"sta", "stz", "ldy"]
self.absoluteyopcodes = \
["adc", "and", "cmp", "eor", \
"lda", "ldx", "ora", "sbc", "sta"]
self.zeropagexopcodes = \
["adc", "and", "cmp", "eor", "lda", "dec", "bit", "asl", "ldy", \
"ora", "sbc", "sta", "sty", "ror", "rol", "lsr", "inc", "stz"]
self.zeropageyopcodes = \
["ldx", "stx"]
self.relativeopcodes = \
["bmi", "bne", "bpl", "bra", "bvc", "bvs", "bcc", "bcs", "beq"]
self.zeropageindexedindirectxopcodes = \
["adc", "and", "cmp", "eor", "lda", "ora", "sbc", "sta"]
self.zeropageindexedindirectyopcodes = \
["adc", "and", "cmp", "eor", "lda", "ora", "sbc", "sta"]
self.zeropageindirectopcodes = \
["adc", "and", "cmp", "eor", "lda", "ora", "sbc", "sta"]
def build_opcode_map(self):
self.map = dict()
for opcode in self.validopcodes:
self.map[opcode] = list()
if opcode in self.implicitopcodes:
self.map[opcode].append("implicit")
if opcode in self.immediateopcodes:
self.map[opcode].append("immediate")
if opcode in self.accumulatoropcodes:
self.map[opcode].append("accumulator")
if opcode in self.zeropageopcodes:
self.map[opcode].append("zeropage")
if opcode in self.absoluteopcodes:
self.map[opcode].append("absolute")
if opcode in self.absolutexopcodes:
self.map[opcode].append("absolutex")
if opcode in self.absoluteyopcodes:
self.map[opcode].append("absolutey")
if opcode in self.zeropagexopcodes:
self.map[opcode].append("zeropagex")
if opcode in self.zeropageyopcodes:
self.map[opcode].append("zeropagey")
if opcode in self.relativeopcodes:
self.map[opcode].append("relative")
if opcode in self.zeropageindexedindirectxopcodes:
self.map[opcode].append("zeropageindexedindirectx")
if opcode in self.zeropageindexedindirectyopcodes:
self.map[opcode].append("zeropageindexedindirecty")
if opcode in self.zeropageindirectopcodes:
self.map[opcode].append("zeropageindirect")
def build_encoding_table(self):
self.hexcodes = dict()
self.hexcodes[0x00] = ("brk", "implicit")
self.hexcodes[0x10] = ("bpl", "relative")
self.hexcodes[0x20] = ("jsr", "absolute")
self.hexcodes[0x30] = ("bmi", "relative")
self.hexcodes[0x40] = ("rti", "implicit")
self.hexcodes[0x50] = ("bvc", "relative")
self.hexcodes[0x60] = ("rts", "implicit")
self.hexcodes[0x70] = ("bvs", "relative")
self.hexcodes[0x80] = ("bra", "relative")
self.hexcodes[0x90] = ("bcc", "relative")
self.hexcodes[0xA0] = ("ldy", "immediate")
self.hexcodes[0xB0] = ("bcs", "relative")
self.hexcodes[0xC0] = ("cpy", "immediate")
self.hexcodes[0xD0] = ("bne", "relative")
self.hexcodes[0xE0] = ("cpx", "immediate")
self.hexcodes[0xF0] = ("beq", "relative")
self.hexcodes[0x01] = ("ora", "zeropageindexedindirectx")
self.hexcodes[0x11] = ("ora", "zeropageindexedindirecty")
self.hexcodes[0x21] = ("and", "zeropageindexedindirectx")
self.hexcodes[0x31] = ("and", "zeropageindexedindirecty")
self.hexcodes[0x41] = ("eor", "zeropageindexedindirectx")
self.hexcodes[0x51] = ("eor", "zeropageindexedindirecty")
self.hexcodes[0x61] = ("adc", "zeropageindexedindirectx")
self.hexcodes[0x71] = ("adc", "zeropageindexedindirecty")
self.hexcodes[0x81] = ("sta", "zeropageindexedindirectx")
self.hexcodes[0x91] = ("sta", "zeropageindexedindirecty")
self.hexcodes[0xA1] = ("lda", "zeropageindexedindirectx")
self.hexcodes[0xB1] = ("lda", "zeropageindexedindirecty")
self.hexcodes[0xC1] = ("cmp", "zeropageindexedindirectx")
self.hexcodes[0xD1] = ("cmp", "zeropageindexedindirecty")
self.hexcodes[0xE1] = ("sbc", "zeropageindexedindirectx")
self.hexcodes[0xF1] = ("sbc", "zeropageindexedindirecty")
self.hexcodes[0x02] = ("", "")
self.hexcodes[0x12] = ("ora", "zeropageindirect")
self.hexcodes[0x22] = ("", "")
self.hexcodes[0x32] = ("and", "zeropageindirect")
self.hexcodes[0x42] = ("", "")
self.hexcodes[0x52] = ("eor", "zeropageindirect")
self.hexcodes[0x62] = ("", "")
self.hexcodes[0x72] = ("adc", "zeropageindirect")
self.hexcodes[0x82] = ("", "")
self.hexcodes[0x92] = ("sta", "zeropageindirect")
self.hexcodes[0xA2] = ("ldx", "immediate")
self.hexcodes[0xB2] = ("lda", "zeropageindirect")
self.hexcodes[0xC2] = ("", "")
self.hexcodes[0xD2] = ("cmp", "zeropageindirect")
self.hexcodes[0xE2] = ("", "")
self.hexcodes[0xF2] = ("sbc", "zeropageindirect")
self.hexcodes[0x03] = ("", "")
self.hexcodes[0x13] = ("", "")
self.hexcodes[0x23] = ("", "")
self.hexcodes[0x33] = ("", "")
self.hexcodes[0x43] = ("", "")
self.hexcodes[0x53] = ("", "")
self.hexcodes[0x63] = ("", "")
self.hexcodes[0x73] = ("", "")
self.hexcodes[0x83] = ("", "")
self.hexcodes[0x93] = ("", "")
self.hexcodes[0xA3] = ("", "")
self.hexcodes[0xB3] = ("", "")
self.hexcodes[0xC3] = ("", "")
self.hexcodes[0xD3] = ("", "")
self.hexcodes[0xE3] = ("", "")
self.hexcodes[0xF3] = ("", "")
self.hexcodes[0x04] = ("tsb", "zeropage")
self.hexcodes[0x14] = ("trb", "zeropage")
self.hexcodes[0x24] = ("bit", "zeropage")
self.hexcodes[0x34] = ("bit", "zeropagex")
self.hexcodes[0x44] = ("", "")
self.hexcodes[0x54] = ("", "")
self.hexcodes[0x64] = ("stz", "zeropage")
self.hexcodes[0x74] = ("stz", "zeropagex")
self.hexcodes[0x84] = ("sty", "zeropage")
self.hexcodes[0x94] = ("sty", "zeropagex")
self.hexcodes[0xA4] = ("ldy", "zeropage")
self.hexcodes[0xB4] = ("ldy", "zeropagex")
self.hexcodes[0xC4] = ("cpy", "zeropage")
self.hexcodes[0xD4] = ("", "")
self.hexcodes[0xE4] = ("cpx", "zeropage")
self.hexcodes[0xF4] = ("", "")
self.hexcodes[0x05] = ("ora", "zeropage")
self.hexcodes[0x15] = ("ora", "zeropagex")
self.hexcodes[0x25] = ("and", "zeropage")
self.hexcodes[0x35] = ("and", "zeropagex")
self.hexcodes[0x45] = ("eor", "zeropage")
self.hexcodes[0x55] = ("eor", "zeropagex")
self.hexcodes[0x65] = ("adc", "zeropage")
self.hexcodes[0x75] = ("adc", "zeropagex")
self.hexcodes[0x85] = ("sta", "zeropage")
self.hexcodes[0x95] = ("sta", "zeropagex")
self.hexcodes[0xA5] = ("lda", "zeropage")
self.hexcodes[0xB5] = ("lda", "zeropagex")
self.hexcodes[0xC5] = ("cmp", "zeropage")
self.hexcodes[0xD5] = ("cmp", "zeropagex")
self.hexcodes[0xE5] = ("sbc", "zeropage")
self.hexcodes[0xF5] = ("sbc", "zeropagex")
self.hexcodes[0x06] = ("asl", "zeropage")
self.hexcodes[0x16] = ("asl", "zeropagex")
self.hexcodes[0x26] = ("rol", "zeropage")
self.hexcodes[0x36] = ("rol", "zeropagex")
self.hexcodes[0x46] = ("lsr", "zeropage")
self.hexcodes[0x56] = ("lsr", "zeropagex")
self.hexcodes[0x66] = ("ror", "zeropage")
self.hexcodes[0x76] = ("ror", "zeropagex")
self.hexcodes[0x86] = ("stx", "zeropage")
self.hexcodes[0x96] = ("stx", "zeropagey")
self.hexcodes[0xA6] = ("ldx", "zeropage")
self.hexcodes[0xB6] = ("ldx", "zeropagey")
self.hexcodes[0xC6] = ("dec", "zeropage")
self.hexcodes[0xD6] = ("dec", "zeropagex")
self.hexcodes[0xE6] = ("inc", "zeropage")
self.hexcodes[0xF6] = ("inc", "zeropagex")
self.hexcodes[0x07] = ("", "")
self.hexcodes[0x17] = ("", "")
self.hexcodes[0x27] = ("", "")
self.hexcodes[0x37] = ("", "")
self.hexcodes[0x47] = ("", "")
self.hexcodes[0x57] = ("", "")
self.hexcodes[0x67] = ("", "")
self.hexcodes[0x77] = ("", "")
self.hexcodes[0x87] = ("", "")
self.hexcodes[0x97] = ("", "")
self.hexcodes[0xA7] = ("", "")
self.hexcodes[0xB7] = ("", "")
self.hexcodes[0xC7] = ("", "")
self.hexcodes[0xD7] = ("", "")
self.hexcodes[0xE7] = ("", "")
self.hexcodes[0xF7] = ("", "")
self.hexcodes[0x08] = ("php", "implicit")
self.hexcodes[0x18] = ("clc", "implicit")
self.hexcodes[0x28] = ("plp", "implicit")
self.hexcodes[0x38] = ("sec", "implicit")
self.hexcodes[0x48] = ("pha", "implicit")
self.hexcodes[0x58] = ("cli", "implicit")
self.hexcodes[0x68] = ("pla", "implicit")
self.hexcodes[0x78] = ("sei", "implicit")
self.hexcodes[0x88] = ("dey", "implicit")
self.hexcodes[0x98] = ("tya", "implicit")
self.hexcodes[0xA8] = ("tay", "implicit")
self.hexcodes[0xB8] = ("clv", "implicit")
self.hexcodes[0xC8] = ("iny", "implicit")
self.hexcodes[0xD8] = ("cld", "implicit")
self.hexcodes[0xE8] = ("inx", "implicit")
self.hexcodes[0xF8] = ("sed", "implicit")
self.hexcodes[0x09] = ("ora", "immediate")
self.hexcodes[0x19] = ("ora", "absolutey")
self.hexcodes[0x29] = ("and", "immediate")
self.hexcodes[0x39] = ("and", "absolutey")
self.hexcodes[0x49] = ("eor", "immediate")
self.hexcodes[0x59] = ("eor", "absolutey")
self.hexcodes[0x69] = ("adc", "immediate")
self.hexcodes[0x79] = ("adc", "absolutey")
self.hexcodes[0x89] = ("bit", "immediate")
self.hexcodes[0x99] = ("sta", "absolutey")
self.hexcodes[0xA9] = ("lda", "immediate")
self.hexcodes[0xB9] = ("lda", "absolutey")
self.hexcodes[0xC9] = ("cmp", "immediate")
self.hexcodes[0xD9] = ("cmp", "absolutey")
self.hexcodes[0xE9] = ("sbc", "immediate")
self.hexcodes[0xF9] = ("sbc", "absolutey")
self.hexcodes[0x0A] = ("asl", "accumulator")
self.hexcodes[0x1A] = ("ina", "accumulator")
self.hexcodes[0x2A] = ("rol", "accumulator")
self.hexcodes[0x3A] = ("dea", "accumulator")
self.hexcodes[0x4A] = ("lsr", "accumulator")
self.hexcodes[0x5A] = ("phy", "implicit")
self.hexcodes[0x6A] = ("ror", "accumulator")
self.hexcodes[0x7A] = ("ply", "implicit")
self.hexcodes[0x8A] = ("txa", "implicit")
self.hexcodes[0x9A] = ("txs", "implicit")
self.hexcodes[0xAA] = ("tax", "implicit")
self.hexcodes[0xBA] = ("tsx", "implicit")
self.hexcodes[0xCA] = ("dex", "implicit")
self.hexcodes[0xDA] = ("phx", "implicit")
self.hexcodes[0xEA] = ("nop", "implicit")
self.hexcodes[0xFA] = ("plx", "implicit")
self.hexcodes[0x0B] = ("", "")
self.hexcodes[0x1B] = ("", "")
self.hexcodes[0x2B] = ("", "")
self.hexcodes[0x3B] = ("", "")
self.hexcodes[0x4B] = ("", "")
self.hexcodes[0x5B] = ("", "")
self.hexcodes[0x6B] = ("", "")
self.hexcodes[0x7B] = ("", "")
self.hexcodes[0x8B] = ("", "")
self.hexcodes[0x9B] = ("", "")
self.hexcodes[0xAB] = ("", "")
self.hexcodes[0xBB] = ("", "")
self.hexcodes[0xCB] = ("", "")
self.hexcodes[0xDB] = ("", "")
self.hexcodes[0xEB] = ("", "")
self.hexcodes[0xFB] = ("", "")
self.hexcodes[0x0C] = ("tsb", "absolute")
self.hexcodes[0x1C] = ("trb", "absolute")
self.hexcodes[0x2C] = ("bit", "absolute")
self.hexcodes[0x3C] = ("bit", "absolutex")
self.hexcodes[0x4C] = ("jmp", "absolute")
self.hexcodes[0x5C] = ("", "")
self.hexcodes[0x6C] = ("jmp", "absoluteindirect")
self.hexcodes[0x7C] = ("jmp", "absoluteindexedindirect")
self.hexcodes[0x8C] = ("sty", "absolute")
self.hexcodes[0x9C] = ("stz", "absolute")
self.hexcodes[0xAC] = ("ldy", "absolute")
self.hexcodes[0xBC] = ("ldy", "absolutex")
self.hexcodes[0xCC] = ("cpy", "absolute")
self.hexcodes[0xDC] = ("", "")
self.hexcodes[0xEC] = ("cpx", "absolute")
self.hexcodes[0xFC] = ("", "")
self.hexcodes[0x0D] = ("ora", "absolute")
self.hexcodes[0x1D] = ("ora", "absolutex")
self.hexcodes[0x2D] = ("and", "absolute")
self.hexcodes[0x3D] = ("and", "absolutex")
self.hexcodes[0x4D] = ("eor", "absolute")
self.hexcodes[0x5D] = ("eor", "absolutex")
self.hexcodes[0x6D] = ("adc", "absolute")
self.hexcodes[0x7D] = ("adc", "absolutex")
self.hexcodes[0x8D] = ("sta", "absolute")
self.hexcodes[0x9D] = ("sta", "absolutex")
self.hexcodes[0xAD] = ("lda", "absolute")
self.hexcodes[0xBD] = ("lda", "absolutex")
self.hexcodes[0xCD] = ("cmp", "absolute")
self.hexcodes[0xDD] = ("cmp", "absolutex")
self.hexcodes[0xED] = ("sbc", "absolute")
self.hexcodes[0xFD] = ("sbc", "absolutex")
self.hexcodes[0x0E] = ("asl", "absolute")
self.hexcodes[0x1E] = ("asl", "absolutex")
self.hexcodes[0x2E] = ("rol", "absolute")
self.hexcodes[0x3E] = ("rol", "absolutex")
self.hexcodes[0x4E] = ("lsr", "absolute")
self.hexcodes[0x5E] = ("lsr", "absolutex")
self.hexcodes[0x6E] = ("ror", "absolute")
self.hexcodes[0x7E] = ("ror", "absolutex")
self.hexcodes[0x8E] = ("stx", "absolute")
self.hexcodes[0x9E] = ("stz", "absolutex")
self.hexcodes[0xAE] = ("ldx", "absolute")
self.hexcodes[0xBE] = ("ldx", "absolutey")
self.hexcodes[0xCE] = ("dec", "absolute")
self.hexcodes[0xDE] = ("dec", "absolutex")
self.hexcodes[0xEE] = ("inc", "absolute")
self.hexcodes[0xFE] = ("inc", "absolutex")
self.hexcodes[0x0F] = ("", "")
self.hexcodes[0x1F] = ("", "")
self.hexcodes[0x2F] = ("", "")
self.hexcodes[0x3F] = ("", "")
self.hexcodes[0x4F] = ("", "")
self.hexcodes[0x5F] = ("", "")
self.hexcodes[0x6F] = ("", "")
self.hexcodes[0x7F] = ("", "")
self.hexcodes[0x8F] = ("", "")
self.hexcodes[0x9F] = ("", "")
self.hexcodes[0xAF] = ("", "")
self.hexcodes[0xBF] = ("", "")
self.hexcodes[0xCF] = ("", "")
self.hexcodes[0xDF] = ("", "")
self.hexcodes[0xEF] = ("", "")
self.hexcodes[0xFF] = ("", "")
self.otherhexcodes = dict() # Make another list for synonyms
for hexval in xrange(256):
self.otherhexcodes[hexval] = ("", "")
self.otherhexcodes[0x1A] = ("inc", "accumulator")
self.otherhexcodes[0x3A] = ("dec", "accumulator")
self.otherhexcodes[0x90] = ("blt", "relative")
self.otherhexcodes[0xB0] = ("bge", "relative")
self.hexmap = dict()
for hexval in xrange(256):
op, mode = self.hexcodes[hexval]
astring = op + mode
if len(astring) > 1:
self.hexmap[astring] = hexval
op, mode = self.otherhexcodes[hexval]
astring = op + mode
if len(astring) > 1:
self.hexmap[astring] = hexval
# implicit ~ "implicit"
# immediate #num ~ "immediate"
# accumulator A ~ "accumulator"
# absolute $2000 ~ "absolute"
# zero page $20 ~ "zeropage"
# absolute indexed x $5000,X ~ "absolutex"
# absolute indexed y $5000,y ~ "absolutey"
# zeropage indexed x $20,X ~ "zeropagex"
# zeropage indexed y $20,Y ~ "zeropagey"
# relative +10 (or label) ~ "relative"
# zeropage indexed indirect x ($20,X) ~ "zeropageindexedindirectx"
# zeropage indexed indirect y ($20),Y ~ "zeropageindexedindirecty"
# absolute indexed indirect ($5000,X) - only JMP ~ "absoluteindexedindirect"
# zeropage indirect ($20) ~ "zeropageindirect"
# absolute indirect ($5000) - only JMP ~ "absoluteindirect"
def addrmode_length(self, addrmode):
if addrmode == "implicit":
return 0
if addrmode == "immediate":
return 1
if addrmode == "accumulator":
return 0
if addrmode == "absolute":
return 2
if addrmode == "zeropage":
return 1
if addrmode == "absolutex":
return 2
if addrmode == "absolutey":
return 2
if addrmode == "zeropagex":
return 1
if addrmode == "zeropagey":
return 1
if addrmode == "relative":
return 1
if addrmode == "zeropageindexedindirectx":
return 1
if addrmode == "zeropageindexedindirecty":
return 1
if addrmode == "absoluteindexedindirect":
return 2
if addrmode == "zeropageindirect":
return 1
if addrmode == "absoluteindirect":
return 2
def firstpasstext(self, thetuple):
(offset, linenumber, labelstring, opcode_val, lowbyte, highbyte, opcode, operand, addressmode, value, comment,
extrabytes, num_extrabytes, linetext) = thetuple
a = ("%d" % linenumber).ljust(4)
if (labelstring != None):
b = (": %s" % labelstring).ljust(10)
else:
b = " "
if (opcode_val == None):
c = " "
else:
if (opcode_val > -1):
c = "%02X " % opcode_val
else:
c = "?? "
if (lowbyte == None):
d = " "
else:
if (lowbyte > -1):
d = "%02X " % lowbyte
else:
d = "?? "
if (highbyte == None):
e = " "
else:
if (highbyte > -1):
e = "%02X " % highbyte
else:
e = "?? "
# Print the opcode in 4 spaces
if (opcode == None):
f = " "
else:
f = opcode.ljust(4)
# Either print the operand in 10 spaces or print 10 spaces
# when there is no operand
if (operand == None):
g = " "
else:
if (len(operand) > 0):
g = operand.ljust(10)
else:
g = " "
h = comment
astring = a + b + c + d + e + f + g + h
self.debug(1, astring)
return astring
def secondpasstext(self, thetuple):
(offset, linenumber, labelstring, opcode_val, lowbyte, highbyte, opcode, operand, addressmode, value, comment,
extrabytes, num_extrabytes, linetext) = thetuple
a = ("%d " % linenumber).ljust(5)
aa = ("%04X " % offset)
if (labelstring != None) and (labelstring != ""):
b = (": %s:" % labelstring).ljust(10)
else:
b = ": "
if (opcode_val == None):
c = " "
else:
if (opcode_val > -1):
c = "%02X " % opcode_val
else:
c = "?? "
if (lowbyte == None):
d = " "
else:
if (lowbyte > -1):
d = "%02X " % lowbyte
else:
d = "?? "
if (highbyte == None):
e = " "
else:
if (highbyte > -1):
e = "%02X " % highbyte
else:
e = "?? "
# Print the opcode in 4 spaces
if (opcode == None):
f = " "
else:
f = opcode.ljust(4)
if (operand == None):
g = " "
else:
if (len(operand) > 0):
g = operand.ljust(10)
else:
g = " "
h = comment
astring = a + aa + b + c + d + e + f + g + h
self.debug(1, astring)
self.debug(2, thetuple)
# If there are extra bytes from a db, dw, dq, do or text operator,
# print the resulting hex bytes on the next line.
if (extrabytes != None) and (len(extrabytes) > 1):
hexchars = ""
index = 0
for index in range(0, len(extrabytes) - 1):
hexchars = hexchars + "%02X " % extrabytes[index]
hexchars = hexchars + "%02X" % extrabytes[len(extrabytes) - 1]
bytestring = a + aa + ": " + hexchars
self.debug(1, bytestring)
return astring + "\n" + bytestring
return astring
# Separate out the label, opcode, operand and comment fields.
# Identify the address mode as we go along
# The results end up in self.allstuff in a tuple per entry
# -1 in fields indicates a value not known yet
# None in a field indicates that it doesn't exist
def parse_line(self, thestring):
linenumber = self.line
self.line += 1
thetext = "LINE #" + ("%d" % linenumber).ljust(5) + (": %s" % thestring)
self.debug(2, thetext)
mystring, comment = self.strip_comments(thestring)
labelstring, mystring = self.strip_label(mystring, linenumber)
opcode_anycase, operand = self.strip_opcode(mystring, linenumber)
opcode = self.check_opcode(opcode_anycase, linenumber)
premode, value = self.identify_addressmodeformat(operand, linenumber)
addressmode = self.identify_addressmode(opcode, premode, value, linenumber)
self.debug(3, "PARSE LINE: opcode=%s addressmode=%s" % (str(opcode), addressmode))
if (opcode != None) and (addressmode != "UNDECIDED"):
astring = opcode + addressmode
self.debug(3, "PARSE LINE 2 astring=%s" % astring)
if astring in self.hexmap:
self.debug(3, "PARSE LINE 3 astring=%s self.hexmap[astring]=0x%x" % (astring, self.hexmap[astring]))
opcode_val = self.hexmap[astring]
else:
opcode_val = None
else:
opcode_val = None
astring = ""
if (self.addrmode_length(addressmode) == 0):
lowbyte = None
highbyte = None
elif (self.addrmode_length(addressmode) == 1) and (self.decode_value(value) != -1):
lowbyte = self.decode_value(value) & 0x00FF
highbyte = None
elif (self.addrmode_length(addressmode) == 2) and (self.decode_value(value) != -1):
lowbyte = self.decode_value(value) & 0x00FF
highbyte = ((self.decode_value(value) & 0xFF00) >> 8) & 0x00FF
elif (self.addrmode_length(addressmode) == 1) and (self.decode_value(value) == -1):
lowbyte = -1
highbyte = None
elif (self.addrmode_length(addressmode) == 2) and (self.decode_value(value) == -1):
lowbyte = -1
highbyte = -1
else:
lowbyte = None
highbyte = None
offset = -1
# Handle switches between little endian and big endian
if (opcode == "le"):
self.littleendian = True
if (opcode == "be"):
self.littleendian = False
# interpret extra bytes from the db, dw, ddw, dqw directives.
extrabytes = list()
if (opcode == "db" or opcode == "dw" or opcode == "ddw" or opcode == "dqw"):
num_extrabytes = self.count_extrabytes(opcode, operand)
else:
num_extrabytes = None
# We are moving the extrabytes parsing to pass 3, so we can
# add label addresses into DWs and have the label defined when we need it.
#
# if (opcode=="db") and (operand != None) and (len(operand) > 0):
# extrabytes = self.decode_extrabytes(linenumber, thestring, operand)
# elif (opcode=="dw") and (operand != None) and (len(operand) > 0):
# extrabytes = self.decode_extrawords(linenumber, thestring, operand)
# elif (opcode=="ddw") and (operand != None) and (len(operand) > 0):
# extrabytes = self.decode_extradoublewords(linenumber, thestring, operand)
# elif (opcode=="dqw") and (operand != None) and (len(operand) > 0):
# extrabytes = self.decode_extraquadwords(linenumber, thestring, operand)
linetext = thestring
thetuple = (
offset, linenumber, labelstring, opcode_val, lowbyte, highbyte, opcode, operand, addressmode, value, comment,
extrabytes, num_extrabytes, linetext)
self.allstuff.append(thetuple)
self.firstpasstext(thetuple)
self.debug(2, "addressmode = %s" % addressmode)
self.debug(2, str(self.allstuff[linenumber - 1]))
self.debug(2, "-----------------------")
# Perform the three passes of the assembly
def assemble(self, lines):
self.clear_state()
# First pass, parse each line for label, opcode, operand and comments
self.debug(1, "First Pass")
for line in lines:
self.parse_line(line)
# Second pass, compute the offsets and populate the symbol table
self.debug(1, "Second Pass")
self.symbols = dict()
# Default to 0x0000. ORG directive overrides
self.address = 0x0000
# Add the offset to each line by counting the opcodes and operands
for i in xrange(len(self.allstuff)):
tuple = self.allstuff[i]
(offset, linenumber, labelstring, opcode_val, lowbyte, highbyte, opcode, operand, addressmode, value,
comment, extrabytes, num_extrabytes, linetext) = tuple
# Handle ORG directive
if (opcode == "org"):
newaddr = self.decode_value(value)
if (newaddr != -1):
self.address = newaddr & 0x00ffff
offset = self.address
if (opcode_val != None):
self.address += 1
if (lowbyte != None):
self.address += 1
if (highbyte != None):
self.address += 1
# self.address += len(extrabytes)
if type(num_extrabytes) == int:
self.address += num_extrabytes
# If there is a label, we now know its address. So store it in the symbol table
if (labelstring != None) and (labelstring != ""):
self.symbols[labelstring] = offset
tuple = (
offset, linenumber, labelstring, opcode_val, lowbyte, highbyte, opcode, operand, addressmode, value,
comment, extrabytes, num_extrabytes, linetext)
self.allstuff[i] = tuple
self.secondpasstext(tuple)
# Print out the symbol table
self.debug(1, "Symbol Table")
for label in self.symbols:
offset = self.symbols[label]
astring = (("%s" % label).ljust(10)) + (" = " + "$%04X" % offset)
self.debug(1, astring)
# Third pass
# Go through filling in the unknown values from the symbol table
self.debug(1, "Third Pass")
self.listing = list()
self.instruction_map = [None] * 65536 # A map for where the instructions are so the debugger can know
# where the start byte of real instructions are.
# The opcode is entered in the location
# non instruction locations are set to None.
for i in xrange(len(self.allstuff)):
tuple = self.allstuff[i]
(offset, linenumber, labelstring, opcode_val, lowbyte, highbyte, opcode, operand, addressmode, value,
comment, extrabytes, num_extrabytes, linetext) = tuple
# Compute the offset for relative branches
if (lowbyte == -1) and (addressmode == "relative"):
destination = self.symbols[value]
start = offset + 2 # Delta is relative to the first byte after the branch instruction
delta = destination - start
lowbyte = delta & 0x00ff
if (delta > 127) or (delta < -128):
self.warning(linenumber, "", "branch can't reach destination, delta is %d" % delta)
elif (lowbyte == -1) and (
(addressmode in self.modeswithlowbytevalue) or (addressmode in self.modeswithhighbytevalue)):
if (value in self.symbols):
newvalue = self.symbols[value]
lowbyte = newvalue & 0x00ff
if (highbyte == -1) and (addressmode in self.modeswithhighbytevalue):
if (value in self.symbols):
newvalue = self.symbols[value]
highbyte = ((newvalue & 0xff00) >> 8) & 0x00ff
# populate the extrabytes lists
if (opcode == "db") and (operand != None) and (len(operand) > 0):
extrabytes = self.decode_extrabytes(linenumber, linetext, operand)
elif (opcode == "dw") and (operand != None) and (len(operand) > 0):
extrabytes = self.decode_extrawords(linenumber, linetext, operand)
elif (opcode == "ddw") and (operand != None) and (len(operand) > 0):
extrabytes = self.decode_extradoublewords(linenumber, linetext, operand)
elif (opcode == "dqw") and (operand != None) and (len(operand) > 0):
extrabytes = self.decode_extraquadwords(linenumber, linetext, operand)
tuple = (
offset, linenumber, labelstring, opcode_val, lowbyte, highbyte, opcode, operand, addressmode, value,
comment, extrabytes, num_extrabytes, linetext)
self.allstuff[i] = tuple
line = self.secondpasstext(tuple)
self.listing.append(line)
# Fill in the instruction map
# This makes it easy for an interactive disassembler to
# know what is instruction code and what is data.
# By signaling which are operand bytes, it's easy to
# disassemble backwards from the current position
# None = Not an instruction or operand
# positive numbers < 256 = an opcode
# -1 = first operand byte
            # -2 = second operand byte
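            # Worked example (hypothetical): after assembling "lda $1234" at address $0200,
            # the map would contain:
            #   instruction_map[0x0200] = 0xAD  (opcode byte for lda absolute)
            #   instruction_map[0x0201] = -1    (first operand byte, low byte of $1234)
            #   instruction_map[0x0202] = -2    (second operand byte, high byte of $1234)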
if opcode_val != None:
self.instruction_map[offset] = opcode_val
if self.addrmode_length(addressmode) > 0:
self.instruction_map[offset + 1] = -1 # -1 signals the first operand byte
if self.addrmode_length(addressmode) > 1:
self.instruction_map[offset + 2] = -2 # -2 signals the second operand byte
# write generated bytes to object code map
addr = offset
if (opcode_val != None) and (opcode_val != -1):
self.object_code[addr] = opcode_val
addr = addr + 1
if (lowbyte != None):
self.object_code[addr] = lowbyte
addr = addr + 1
if (highbyte != None):
self.object_code[addr] = highbyte
addr = addr + 1
if (extrabytes != None):
for i in extrabytes:
self.object_code[addr] = i
addr = addr + 1
listingtext = list()
listingtext.append("LISTING")
listingtext += self.listing
symboltext = list()
symboltext.append("SYMBOL TABLE")
for label in self.symbols:
offset = self.symbols[label]
astring = (("%s" % label).ljust(10)) + (" = " + "$%04X" % offset)
symboltext.append(astring)
# print "LISTING"
# for i in self.listing:
# print i
#
# print
# print "SYMBOL TABLE"
# for label in self.symbols:
# offset = self.symbols[label]
# astring=(("%s" % label).ljust(10)) +(" = "+"$%04X" % offset)
# print astring
#
# print
# self.print_object_code()
return (listingtext, symboltext)
def print_object_code(self):
print "OBJECT CODE"
# Insert a star when there are empty spots in the memory map
i = 0
astring = ""
printed_a_star = 0
while (i < 65536):
if self.object_code[i] != -1:
printed_a_star = 0
astring = "%04X: %02X" % (i, self.object_code[i])
localrun = 1
i = i + 1
if (i < 65536):
nextval = self.object_code[i]
while (nextval != -1) and (localrun < 16):
astring = astring + " %02X" % self.object_code[i]
i = i + 1
localrun = localrun + 1
if (i < 65536):
nextval = self.object_code[i]
else:
nextval = -1
print astring
else:
print astring
else:
if (printed_a_star == 0):
print "*"
printed_a_star = 1
i = i + 1
def srecord_checksum(self, astring):
checksum = 0
for i in xrange(len(astring) / 2):
hexpair = "0x" + astring[(i * 2):(i * 2) + 2]
bytevalue = eval(hexpair)
checksum = checksum + bytevalue
checksum = checksum & 0x0ff
checksum = checksum ^ 0xff
return "%02x" % checksum
def str2asciibytes(self, astring):
ascii = ""
for c in astring:
num = ord(c)
ascii += "%02x" % num
return ascii
def srecords(self, version, revision, module_name, comment):
# print "S19 FORMAT OUTPUT"
# print
i = 0
astring = ""
theoutput = list()
bytelist = list()
bytecount = 0
address = 0
# Make the Header Record
if len(module_name) > 20:
modname_trimmed = module_name[:20]
else:
modname_trimmed = module_name.ljust(20)
if (len(comment) > 36):
comment_trimmed = comment[:36]
else:
comment_trimmed = comment
text = "%02x%02x" % (version, revision)
        text = text + self.str2asciibytes(modname_trimmed + comment_trimmed)
addr = "0000"
countedpart = addr + text
length = "%02x" % (len(addr + text))
checksum = self.srecord_checksum(length + addr + text)
header = "S0" + length + addr + text + checksum
theoutput.append(header)
last_addr = 0
while (i < 65536):
if self.object_code[i] != -1:
address = i
values = list()
values.append(self.object_code[i])
localrun = 1
i = i + 1
if (i < 65536):
nextval = self.object_code[i]
while (nextval != -1) and (localrun < 16):
values.append(nextval)
last_addr = i
i = i + 1
localrun = localrun + 1
if (i < 65536):
nextval = self.object_code[i]
else:
nextval = -1
# We reached 16 bytes, or hit the end or hit -1 So
# Output the data record
data = ""
for value in values:
data = ("%02X" % value) + data
addr = "%02x%02x" % (((address >> 8) & 0xff), (address & 0xff))
length = "%02x" % (len(addr + text))
checksum = self.srecord_checksum(length + addr + data)
record = "S1" + length + addr + data + checksum
theoutput.append(record)
else:
i = i + 1
# Output the count
record_count = len(theoutput)
data = "%02x%02x" % (((record_count >> 8) & 0xff), (record_count & 0xff))
length = "03"
checksum = self.srecord_checksum(length + data)
record = "S5" + length + data + checksum
theoutput.append(record)
# Output the terminator
length = "03"
addr = "%02x%02x" % (((last_addr >> 8) & 0xff), (last_addr & 0xff))
checksum = self.srecord_checksum(length + addr)
record = "S9" + length + addr + checksum
theoutput.append(record)
return (theoutput)
def print_srecords(self, version, revision, module_name, comment):
lines = self.srecords(version, revision, module_name, comment)
for line in lines:
print line
def intelhex(self):
# print "INTEL HEX FORMAT OUTPUT"
# print
# Insert a star when there are empty spots in the memory map
i = 0
astring = ""
theoutput = list()
bytelist = list()
bytecount = 0
address = 0
datarecord = "00"
eofrecord = ":00000001FF"
while (i < 65536):
if self.object_code[i] != -1:
address = i
values = list()
values.append(self.object_code[i])
localrun = 1
i = i + 1
if (i < 65536):
nextval = self.object_code[i]
while (nextval != -1) and (localrun < 16):
values.append(nextval)
i = i + 1
localrun = localrun + 1
if (i < 65536):
nextval = self.object_code[i]
else:
nextval = -1
length = len(values)
astring = ":%02X%04x" % (length, address)
astring += datarecord
for value in values:
astring += "%02X" % value
theoutput.append(astring)
else:
length = len(values)
astring = "addr=%04x len=%02x data=" % (address, length)
for value in values:
astring += "%02X" % value
theoutput.append(astring)
else:
i = i + 1
theoutput.append(eofrecord)
return theoutput
def print_intelhex(self):
lines = self.intelhex()
for line in lines:
print line
# returns entire 64K memory as hex in the form of 64 bytes per line.
def hex(self, noaddress=False):
# print "HEX FORMAT OUTPUT"
# print
theoutput = list()
for i in xrange(1024):
addr = 64 * i
# Prepend with an address field, or not if not desired
if noaddress:
line = ""
else:
line = "%04x:" % addr
# add the bytes as hex to the line
for j in xrange(64):
val = self.object_code[(i * 64) + j]
# Range check the bytes
if val < 0:
val = 0
if val > 255:
val = 255
line = line + ("%02x" % val)
theoutput.append(line)
return theoutput
def print_hex(self):
lines = self.hex()
for line in lines:
print line
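# Hedged usage sketch (added for illustration; the demo source lines below are
# made-up examples, not part of the original assembler):
if __name__ == "__main__":
    demo_source = [
        "        org $1000",
        "start:  ldx #$00",
        "loop:   inx",
        "        bne loop",
        "        rts",
    ]
    demo_asm = asm6502(debug=0)
    demo_listing, demo_symbols = demo_asm.assemble(demo_source)
    for demo_line in demo_listing:
        print demo_line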
| gpl-2.0 |
sbalde/edx-platform | common/lib/xmodule/xmodule/capa_module.py | 17 | 8819 | """Implements basics of Capa, including class CapaModule."""
import json
import logging
import sys
from lxml import etree
from pkg_resources import resource_string
import dogstats_wrapper as dog_stats_api
from .capa_base import CapaMixin, CapaFields, ComplexEncoder
from capa import responsetypes
from .progress import Progress
from xmodule.x_module import XModule, module_attr, DEPRECATION_VSCOMPAT_EVENT
from xmodule.raw_module import RawDescriptor
from xmodule.exceptions import NotFoundError, ProcessingError
log = logging.getLogger("edx.courseware")
class CapaModule(CapaMixin, XModule):
"""
An XModule implementing LonCapa format problems, implemented by way of
capa.capa_problem.LoncapaProblem
CapaModule.__init__ takes the same arguments as xmodule.x_module:XModule.__init__
"""
icon_class = 'problem'
js = {
'coffee': [
resource_string(__name__, 'js/src/capa/display.coffee'),
resource_string(__name__, 'js/src/javascript_loader.coffee'),
],
'js': [
resource_string(__name__, 'js/src/collapsible.js'),
resource_string(__name__, 'js/src/capa/imageinput.js'),
resource_string(__name__, 'js/src/capa/schematic.js'),
]
}
js_module_name = "Problem"
css = {'scss': [resource_string(__name__, 'css/capa/display.scss')]}
def __init__(self, *args, **kwargs):
"""
Accepts the same arguments as xmodule.x_module:XModule.__init__
"""
super(CapaModule, self).__init__(*args, **kwargs)
def handle_ajax(self, dispatch, data):
"""
This is called by courseware.module_render, to handle an AJAX call.
`data` is request.POST.
Returns a json dictionary:
{ 'progress_changed' : True/False,
'progress' : 'none'/'in_progress'/'done',
<other request-specific values here > }
"""
handlers = {
'hint_button': self.hint_button,
'problem_get': self.get_problem,
'problem_check': self.check_problem,
'problem_reset': self.reset_problem,
'problem_save': self.save_problem,
'problem_show': self.get_answer,
'score_update': self.update_score,
'input_ajax': self.handle_input_ajax,
'ungraded_response': self.handle_ungraded_response
}
_ = self.runtime.service(self, "i18n").ugettext
generic_error_message = _(
"We're sorry, there was an error with processing your request. "
"Please try reloading your page and trying again."
)
not_found_error_message = _(
"The state of this problem has changed since you loaded this page. "
"Please refresh your page."
)
if dispatch not in handlers:
return 'Error: {} is not a known capa action'.format(dispatch)
before = self.get_progress()
try:
result = handlers[dispatch](data)
except NotFoundError as err:
_, _, traceback_obj = sys.exc_info() # pylint: disable=redefined-outer-name
raise ProcessingError(not_found_error_message), None, traceback_obj
except Exception as err:
_, _, traceback_obj = sys.exc_info() # pylint: disable=redefined-outer-name
raise ProcessingError(generic_error_message), None, traceback_obj
after = self.get_progress()
result.update({
'progress_changed': after != before,
'progress_status': Progress.to_js_status_str(after),
'progress_detail': Progress.to_js_detail_str(after),
})
return json.dumps(result, cls=ComplexEncoder)
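    # Illustrative shape of the JSON string produced above (the handler-specific
    # keys and the progress_detail format are assumptions for the example; only the
    # progress_* keys are guaranteed by this method):
    #   {"...handler-specific values...": "...",
    #    "progress_changed": true, "progress_status": "done", "progress_detail": "1/1"}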
class CapaDescriptor(CapaFields, RawDescriptor):
"""
Module implementing problems in the LON-CAPA format,
as implemented by capa.capa_problem
"""
INDEX_CONTENT_TYPE = 'CAPA'
module_class = CapaModule
has_score = True
template_dir_name = 'problem'
mako_template = "widgets/problem-edit.html"
js = {'coffee': [resource_string(__name__, 'js/src/problem/edit.coffee')]}
js_module_name = "MarkdownEditingDescriptor"
css = {
'scss': [
resource_string(__name__, 'css/editor/edit.scss'),
resource_string(__name__, 'css/problem/edit.scss')
]
}
# The capa format specifies that what we call max_attempts in the code
# is the attribute `attempts`. This will do that conversion
metadata_translations = dict(RawDescriptor.metadata_translations)
metadata_translations['attempts'] = 'max_attempts'
@classmethod
def filter_templates(cls, template, course):
"""
Filter template that contains 'latex' from templates.
Show them only if use_latex_compiler is set to True in
course settings.
"""
return ('latex' not in template['template_id'] or course.use_latex_compiler)
def get_context(self):
_context = RawDescriptor.get_context(self)
_context.update({
'markdown': self.markdown,
'enable_markdown': self.markdown is not None,
'enable_latex_compiler': self.use_latex_compiler,
})
return _context
# VS[compat]
    # TODO (cpennington): Delete this method once all fall 2012 courses are being
# edited in the cms
@classmethod
def backcompat_paths(cls, path):
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=["location:capa_descriptor_backcompat_paths"]
)
return [
'problems/' + path[8:],
path[8:],
]
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(CapaDescriptor, self).non_editable_metadata_fields
non_editable_fields.extend([
CapaDescriptor.due,
CapaDescriptor.graceperiod,
CapaDescriptor.force_save_button,
CapaDescriptor.markdown,
CapaDescriptor.text_customization,
CapaDescriptor.use_latex_compiler,
])
return non_editable_fields
@property
def problem_types(self):
""" Low-level problem type introspection for content libraries filtering by problem type """
tree = etree.XML(self.data) # pylint: disable=no-member
registered_tags = responsetypes.registry.registered_tags()
return set([node.tag for node in tree.iter() if node.tag in registered_tags])
@property
def has_responsive_ui(self):
"""
Returns whether this module has support for responsive UI.
"""
return self.lcp.has_responsive_ui
def index_dictionary(self):
"""
Return dictionary prepared with module content and type for indexing.
"""
result = super(CapaDescriptor, self).index_dictionary()
if not result:
result = {}
index = {
'content_type': self.INDEX_CONTENT_TYPE,
'problem_types': list(self.problem_types),
"display_name": self.display_name
}
result.update(index)
return result
# Proxy to CapaModule for access to any of its attributes
answer_available = module_attr('answer_available')
check_button_name = module_attr('check_button_name')
check_button_checking_name = module_attr('check_button_checking_name')
check_problem = module_attr('check_problem')
choose_new_seed = module_attr('choose_new_seed')
closed = module_attr('closed')
get_answer = module_attr('get_answer')
get_problem = module_attr('get_problem')
get_problem_html = module_attr('get_problem_html')
get_state_for_lcp = module_attr('get_state_for_lcp')
handle_input_ajax = module_attr('handle_input_ajax')
hint_button = module_attr('hint_button')
handle_problem_html_error = module_attr('handle_problem_html_error')
handle_ungraded_response = module_attr('handle_ungraded_response')
is_attempted = module_attr('is_attempted')
is_correct = module_attr('is_correct')
is_past_due = module_attr('is_past_due')
is_submitted = module_attr('is_submitted')
lcp = module_attr('lcp')
make_dict_of_responses = module_attr('make_dict_of_responses')
new_lcp = module_attr('new_lcp')
publish_grade = module_attr('publish_grade')
rescore_problem = module_attr('rescore_problem')
reset_problem = module_attr('reset_problem')
save_problem = module_attr('save_problem')
set_state_from_lcp = module_attr('set_state_from_lcp')
should_show_check_button = module_attr('should_show_check_button')
should_show_reset_button = module_attr('should_show_reset_button')
should_show_save_button = module_attr('should_show_save_button')
update_score = module_attr('update_score')
| agpl-3.0 |
joeyoung658/A-Level_2016-18 | Challenges/Hangman/hangman.py | 1 | 2597 | """ Hangman Game (v1.0)
Name: Joe Young
Date: 24/09/2016
"""
#Joe Young
#06/09/2016
import sys
import platform
if "windows" == platform.system():
sys.path.append(sys.path[0]+'\\Extra')
else:
sys.path.append(sys.path[0]+'//Extra')
from random import *
from time import *
import hangmanp
def load_file(filename):#Opens file and returns a full list of words
file = open (filename)
word_list = file.readlines()
file.close()
return word_list
def select_word(): #gets single random word from list
word_list = load_file("wordlists.txt")
single_word = (word_list[randint(0, len(word_list)-1)])
return single_word
def again():
while 0 != 1:
again = str(input("Would you like to play again?\n-")).lower()
if again == "yes":
print("You have choosen to play again!\n")
sleep(1)
return main()
elif again == "no":
print("You have choosen to not play again!")
sleep(3)
break
else:
print("Please input a vaild method!\n")
def main():
ran_word = select_word()
li = (len(ran_word)-1)
trys = 0
guess = 0
print(ran_word)
left = ["_" for i in range (li)]
while 0 != 1:
print (left)
letter = str(input("What is your guess?\n-")).lower()
if ran_word.find(letter) == -1:
print ("Your guess was incorrect\n")
if guess != 11:
print (hangmanp.hangman(guess))
guess = guess + 1
else:
if not(len(letter) == 1):
print("Please only entre one letter at a time!\n")
else:
if not((letter) in left):
print("Your guess was correct!\n")
else:
print("You have already correctly guessed", letter, "!\n")
for i in range(li):
if ran_word[i] == letter:
left[i] = letter
if not(("_") in left): #checks to see if user has won the game
print(left)
print("You have won the game!, it took you ", trys, " guesses!\n")
sleep(3)
return again()
break
trys = trys + 1
if not(guess != 11):
print("You have exceed your guesses!\n")
print("You have lost the game!, it took you ", trys, " to fail :(\n")
sleep(5)
return again()
break
if __name__ == "__main__":
main()
| gpl-3.0 |
gzzhanghao/mitmproxy | mitmproxy/builtins/dumper.py | 1 | 7768 | from __future__ import absolute_import, print_function, division
import itertools
import traceback
import click
from mitmproxy import contentviews
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import filt
from netlib import human
from netlib import strutils
def indent(n, text):
l = str(text).strip().splitlines()
pad = " " * n
return "\n".join(pad + i for i in l)
class Dumper():
def __init__(self):
        self.filt = None
self.flow_detail = None
self.outfp = None
self.showhost = None
def echo(self, text, ident=None, **style):
if ident:
text = indent(ident, text)
click.secho(text, file=self.outfp, **style)
if self.outfp:
self.outfp.flush()
def _echo_message(self, message):
if self.flow_detail >= 2 and hasattr(message, "headers"):
headers = "\r\n".join(
"{}: {}".format(
click.style(
strutils.bytes_to_escaped_str(k), fg="blue", bold=True
),
click.style(
strutils.bytes_to_escaped_str(v), fg="blue"
)
)
for k, v in message.headers.fields
)
self.echo(headers, ident=4)
if self.flow_detail >= 3:
try:
content = message.content
except ValueError:
content = message.get_content(strict=False)
if content is None:
self.echo("(content missing)", ident=4)
elif content:
self.echo("")
try:
type, lines = contentviews.get_content_view(
contentviews.get("Auto"),
content,
headers=getattr(message, "headers", None)
)
except exceptions.ContentViewException:
s = "Content viewer failed: \n" + traceback.format_exc()
ctx.log.debug(s)
type, lines = contentviews.get_content_view(
contentviews.get("Raw"),
content,
headers=getattr(message, "headers", None)
)
styles = dict(
highlight=dict(bold=True),
offset=dict(fg="blue"),
header=dict(fg="green", bold=True),
text=dict(fg="green")
)
def colorful(line):
yield u" " # we can already indent here
for (style, text) in line:
yield click.style(text, **styles.get(style, {}))
if self.flow_detail == 3:
lines_to_echo = itertools.islice(lines, 70)
else:
lines_to_echo = lines
lines_to_echo = list(lines_to_echo)
content = u"\r\n".join(
u"".join(colorful(line)) for line in lines_to_echo
)
self.echo(content)
if next(lines, None):
self.echo("(cut off)", ident=4, dim=True)
if self.flow_detail >= 2:
self.echo("")
def _echo_request_line(self, flow):
if flow.request.stickycookie:
stickycookie = click.style(
"[stickycookie] ", fg="yellow", bold=True
)
else:
stickycookie = ""
if flow.client_conn:
client = click.style(
strutils.escape_control_characters(
flow.client_conn.address.host
),
bold=True
)
elif flow.request.is_replay:
client = click.style("[replay]", fg="yellow", bold=True)
else:
client = ""
method = flow.request.method
method_color = dict(
GET="green",
DELETE="red"
).get(method.upper(), "magenta")
method = click.style(
strutils.escape_control_characters(method),
fg=method_color,
bold=True
)
if self.showhost:
url = flow.request.pretty_url
else:
url = flow.request.url
url = click.style(strutils.escape_control_characters(url), bold=True)
httpversion = ""
if flow.request.http_version not in ("HTTP/1.1", "HTTP/1.0"):
# We hide "normal" HTTP 1.
httpversion = " " + flow.request.http_version
line = "{stickycookie}{client} {method} {url}{httpversion}".format(
stickycookie=stickycookie,
client=client,
method=method,
url=url,
httpversion=httpversion
)
self.echo(line)
def _echo_response_line(self, flow):
if flow.response.is_replay:
replay = click.style("[replay] ", fg="yellow", bold=True)
else:
replay = ""
code = flow.response.status_code
code_color = None
if 200 <= code < 300:
code_color = "green"
elif 300 <= code < 400:
code_color = "magenta"
elif 400 <= code < 600:
code_color = "red"
code = click.style(
str(code),
fg=code_color,
bold=True,
blink=(code == 418)
)
reason = click.style(
strutils.escape_control_characters(flow.response.reason),
fg=code_color,
bold=True
)
if flow.response.raw_content is None:
size = "(content missing)"
else:
size = human.pretty_size(len(flow.response.raw_content))
size = click.style(size, bold=True)
arrows = click.style(" <<", bold=True)
line = "{replay} {arrows} {code} {reason} {size}".format(
replay=replay,
arrows=arrows,
code=code,
reason=reason,
size=size
)
self.echo(line)
def echo_flow(self, f):
if f.request:
self._echo_request_line(f)
self._echo_message(f.request)
if f.response:
self._echo_response_line(f)
self._echo_message(f.response)
if f.error:
self.echo(" << {}".format(f.error.msg), bold=True, fg="red")
def match(self, f):
if self.flow_detail == 0:
return False
if not self.filt:
return True
elif f.match(self.filt):
return True
return False
def configure(self, options):
if options.filtstr:
self.filt = filt.parse(options.filtstr)
if not self.filt:
raise exceptions.OptionsError(
"Invalid filter expression: %s" % options.filtstr
)
else:
self.filt = None
self.flow_detail = options.flow_detail
self.outfp = options.tfile
self.showhost = options.showhost
def response(self, f):
if self.match(f):
self.echo_flow(f)
def error(self, f):
if self.match(f):
self.echo_flow(f)
def tcp_message(self, f):
# FIXME: Filter should be applied here
        if self.flow_detail == 0:
return
message = f.messages[-1]
direction = "->" if message.from_client else "<-"
self.echo("{client} {direction} tcp {direction} {server}".format(
client=repr(f.client_conn.address),
server=repr(f.server_conn.address),
direction=direction,
))
self._echo_message(message)
| mit |
mwishoff/mattsWork | checkers/board.py | 1 | 5763 | # This class will represent the checkers board.
"""
~ 1 2 3 4 5 6 7 8
__________________
a | O O O O | a
b | O O O O | b
c | O O O O | c
d | - - - - | d
e | - - - - | e
f | x x x x | f
g | x x x x | g
h | x x x x | h
------------------
~ 1 2 3 4 5 6 7 8
"""
class Board:
def __init__(self):
self.board_neighbors = dict()
self.board_state = dict()
self.board_locations = []
self.init_board_neighbors()
self.init_board_locations()
self.init_board_state()
def print_board(self):
print(" ~ 1 2 3 4 5 6 7 8")
print(" __________________")
print("a | ", self.board_state['a2'], " ", self.board_state['a4'], " ", self.board_state['a6'], " ", self.board_state['a8'], "| a")
print("b |", self.board_state['b1'], " ", self.board_state['b3'], " ", self.board_state['b5'], " ", self.board_state['b7'], " | b")
print("c | ", self.board_state['c2'], " ", self.board_state['c4'], " ", self.board_state['c6'], " ", self.board_state['c8'], "| c")
print("d |", self.board_state['d1'], " ", self.board_state['d3'], " ", self.board_state['d5'], " ", self.board_state['d7'], " | d")
print("e | ", self.board_state['e2'], " ", self.board_state['e4'], " ", self.board_state['e6'], " ", self.board_state['e8'], "| e")
print("f |", self.board_state['f1'], " ", self.board_state['f3'], " ", self.board_state['f5'], " ", self.board_state['f7'], " | f")
print("g | ", self.board_state['g2'], " ", self.board_state['g4'], " ", self.board_state['g6'], " ", self.board_state['g8'], "| g")
print("h |", self.board_state['h1'], " ", self.board_state['h3'], " ", self.board_state['h5'], " ", self.board_state['h7'], " | h")
print(" ------------------")
print(" ~ 1 2 3 4 5 6 7 8")
"""
~ 1 2 3 4 5 6 7 8
__________________
a | O O O O | a
b | O O O O | b
c | O O O O | c
d | - - - - | d
e | - - - - | e
f | x x x x | f
g | x x x x | g
h | x x x x | h
------------------
~ 1 2 3 4 5 6 7 8
"""
def to_string(self):
print("*********************************")
for key in self.board_neighbors:
print("Key: ", key, " Value: ", self.board_neighbors[key])
print("*********************************")
for key in self.board_state:
print("Key: ", key, " Value: ", self.board_state[key])
print("*********************************")
for location in self.board_locations:
print("Location: ", location)
print("*********************************")
def init_board_neighbors(self):
print("Init neighbors")
self.board_neighbors['h1'] = ['g2']
self.board_neighbors['h3'] = ['g2', 'g4']
self.board_neighbors['h5'] = ['g4', 'g6']
self.board_neighbors['h7'] = ['g6', 'g8']
self.board_neighbors['g2'] = ['f1', 'f3', 'h1', 'h3']
self.board_neighbors['g4'] = ['f3', 'f5', 'h3', 'h5']
self.board_neighbors['g6'] = ['f5', 'f7', 'h5', 'h7']
self.board_neighbors['g8'] = ['f7', 'h7']
self.board_neighbors['f1'] = ['e2', 'g2']
self.board_neighbors['f3'] = ['e2', 'e4', 'g2', 'g4']
self.board_neighbors['f5'] = ['e4', 'e6', 'g4', 'g6']
self.board_neighbors['f7'] = ['e6', 'e8', 'g6', 'g8']
self.board_neighbors['e2'] = ['d1', 'd3', 'f1', 'f3']
self.board_neighbors['e4'] = ['d3', 'd5', 'f3', 'f5']
self.board_neighbors['e6'] = ['d5', 'd7', 'f5', 'f7']
self.board_neighbors['e8'] = ['d7', 'f7']
self.board_neighbors['d1'] = ['c2', 'e2']
self.board_neighbors['d3'] = ['c2', 'c4', 'e2', 'e4']
self.board_neighbors['d5'] = ['c4', 'c6', 'e4', 'e6']
self.board_neighbors['d7'] = ['c6', 'c8', 'e6', 'e8']
self.board_neighbors['c2'] = ['b1', 'b3', 'd1', 'd3']
self.board_neighbors['c4'] = ['b3', 'b5', 'd3', 'd5']
self.board_neighbors['c6'] = ['b5', 'b7', 'd5', 'd7']
self.board_neighbors['c8'] = ['b7', 'd7']
self.board_neighbors['b1'] = ['a2', 'c2']
self.board_neighbors['b3'] = ['a2', 'a4', 'c2', 'c4']
self.board_neighbors['b5'] = ['a4', 'a6', 'c4', 'c6']
self.board_neighbors['b7'] = ['a6', 'a8', 'c6', 'c8']
self.board_neighbors['a2'] = ['b1', 'b3']
self.board_neighbors['a4'] = ['b3', 'b5']
self.board_neighbors['a6'] = ['b5', 'b7']
self.board_neighbors['a8'] = ['b7']
def init_board_state(self):
print("Init state")
for location in self.board_locations:
if 'h' in location or 'g' in location or 'f' in location:
self.board_state[location] = 'X'
elif 'e' in location or 'd' in location:
self.board_state[location] = '-'
elif 'c' in location or 'b' in location or 'a' in location:
self.board_state[location] = 'O'
else:
print("ERROR: Init_board_state malfunction.", location, " unknown")
def init_board_locations(self):
print("init locations")
self.board_locations = ['h1', 'h3', 'h5', 'h7',
'g2', 'g4', 'g6', 'g8',
'f1', 'f3', 'f5', 'f7',
'e2', 'e4', 'e6', 'e8',
'd1', 'd3', 'd5', 'd7',
'c2', 'c4', 'c6', 'c8',
'b1', 'b3', 'b5', 'b7',
'a2', 'a4', 'a6', 'a8']
| gpl-3.0 |
FuzzJunket/pelican-plugins | pelican_comment_system/comment.py | 11 | 1929 | # -*- coding: utf-8 -*-
"""
Author: Bernhard Scheirle
"""
from __future__ import unicode_literals
import os
from pelican.contents import Content
from pelican.utils import slugify
from . import avatars
class Comment(Content):
mandatory_properties = ('author', 'date')
default_template = 'None'
def __init__(self, content, metadata, settings, source_path, context):
# Strip the path off the full filename.
name = os.path.split(source_path)[1]
if not hasattr(self, 'slug'):
# compute the slug before initializing the base Content object, so
# it doesn't get set there
# This is required because we need a slug containing the file
# extension.
self.slug = slugify(name, settings.get('SLUG_SUBSTITUTIONS', ()))
super(Comment, self).__init__(content, metadata, settings, source_path,
context)
self.replies = []
# Strip the extension from the filename.
name = os.path.splitext(name)[0]
self.avatar = avatars.getAvatarPath(name, metadata)
self.title = "Posted by: {}".format(metadata['author'])
def addReply(self, comment):
self.replies.append(comment)
def getReply(self, slug):
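        # Recursively search this comment's reply tree for a reply with the given slug.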
for reply in self.replies:
if reply.slug == slug:
return reply
else:
deepReply = reply.getReply(slug)
if deepReply is not None:
return deepReply
return None
def __lt__(self, other):
return self.metadata['date'] < other.metadata['date']
def sortReplies(self):
for r in self.replies:
r.sortReplies()
self.replies = sorted(self.replies)
def countReplies(self):
amount = 0
for r in self.replies:
amount += r.countReplies()
return amount + len(self.replies)
| agpl-3.0 |
core-bitcoin/bitcoin | qa/rpc-tests/sendheaders.py | 49 | 25728 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.blocktools import create_block, create_coinbase
'''
SendHeadersTest -- test behavior of headers messages to announce blocks.
Setup:
- Two nodes, two p2p connections to node0. One p2p connection should only ever
receive inv's (omitted from testing description below, this is our control).
Second node is used for creating reorgs.
Part 1: No headers announcements before "sendheaders"
a. node mines a block [expect: inv]
send getdata for the block [expect: block]
b. node mines another block [expect: inv]
send getheaders and getdata [expect: headers, then block]
c. node mines another block [expect: inv]
peer mines a block, announces with header [expect: getdata]
d. node mines another block [expect: inv]
Part 2: After "sendheaders", headers announcements should generally work.
a. peer sends sendheaders [expect: no response]
peer sends getheaders with current tip [expect: no response]
b. node mines a block [expect: tip header]
c. for N in 1, ..., 10:
* for announce-type in {inv, header}
- peer mines N blocks, announces with announce-type
[ expect: getheaders/getdata or getdata, deliver block(s) ]
- node mines a block [ expect: 1 header ]
Part 3: Headers announcements stop after large reorg and resume after getheaders or inv from peer.
- For response-type in {inv, getheaders}
* node mines a 7 block reorg [ expect: headers announcement of 8 blocks ]
* node mines an 8-block reorg [ expect: inv at tip ]
* peer responds with getblocks/getdata [expect: inv, blocks ]
* node mines another block [ expect: inv at tip, peer sends getdata, expect: block ]
* node mines another block at tip [ expect: inv ]
* peer responds with getheaders with an old hashstop more than 8 blocks back [expect: headers]
* peer requests block [ expect: block ]
* node mines another block at tip [ expect: inv, peer sends getdata, expect: block ]
* peer sends response-type [expect headers if getheaders, getheaders/getdata if mining new block]
* node mines 1 block [expect: 1 header, peer responds with getdata]
Part 4: Test direct fetch behavior
a. Announce 2 old block headers.
Expect: no getdata requests.
b. Announce 3 new blocks via 1 headers message.
Expect: one getdata request for all 3 blocks.
(Send blocks.)
c. Announce 1 header that forks off the last two blocks.
Expect: no response.
d. Announce 1 more header that builds on that fork.
Expect: one getdata request for two blocks.
e. Announce 16 more headers that build on that fork.
Expect: getdata request for 14 more blocks.
f. Announce 1 more header that builds on that fork.
Expect: no response.
Part 5: Test handling of headers that don't connect.
a. Repeat 10 times:
1. Announce a header that doesn't connect.
Expect: getheaders message
2. Send headers chain.
Expect: getdata for the missing blocks, tip update.
b. Then send 9 more headers that don't connect.
Expect: getheaders message each time.
c. Announce a header that does connect.
Expect: no response.
d. Announce 49 headers that don't connect.
Expect: getheaders message each time.
e. Announce one more that doesn't connect.
Expect: disconnect.
'''
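# Maximum time (in seconds) allowed for the node to answer a headers announcement
# with a direct-fetch getdata in Part 4.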
direct_fetch_response_time = 0.05
class BaseNode(SingleNodeConnCB):
def __init__(self):
SingleNodeConnCB.__init__(self)
self.last_inv = None
self.last_headers = None
self.last_block = None
self.last_getdata = None
self.block_announced = False
self.last_getheaders = None
self.disconnected = False
self.last_blockhash_announced = None
def clear_last_announcement(self):
with mininode_lock:
self.block_announced = False
self.last_inv = None
self.last_headers = None
# Request data for a list of block hashes
def get_data(self, block_hashes):
msg = msg_getdata()
for x in block_hashes:
msg.inv.append(CInv(2, x))
self.connection.send_message(msg)
def get_headers(self, locator, hashstop):
msg = msg_getheaders()
msg.locator.vHave = locator
msg.hashstop = hashstop
self.connection.send_message(msg)
def send_block_inv(self, blockhash):
msg = msg_inv()
msg.inv = [CInv(2, blockhash)]
self.connection.send_message(msg)
def on_inv(self, conn, message):
self.last_inv = message
self.block_announced = True
self.last_blockhash_announced = message.inv[-1].hash
def on_headers(self, conn, message):
self.last_headers = message
if len(message.headers):
self.block_announced = True
message.headers[-1].calc_sha256()
self.last_blockhash_announced = message.headers[-1].sha256
def on_block(self, conn, message):
self.last_block = message.block
self.last_block.calc_sha256()
def on_getdata(self, conn, message):
self.last_getdata = message
def on_getheaders(self, conn, message):
self.last_getheaders = message
def on_close(self, conn):
self.disconnected = True
# Test whether the last announcement we received had the
# right header or the right inv
# inv and headers should be lists of block hashes
def check_last_announcement(self, headers=None, inv=None):
expect_headers = headers if headers != None else []
expect_inv = inv if inv != None else []
test_function = lambda: self.block_announced
assert(wait_until(test_function, timeout=60))
with mininode_lock:
self.block_announced = False
success = True
compare_inv = []
if self.last_inv != None:
compare_inv = [x.hash for x in self.last_inv.inv]
if compare_inv != expect_inv:
success = False
hash_headers = []
if self.last_headers != None:
# treat headers as a list of block hashes
hash_headers = [ x.sha256 for x in self.last_headers.headers ]
if hash_headers != expect_headers:
success = False
self.last_inv = None
self.last_headers = None
return success
# Syncing helpers
def wait_for_block(self, blockhash, timeout=60):
test_function = lambda: self.last_block != None and self.last_block.sha256 == blockhash
assert(wait_until(test_function, timeout=timeout))
return
def wait_for_getheaders(self, timeout=60):
test_function = lambda: self.last_getheaders != None
assert(wait_until(test_function, timeout=timeout))
return
def wait_for_getdata(self, hash_list, timeout=60):
if hash_list == []:
return
test_function = lambda: self.last_getdata != None and [x.hash for x in self.last_getdata.inv] == hash_list
assert(wait_until(test_function, timeout=timeout))
return
def wait_for_disconnect(self, timeout=60):
test_function = lambda: self.disconnected
assert(wait_until(test_function, timeout=timeout))
return
def wait_for_block_announcement(self, block_hash, timeout=60):
test_function = lambda: self.last_blockhash_announced == block_hash
assert(wait_until(test_function, timeout=timeout))
return
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [ CBlockHeader(b) for b in new_blocks ]
self.send_message(headers_message)
def send_getblocks(self, locator):
getblocks_message = msg_getblocks()
getblocks_message.locator.vHave = locator
self.send_message(getblocks_message)
# InvNode: This peer should only ever receive inv's, because it doesn't ever send a
# "sendheaders" message.
class InvNode(BaseNode):
def __init__(self):
BaseNode.__init__(self)
# TestNode: This peer is the one we use for most of the testing.
class TestNode(BaseNode):
def __init__(self):
BaseNode.__init__(self)
class SendHeadersTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self):
self.nodes = []
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [["-debug", "-logtimemicros=1"]]*2)
connect_nodes(self.nodes[0], 1)
# mine count blocks and return the new tip
def mine_blocks(self, count):
# Clear out last block announcement from each p2p listener
[ x.clear_last_announcement() for x in self.p2p_connections ]
self.nodes[0].generate(count)
return int(self.nodes[0].getbestblockhash(), 16)
# mine a reorg that invalidates length blocks (replacing them with
# length+1 blocks).
# Note: we clear the state of our p2p connections after the
# to-be-reorged-out blocks are mined, so that we don't break later tests.
# return the list of block hashes newly mined
def mine_reorg(self, length):
self.nodes[0].generate(length) # make sure all invalidated blocks are node0's
sync_blocks(self.nodes, wait=0.1)
for x in self.p2p_connections:
x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16))
x.clear_last_announcement()
tip_height = self.nodes[1].getblockcount()
hash_to_invalidate = self.nodes[1].getblockhash(tip_height-(length-1))
self.nodes[1].invalidateblock(hash_to_invalidate)
all_hashes = self.nodes[1].generate(length+1) # Must be longer than the orig chain
sync_blocks(self.nodes, wait=0.1)
return [int(x, 16) for x in all_hashes]
def run_test(self):
# Setup the p2p connections and start up the network thread.
inv_node = InvNode()
test_node = TestNode()
self.p2p_connections = [inv_node, test_node]
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], inv_node))
# Set nServices to 0 for test_node, so no block download will occur outside of
# direct fetching
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node, services=0))
inv_node.add_connection(connections[0])
test_node.add_connection(connections[1])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
inv_node.wait_for_verack()
test_node.wait_for_verack()
tip = int(self.nodes[0].getbestblockhash(), 16)
# PART 1
# 1. Mine a block; expect inv announcements each time
print("Part 1: headers don't start before sendheaders message...")
for i in range(4):
old_tip = tip
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(inv=[tip]), True)
# Try a few different responses; none should affect next announcement
if i == 0:
# first request the block
test_node.get_data([tip])
test_node.wait_for_block(tip, timeout=5)
elif i == 1:
# next try requesting header and block
test_node.get_headers(locator=[old_tip], hashstop=tip)
test_node.get_data([tip])
test_node.wait_for_block(tip)
test_node.clear_last_announcement() # since we requested headers...
elif i == 2:
# this time announce own block via headers
height = self.nodes[0].getblockcount()
last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
block_time = last_time + 1
new_block = create_block(tip, create_coinbase(height+1), block_time)
new_block.solve()
test_node.send_header_for_blocks([new_block])
test_node.wait_for_getdata([new_block.sha256], timeout=5)
test_node.send_message(msg_block(new_block))
test_node.sync_with_ping() # make sure this block is processed
inv_node.clear_last_announcement()
test_node.clear_last_announcement()
print("Part 1: success!")
print("Part 2: announce blocks with headers after sendheaders message...")
# PART 2
# 2. Send a sendheaders message and test that headers announcements
# commence and keep working.
test_node.send_message(msg_sendheaders())
prev_tip = int(self.nodes[0].getbestblockhash(), 16)
test_node.get_headers(locator=[prev_tip], hashstop=0)
test_node.sync_with_ping()
# Now that we've synced headers, headers announcements should work
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=[tip]), True)
height = self.nodes[0].getblockcount()+1
block_time += 10 # Advance far enough ahead
for i in range(10):
# Mine i blocks, and alternate announcing either via
# inv (of tip) or via headers. After each, new blocks
# mined by the node should successfully be announced
# with block header, even though the blocks are never requested
for j in range(2):
blocks = []
for b in range(i+1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
if j == 0:
# Announce via inv
test_node.send_block_inv(tip)
test_node.wait_for_getheaders(timeout=5)
# Should have received a getheaders now
test_node.send_header_for_blocks(blocks)
# Test that duplicate inv's won't result in duplicate
# getdata requests, or duplicate headers announcements
[ inv_node.send_block_inv(x.sha256) for x in blocks ]
test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=5)
inv_node.sync_with_ping()
else:
# Announce via headers
test_node.send_header_for_blocks(blocks)
test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=5)
# Test that duplicate headers won't result in duplicate
# getdata requests (the check is further down)
inv_node.send_header_for_blocks(blocks)
inv_node.sync_with_ping()
[ test_node.send_message(msg_block(x)) for x in blocks ]
test_node.sync_with_ping()
inv_node.sync_with_ping()
# This block should not be announced to the inv node (since it also
# broadcast it)
assert_equal(inv_node.last_inv, None)
assert_equal(inv_node.last_headers, None)
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=[tip]), True)
height += 1
block_time += 1
print("Part 2: success!")
print("Part 3: headers announcements can stop after large reorg, and resume after headers/inv from peer...")
# PART 3. Headers announcements can stop after large reorg, and resume after
# getheaders or inv from peer.
for j in range(2):
# First try mining a reorg that can propagate with header announcement
new_block_hashes = self.mine_reorg(length=7)
tip = new_block_hashes[-1]
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=new_block_hashes), True)
block_time += 8
# Mine a too-large reorg, which should be announced with a single inv
new_block_hashes = self.mine_reorg(length=8)
tip = new_block_hashes[-1]
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(inv=[tip]), True)
block_time += 9
fork_point = self.nodes[0].getblock("%02x" % new_block_hashes[0])["previousblockhash"]
fork_point = int(fork_point, 16)
# Use getblocks/getdata
test_node.send_getblocks(locator = [fork_point])
assert_equal(test_node.check_last_announcement(inv=new_block_hashes), True)
test_node.get_data(new_block_hashes)
test_node.wait_for_block(new_block_hashes[-1])
for i in range(3):
# Mine another block, still should get only an inv
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(inv=[tip]), True)
if i == 0:
# Just get the data -- shouldn't cause headers announcements to resume
test_node.get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
# Send a getheaders message that shouldn't trigger headers announcements
# to resume (best header sent will be too old)
test_node.get_headers(locator=[fork_point], hashstop=new_block_hashes[1])
test_node.get_data([tip])
test_node.wait_for_block(tip)
elif i == 2:
test_node.get_data([tip])
test_node.wait_for_block(tip)
# This time, try sending either a getheaders to trigger resumption
# of headers announcements, or mine a new block and inv it, also
# triggering resumption of headers announcements.
if j == 0:
test_node.get_headers(locator=[tip], hashstop=0)
test_node.sync_with_ping()
else:
test_node.send_block_inv(tip)
test_node.sync_with_ping()
# New blocks should now be announced with header
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=[tip]), True)
print("Part 3: success!")
print("Part 4: Testing direct fetch behavior...")
tip = self.mine_blocks(1)
height = self.nodes[0].getblockcount() + 1
last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
block_time = last_time + 1
# Create 2 blocks. Send the blocks, then send the headers.
blocks = []
for b in range(2):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
inv_node.send_message(msg_block(blocks[-1]))
inv_node.sync_with_ping() # Make sure blocks are processed
test_node.last_getdata = None
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
# should not have received any getdata messages
with mininode_lock:
assert_equal(test_node.last_getdata, None)
# This time, direct fetch should work
blocks = []
for b in range(3):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=direct_fetch_response_time)
[ test_node.send_message(msg_block(x)) for x in blocks ]
test_node.sync_with_ping()
# Now announce a header that forks the last two blocks
tip = blocks[0].sha256
height -= 1
blocks = []
# Create extra blocks for later
for b in range(20):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
# Announcing one block on fork should not trigger direct fetch
# (less work than tip)
test_node.last_getdata = None
test_node.send_header_for_blocks(blocks[0:1])
test_node.sync_with_ping()
with mininode_lock:
assert_equal(test_node.last_getdata, None)
# Announcing one more block on fork should trigger direct fetch for
# both blocks (same work as tip)
test_node.send_header_for_blocks(blocks[1:2])
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]], timeout=direct_fetch_response_time)
# Announcing 16 more headers should trigger direct fetch for 14 more
# blocks
test_node.send_header_for_blocks(blocks[2:18])
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]], timeout=direct_fetch_response_time)
# Announcing 1 more header should not trigger any response
test_node.last_getdata = None
test_node.send_header_for_blocks(blocks[18:19])
test_node.sync_with_ping()
with mininode_lock:
assert_equal(test_node.last_getdata, None)
print("Part 4: success!")
# Now deliver all those blocks we announced.
[ test_node.send_message(msg_block(x)) for x in blocks ]
print("Part 5: Testing handling of unconnecting headers")
# First we test that receipt of an unconnecting header doesn't prevent
# chain sync.
for i in range(10):
test_node.last_getdata = None
blocks = []
# Create two more blocks.
for j in range(2):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
# Send the header of the second block -> this won't connect.
with mininode_lock:
test_node.last_getheaders = None
test_node.send_header_for_blocks([blocks[1]])
test_node.wait_for_getheaders(timeout=1)
test_node.send_header_for_blocks(blocks)
test_node.wait_for_getdata([x.sha256 for x in blocks])
[ test_node.send_message(msg_block(x)) for x in blocks ]
test_node.sync_with_ping()
assert_equal(int(self.nodes[0].getbestblockhash(), 16), blocks[1].sha256)
blocks = []
# Now we test that if we repeatedly don't send connecting headers, we
# don't go into an infinite loop trying to get them to connect.
MAX_UNCONNECTING_HEADERS = 10
for j in range(MAX_UNCONNECTING_HEADERS+1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
for i in range(1, MAX_UNCONNECTING_HEADERS):
# Send a header that doesn't connect, check that we get a getheaders.
with mininode_lock:
test_node.last_getheaders = None
test_node.send_header_for_blocks([blocks[i]])
test_node.wait_for_getheaders(timeout=1)
# Next header will connect, should re-set our count:
test_node.send_header_for_blocks([blocks[0]])
# Remove the first two entries (blocks[1] would connect):
blocks = blocks[2:]
# Now try to see how many unconnecting headers we can send
# before we get disconnected. Should be 5*MAX_UNCONNECTING_HEADERS
for i in range(5*MAX_UNCONNECTING_HEADERS - 1):
# Send a header that doesn't connect, check that we get a getheaders.
with mininode_lock:
test_node.last_getheaders = None
test_node.send_header_for_blocks([blocks[i%len(blocks)]])
test_node.wait_for_getheaders(timeout=1)
# Eventually this stops working.
with mininode_lock:
            test_node.last_getheaders = None
test_node.send_header_for_blocks([blocks[-1]])
# Should get disconnected
test_node.wait_for_disconnect()
with mininode_lock:
            test_node.last_getheaders = True
print("Part 5: success!")
# Finally, check that the inv node never received a getdata request,
# throughout the test
assert_equal(inv_node.last_getdata, None)
if __name__ == '__main__':
SendHeadersTest().main()
| mit |
ioos/system-test | Theme_3_Species_and_Habitat/Scenario_3A_SeaBirds/Scenario_3A_Important_Bird_Areas/Scenario_3A_SeaBirds.py | 3 | 8441 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
from utilities import css_styles
css_styles()
# <markdowncell>
# # IOOS System Test - Theme 3 - Scenario A - [Description](https://github.com/ioos/system-test/wiki/Development-of-Test-Themes#scenario-3a-assessing-seabird-vulnerability-in-the-bering-sea)
#
# ## Assessing Seabird Vulnerability in the Bering Sea
#
# ## Questions
# 1. Can we discover, access, and overlay Important Bird Area polygons (and therefore other similar layers for additional important resource areas) on modeled datasets in the Bering Sea?
# 3. Is metadata for projected climate data layers and Important Bird Area polygons sufficient to determine a subset of polygons desired by a query?
# 4. Can a simple set statistics (e.g., mean and standard deviation) be derived from multiple variables in each of the six models to derive the forecast variability of climate conditions through time, through the end of the model runs (2003-2040)?
# 5. Can we create a standardized matrix or other display method for output variables that allow resource experts to easily assess projected changes in climate variables, within given ranges of time, and compare projected changes across multiple coupled oceanographic and climate models?
# 6. Can we develop a set of process-specific guidelines and a standardized set of outputs for a tool that would allow researchers to address a diversity of resource management questions relative to projected changes in climate for specific zones of interest?
# <markdowncell>
# ## Q1 - Can we discover, access, and overlay Important Bird Area polygons (and therefore other similar layers for additional important resource areas) on modeled datasets in the Bering Sea?
# <markdowncell>
# <div class="error"><strong>Discovery is not possible</strong> - No Important Bird Area polygons are not discoverable at this time. They are, however, available in a GeoServer 'known' to us. This should be fixed. The WFS service should be added to a queryable CSW.</div>
# <markdowncell>
# ##### Load 'known' WFS endpoint with Important Bird Area polygons
# <codecell>
from owslib.wfs import WebFeatureService
known_wfs = "http://solo.axiomalaska.com/geoserver/audubon/ows"
wfs = WebFeatureService(known_wfs, version='1.0.0')
print sorted(wfs.contents.keys())
# <markdowncell>
# ##### We already know that the 'audubon:audubon_ibas' layer contains Important Bird Areas. Request a 'geojson' response from the layer
# <codecell>
import geojson
geojson_response = wfs.getfeature(typename=['audubon:audubon_ibas'], maxfeatures=1, outputFormat="application/json", srsname="urn:x-ogc:def:crs:EPSG:4326").read()
feature = geojson.loads(geojson_response)
# <markdowncell>
# ##### Convert to Shapely geometry objects
# <codecell>
from shapely.geometry import shape
shapes = [shape(s.get("geometry")) for s in feature.get("features")]
# <markdowncell>
# ##### Map the geometry objects
# <codecell>
import folium
map_center = shapes[0].centroid
mapper = folium.Map(location=[map_center.x, map_center.y], zoom_start=6)
for s in shapes:
if hasattr(s.boundary, 'coords'):
mapper.line(s.boundary.coords, line_color='#FF0000', line_weight=5)
else:
for p in s:
mapper.line(p.boundary.coords, line_color='#FF0000', line_weight=5)
mapper._build_map()
from IPython.core.display import HTML
HTML('<iframe srcdoc="{srcdoc}" style="width: 100%; height: 535px; border: none"></iframe>'.format(srcdoc=mapper.HTML.replace('"', '"')))
# <markdowncell>
# ### Can we discover other datasets in this polygon area?
# <markdowncell>
# ##### Setup BCSW Filters to find models in the area of the Important Bird Polygon
# <codecell>
from owslib import fes
# Polygon filters
polygon_filters = []
for s in shapes:
f = fes.BBox(bbox=list(reversed(s.bounds)))
polygon_filters.append(f)
# If we have more than one polygon filter, OR them together
if len(polygon_filters) > 1:
polygon_filters = fes.Or(polygon_filters)
elif len(polygon_filters) == 1:
polygon_filters = polygon_filters[0]
# Name filters
name_filters = []
model_strings = ['roms', 'selfe', 'adcirc', 'ncom', 'hycom', 'fvcom', 'wrf', 'wrams']
for model in model_strings:
title_filter = fes.PropertyIsLike(propertyname='apiso:Title', literal='*%s*' % model, wildCard='*')
name_filters.append(title_filter)
subject_filter = fes.PropertyIsLike(propertyname='apiso:Subject', literal='*%s*' % model, wildCard='*')
name_filters.append(subject_filter)
# Or all of the name filters together
name_filters = fes.Or(name_filters)
# Final filters
filters = fes.And([polygon_filters, name_filters])
# <markdowncell>
# ##### The actual CSW filters look like this
# <codecell>
from owslib.etree import etree
print etree.tostring(filters.toXML(), pretty_print=True)
# <markdowncell>
# ##### Find all models contained in all CSW endpoints
# <codecell>
from owslib.csw import CatalogueServiceWeb
endpoints = ['http://www.nodc.noaa.gov/geoportal/csw',
'http://www.ngdc.noaa.gov/geoportal/csw',
'http://catalog.data.gov/csw-all',
#'http://cwic.csiss.gmu.edu/cwicv1/discovery',
'http://geoport.whoi.edu/geoportal/csw',
'https://edg.epa.gov/metadata/csw',
'http://cmgds.marine.usgs.gov/geonetwork/srv/en/csw',
'http://cida.usgs.gov/gdp/geonetwork/srv/en/csw',
'http://geodiscover.cgdi.ca/wes/serviceManagerCSW/csw',
'http://geoport.whoi.edu/gi-cat/services/cswiso']
# <markdowncell>
# ##### Filter out CSW servers that do not support a BBOX query
# <codecell>
bbox_endpoints = []
for url in endpoints:
queryables = []
try:
csw = CatalogueServiceWeb(url, timeout=20)
except BaseException:
print "Failure - %s - Timed out" % url
if "BBOX" in csw.filters.spatial_operators:
print "Success - %s - BBOX Query supported" % url
bbox_endpoints.append(url)
else:
print "Failure - %s - BBOX Query NOT supported" % url
# <codecell>
dap_urls = []
dap_services = ["urn:x-esri:specification:ServiceType:odp:url",
"urn:x-esri:specification:ServiceType:OPeNDAP"]
for url in bbox_endpoints:
print "*", url
try:
csw = CatalogueServiceWeb(url, timeout=20)
csw.getrecords2(constraints=[filters], maxrecords=1000, esn='full')
for record, item in csw.records.items():
print " -", item.title
# Get DAP URLs
url = next((d['url'] for d in item.references if d['scheme'] in dap_services), None)
if url:
print " + OPeNDAP URL: %s" % url
dap_urls.append(url)
else:
print " + No OPeNDAP service available"
except BaseException as e:
print " - FAILED", url, e.msg
# <markdowncell>
# ##### Get bounding polygons from each dataset
# <codecell>
from paegan.cdm.dataset import CommonDataset
lookup_standard_name = "sea_water_temperature"
# Filter out DAP servers that are taking FOREVER
dap_urls = [url for url in dap_urls if "data1.gfdl.noaa.gov" not in url]
dataset_polygons = {}
for i, dap in enumerate(dap_urls):
print '(%d/%s)' % (i+1, len(dap_urls)),
try:
cd = CommonDataset.open(dap)
except BaseException:
print "Could not access", dap
try:
var = cd.get_varname_from_stdname(standard_name=lookup_standard_name)[0]
dataset_polygons[dap] = cd.getboundingpolygon(var=var)
print "Retrieved bounding polygon from %s" % dap
except (IndexError, AssertionError):
print "No standard_name '%s' in '%s'" % (lookup_standard_name, dap)
# <markdowncell>
# ##### Overlay dataset polygons on top of Important Bird Area polygons
# <codecell>
import random
mapper = folium.Map(location=[map_center.x, map_center.y], zoom_start=0)
for name, polygon in dataset_polygons.items():
color = "#%06x" % random.randint(0,0xFFFFFF)
# Normal coordinates
mapper.line(polygon.boundary.coords, line_color=color, line_weight=3)
# Fliipped coordinates
mapper.line([(y,x) for x,y in polygon.boundary.coords], line_color=color, line_weight=3)
mapper._build_map()
from IPython.core.display import HTML
HTML('<iframe srcdoc="{srcdoc}" style="width: 100%; height: 535px; border: none"></iframe>'.format(srcdoc=mapper.HTML.replace('"', '"')))
# <codecell>
| unlicense |
hakatashi/youtube-dl | youtube_dl/extractor/raywenderlich.py | 21 | 6169 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from .vimeo import VimeoIE
from ..compat import compat_str
from ..utils import (
ExtractorError,
int_or_none,
merge_dicts,
try_get,
unescapeHTML,
unified_timestamp,
urljoin,
)
class RayWenderlichIE(InfoExtractor):
_VALID_URL = r'''(?x)
https?://
(?:
videos\.raywenderlich\.com/courses|
(?:www\.)?raywenderlich\.com
)/
(?P<course_id>[^/]+)/lessons/(?P<id>\d+)
'''
_TESTS = [{
'url': 'https://www.raywenderlich.com/3530-testing-in-ios/lessons/1',
'info_dict': {
'id': '248377018',
'ext': 'mp4',
'title': 'Introduction',
'description': 'md5:804d031b3efa9fcb49777d512d74f722',
'timestamp': 1513906277,
'upload_date': '20171222',
'duration': 133,
'uploader': 'Ray Wenderlich',
'uploader_id': 'user3304672',
},
'params': {
'noplaylist': True,
'skip_download': True,
},
'add_ie': [VimeoIE.ie_key()],
'expected_warnings': ['HTTP Error 403: Forbidden'],
}, {
'url': 'https://videos.raywenderlich.com/courses/105-testing-in-ios/lessons/1',
'only_matching': True,
}]
@staticmethod
def _extract_video_id(data, lesson_id):
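        # Walk the collection data for the content whose ordinal matches lesson_id
        # and return its video identifier as a string; returns None if not found.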
if not data:
return
groups = try_get(data, lambda x: x['groups'], list) or []
if not groups:
return
for group in groups:
if not isinstance(group, dict):
continue
contents = try_get(data, lambda x: x['contents'], list) or []
for content in contents:
if not isinstance(content, dict):
continue
ordinal = int_or_none(content.get('ordinal'))
if ordinal != lesson_id:
continue
video_id = content.get('identifier')
if video_id:
return compat_str(video_id)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
course_id, lesson_id = mobj.group('course_id', 'id')
display_id = '%s/%s' % (course_id, lesson_id)
webpage = self._download_webpage(url, display_id)
thumbnail = self._og_search_thumbnail(
webpage, default=None) or self._html_search_meta(
'twitter:image', webpage, 'thumbnail')
if '>Subscribe to unlock' in webpage:
raise ExtractorError(
'This content is only available for subscribers',
expected=True)
info = {
'thumbnail': thumbnail,
}
vimeo_id = self._search_regex(
r'data-vimeo-id=["\'](\d+)', webpage, 'vimeo id', default=None)
if not vimeo_id:
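            # No inline Vimeo id in the page: resolve it via the data-collection JSON
            # and the videos API instead.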
data = self._parse_json(
self._search_regex(
r'data-collection=(["\'])(?P<data>{.+?})\1', webpage,
'data collection', default='{}', group='data'),
display_id, transform_source=unescapeHTML, fatal=False)
video_id = self._extract_video_id(
data, lesson_id) or self._search_regex(
r'/videos/(\d+)/', thumbnail, 'video id')
headers = {
'Referer': url,
'X-Requested-With': 'XMLHttpRequest',
}
csrf_token = self._html_search_meta(
'csrf-token', webpage, 'csrf token', default=None)
if csrf_token:
headers['X-CSRF-Token'] = csrf_token
video = self._download_json(
'https://videos.raywenderlich.com/api/v1/videos/%s.json'
% video_id, display_id, headers=headers)['video']
vimeo_id = video['clips'][0]['provider_id']
info.update({
'_type': 'url_transparent',
'title': video.get('name'),
'description': video.get('description') or video.get(
'meta_description'),
'duration': int_or_none(video.get('duration')),
'timestamp': unified_timestamp(video.get('created_at')),
})
return merge_dicts(info, self.url_result(
VimeoIE._smuggle_referrer(
'https://player.vimeo.com/video/%s' % vimeo_id, url),
ie=VimeoIE.ie_key(), video_id=vimeo_id))
class RayWenderlichCourseIE(InfoExtractor):
_VALID_URL = r'''(?x)
https?://
(?:
videos\.raywenderlich\.com/courses|
(?:www\.)?raywenderlich\.com
)/
(?P<id>[^/]+)
'''
_TEST = {
'url': 'https://www.raywenderlich.com/3530-testing-in-ios',
'info_dict': {
'title': 'Testing in iOS',
'id': '3530-testing-in-ios',
},
'params': {
'noplaylist': False,
},
'playlist_count': 29,
}
@classmethod
def suitable(cls, url):
return False if RayWenderlichIE.suitable(url) else super(
RayWenderlichCourseIE, cls).suitable(url)
def _real_extract(self, url):
course_id = self._match_id(url)
webpage = self._download_webpage(url, course_id)
entries = []
lesson_urls = set()
for lesson_url in re.findall(
r'<a[^>]+\bhref=["\'](/%s/lessons/\d+)' % course_id, webpage):
if lesson_url in lesson_urls:
continue
lesson_urls.add(lesson_url)
entries.append(self.url_result(
urljoin(url, lesson_url), ie=RayWenderlichIE.ie_key()))
title = self._og_search_title(
webpage, default=None) or self._html_search_meta(
'twitter:title', webpage, 'title', default=None)
return self.playlist_result(entries, course_id, title)
| unlicense |
stacywsmith/ansible | lib/ansible/modules/network/bigswitch/bigmon_chain.py | 19 | 4937 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Ansible module to manage Big Monitoring Fabric service chains
# (c) 2016, Ted Elhourani <[email protected]>,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigmon_chain
author: "Ted (@tedelhourani)"
short_description: Create and remove a bigmon inline service chain.
description:
- Create and remove a bigmon inline service chain.
version_added: "2.3"
options:
name:
description:
- The name of the chain.
required: true
state:
description:
- Whether the service chain should be present or absent.
default: present
choices: ['present', 'absent']
controller:
description:
- The controller IP address.
required: true
validate_certs:
description:
- If C(false), SSL certificates will not be validated. This should only be used
on personally controlled devices using self-signed certificates.
required: false
default: true
choices: [true, false]
access_token:
description:
- Bigmon access token. If this isn't set the the environment variable C(BIGSWITCH_ACCESS_TOKEN) is used.
'''
EXAMPLES = '''
- name: bigmon inline service chain
bigmon_chain:
name: MyChain
controller: '{{ inventory_hostname }}'
state: present
validate_certs: false
'''
RETURN = '''
{
"changed": true,
"invocation": {
"module_args": {
"access_token": null,
"controller": "192.168.86.221",
"name": "MyChain",
"state": "present",
"validate_certs": false
},
"module_name": "bigmon_chain"
}
}
'''
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.bigswitch_utils import Rest, Response
from ansible.module_utils.pycompat24 import get_exception
def chain(module):
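    # Read the existing chain config from the controller, compare it with the desired
    # state, and issue a PUT (create) or DELETE as needed; exit unchanged if already in sync.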
try:
access_token = module.params['access_token'] or os.environ['BIGSWITCH_ACCESS_TOKEN']
except KeyError:
e = get_exception()
module.fail_json(msg='Unable to load %s' % e.message )
name = module.params['name']
state = module.params['state']
controller = module.params['controller']
rest = Rest(module,
{'content-type': 'application/json', 'Cookie': 'session_cookie='+access_token},
'https://'+controller+':8443/api/v1/data/controller/applications/bigchain')
if None in (name, state, controller):
        module.fail_json(msg='one or more required parameters (name, state, controller) is missing')
response = rest.get('chain?config=true', data={})
if response.status_code != 200:
module.fail_json(msg="failed to obtain existing chain config: {}".format(response.json['description']))
config_present = False
matching = [chain for chain in response.json if chain['name'] == name]
if matching:
config_present = True
if state in ('present') and config_present:
module.exit_json(changed=False)
if state in ('absent') and not config_present:
module.exit_json(changed=False)
if state in ('present'):
response = rest.put('chain[name="%s"]' % name, data={'name': name})
if response.status_code == 204:
module.exit_json(changed=True)
else:
module.fail_json(msg="error creating chain '{}': {}".format(name, response.json['description']))
if state in ('absent'):
response = rest.delete('chain[name="%s"]' % name, data={})
if response.status_code == 204:
module.exit_json(changed=True)
else:
module.fail_json(msg="error deleting chain '{}': {}".format(name, response.json['description']))
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str', required=True),
controller=dict(type='str', required=True),
state=dict(choices=['present', 'absent'], default='present'),
validate_certs=dict(type='bool', default='True'),
access_token=dict(type='str', no_log=True)
)
)
try:
chain(module)
except Exception:
e = get_exception()
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
| gpl-3.0 |
asm666/sympy | sympy/physics/quantum/tests/test_dagger.py | 124 | 1512 | from sympy import I, Matrix, symbols, conjugate, Expr, Integer
from sympy.physics.quantum.dagger import adjoint, Dagger
from sympy.external import import_module
from sympy.utilities.pytest import skip
def test_scalars():
x = symbols('x', complex=True)
assert Dagger(x) == conjugate(x)
assert Dagger(I*x) == -I*conjugate(x)
i = symbols('i', real=True)
assert Dagger(i) == i
p = symbols('p')
assert isinstance(Dagger(p), adjoint)
i = Integer(3)
assert Dagger(i) == i
A = symbols('A', commutative=False)
assert Dagger(A).is_commutative is False
def test_matrix():
x = symbols('x')
m = Matrix([[I, x*I], [2, 4]])
assert Dagger(m) == m.H
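# Dagger should defer to a class's _eval_adjoint() hook when one is defined.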
class Foo(Expr):
def _eval_adjoint(self):
return I
def test_eval_adjoint():
f = Foo()
d = Dagger(f)
assert d == I
np = import_module('numpy')
def test_numpy_dagger():
if not np:
skip("numpy not installed.")
a = np.matrix([[1.0, 2.0j], [-1.0j, 2.0]])
adag = a.copy().transpose().conjugate()
assert (Dagger(a) == adag).all()
scipy = import_module('scipy', __import__kwargs={'fromlist': ['sparse']})
def test_scipy_sparse_dagger():
if not np:
skip("numpy not installed.")
if not scipy:
skip("scipy not installed.")
else:
sparse = scipy.sparse
a = sparse.csr_matrix([[1.0 + 0.0j, 2.0j], [-1.0j, 2.0 + 0.0j]])
adag = a.copy().transpose().conjugate()
assert np.linalg.norm((Dagger(a) - adag).todense()) == 0.0
| bsd-3-clause |
5GExchange/nffg | setup.py | 2 | 1754 | # Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
from nffg import VERSION
MODULE_NAME = "nffg"
setup(name=MODULE_NAME,
version=VERSION,
description="Network Function Forwarding Graph",
author="Janos Czentye, Balazs Nemeth, Balazs Sonkoly",
long_description="Python-based implementation of "
"Network Function Forwarding Graph used by ESCAPE",
classifiers=[
'Development Status :: 4 - Beta',
"Intended Audience :: Telecommunications Industry",
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
],
keywords='networking NFV BiSBiS forwarding',
url="http://sb.tmit.bme.hu/escape",
author_email="{name}.{name}@tmit.bme.hu",
maintainer="Janos Czentye",
maintainer_email="[email protected]",
license="Apache 2.0",
install_requires=[
"networkx~=1.10"
],
package_dir={MODULE_NAME: "."},
packages=[MODULE_NAME],
scripts=["nffg_diff.py"],
include_package_data=True,
zip_safe=False)
| apache-2.0 |
bigswitch/nova | nova/objects/instance.py | 1 | 54454 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import versionutils
from sqlalchemy.orm import joinedload
from nova.cells import opts as cells_opts
from nova.cells import rpcapi as cells_rpcapi
from nova.cells import utils as cells_utils
from nova import db
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import models
from nova import exception
from nova.i18n import _LE, _LW
from nova import notifications
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova import utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# List of fields that can be joined in DB layer.
_INSTANCE_OPTIONAL_JOINED_FIELDS = ['metadata', 'system_metadata',
'info_cache', 'security_groups',
'pci_devices', 'tags', 'services']
# These are fields that are optional but don't translate to db columns
_INSTANCE_OPTIONAL_NON_COLUMN_FIELDS = ['fault', 'flavor', 'old_flavor',
'new_flavor', 'ec2_ids']
# These are fields that are optional and in instance_extra
_INSTANCE_EXTRA_FIELDS = ['numa_topology', 'pci_requests',
'flavor', 'vcpu_model', 'migration_context',
'keypairs']
# These are fields that can be specified as expected_attrs
INSTANCE_OPTIONAL_ATTRS = (_INSTANCE_OPTIONAL_JOINED_FIELDS +
_INSTANCE_OPTIONAL_NON_COLUMN_FIELDS +
_INSTANCE_EXTRA_FIELDS)
# These are fields that most query calls load by default
INSTANCE_DEFAULT_FIELDS = ['metadata', 'system_metadata',
'info_cache', 'security_groups']
# Maximum count of tags to one instance
MAX_TAG_COUNT = 50
def _expected_cols(expected_attrs):
"""Return expected_attrs that are columns needing joining.
NB: This function may modify expected_attrs if one
requested attribute requires another.
"""
if not expected_attrs:
return expected_attrs
simple_cols = [attr for attr in expected_attrs
if attr in _INSTANCE_OPTIONAL_JOINED_FIELDS]
complex_cols = ['extra.%s' % field
for field in _INSTANCE_EXTRA_FIELDS
if field in expected_attrs]
if complex_cols:
simple_cols.append('extra')
simple_cols = [x for x in simple_cols if x not in _INSTANCE_EXTRA_FIELDS]
return simple_cols + complex_cols
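# Unique sentinel used to tell "no value loaded" apart from a legitimate None value.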
_NO_DATA_SENTINEL = object()
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class Instance(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 2.0: Initial version
# Version 2.1: Added services
# Version 2.2: Added keypairs
VERSION = '2.2'
fields = {
'id': fields.IntegerField(),
'user_id': fields.StringField(nullable=True),
'project_id': fields.StringField(nullable=True),
'image_ref': fields.StringField(nullable=True),
'kernel_id': fields.StringField(nullable=True),
'ramdisk_id': fields.StringField(nullable=True),
'hostname': fields.StringField(nullable=True),
'launch_index': fields.IntegerField(nullable=True),
'key_name': fields.StringField(nullable=True),
'key_data': fields.StringField(nullable=True),
'power_state': fields.IntegerField(nullable=True),
'vm_state': fields.StringField(nullable=True),
'task_state': fields.StringField(nullable=True),
'services': fields.ObjectField('ServiceList'),
'memory_mb': fields.IntegerField(nullable=True),
'vcpus': fields.IntegerField(nullable=True),
'root_gb': fields.IntegerField(nullable=True),
'ephemeral_gb': fields.IntegerField(nullable=True),
'ephemeral_key_uuid': fields.UUIDField(nullable=True),
'host': fields.StringField(nullable=True),
'node': fields.StringField(nullable=True),
'instance_type_id': fields.IntegerField(nullable=True),
'user_data': fields.StringField(nullable=True),
'reservation_id': fields.StringField(nullable=True),
'launched_at': fields.DateTimeField(nullable=True),
'terminated_at': fields.DateTimeField(nullable=True),
'availability_zone': fields.StringField(nullable=True),
'display_name': fields.StringField(nullable=True),
'display_description': fields.StringField(nullable=True),
'launched_on': fields.StringField(nullable=True),
# NOTE(jdillaman): locked deprecated in favor of locked_by,
# to be removed in Icehouse
'locked': fields.BooleanField(default=False),
'locked_by': fields.StringField(nullable=True),
'os_type': fields.StringField(nullable=True),
'architecture': fields.StringField(nullable=True),
'vm_mode': fields.StringField(nullable=True),
'uuid': fields.UUIDField(),
'root_device_name': fields.StringField(nullable=True),
'default_ephemeral_device': fields.StringField(nullable=True),
'default_swap_device': fields.StringField(nullable=True),
'config_drive': fields.StringField(nullable=True),
'access_ip_v4': fields.IPV4AddressField(nullable=True),
'access_ip_v6': fields.IPV6AddressField(nullable=True),
'auto_disk_config': fields.BooleanField(default=False),
'progress': fields.IntegerField(nullable=True),
'shutdown_terminate': fields.BooleanField(default=False),
'disable_terminate': fields.BooleanField(default=False),
'cell_name': fields.StringField(nullable=True),
'metadata': fields.DictOfStringsField(),
'system_metadata': fields.DictOfNullableStringsField(),
'info_cache': fields.ObjectField('InstanceInfoCache',
nullable=True),
'security_groups': fields.ObjectField('SecurityGroupList'),
'fault': fields.ObjectField('InstanceFault', nullable=True),
'cleaned': fields.BooleanField(default=False),
'pci_devices': fields.ObjectField('PciDeviceList', nullable=True),
'numa_topology': fields.ObjectField('InstanceNUMATopology',
nullable=True),
'pci_requests': fields.ObjectField('InstancePCIRequests',
nullable=True),
'tags': fields.ObjectField('TagList'),
'flavor': fields.ObjectField('Flavor'),
'old_flavor': fields.ObjectField('Flavor', nullable=True),
'new_flavor': fields.ObjectField('Flavor', nullable=True),
'vcpu_model': fields.ObjectField('VirtCPUModel', nullable=True),
'ec2_ids': fields.ObjectField('EC2Ids'),
'migration_context': fields.ObjectField('MigrationContext',
nullable=True),
'keypairs': fields.ObjectField('KeyPairList'),
}
obj_extra_fields = ['name']
def obj_make_compatible(self, primitive, target_version):
super(Instance, self).obj_make_compatible(primitive, target_version)
target_version = versionutils.convert_version_to_tuple(target_version)
if target_version < (2, 2) and 'keypairs' in primitive:
del primitive['keypairs']
if target_version < (2, 1) and 'services' in primitive:
del primitive['services']
def __init__(self, *args, **kwargs):
super(Instance, self).__init__(*args, **kwargs)
self._reset_metadata_tracking()
@property
def image_meta(self):
return objects.ImageMeta.from_instance(self)
def _reset_metadata_tracking(self, fields=None):
if fields is None or 'system_metadata' in fields:
self._orig_system_metadata = (dict(self.system_metadata) if
'system_metadata' in self else {})
if fields is None or 'metadata' in fields:
self._orig_metadata = (dict(self.metadata) if
'metadata' in self else {})
def obj_reset_changes(self, fields=None, recursive=False):
super(Instance, self).obj_reset_changes(fields,
recursive=recursive)
self._reset_metadata_tracking(fields=fields)
def obj_what_changed(self):
changes = super(Instance, self).obj_what_changed()
if 'metadata' in self and self.metadata != self._orig_metadata:
changes.add('metadata')
if 'system_metadata' in self and (self.system_metadata !=
self._orig_system_metadata):
changes.add('system_metadata')
return changes
@classmethod
def _obj_from_primitive(cls, context, objver, primitive):
self = super(Instance, cls)._obj_from_primitive(context, objver,
primitive)
self._reset_metadata_tracking()
return self
@property
def name(self):
try:
base_name = CONF.instance_name_template % self.id
except TypeError:
# Support templates like "uuid-%(uuid)s", etc.
info = {}
# NOTE(russellb): Don't use self.iteritems() here, as it will
# result in infinite recursion on the name property.
for key in self.fields:
if key == 'name':
# NOTE(danms): prevent recursion
continue
elif not self.obj_attr_is_set(key):
# NOTE(danms): Don't trigger lazy-loads
continue
info[key] = self[key]
try:
base_name = CONF.instance_name_template % info
except KeyError:
base_name = self.uuid
return base_name
def _flavor_from_db(self, db_flavor):
"""Load instance flavor information from instance_extra."""
flavor_info = jsonutils.loads(db_flavor)
self.flavor = objects.Flavor.obj_from_primitive(flavor_info['cur'])
if flavor_info['old']:
self.old_flavor = objects.Flavor.obj_from_primitive(
flavor_info['old'])
else:
self.old_flavor = None
if flavor_info['new']:
self.new_flavor = objects.Flavor.obj_from_primitive(
flavor_info['new'])
else:
self.new_flavor = None
self.obj_reset_changes(['flavor', 'old_flavor', 'new_flavor'])
@staticmethod
def _from_db_object(context, instance, db_inst, expected_attrs=None):
"""Method to help with migration to objects.
Converts a database entity to a formal object.
"""
instance._context = context
if expected_attrs is None:
expected_attrs = []
# Most of the field names match right now, so be quick
for field in instance.fields:
if field in INSTANCE_OPTIONAL_ATTRS:
continue
elif field == 'deleted':
instance.deleted = db_inst['deleted'] == db_inst['id']
elif field == 'cleaned':
instance.cleaned = db_inst['cleaned'] == 1
else:
instance[field] = db_inst[field]
# NOTE(danms): We can be called with a dict instead of a
# SQLAlchemy object, so we have to be careful here
if hasattr(db_inst, '__dict__'):
have_extra = 'extra' in db_inst.__dict__ and db_inst['extra']
else:
have_extra = 'extra' in db_inst and db_inst['extra']
if 'metadata' in expected_attrs:
instance['metadata'] = utils.instance_meta(db_inst)
if 'system_metadata' in expected_attrs:
instance['system_metadata'] = utils.instance_sys_meta(db_inst)
if 'fault' in expected_attrs:
instance['fault'] = (
objects.InstanceFault.get_latest_for_instance(
context, instance.uuid))
if 'numa_topology' in expected_attrs:
if have_extra:
instance._load_numa_topology(
db_inst['extra'].get('numa_topology'))
else:
instance.numa_topology = None
if 'pci_requests' in expected_attrs:
if have_extra:
instance._load_pci_requests(
db_inst['extra'].get('pci_requests'))
else:
instance.pci_requests = None
if 'vcpu_model' in expected_attrs:
if have_extra:
instance._load_vcpu_model(
db_inst['extra'].get('vcpu_model'))
else:
instance.vcpu_model = None
if 'ec2_ids' in expected_attrs:
instance._load_ec2_ids()
if 'migration_context' in expected_attrs:
if have_extra:
instance._load_migration_context(
db_inst['extra'].get('migration_context'))
else:
instance.migration_context = None
if 'keypairs' in expected_attrs:
if have_extra:
instance._load_keypairs(db_inst['extra'].get('keypairs'))
if 'info_cache' in expected_attrs:
if db_inst.get('info_cache') is None:
instance.info_cache = None
elif not instance.obj_attr_is_set('info_cache'):
# TODO(danms): If this ever happens on a backlevel instance
# passed to us by a backlevel service, things will break
instance.info_cache = objects.InstanceInfoCache(context)
if instance.info_cache is not None:
instance.info_cache._from_db_object(context,
instance.info_cache,
db_inst['info_cache'])
if any([x in expected_attrs for x in ('flavor',
'old_flavor',
'new_flavor')]):
if have_extra and db_inst['extra'].get('flavor'):
instance._flavor_from_db(db_inst['extra']['flavor'])
# TODO(danms): If we are updating these on a backlevel instance,
# we'll end up sending back new versions of these objects (see
        # above note for new info_caches).
if 'pci_devices' in expected_attrs:
pci_devices = base.obj_make_list(
context, objects.PciDeviceList(context),
objects.PciDevice, db_inst['pci_devices'])
instance['pci_devices'] = pci_devices
if 'security_groups' in expected_attrs:
sec_groups = base.obj_make_list(
context, objects.SecurityGroupList(context),
objects.SecurityGroup, db_inst.get('security_groups', []))
instance['security_groups'] = sec_groups
if 'tags' in expected_attrs:
tags = base.obj_make_list(
context, objects.TagList(context),
objects.Tag, db_inst['tags'])
instance['tags'] = tags
if 'services' in expected_attrs:
services = base.obj_make_list(
context, objects.ServiceList(context),
objects.Service, db_inst['services'])
instance['services'] = services
instance.obj_reset_changes()
return instance
@staticmethod
@db.select_db_reader_mode
def _db_instance_get_by_uuid(context, uuid, columns_to_join,
use_slave=False):
return db.instance_get_by_uuid(context, uuid,
columns_to_join=columns_to_join)
@base.remotable_classmethod
def get_by_uuid(cls, context, uuid, expected_attrs=None, use_slave=False):
if expected_attrs is None:
expected_attrs = ['info_cache', 'security_groups']
columns_to_join = _expected_cols(expected_attrs)
db_inst = cls._db_instance_get_by_uuid(context, uuid, columns_to_join,
use_slave=use_slave)
return cls._from_db_object(context, cls(), db_inst,
expected_attrs)
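    # Illustrative only (not from the original source): callers typically
    # pre-join the attributes they know they will need, to avoid lazy-loads
    # later, e.g.
    #   inst = objects.Instance.get_by_uuid(
    #       ctxt, some_uuid, expected_attrs=['flavor', 'system_metadata'])
    # where ``ctxt`` and ``some_uuid`` are assumed to come from the caller.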
@base.remotable_classmethod
def get_by_id(cls, context, inst_id, expected_attrs=None):
if expected_attrs is None:
expected_attrs = ['info_cache', 'security_groups']
columns_to_join = _expected_cols(expected_attrs)
db_inst = db.instance_get(context, inst_id,
columns_to_join=columns_to_join)
return cls._from_db_object(context, cls(), db_inst,
expected_attrs)
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
updates = self.obj_get_changes()
expected_attrs = [attr for attr in INSTANCE_DEFAULT_FIELDS
if attr in updates]
if 'security_groups' in updates:
updates['security_groups'] = [x.name for x in
updates['security_groups']]
if 'info_cache' in updates:
updates['info_cache'] = {
'network_info': updates['info_cache'].network_info.json()
}
updates['extra'] = {}
numa_topology = updates.pop('numa_topology', None)
expected_attrs.append('numa_topology')
if numa_topology:
updates['extra']['numa_topology'] = numa_topology._to_json()
else:
updates['extra']['numa_topology'] = None
pci_requests = updates.pop('pci_requests', None)
expected_attrs.append('pci_requests')
if pci_requests:
updates['extra']['pci_requests'] = (
pci_requests.to_json())
else:
updates['extra']['pci_requests'] = None
flavor = updates.pop('flavor', None)
if flavor:
expected_attrs.append('flavor')
old = ((self.obj_attr_is_set('old_flavor') and
self.old_flavor) and
self.old_flavor.obj_to_primitive() or None)
new = ((self.obj_attr_is_set('new_flavor') and
self.new_flavor) and
self.new_flavor.obj_to_primitive() or None)
flavor_info = {
'cur': self.flavor.obj_to_primitive(),
'old': old,
'new': new,
}
updates['extra']['flavor'] = jsonutils.dumps(flavor_info)
keypairs = updates.pop('keypairs', None)
if keypairs is not None:
expected_attrs.append('keypairs')
updates['extra']['keypairs'] = jsonutils.dumps(
keypairs.obj_to_primitive())
vcpu_model = updates.pop('vcpu_model', None)
expected_attrs.append('vcpu_model')
if vcpu_model:
updates['extra']['vcpu_model'] = (
jsonutils.dumps(vcpu_model.obj_to_primitive()))
else:
updates['extra']['vcpu_model'] = None
db_inst = db.instance_create(self._context, updates)
self._from_db_object(self._context, self, db_inst, expected_attrs)
# NOTE(danms): The EC2 ids are created on their first load. In order
# to avoid them being missing and having to be loaded later, we
# load them once here on create now that the instance record is
# created.
self._load_ec2_ids()
self.obj_reset_changes(['ec2_ids'])
@base.remotable
def destroy(self):
if not self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='destroy',
reason='already destroyed')
if not self.obj_attr_is_set('uuid'):
raise exception.ObjectActionError(action='destroy',
reason='no uuid')
if not self.obj_attr_is_set('host') or not self.host:
# NOTE(danms): If our host is not set, avoid a race
constraint = db.constraint(host=db.equal_any(None))
else:
constraint = None
cell_type = cells_opts.get_cell_type()
if cell_type is not None:
stale_instance = self.obj_clone()
try:
db_inst = db.instance_destroy(self._context, self.uuid,
constraint=constraint)
self._from_db_object(self._context, self, db_inst)
except exception.ConstraintNotMet:
raise exception.ObjectActionError(action='destroy',
reason='host changed')
if cell_type == 'compute':
cells_api = cells_rpcapi.CellsAPI()
cells_api.instance_destroy_at_top(self._context, stale_instance)
delattr(self, base.get_attrname('id'))
def _save_info_cache(self, context):
if self.info_cache:
with self.info_cache.obj_alternate_context(context):
self.info_cache.save()
def _save_security_groups(self, context):
security_groups = self.security_groups or []
for secgroup in security_groups:
with secgroup.obj_alternate_context(context):
secgroup.save()
self.security_groups.obj_reset_changes()
def _save_fault(self, context):
# NOTE(danms): I don't think we need to worry about this, do we?
pass
def _save_pci_requests(self, context):
# NOTE(danms): No need for this yet.
pass
def _save_pci_devices(self, context):
        # NOTE(yjiang5): All devices are held by the PCI tracker, and only the
        # PCI tracker is permitted to update the DB. Any change made to the
        # devices from here will be dropped.
pass
def _save_flavor(self, context):
if not any([x in self.obj_what_changed() for x in
('flavor', 'old_flavor', 'new_flavor')]):
return
flavor_info = {
'cur': self.flavor.obj_to_primitive(),
'old': (self.old_flavor and
self.old_flavor.obj_to_primitive() or None),
'new': (self.new_flavor and
self.new_flavor.obj_to_primitive() or None),
}
self._extra_values_to_save['flavor'] = jsonutils.dumps(flavor_info)
self.obj_reset_changes(['flavor', 'old_flavor', 'new_flavor'])
def _save_old_flavor(self, context):
if 'old_flavor' in self.obj_what_changed():
self._save_flavor(context)
def _save_new_flavor(self, context):
if 'new_flavor' in self.obj_what_changed():
self._save_flavor(context)
def _save_ec2_ids(self, context):
# NOTE(hanlind): Read-only so no need to save this.
pass
def _save_keypairs(self, context):
# NOTE(danms): Read-only so no need to save this.
pass
def _save_extra_generic(self, field):
if field in self.obj_what_changed():
obj = getattr(self, field)
value = None
if obj is not None:
value = jsonutils.dumps(obj.obj_to_primitive())
self._extra_values_to_save[field] = value
@base.remotable
def save(self, expected_vm_state=None,
expected_task_state=None, admin_state_reset=False):
"""Save updates to this instance
Column-wise updates will be made based on the result of
self.what_changed(). If expected_task_state is provided,
it will be checked against the in-database copy of the
instance before updates are made.
        :param context: Security context
        :param expected_task_state: Optional tuple of valid task states
                                    for the instance to be in
        :param expected_vm_state: Optional tuple of valid vm states
                                  for the instance to be in
:param admin_state_reset: True if admin API is forcing setting
of task_state/vm_state
"""
# Store this on the class because _cell_name_blocks_sync is useless
# after the db update call below.
self._sync_cells = not self._cell_name_blocks_sync()
context = self._context
cell_type = cells_opts.get_cell_type()
if cell_type is not None:
# NOTE(comstud): We need to stash a copy of ourselves
# before any updates are applied. When we call the save
# methods on nested objects, we will lose any changes to
# them. But we need to make sure child cells can tell
# what is changed.
#
# We also need to nuke any updates to vm_state and task_state
# unless admin_state_reset is True. compute cells are
# authoritative for their view of vm_state and task_state.
stale_instance = self.obj_clone()
cells_update_from_api = (cell_type == 'api' and self.cell_name and
self._sync_cells)
if cells_update_from_api:
def _handle_cell_update_from_api():
cells_api = cells_rpcapi.CellsAPI()
cells_api.instance_update_from_api(context, stale_instance,
expected_vm_state,
expected_task_state,
admin_state_reset)
self._extra_values_to_save = {}
updates = {}
changes = self.obj_what_changed()
for field in self.fields:
# NOTE(danms): For object fields, we construct and call a
# helper method like self._save_$attrname()
if (self.obj_attr_is_set(field) and
isinstance(self.fields[field], fields.ObjectField)):
try:
getattr(self, '_save_%s' % field)(context)
except AttributeError:
if field in _INSTANCE_EXTRA_FIELDS:
self._save_extra_generic(field)
continue
LOG.exception(_LE('No save handler for %s'), field,
instance=self)
except db_exc.DBReferenceError as exp:
if exp.key != 'instance_uuid':
raise
# NOTE(melwitt): This will happen if we instance.save()
# before an instance.create() and FK constraint fails.
# In practice, this occurs in cells during a delete of
# an unscheduled instance. Otherwise, it could happen
                    # as a result of a bug.
raise exception.InstanceNotFound(instance_id=self.uuid)
elif field in changes:
if (field == 'cell_name' and self[field] is not None and
self[field].startswith(cells_utils.BLOCK_SYNC_FLAG)):
updates[field] = self[field].replace(
cells_utils.BLOCK_SYNC_FLAG, '', 1)
else:
updates[field] = self[field]
if self._extra_values_to_save:
db.instance_extra_update_by_uuid(context, self.uuid,
self._extra_values_to_save)
if not updates:
if cells_update_from_api:
_handle_cell_update_from_api()
return
# Cleaned needs to be turned back into an int here
if 'cleaned' in updates:
if updates['cleaned']:
updates['cleaned'] = 1
else:
updates['cleaned'] = 0
if expected_task_state is not None:
updates['expected_task_state'] = expected_task_state
if expected_vm_state is not None:
updates['expected_vm_state'] = expected_vm_state
expected_attrs = [attr for attr in _INSTANCE_OPTIONAL_JOINED_FIELDS
if self.obj_attr_is_set(attr)]
if 'pci_devices' in expected_attrs:
# NOTE(danms): We don't refresh pci_devices on save right now
expected_attrs.remove('pci_devices')
# NOTE(alaski): We need to pull system_metadata for the
        # notification.send_update() below. If we don't, there's a KeyError
# when it tries to extract the flavor.
# NOTE(danms): If we have sysmeta, we need flavor since the caller
# might be expecting flavor information as a result
if 'system_metadata' not in expected_attrs:
expected_attrs.append('system_metadata')
expected_attrs.append('flavor')
old_ref, inst_ref = db.instance_update_and_get_original(
context, self.uuid, updates,
columns_to_join=_expected_cols(expected_attrs))
self._from_db_object(context, self, inst_ref,
expected_attrs=expected_attrs)
if cells_update_from_api:
_handle_cell_update_from_api()
elif cell_type == 'compute':
if self._sync_cells:
cells_api = cells_rpcapi.CellsAPI()
cells_api.instance_update_at_top(context, stale_instance)
def _notify():
# NOTE(danms): We have to be super careful here not to trigger
# any lazy-loads that will unmigrate or unbackport something. So,
# make a copy of the instance for notifications first.
new_ref = self.obj_clone()
notifications.send_update(context, old_ref, new_ref)
# NOTE(alaski): If cell synchronization is blocked it means we have
# already run this block of code in either the parent or child of this
# cell. Therefore this notification has already been sent.
if not self._sync_cells:
_notify = lambda: None # noqa: F811
_notify()
self.obj_reset_changes()
@base.remotable
def refresh(self, use_slave=False):
extra = [field for field in INSTANCE_OPTIONAL_ATTRS
if self.obj_attr_is_set(field)]
current = self.__class__.get_by_uuid(self._context, uuid=self.uuid,
expected_attrs=extra,
use_slave=use_slave)
# NOTE(danms): We orphan the instance copy so we do not unexpectedly
# trigger a lazy-load (which would mean we failed to calculate the
# expected_attrs properly)
current._context = None
for field in self.fields:
if self.obj_attr_is_set(field):
if field == 'info_cache':
self.info_cache.refresh()
elif self[field] != current[field]:
self[field] = current[field]
self.obj_reset_changes()
def _load_generic(self, attrname):
instance = self.__class__.get_by_uuid(self._context,
uuid=self.uuid,
expected_attrs=[attrname])
# NOTE(danms): Never allow us to recursively-load
if instance.obj_attr_is_set(attrname):
self[attrname] = instance[attrname]
else:
raise exception.ObjectActionError(
action='obj_load_attr',
reason='loading %s requires recursion' % attrname)
def _load_fault(self):
self.fault = objects.InstanceFault.get_latest_for_instance(
self._context, self.uuid)
def _load_numa_topology(self, db_topology=None):
if db_topology is not None:
self.numa_topology = \
objects.InstanceNUMATopology.obj_from_db_obj(self.uuid,
db_topology)
else:
try:
self.numa_topology = \
objects.InstanceNUMATopology.get_by_instance_uuid(
self._context, self.uuid)
except exception.NumaTopologyNotFound:
self.numa_topology = None
def _load_pci_requests(self, db_requests=None):
# FIXME: also do this if none!
if db_requests is not None:
self.pci_requests = objects.InstancePCIRequests.obj_from_db(
self._context, self.uuid, db_requests)
else:
self.pci_requests = \
objects.InstancePCIRequests.get_by_instance_uuid(
self._context, self.uuid)
def _load_flavor(self):
instance = self.__class__.get_by_uuid(
self._context, uuid=self.uuid,
expected_attrs=['flavor', 'system_metadata'])
# NOTE(danms): Orphan the instance to make sure we don't lazy-load
# anything below
instance._context = None
self.flavor = instance.flavor
self.old_flavor = instance.old_flavor
self.new_flavor = instance.new_flavor
# NOTE(danms): The query above may have migrated the flavor from
# system_metadata. Since we have it anyway, go ahead and refresh
# our system_metadata from it so that a save will be accurate.
instance.system_metadata.update(self.get('system_metadata', {}))
self.system_metadata = instance.system_metadata
def _load_vcpu_model(self, db_vcpu_model=None):
if db_vcpu_model is None:
self.vcpu_model = objects.VirtCPUModel.get_by_instance_uuid(
self._context, self.uuid)
else:
db_vcpu_model = jsonutils.loads(db_vcpu_model)
self.vcpu_model = objects.VirtCPUModel.obj_from_primitive(
db_vcpu_model)
def _load_ec2_ids(self):
self.ec2_ids = objects.EC2Ids.get_by_instance(self._context, self)
def _load_security_groups(self):
self.security_groups = objects.SecurityGroupList.get_by_instance(
self._context, self)
def _load_pci_devices(self):
self.pci_devices = objects.PciDeviceList.get_by_instance_uuid(
self._context, self.uuid)
def _load_migration_context(self, db_context=_NO_DATA_SENTINEL):
if db_context is _NO_DATA_SENTINEL:
try:
self.migration_context = (
objects.MigrationContext.get_by_instance_uuid(
self._context, self.uuid))
except exception.MigrationContextNotFound:
self.migration_context = None
elif db_context is None:
self.migration_context = None
else:
self.migration_context = objects.MigrationContext.obj_from_db_obj(
db_context)
def _load_keypairs(self, db_keypairs=_NO_DATA_SENTINEL):
if db_keypairs is _NO_DATA_SENTINEL:
inst = objects.Instance.get_by_uuid(self._context, self.uuid,
expected_attrs=['keypairs'])
if 'keypairs' in inst:
self.keypairs = inst.keypairs
self.keypairs.obj_reset_changes(recursive=True)
self.obj_reset_changes(['keypairs'])
return
# NOTE(danms): We need to load from the old location by name
# if we don't have them in extra. Only do this from the main
# database as instances were created with keypairs in extra
# before keypairs were moved to the api database.
self.keypairs = objects.KeyPairList(objects=[])
try:
key = objects.KeyPair.get_by_name(self._context,
self.user_id,
self.key_name,
localonly=True)
self.keypairs.objects.append(key)
except exception.KeypairNotFound:
pass
# NOTE(danms): If we loaded from legacy, we leave the keypairs
# attribute dirty in hopes someone else will save it for us
elif db_keypairs:
self.keypairs = objects.KeyPairList.obj_from_primitive(
jsonutils.loads(db_keypairs))
self.obj_reset_changes(['keypairs'])
def apply_migration_context(self):
if self.migration_context:
self.numa_topology = self.migration_context.new_numa_topology
else:
LOG.debug("Trying to apply a migration context that does not "
"seem to be set for this instance", instance=self)
def revert_migration_context(self):
if self.migration_context:
self.numa_topology = self.migration_context.old_numa_topology
else:
LOG.debug("Trying to revert a migration context that does not "
"seem to be set for this instance", instance=self)
@contextlib.contextmanager
def mutated_migration_context(self):
"""Context manager to temporarily apply the migration context.
Calling .save() from within the context manager means that the mutated
context will be saved which can cause incorrect resource tracking, and
should be avoided.
"""
current_numa_topo = self.numa_topology
self.apply_migration_context()
try:
yield
finally:
self.numa_topology = current_numa_topo
@base.remotable
def drop_migration_context(self):
if self.migration_context:
db.instance_extra_update_by_uuid(self._context, self.uuid,
{'migration_context': None})
self.migration_context = None
def clear_numa_topology(self):
numa_topology = self.numa_topology
if numa_topology is not None:
self.numa_topology = numa_topology.clear_host_pinning()
def obj_load_attr(self, attrname):
if attrname not in INSTANCE_OPTIONAL_ATTRS:
raise exception.ObjectActionError(
action='obj_load_attr',
reason='attribute %s not lazy-loadable' % attrname)
if not self._context:
raise exception.OrphanedObjectError(method='obj_load_attr',
objtype=self.obj_name())
LOG.debug("Lazy-loading '%(attr)s' on %(name)s uuid %(uuid)s",
{'attr': attrname,
'name': self.obj_name(),
'uuid': self.uuid,
})
# NOTE(danms): We handle some fields differently here so that we
# can be more efficient
if attrname == 'fault':
self._load_fault()
elif attrname == 'numa_topology':
self._load_numa_topology()
elif attrname == 'pci_requests':
self._load_pci_requests()
elif attrname == 'vcpu_model':
self._load_vcpu_model()
elif attrname == 'ec2_ids':
self._load_ec2_ids()
elif attrname == 'migration_context':
self._load_migration_context()
elif attrname == 'keypairs':
# NOTE(danms): Let keypairs control its own destiny for
# resetting changes.
return self._load_keypairs()
elif attrname == 'security_groups':
self._load_security_groups()
elif attrname == 'pci_devices':
self._load_pci_devices()
elif 'flavor' in attrname:
self._load_flavor()
elif attrname == 'services' and self.deleted:
# NOTE(mriedem): The join in the data model for instances.services
# filters on instances.deleted == 0, so if the instance is deleted
# don't attempt to even load services since we'll fail.
self.services = objects.ServiceList(self._context)
else:
# FIXME(comstud): This should be optimized to only load the attr.
self._load_generic(attrname)
self.obj_reset_changes([attrname])
def get_flavor(self, namespace=None):
prefix = ('%s_' % namespace) if namespace is not None else ''
attr = '%sflavor' % prefix
try:
return getattr(self, attr)
except exception.FlavorNotFound:
# NOTE(danms): This only happens in the case where we don't
# have flavor information in sysmeta or extra, and doing
# this triggers a lookup based on our instance_type_id for
# (very) legacy instances. That legacy code expects a None here,
# so emulate it for this helper, even though the actual attribute
# is not nullable.
return None
@base.remotable
def delete_metadata_key(self, key):
"""Optimized metadata delete method.
This provides a more efficient way to delete a single metadata
key, instead of just calling instance.save(). This should be called
with the key still present in self.metadata, which it will update
after completion.
"""
db.instance_metadata_delete(self._context, self.uuid, key)
md_was_changed = 'metadata' in self.obj_what_changed()
del self.metadata[key]
self._orig_metadata.pop(key, None)
notifications.send_update(self._context, self, self)
if not md_was_changed:
self.obj_reset_changes(['metadata'])
def _cell_name_blocks_sync(self):
if (self.obj_attr_is_set('cell_name') and
self.cell_name is not None and
self.cell_name.startswith(cells_utils.BLOCK_SYNC_FLAG)):
return True
return False
def _normalize_cell_name(self):
"""Undo skip_cell_sync()'s cell_name modification if applied"""
if not self.obj_attr_is_set('cell_name') or self.cell_name is None:
return
cn_changed = 'cell_name' in self.obj_what_changed()
if self.cell_name.startswith(cells_utils.BLOCK_SYNC_FLAG):
self.cell_name = self.cell_name.replace(
cells_utils.BLOCK_SYNC_FLAG, '', 1)
            # cell_name is not normally an empty string; this means it was None
# or unset before cells_utils.BLOCK_SYNC_FLAG was applied.
if len(self.cell_name) == 0:
self.cell_name = None
if not cn_changed:
self.obj_reset_changes(['cell_name'])
@contextlib.contextmanager
def skip_cells_sync(self):
"""Context manager to save an instance without syncing cells.
Temporarily disables the cells syncing logic, if enabled. This should
only be used when saving an instance that has been passed down/up from
another cell in order to avoid passing it back to the originator to be
re-saved.
"""
cn_changed = 'cell_name' in self.obj_what_changed()
if not self.obj_attr_is_set('cell_name') or self.cell_name is None:
self.cell_name = ''
self.cell_name = '%s%s' % (cells_utils.BLOCK_SYNC_FLAG, self.cell_name)
if not cn_changed:
self.obj_reset_changes(['cell_name'])
try:
yield
finally:
self._normalize_cell_name()
def _make_instance_list(context, inst_list, db_inst_list, expected_attrs):
get_fault = expected_attrs and 'fault' in expected_attrs
inst_faults = {}
if get_fault:
# Build an instance_uuid:latest-fault mapping
expected_attrs.remove('fault')
instance_uuids = [inst['uuid'] for inst in db_inst_list]
faults = objects.InstanceFaultList.get_by_instance_uuids(
context, instance_uuids)
for fault in faults:
if fault.instance_uuid not in inst_faults:
inst_faults[fault.instance_uuid] = fault
inst_cls = objects.Instance
inst_list.objects = []
for db_inst in db_inst_list:
inst_obj = inst_cls._from_db_object(
context, inst_cls(context), db_inst,
expected_attrs=expected_attrs)
if get_fault:
inst_obj.fault = inst_faults.get(inst_obj.uuid, None)
inst_list.objects.append(inst_obj)
inst_list.obj_reset_changes()
return inst_list
@base.NovaObjectRegistry.register
class InstanceList(base.ObjectListBase, base.NovaObject):
# Version 2.0: Initial Version
VERSION = '2.0'
fields = {
'objects': fields.ListOfObjectsField('Instance'),
}
@classmethod
@db.select_db_reader_mode
def _get_by_filters_impl(cls, context, filters,
sort_key='created_at', sort_dir='desc', limit=None,
marker=None, expected_attrs=None, use_slave=False,
sort_keys=None, sort_dirs=None):
if sort_keys or sort_dirs:
db_inst_list = db.instance_get_all_by_filters_sort(
context, filters, limit=limit, marker=marker,
columns_to_join=_expected_cols(expected_attrs),
sort_keys=sort_keys, sort_dirs=sort_dirs)
else:
db_inst_list = db.instance_get_all_by_filters(
context, filters, sort_key, sort_dir, limit=limit,
marker=marker, columns_to_join=_expected_cols(expected_attrs))
return _make_instance_list(context, cls(), db_inst_list,
expected_attrs)
@base.remotable_classmethod
def get_by_filters(cls, context, filters,
sort_key='created_at', sort_dir='desc', limit=None,
marker=None, expected_attrs=None, use_slave=False,
sort_keys=None, sort_dirs=None):
return cls._get_by_filters_impl(
context, filters, sort_key=sort_key, sort_dir=sort_dir,
limit=limit, marker=marker, expected_attrs=expected_attrs,
use_slave=use_slave, sort_keys=sort_keys, sort_dirs=sort_dirs)
@staticmethod
@db.select_db_reader_mode
def _db_instance_get_all_by_host(context, host, columns_to_join,
use_slave=False):
return db.instance_get_all_by_host(context, host,
columns_to_join=columns_to_join)
@base.remotable_classmethod
def get_by_host(cls, context, host, expected_attrs=None, use_slave=False):
db_inst_list = cls._db_instance_get_all_by_host(
context, host, columns_to_join=_expected_cols(expected_attrs),
use_slave=use_slave)
return _make_instance_list(context, cls(), db_inst_list,
expected_attrs)
@base.remotable_classmethod
def get_by_host_and_node(cls, context, host, node, expected_attrs=None):
db_inst_list = db.instance_get_all_by_host_and_node(
context, host, node,
columns_to_join=_expected_cols(expected_attrs))
return _make_instance_list(context, cls(), db_inst_list,
expected_attrs)
@base.remotable_classmethod
def get_by_host_and_not_type(cls, context, host, type_id=None,
expected_attrs=None):
db_inst_list = db.instance_get_all_by_host_and_not_type(
context, host, type_id=type_id)
return _make_instance_list(context, cls(), db_inst_list,
expected_attrs)
@base.remotable_classmethod
def get_all(cls, context, expected_attrs=None):
"""Returns all instances on all nodes."""
db_instances = db.instance_get_all(
context, columns_to_join=_expected_cols(expected_attrs))
return _make_instance_list(context, cls(), db_instances,
expected_attrs)
@base.remotable_classmethod
def get_hung_in_rebooting(cls, context, reboot_window,
expected_attrs=None):
db_inst_list = db.instance_get_all_hung_in_rebooting(context,
reboot_window)
return _make_instance_list(context, cls(), db_inst_list,
expected_attrs)
@staticmethod
@db.select_db_reader_mode
def _db_instance_get_active_by_window_joined(
context, begin, end, project_id, host, columns_to_join,
use_slave=False):
return db.instance_get_active_by_window_joined(
context, begin, end, project_id, host,
columns_to_join=columns_to_join)
@base.remotable_classmethod
def _get_active_by_window_joined(cls, context, begin, end=None,
project_id=None, host=None,
expected_attrs=None,
use_slave=False):
# NOTE(mriedem): We need to convert the begin/end timestamp strings
# to timezone-aware datetime objects for the DB API call.
begin = timeutils.parse_isotime(begin)
end = timeutils.parse_isotime(end) if end else None
db_inst_list = cls._db_instance_get_active_by_window_joined(
context, begin, end, project_id, host,
columns_to_join=_expected_cols(expected_attrs),
use_slave=use_slave)
return _make_instance_list(context, cls(), db_inst_list,
expected_attrs)
@classmethod
def get_active_by_window_joined(cls, context, begin, end=None,
project_id=None, host=None,
expected_attrs=None,
use_slave=False):
"""Get instances and joins active during a certain time window.
        :param context: nova request context
        :param begin: datetime for the start of the time window
        :param end: datetime for the end of the time window
        :param project_id: used to filter instances by project
        :param host: used to filter instances on a given compute host
        :param expected_attrs: list of related fields that can be joined
            in the database layer when querying for instances
        :param use_slave: if True, ship this query off to a DB slave
:returns: InstanceList
"""
# NOTE(mriedem): We have to convert the datetime objects to string
# primitives for the remote call.
begin = utils.isotime(begin)
end = utils.isotime(end) if end else None
return cls._get_active_by_window_joined(context, begin, end,
project_id, host,
expected_attrs,
use_slave=use_slave)
@base.remotable_classmethod
def get_by_security_group_id(cls, context, security_group_id):
db_secgroup = db.security_group_get(
context, security_group_id,
columns_to_join=['instances.info_cache',
'instances.system_metadata'])
return _make_instance_list(context, cls(), db_secgroup['instances'],
['info_cache', 'system_metadata'])
@classmethod
def get_by_security_group(cls, context, security_group):
return cls.get_by_security_group_id(context, security_group.id)
@base.remotable_classmethod
def get_by_grantee_security_group_ids(cls, context, security_group_ids):
db_instances = db.instance_get_all_by_grantee_security_groups(
context, security_group_ids)
return _make_instance_list(context, cls(), db_instances, [])
def fill_faults(self):
"""Batch query the database for our instances' faults.
:returns: A list of instance uuids for which faults were found.
"""
uuids = [inst.uuid for inst in self]
faults = objects.InstanceFaultList.get_by_instance_uuids(
self._context, uuids)
faults_by_uuid = {}
for fault in faults:
if fault.instance_uuid not in faults_by_uuid:
faults_by_uuid[fault.instance_uuid] = fault
for instance in self:
if instance.uuid in faults_by_uuid:
instance.fault = faults_by_uuid[instance.uuid]
else:
# NOTE(danms): Otherwise the caller will cause a lazy-load
# when checking it, and we know there are none
instance.fault = None
instance.obj_reset_changes(['fault'])
return faults_by_uuid.keys()
@db_api.main_context_manager.writer
def _migrate_instance_keypairs(ctxt, count):
db_extras = ctxt.session.query(models.InstanceExtra).\
options(joinedload('instance')).\
filter_by(keypairs=None).\
filter_by(deleted=0).\
limit(count).\
all()
count_all = len(db_extras)
count_hit = 0
for db_extra in db_extras:
key_name = db_extra.instance.key_name
keypairs = objects.KeyPairList(objects=[])
if key_name:
try:
key = objects.KeyPair.get_by_name(ctxt,
db_extra.instance.user_id,
key_name)
keypairs.objects.append(key)
except exception.KeypairNotFound:
LOG.warning(
_LW('Instance %(uuid)s keypair %(keyname)s not found'),
{'uuid': db_extra.instance_uuid, 'keyname': key_name})
db_extra.keypairs = jsonutils.dumps(keypairs.obj_to_primitive())
db_extra.save(ctxt.session)
count_hit += 1
return count_all, count_hit
def migrate_instance_keypairs(ctxt, count):
return _migrate_instance_keypairs(ctxt, count)
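# Illustrative only (not from the original source): this is the kind of hook an
# online data migration runner would call in batches, e.g.
#   migrate_instance_keypairs(ctxt, 50)
# which returns (rows examined, rows migrated) for that batch.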
| apache-2.0 |
chrisfilda/edx_platform | common/lib/xmodule/xmodule/error_module.py | 17 | 6742 | """
Modules that get shown to the users when an error has occurred while
loading or rendering other modules
"""
import hashlib
import logging
import json
import sys
from lxml import etree
from xmodule.x_module import XModule, XModuleDescriptor
from xmodule.errortracker import exc_info_to_str
from xmodule.modulestore import Location
from xblock.fields import String, Scope, ScopeIds
from xblock.field_data import DictFieldData
log = logging.getLogger(__name__)
# NOTE: This is not the most beautiful design in the world, but there's no good
# way to tell if the module is being used in a staff context or not. Errors that get discovered
# at course load time are turned into ErrorDescriptor objects, and automatically hidden from students.
# Unfortunately, we can also have errors when loading modules mid-request, and then we need to decide
# what to show, and the logic for that belongs in the LMS (e.g. in get_module), so the error handler
# decides whether to create a staff or not-staff module.
class ErrorFields(object):
"""
XBlock fields used by the ErrorModules
"""
contents = String(scope=Scope.content)
error_msg = String(scope=Scope.content)
display_name = String(scope=Scope.settings)
class ErrorModule(ErrorFields, XModule):
"""
Module that gets shown to staff when there has been an error while
loading or rendering other modules
"""
def get_html(self):
'''Show an error to staff.
TODO (vshnayder): proper style, divs, etc.
'''
# staff get to see all the details
return self.system.render_template('module-error.html', {
'staff_access': True,
'data': self.contents,
'error': self.error_msg,
})
class NonStaffErrorModule(ErrorFields, XModule):
"""
Module that gets shown to students when there has been an error while
loading or rendering other modules
"""
def get_html(self):
'''Show an error to a student.
TODO (vshnayder): proper style, divs, etc.
'''
        # students do not get to see any of the details
return self.system.render_template('module-error.html', {
'staff_access': False,
'data': "",
'error': "",
})
class ErrorDescriptor(ErrorFields, XModuleDescriptor):
"""
Module that provides a raw editing view of broken xml.
"""
module_class = ErrorModule
def get_html(self):
return u''
@classmethod
def _construct(cls, system, contents, error_msg, location):
location = Location(location)
if error_msg is None:
# this string is not marked for translation because we don't have
# access to the user context, and this will only be seen by staff
error_msg = 'Error not available'
if location.category == 'error':
location = location.replace(
# Pick a unique url_name -- the sha1 hash of the contents.
# NOTE: We could try to pull out the url_name of the errored descriptor,
# but url_names aren't guaranteed to be unique between descriptor types,
# and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
# it will be written out with the original url_name.
name=hashlib.sha1(contents.encode('utf8')).hexdigest()
)
# real metadata stays in the content, but add a display name
field_data = DictFieldData({
'error_msg': str(error_msg),
'contents': contents,
'location': location,
'category': 'error'
})
return system.construct_xblock_from_class(
cls,
# The error module doesn't use scoped data, and thus doesn't need
# real scope keys
ScopeIds('error', None, location, location),
field_data,
)
def get_context(self):
return {
'module': self,
'data': self.contents,
}
@classmethod
def from_json(cls, json_data, system, location, error_msg='Error not available'):
return cls._construct(
system,
json.dumps(json_data, skipkeys=False, indent=4),
error_msg,
location=location
)
@classmethod
def from_descriptor(cls, descriptor, error_msg=None):
return cls._construct(
descriptor.runtime,
str(descriptor),
error_msg,
location=descriptor.location,
)
@classmethod
def from_xml(cls, xml_data, system, id_generator, # pylint: disable=arguments-differ
error_msg=None):
'''Create an instance of this descriptor from the supplied data.
Does not require that xml_data be parseable--just stores it and exports
as-is if not.
Takes an extra, optional, parameter--the error that caused an
issue. (should be a string, or convert usefully into one).
'''
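        # Illustrative only (not from the original source): a course loader
        # might wrap a snippet that failed to parse, e.g.
        #   ErrorDescriptor.from_xml(broken_xml, system, id_generator,
        #                            error_msg='XMLSyntaxError: ...')
        # where ``broken_xml`` is whatever raw string could not be loaded.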
try:
# If this is already an error tag, don't want to re-wrap it.
xml_obj = etree.fromstring(xml_data)
if xml_obj.tag == 'error':
xml_data = xml_obj.text
error_node = xml_obj.find('error_msg')
if error_node is not None:
error_msg = error_node.text
else:
error_msg = None
except etree.XMLSyntaxError:
# Save the error to display later--overrides other problems
error_msg = exc_info_to_str(sys.exc_info())
return cls._construct(system, xml_data, error_msg, location=id_generator.create_definition('error'))
def export_to_xml(self, resource_fs):
'''
If the definition data is invalid xml, export it wrapped in an "error"
tag. If it is valid, export without the wrapper.
NOTE: There may still be problems with the valid xml--it could be
missing required attributes, could have the wrong tags, refer to missing
files, etc. That would just get re-wrapped on import.
'''
try:
xml = etree.fromstring(self.contents)
return etree.tostring(xml, encoding='unicode')
except etree.XMLSyntaxError:
# still not valid.
root = etree.Element('error')
root.text = self.contents
err_node = etree.SubElement(root, 'error_msg')
err_node.text = self.error_msg
return etree.tostring(root, encoding='unicode')
class NonStaffErrorDescriptor(ErrorDescriptor):
"""
Module that provides non-staff error messages.
"""
module_class = NonStaffErrorModule
| agpl-3.0 |
gezb/osmc | package/mediacenter-addon-osmc/src/script.module.xmltodict/lib/xmltodict.py | 53 | 12257 | #!/usr/bin/env python
"Makes working with XML feel like you are working with JSON"
from xml.parsers import expat
from xml.sax.saxutils import XMLGenerator
from xml.sax.xmlreader import AttributesImpl
try: # pragma no cover
from cStringIO import StringIO
except ImportError: # pragma no cover
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try: # pragma no cover
from collections import OrderedDict
except ImportError: # pragma no cover
try:
from ordereddict import OrderedDict
except ImportError:
OrderedDict = dict
try: # pragma no cover
_basestring = basestring
except NameError: # pragma no cover
_basestring = str
try: # pragma no cover
_unicode = unicode
except NameError: # pragma no cover
_unicode = str
__author__ = 'Martin Blech'
__version__ = '0.9.0'
__license__ = 'MIT'
class ParsingInterrupted(Exception):
pass
class _DictSAXHandler(object):
def __init__(self,
item_depth=0,
item_callback=lambda *args: True,
xml_attribs=True,
attr_prefix='@',
cdata_key='#text',
force_cdata=False,
cdata_separator='',
postprocessor=None,
dict_constructor=OrderedDict,
strip_whitespace=True,
namespace_separator=':',
namespaces=None):
self.path = []
self.stack = []
self.data = None
self.item = None
self.item_depth = item_depth
self.xml_attribs = xml_attribs
self.item_callback = item_callback
self.attr_prefix = attr_prefix
self.cdata_key = cdata_key
self.force_cdata = force_cdata
self.cdata_separator = cdata_separator
self.postprocessor = postprocessor
self.dict_constructor = dict_constructor
self.strip_whitespace = strip_whitespace
self.namespace_separator = namespace_separator
self.namespaces = namespaces
def _build_name(self, full_name):
if not self.namespaces:
return full_name
i = full_name.rfind(self.namespace_separator)
if i == -1:
return full_name
namespace, name = full_name[:i], full_name[i+1:]
short_namespace = self.namespaces.get(namespace, namespace)
if not short_namespace:
return name
else:
return self.namespace_separator.join((short_namespace, name))
def _attrs_to_dict(self, attrs):
if isinstance(attrs, dict):
return attrs
return self.dict_constructor(zip(attrs[0::2], attrs[1::2]))
def startElement(self, full_name, attrs):
name = self._build_name(full_name)
attrs = self._attrs_to_dict(attrs)
self.path.append((name, attrs or None))
if len(self.path) > self.item_depth:
self.stack.append((self.item, self.data))
if self.xml_attribs:
attrs = self.dict_constructor(
(self.attr_prefix+key, value)
for (key, value) in attrs.items())
else:
attrs = None
self.item = attrs or None
self.data = None
def endElement(self, full_name):
name = self._build_name(full_name)
if len(self.path) == self.item_depth:
item = self.item
if item is None:
item = self.data
should_continue = self.item_callback(self.path, item)
if not should_continue:
raise ParsingInterrupted()
if len(self.stack):
item, data = self.item, self.data
self.item, self.data = self.stack.pop()
if self.strip_whitespace and data is not None:
data = data.strip() or None
if data and self.force_cdata and item is None:
item = self.dict_constructor()
if item is not None:
if data:
self.push_data(item, self.cdata_key, data)
self.item = self.push_data(self.item, name, item)
else:
self.item = self.push_data(self.item, name, data)
else:
self.item = self.data = None
self.path.pop()
def characters(self, data):
if not self.data:
self.data = data
else:
self.data += self.cdata_separator + data
def push_data(self, item, key, data):
if self.postprocessor is not None:
result = self.postprocessor(self.path, key, data)
if result is None:
return item
key, data = result
if item is None:
item = self.dict_constructor()
try:
value = item[key]
if isinstance(value, list):
value.append(data)
else:
item[key] = [value, data]
except KeyError:
item[key] = data
return item
def parse(xml_input, encoding=None, expat=expat, process_namespaces=False,
namespace_separator=':', **kwargs):
"""Parse the given XML input and convert it into a dictionary.
`xml_input` can either be a `string` or a file-like object.
If `xml_attribs` is `True`, element attributes are put in the dictionary
among regular child elements, using `@` as a prefix to avoid collisions. If
set to `False`, they are just ignored.
Simple example::
>>> import xmltodict
>>> doc = xmltodict.parse(\"\"\"
... <a prop="x">
... <b>1</b>
... <b>2</b>
... </a>
... \"\"\")
>>> doc['a']['@prop']
u'x'
>>> doc['a']['b']
[u'1', u'2']
If `item_depth` is `0`, the function returns a dictionary for the root
element (default behavior). Otherwise, it calls `item_callback` every time
an item at the specified depth is found and returns `None` in the end
(streaming mode).
The callback function receives two parameters: the `path` from the document
root to the item (name-attribs pairs), and the `item` (dict). If the
callback's return value is false-ish, parsing will be stopped with the
:class:`ParsingInterrupted` exception.
Streaming example::
>>> def handle(path, item):
... print 'path:%s item:%s' % (path, item)
... return True
...
>>> xmltodict.parse(\"\"\"
... <a prop="x">
... <b>1</b>
... <b>2</b>
... </a>\"\"\", item_depth=2, item_callback=handle)
path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:1
path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:2
The optional argument `postprocessor` is a function that takes `path`,
`key` and `value` as positional arguments and returns a new `(key, value)`
pair where both `key` and `value` may have changed. Usage example::
>>> def postprocessor(path, key, value):
... try:
... return key + ':int', int(value)
... except (ValueError, TypeError):
... return key, value
>>> xmltodict.parse('<a><b>1</b><b>2</b><b>x</b></a>',
... postprocessor=postprocessor)
OrderedDict([(u'a', OrderedDict([(u'b:int', [1, 2]), (u'b', u'x')]))])
You can pass an alternate version of `expat` (such as `defusedexpat`) by
using the `expat` parameter. E.g:
>>> import defusedexpat
>>> xmltodict.parse('<a>hello</a>', expat=defusedexpat.pyexpat)
OrderedDict([(u'a', u'hello')])
"""
handler = _DictSAXHandler(namespace_separator=namespace_separator,
**kwargs)
if isinstance(xml_input, _unicode):
if not encoding:
encoding = 'utf-8'
xml_input = xml_input.encode(encoding)
if not process_namespaces:
namespace_separator = None
parser = expat.ParserCreate(
encoding,
namespace_separator
)
try:
parser.ordered_attributes = True
except AttributeError:
# Jython's expat does not support ordered_attributes
pass
parser.StartElementHandler = handler.startElement
parser.EndElementHandler = handler.endElement
parser.CharacterDataHandler = handler.characters
parser.buffer_text = True
try:
parser.ParseFile(xml_input)
except (TypeError, AttributeError):
parser.Parse(xml_input, True)
return handler.item
def _emit(key, value, content_handler,
attr_prefix='@',
cdata_key='#text',
depth=0,
preprocessor=None,
pretty=False,
newl='\n',
indent='\t'):
if preprocessor is not None:
result = preprocessor(key, value)
if result is None:
return
key, value = result
if not isinstance(value, (list, tuple)):
value = [value]
if depth == 0 and len(value) > 1:
raise ValueError('document with multiple roots')
for v in value:
if v is None:
v = OrderedDict()
elif not isinstance(v, dict):
v = _unicode(v)
if isinstance(v, _basestring):
v = OrderedDict(((cdata_key, v),))
cdata = None
attrs = OrderedDict()
children = []
for ik, iv in v.items():
if ik == cdata_key:
cdata = iv
continue
if ik.startswith(attr_prefix):
attrs[ik[len(attr_prefix):]] = iv
continue
children.append((ik, iv))
if pretty:
content_handler.ignorableWhitespace(depth * indent)
content_handler.startElement(key, AttributesImpl(attrs))
if pretty and children:
content_handler.ignorableWhitespace(newl)
for child_key, child_value in children:
_emit(child_key, child_value, content_handler,
attr_prefix, cdata_key, depth+1, preprocessor,
pretty, newl, indent)
if cdata is not None:
content_handler.characters(cdata)
if pretty and children:
content_handler.ignorableWhitespace(depth * indent)
content_handler.endElement(key)
if pretty and depth:
content_handler.ignorableWhitespace(newl)
def unparse(input_dict, output=None, encoding='utf-8', full_document=True,
**kwargs):
"""Emit an XML document for the given `input_dict` (reverse of `parse`).
The resulting XML document is returned as a string, but if `output` (a
file-like object) is specified, it is written there instead.
Dictionary keys prefixed with `attr_prefix` (default=`'@'`) are interpreted
as XML node attributes, whereas keys equal to `cdata_key`
(default=`'#text'`) are treated as character data.
The `pretty` parameter (default=`False`) enables pretty-printing. In this
mode, lines are terminated with `'\n'` and indented with `'\t'`, but this
can be customized with the `newl` and `indent` parameters.
"""
((key, value),) = input_dict.items()
must_return = False
if output is None:
output = StringIO()
must_return = True
content_handler = XMLGenerator(output, encoding)
if full_document:
content_handler.startDocument()
_emit(key, value, content_handler, **kwargs)
if full_document:
content_handler.endDocument()
if must_return:
value = output.getvalue()
try: # pragma no cover
value = value.decode(encoding)
except AttributeError: # pragma no cover
pass
return value
if __name__ == '__main__': # pragma: no cover
import sys
import marshal
(item_depth,) = sys.argv[1:]
item_depth = int(item_depth)
def handle_item(path, item):
marshal.dump((path, item), sys.stdout)
return True
try:
root = parse(sys.stdin,
item_depth=item_depth,
item_callback=handle_item,
dict_constructor=dict)
if item_depth == 0:
handle_item([], root)
except KeyboardInterrupt:
pass
| gpl-2.0 |
rdobson/transfervm | transfertests/manualnetwork_test.py | 2 | 1862 |
import httplib
import logging
import unittest
import urllib2
import testsetup
import transferclient
import moreasserts
M = 1024 * 1024
def assertVdiZero(self, ip, port, record, vdi_mb):
# Make a new record with the IP and port fields updated
r = dict(record, ip=ip, port=port)
moreasserts.assertVdiIsZeroUsingHttpGet(self, r, vdi_mb)
class StaticIpTest(unittest.TestCase):
ip = '10.80.237.211' # Hopefully this is free at the moment!
mask = '255.255.240.0'
gw = '10.80.224.1'
def testConfiguration(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10, dangerous_test=True)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='http', network_mode='manual', network_ip=self.ip, network_mask=self.mask, network_gateway=self.gw)
record = transferclient.get_record(hostname, vdi_uuid=vdi)
self.assertEquals(self.ip, record['ip'])
def testConnection(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10, dangerous_test=True)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='http', network_mode='manual', network_ip=self.ip, network_mask=self.mask, network_gateway=self.gw)
record = transferclient.get_record(hostname, vdi_uuid=vdi)
# Test GET to the ip
assertVdiZero(self, self.ip, record['port'], record, 10)
class CustomPortTest(unittest.TestCase):
def testGetOnPort123(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='http', network_port='123')
record = transferclient.get_record(hostname, vdi_uuid=vdi)
assertVdiZero(self, record['ip'], '123', record, 10)
| gpl-2.0 |
odoo-turkiye/odoo | addons/account_check_writing/report/__init__.py | 446 | 1066 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import check_print
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
RedhawkSDR/integration-gnuhawk | components/noise_source_f/tests/test_noise_source_f.py | 1 | 4535 | #!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify is under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
"""Test for all component implementations in noise_source_f"""
def testScaBasicBehavior(self):
#######################################################################
# Launch the component with the default execparams
execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
self.launch(execparams)
#######################################################################
# Verify the basic state of the component
self.assertNotEqual(self.comp, None)
self.assertEqual(self.comp.ref._non_existent(), False)
self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
self.assertEqual(self.spd.get_id(), self.comp.ref._get_identifier())
#######################################################################
# Simulate regular component startup
# Verify that initialize nor configure throw errors
self.comp.initialize()
configureProps = self.getPropertySet(kinds=("configure",), modes=("readwrite", "writeonly"), includeNil=False)
self.comp.configure(configureProps)
#######################################################################
# Validate that query returns all expected parameters
# Query of '[]' should return the following set of properties
expectedProps = []
expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
props = self.comp.query([])
props = dict((x.id, any.from_any(x.value)) for x in props)
# Query may return more than expected, but not less
for expectedProp in expectedProps:
self.assertEquals(props.has_key(expectedProp.id), True)
#######################################################################
# Verify that all expected ports are available
for port in self.scd.get_componentfeatures().get_ports().get_uses():
port_obj = self.comp.getPort(str(port.get_usesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
for port in self.scd.get_componentfeatures().get_ports().get_provides():
port_obj = self.comp.getPort(str(port.get_providesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a(port.get_repid()), True)
#######################################################################
# Make sure start and stop can be called without throwing exceptions
self.comp.start()
self.comp.stop()
#######################################################################
# Simulate regular component shutdown
self.comp.releaseObject()
# TODO Add additional tests here
#
# See:
# ossie.utils.bulkio.bulkio_helpers,
# ossie.utils.bluefile.bluefile_helpers
# for modules that will assist with testing components with BULKIO ports
if __name__ == "__main__":
ossie.utils.testing.main("../noise_source_f.spd.xml") # By default tests all implementations
| gpl-3.0 |
thinkasoft/ProyectoRD-dev | l10n_ve_imex/model/customs_form.py | 1 | 15041 | # -*- encoding: utf-8 -*-
###############################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (c) 2013 Vauxoo C.A. (http://openerp.com.ve/)
# All Rights Reserved
############# Credits #########################################################
# Coded by: Juan Marzquez (Tecvemar, c.a.) <[email protected]>
# Katherine Zaoral <[email protected]>
# Planified by:
# Juan Marquez <[email protected]>
# Humberto Arocha <[email protected]>
# Audited by: Humberto Arocha <[email protected]>
###############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
###############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.pooler
import openerp.addons.decimal_precision as dp
import time
class customs_form(osv.osv):
_name = 'customs.form'
_description = ''
def name_get(self, cr, uid, ids, context):
if not len(ids):
return []
res = []
so_brw = self.browse(cr, uid, ids, context)
for item in so_brw:
res.append((item.id,
'F86 # %s - %s' % (item.name, item.ref or '')))
return res
def _amount_total(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for f86 in self.browse(cr, uid, ids, context=context):
amount_total = 0.0
for line in f86.cfl_ids:
amount_total += line.amount
res[f86.id] = amount_total
return res
def _default_cfl_ids(self, cr, uid, context=None):
""" Gets default cfl_ids from customs_duty. """
obj_ct = self.pool.get('customs.duty')
ct_ids = obj_ct.search(cr, uid, [], context=context)
res = []
for id in ct_ids:
vat = obj_ct.browse(cr, uid, id, context=context)
res.append({'tax_code': id,
'amount': 0.0, 'vat_detail': vat.vat_detail})
return res
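    # Note: the (0, 0, vals) triple returned below is the standard OpenERP
    # one2many "create" command, so each dict built this way becomes a new
    # account.move.line record when the list is passed as 'line_id' of the move.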
def _gen_account_move_line(self, company_id, account_id, partner_id, name,
debit, credit):
return (0, 0, {
'auto': True,
'company_id': company_id,
'account_id': account_id,
'partner_id': partner_id,
'name': name[:64],
'debit': debit,
'credit': credit,
'reconcile': False,
})
_columns = {
'name': fields.char('Form #', size=16, required=True, readonly=True,
states={'draft': [('readonly', False)]}),
'ref': fields.char('Reference', size=64, required=False, readonly=True,
states={'draft': [('readonly', False)]}),
'date': fields.date('Date', required=True, readonly=True,
states={'draft': [('readonly', False)]},
select=True),
'company_id': fields.many2one('res.company', 'Company', required=True,
readonly=True, ondelete='restrict'),
'broker_id': fields.many2one('res.partner', 'Broker',
change_default=True, readonly=True,
states={'draft': [('readonly', False)]},
ondelete='restrict'),
'ref_reg': fields.char('Reg. number', size=16, required=False,
readonly=True,
states={'draft': [('readonly', False)]}),
'date_reg': fields.date('Reg. date', required=False, readonly=True,
states={'draft': [('readonly', False)]},
select=True),
'ref_liq': fields.char('Liq. number', size=16, required=False,
readonly=True,
states={'draft': [('readonly', False)]}),
'date_liq': fields.date('liq. date', required=True, readonly=True,
states={'draft': [('readonly', False)]},
select=True),
'customs_facility_id': fields.many2one(
'customs.facility', 'Customs Facility', change_default=True,
readonly=True, states={'draft': [('readonly', False)]},
ondelete='restrict'),
'cfl_ids': fields.one2many('customs.form.line', 'customs_form_id',
'Tax lines', readonly=True,
states={'draft': [('readonly', False)]}),
'amount_total': fields.function(_amount_total, method=True,
type='float', string='Amount total',
store=False),
'move_id': fields.many2one('account.move', 'Account move',
ondelete='restrict', select=True,
readonly=True,
help="The move of this entry line."),
'narration': fields.text('Notes', readonly=False),
'state': fields.selection([('draft', 'Draft'), ('open', 'Open'),
('done', 'Done'), ('cancel', 'Cancelled')],
string='State', required=True,
readonly=True),
}
_defaults = {
'date': lambda *a: time.strftime('%Y-%m-%d'),
'company_id': lambda self, cr, uid, c:
self.pool.get('res.company')._company_default_get(cr, uid,
'customs.form',
context=c),
'cfl_ids': _default_cfl_ids,
'state': lambda *a: 'draft',
}
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'The form # must be unique!'),
]
def create_account_move_lines(self, cr, uid, f86, context=None):
""" Creates the account.move.lines from cfl_ids detail except for
taxes with "vat_detail", in this case create debits from
cfl_ids.imex_tax_line and get debit account from account_tax model
"""
lines = []
context = context or {}
company_id = context.get('f86_company_id')
rp_obj = self.pool.get('res.partner')
#~ expenses
for line in f86.cfl_ids:
debits = []
acc_part_brw = rp_obj._find_accounting_partner(line.tax_code.partner_id)
if line.tax_code.vat_detail:
for vat in line.imex_tax_line:
if vat.tax_amount:
debits.append(
{'account_id': vat.tax_id.account_collected_id.id,
'amount': vat.tax_amount,
'tax_info': ' (%s)' % vat.tax_id.name})
else:
if line.amount:
debits.append({'account_id': line.tax_code.account_id.id,
'amount': line.amount, 'tax_info': ''})
credit_account_id = \
acc_part_brw.property_account_payable.id
for debit in debits:
if not debit['account_id'] or not credit_account_id:
raise osv.except_osv(
_('Error!'), _('No account found, please check \
customs taxes settings (%s)') % line.tax_code.name)
lines.append(
self._gen_account_move_line(
company_id, debit['account_id'],
acc_part_brw.id, '[%s] %s - %s%s'
% (line.tax_code.code, line.tax_code.ref,
line.tax_code.name, debit['tax_info']),
debit['amount'], 0.0)
)
lines.append(self._gen_account_move_line(
company_id, credit_account_id, acc_part_brw.id,
'F86 #%s - %s' % (f86.name, line.tax_code.name), 0.0,
line.amount))
lines.reverse() # set real order ;-)
return lines
def create_account_move(self, cr, uid, ids, context=None):
context = context or {}
so_brw = self.browse(cr, uid, ids, context=context)
for f86 in so_brw:
if f86.move_id: # ~ The move is already done, nothing to do
return []
obj_move = self.pool.get('account.move')
obj_cfg = self.pool.get('customs.form.config')
company_id = self.pool.get('res.users').browse(
cr, uid, uid, context=context).company_id.id
cfg_id = obj_cfg.search(cr, uid, [('company_id', '=', company_id)],
context=context)
if cfg_id:
f86_cfg = obj_cfg.browse(cr, uid, cfg_id[0], context=context)
else:
raise osv.except_osv(_('Error!'),
_('Please set a valid configuration in \
the imex settings'))
context.update({'f86_company_id': company_id, 'f86_config': f86_cfg})
move_ids = []
for f86 in so_brw:
move = {
'ref': 'F86 #%s' % f86.name,
'journal_id': f86_cfg.journal_id.id,
'date': f86.date_liq,
'company_id': company_id,
'state': 'draft',
'to_check': False,
'narration': _('Form 86 # %s\n\tReference: %s\n\tBroker: %s')
% (f86.name, f86.ref or '', f86.broker_id.name or ''),
}
lines = self.create_account_move_lines(cr, uid, f86, context=context)
if lines:
move.update({'line_id': lines})
move_id = obj_move.create(cr, uid, move, context=context)
obj_move.post(cr, uid, [move_id], context=context)
if move_id:
move_ids.append(move_id)
self.write(cr, uid, f86.id, {'move_id': move_id}, context=context)
return move_ids
def button_draft(self, cr, uid, ids, context=None):
context = context or {}
vals = {'state': 'draft'}
return self.write(cr, uid, ids, vals, context=context)
def button_open(self, cr, uid, ids, context=None):
context = context or {}
vals = {'state': 'open'}
return self.write(cr, uid, ids, vals, context=context)
def button_done(self, cr, uid, ids, context=None):
context = context or {}
self.create_account_move(cr, uid, ids, context=context)
vals = {'state': 'done'}
return self.write(cr, uid, ids, vals, context=context)
def button_cancel(self, cr, uid, ids, context=None):
context = context or {}
f86 = self.browse(cr, uid, ids[0], context=context)
f86_move_id = f86.move_id.id if f86 and f86.move_id else False
vals = {'state': 'cancel', 'move_id': 0}
if f86_move_id:
self.pool.get('account.move').unlink(cr, uid, [f86_move_id],
context=context)
return self.write(cr, uid, ids, vals, context=context)
def test_draft(self, cr, uid, ids, *args):
return True
def test_open(self, cr, uid, ids, *args):
ids = isinstance(ids, (int, long)) and [ids] or ids
for f86 in self.browse(cr, uid, ids, context={}):
if f86.amount_total <= 0:
raise osv.except_osv(_('Warning!'),
                                      _('You must indicate an amount'))
vat_invoices = [] # for tax (vat) related invoices
for line in f86.cfl_ids:
if line.vat_detail:
vat_total = line.amount
for vat in line.imex_tax_line:
vat_total -= vat.tax_amount
if vat.imex_inv_id.id not in vat_invoices:
vat_invoices.append(vat.imex_inv_id.id)
if abs(vat_total) > 0.001:
raise osv.except_osv(
_('Warning!'),
_('The vat detail data does not correspond with '
'vat amount in line: %s') % line.tax_code.name)
return True
def test_done(self, cr, uid, ids, *args):
return True
def test_cancel(self, cr, uid, ids, *args):
if len(ids) != 1:
raise osv.except_osv(
_('Error!'),
_('Multiple operations not allowed'))
for f86 in self.browse(cr, uid, ids, context=None):
#~ Validate account_move.state != draft
if f86.move_id and f86.move_id.state != 'draft':
raise osv.except_osv(
_('Error!'),
                    _('Can\'t cancel an import while account move state <> \
"Draft" (%s)') % f86.move_id.name)
return True
class customs_form_line(osv.osv):
_name = 'customs.form.line'
_description = ''
_rec_name = 'tax_code'
_columns = {
'customs_form_id': fields.many2one('customs.form', 'Customs',
required=True, ondelete='cascade'),
'tax_code': fields.many2one('customs.duty', 'Tax',
ondelete='restrict', required=True,
readonly=False),
'amount': fields.float('Amount', required=True,
digits_compute=dp.get_precision('Account')),
'imex_tax_line': fields.one2many(
'account.invoice.tax', 'cfl_id', 'Vat lines',
attrs="{'readonly':[('vat_detail','=',True)], \
'required':[('vat_detail','=',True)]}"),
'vat_detail': fields.related('tax_code', 'vat_detail', type='boolean',
string='Tax detail', store=False,
readonly=True)
}
_defaults = {
}
_sql_constraints = [
('code_uniq', 'UNIQUE(customs_form_id,tax_code)',
'The code must be unique! (for this form)'),
]
| agpl-3.0 |
fzheng/codejam | lib/python2.7/site-packages/pip/cmdoptions.py | 136 | 15878 | """
shared options and groups
The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.
"""
from __future__ import absolute_import
from functools import partial
from optparse import OptionGroup, SUPPRESS_HELP, Option
import warnings
from pip.index import (
FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
fmt_ctl_no_use_wheel)
from pip.models import PyPI
from pip.locations import USER_CACHE_DIR, src_prefix
from pip.utils.hashes import STRONG_HASHES
def make_option_group(group, parser):
"""
Return an OptionGroup object
group -- assumed to be dict with 'name' and 'options' keys
parser -- an optparse Parser
"""
option_group = OptionGroup(parser, group['name'])
for option in group['options']:
option_group.add_option(option())
return option_group
def resolve_wheel_no_use_binary(options):
if not options.use_wheel:
control = options.format_control
fmt_ctl_no_use_wheel(control)
def check_install_build_global(options, check_options=None):
"""Disable wheels if per-setup.py call options are set.
:param options: The OptionParser options to update.
:param check_options: The options to check, if not supplied defaults to
options.
"""
if check_options is None:
check_options = options
def getname(n):
return getattr(check_options, n, None)
names = ["build_options", "global_options", "install_options"]
if any(map(getname, names)):
control = options.format_control
fmt_ctl_no_binary(control)
warnings.warn(
'Disabling all use of wheels due to the use of --build-options '
'/ --global-options / --install-options.', stacklevel=2)
###########
# options #
###########
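# Illustrative note (not part of pip itself): every entry below is either a
# functools.partial or a small factory function, and each call produces a
# brand-new optparse.Option, e.g.
#
#     parser.add_option(verbose())   # 'parser' is a hypothetical optparse parser
#
# so no Option instance (and no action='append' state) is shared between
# parsers; make_option_group() above applies the same call-to-instantiate
# pattern to a whole option group.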
help_ = partial(
Option,
'-h', '--help',
dest='help',
action='help',
help='Show help.')
isolated_mode = partial(
Option,
"--isolated",
dest="isolated_mode",
action="store_true",
default=False,
help=(
"Run pip in an isolated mode, ignoring environment variables and user "
"configuration."
),
)
require_virtualenv = partial(
Option,
# Run only if inside a virtualenv, bail if not.
'--require-virtualenv', '--require-venv',
dest='require_venv',
action='store_true',
default=False,
help=SUPPRESS_HELP)
verbose = partial(
Option,
'-v', '--verbose',
dest='verbose',
action='count',
default=0,
help='Give more output. Option is additive, and can be used up to 3 times.'
)
version = partial(
Option,
'-V', '--version',
dest='version',
action='store_true',
help='Show version and exit.')
quiet = partial(
Option,
'-q', '--quiet',
dest='quiet',
action='count',
default=0,
help='Give less output.')
log = partial(
Option,
"--log", "--log-file", "--local-log",
dest="log",
metavar="path",
help="Path to a verbose appending log."
)
no_input = partial(
Option,
# Don't ask for input
'--no-input',
dest='no_input',
action='store_true',
default=False,
help=SUPPRESS_HELP)
proxy = partial(
Option,
'--proxy',
dest='proxy',
type='str',
default='',
help="Specify a proxy in the form [user:passwd@]proxy.server:port.")
retries = partial(
Option,
'--retries',
dest='retries',
type='int',
default=5,
help="Maximum number of retries each connection should attempt "
"(default %default times).")
timeout = partial(
Option,
'--timeout', '--default-timeout',
metavar='sec',
dest='timeout',
type='float',
default=15,
help='Set the socket timeout (default %default seconds).')
default_vcs = partial(
Option,
# The default version control system for editables, e.g. 'svn'
'--default-vcs',
dest='default_vcs',
type='str',
default='',
help=SUPPRESS_HELP)
skip_requirements_regex = partial(
Option,
# A regex to be used to skip requirements
'--skip-requirements-regex',
dest='skip_requirements_regex',
type='str',
default='',
help=SUPPRESS_HELP)
def exists_action():
return Option(
# Option when path already exist
'--exists-action',
dest='exists_action',
type='choice',
choices=['s', 'i', 'w', 'b'],
default=[],
action='append',
metavar='action',
help="Default action when a path already exists: "
"(s)witch, (i)gnore, (w)ipe, (b)ackup.")
cert = partial(
Option,
'--cert',
dest='cert',
type='str',
metavar='path',
help="Path to alternate CA bundle.")
client_cert = partial(
Option,
'--client-cert',
dest='client_cert',
type='str',
default=None,
metavar='path',
help="Path to SSL client certificate, a single file containing the "
"private key and the certificate in PEM format.")
index_url = partial(
Option,
'-i', '--index-url', '--pypi-url',
dest='index_url',
metavar='URL',
default=PyPI.simple_url,
help='Base URL of Python Package Index (default %default).')
def extra_index_url():
return Option(
'--extra-index-url',
dest='extra_index_urls',
metavar='URL',
action='append',
default=[],
help='Extra URLs of package indexes to use in addition to --index-url.'
)
no_index = partial(
Option,
'--no-index',
dest='no_index',
action='store_true',
default=False,
help='Ignore package index (only looking at --find-links URLs instead).')
def find_links():
return Option(
'-f', '--find-links',
dest='find_links',
action='append',
default=[],
metavar='url',
help="If a url or path to an html file, then parse for links to "
"archives. If a local path or file:// url that's a directory, "
"then look for archives in the directory listing.")
def allow_external():
return Option(
"--allow-external",
dest="allow_external",
action="append",
default=[],
metavar="PACKAGE",
help=SUPPRESS_HELP,
)
allow_all_external = partial(
Option,
"--allow-all-external",
dest="allow_all_external",
action="store_true",
default=False,
help=SUPPRESS_HELP,
)
def trusted_host():
return Option(
"--trusted-host",
dest="trusted_hosts",
action="append",
metavar="HOSTNAME",
default=[],
help="Mark this host as trusted, even though it does not have valid "
"or any HTTPS.",
)
# Remove after 7.0
no_allow_external = partial(
Option,
"--no-allow-external",
dest="allow_all_external",
action="store_false",
default=False,
help=SUPPRESS_HELP,
)
# Remove --allow-insecure after 7.0
def allow_unsafe():
return Option(
"--allow-unverified", "--allow-insecure",
dest="allow_unverified",
action="append",
default=[],
metavar="PACKAGE",
help=SUPPRESS_HELP,
)
# Remove after 7.0
no_allow_unsafe = partial(
Option,
"--no-allow-insecure",
dest="allow_all_insecure",
action="store_false",
default=False,
help=SUPPRESS_HELP
)
# Remove after 1.5
process_dependency_links = partial(
Option,
"--process-dependency-links",
dest="process_dependency_links",
action="store_true",
default=False,
help="Enable the processing of dependency links.",
)
def constraints():
return Option(
'-c', '--constraint',
dest='constraints',
action='append',
default=[],
metavar='file',
help='Constrain versions using the given constraints file. '
'This option can be used multiple times.')
def requirements():
return Option(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Install from the given requirements file. '
'This option can be used multiple times.')
def editable():
return Option(
'-e', '--editable',
dest='editables',
action='append',
default=[],
metavar='path/url',
help=('Install a project in editable mode (i.e. setuptools '
'"develop mode") from a local project path or a VCS url.'),
)
src = partial(
Option,
'--src', '--source', '--source-dir', '--source-directory',
dest='src_dir',
metavar='dir',
default=src_prefix,
help='Directory to check out editable projects into. '
'The default in a virtualenv is "<venv path>/src". '
'The default for global installs is "<current dir>/src".'
)
# XXX: deprecated, remove in 9.0
use_wheel = partial(
Option,
'--use-wheel',
dest='use_wheel',
action='store_true',
default=True,
help=SUPPRESS_HELP,
)
# XXX: deprecated, remove in 9.0
no_use_wheel = partial(
Option,
'--no-use-wheel',
dest='use_wheel',
action='store_false',
default=True,
    help=('Do not find and prefer wheel archives when searching indexes and '
'find-links locations. DEPRECATED in favour of --no-binary.'),
)
def _get_format_control(values, option):
"""Get a format_control object."""
return getattr(values, option.dest)
def _handle_no_binary(option, opt_str, value, parser):
existing = getattr(parser.values, option.dest)
fmt_ctl_handle_mutual_exclude(
value, existing.no_binary, existing.only_binary)
def _handle_only_binary(option, opt_str, value, parser):
existing = getattr(parser.values, option.dest)
fmt_ctl_handle_mutual_exclude(
value, existing.only_binary, existing.no_binary)
def no_binary():
return Option(
"--no-binary", dest="format_control", action="callback",
callback=_handle_no_binary, type="str",
default=FormatControl(set(), set()),
help="Do not use binary packages. Can be supplied multiple times, and "
"each time adds to the existing value. Accepts either :all: to "
"disable all binary packages, :none: to empty the set, or one or "
"more package names with commas between them. Note that some "
"packages are tricky to compile and may fail to install when "
"this option is used on them.")
def only_binary():
return Option(
"--only-binary", dest="format_control", action="callback",
callback=_handle_only_binary, type="str",
default=FormatControl(set(), set()),
help="Do not use source packages. Can be supplied multiple times, and "
"each time adds to the existing value. Accepts either :all: to "
"disable all source packages, :none: to empty the set, or one or "
"more package names with commas between them. Packages without "
"binary distributions will fail to install when this option is "
"used on them.")
cache_dir = partial(
Option,
"--cache-dir",
dest="cache_dir",
default=USER_CACHE_DIR,
metavar="dir",
help="Store the cache data in <dir>."
)
no_cache = partial(
Option,
"--no-cache-dir",
dest="cache_dir",
action="store_false",
help="Disable the cache.",
)
no_deps = partial(
Option,
'--no-deps', '--no-dependencies',
dest='ignore_dependencies',
action='store_true',
default=False,
help="Don't install package dependencies.")
build_dir = partial(
Option,
'-b', '--build', '--build-dir', '--build-directory',
dest='build_dir',
metavar='dir',
help='Directory to unpack packages into and build in.'
)
install_options = partial(
Option,
'--install-option',
dest='install_options',
action='append',
metavar='options',
help="Extra arguments to be supplied to the setup.py install "
"command (use like --install-option=\"--install-scripts=/usr/local/"
"bin\"). Use multiple --install-option options to pass multiple "
"options to setup.py install. If you are using an option with a "
"directory path, be sure to use absolute path.")
global_options = partial(
Option,
'--global-option',
dest='global_options',
action='append',
metavar='options',
help="Extra global options to be supplied to the setup.py "
"call before the install command.")
no_clean = partial(
Option,
'--no-clean',
action='store_true',
default=False,
help="Don't clean up build directories.")
pre = partial(
Option,
'--pre',
action='store_true',
default=False,
help="Include pre-release and development versions. By default, "
"pip only finds stable versions.")
disable_pip_version_check = partial(
Option,
"--disable-pip-version-check",
dest="disable_pip_version_check",
action="store_true",
default=False,
help="Don't periodically check PyPI to determine whether a new version "
"of pip is available for download. Implied with --no-index.")
# Deprecated, Remove later
always_unzip = partial(
Option,
'-Z', '--always-unzip',
dest='always_unzip',
action='store_true',
help=SUPPRESS_HELP,
)
def _merge_hash(option, opt_str, value, parser):
"""Given a value spelled "algo:digest", append the digest to a list
pointed to in a dict by the algo name."""
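    # e.g. passing --hash=sha256:<digest1> --hash=sha256:<digest2> leaves
    # parser.values.hashes as {'sha256': ['<digest1>', '<digest2>']}
    # (digests shown here as placeholders).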
if not parser.values.hashes:
parser.values.hashes = {}
try:
algo, digest = value.split(':', 1)
except ValueError:
parser.error('Arguments to %s must be a hash name '
'followed by a value, like --hash=sha256:abcde...' %
opt_str)
if algo not in STRONG_HASHES:
parser.error('Allowed hash algorithms for %s are %s.' %
(opt_str, ', '.join(STRONG_HASHES)))
parser.values.hashes.setdefault(algo, []).append(digest)
hash = partial(
Option,
'--hash',
# Hash values eventually end up in InstallRequirement.hashes due to
# __dict__ copying in process_line().
dest='hashes',
action='callback',
callback=_merge_hash,
type='string',
help="Verify that the package's archive matches this "
'hash before installing. Example: --hash=sha256:abcdef...')
require_hashes = partial(
Option,
'--require-hashes',
dest='require_hashes',
action='store_true',
default=False,
help='Require a hash to check each requirement against, for '
'repeatable installs. This option is implied when any package in a '
'requirements file has a --hash option.')
##########
# groups #
##########
general_group = {
'name': 'General Options',
'options': [
help_,
isolated_mode,
require_virtualenv,
verbose,
version,
quiet,
log,
no_input,
proxy,
retries,
timeout,
default_vcs,
skip_requirements_regex,
exists_action,
trusted_host,
cert,
client_cert,
cache_dir,
no_cache,
disable_pip_version_check,
]
}
non_deprecated_index_group = {
'name': 'Package Index Options',
'options': [
index_url,
extra_index_url,
no_index,
find_links,
process_dependency_links,
]
}
index_group = {
'name': 'Package Index Options (including deprecated options)',
'options': non_deprecated_index_group['options'] + [
allow_external,
allow_all_external,
no_allow_external,
allow_unsafe,
no_allow_unsafe,
]
}
| mit |
collex100/odoo | addons/account_anglo_saxon/__openerp__.py | 264 | 2393 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Anglo-Saxon Accounting',
'version': '1.2',
'author': 'OpenERP SA, Veritos',
'website': 'https://www.odoo.com',
'description': """
This module supports the Anglo-Saxon accounting methodology by changing the accounting logic with stock transactions.
=====================================================================================================================
The difference between Anglo-Saxon accounting countries and Rhine (also
called Continental accounting) countries is the moment at which the Cost of
Goods Sold, as opposed to the Cost of Sales, is recognized. Anglo-Saxon
accounting takes the cost when the sales invoice is created; Continental
accounting takes the cost at the moment the goods are shipped.
This module adds this functionality by using an interim account to store
the value of shipped goods, and contra-books this interim account when the
invoice is created in order to transfer this amount to the debtor or
creditor account. Secondly, price differences between the actual purchase
price and the fixed product standard price are booked on a separate
account.""",
'depends': ['product', 'purchase'],
'category': 'Accounting & Finance',
'demo': [],
'data': ['product_view.xml'],
'test': ['test/anglo_saxon.yml', 'test/anglo_saxon_avg_fifo.yml'],
'auto_install': False,
'installable': True,
}
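# Illustrative note (not part of the original manifest): in the sales flow this
# means a delivery credits the stock valuation account and debits the interim
# (stock output) account, and the customer invoice later credits that interim
# account and debits the Cost of Goods Sold account, so the cost is recognized
# at invoicing time rather than at shipping time.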
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
prarthitm/edxplatform | lms/djangoapps/teams/migrations/0001_initial.py | 20 | 6186 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django_countries.fields
from django.conf import settings
import student.models
from openedx.core.djangoapps.xmodule_django.models import CourseKeyField
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='CourseTeam',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('team_id', models.CharField(unique=True, max_length=255)),
('discussion_topic_id', models.CharField(unique=True, max_length=255)),
('name', models.CharField(max_length=255, db_index=True)),
('course_id', CourseKeyField(max_length=255, db_index=True)),
('topic_id', models.CharField(db_index=True, max_length=255, blank=True)),
('date_created', models.DateTimeField(auto_now_add=True)),
('description', models.CharField(max_length=300)),
('country', django_countries.fields.CountryField(blank=True, max_length=2)),
('language', student.models.LanguageField(blank=True, help_text='Optional language the team uses as ISO 639-1 code.', max_length=16, choices=[['aa', 'Afar'], ['ab', 'Abkhazian'], ['af', 'Afrikaans'], ['ak', 'Akan'], ['sq', 'Albanian'], ['am', 'Amharic'], ['ar', 'Arabic'], ['an', 'Aragonese'], ['hy', 'Armenian'], ['as', 'Assamese'], ['av', 'Avaric'], ['ae', 'Avestan'], ['ay', 'Aymara'], ['az', 'Azerbaijani'], ['ba', 'Bashkir'], ['bm', 'Bambara'], ['eu', 'Basque'], ['be', 'Belarusian'], ['bn', 'Bengali'], ['bh', 'Bihari languages'], ['bi', 'Bislama'], ['bs', 'Bosnian'], ['br', 'Breton'], ['bg', 'Bulgarian'], ['my', 'Burmese'], ['ca', 'Catalan'], ['ch', 'Chamorro'], ['ce', 'Chechen'], ['zh', 'Chinese'], ['zh_HANS', 'Simplified Chinese'], ['zh_HANT', 'Traditional Chinese'], ['cu', 'Church Slavic'], ['cv', 'Chuvash'], ['kw', 'Cornish'], ['co', 'Corsican'], ['cr', 'Cree'], ['cs', 'Czech'], ['da', 'Danish'], ['dv', 'Divehi'], ['nl', 'Dutch'], ['dz', 'Dzongkha'], ['en', 'English'], ['eo', 'Esperanto'], ['et', 'Estonian'], ['ee', 'Ewe'], ['fo', 'Faroese'], ['fj', 'Fijian'], ['fi', 'Finnish'], ['fr', 'French'], ['fy', 'Western Frisian'], ['ff', 'Fulah'], ['ka', 'Georgian'], ['de', 'German'], ['gd', 'Gaelic'], ['ga', 'Irish'], ['gl', 'Galician'], ['gv', 'Manx'], ['el', 'Greek'], ['gn', 'Guarani'], ['gu', 'Gujarati'], ['ht', 'Haitian'], ['ha', 'Hausa'], ['he', 'Hebrew'], ['hz', 'Herero'], ['hi', 'Hindi'], ['ho', 'Hiri Motu'], ['hr', 'Croatian'], ['hu', 'Hungarian'], ['ig', 'Igbo'], ['is', 'Icelandic'], ['io', 'Ido'], ['ii', 'Sichuan Yi'], ['iu', 'Inuktitut'], ['ie', 'Interlingue'], ['ia', 'Interlingua'], ['id', 'Indonesian'], ['ik', 'Inupiaq'], ['it', 'Italian'], ['jv', 'Javanese'], ['ja', 'Japanese'], ['kl', 'Kalaallisut'], ['kn', 'Kannada'], ['ks', 'Kashmiri'], ['kr', 'Kanuri'], ['kk', 'Kazakh'], ['km', 'Central Khmer'], ['ki', 'Kikuyu'], ['rw', 'Kinyarwanda'], ['ky', 'Kirghiz'], ['kv', 'Komi'], ['kg', 'Kongo'], ['ko', 'Korean'], ['kj', 'Kuanyama'], ['ku', 'Kurdish'], ['lo', 'Lao'], ['la', 'Latin'], ['lv', 'Latvian'], ['li', 'Limburgan'], ['ln', 'Lingala'], ['lt', 'Lithuanian'], ['lb', 'Luxembourgish'], ['lu', 'Luba-Katanga'], ['lg', 'Ganda'], ['mk', 'Macedonian'], ['mh', 'Marshallese'], ['ml', 'Malayalam'], ['mi', 'Maori'], ['mr', 'Marathi'], ['ms', 'Malay'], ['mg', 'Malagasy'], ['mt', 'Maltese'], ['mn', 'Mongolian'], ['na', 'Nauru'], ['nv', 'Navajo'], ['nr', 'Ndebele, South'], ['nd', 'Ndebele, North'], ['ng', 'Ndonga'], ['ne', 'Nepali'], ['nn', 'Norwegian Nynorsk'], ['nb', 'Bokm\xe5l, Norwegian'], ['no', 'Norwegian'], ['ny', 'Chichewa'], ['oc', 'Occitan'], ['oj', 'Ojibwa'], ['or', 'Oriya'], ['om', 'Oromo'], ['os', 'Ossetian'], ['pa', 'Panjabi'], ['fa', 'Persian'], ['pi', 'Pali'], ['pl', 'Polish'], ['pt', 'Portuguese'], ['ps', 'Pushto'], ['qu', 'Quechua'], ['rm', 'Romansh'], ['ro', 'Romanian'], ['rn', 'Rundi'], ['ru', 'Russian'], ['sg', 'Sango'], ['sa', 'Sanskrit'], ['si', 'Sinhala'], ['sk', 'Slovak'], ['sl', 'Slovenian'], ['se', 'Northern Sami'], ['sm', 'Samoan'], ['sn', 'Shona'], ['sd', 'Sindhi'], ['so', 'Somali'], ['st', 'Sotho, Southern'], ['es', 'Spanish'], ['sc', 'Sardinian'], ['sr', 'Serbian'], ['ss', 'Swati'], ['su', 'Sundanese'], ['sw', 'Swahili'], ['sv', 'Swedish'], ['ty', 'Tahitian'], ['ta', 'Tamil'], ['tt', 'Tatar'], ['te', 'Telugu'], ['tg', 'Tajik'], ['tl', 'Tagalog'], ['th', 'Thai'], ['bo', 'Tibetan'], ['ti', 'Tigrinya'], ['to', 'Tonga (Tonga Islands)'], ['tn', 'Tswana'], ['ts', 'Tsonga'], ['tk', 'Turkmen'], ['tr', 'Turkish'], ['tw', 'Twi'], ['ug', 'Uighur'], ['uk', 'Ukrainian'], 
['ur', 'Urdu'], ['uz', 'Uzbek'], ['ve', 'Venda'], ['vi', 'Vietnamese'], ['vo', 'Volap\xfck'], ['cy', 'Welsh'], ['wa', 'Walloon'], ['wo', 'Wolof'], ['xh', 'Xhosa'], ['yi', 'Yiddish'], ['yo', 'Yoruba'], ['za', 'Zhuang'], ['zu', 'Zulu']])),
('last_activity_at', models.DateTimeField(db_index=True)),
('team_size', models.IntegerField(default=0, db_index=True)),
],
),
migrations.CreateModel(
name='CourseTeamMembership',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date_joined', models.DateTimeField(auto_now_add=True)),
('last_activity_at', models.DateTimeField()),
('team', models.ForeignKey(related_name='membership', to='teams.CourseTeam')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='courseteam',
name='users',
field=models.ManyToManyField(related_name='teams', through='teams.CourseTeamMembership', to=settings.AUTH_USER_MODEL, db_index=True),
),
migrations.AlterUniqueTogether(
name='courseteammembership',
unique_together=set([('user', 'team')]),
),
]
| agpl-3.0 |
knowledgepoint-devs/askbot-devel | askbot/views/meta.py | 1 | 8905 | """
:synopsis: remaining "secondary" views for askbot
This module contains a collection of views displaying all sorts of secondary and mostly static content.
"""
from django.shortcuts import render_to_response, get_object_or_404
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.shortcuts import render
from django.template import RequestContext, Template
from django.template.loader import get_template
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseForbidden
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.utils import translation
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from django.views import static
from django.views.decorators import csrf
from django.db.models import Max, Count
from askbot import skins
from askbot.conf import settings as askbot_settings
from askbot.forms import FeedbackForm
from askbot.utils.url_utils import get_login_url
from askbot.utils.forms import get_next_url
from askbot.mail import mail_moderators, send_mail
from askbot.models import BadgeData, Award, User, Tag
from askbot.models import badges as badge_data
from askbot.models import get_feed_url
from askbot.skins.loaders import render_text_into_skin
from askbot.utils.decorators import admins_only
from askbot.utils.forms import get_next_url
from askbot.utils import functions
from recaptcha_works.decorators import fix_recaptcha_remote_ip
import re
def generic_view(request, template = None, page_class = None):
"""this may be not necessary, since it is just a rewrite of render"""
if request is None: # a plug for strange import errors in django startup
return render_to_response('django_error.html')
return render(request, template, {'page_class': page_class})
PUBLIC_VARIABLES = ('CUSTOM_CSS', 'CUSTOM_JS')
def config_variable(request, variable_name = None, mimetype = None):
"""Print value from the configuration settings
as response content. All parameters are required.
"""
if variable_name in PUBLIC_VARIABLES:
#todo add http header-based caching here!!!
output = getattr(askbot_settings, variable_name, '')
return HttpResponse(output, mimetype = mimetype)
else:
return HttpResponseForbidden()
def about(request, template='about.html'):
title = _('About %(site)s') % {'site': askbot_settings.APP_SHORT_NAME}
data = {
'title': title,
'page_class': 'meta',
'content': askbot_settings.FORUM_ABOUT
}
return render(request, 'static_page.html', data)
def page_not_found(request, template='404.html'):
return generic_view(request, template)
def server_error(request, template='500.html'):
return generic_view(request, template)
def help(request):
data = {
'app_name': askbot_settings.APP_SHORT_NAME,
'page_class': 'meta'
}
return render(request, 'help.html', data)
def faq(request):
if askbot_settings.FORUM_FAQ.strip() != '':
data = {
'title': _('FAQ'),
'content': askbot_settings.FORUM_FAQ,
'page_class': 'meta',
}
return render(request, 'static_page.html', data)
else:
data = {
'gravatar_faq_url': reverse('faq') + '#gravatar',
'ask_question_url': get_feed_url('ask'),
'page_class': 'meta',
}
return render(request, 'faq_static.html', data)
@csrf.csrf_protect
@fix_recaptcha_remote_ip
def feedback(request):
data = {'page_class': 'meta'}
form = None
if askbot_settings.ALLOW_ANONYMOUS_FEEDBACK is False:
if request.user.is_anonymous():
message = _('Please sign in or register to send your feedback')
request.user.message_set.create(message=message)
redirect_url = get_login_url() + '?next=' + request.path
return HttpResponseRedirect(redirect_url)
if request.method == "POST":
form = FeedbackForm(user=request.user, data=request.POST)
if form.is_valid():
if not request.user.is_authenticated():
data['email'] = form.cleaned_data.get('email', None)
else:
data['email'] = request.user.email
data['message'] = form.cleaned_data['message']
data['name'] = form.cleaned_data.get('name', None)
template = get_template('email/feedback_email.txt')
message = template.render(RequestContext(request, data))
headers = {}
if data['email']:
headers = {'Reply-To': data['email']}
subject = _('Q&A forum feedback')
if askbot_settings.FEEDBACK_EMAILS:
recipients = re.split('\s*,\s*', askbot_settings.FEEDBACK_EMAILS)
for recipient in recipients:
send_mail(
subject_line=subject,
body_text=message,
headers=headers,
recipient=recipient
)
else:
mail_moderators(
subject_line=subject,
body_text=message,
headers=headers
)
msg = _('Thanks for the feedback!')
request.user.message_set.create(message=msg)
return HttpResponseRedirect(get_next_url(request))
else:
form = FeedbackForm(
user=request.user,
initial={'next':get_next_url(request)}
)
data['form'] = form
return render(request, 'feedback.html', data)
feedback.CANCEL_MESSAGE=ugettext_lazy('We look forward to hearing your feedback! Please, give it next time :)')
def privacy(request):
data = {
'title': _('Privacy policy'),
'page_class': 'meta',
'content': askbot_settings.FORUM_PRIVACY
}
return render(request, 'static_page.html', data)
def badges(request):#user status/reputation system
#todo: supplement database data with the stuff from badges.py
if askbot_settings.BADGES_MODE != 'public':
raise Http404
known_badges = badge_data.BADGES.keys()
badges = BadgeData.objects.filter(slug__in = known_badges).order_by('slug')
my_badge_ids = list()
if request.user.is_authenticated():
my_badge_ids = Award.objects.filter(
user=request.user
).values_list(
'badge_id', flat=True
).distinct()
data = {
'active_tab': 'badges',
'badges' : badges,
'page_class': 'meta',
'my_badge_ids' : my_badge_ids
}
return render(request, 'badges.html', data)
def badge(request, id):
#todo: supplement database data with the stuff from badges.py
badge = get_object_or_404(BadgeData, id=id)
badge_recipients = User.objects.filter(
award_user__badge = badge
).annotate(
last_awarded_at = Max('award_user__awarded_at'),
award_count = Count('award_user')
).order_by(
'-last_awarded_at'
)
data = {
'active_tab': 'badges',
'badge_recipients' : badge_recipients,
'badge' : badge,
'page_class': 'meta',
}
return render(request, 'badge.html', data)
@admins_only
def list_suggested_tags(request):
"""moderators and administrators can list tags that are
    in the moderation queue, apply a suggested tag to questions,
    or cancel the moderation request."""
if askbot_settings.ENABLE_TAG_MODERATION == False:
raise Http404
tags = Tag.objects.filter(
status = Tag.STATUS_SUGGESTED,
language_code=translation.get_language()
)
tags = tags.order_by('-used_count', 'name')
#paginate moderated tags
paginator = Paginator(tags, 20)
page_no = request.GET.get('page', '1')
try:
page = paginator.page(page_no)
except (EmptyPage, InvalidPage):
page = paginator.page(paginator.num_pages)
paginator_context = functions.setup_paginator({
'is_paginated' : True,
'pages': paginator.num_pages,
'current_page_number': page_no,
'page_object': page,
'base_url' : request.path
})
data = {
'tags': page.object_list,
'active_tab': 'tags',
'tab_id': 'suggested',
'page_class': 'moderate-tags-page',
'page_title': _('Suggested tags'),
'paginator_context' : paginator_context,
}
return render(request, 'list_suggested_tags.html', data)
| gpl-3.0 |
wrigri/libcloud | docs/examples/storage/publish_static_website_on_cf.py | 58 | 1117 | from StringIO import StringIO
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
CloudFiles = get_driver(Provider.CLOUDFILES_US)
driver = CloudFiles('username', 'api key')
container = driver.create_container(container_name='my_website')
iterator1 = StringIO('<p>Hello World from Libcloud!</p>')
iterator2 = StringIO('<p>Oh, noez, 404!!</p>')
iterator3 = StringIO('<p>Oh, noez, 401!!</p>')
driver.upload_object_via_stream(iterator=iterator1, container=container,
object_name='index.html')
driver.upload_object_via_stream(iterator=iterator2, container=container,
object_name='404error.html')
driver.upload_object_via_stream(iterator=iterator3, container=container,
object_name='401error.html')
driver.ex_enable_static_website(container=container)
driver.ex_set_error_page(container=container, file_name='error.html')
driver.enable_container_cdn(container=container)
print('All done you can view the website at: ' +
driver.get_container_cdn_url(container=container))
| apache-2.0 |
pulinagrawal/nupic | tests/unit/nupic/data/generators/pattern_machine_test.py | 15 | 3539 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import unittest
from nupic.data.generators.pattern_machine import (PatternMachine,
ConsecutivePatternMachine)
class PatternMachineTest(unittest.TestCase):
def setUp(self):
self.patternMachine = PatternMachine(10000, 5, num=50)
def testGet(self):
patternA = self.patternMachine.get(48)
self.assertEqual(len(patternA), 5)
patternB = self.patternMachine.get(49)
self.assertEqual(len(patternB), 5)
self.assertEqual(patternA & patternB, set())
def testGetOutOfBounds(self):
args = [50]
self.assertRaises(IndexError, self.patternMachine.get, *args)
def testAddNoise(self):
patternMachine = PatternMachine(10000, 1000, num=1)
pattern = patternMachine.get(0)
noisy = patternMachine.addNoise(pattern, 0.0)
self.assertEqual(len(pattern & noisy), 1000)
noisy = patternMachine.addNoise(pattern, 0.5)
self.assertTrue(400 < len(pattern & noisy) < 600)
noisy = patternMachine.addNoise(pattern, 1.0)
self.assertTrue(50 < len(pattern & noisy) < 150)
def testNumbersForBit(self):
pattern = self.patternMachine.get(49)
for bit in pattern:
self.assertEqual(self.patternMachine.numbersForBit(bit), set([49]))
def testNumbersForBitOutOfBounds(self):
args = [10000]
self.assertRaises(IndexError, self.patternMachine.numbersForBit, *args)
def testNumberMapForBits(self):
pattern = self.patternMachine.get(49)
numberMap = self.patternMachine.numberMapForBits(pattern)
self.assertEqual(numberMap.keys(), [49])
self.assertEqual(numberMap[49], pattern)
def testWList(self):
w = [4, 7, 11]
patternMachine = PatternMachine(100, w, num=50)
widths = dict((el, 0) for el in w)
for i in range(50):
pattern = patternMachine.get(i)
width = len(pattern)
self.assertTrue(width in w)
widths[len(pattern)] += 1
for i in w:
self.assertTrue(widths[i] > 0)
class ConsecutivePatternMachineTest(unittest.TestCase):
def setUp(self):
self.patternMachine = ConsecutivePatternMachine(100, 5)
def testGet(self):
pattern = self.patternMachine.get(18)
self.assertEqual(len(pattern), 5)
self.assertEqual(pattern, set([90, 91, 92, 93, 94]))
pattern = self.patternMachine.get(19)
self.assertEqual(len(pattern), 5)
self.assertEqual(pattern, set([95, 96, 97, 98, 99]))
def testGetOutOfBounds(self):
args = [20]
self.assertRaises(IndexError, self.patternMachine.get, *args)
if __name__ == '__main__':
unittest.main()
| agpl-3.0 |
c4fcm/MediaMeter-Dashboard | app/core/authentication.py | 1 | 2010 | import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
import app
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, username, key, active=True, profile=None):
self.name = username
self.id = key
self.key = key
self.active = active
self.created = datetime.datetime.now()
self.profile = profile
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
def get_key(self):
return self.key
def create_in_db_if_needed(self):
if self.exists_in_db():
app.core.logger.debug(" user %s already in db" % self.name)
return
app.core.logger.debug(" user %s created in db" % self.name)
db.users.insert({'username':self.name})
def exists_in_db(self):
return db.users.find_one({'username':self.name}) is not None
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
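# User.cached is a simple in-process cache of User objects keyed by API key
# (which doubles as the user id); the authenticate_* helpers below populate it
# and User.get() reads from it.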
def load_from_db_by_username(username):
return db.users.find_one({'username': username})
def authenticate_by_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyAuthToken():
profile = user_mc.userProfile()
user = User(username, key, profile=profile)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_by_password(username, password):
try:
results = mc.authLogin(username, password)
if 'error' in results:
return AnonymousUserMixin()
profile = results['profile']
key = profile['api_key']
user = User(username, key, profile=profile)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
| apache-2.0 |
ncteisen/grpc | tools/run_tests/performance/scenario_config.py | 7 | 44124 | # Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# performance scenario configuration for various languages
import math
WARMUP_SECONDS = 5
JAVA_WARMUP_SECONDS = 15 # Java needs more warmup time for JIT to kick in.
BENCHMARK_SECONDS = 30
SMOKETEST = 'smoketest'
SCALABLE = 'scalable'
INPROC = 'inproc'
SWEEP = 'sweep'
DEFAULT_CATEGORIES = [SCALABLE, SMOKETEST]
SECURE_SECARGS = {
'use_test_ca': True,
'server_host_override': 'foo.test.google.fr'
}
HISTOGRAM_PARAMS = {
'resolution': 0.01,
'max_possible': 60e9,
}
# target number of RPCs outstanding across all client channels in
# non-ping-pong tests (since we can only specify per-channel numbers, the
# actual target will be slightly higher)
OUTSTANDING_REQUESTS = {'async': 6400, 'async-limited': 800, 'sync': 1000}
# wide is the number of client channels in multi-channel tests (1 otherwise)
WIDE = 64
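# Worked example (based on _ping_pong_scenario below): an unconstrained 'async'
# client targets 6400 outstanding RPCs; spread over the default WIDE = 64
# channels that is ceil(6400 / 64) = 100 outstanding RPCs per channel.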
def _get_secargs(is_secure):
if is_secure:
return SECURE_SECARGS
else:
return None
def remove_nonproto_fields(scenario):
    """Remove special-purpose fields that carry extra info about the scenario
    but don't belong to the ScenarioConfig protobuf message."""
scenario.pop('CATEGORIES', None)
scenario.pop('CLIENT_LANGUAGE', None)
scenario.pop('SERVER_LANGUAGE', None)
scenario.pop('EXCLUDED_POLL_ENGINES', None)
return scenario
def geometric_progression(start, stop, step):
n = start
while n < stop:
yield int(round(n))
n *= step
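# For example, geometric_progression(1, 200, math.sqrt(10)) yields the rounded
# values 1, 3, 10, 32, 100; the messages-per-stream sweeps below rely on this.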
def _payload_type(use_generic_payload, req_size, resp_size):
r = {}
sizes = {
'req_size': req_size,
'resp_size': resp_size,
}
if use_generic_payload:
r['bytebuf_params'] = sizes
else:
r['simple_params'] = sizes
return r
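# e.g. _payload_type(False, 128, 8 * 1024 * 1024) returns
# {'simple_params': {'req_size': 128, 'resp_size': 8388608}}; with
# use_generic_payload=True the same sizes are nested under 'bytebuf_params'.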
def _load_params(offered_load):
r = {}
if offered_load is None:
r['closed_loop'] = {}
else:
load = {}
load['offered_load'] = offered_load
r['poisson'] = load
return r
def _add_channel_arg(config, key, value):
if 'channel_args' in config:
channel_args = config['channel_args']
else:
channel_args = []
config['channel_args'] = channel_args
arg = {'name': key}
if isinstance(value, int):
arg['int_value'] = value
else:
arg['str_value'] = value
channel_args.append(arg)
def _ping_pong_scenario(name,
rpc_type,
client_type,
server_type,
secure=True,
use_generic_payload=False,
req_size=0,
resp_size=0,
unconstrained_client=None,
client_language=None,
server_language=None,
async_server_threads=0,
server_threads_per_cq=0,
client_threads_per_cq=0,
warmup_seconds=WARMUP_SECONDS,
categories=DEFAULT_CATEGORIES,
channels=None,
outstanding=None,
num_clients=None,
resource_quota_size=None,
messages_per_stream=None,
excluded_poll_engines=[],
minimal_stack=False,
offered_load=None):
"""Creates a basic ping pong scenario."""
scenario = {
'name': name,
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': client_type,
'security_params': _get_secargs(secure),
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'threads_per_cq': client_threads_per_cq,
'rpc_type': rpc_type,
'histogram_params': HISTOGRAM_PARAMS,
'channel_args': [],
},
'server_config': {
'server_type': server_type,
'security_params': _get_secargs(secure),
'async_server_threads': async_server_threads,
'threads_per_cq': server_threads_per_cq,
'channel_args': [],
},
'warmup_seconds': warmup_seconds,
'benchmark_seconds': BENCHMARK_SECONDS
}
if resource_quota_size:
scenario['server_config']['resource_quota_size'] = resource_quota_size
if use_generic_payload:
if server_type != 'ASYNC_GENERIC_SERVER':
raise Exception('Use ASYNC_GENERIC_SERVER for generic payload.')
scenario['server_config']['payload_config'] = _payload_type(
use_generic_payload, req_size, resp_size)
scenario['client_config']['payload_config'] = _payload_type(
use_generic_payload, req_size, resp_size)
# Optimization target of 'throughput' does not work well with epoll1 polling
# engine. Use the default value of 'blend'
optimization_target = 'throughput'
if unconstrained_client:
outstanding_calls = outstanding if outstanding is not None else OUTSTANDING_REQUESTS[
unconstrained_client]
# clamp buffer usage to something reasonable (16 gig for now)
MAX_MEMORY_USE = 16 * 1024 * 1024 * 1024
if outstanding_calls * max(req_size, resp_size) > MAX_MEMORY_USE:
outstanding_calls = max(1,
MAX_MEMORY_USE / max(req_size, resp_size))
wide = channels if channels is not None else WIDE
deep = int(math.ceil(1.0 * outstanding_calls / wide))
scenario[
'num_clients'] = num_clients if num_clients is not None else 0 # use as many clients as available.
scenario['client_config']['outstanding_rpcs_per_channel'] = deep
scenario['client_config']['client_channels'] = wide
scenario['client_config']['async_client_threads'] = 0
if offered_load is not None:
optimization_target = 'latency'
else:
scenario['client_config']['outstanding_rpcs_per_channel'] = 1
scenario['client_config']['client_channels'] = 1
scenario['client_config']['async_client_threads'] = 1
optimization_target = 'latency'
scenario['client_config']['load_params'] = _load_params(offered_load)
optimization_channel_arg = {
'name': 'grpc.optimization_target',
'str_value': optimization_target
}
scenario['client_config']['channel_args'].append(optimization_channel_arg)
scenario['server_config']['channel_args'].append(optimization_channel_arg)
if minimal_stack:
_add_channel_arg(scenario['client_config'], 'grpc.minimal_stack', 1)
_add_channel_arg(scenario['server_config'], 'grpc.minimal_stack', 1)
if messages_per_stream:
scenario['client_config']['messages_per_stream'] = messages_per_stream
if client_language:
# the CLIENT_LANGUAGE field is recognized by run_performance_tests.py
scenario['CLIENT_LANGUAGE'] = client_language
if server_language:
# the SERVER_LANGUAGE field is recognized by run_performance_tests.py
scenario['SERVER_LANGUAGE'] = server_language
if categories:
scenario['CATEGORIES'] = categories
if len(excluded_poll_engines):
# The polling engines for which this scenario is excluded
scenario['EXCLUDED_POLL_ENGINES'] = excluded_poll_engines
return scenario
class CXXLanguage:
def __init__(self):
self.safename = 'cxx'
def worker_cmdline(self):
return ['bins/opt/qps_worker']
def worker_port_offset(self):
return 0
def scenarios(self):
# TODO(ctiller): add 70% load latency test
yield _ping_pong_scenario(
'cpp_protobuf_async_unary_1channel_100rpcs_1MB',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
req_size=1024 * 1024,
resp_size=1024 * 1024,
unconstrained_client='async',
outstanding=100,
channels=1,
num_clients=1,
secure=False,
categories=[SMOKETEST] + [INPROC] + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_streaming_from_client_1channel_1MB',
rpc_type='STREAMING_FROM_CLIENT',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
req_size=1024 * 1024,
resp_size=1024 * 1024,
unconstrained_client='async',
outstanding=1,
channels=1,
num_clients=1,
secure=False,
categories=[SMOKETEST] + [INPROC] + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_unary_75Kqps_600channel_60Krpcs_300Breq_50Bresp',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
req_size=300,
resp_size=50,
unconstrained_client='async',
outstanding=30000,
channels=300,
offered_load=37500,
secure=False,
async_server_threads=16,
server_threads_per_cq=1,
categories=[SMOKETEST] + [SCALABLE])
for secure in [True, False]:
secstr = 'secure' if secure else 'insecure'
smoketest_categories = ([SMOKETEST]
if secure else [INPROC]) + [SCALABLE]
yield _ping_pong_scenario(
'cpp_generic_async_streaming_ping_pong_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
use_generic_payload=True,
async_server_threads=1,
secure=secure,
categories=smoketest_categories)
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE])
for mps in geometric_progression(1, 20, 10):
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_%smps_%s' %
(mps, secstr),
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
messages_per_stream=mps,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE])
for mps in geometric_progression(1, 200, math.sqrt(10)):
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_%smps_%s' %
(mps, secstr),
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
messages_per_stream=mps,
minimal_stack=not secure,
categories=[SWEEP])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_1channel_1MBmsg_%s' % secstr,
rpc_type='STREAMING',
req_size=1024 * 1024,
resp_size=1024 * 1024,
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE],
channels=1,
outstanding=100)
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_64KBmsg_%s' %
secstr,
rpc_type='STREAMING',
req_size=64 * 1024,
resp_size=64 * 1024,
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_1cq_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async-limited',
use_generic_payload=True,
secure=secure,
client_threads_per_cq=1000000,
server_threads_per_cq=1000000,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_2waysharedcq_%s'
% secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
client_threads_per_cq=2,
server_threads_per_cq=2,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_streaming_qps_unconstrained_1cq_%s' %
secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async-limited',
secure=secure,
client_threads_per_cq=1000000,
server_threads_per_cq=1000000,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_streaming_qps_unconstrained_2waysharedcq_%s'
% secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
secure=secure,
client_threads_per_cq=2,
server_threads_per_cq=2,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_unary_qps_unconstrained_1cq_%s' % secstr,
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async-limited',
secure=secure,
client_threads_per_cq=1000000,
server_threads_per_cq=1000000,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_unary_qps_unconstrained_2waysharedcq_%s' %
secstr,
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
secure=secure,
client_threads_per_cq=2,
server_threads_per_cq=2,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_one_server_core_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async-limited',
use_generic_payload=True,
async_server_threads=1,
minimal_stack=not secure,
secure=secure)
yield _ping_pong_scenario(
'cpp_protobuf_async_client_sync_server_unary_qps_unconstrained_%s'
% (secstr),
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='SYNC_SERVER',
unconstrained_client='async',
secure=secure,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE],
excluded_poll_engines=['poll-cv'])
yield _ping_pong_scenario(
'cpp_protobuf_async_client_unary_1channel_64wide_128Breq_8MBresp_%s'
% (secstr),
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
channels=1,
outstanding=64,
req_size=128,
resp_size=8 * 1024 * 1024,
secure=secure,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_client_sync_server_streaming_qps_unconstrained_%s'
% secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='SYNC_SERVER',
unconstrained_client='async',
secure=secure,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE],
excluded_poll_engines=['poll-cv'])
yield _ping_pong_scenario(
'cpp_protobuf_async_unary_ping_pong_%s_1MB' % secstr,
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
req_size=1024 * 1024,
resp_size=1024 * 1024,
secure=secure,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE])
for rpc_type in [
'unary', 'streaming', 'streaming_from_client',
'streaming_from_server'
]:
for synchronicity in ['sync', 'async']:
yield _ping_pong_scenario(
'cpp_protobuf_%s_%s_ping_pong_%s' % (synchronicity,
rpc_type, secstr),
rpc_type=rpc_type.upper(),
client_type='%s_CLIENT' % synchronicity.upper(),
server_type='%s_SERVER' % synchronicity.upper(),
async_server_threads=1,
minimal_stack=not secure,
secure=secure)
for size in geometric_progression(1, 1024 * 1024 * 1024 + 1,
8):
yield _ping_pong_scenario(
'cpp_protobuf_%s_%s_qps_unconstrained_%s_%db' %
(synchronicity, rpc_type, secstr, size),
rpc_type=rpc_type.upper(),
req_size=size,
resp_size=size,
client_type='%s_CLIENT' % synchronicity.upper(),
server_type='%s_SERVER' % synchronicity.upper(),
unconstrained_client=synchronicity,
secure=secure,
minimal_stack=not secure,
categories=[SWEEP])
yield _ping_pong_scenario(
'cpp_protobuf_%s_%s_qps_unconstrained_%s' %
(synchronicity, rpc_type, secstr),
rpc_type=rpc_type.upper(),
client_type='%s_CLIENT' % synchronicity.upper(),
server_type='%s_SERVER' % synchronicity.upper(),
unconstrained_client=synchronicity,
secure=secure,
minimal_stack=not secure,
server_threads_per_cq=3,
client_threads_per_cq=3,
categories=smoketest_categories + [SCALABLE])
                    # TODO(vjpai): Re-enable this test. It produces a lot of
                    # timeouts, and it has not yet been conclusively determined
                    # whether they stem from a test failure or from a race in
                    # the library.
# yield _ping_pong_scenario(
# 'cpp_protobuf_%s_%s_qps_unconstrained_%s_500kib_resource_quota' % (synchronicity, rpc_type, secstr),
# rpc_type=rpc_type.upper(),
# client_type='%s_CLIENT' % synchronicity.upper(),
# server_type='%s_SERVER' % synchronicity.upper(),
# unconstrained_client=synchronicity,
# secure=secure,
# categories=smoketest_categories+[SCALABLE],
# resource_quota_size=500*1024)
if rpc_type == 'streaming':
for mps in geometric_progression(1, 20, 10):
yield _ping_pong_scenario(
'cpp_protobuf_%s_%s_qps_unconstrained_%smps_%s'
% (synchronicity, rpc_type, mps, secstr),
rpc_type=rpc_type.upper(),
client_type='%s_CLIENT' % synchronicity.upper(),
server_type='%s_SERVER' % synchronicity.upper(),
unconstrained_client=synchronicity,
secure=secure,
messages_per_stream=mps,
minimal_stack=not secure,
categories=smoketest_categories + [SCALABLE])
for mps in geometric_progression(1, 200, math.sqrt(10)):
yield _ping_pong_scenario(
'cpp_protobuf_%s_%s_qps_unconstrained_%smps_%s'
% (synchronicity, rpc_type, mps, secstr),
rpc_type=rpc_type.upper(),
client_type='%s_CLIENT' % synchronicity.upper(),
server_type='%s_SERVER' % synchronicity.upper(),
unconstrained_client=synchronicity,
secure=secure,
messages_per_stream=mps,
minimal_stack=not secure,
categories=[SWEEP])
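                    # Sweep channel count against outstanding RPCs (SWEEP only);
                    # sync clients are capped at 1200 outstanding calls, and
                    # outstanding must be at least the number of channels.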
for channels in geometric_progression(
1, 20000, math.sqrt(10)):
for outstanding in geometric_progression(
1, 200000, math.sqrt(10)):
if synchronicity == 'sync' and outstanding > 1200:
continue
if outstanding < channels: continue
yield _ping_pong_scenario(
'cpp_protobuf_%s_%s_qps_unconstrained_%s_%d_channels_%d_outstanding'
% (synchronicity, rpc_type, secstr, channels,
outstanding),
rpc_type=rpc_type.upper(),
client_type='%s_CLIENT' % synchronicity.upper(),
server_type='%s_SERVER' % synchronicity.upper(),
unconstrained_client=synchronicity,
secure=secure,
minimal_stack=not secure,
categories=[SWEEP],
channels=channels,
outstanding=outstanding)
def __str__(self):
return 'c++'
class CSharpLanguage:
def __init__(self):
self.safename = str(self)
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_csharp.sh']
def worker_port_offset(self):
return 100
def scenarios(self):
yield _ping_pong_scenario(
'csharp_generic_async_streaming_ping_pong',
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
use_generic_payload=True,
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'csharp_generic_async_streaming_ping_pong_insecure_1MB',
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
req_size=1024 * 1024,
resp_size=1024 * 1024,
use_generic_payload=True,
secure=False,
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'csharp_generic_async_streaming_qps_unconstrained_insecure',
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=False,
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'csharp_protobuf_async_streaming_ping_pong',
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER')
yield _ping_pong_scenario(
'csharp_protobuf_async_unary_ping_pong',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'csharp_protobuf_sync_to_async_unary_ping_pong',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER')
yield _ping_pong_scenario(
'csharp_protobuf_async_unary_qps_unconstrained',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'csharp_protobuf_async_streaming_qps_unconstrained',
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
categories=[SCALABLE])
yield _ping_pong_scenario(
'csharp_to_cpp_protobuf_sync_unary_ping_pong',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
server_language='c++',
async_server_threads=1,
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'csharp_to_cpp_protobuf_async_streaming_ping_pong',
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
server_language='c++',
async_server_threads=1)
yield _ping_pong_scenario(
'csharp_to_cpp_protobuf_async_unary_qps_unconstrained',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
server_language='c++',
categories=[SCALABLE])
yield _ping_pong_scenario(
'csharp_to_cpp_protobuf_sync_to_async_unary_qps_unconstrained',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='sync',
server_language='c++',
categories=[SCALABLE])
yield _ping_pong_scenario(
'cpp_to_csharp_protobuf_async_unary_qps_unconstrained',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
client_language='c++',
categories=[SCALABLE])
yield _ping_pong_scenario(
'csharp_protobuf_async_unary_ping_pong_1MB',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
req_size=1024 * 1024,
resp_size=1024 * 1024,
categories=[SMOKETEST, SCALABLE])
def __str__(self):
return 'csharp'
class PythonLanguage:
def __init__(self):
self.safename = 'python'
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_python.sh']
def worker_port_offset(self):
return 500
def scenarios(self):
yield _ping_pong_scenario(
'python_generic_sync_streaming_ping_pong',
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
use_generic_payload=True,
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'python_protobuf_sync_streaming_ping_pong',
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER')
yield _ping_pong_scenario(
'python_protobuf_async_unary_ping_pong',
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER')
yield _ping_pong_scenario(
'python_protobuf_sync_unary_ping_pong',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'python_protobuf_sync_unary_qps_unconstrained',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='sync')
yield _ping_pong_scenario(
'python_protobuf_sync_streaming_qps_unconstrained',
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='sync')
yield _ping_pong_scenario(
'python_to_cpp_protobuf_sync_unary_ping_pong',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
server_language='c++',
async_server_threads=1,
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'python_to_cpp_protobuf_sync_streaming_ping_pong',
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
server_language='c++',
async_server_threads=1)
yield _ping_pong_scenario(
'python_protobuf_sync_unary_ping_pong_1MB',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
req_size=1024 * 1024,
resp_size=1024 * 1024,
categories=[SMOKETEST, SCALABLE])
def __str__(self):
return 'python'
class RubyLanguage:
def __init__(self):
self.safename = str(self)
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_ruby.sh']
def worker_port_offset(self):
return 300
def scenarios(self):
yield _ping_pong_scenario(
'ruby_protobuf_sync_streaming_ping_pong',
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'ruby_protobuf_unary_ping_pong',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
categories=[SMOKETEST, SCALABLE])
yield _ping_pong_scenario(
'ruby_protobuf_sync_unary_qps_unconstrained',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
unconstrained_client='sync')
yield _ping_pong_scenario(
'ruby_protobuf_sync_streaming_qps_unconstrained',
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
unconstrained_client='sync')
yield _ping_pong_scenario(
'ruby_to_cpp_protobuf_sync_unary_ping_pong',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
server_language='c++',
async_server_threads=1)
yield _ping_pong_scenario(
'ruby_to_cpp_protobuf_sync_streaming_ping_pong',
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
server_language='c++',
async_server_threads=1)
yield _ping_pong_scenario(
'ruby_protobuf_unary_ping_pong_1MB',
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
req_size=1024 * 1024,
resp_size=1024 * 1024,
categories=[SMOKETEST, SCALABLE])
def __str__(self):
return 'ruby'
class Php7Language:
def __init__(self, php7_protobuf_c=False):
self.php7_protobuf_c = php7_protobuf_c
self.safename = str(self)
def worker_cmdline(self):
if self.php7_protobuf_c:
return [
'tools/run_tests/performance/run_worker_php.sh',
'--use_protobuf_c_extension'
]
return ['tools/run_tests/performance/run_worker_php.sh']
def worker_port_offset(self):
if self.php7_protobuf_c:
return 900
return 800
def scenarios(self):
php7_extension_mode = 'php7_protobuf_php_extension'
if self.php7_protobuf_c:
php7_extension_mode = 'php7_protobuf_c_extension'
yield _ping_pong_scenario(
'%s_to_cpp_protobuf_sync_unary_ping_pong' % php7_extension_mode,
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
server_language='c++',
async_server_threads=1)
yield _ping_pong_scenario(
'%s_to_cpp_protobuf_sync_streaming_ping_pong' % php7_extension_mode,
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
server_language='c++',
async_server_threads=1)
        # TODO(ddyihai): Investigate why QPS is better with async_server_threads=1
        # (CPU usage 340%) than with async_server_threads=0 (CPU usage 490%).
yield _ping_pong_scenario(
'%s_to_cpp_protobuf_sync_unary_qps_unconstrained' %
php7_extension_mode,
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
server_language='c++',
outstanding=1,
async_server_threads=1,
unconstrained_client='sync')
yield _ping_pong_scenario(
'%s_to_cpp_protobuf_sync_streaming_qps_unconstrained' %
php7_extension_mode,
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='ASYNC_SERVER',
server_language='c++',
outstanding=1,
async_server_threads=1,
unconstrained_client='sync')
def __str__(self):
if self.php7_protobuf_c:
return 'php7_protobuf_c'
return 'php7'
class JavaLanguage:
def __init__(self):
self.safename = str(self)
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_java.sh']
def worker_port_offset(self):
return 400
def scenarios(self):
for secure in [True, False]:
secstr = 'secure' if secure else 'insecure'
smoketest_categories = ([SMOKETEST] if secure else []) + [SCALABLE]
yield _ping_pong_scenario(
'java_generic_async_streaming_ping_pong_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
use_generic_payload=True,
async_server_threads=1,
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS,
categories=smoketest_categories)
yield _ping_pong_scenario(
'java_protobuf_async_streaming_ping_pong_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
async_server_threads=1,
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS)
yield _ping_pong_scenario(
'java_protobuf_async_unary_ping_pong_%s' % secstr,
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
async_server_threads=1,
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS,
categories=smoketest_categories)
yield _ping_pong_scenario(
'java_protobuf_unary_ping_pong_%s' % secstr,
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
async_server_threads=1,
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS)
yield _ping_pong_scenario(
'java_protobuf_async_unary_qps_unconstrained_%s' % secstr,
rpc_type='UNARY',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS,
categories=smoketest_categories + [SCALABLE])
yield _ping_pong_scenario(
'java_protobuf_async_streaming_qps_unconstrained_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_SERVER',
unconstrained_client='async',
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS,
categories=[SCALABLE])
yield _ping_pong_scenario(
'java_generic_async_streaming_qps_unconstrained_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS,
categories=[SCALABLE])
yield _ping_pong_scenario(
'java_generic_async_streaming_qps_one_server_core_%s' % secstr,
rpc_type='STREAMING',
client_type='ASYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async-limited',
use_generic_payload=True,
async_server_threads=1,
secure=secure,
warmup_seconds=JAVA_WARMUP_SECONDS)
# TODO(jtattermusch): add scenarios java vs C++
def __str__(self):
return 'java'
class GoLanguage:
def __init__(self):
self.safename = str(self)
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_go.sh']
def worker_port_offset(self):
return 600
def scenarios(self):
for secure in [True, False]:
secstr = 'secure' if secure else 'insecure'
smoketest_categories = ([SMOKETEST] if secure else []) + [SCALABLE]
            # ASYNC_GENERIC_SERVER for Go actually uses a sync streaming server,
            # but that's mostly for lack of a better name for the enum value.
yield _ping_pong_scenario(
'go_generic_sync_streaming_ping_pong_%s' % secstr,
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
use_generic_payload=True,
async_server_threads=1,
secure=secure,
categories=smoketest_categories)
yield _ping_pong_scenario(
'go_protobuf_sync_streaming_ping_pong_%s' % secstr,
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
async_server_threads=1,
secure=secure)
yield _ping_pong_scenario(
'go_protobuf_sync_unary_ping_pong_%s' % secstr,
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
async_server_threads=1,
secure=secure,
categories=smoketest_categories)
# unconstrained_client='async' is intended (client uses goroutines)
yield _ping_pong_scenario(
'go_protobuf_sync_unary_qps_unconstrained_%s' % secstr,
rpc_type='UNARY',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
unconstrained_client='async',
secure=secure,
categories=smoketest_categories + [SCALABLE])
# unconstrained_client='async' is intended (client uses goroutines)
yield _ping_pong_scenario(
'go_protobuf_sync_streaming_qps_unconstrained_%s' % secstr,
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='SYNC_SERVER',
unconstrained_client='async',
secure=secure,
categories=[SCALABLE])
# unconstrained_client='async' is intended (client uses goroutines)
            # ASYNC_GENERIC_SERVER for Go actually uses a sync streaming server,
            # but that's mostly for lack of a better name for the enum value.
yield _ping_pong_scenario(
'go_generic_sync_streaming_qps_unconstrained_%s' % secstr,
rpc_type='STREAMING',
client_type='SYNC_CLIENT',
server_type='ASYNC_GENERIC_SERVER',
unconstrained_client='async',
use_generic_payload=True,
secure=secure,
categories=[SCALABLE])
# TODO(jtattermusch): add scenarios go vs C++
def __str__(self):
return 'go'
LANGUAGES = {
'c++': CXXLanguage(),
'csharp': CSharpLanguage(),
'ruby': RubyLanguage(),
'php7': Php7Language(),
'php7_protobuf_c': Php7Language(php7_protobuf_c=True),
'java': JavaLanguage(),
'python': PythonLanguage(),
'go': GoLanguage(),
}
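# LANGUAGES is the lookup table used by the benchmark driver (e.g.
# run_performance_tests.py), which iterates <language>.scenarios() to build the
# JSON scenario configs handed to the worker processes.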
| apache-2.0 |
freedesktop-unofficial-mirror/papyon | papyon/media/conference.py | 6 | 12851 | # -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2009 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from papyon.media import *
from papyon.event.media import *
import pygst
pygst.require('0.10')
import farsight
import gobject
import gst
import logging
import sys
logger = logging.getLogger("papyon.media.conference")
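# Codec preferences per media type as (payload type, encoding name, farsight
# media type, clock rate) tuples; build_codecs() below turns each tuple into a
# farsight.Codec, keeping this order of preference.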
codecs_definitions = {
"audio" : [
(114, "x-msrta", farsight.MEDIA_TYPE_AUDIO, 16000),
(111, "SIREN", farsight.MEDIA_TYPE_AUDIO, 16000),
(112, "G7221", farsight.MEDIA_TYPE_AUDIO, 16000),
(115, "x-msrta", farsight.MEDIA_TYPE_AUDIO, 8000),
(116, "SIREN", farsight.MEDIA_TYPE_AUDIO, 8000),
(4, "G723", farsight.MEDIA_TYPE_AUDIO, 8000),
(8, "PCMA", farsight.MEDIA_TYPE_AUDIO, 8000),
(0, "PCMU", farsight.MEDIA_TYPE_AUDIO, 8000),
(97, "RED", farsight.MEDIA_TYPE_AUDIO, 8000),
(101, "telephone-event", farsight.MEDIA_TYPE_AUDIO, 8000)
],
"video" : [
(121, "x-rtvc1", farsight.MEDIA_TYPE_VIDEO, 90000),
(34, "H263", farsight.MEDIA_TYPE_VIDEO, 90000)
]
}
types = {
0 : None,
farsight.CANDIDATE_TYPE_HOST : "host",
farsight.CANDIDATE_TYPE_SRFLX : "srflx",
farsight.CANDIDATE_TYPE_PRFLX : "prflx",
farsight.CANDIDATE_TYPE_RELAY : "relay"
}
protos = {
farsight.NETWORK_PROTOCOL_TCP : "TCP",
farsight.NETWORK_PROTOCOL_UDP : "UDP"
}
media_names = {
farsight.MEDIA_TYPE_AUDIO : "audio",
farsight.MEDIA_TYPE_VIDEO : "video"
}
media_types = {
"audio" : farsight.MEDIA_TYPE_AUDIO,
"video" : farsight.MEDIA_TYPE_VIDEO
}
class Conference(gobject.GObject):
def __init__(self):
gobject.GObject.__init__(self)
def set_source(self, source):
pass
class MediaSessionHandler(MediaSessionEventInterface):
def __init__(self, session):
MediaSessionEventInterface.__init__(self, session)
self._conference = None
self._handlers = []
self._setup()
for stream in session.streams:
self.on_stream_added(stream)
def _setup(self):
self._pipeline = gst.Pipeline()
bus = self._pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message", self.on_bus_message)
if self._session.type is MediaSessionType.WEBCAM_RECV:
name = "fsmsncamrecvconference"
elif self._session.type is MediaSessionType.WEBCAM_SEND:
name = "fsmsncamsendconference"
else:
name = "fsrtpconference"
self._conference = gst.element_factory_make(name)
self._participant = self._conference.new_participant("")
self._pipeline.add(self._conference)
self._pipeline.set_state(gst.STATE_PLAYING)
#FIXME Create FsElementAddedNotifier
def on_stream_added(self, stream):
logger.debug("Stream \"%s\" added" % stream.name)
handler = MediaStreamHandler(stream)
handler.setup(self._conference, self._pipeline, self._participant,
self._session.type)
self._handlers.append(handler)
if self._session.type is MediaSessionType.WEBCAM_RECV or\
self._session.type is MediaSessionType.WEBCAM_SEND:
stream.set_local_codecs([])
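    # Farsight reports negotiation progress (codecs ready, new/prepared local
    # candidates, active candidate pairs) as element messages on the pipeline
    # bus; on_bus_message() translates them into calls on the papyon stream.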
def on_bus_message(self, bus, msg):
ret = gst.BUS_PASS
if msg.type == gst.MESSAGE_ELEMENT:
s = msg.structure
if s.has_name("farsight-error"):
logger.error("Farsight error : %s" % s['error-msg'])
if s.has_name("farsight-codecs-changed"):
logger.debug("Farsight codecs changed")
ret = gst.BUS_DROP
ready = s["session"].get_property("codecs-ready")
if ready:
codecs = s["session"].get_property("codecs")
name = media_names[s["session"].get_property("media-type")]
stream = self._session.get_stream(name)
stream.set_local_codecs(convert_fs_codecs(codecs))
if s.has_name("farsight-new-local-candidate"):
logger.debug("New local candidate")
ret = gst.BUS_DROP
name = media_names[s["stream"].get_property("session").get_property("media-type")]
candidate = convert_fs_candidate(s["candidate"])
stream = self._session.get_stream(name)
stream.new_local_candidate(candidate)
if s.has_name("farsight-local-candidates-prepared"):
logger.debug("Local candidates are prepared")
ret = gst.BUS_DROP
type = s["stream"].get_property("session").get_property("media-type")
name = media_names[type]
stream = self._session.get_stream(name)
stream.local_candidates_prepared()
if s.has_name("farsight-new-active-candidate-pair"):
logger.debug("New active candidate pair")
ret = gst.BUS_DROP
type = s["stream"].get_property("session").get_property("media-type")
name = media_names[type]
stream = self._session.get_stream(name)
local = s["local-candidate"]
remote = s["remote-candidate"]
local = convert_fs_candidate(local)
remote = convert_fs_candidate(remote)
stream.new_active_candidate_pair(local, remote)
return ret
class MediaStreamHandler(MediaStreamEventInterface):
def __init__(self, stream):
MediaStreamEventInterface.__init__(self, stream)
def setup(self, conference, pipeline, participant, type):
relays = []
for r in self._stream.relays:
relay = gst.Structure("relay")
relay.set_value("username", r.username)
relay.set_value("password", r.password)
relay.set_value("ip", r.ip)
relay.set_value("port", r.port, "uint")
relays.append(relay)
if type in (MediaSessionType.SIP, MediaSessionType.TUNNELED_SIP):
if type is MediaSessionType.TUNNELED_SIP:
compatibility_mode = 3
else:
compatibility_mode = 2
params = {"stun-ip" : "64.14.48.28", "stun-port" : 3478,
"compatibility-mode" : compatibility_mode,
"controlling-mode": self._stream.created_locally,
"relay-info": relays}
else:
params = {}
media_type = media_types[self._stream.name]
self.fssession = conference.new_session(media_type)
self.fssession.set_codec_preferences(build_codecs(self._stream.name))
self.fsstream = self.fssession.new_stream(participant,
self._stream.direction, "nice", params)
self.fsstream.connect("src-pad-added", self.on_src_pad_added, pipeline)
source = make_source(self._stream.name)
pipeline.add(source)
source.get_pad("src").link(self.fssession.get_property("sink-pad"))
pipeline.set_state(gst.STATE_PLAYING)
def on_stream_closed(self):
del self.fsstream
def on_remote_candidates_received(self, candidates):
candidates = filter(lambda x: x.transport == "UDP", candidates)
candidates = convert_media_candidates(candidates)
self.fsstream.set_remote_candidates(candidates)
def on_remote_codecs_received(self, codecs):
codecs = convert_media_codecs(codecs, self._stream.name)
self.fsstream.set_remote_codecs(codecs)
def on_src_pad_added(self, stream, pad, codec, pipeline):
sink = make_sink(self._stream.name)
pipeline.add(sink)
sink.set_state(gst.STATE_PLAYING)
pad.link(sink.get_pad("sink"))
# Farsight utility functions
def create_notifier(pipeline, filename):
notifier = farsight.ElementAddedNotifier()
notifier.add(pipeline)
notifier.set_properties_from_file(filename)
return notifier
def convert_fs_candidate(fscandidate):
candidate = MediaCandidate()
candidate.ip = fscandidate.ip
candidate.port = fscandidate.port
candidate.foundation = fscandidate.foundation
candidate.component_id = fscandidate.component_id
candidate.transport = protos[fscandidate.proto]
candidate.priority = int(fscandidate.priority)
candidate.username = fscandidate.username
candidate.password = fscandidate.password
candidate.type = types[fscandidate.type]
candidate.base_ip = fscandidate.base_ip
candidate.base_port = fscandidate.base_port
return candidate
def convert_media_candidates(candidates):
fscandidates = []
for candidate in candidates:
proto = farsight.NETWORK_PROTOCOL_TCP
if candidate.transport == "UDP":
proto = farsight.NETWORK_PROTOCOL_UDP
type = 0
for k,v in types.iteritems():
if v == candidate.type:
type = k
fscandidate = farsight.Candidate()
fscandidate.foundation = candidate.foundation
fscandidate.ip = candidate.ip
fscandidate.port = candidate.port
fscandidate.component_id = candidate.component_id
fscandidate.proto = proto
fscandidate.type = type
fscandidate.username = candidate.username
fscandidate.password = candidate.password
fscandidate.priority = int(candidate.priority)
fscandidates.append(fscandidate)
return fscandidates
def build_codecs(type):
codecs = []
for args in codecs_definitions[type]:
codec = farsight.Codec(*args)
codecs.append(codec)
return codecs
def convert_fs_codecs(fscodecs):
codecs = []
for fscodec in fscodecs:
codec = MediaCodec()
codec.payload = fscodec.id
codec.encoding = fscodec.encoding_name
codec.clockrate = fscodec.clock_rate
codec.params = dict(fscodec.optional_params)
codecs.append(codec)
return codecs
def convert_media_codecs(codecs, name):
fscodecs = []
media_type = media_types[name]
for codec in codecs:
fscodec = farsight.Codec(
codec.payload,
codec.encoding,
media_type,
codec.clockrate)
fscodec.optional_params = codec.params.items()
fscodecs.append(fscodec)
return fscodecs
# GStreamer utility functions
def make_source(media_name):
func = globals()["make_%s_source" % media_name]
return func()
def make_sink(media_name):
func = globals()["make_%s_sink" % media_name]
return func()
def make_audio_source(name="audiotestsrc"):
element = gst.element_factory_make(name)
element.set_property("is-live", True)
return element
def make_audio_sink(async=False):
return gst.element_factory_make("autoaudiosink")
def make_video_source(name="videotestsrc"):
"Make a bin with a video source in it, defaulting to first webcamera "
bin = gst.Bin("videosrc")
src = gst.element_factory_make(name, name)
src.set_property("is-live", True)
src.set_property("pattern", 0)
bin.add(src)
filter = gst.element_factory_make("capsfilter")
filter.set_property("caps", gst.Caps("video/x-raw-yuv , width=[300,500] , height=[200,500], framerate=[20/1,30/1]"))
bin.add(filter)
src.link(filter)
videoscale = gst.element_factory_make("videoscale")
bin.add(videoscale)
filter.link(videoscale)
bin.add_pad(gst.GhostPad("src", videoscale.get_pad("src")))
return bin
def make_video_sink(async=False):
"Make a bin with a video sink in it, that will be displayed on xid."
bin = gst.Bin("videosink")
sink = gst.element_factory_make("ximagesink", "imagesink")
sink.set_property("sync", async)
sink.set_property("async", async)
bin.add(sink)
colorspace = gst.element_factory_make("ffmpegcolorspace")
bin.add(colorspace)
videoscale = gst.element_factory_make("videoscale")
bin.add(videoscale)
videoscale.link(colorspace)
colorspace.link(sink)
bin.add_pad(gst.GhostPad("sink", videoscale.get_pad("sink")))
#sink.set_data("xid", xid) #Future work - proper gui place for imagesink ?
return bin
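# Typical wiring (sketch only): a caller constructs MediaSessionHandler(session)
# for a papyon media session; instantiating it builds the farsight conference
# and GStreamer pipeline and handles stream/codec/candidate events from there.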
| gpl-2.0 |
noodle-learns-programming/aiohttp | tests/test_client_functional.py | 2 | 55390 | """Http client functional tests."""
import binascii
import gc
import io
import os.path
import json
import http.cookies
import asyncio
import unittest
from unittest import mock
import aiohttp
from aiohttp import client, helpers
from aiohttp import test_utils
from aiohttp.multidict import MultiDict
from aiohttp.multipart import MultipartWriter
class TestHttpClientFunctional(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
        # just in case we have transport close callbacks
test_utils.run_briefly(self.loop)
self.loop.close()
gc.collect()
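    # Each test below spins up a throwaway HTTP server via test_utils.run_server
    # and drives the client.request() API against it on the test's private loop.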
def test_HTTP_200_OK_METHOD(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
for meth in ('get', 'post', 'put', 'delete', 'head'):
r = self.loop.run_until_complete(
client.request(meth, httpd.url('method', meth),
loop=self.loop))
content1 = self.loop.run_until_complete(r.read())
content2 = self.loop.run_until_complete(r.read())
content = content1.decode()
self.assertEqual(r.status, 200)
if meth == 'head':
self.assertEqual(b'', content1)
else:
self.assertIn('"method": "%s"' % meth.upper(), content)
self.assertEqual(content1, content2)
r.close()
def test_HTTP_200_OK_METHOD_connector(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
conn = aiohttp.TCPConnector(
conn_timeout=0.2, resolve=True, loop=self.loop)
conn.clear_resolved_hosts()
for meth in ('get', 'post', 'put', 'delete', 'head'):
r = self.loop.run_until_complete(
client.request(
meth, httpd.url('method', meth),
connector=conn, loop=self.loop))
content1 = self.loop.run_until_complete(r.read())
content2 = self.loop.run_until_complete(r.read())
content = content1.decode()
self.assertEqual(r.status, 200)
if meth == 'head':
self.assertEqual(b'', content1)
else:
self.assertIn('"method": "%s"' % meth.upper(), content)
self.assertEqual(content1, content2)
r.close()
def test_use_global_loop(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
try:
asyncio.set_event_loop(self.loop)
r = self.loop.run_until_complete(
client.request('get', httpd.url('method', 'get')))
finally:
asyncio.set_event_loop(None)
content1 = self.loop.run_until_complete(r.read())
content2 = self.loop.run_until_complete(r.read())
content = content1.decode()
self.assertEqual(r.status, 200)
self.assertIn('"method": "GET"', content)
self.assertEqual(content1, content2)
r.close()
def test_HTTP_302_REDIRECT_GET(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
@asyncio.coroutine
def go():
r = yield from client.request('get',
httpd.url('redirect', 2),
loop=self.loop)
self.assertEqual(r.status, 200)
self.assertEqual(2, httpd['redirects'])
r.close()
self.loop.run_until_complete(go())
def test_HTTP_302_REDIRECT_NON_HTTP(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
@asyncio.coroutine
def go():
with self.assertRaises(ValueError):
yield from client.request('get',
httpd.url('redirect_err'),
loop=self.loop)
self.loop.run_until_complete(go())
def test_HTTP_302_REDIRECT_POST(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('post', httpd.url('redirect', 2),
data={'some': 'data'}, loop=self.loop))
content = self.loop.run_until_complete(r.content.read())
content = content.decode()
self.assertEqual(r.status, 200)
self.assertIn('"method": "GET"', content)
self.assertEqual(2, httpd['redirects'])
r.close()
def test_HTTP_302_REDIRECT_POST_with_content_length_header(self):
data = json.dumps({'some': 'data'})
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('post', httpd.url('redirect', 2),
data=data,
headers={'Content-Length': str(len(data))},
loop=self.loop))
content = self.loop.run_until_complete(r.content.read())
content = content.decode()
self.assertEqual(r.status, 200)
self.assertIn('"method": "GET"', content)
self.assertEqual(2, httpd['redirects'])
r.close()
def test_HTTP_307_REDIRECT_POST(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('post', httpd.url('redirect_307', 2),
data={'some': 'data'}, loop=self.loop))
content = self.loop.run_until_complete(r.content.read())
content = content.decode()
self.assertEqual(r.status, 200)
self.assertIn('"method": "POST"', content)
self.assertEqual(2, httpd['redirects'])
r.close()
def test_HTTP_302_max_redirects(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('get', httpd.url('redirect', 5),
max_redirects=2, loop=self.loop))
self.assertEqual(r.status, 302)
self.assertEqual(2, httpd['redirects'])
r.close()
def test_HTTP_200_GET_WITH_PARAMS(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('get', httpd.url('method', 'get'),
params={'q': 'test'}, loop=self.loop))
content = self.loop.run_until_complete(r.content.read())
content = content.decode()
self.assertIn('"query": "q=test"', content)
self.assertEqual(r.status, 200)
r.close()
def test_HTTP_200_GET_MultiDict_PARAMS(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('get', httpd.url('method', 'get'),
params=MultiDict(
[('q', 'test1'), ('q', 'test2')]),
loop=self.loop))
content = self.loop.run_until_complete(r.content.read())
content = content.decode()
self.assertIn('"query": "q=test1&q=test2"', content)
self.assertEqual(r.status, 200)
r.close()
def test_HTTP_200_GET_WITH_MIXED_PARAMS(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
@asyncio.coroutine
def go():
r = yield from client.request(
'get', httpd.url('method', 'get') + '?test=true',
params={'q': 'test'}, loop=self.loop)
content = yield from r.content.read()
content = content.decode()
self.assertIn('"query": "test=true&q=test"', content)
self.assertEqual(r.status, 200)
r.close()
                # let the loop make one iteration to call connection_lost
                # and close the socket
yield from asyncio.sleep(0, loop=self.loop)
self.loop.run_until_complete(go())
def test_POST_DATA(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
r = self.loop.run_until_complete(
client.request('post', url, data={'some': 'data'},
loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual({'some': ['data']}, content['form'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_DATA_with_explicit_formdata(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
form = aiohttp.FormData()
form.add_field('name', 'text')
r = self.loop.run_until_complete(
client.request('post', url,
data=form,
loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual({'name': ['text']}, content['form'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_DATA_with_charset(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
form = aiohttp.FormData()
form.add_field('name', 'текст',
content_type='text/plain; charset=koi8-r')
r = self.loop.run_until_complete(
client.request(
'post', url, data=form,
loop=self.loop))
content = self.loop.run_until_complete(r.json())
self.assertEqual(1, len(content['multipart-data']))
field = content['multipart-data'][0]
self.assertEqual('name', field['name'])
self.assertEqual('текст', field['data'])
self.assertEqual(r.status, 200)
def test_POST_DATA_with_content_transfer_encoding(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
form = aiohttp.FormData()
form.add_field('name', b'123',
content_transfer_encoding='base64')
r = self.loop.run_until_complete(
client.request(
'post', url, data=form,
loop=self.loop))
content = self.loop.run_until_complete(r.json())
self.assertEqual(1, len(content['multipart-data']))
field = content['multipart-data'][0]
self.assertEqual('name', field['name'])
self.assertEqual(b'123', binascii.a2b_base64(field['data']))
# self.assertEqual('base64', field['content-transfer-encoding'])
self.assertEqual(r.status, 200)
def test_POST_MultiDict(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
r = self.loop.run_until_complete(
client.request('post', url, data=MultiDict(
[('q', 'test1'), ('q', 'test2')]),
loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual({'q': ['test1', 'test2']}, content['form'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_DATA_DEFLATE(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
r = self.loop.run_until_complete(
client.request('post', url,
data={'some': 'data'}, compress=True,
loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual('deflate', content['compression'])
self.assertEqual({'some': ['data']}, content['form'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
r = self.loop.run_until_complete(
client.request(
'post', url, data={'some': f, 'test': b'data'},
chunked=1024,
headers={'Transfer-Encoding': 'chunked'},
loop=self.loop))
content = self.loop.run_until_complete(r.json())
files = list(
sorted(content['multipart-data'],
key=lambda d: d['name']))
f.seek(0)
filename = os.path.split(f.name)[-1]
self.assertEqual(2, len(content['multipart-data']))
self.assertEqual('some', files[0]['name'])
self.assertEqual(filename, files[0]['filename'])
self.assertEqual(f.read(), files[0]['data'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_DEPRECATED(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
with self.assertWarns(DeprecationWarning):
r = self.loop.run_until_complete(
client.request(
'post', url, files={'some': f, 'test': b'data'},
chunked=1024,
headers={'Transfer-Encoding': 'chunked'},
loop=self.loop))
content = self.loop.run_until_complete(r.json())
files = list(
sorted(content['multipart-data'],
key=lambda d: d['name']))
f.seek(0)
filename = os.path.split(f.name)[-1]
self.assertEqual(2, len(content['multipart-data']))
self.assertEqual('some', files[0]['name'])
self.assertEqual(filename, files[0]['filename'])
self.assertEqual(f.read(), files[0]['data'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_DEFLATE(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
r = self.loop.run_until_complete(
client.request('post', url, data={'some': f},
chunked=1024, compress='deflate',
loop=self.loop))
content = self.loop.run_until_complete(r.json())
f.seek(0)
filename = os.path.split(f.name)[-1]
self.assertEqual('deflate', content['compression'])
self.assertEqual(1, len(content['multipart-data']))
self.assertEqual(
'some', content['multipart-data'][0]['name'])
self.assertEqual(
filename, content['multipart-data'][0]['filename'])
self.assertEqual(
f.read(), content['multipart-data'][0]['data'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_STR(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
r = self.loop.run_until_complete(
client.request('post', url, data=[('some', f.read())],
loop=self.loop))
content = self.loop.run_until_complete(r.json())
f.seek(0)
self.assertEqual(1, len(content['form']))
self.assertIn('some', content['form'])
self.assertEqual(f.read(), content['form']['some'][0])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_STR_SIMPLE(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
r = self.loop.run_until_complete(
client.request('post', url, data=f.read(), loop=self.loop))
content = self.loop.run_until_complete(r.json())
f.seek(0)
self.assertEqual(f.read(), content['content'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_LIST(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
r = self.loop.run_until_complete(
client.request('post', url, data=[('some', f)],
loop=self.loop))
content = self.loop.run_until_complete(r.json())
f.seek(0)
filename = os.path.split(f.name)[-1]
self.assertEqual(1, len(content['multipart-data']))
self.assertEqual(
'some', content['multipart-data'][0]['name'])
self.assertEqual(
filename, content['multipart-data'][0]['filename'])
self.assertEqual(
f.read(), content['multipart-data'][0]['data'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_LIST_CT(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
form = aiohttp.FormData()
form.add_field('some', f, content_type='text/plain')
r = self.loop.run_until_complete(
client.request('post', url, loop=self.loop,
data=form))
content = self.loop.run_until_complete(r.json())
f.seek(0)
filename = os.path.split(f.name)[-1]
self.assertEqual(1, len(content['multipart-data']))
self.assertEqual(
'some', content['multipart-data'][0]['name'])
self.assertEqual(
filename, content['multipart-data'][0]['filename'])
self.assertEqual(
f.read(), content['multipart-data'][0]['data'])
self.assertEqual(
'text/plain', content['multipart-data'][0]['content-type'])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_SINGLE(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
with self.assertRaises(ValueError):
self.loop.run_until_complete(
client.request('post', url, data=f, loop=self.loop))
def test_POST_FILES_SINGLE_BINARY(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname, 'rb') as f:
r = self.loop.run_until_complete(
client.request('post', url, data=f, loop=self.loop))
content = self.loop.run_until_complete(r.json())
f.seek(0)
self.assertEqual(0, len(content['multipart-data']))
self.assertEqual(content['content'], f.read().decode())
# if system cannot determine 'application/pgp-keys' MIME type
# then use 'application/octet-stream' default
self.assertIn(content['headers']['Content-Type'],
('application/pgp-keys',
'application/octet-stream'))
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_IO(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
data = io.BytesIO(b'data')
r = self.loop.run_until_complete(
client.request('post', url, data=[data], loop=self.loop))
content = self.loop.run_until_complete(r.json())
self.assertEqual(1, len(content['multipart-data']))
self.assertEqual(
{'content-type': 'application/octet-stream',
'data': 'data',
'filename': 'unknown',
'filename*': "utf-8''unknown",
'name': 'unknown'}, content['multipart-data'][0])
self.assertEqual(r.status, 200)
r.close()
def test_POST_MULTIPART(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
with MultipartWriter('form-data') as writer:
writer.append('foo')
writer.append_json({'bar': 'баз'})
writer.append_form([('тест', '4'), ('сетс', '2')])
r = self.loop.run_until_complete(
client.request('post', url, data=writer, loop=self.loop))
content = self.loop.run_until_complete(r.json())
self.assertEqual(3, len(content['multipart-data']))
self.assertEqual({'content-type': 'text/plain', 'data': 'foo'},
content['multipart-data'][0])
self.assertEqual({'content-type': 'application/json',
'data': '{"bar": "\\u0431\\u0430\\u0437"}'},
content['multipart-data'][1])
self.assertEqual(
{'content-type': 'application/x-www-form-urlencoded',
'data': '%D1%82%D0%B5%D1%81%D1%82=4&'
'%D1%81%D0%B5%D1%82%D1%81=2'},
content['multipart-data'][2])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_IO_WITH_PARAMS(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
data = io.BytesIO(b'data')
r = self.loop.run_until_complete(
client.request('post', url,
data=(('test', 'true'),
MultiDict(
[('q', 't1'), ('q', 't2')]),
data),
loop=self.loop))
content = self.loop.run_until_complete(r.json())
self.assertEqual(4, len(content['multipart-data']))
self.assertEqual(
{'content-type': 'text/plain',
'data': 'true',
'name': 'test'}, content['multipart-data'][0])
self.assertEqual(
{'content-type': 'application/octet-stream',
'data': 'data',
'filename': 'unknown',
'filename*': "utf-8''unknown",
'name': 'unknown'}, content['multipart-data'][1])
self.assertEqual(
{'content-type': 'text/plain',
'data': 't1',
'name': 'q'}, content['multipart-data'][2])
self.assertEqual(
{'content-type': 'text/plain',
'data': 't2',
'name': 'q'}, content['multipart-data'][3])
self.assertEqual(r.status, 200)
r.close()
def test_POST_FILES_WITH_DATA(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname) as f:
r = self.loop.run_until_complete(
client.request('post', url, loop=self.loop,
data={'test': 'true', 'some': f}))
content = self.loop.run_until_complete(r.json())
files = list(
sorted(content['multipart-data'],
key=lambda d: d['name']))
self.assertEqual(2, len(content['multipart-data']))
self.assertEqual('test', files[1]['name'])
self.assertEqual('true', files[1]['data'])
f.seek(0)
filename = os.path.split(f.name)[-1]
self.assertEqual('some', files[0]['name'])
self.assertEqual(filename, files[0]['filename'])
self.assertEqual(f.read(), files[0]['data'])
self.assertEqual(r.status, 200)
r.close()
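    # The following tests feed the request body from asyncio primitives (a
    # coroutine, StreamReader, DataQueue, ChunksQueue) to exercise the streaming
    # upload code paths.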
def test_POST_STREAM_DATA(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname, 'rb') as f:
data = f.read()
fut = asyncio.Future(loop=self.loop)
@asyncio.coroutine
def stream():
yield from fut
yield data
self.loop.call_later(0.01, fut.set_result, True)
r = self.loop.run_until_complete(
client.request(
'post', url, data=stream(),
headers={'Content-Length': str(len(data))},
loop=self.loop))
content = self.loop.run_until_complete(r.json())
r.close()
self.assertEqual(str(len(data)),
content['headers']['Content-Length'])
self.assertEqual('application/octet-stream',
content['headers']['Content-Type'])
def test_POST_StreamReader(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname, 'rb') as f:
data = f.read()
stream = aiohttp.StreamReader(loop=self.loop)
stream.feed_data(data)
stream.feed_eof()
r = self.loop.run_until_complete(
client.request(
'post', url, data=stream,
headers={'Content-Length': str(len(data))},
loop=self.loop))
content = self.loop.run_until_complete(r.json())
r.close()
self.assertEqual(str(len(data)),
content['headers']['Content-Length'])
def test_POST_DataQueue(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname, 'rb') as f:
data = f.read()
stream = aiohttp.DataQueue(loop=self.loop)
stream.feed_data(data[:100], 100)
stream.feed_data(data[100:], len(data[100:]))
stream.feed_eof()
r = self.loop.run_until_complete(
client.request(
'post', url, data=stream,
headers={'Content-Length': str(len(data))},
loop=self.loop))
content = self.loop.run_until_complete(r.json())
r.close()
self.assertEqual(str(len(data)),
content['headers']['Content-Length'])
def test_POST_ChunksQueue(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
here = os.path.dirname(__file__)
fname = os.path.join(here, 'sample.key')
with open(fname, 'rb') as f:
data = f.read()
stream = aiohttp.ChunksQueue(loop=self.loop)
stream.feed_data(data[:100], 100)
d = data[100:]
stream.feed_data(d, len(d))
stream.feed_eof()
r = self.loop.run_until_complete(
client.request(
'post', url, data=stream,
headers={'Content-Length': str(len(data))},
loop=self.loop))
content = self.loop.run_until_complete(r.json())
r.close()
self.assertEqual(str(len(data)),
content['headers']['Content-Length'])
def test_expect_continue(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
r = self.loop.run_until_complete(
client.request('post', url, data={'some': 'data'},
expect100=True, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual('100-continue', content['headers']['Expect'])
self.assertEqual(r.status, 200)
r.close()
def test_encoding(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('get', httpd.url('encoding', 'deflate'),
loop=self.loop))
self.assertEqual(r.status, 200)
r.close()
r = self.loop.run_until_complete(
client.request('get', httpd.url('encoding', 'gzip'),
loop=self.loop))
self.assertEqual(r.status, 200)
r.close()
def test_cookies(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
c = http.cookies.Morsel()
c.set('test3', '456', '456')
r = self.loop.run_until_complete(
client.request(
'get', httpd.url('method', 'get'), loop=self.loop,
cookies={'test1': '123', 'test2': c}))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.content.read())
self.assertIn(b'"Cookie": "test1=123; test3=456"', bytes(content))
r.close()
@mock.patch('aiohttp.client_reqrep.client_logger')
def test_set_cookies(self, m_log):
with test_utils.run_server(self.loop, router=Functional) as httpd:
resp = self.loop.run_until_complete(
client.request('get', httpd.url('cookies'), loop=self.loop))
self.assertEqual(resp.status, 200)
self.assertEqual(list(sorted(resp.cookies.keys())), ['c1', 'c2'])
self.assertEqual(resp.cookies['c1'].value, 'cookie1')
self.assertEqual(resp.cookies['c2'].value, 'cookie2')
resp.close()
m_log.warning.assert_called_with('Can not load response cookies: %s',
mock.ANY)
def test_share_cookies(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
with self.assertWarns(DeprecationWarning):
conn = aiohttp.TCPConnector(share_cookies=True, loop=self.loop)
resp = self.loop.run_until_complete(
client.request('get', httpd.url('cookies'),
connector=conn, loop=self.loop))
self.assertIn('SET-COOKIE', resp.headers)
self.assertEqual(resp.cookies['c1'].value, 'cookie1')
self.assertEqual(resp.cookies['c2'].value, 'cookie2')
self.assertEqual(conn.cookies, resp.cookies)
resp.close()
resp2 = self.loop.run_until_complete(
client.request('get', httpd.url('method', 'get'),
connector=conn, loop=self.loop))
self.assertNotIn('SET-COOKIE', resp2.headers)
data = self.loop.run_until_complete(resp2.json())
self.assertEqual(data['headers']['Cookie'],
'c1=cookie1; c2=cookie2')
resp2.close()
def test_chunked(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('get', httpd.url('chunked'), loop=self.loop))
self.assertEqual(r.status, 200)
self.assertEqual(r.headers.getone('TRANSFER-ENCODING'), 'chunked')
content = self.loop.run_until_complete(r.json())
self.assertEqual(content['path'], '/chunked')
r.close()
def test_broken_connection(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('get', httpd.url('broken'), loop=self.loop))
self.assertEqual(r.status, 200)
self.assertRaises(
aiohttp.ServerDisconnectedError,
self.loop.run_until_complete, r.json())
r.close()
def test_request_conn_error(self):
self.assertRaises(
aiohttp.ClientConnectionError,
self.loop.run_until_complete,
client.request('get', 'http://0.0.0.0:1', loop=self.loop))
def test_request_conn_closed(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
httpd['close'] = True
self.assertRaises(
aiohttp.ClientHttpProcessingError,
self.loop.run_until_complete,
client.request(
'get', httpd.url('method', 'get'), loop=self.loop))
def test_keepalive(self):
from aiohttp import connector
with self.assertWarns(DeprecationWarning):
c = connector.TCPConnector(share_cookies=True, loop=self.loop)
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request('get', httpd.url('keepalive',),
connector=c, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(content['content'], 'requests=1')
r.close()
r = self.loop.run_until_complete(
client.request('get', httpd.url('keepalive'),
connector=c, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(content['content'], 'requests=2')
r.close()
c.close()
def test_session_close(self):
conn = aiohttp.TCPConnector(loop=self.loop)
with test_utils.run_server(self.loop, router=Functional) as httpd:
r = self.loop.run_until_complete(
client.request(
'get', httpd.url('keepalive') + '?close=1',
connector=conn, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(content['content'], 'requests=1')
r.close()
r = self.loop.run_until_complete(
client.request('get', httpd.url('keepalive'),
connector=conn, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(content['content'], 'requests=1')
r.close()
conn.close()
@mock.patch('aiohttp.client_reqrep.client_logger')
def test_connector_cookies(self, m_log):
from aiohttp import connector
with self.assertWarns(DeprecationWarning):
conn = connector.TCPConnector(share_cookies=True, loop=self.loop)
with test_utils.run_server(self.loop, router=Functional) as httpd:
conn.update_cookies({'test': '1'})
r = self.loop.run_until_complete(
client.request('get', httpd.url('cookies'),
connector=conn, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(content['headers']['Cookie'], 'test=1')
r.close()
cookies = sorted([(k, v.value) for k, v in conn.cookies.items()])
self.assertEqual(
cookies, [('c1', 'cookie1'), ('c2', 'cookie2'), ('test', '1')])
m_log.warning.assert_called_with(
'Can not load response cookies: %s', mock.ANY)
conn.close()
def test_multidict_headers(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('method', 'post')
data = b'sample data'
r = self.loop.run_until_complete(
client.request(
'post', url, data=data,
headers=MultiDict(
{'Content-Length': str(len(data))}),
loop=self.loop))
content = self.loop.run_until_complete(r.json())
r.close()
self.assertEqual(str(len(data)),
content['headers']['Content-Length'])
def test_close_implicit_connector(self):
@asyncio.coroutine
def go(url):
r = yield from client.request('GET', url, loop=self.loop)
connection = r.connection
self.assertIsNotNone(connection)
connector = connection._connector
self.assertIsNotNone(connector)
yield from r.read()
self.assertEqual(0, len(connector._conns))
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('keepalive')
self.loop.run_until_complete(go(url))
def test_dont_close_explicit_connector(self):
@asyncio.coroutine
def go(url):
connector = aiohttp.TCPConnector(loop=self.loop)
r = yield from client.request('GET', url,
connector=connector,
loop=self.loop)
yield from r.read()
self.assertEqual(1, len(connector._conns))
connector.close()
with test_utils.run_server(self.loop, router=Functional) as httpd:
url = httpd.url('keepalive')
self.loop.run_until_complete(go(url))
def test_server_close_keepalive_connection(self):
class Proto(asyncio.Protocol):
def connection_made(self, transport):
self.transp = transport
self.data = b''
def data_received(self, data):
self.data += data
if data.endswith(b'\r\n\r\n'):
self.transp.write(
b'HTTP/1.1 200 OK\r\n'
b'CONTENT-LENGTH: 2\r\n'
b'CONNECTION: close\r\n'
b'\r\n'
b'ok')
self.transp.close()
def connection_lost(self, exc):
self.transp = None
@asyncio.coroutine
def go():
server = yield from self.loop.create_server(
Proto, '127.0.0.1')
addr = server.sockets[0].getsockname()
connector = aiohttp.TCPConnector(loop=self.loop)
url = 'http://{}:{}/'.format(*addr)
for i in range(2):
r = yield from client.request('GET', url,
connector=connector,
loop=self.loop)
yield from r.read()
self.assertEqual(0, len(connector._conns))
connector.close()
server.close()
yield from server.wait_closed()
self.loop.run_until_complete(go())
def test_handle_keepalive_on_closed_connection(self):
class Proto(asyncio.Protocol):
def connection_made(self, transport):
self.transp = transport
self.data = b''
def data_received(self, data):
self.data += data
if data.endswith(b'\r\n\r\n'):
self.transp.write(
b'HTTP/1.1 200 OK\r\n'
b'CONTENT-LENGTH: 2\r\n'
b'\r\n'
b'ok')
self.transp.close()
def connection_lost(self, exc):
self.transp = None
@asyncio.coroutine
def go():
server = yield from self.loop.create_server(
Proto, '127.0.0.1')
addr = server.sockets[0].getsockname()
connector = aiohttp.TCPConnector(loop=self.loop)
url = 'http://{}:{}/'.format(*addr)
r = yield from client.request('GET', url,
connector=connector,
loop=self.loop)
yield from r.read()
self.assertEqual(1, len(connector._conns))
with self.assertRaises(aiohttp.ClientError):
yield from client.request('GET', url,
connector=connector,
loop=self.loop)
self.assertEqual(0, len(connector._conns))
connector.close()
server.close()
yield from server.wait_closed()
self.loop.run_until_complete(go())
def test_share_cookie_partial_update(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
with self.assertWarns(DeprecationWarning):
conn = aiohttp.TCPConnector(share_cookies=True, loop=self.loop)
# Set c1 and c2 cookie
resp = self.loop.run_until_complete(
client.request('get', httpd.url('cookies'),
connector=conn, loop=self.loop))
self.assertEqual(resp.cookies['c1'].value, 'cookie1')
self.assertEqual(resp.cookies['c2'].value, 'cookie2')
self.assertEqual(conn.cookies, resp.cookies)
resp.close()
# Update c1 at server side
resp = self.loop.run_until_complete(
client.request('get', httpd.url('cookies_partial'),
connector=conn, loop=self.loop))
self.assertEqual(resp.cookies['c1'].value, 'other_cookie1')
resp.close()
# Assert, that we send updated cookies in next requests
r = self.loop.run_until_complete(
client.request('get', httpd.url('method', 'get'),
connector=conn, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(
content['headers']['Cookie'],
'c1=other_cookie1; c2=cookie2')
r.close()
def test_connector_cookie_merge(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
with self.assertWarns(DeprecationWarning):
conn = aiohttp.TCPConnector(share_cookies=True, loop=self.loop)
conn.update_cookies({
"c1": "connector_cookie1",
"c2": "connector_cookie2",
})
# Update c1 using direct cookies attribute of request
r = self.loop.run_until_complete(
client.request('get', httpd.url('method', 'get'),
cookies={"c1": "direct_cookie1"},
connector=conn, loop=self.loop))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(
content['headers']['Cookie'],
'c1=direct_cookie1; c2=connector_cookie2')
r.close()
def test_session_cookies(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
session = client.ClientSession(loop=self.loop)
resp = self.loop.run_until_complete(
session.request('get', httpd.url('cookies')))
self.assertEqual(resp.cookies['c1'].value, 'cookie1')
self.assertEqual(resp.cookies['c2'].value, 'cookie2')
self.assertEqual(session.cookies, resp.cookies)
resp.close()
# Assert, that we send those cookies in next requests
r = self.loop.run_until_complete(
session.request('get', httpd.url('method', 'get')))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertEqual(
content['headers']['Cookie'], 'c1=cookie1; c2=cookie2')
r.close()
session.close()
def test_session_headers(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
session = client.ClientSession(
loop=self.loop, headers={
"X-Real-IP": "192.168.0.1"
})
r = self.loop.run_until_complete(
session.request('get', httpd.url('method', 'get')))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertIn(
"X-Real-Ip", content['headers'])
self.assertEqual(
content['headers']["X-Real-Ip"], "192.168.0.1")
r.close()
session.close()
def test_session_headers_merge(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
session = client.ClientSession(
loop=self.loop, headers=[
("X-Real-IP", "192.168.0.1"),
("X-Sent-By", "requests")])
r = self.loop.run_until_complete(
session.request('get', httpd.url('method', 'get'),
headers={"X-Sent-By": "aiohttp"}))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertIn(
"X-Real-Ip", content['headers'])
self.assertIn(
"X-Sent-By", content['headers'])
self.assertEqual(
content['headers']["X-Real-Ip"], "192.168.0.1")
self.assertEqual(
content['headers']["X-Sent-By"], "aiohttp")
r.close()
session.close()
def test_session_auth(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
session = client.ClientSession(
loop=self.loop, auth=helpers.BasicAuth("login", "pass"))
r = self.loop.run_until_complete(
session.request('get', httpd.url('method', 'get')))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertIn(
"Authorization", content['headers'])
self.assertEqual(
content['headers']["Authorization"], "Basic bG9naW46cGFzcw==")
r.close()
session.close()
def test_session_auth_override(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
session = client.ClientSession(
loop=self.loop, auth=helpers.BasicAuth("login", "pass"))
r = self.loop.run_until_complete(
session.request('get', httpd.url('method', 'get'),
auth=helpers.BasicAuth("other_login", "pass")))
self.assertEqual(r.status, 200)
content = self.loop.run_until_complete(r.json())
self.assertIn(
"Authorization", content['headers'])
self.assertEqual(
content['headers']["Authorization"],
"Basic b3RoZXJfbG9naW46cGFzcw==")
r.close()
session.close()
def test_session_auth_header_conflict(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
session = client.ClientSession(
loop=self.loop, auth=helpers.BasicAuth("login", "pass"))
headers = {'Authorization': "Basic b3RoZXJfbG9naW46cGFzcw=="}
with self.assertRaises(ValueError):
self.loop.run_until_complete(
session.request('get', httpd.url('method', 'get'),
headers=headers))
session.close()
def test_shortcuts(self):
with test_utils.run_server(self.loop, router=Functional) as httpd:
for meth in ('get', 'post', 'put', 'delete',
'head', 'patch', 'options'):
coro = getattr(client, meth)
r = self.loop.run_until_complete(
coro(httpd.url('method', meth), loop=self.loop))
content1 = self.loop.run_until_complete(r.read())
content2 = self.loop.run_until_complete(r.read())
content = content1.decode()
self.assertEqual(r.status, 200)
if meth == 'head':
self.assertEqual(b'', content1)
else:
self.assertIn('"method": "%s"' % meth.upper(), content)
self.assertEqual(content1, content2)
r.close()
class Functional(test_utils.Router):
@test_utils.Router.define('/method/([A-Za-z]+)$')
def method(self, match):
self._response(self._start_response(200))
@test_utils.Router.define('/redirect_err$')
def redirect_err(self, match):
self._response(
self._start_response(302),
headers={'Location': 'ftp://127.0.0.1/test/'})
@test_utils.Router.define('/redirect/([0-9]+)$')
def redirect(self, match):
no = int(match.group(1).upper())
rno = self._props['redirects'] = self._props.get('redirects', 0) + 1
if rno >= no:
self._response(
self._start_response(302),
headers={'Location': '/method/%s' % self._method.lower()})
else:
self._response(
self._start_response(302),
headers={'Location': self._path})
@test_utils.Router.define('/redirect_307/([0-9]+)$')
def redirect_307(self, match):
no = int(match.group(1).upper())
rno = self._props['redirects'] = self._props.get('redirects', 0) + 1
if rno >= no:
self._response(
self._start_response(307),
headers={'Location': '/method/%s' % self._method.lower()})
else:
self._response(
self._start_response(307),
headers={'Location': self._path})
@test_utils.Router.define('/encoding/(gzip|deflate)$')
def encoding(self, match):
mode = match.group(1)
resp = self._start_response(200)
resp.add_compression_filter(mode)
resp.add_chunking_filter(100)
self._response(resp, headers={'Content-encoding': mode}, chunked=True)
@test_utils.Router.define('/chunked$')
def chunked(self, match):
resp = self._start_response(200)
resp.add_chunking_filter(100)
self._response(resp, chunked=True)
@test_utils.Router.define('/keepalive$')
def keepalive(self, match):
self._transport._requests = getattr(
self._transport, '_requests', 0) + 1
resp = self._start_response(200)
if 'close=' in self._query:
self._response(
resp, 'requests={}'.format(self._transport._requests))
else:
self._response(
resp, 'requests={}'.format(self._transport._requests),
headers={'CONNECTION': 'keep-alive'})
@test_utils.Router.define('/cookies$')
def cookies(self, match):
cookies = http.cookies.SimpleCookie()
cookies['c1'] = 'cookie1'
cookies['c2'] = 'cookie2'
resp = self._start_response(200)
for cookie in cookies.output(header='').split('\n'):
resp.add_header('Set-Cookie', cookie.strip())
resp.add_header(
'Set-Cookie',
'ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}='
'{925EC0B8-CB17-4BEB-8A35-1033813B0523}; HttpOnly; Path=/')
self._response(resp)
@test_utils.Router.define('/cookies_partial$')
def cookies_partial(self, match):
cookies = http.cookies.SimpleCookie()
cookies['c1'] = 'other_cookie1'
resp = self._start_response(200)
for cookie in cookies.output(header='').split('\n'):
resp.add_header('Set-Cookie', cookie.strip())
self._response(resp)
@test_utils.Router.define('/broken$')
def broken(self, match):
resp = self._start_response(200)
def write_body(resp, body):
self._transport.close()
raise ValueError()
self._response(
resp,
body=json.dumps({'t': (b'0' * 1024).decode('utf-8')}),
write_body=write_body)
| apache-2.0 |
sorenk/ansible | lib/ansible/modules/storage/purestorage/purefa_pg.py | 50 | 5569 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_pg
version_added: '2.4'
short_description: Manage protection groups on Pure Storage FlashArrays
description:
- Create, delete or modify protection groups on Pure Storage FlashArrays.
author:
- Simon Dodsley (@sdodsley)
options:
pgroup:
description:
- The name of the protection group.
required: true
state:
description:
- Define whether the protection group should exist or not.
default: present
choices: [ absent, present ]
volume:
description:
- List of existing volumes to add to protection group.
host:
description:
- List of existing hosts to add to protection group.
hostgroup:
description:
- List of existing hostgroups to add to protection group.
eradicate:
description:
    - Define whether to eradicate the protection group on delete, rather than leaving it in the trash.
type : bool
default: 'no'
enabled:
description:
    - Define whether to enable snapshots for the protection group.
type : bool
default: 'yes'
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: Create new protection group
purefa_pg:
pgroup: foo
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create new protection group with snapshots disabled
purefa_pg:
pgroup: foo
enabled: false
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Delete protection group
purefa_pg:
pgroup: foo
eradicate: true
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Create protection group for hostgroups
purefa_pg:
pgroup: bar
hostgroup:
- hg1
- hg2
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create protection group for hosts
purefa_pg:
pgroup: bar
host:
- host1
- host2
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create protection group for volumes
purefa_pg:
pgroup: bar
volume:
- vol1
- vol2
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
'''
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
try:
from purestorage import purestorage
HAS_PURESTORAGE = True
except ImportError:
HAS_PURESTORAGE = False
def get_pgroup(module, array):
pgroup = None
for h in array.list_pgroups():
if h["name"] == module.params['pgroup']:
pgroup = h
break
return pgroup
def make_pgroup(module, array):
changed = True
if not module.check_mode:
host = array.create_pgroup(module.params['pgroup'])
array.set_pgroup(module.params['pgroup'], snap_enabled=module.params['enabled'])
if module.params['volume']:
array.set_pgroup(module.params['pgroup'], vollist=module.params['volume'])
if module.params['host']:
array.set_pgroup(module.params['pgroup'], hostlist=module.params['host'])
if module.params['hostgroup']:
array.set_pgroup(module.params['pgroup'], hgrouplist=module.params['hostgroup'])
module.exit_json(changed=changed)
def update_pgroup(module, array):
changed = False
pgroup = module.params['pgroup']
module.exit_json(changed=changed)
def delete_pgroup(module, array):
changed = True
if not module.check_mode:
array.destroy_pgroup(module.params['pgroup'])
if module.params['eradicate']:
array.eradicate_pgroup(module.params['pgroup'])
module.exit_json(changed=changed)
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
pgroup=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['absent', 'present']),
volume=dict(type='list'),
host=dict(type='list'),
hostgroup=dict(type='list'),
eradicate=dict(type='bool', default=False),
enabled=dict(type='bool', default=True),
))
module = AnsibleModule(argument_spec, supports_check_mode=True)
if not HAS_PURESTORAGE:
        module.fail_json(msg='purestorage sdk is required for this module on the host')
state = module.params['state']
array = get_system(module)
pgroup = get_pgroup(module, array)
if module.params['host']:
try:
for h in module.params['host']:
array.get_host(h)
except:
module.fail_json(msg='Host {} not found'.format(h))
if module.params['hostgroup']:
try:
for hg in module.params['hostgroup']:
array.get_hgroup(hg)
except:
module.fail_json(msg='Hostgroup {} not found'.format(hg))
if pgroup and state == 'present':
update_pgroup(module, array)
elif pgroup and state == 'absent':
delete_pgroup(module, array)
elif pgroup is None and state == 'absent':
module.exit_json(changed=False)
else:
make_pgroup(module, array)
if __name__ == '__main__':
main()
| gpl-3.0 |
garbear/EventGhost | eg/Classes/DigitOnlyValidator.py | 1 | 2037 | # This file is part of EventGhost.
# Copyright (C) 2005 Lars-Peter Voss <[email protected]>
#
# EventGhost is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# EventGhost is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EventGhost; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import wx
import string
class DigitOnlyValidator(wx.PyValidator):
def __init__(self, choices=None):
wx.PyValidator.__init__(self)
self.choices = choices
self.Bind(wx.EVT_CHAR, self.OnChar)
def Clone(self):
return DigitOnlyValidator(self.choices)
def TransferToWindow(self):
return True
def Validate(self, win):
tc = self.GetWindow()
val = tc.GetValue()
if self.choices is not None:
try:
i = self.choices.index(val)
return True
except:
pass
for x in val:
if x not in string.digits:
return False
return True
def OnChar(self, event):
key = event.GetKeyCode()
if key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255:
event.Skip()
return
if chr(key) in string.digits:
event.Skip()
return
if not wx.Validator_IsSilent():
wx.Bell()
# Returning without calling event.Skip eats the event before it
# gets to the text control
return
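# Usage sketch (illustrative; the surrounding wx dialog code is assumed):
#   ctrl = wx.TextCtrl(parent, -1, validator=DigitOnlyValidator())
#   # or, to additionally accept a fixed set of non-numeric strings:
#   ctrl = wx.TextCtrl(parent, -1, validator=DigitOnlyValidator(choices=["", "auto"]))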
| gpl-2.0 |
NPRA/EmissionCalculatorLib | emission/planner.py | 2 | 13540 | import json
try:
from urllib.request import urlopen # Python 3
from urllib.parse import urlencode
except ImportError:
from urllib import urlopen # Python 2
from urllib import urlencode
import socket
import math
from . import vehicles, log
from . import EmissionsJsonParser
from .exceptions import RouteError
from . import models
try:
    # Interpolate and Extrapolate are used by get_emission() below; the import
    # location here is an assumption about this package's layout.
    from . import Interpolate, Extrapolate
except ImportError:  # keep the module importable even if the helpers live elsewhere
    pass
from six.moves import urllib
def enum(**named_values):
return type('Enum', (), named_values)
# List of possible pollutant types
PollutantTypes = enum(
CH4='CH4',
CO='CO',
EC='EC',
NOx='NOx',
PM_EXHAUST='PM Exhaust',
VOC='VOC')
# URL to remote route webservice
ROUTE_URL_BASE = "https://www.vegvesen.no/ws/no/vegvesen/ruteplan/routingService_v1_0/routingService/"
class Route:
"""Represent a route object from the NVDB RoutingService"""
def __init__(self, distance, minutes, path, id):
self.distance = distance
self.minutes = minutes
self.path = path
self.pollutants = {}
self.distances = []
self.id = id
def hours_and_minutes(self):
"""Return hours:minutes as a string
representation, based on the total amount
of minutes for the route.
"""
hours, minutes = divmod(self.minutes, 60)
return "{}:{}".format(hours, minutes)
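    # Illustrative example: a route lasting 95 minutes yields "1:35",
    # since divmod(95, 60) == (1, 35).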
    def velocity(self):
        """Return the average velocity in km/h (assuming the distance is in
        metres and the duration in minutes).
        """
total_time = self.minutes * 60
return (self.distance / total_time) * 3.6
def add_pollutant(self, p, calc_emission):
if p not in self.pollutants:
self.pollutants[p] = []
self.pollutants[p].append(calc_emission)
def add_distances(self, distances):
self.distances.append(distances)
def total_emission(self, pollutant):
total = sum(self.pollutants[pollutant])
return total
def __repl__(self):
fmt = "Route(distance={}, minutes={})"
return fmt.format(self.distance, self.minutes)
def __str__(self):
return self.__repl__()
def __eq__(self, other):
return self.minutes == other.minutes
def __lt__(self, other):
return self.minutes < other.minutes
class RouteSet:
"""A collection of Route objects"""
def __init__(self, routes=None):
if routes is None:
self._lst = []
else:
self._lst = routes
def __getitem__(self, item):
return self._lst[item]
def __iter__(self):
return iter(self._lst)
def __len__(self):
return len(self._lst)
def add(self, route):
self._lst.append(route)
def __repl__(self):
return "RouteSet({})".format("\n".join([str(r) for r in self._lst]))
def __str__(self):
return self.__repl__()
def sort(self, key=None, reverse=False):
self._lst.sort(key=key, reverse=reverse)
def __hash__(self):
return hash(self._lst)
class Planner:
"""This class takes a start, stop and vehicle input to give the user
    a set of possible road routes, sorted by least pollution. Additional
    metadata about each route is provided as well.
"""
def __init__(self, start, stop, vehicle):
self._start = start
self._stop = stop
if not isinstance(vehicle, vehicles.Vehicle):
raise ValueError("Vehicle is not of correct type. Check vehicle implementations.")
self._vehicle = vehicle
# self._emissionJson = EmissionsJsonParser(vehicle)
# self._emissionJson._init_values_from_input_file()
self._emissionDb = None # EmissionsJsonParser(self._vehicle)
self.routes = RouteSet()
self._pollutants = {}
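    # Minimal usage sketch (illustrative; the coordinates and the vehicle
    # construction are assumptions, not values taken from this module):
    #   vehicle = vehicles.Vehicle(...)          # normally a concrete Vehicle subclass
    #   planner = Planner((271809, 7039133), (265385, 7031118), vehicle)
    #   planner.add_pollutant(PollutantTypes.NOx)
    #   planner.run()
    #   for route in planner.routes:
    #       print(route.distance, route.total_emission(PollutantTypes.NOx))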
@property
def pollutants(self):
return self._pollutants
def add_pollutant(self, pollutant_type):
# validate input
if pollutant_type not in PollutantTypes.__dict__.values():
raise ValueError("pollutant_type needs to be one of the types defined in planner.PollutantTypes")
if pollutant_type not in self._pollutants:
self._pollutants[pollutant_type] = None
else:
log.debug("warning: pollutant already added..")
log.debug("self._pollutants = {}".format(self._pollutants))
@property
def coordinates(self):
return "{start[0]},{start[1]};{end[0]},{end[1]}".format(
start=self._start, end=self._stop)
@staticmethod
def build_url(vehicle, coordinates, format="json", geometryformat="isoz"):
"""Construct a well formed url for the routing service which
NPRA is using.
"""
load = vehicle.load if vehicle.load > -1.0 else 0
params = {
"format": format,
"height": vehicle.height,
"length": vehicle.length,
"stops": coordinates,
"load": load,
"geometryformat": geometryformat,
"lang": "nb-no",
}
return '?'.join([ROUTE_URL_BASE, urlencode(params)])
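    # Illustrative result (parameter order and percent-encoding are handled by
    # urlencode, so the real string may differ slightly):
    #   <ROUTE_URL_BASE>?format=json&height=...&length=...&stops=<start>%3B<stop>&load=...&geometryformat=isoz&lang=nb-no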
def _get_routes(self):
socket.setdefaulttimeout(30)
try:
url = Planner.build_url(self._vehicle, self.coordinates)
log.debug("Calling: {}".format(url))
log.debug("coordinates: {}".format(self.coordinates))
req = urllib.request.Request(url)
response = urllib.request.urlopen(req)
data = response.read()
self._json_data = json.loads(data.decode("utf-8"))
if 'messages' in self._json_data:
                raise RouteError(
                    "Unexpected 'messages' field in returned JSON data: {}".format(
                        self._json_data['messages']))
except IOError as err:
log.debug("ioerror: {}".format(err))
self._json_data = {}
raise RouteError("IOError: {}".format(err))
except ValueError:
log.warning("Bad data from remote routing service: \n{}".format(data))
self._json_data = {}
raise RouteError("Bad data from remote routing service: \n{}".format(data))
@staticmethod
def _get_distance_2d(point1, point2):
distance = math.sqrt((point1[0] - point2[0])**2 + (point1[1] - point2[1])**2)
return distance
@staticmethod
def _get_distance_3d(point1, point2):
distance = math.sqrt((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2 + (point2[2] - point1[2]) ** 2)
return distance
@staticmethod
def _get_slope(point1, point2):
distance = Planner._get_distance_3d(point1, point2)
slope = 0.0
if distance:
slope = math.degrees(math.asin((float(point2[2]) - float(point1[2])) / distance))
return slope
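    # Worked example (illustrative): for point1 = (0, 0, 0) and point2 = (100, 0, 10)
    # the 3D distance is ~100.5, so the slope is degrees(asin(10 / 100.5)) ~= 5.7 degrees.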
    def _get_pollutants_for_vehicle(self):
        """Retrieve pollution parameters for the vehicle provided
to the planner. Only include the pollutants provided in
'self._pollutants'
"""
from . import session
category = models.Category.get_for_type(self._vehicle)
if not category:
raise ValueError("Unable to find Category for vehicle: {}".format(category))
fuel = session.query(models.Fuel).filter_by(name=self._vehicle.fuel_type).first()
if not fuel:
raise ValueError("Unable to find Fuel in database: name={}".format(self._vehicle.fuel_type))
segment = session.query(models.Segment).filter_by(name=self._vehicle.segment).first()
if not segment:
raise ValueError("Unable to find segment in database: name={}".format(str(self._vehicle.segment)))
filter_parms = {
"category": category,
"fuel": fuel,
"segment": segment
}
euro_std = session.query(models.EuroStd).filter_by(name=self._vehicle.euro_std).first()
if euro_std:
filter_parms.update({"eurostd": euro_std})
mode = session.query(models.Mode).filter_by(name=self._vehicle.mode).first()
if mode:
filter_parms.update({"mode": mode})
if self._vehicle.load > -1.0:
filter_parms.update({"load": self._vehicle.load})
# Get Parameters based on the other items found above
params = session.query(models.Parameter).filter_by(**filter_parms)
return params.all()
def get_emission(self, parameters, slope=None):
pollutant = None
if len(parameters) > 1:
            # We have many parameter instances for a single pollutant.
            # This means that we have multiple 'slopes' in our table.
            # We therefore need to find the matching slope, or extrapolate/interpolate the value.
positive_slopes = [0, 0.02, 0.04, 0.06]
negative_slopes = [-0.06, -0.04, -0.02, 0]
x = [x for x in parameters if x.slope == slope]
if any(x):
pollutant = x[0]
else:
slopes_for_pollutant = []
if slope > 0.0:
tmp_pollutants = [x for x in parameters if x.slope in positive_slopes]
slopes_for_pollutant = map(Planner.calculate, tmp_pollutants)
extrapolate = Extrapolate(positive_slopes, slopes_for_pollutant)
tmp = extrapolate[slope]
log.debug("Extrapolated value: {}".format(tmp))
return tmp
else:
tmp_pollutants = [x for x in parameters if x.slope in negative_slopes]
slopes_for_pollutant = map(Planner.calculate, tmp_pollutants)
interpolate = Interpolate(negative_slopes, slopes_for_pollutant)
tmp = interpolate[slope]
log.debug("Interpolated value: {}".format(tmp))
return tmp
else:
pollutant = parameters[0]
tmp = Planner.calculate(pollutant)
log.debug("tmp: {}".format(tmp))
return tmp
@staticmethod
def calculate(parameter):
"""Equation copied from the EU spreadsheet
"""
alpha = parameter.ALPHA
beta = parameter.BETA
delta = parameter.DELTA
epsilon = parameter.EPSILON
gamma = parameter.GAMMA
hta = parameter.HTA
reduct_fact = parameter.REDUCTIONFACTOR
speed = parameter.SPEED
v_max = parameter.MAXSPEED
v_min = parameter.MINSPEED
        zita = parameter.ZITA
        """ ((alpha*speed^2) + (beta*speed) + gamma + (delta/speed)) / ((epsilon*speed^2) + (zita*speed) + hta), scaled by (1 - reduct_fact) """
try:
result = (alpha * math.pow(speed, 2)) + (beta * speed) + gamma + (delta / speed)
result /= (epsilon * math.pow(speed, 2)) + ((zita * speed) + hta)
result *= (1 - reduct_fact)
except ZeroDivisionError:
result = 0.0
return result
def _calculate_emissions(self):
"""Calculate total emission from a route of x,y,z points based on a path between
two points (A -> B). https://www.vegvesen.no/vegkart/vegkart/.
For a simple static emission calculation play with:
- self._get_pollutants_for_vehicle()
- Planner.calculate(parameter)
"""
parameters = self._get_pollutants_for_vehicle()
self.routes = RouteSet()
if "routes" not in self._json_data:
log.debug("Error in returned JSON data from web service.")
log.debug("data: {}".format(self._json_data))
return
# Create a "set" of Routes. The planner web service will
# return 2-4 routes with different paths.
for idx, r in enumerate(self._json_data["routes"]["features"]):
attributes = r.get("attributes")
route = Route(distance=attributes.get("Total_Meters"),
minutes=attributes.get("Total_Minutes"),
path=r.get("geometry").get("paths")[0], id = idx)
self.routes.add(route)
log.debug("Nr of routes: {}".format(len(self.routes)))
for i, route in enumerate(self.routes):
# A list of x,y,z points that all together represents the route
path_coordinates = route.path
distances = []
# Nifty little trick to loop over 'path_coordinates',
# but keep a reference to the 'prev' item to calculate the
# distance between them
iter_points = iter(path_coordinates)
prev = next(iter_points)
for point in path_coordinates:
if not distances:
# first point
distances.append(Planner._get_distance_3d(prev, point) / 1000)
else:
distances.append(distances[-1] + Planner._get_distance_3d(prev, point) / 1000)
point_slope = Planner._get_slope(prev, point)
                # Calculate emission for each pollutant the user has asked for
for p in self._pollutants:
parms = [x for x in parameters if x.pollutant.name.startswith(p)]
calc_emission = self.get_emission(parms, point_slope)
route.add_pollutant(p, calc_emission)
prev = point
route.add_distances(distances)
def run(self):
"""
        Use the input data and send an HTTP request to the route planner.
        Construct a 'RouteSet' containing all the possible 'Route' objects.
        Also compute the pollution factor for each route based on the 'Route' data and
        the vehicle chosen.
"""
self._get_routes()
self._calculate_emissions()
| bsd-2-clause |
tafia/servo | python/mozlog/mozlog/structured/scripts/format.py | 46 | 1310 | import argparse
import sys
from .. import handlers, commandline, reader
def get_parser(add_help=True):
parser = argparse.ArgumentParser("format",
description="Format a structured log stream", add_help=add_help)
parser.add_argument("--input", action="store", default=None,
help="Filename to read from, defaults to stdin")
parser.add_argument("--output", action="store", default=None,
help="Filename to write to, defaults to stdout")
parser.add_argument("format", choices=commandline.log_formatters.keys(),
help="Format to use")
return parser
def main(**kwargs):
if kwargs["input"] is None:
input_file = sys.stdin
else:
input_file = open(kwargs["input"])
if kwargs["output"] is None:
output_file = sys.stdout
else:
output_file = open(kwargs["output"], "w")
formatter = commandline.log_formatters[kwargs["format"]][0]()
handler = handlers.StreamHandler(stream=output_file,
formatter=formatter)
for data in reader.read(input_file):
handler(data)
if __name__ == "__main__":
parser = get_parser()
args = parser.parse_args()
kwargs = vars(args)
main(**kwargs)
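# Example invocation (illustrative; the module path is assumed from the file
# location, and the available format names are whatever commandline.log_formatters
# provides in this mozlog version):
#   python -m mozlog.structured.scripts.format --input raw_structured.log --output formatted.log <format-name>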
| mpl-2.0 |
trunet/txXBee | setup.py | 2 | 1115 | #!/usr/bin/env python
"""
txXBee installation script
"""
from setuptools import setup
import os
import sys
import subprocess
import txXBee
setup(
name = "txXBee",
version = txXBee.__version__,
author = "Wagner Sartori Junior",
author_email = "[email protected]",
url = "http://github.com/trunet/txxbee",
description = "XBee Protocol for Twisted",
scripts = [],
license="COPYING",
packages = ["txXBee"],
install_requires = ['XBee'],
    long_description = """XBee is an easy-to-implement embedded short- and long-range wireless module leveraging industry standard and cutting-edge designs for global flexibility from http://www.digi.com/.
This library implements txXBee for the Twisted Python framework.
""",
classifiers = [
"Framework :: Twisted",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Topic :: Communications",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
]
)
| gpl-3.0 |
farhi-naz/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/style/checkers/xml.py | 187 | 2044 | # Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Checks WebKit style for XML files."""
from __future__ import absolute_import
from xml.parsers import expat
class XMLChecker(object):
"""Processes XML lines for checking style."""
def __init__(self, file_path, handle_style_error):
self._handle_style_error = handle_style_error
self._handle_style_error.turn_off_line_filtering()
def check(self, lines):
parser = expat.ParserCreate()
try:
for line in lines:
parser.Parse(line)
parser.Parse('\n')
parser.Parse('', True)
except expat.ExpatError, error:
self._handle_style_error(error.lineno, 'xml/syntax', 5, expat.ErrorString(error.code))
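# Usage sketch (illustrative; handle_style_error is normally supplied by the
# webkitpy style-checker framework):
#   checker = XMLChecker('path/to/file.xml', handle_style_error)
#   checker.check(file_contents.split('\n'))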
| bsd-3-clause |
wavemind/gcb17ml | modules/announcements/announcements.py | 5 | 14259 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods to create and manage Announcements."""
__author__ = 'Saifu Angto ([email protected])'
import datetime
import urllib
from common import tags
from common import utils
from common.schema_fields import FieldArray
from common.schema_fields import FieldRegistry
from common.schema_fields import SchemaField
from controllers.utils import BaseHandler
from controllers.utils import BaseRESTHandler
from controllers.utils import ReflectiveRequestHandler
from controllers.utils import XsrfTokenManager
from models import custom_modules
from models import entities
from models import models
from models import notify
from models import roles
from models import transforms
from models.models import MemcacheManager
from models.models import Student
import modules.announcements.samples as samples
from modules.dashboard.label_editor import LabelGroupsHelper
from modules.oeditor import oeditor
from google.appengine.ext import db
class AnnouncementsRights(object):
"""Manages view/edit rights for announcements."""
@classmethod
def can_view(cls, unused_handler):
return True
@classmethod
def can_edit(cls, handler):
return roles.Roles.is_course_admin(handler.app_context)
@classmethod
def can_delete(cls, handler):
return cls.can_edit(handler)
@classmethod
def can_add(cls, handler):
return cls.can_edit(handler)
@classmethod
def apply_rights(cls, handler, items):
"""Filter out items that current user can't see."""
if AnnouncementsRights.can_edit(handler):
return items
allowed = []
for item in items:
if not item.is_draft:
allowed.append(item)
return allowed
class AnnouncementsHandler(BaseHandler, ReflectiveRequestHandler):
"""Handler for announcements."""
default_action = 'list'
get_actions = [default_action, 'edit']
post_actions = ['add', 'delete']
@classmethod
def get_child_routes(cls):
"""Add child handlers for REST."""
return [('/rest/announcements/item', AnnouncementsItemRESTHandler)]
def get_action_url(self, action, key=None):
args = {'action': action}
if key:
args['key'] = key
return self.canonicalize_url(
'/announcements?%s' % urllib.urlencode(args))
def format_items_for_template(self, items):
"""Formats a list of entities into template values."""
template_items = []
for item in items:
item = transforms.entity_to_dict(item)
# add 'edit' actions
if AnnouncementsRights.can_edit(self):
item['edit_action'] = self.get_action_url(
'edit', key=item['key'])
item['delete_xsrf_token'] = self.create_xsrf_token('delete')
item['delete_action'] = self.get_action_url(
'delete', key=item['key'])
template_items.append(item)
output = {}
output['children'] = template_items
# add 'add' action
if AnnouncementsRights.can_edit(self):
output['add_xsrf_token'] = self.create_xsrf_token('add')
output['add_action'] = self.get_action_url('add')
return output
def put_sample_announcements(self):
"""Loads sample data into a database."""
items = []
for item in samples.SAMPLE_ANNOUNCEMENTS:
entity = AnnouncementEntity()
transforms.dict_to_entity(entity, item)
entity.put()
items.append(entity)
return items
def _render(self):
self.template_value['navbar'] = {'announcements': True}
self.render('announcements.html')
def get_list(self):
"""Shows a list of announcements."""
student = None
user = self.personalize_page_and_get_user()
transient_student = False
if user is None:
transient_student = True
else:
student = Student.get_enrolled_student_by_email(user.email())
if not student:
transient_student = True
self.template_value['transient_student'] = transient_student
items = AnnouncementEntity.get_announcements()
if not items and AnnouncementsRights.can_edit(self):
items = self.put_sample_announcements()
items = AnnouncementsRights.apply_rights(self, items)
if not roles.Roles.is_course_admin(self.get_course().app_context):
items = models.LabelDAO.apply_course_track_labels_to_student_labels(
self.get_course(), student, items)
self.template_value['announcements'] = self.format_items_for_template(
items)
self._render()
def get_edit(self):
"""Shows an editor for an announcement."""
user = self.personalize_page_and_get_user()
if not user or not AnnouncementsRights.can_edit(self):
self.error(401)
return
key = self.request.get('key')
schema = AnnouncementsItemRESTHandler.SCHEMA(
'Announcement',
self.get_course().get_course_announcement_list_email())
exit_url = self.canonicalize_url(
'/announcements#%s' % urllib.quote(key, safe=''))
rest_url = self.canonicalize_url('/rest/announcements/item')
form_html = oeditor.ObjectEditor.get_html_for(
self,
schema.get_json_schema(),
schema.get_schema_dict(),
key, rest_url, exit_url,
required_modules=AnnouncementsItemRESTHandler.REQUIRED_MODULES)
self.template_value['content'] = form_html
self._render()
def post_delete(self):
"""Deletes an announcement."""
if not AnnouncementsRights.can_delete(self):
self.error(401)
return
key = self.request.get('key')
entity = AnnouncementEntity.get(key)
if entity:
entity.delete()
self.redirect('/announcements')
def post_add(self):
"""Adds a new announcement and redirects to an editor for it."""
if not AnnouncementsRights.can_add(self):
self.error(401)
return
entity = AnnouncementEntity()
entity.title = 'Sample Announcement'
entity.date = datetime.datetime.now().date()
entity.html = 'Here is my announcement!'
entity.is_draft = True
entity.put()
self.redirect(self.get_action_url('edit', key=entity.key()))
DRAFT_TEXT = 'Private'
PUBLISHED_TEXT = 'Public'
class AnnouncementsItemRESTHandler(BaseRESTHandler):
"""Provides REST API for an announcement."""
REQUIRED_MODULES = [
'inputex-date', 'gcb-rte', 'inputex-select', 'inputex-string',
'inputex-uneditable', 'inputex-checkbox', 'inputex-list',
'inputex-hidden']
@classmethod
def SCHEMA(cls, title, announcement_email):
schema = FieldRegistry(title)
schema.add_property(SchemaField(
'key', 'ID', 'string', editable=False,
extra_schema_dict_values={'className': 'inputEx-Field keyHolder'}))
schema.add_property(SchemaField(
'title', 'Title', 'string', optional=True))
schema.add_property(SchemaField(
'html', 'Body', 'html', optional=True,
extra_schema_dict_values={
'supportCustomTags': tags.CAN_USE_DYNAMIC_TAGS.value,
'excludedCustomTags': tags.EditorBlacklists.COURSE_SCOPE}))
schema.add_property(SchemaField(
'date', 'Date', 'date',
optional=True, extra_schema_dict_values={
'_type': 'date', 'dateFormat': 'Y-m-d',
'valueFormat': 'Y-m-d'}))
schema.add_property(FieldArray(
'label_groups', 'Labels',
item_type=LabelGroupsHelper.make_labels_group_schema_field(),
extra_schema_dict_values={
'className': 'inputEx-Field label-group-list'}))
schema.add_property(SchemaField(
'send_email', 'Send Email', 'boolean', optional=True,
extra_schema_dict_values={
'description':
AnnouncementsItemRESTHandler.get_send_email_description(
announcement_email)}))
schema.add_property(SchemaField(
'is_draft', 'Status', 'boolean',
select_data=[(True, DRAFT_TEXT), (False, PUBLISHED_TEXT)],
extra_schema_dict_values={'className': 'split-from-main-group'}))
return schema
@classmethod
def get_send_email_description(cls, announcement_email):
"""Get the description for Send Email field."""
if announcement_email:
return 'Email will be sent to : ' + announcement_email
return 'Announcement list not configured.'
def get(self):
"""Handles REST GET verb and returns an object as JSON payload."""
key = self.request.get('key')
try:
entity = AnnouncementEntity.get(key)
except db.BadKeyError:
entity = None
if not entity:
transforms.send_json_response(
self, 404, 'Object not found.', {'key': key})
return
viewable = AnnouncementsRights.apply_rights(self, [entity])
if not viewable:
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
entity = viewable[0]
schema = AnnouncementsItemRESTHandler.SCHEMA(
'Announcement',
self.get_course().get_course_announcement_list_email())
entity_dict = transforms.entity_to_dict(entity)
entity_dict['label_groups'] = (
LabelGroupsHelper.announcement_labels_to_dict(entity))
json_payload = transforms.dict_to_json(
entity_dict, schema.get_json_schema_dict())
transforms.send_json_response(
self, 200, 'Success.',
payload_dict=json_payload,
xsrf_token=XsrfTokenManager.create_xsrf_token(
'announcement-put'))
def put(self):
"""Handles REST PUT verb with JSON payload."""
request = transforms.loads(self.request.get('request'))
key = request.get('key')
if not self.assert_xsrf_token_or_fail(
request, 'announcement-put', {'key': key}):
return
if not AnnouncementsRights.can_edit(self):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
entity = AnnouncementEntity.get(key)
if not entity:
transforms.send_json_response(
self, 404, 'Object not found.', {'key': key})
return
schema = AnnouncementsItemRESTHandler.SCHEMA(
'Announcement',
self.get_course().get_course_announcement_list_email())
payload = request.get('payload')
update_dict = transforms.json_to_dict(
transforms.loads(payload), schema.get_json_schema_dict())
entity.labels = utils.list_to_text(
LabelGroupsHelper.decode_labels_group(
update_dict.get('label_groups')))
transforms.dict_to_entity(entity, update_dict)
entity.put()
email_sent = False
if entity.send_email:
email_manager = notify.EmailManager(self.get_course())
email_sent = email_manager.send_announcement(
entity.title, entity.html)
if entity.send_email and not email_sent:
if not self.get_course().get_course_announcement_list_email():
message = 'Saved. Announcement list not configured.'
else:
message = 'Saved, but there was an error sending email.'
else:
message = 'Saved.'
transforms.send_json_response(self, 200, message)
class AnnouncementEntity(entities.BaseEntity):
"""A class that represents a persistent database entity of announcement."""
title = db.StringProperty(indexed=False)
date = db.DateProperty()
html = db.TextProperty(indexed=False)
labels = db.StringProperty(indexed=False)
is_draft = db.BooleanProperty()
send_email = db.BooleanProperty()
memcache_key = 'announcements'
@classmethod
def get_announcements(cls, allow_cached=True):
items = MemcacheManager.get(cls.memcache_key)
if not allow_cached or items is None:
items = AnnouncementEntity.all().order('-date').fetch(1000)
# TODO(psimakov): prepare to exceed 1MB max item size
# read more here: http://stackoverflow.com
# /questions/5081502/memcache-1-mb-limit-in-google-app-engine
MemcacheManager.set(cls.memcache_key, items)
return items
def put(self):
"""Do the normal put() and also invalidate memcache."""
result = super(AnnouncementEntity, self).put()
MemcacheManager.delete(self.memcache_key)
return result
def delete(self):
"""Do the normal delete() and invalidate memcache."""
super(AnnouncementEntity, self).delete()
MemcacheManager.delete(self.memcache_key)
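# Note: AnnouncementEntity caches the full announcement list under a single
# memcache key; put() and delete() above invalidate that key so the next
# get_announcements() call repopulates it from the datastore.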
custom_module = None
def register_module():
"""Registers this module in the registry."""
announcement_handlers = [('/announcements', AnnouncementsHandler)]
global custom_module
custom_module = custom_modules.Module(
'Course Announcements',
'A set of pages for managing course announcements.',
[], announcement_handlers)
return custom_module
| apache-2.0 |
nvoron23/hue | desktop/core/ext-py/Paste-1.7.2/tests/test_httpheaders.py | 9 | 6173 | from paste.httpheaders import *
import time
def _test_generic(collection):
assert 'bing' == VIA(collection)
REFERER.update(collection,'internal:/some/path')
assert 'internal:/some/path' == REFERER(collection)
CACHE_CONTROL.update(collection,max_age=1234)
CONTENT_DISPOSITION.update(collection,filename="bingles.txt")
PRAGMA.update(collection,"test","multi",'valued="items"')
assert 'public, max-age=1234' == CACHE_CONTROL(collection)
assert 'attachment; filename="bingles.txt"' == \
CONTENT_DISPOSITION(collection)
assert 'test, multi, valued="items"' == PRAGMA(collection)
VIA.delete(collection)
def test_environ():
collection = {'HTTP_VIA':'bing', 'wsgi.version': '1.0' }
_test_generic(collection)
assert collection == {'wsgi.version': '1.0',
'HTTP_PRAGMA': 'test, multi, valued="items"',
'HTTP_REFERER': 'internal:/some/path',
'HTTP_CONTENT_DISPOSITION': 'attachment; filename="bingles.txt"',
'HTTP_CACHE_CONTROL': 'public, max-age=1234'
}
def test_environ_cgi():
environ = {'CONTENT_TYPE': 'text/plain', 'wsgi.version': '1.0',
'HTTP_CONTENT_TYPE': 'ignored/invalid',
'CONTENT_LENGTH': '200'}
assert 'text/plain' == CONTENT_TYPE(environ)
assert '200' == CONTENT_LENGTH(environ)
CONTENT_TYPE.update(environ,'new/type')
assert 'new/type' == CONTENT_TYPE(environ)
CONTENT_TYPE.delete(environ)
assert '' == CONTENT_TYPE(environ)
assert 'ignored/invalid' == environ['HTTP_CONTENT_TYPE']
def test_response_headers():
collection = [('via', 'bing')]
_test_generic(collection)
normalize_headers(collection)
assert collection == [
('Cache-Control', 'public, max-age=1234'),
('Pragma', 'test, multi, valued="items"'),
('Referer', 'internal:/some/path'),
('Content-Disposition', 'attachment; filename="bingles.txt"')
]
def test_cache_control():
assert 'public' == CACHE_CONTROL()
assert 'public' == CACHE_CONTROL(public=True)
assert 'private' == CACHE_CONTROL(private=True)
assert 'no-cache' == CACHE_CONTROL(no_cache=True)
assert 'private, no-store' == CACHE_CONTROL(private=True, no_store=True)
assert 'public, max-age=60' == CACHE_CONTROL(max_age=60)
assert 'public, max-age=86400' == \
CACHE_CONTROL(max_age=CACHE_CONTROL.ONE_DAY)
CACHE_CONTROL.extensions['community'] = str
assert 'public, community="bingles"' == \
CACHE_CONTROL(community="bingles")
headers = []
CACHE_CONTROL.apply(headers,max_age=60)
assert 'public, max-age=60' == CACHE_CONTROL(headers)
assert EXPIRES.parse(headers) > time.time()
assert EXPIRES.parse(headers) < time.time() + 60
def test_content_disposition():
assert 'attachment' == CONTENT_DISPOSITION()
assert 'attachment' == CONTENT_DISPOSITION(attachment=True)
assert 'inline' == CONTENT_DISPOSITION(inline=True)
assert 'inline; filename="test.txt"' == \
CONTENT_DISPOSITION(inline=True, filename="test.txt")
assert 'attachment; filename="test.txt"' == \
CONTENT_DISPOSITION(filename="/some/path/test.txt")
headers = []
CONTENT_DISPOSITION.apply(headers,filename="test.txt")
assert 'text/plain' == CONTENT_TYPE(headers)
CONTENT_DISPOSITION.apply(headers,filename="test")
assert 'text/plain' == CONTENT_TYPE(headers)
CONTENT_DISPOSITION.apply(headers,filename="test.html")
assert 'text/plain' == CONTENT_TYPE(headers)
headers = [('Content-Type', 'application/octet-stream')]
CONTENT_DISPOSITION.apply(headers,filename="test.txt")
assert 'text/plain' == CONTENT_TYPE(headers)
assert headers == [
('Content-Type', 'text/plain'),
('Content-Disposition', 'attachment; filename="test.txt"')
]
def test_range():
assert ('bytes',[(0,300)]) == RANGE.parse("bytes=0-300")
assert ('bytes',[(0,300)]) == RANGE.parse("bytes = -300")
assert ('bytes',[(0,None)]) == RANGE.parse("bytes= -")
assert ('bytes',[(0,None)]) == RANGE.parse("bytes=0 - ")
assert ('bytes',[(300,None)]) == RANGE.parse(" BYTES=300-")
assert ('bytes',[(4,5),(6,7)]) == RANGE.parse(" Bytes = 4 - 5,6 - 07 ")
assert ('bytes',[(0,5),(7,None)]) == RANGE.parse(" bytes=-5,7-")
assert ('bytes',[(0,5),(7,None)]) == RANGE.parse(" bytes=-5,7-")
assert ('bytes',[(0,5),(7,None)]) == RANGE.parse(" bytes=-5,7-")
assert None == RANGE.parse("")
assert None == RANGE.parse("bytes=0,300")
assert None == RANGE.parse("bytes=-7,5-")
def test_copy():
environ = {'HTTP_VIA':'bing', 'wsgi.version': '1.0' }
response_headers = []
VIA.update(response_headers,environ)
assert response_headers == [('Via', 'bing')]
def test_sorting():
# verify the HTTP_HEADERS are set with their canonical form
sample = [WWW_AUTHENTICATE, VIA, ACCEPT, DATE,
ACCEPT_CHARSET, AGE, ALLOW, CACHE_CONTROL,
CONTENT_ENCODING, ETAG, CONTENT_TYPE, FROM,
EXPIRES, RANGE, UPGRADE, VARY, ALLOW]
sample.sort()
sample = [str(x) for x in sample]
assert sample == [
# general headers first
'Cache-Control', 'Date', 'Upgrade', 'Via',
# request headers next
'Accept', 'Accept-Charset', 'From', 'Range',
# response headers following
'Age', 'ETag', 'Vary', 'WWW-Authenticate',
# entity headers (/w expected duplicate)
'Allow', 'Allow', 'Content-Encoding', 'Content-Type', 'Expires'
]
def test_normalize():
response_headers = [
('www-authenticate','Response AuthMessage'),
('unknown-header','Unknown Sorted Last'),
('Via','General Bingles'),
('aLLoW','Entity Allow Something'),
('ETAG','Response 34234'),
('expires','Entity An-Expiration-Date'),
('date','General A-Date')]
normalize_headers(response_headers, strict=False)
assert response_headers == [
('Date', 'General A-Date'),
('Via', 'General Bingles'),
('ETag', 'Response 34234'),
('WWW-Authenticate', 'Response AuthMessage'),
('Allow', 'Entity Allow Something'),
('Expires', 'Entity An-Expiration-Date'),
('Unknown-Header', 'Unknown Sorted Last')]
| apache-2.0 |
optima-ict/odoo | addons/report_webkit/convert.py | 47 | 1389 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# Author : Nicolas Bessi (Camptocamp)
from openerp.tools import convert
original_xml_import = convert.xml_import
class WebkitXMLImport(original_xml_import):
# Override of xml import in order to add webkit_header tag in report tag.
# As discussed with the R&D Team, the current XML processing API does
    # not offer enough flexibility to do it in a cleaner way.
    # The solution is not meant to be a long-term solution, but at least
# allows chaining of several overrides of the _tag_report method,
# and does not require a copy/paste of the original code.
def _tag_report(self, cr, rec, data_node=None, mode=None):
report_id = super(WebkitXMLImport, self)._tag_report(cr, rec, data_node)
if rec.get('report_type') == 'webkit':
header = rec.get('webkit_header')
if header:
if header in ('False', '0', 'None'):
webkit_header_id = False
else:
webkit_header_id = self.id_get(cr, header)
self.pool.get('ir.actions.report.xml').write(cr, self.uid,
report_id, {'webkit_header': webkit_header_id})
return report_id
convert.xml_import = WebkitXMLImport
| agpl-3.0 |
Nick-Hall/gramps | gramps/gen/plug/docgen/paperstyle.py | 4 | 6305 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2002 Gary Shao
# Copyright (C) 2007 Brian G. Matherly
# Copyright (C) 2009 Benny Malengier
# Copyright (C) 2009 Gary Burton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# standard python modules
#
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
log = logging.getLogger(".paperstyle")
#-------------------------------------------------------------------------
#
# Page orientation
#
#-------------------------------------------------------------------------
PAPER_PORTRAIT = 0
PAPER_LANDSCAPE = 1
#------------------------------------------------------------------------
#
# PaperSize
#
#------------------------------------------------------------------------
class PaperSize:
"""
Defines the dimensions of a sheet of paper. All dimensions are in
centimeters.
"""
def __init__(self, name, height, width):
"""
        Create a new paper size.
:param name: name of the new style
:param height: page height in centimeters
:param width: page width in centimeters
"""
self.name = name
self.height = height
self.width = width
if self.name == 'Letter':
self.trans_pname = _('Letter', 'paper size')
elif self.name == 'Legal':
self.trans_pname = _('Legal', 'paper size')
elif self.name == 'Custom Size':
self.trans_pname = _('Custom Size')
else:
self.trans_pname = None
def get_name(self):
"Return the name of the paper style"
return self.name
def get_height(self):
"Return the page height in cm"
return self.height
def set_height(self, height):
"Set the page height in cm"
self.height = height
def get_width(self):
"Return the page width in cm"
return self.width
def set_width(self, width):
"Set the page width in cm"
self.width = width
def get_height_inches(self):
"Return the page height in inches"
return self.height / 2.54
def get_width_inches(self):
"Return the page width in inches"
return self.width / 2.54
#------------------------------------------------------------------------
#
# PaperStyle
#
#------------------------------------------------------------------------
class PaperStyle:
"""
Define the various options for a sheet of paper.
"""
def __init__(self, size, orientation,
lmargin=2.54, rmargin=2.54, tmargin=2.54, bmargin=2.54):
"""
Create a new paper style.
:param size: size of the new style
:type size: :class:`.PaperSize`
:param orientation: page orientation
:type orientation: PAPER_PORTRAIT or PAPER_LANDSCAPE
"""
self.__orientation = orientation
if orientation == PAPER_PORTRAIT:
self.__size = PaperSize(size.get_name(),
size.get_height(),
size.get_width())
else:
self.__size = PaperSize(size.get_name(),
size.get_width(),
size.get_height())
self.__lmargin = lmargin
self.__rmargin = rmargin
self.__tmargin = tmargin
self.__bmargin = bmargin
def get_size(self):
"""
Return the size of the paper.
:returns: object indicating the paper size
:rtype: :class:`.PaperSize`
"""
return self.__size
def get_orientation(self):
"""
Return the orientation of the page.
        :returns: PAPER_PORTRAIT or PAPER_LANDSCAPE
:rtype: int
"""
return self.__orientation
def get_usable_width(self):
"""
Return the width of the page area in centimeters.
The value is the page width less the margins.
"""
return self.__size.get_width() - (self.__rmargin + self.__lmargin)
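        # Worked example (assumed values): an A4 portrait page is 21.0 cm wide,
        # so with the default 2.54 cm left/right margins the usable width is
        # 21.0 - (2.54 + 2.54) = 15.92 cm.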
def get_usable_height(self):
"""
Return the height of the page area in centimeters.
The value is the page height less the margins.
"""
return self.__size.get_height() - (self.__tmargin + self.__bmargin)
def get_right_margin(self):
"""
Return the right margin.
:returns: Right margin in centimeters
:rtype: float
"""
return self.__rmargin
def get_left_margin(self):
"""
Return the left margin.
:returns: Left margin in centimeters
:rtype: float
"""
return self.__lmargin
def get_top_margin(self):
"""
Return the top margin.
:returns: Top margin in centimeters
:rtype: float
"""
return self.__tmargin
def get_bottom_margin(self):
"""
Return the bottom margin.
:returns: Bottom margin in centimeters
:rtype: float
"""
return self.__bmargin
| gpl-2.0 |
kordano/samba-ldb-mdb | buildtools/wafadmin/3rdparty/batched_cc.py | 16 | 4654 | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"""
Batched builds - compile faster
Instead of compiling object files one by one, C/C++ compilers are often able to compile several at once:
cc -c ../file1.c ../file2.c ../file3.c
Files are output in the directory where the compiler is called, and dependencies are more difficult
to track (do not run the command on all source files if only one file changes).
We therefore proceed as if the files were compiled one by one, but no command is actually run:
each cc/cpp Task is replaced by a TaskSlave.
A new task called TaskMaster collects the signatures from each slave and finds out the command-line
to run.
To set this up, the method ccroot::create_task is replaced by a new version; to enable batched builds
it is only necessary to import this module in the configuration (no other change is required).
"""
MAX_BATCH = 50
MAXPARALLEL = False
EXT_C = ['.c', '.cc', '.cpp', '.cxx']
import os, threading
import TaskGen, Task, ccroot, Build, Logs
from TaskGen import extension, feature, before
from Constants import *
cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
count = 70000
class batch_task(Task.Task):
color = 'RED'
after = 'cc cxx'
before = 'cc_link cxx_link static_link'
def __str__(self):
return '(batch compilation for %d slaves)\n' % len(self.slaves)
def __init__(self, *k, **kw):
Task.Task.__init__(self, *k, **kw)
self.slaves = []
self.inputs = []
self.hasrun = 0
global count
count += 1
self.idx = count
def add_slave(self, slave):
self.slaves.append(slave)
self.set_run_after(slave)
def runnable_status(self):
for t in self.run_after:
if not t.hasrun:
return ASK_LATER
for t in self.slaves:
#if t.executed:
if t.hasrun != SKIPPED:
return RUN_ME
return SKIP_ME
def run(self):
outputs = []
self.outputs = []
srclst = []
slaves = []
for t in self.slaves:
if t.hasrun != SKIPPED:
slaves.append(t)
srclst.append(t.inputs[0].abspath(self.env))
self.env.SRCLST = srclst
self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
env = self.env
app = env.append_unique
cpppath_st = env['CPPPATH_ST']
		env._CCINCFLAGS = env._CXXINCFLAGS = []
# local flags come first
# set the user-defined includes paths
for i in env['INC_PATHS']:
app('_CCINCFLAGS', cpppath_st % i.abspath())
app('_CXXINCFLAGS', cpppath_st % i.abspath())
app('_CCINCFLAGS', cpppath_st % i.abspath(env))
app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
# set the library include paths
for i in env['CPPPATH']:
app('_CCINCFLAGS', cpppath_st % i)
app('_CXXINCFLAGS', cpppath_st % i)
if self.slaves[0].__class__.__name__ == 'cc':
ret = cc_fun(self)
else:
ret = cxx_fun(self)
if ret:
return ret
for t in slaves:
t.old_post_run()
from TaskGen import extension, feature, after
import cc, cxx
def wrap(fun):
def foo(self, node):
# we cannot control the extension, this sucks
self.obj_ext = '.o'
task = fun(self, node)
if not getattr(self, 'masters', None):
self.masters = {}
self.allmasters = []
if not node.parent.id in self.masters:
m = self.masters[node.parent.id] = self.master = self.create_task('batch')
self.allmasters.append(m)
else:
m = self.masters[node.parent.id]
if len(m.slaves) > MAX_BATCH:
m = self.masters[node.parent.id] = self.master = self.create_task('batch')
self.allmasters.append(m)
m.add_slave(task)
return task
return foo
c_hook = wrap(cc.c_hook)
extension(cc.EXT_CC)(c_hook)
cxx_hook = wrap(cxx.cxx_hook)
extension(cxx.EXT_CXX)(cxx_hook)
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_link')
def link_after_masters(self):
if getattr(self, 'allmasters', None):
for m in self.allmasters:
self.link_task.set_run_after(m)
for c in ['cc', 'cxx']:
t = Task.TaskBase.classes[c]
def run(self):
pass
def post_run(self):
#self.executed=1
pass
def can_retrieve_cache(self):
if self.old_can_retrieve_cache():
for m in self.generator.allmasters:
try:
m.slaves.remove(self)
except ValueError:
pass #this task wasn't included in that master
return 1
else:
return None
setattr(t, 'oldrun', t.__dict__['run'])
setattr(t, 'run', run)
setattr(t, 'old_post_run', t.post_run)
setattr(t, 'post_run', post_run)
setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
setattr(t, 'can_retrieve_cache', can_retrieve_cache)
| gpl-3.0 |
t4skforce/pyspider | pyspider/database/__init__.py | 3 | 3182 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-10-08 15:04:08
import urlparse
def connect_database(url):
"""
create database object by url
mysql:
mysql+type://user:passwd@host:port/database
sqlite:
# relative path
sqlite+type:///path/to/database.db
# absolute path
sqlite+type:////path/to/database.db
# memory database
sqlite+type://
mongodb:
mongodb+type://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]
type:
taskdb
projectdb
resultdb
"""
parsed = urlparse.urlparse(url)
engine, dbtype = parsed.scheme.split('+')
if engine == 'mysql':
parames = {}
if parsed.username:
parames['user'] = parsed.username
if parsed.password:
parames['passwd'] = parsed.password
if parsed.hostname:
parames['host'] = parsed.hostname
if parsed.port:
parames['port'] = parsed.port
if parsed.path.strip('/'):
parames['database'] = parsed.path.strip('/')
if dbtype == 'taskdb':
from .mysql.taskdb import TaskDB
return TaskDB(**parames)
elif dbtype == 'projectdb':
from .mysql.projectdb import ProjectDB
return ProjectDB(**parames)
elif dbtype == 'resultdb':
from .mysql.resultdb import ResultDB
return ResultDB(**parames)
else:
            raise Exception('unknown database type: %s' % dbtype)
elif engine == 'sqlite':
if parsed.path.startswith('//'):
path = '/' + parsed.path.strip('/')
elif parsed.path.startswith('/'):
path = './' + parsed.path.strip('/')
elif not parsed.path:
path = ':memory:'
else:
raise Exception('error path: %s' % parsed.path)
if dbtype == 'taskdb':
from .sqlite.taskdb import TaskDB
return TaskDB(path)
elif dbtype == 'projectdb':
from .sqlite.projectdb import ProjectDB
return ProjectDB(path)
elif dbtype == 'resultdb':
from .sqlite.resultdb import ResultDB
return ResultDB(path)
else:
            raise Exception('unknown database type: %s' % dbtype)
elif engine == 'mongodb':
url = url.replace(parsed.scheme, 'mongodb')
parames = {}
if parsed.path.strip('/'):
parames['database'] = parsed.path.strip('/')
if dbtype == 'taskdb':
from .mongodb.taskdb import TaskDB
return TaskDB(url, **parames)
elif dbtype == 'projectdb':
from .mongodb.projectdb import ProjectDB
return ProjectDB(url, **parames)
elif dbtype == 'resultdb':
from .mongodb.resultdb import ResultDB
return ResultDB(url, **parames)
else:
            raise Exception('unknown database type: %s' % dbtype)
else:
        raise Exception('unknown engine: %s' % engine)
| apache-2.0 |
levigross/pyscanner | mytests/django/contrib/gis/db/backends/spatialite/base.py | 77 | 3463 | from ctypes.util import find_library
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.base import (
_sqlite_extract, _sqlite_date_trunc, _sqlite_regexp, _sqlite_format_dtdelta,
connection_created, Database, DatabaseWrapper as SQLiteDatabaseWrapper,
SQLiteCursorWrapper)
from django.contrib.gis.db.backends.spatialite.client import SpatiaLiteClient
from django.contrib.gis.db.backends.spatialite.creation import SpatiaLiteCreation
from django.contrib.gis.db.backends.spatialite.introspection import SpatiaLiteIntrospection
from django.contrib.gis.db.backends.spatialite.operations import SpatiaLiteOperations
class DatabaseWrapper(SQLiteDatabaseWrapper):
def __init__(self, *args, **kwargs):
# Before we get too far, make sure pysqlite 2.5+ is installed.
if Database.version_info < (2, 5, 0):
raise ImproperlyConfigured('Only versions of pysqlite 2.5+ are '
'compatible with SpatiaLite and GeoDjango.')
# Trying to find the location of the SpatiaLite library.
# Here we are figuring out the path to the SpatiaLite library
# (`libspatialite`). If it's not in the system library path (e.g., it
# cannot be found by `ctypes.util.find_library`), then it may be set
# manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting.
self.spatialite_lib = getattr(settings, 'SPATIALITE_LIBRARY_PATH',
find_library('spatialite'))
if not self.spatialite_lib:
raise ImproperlyConfigured('Unable to locate the SpatiaLite library. '
'Make sure it is in your library path, or set '
'SPATIALITE_LIBRARY_PATH in your settings.'
)
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SpatiaLiteOperations(self)
self.client = SpatiaLiteClient(self)
self.creation = SpatiaLiteCreation(self)
self.introspection = SpatiaLiteIntrospection(self)
def _cursor(self):
if self.connection is None:
self._sqlite_create_connection()
## From here on, customized for GeoDjango ##
# Enabling extension loading on the SQLite connection.
try:
self.connection.enable_load_extension(True)
except AttributeError:
raise ImproperlyConfigured('The pysqlite library does not support C extension loading. '
'Both SQLite and pysqlite must be configured to allow '
'the loading of extensions to use SpatiaLite.'
)
# Loading the SpatiaLite library extension on the connection, and returning
# the created cursor.
cur = self.connection.cursor(factory=SQLiteCursorWrapper)
try:
cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,))
except Exception, msg:
raise ImproperlyConfigured('Unable to load the SpatiaLite library extension '
'"%s" because: %s' % (self.spatialite_lib, msg))
return cur
else:
return self.connection.cursor(factory=SQLiteCursorWrapper)
| mit |
John-Hart/autorest | src/client/Python/msrestazure/test/unittest_exceptions.py | 10 | 7224 | #--------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#--------------------------------------------------------------------------
import json
import unittest
try:
from unittest import mock
except ImportError:
import mock
from requests import Response
from msrest import Deserializer, Configuration
from msrest.exceptions import RequestException
from msrestazure.azure_exceptions import CloudErrorData, CloudError
class TestCloudException(unittest.TestCase):
def setUp(self):
self.cfg = Configuration("https://my_endpoint.com")
self._d = Deserializer()
self._d.dependencies = {'CloudErrorData': CloudErrorData}
return super(TestCloudException, self).setUp()
def test_cloud_exception(self):
message = {
'code': '500',
'message': 'Bad Request',
'values': {'invalid_attribute':'data'}
}
cloud_exp = self._d(CloudErrorData(), message)
self.assertEqual(cloud_exp.message, 'Bad Request')
self.assertEqual(cloud_exp.error, '500')
self.assertEqual(cloud_exp.data['invalid_attribute'], 'data')
message = {
'code': '500',
'message': {'value': 'Bad Request\nRequest:34875\nTime:1999-12-31T23:59:59-23:59'},
'values': {'invalid_attribute':'data'}
}
cloud_exp = self._d(CloudErrorData(), message)
self.assertEqual(cloud_exp.message, 'Bad Request')
self.assertEqual(cloud_exp.error, '500')
self.assertEqual(cloud_exp.data['invalid_attribute'], 'data')
message = {
'code': '500',
'message': {'value': 'Bad Request\nRequest:34875'},
'values': {'invalid_attribute':'data'}
}
cloud_exp = self._d(CloudErrorData(), message)
self.assertEqual(cloud_exp.message, 'Bad Request')
self.assertEqual(cloud_exp.request_id, '34875')
self.assertEqual(cloud_exp.error, '500')
self.assertEqual(cloud_exp.data['invalid_attribute'], 'data')
message = {}
cloud_exp = self._d(CloudErrorData(), message)
self.assertEqual(cloud_exp.message, None)
self.assertEqual(cloud_exp.error, None)
message = ('{\r\n "odata.metadata":"https://account.region.batch.azure.com/$metadata#'
'Microsoft.Azure.Batch.Protocol.Entities.Container.errors/@Element","code":'
'"InvalidHeaderValue","message":{\r\n "lang":"en-US","value":"The value '
'for one of the HTTP headers is not in the correct format.\\nRequestId:5f4c1f05-'
'603a-4495-8e80-01f776310bbd\\nTime:2016-01-04T22:12:33.9245931Z"\r\n },'
'"values":[\r\n {\r\n "key":"HeaderName","value":"Content-Type"\r\n }'
',{\r\n "key":"HeaderValue","value":"application/json; odata=minimalmetadata;'
' charset=utf-8"\r\n }\r\n ]\r\n}')
message = json.loads(message)
cloud_exp = self._d(CloudErrorData(), message)
self.assertEqual(
cloud_exp.message,
"The value for one of the HTTP headers is not in the correct format.")
def test_cloud_error(self):
response = mock.create_autospec(Response)
response.status_code = 400
response.reason = 'BadRequest'
message = {
'code': '500',
'message': {'value': 'Bad Request\nRequest:34875\nTime:1999-12-31T23:59:59-23:59'},
'values': {'invalid_attribute':'data'}
}
response.content = json.dumps(message)
response.json = lambda: json.loads(response.content)
error = CloudError(response)
self.assertEqual(error.message, 'Bad Request')
self.assertEqual(error.status_code, 400)
self.assertIsInstance(error.error, CloudErrorData)
message = { 'error': {
'code': '500',
'message': {'value': 'Bad Request\nRequest:34875\nTime:1999-12-31T23:59:59-23:59'},
'values': {'invalid_attribute':'data'}
}}
response.content = json.dumps(message)
error = CloudError(response)
self.assertEqual(error.message, 'Bad Request')
self.assertEqual(error.status_code, 400)
self.assertIsInstance(error.error, CloudErrorData)
error = CloudError(response, "Request failed with bad status")
self.assertEqual(error.message, "Request failed with bad status")
self.assertEqual(error.status_code, 400)
self.assertIsInstance(error.error, Response)
response.content = "{"
error = CloudError(response)
self.assertEqual(error.message, "Operation failed with status: 400. Details: none")
response.content = json.dumps({'message':'server error'})
error = CloudError(response)
self.assertEqual(error.message, "server error")
self.assertEqual(error.status_code, 400)
response.content = "{"
response.raise_for_status.side_effect = RequestException("FAILED!")
error = CloudError(response)
self.assertEqual(error.message,
"Operation failed with status: 'BadRequest'. Details: FAILED!")
self.assertIsInstance(error.error, RequestException)
response.content = '{\r\n "odata.metadata":"https://account.region.batch.azure.com/$metadata#Microsoft.Azure.Batch.Protocol.Entities.Container.errors/@Element","code":"InvalidHeaderValue","message":{\r\n "lang":"en-US","value":"The value for one of the HTTP headers is not in the correct format.\\nRequestId:5f4c1f05-603a-4495-8e80-01f776310bbd\\nTime:2016-01-04T22:12:33.9245931Z"\r\n },"values":[\r\n {\r\n "key":"HeaderName","value":"Content-Type"\r\n },{\r\n "key":"HeaderValue","value":"application/json; odata=minimalmetadata; charset=utf-8"\r\n }\r\n ]\r\n}'
error = CloudError(response)
self.assertIsInstance(error.error, CloudErrorData)
if __name__ == '__main__':
unittest.main()
| mit |
jbarbuto/raven-python | raven/contrib/zope/__init__.py | 3 | 4165 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
raven.contrib.zope
~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from inspect import getouterframes, currentframe, getinnerframes
from raven.handlers.logging import SentryHandler
from ZConfig.components.logger.factory import Factory
import logging
from raven.utils.stacks import iter_stack_frames
logger = logging.getLogger(__name__)
class ZopeSentryHandlerFactory(Factory):
def getLevel(self):
return self.section.level
def create(self):
return ZopeSentryHandler(**self.section.__dict__)
def __init__(self, section):
Factory.__init__(self)
self.section = section
class ZopeSentryHandler(SentryHandler):
'''
Zope unfortunately eats the stack trace information.
    To recover the stack trace and other useful details from the request
    object, this class inspects the surrounding stack frames when the emit
    method is invoked.
'''
def __init__(self, *args, **kw):
super(ZopeSentryHandler, self).__init__(*args, **kw)
level = kw.get('level', logging.ERROR)
self.setLevel(level)
def can_record(self, record):
return not (
record.name == 'raven' or
record.name.startswith(('sentry.errors', 'raven.'))
)
def emit(self, record):
if record.levelno <= logging.ERROR and self.can_record(record):
request = None
exc_info = None
for frame_info in getouterframes(currentframe()):
frame = frame_info[0]
if not request:
request = frame.f_locals.get('request', None)
if not request:
view = frame.f_locals.get('self', None)
try:
request = getattr(view, 'request', None)
except RuntimeError:
request = None
if not exc_info:
exc_info = frame.f_locals.get('exc_info', None)
if not hasattr(exc_info, '__getitem__'):
exc_info = None
if request and exc_info:
break
if exc_info:
record.exc_info = exc_info
record.stack = \
iter_stack_frames(getinnerframes(exc_info[2]))
if request:
try:
body_pos = request.stdin.tell()
request.stdin.seek(0)
body = request.stdin.read()
request.stdin.seek(body_pos)
http = dict(headers=request.environ,
url=request.getURL(),
method=request.method,
host=request.environ.get('REMOTE_ADDR',
''), data=body)
if 'HTTP_USER_AGENT' in http['headers']:
if 'User-Agent' not in http['headers']:
http['headers']['User-Agent'] = \
http['headers']['HTTP_USER_AGENT']
if 'QUERY_STRING' in http['headers']:
http['query_string'] = http['headers']['QUERY_STRING']
setattr(record, 'request', http)
user = request.get('AUTHENTICATED_USER', None)
if user is not None:
user_dict = dict(id=user.getId(),
is_authenticated=user.has_role('Authenticated'),
email=user.getProperty('email') or '')
else:
user_dict = {'is_authenticated': False}
setattr(record, 'user', user_dict)
except (AttributeError, KeyError):
logger.warning('Could not extract data from request', exc_info=True)
return super(ZopeSentryHandler, self).emit(record)
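# A minimal sketch of wiring this handler up by hand, outside of ZConfig (the DSN
# below is a placeholder and must be replaced with a real Sentry DSN):
#
#   import logging
#   handler = ZopeSentryHandler('https://public:secret@sentry.example.com/1')
#   logging.getLogger().addHandler(handler)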
| bsd-3-clause |
andrewyoung1991/abjad | abjad/tools/scoretools/test/test_scoretools_Note_grace.py | 2 | 2385 | # -*- encoding: utf-8 -*-
from abjad import *
def test_scoretools_Note_grace_01():
r'''Attach one grace note.
'''
note = Note("c'4")
grace_container = scoretools.GraceContainer([Note(2, (1, 16))])
attach(grace_container, note)
assert systemtools.TestManager.compare(
note,
r'''
\grace {
d'16
}
c'4
'''
)
def test_scoretools_Note_grace_02():
r'''Attach several grace notes.
'''
note = Note("c'4")
grace_container = scoretools.GraceContainer([Note(0, (1, 16)), Note(2, (1, 16)), Note(4, (1, 16))])
attach(grace_container, note)
assert systemtools.TestManager.compare(
note,
r'''
\grace {
c'16
d'16
e'16
}
c'4
'''
)
def test_scoretools_Note_grace_03():
r'''Attach one appoggiatura.
'''
note = Note("c'4")
grace_container = scoretools.GraceContainer([Note(2, (1, 16))], kind='appoggiatura')
attach(grace_container, note)
assert systemtools.TestManager.compare(
note,
r'''
\appoggiatura {
d'16
}
c'4
'''
)
def test_scoretools_Note_grace_04():
r'''Attach one acciaccatura.
'''
note = Note("c'4")
grace = scoretools.GraceContainer([Note(2, (1, 16))], kind='acciaccatura')
attach(grace, note)
assert systemtools.TestManager.compare(
note,
r'''
\acciaccatura {
d'16
}
c'4
'''
)
def test_scoretools_Note_grace_05():
r'''Attach one after grace note.
'''
note = Note("c'4")
grace = scoretools.GraceContainer([Note(2, (1, 16))], kind='after')
attach(grace, note)
assert systemtools.TestManager.compare(
note,
r'''
\afterGrace
c'4
{
d'16
}
'''
)
def test_scoretools_Note_grace_06():
r'''Attach several after grace notes.
'''
note = Note("c'4")
grace = scoretools.GraceContainer([Note(0, (1, 16)), Note(2, (1, 16)), Note(4, (1, 16))], kind='after')
attach(grace, note)
assert systemtools.TestManager.compare(
note,
r'''
\afterGrace
c'4
{
c'16
d'16
e'16
}
'''
) | gpl-3.0 |
sid-kap/pants | src/python/pants/scm/scm.py | 16 | 3749 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from abc import abstractmethod, abstractproperty
from pants.util.meta import AbstractClass
class Scm(AbstractClass):
"""Abstracts high-level scm operations needed by pants core and pants tasks."""
class ScmException(Exception):
"""Indicates a problem interacting with the scm."""
class RemoteException(ScmException):
"""Indicates a problem performing a remote scm operation."""
class LocalException(ScmException):
"""Indicates a problem performing a local scm operation."""
@abstractproperty
def current_rev_identifier(self):
"""Identifier for the tip/head of the current branch eg. "HEAD" in git"""
@abstractproperty
def commit_id(self):
"""Returns the id of the current commit."""
@abstractproperty
def server_url(self):
"""Returns the url of the (default) remote server."""
@abstractproperty
def tag_name(self):
"""Returns the name of the current tag if any."""
@abstractproperty
def branch_name(self):
"""Returns the name of the current branch if any."""
@abstractmethod
def commit_date(self, commit_reference):
"""Returns the commit date of the referenced commit."""
@abstractmethod
def changed_files(self, from_commit=None, include_untracked=False, relative_to=None):
"""Returns a list of files with uncommitted changes or else files changed since from_commit.
If include_untracked=True then any workspace files that are un-tracked by the scm and not
ignored will be included as well.
If relative_to is None, then the paths will be relative to the working tree of the SCM
implementation (which might NOT match the buildroot.)
"""
@abstractmethod
def changes_in(self, diffspec, relative_to=None):
"""Returns a list of files changed by some diffspec (eg sha, range, ref, etc)
:param str diffspec: Some diffspec meaningful to the SCM.
:param str relative_to: a path to which results should be relative (instead of SCM root)
"""
@abstractmethod
def changelog(self, from_commit=None, files=None):
"""Produces a changelog from the given commit or the 1st commit if none is specified until the
present workspace commit for the changes affecting the given files.
If no files are given then the full change log should be produced.
"""
@abstractmethod
def refresh(self):
"""Refreshes the local workspace with any changes on the server.
Subclasses should raise some form of ScmException to indicate a refresh error whether it be
a conflict or a communication channel error.
"""
@abstractmethod
def tag(self, name, message=None):
"""Tags the state in the local workspace and ensures this tag is on the server.
Subclasses should raise RemoteException if there is a problem getting the tag to the server.
"""
@abstractmethod
def commit(self, message):
"""Commits all the changes for tracked files in the local workspace.
Subclasses should raise LocalException if there is a problem making the commit.
"""
@abstractmethod
def add(self, *paths):
"""Add paths to the set of tracked files.
Subclasses should raise LocalException if there is a problem adding the paths.
"""
@abstractmethod
def push(self):
"""Push the current branch of the local repository to the corresponding local branch
on the server
Subclasses should raise RemoteException if there is a problem getting the commit to the
server.
"""
| apache-2.0 |
klmitch/glance | glance/tests/functional/v2/test_metadef_resourcetypes.py | 2 | 10755 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import six
import webob.exc
from wsme.rest import json
from glance.api import policy
from glance.api.v2.model.metadef_resource_type import ResourceType
from glance.api.v2.model.metadef_resource_type import ResourceTypeAssociation
from glance.api.v2.model.metadef_resource_type import ResourceTypeAssociations
from glance.api.v2.model.metadef_resource_type import ResourceTypes
from glance.common import exception
from glance.common import wsgi
import glance.db
import glance.gateway
from glance.i18n import _, _LE
import glance.notifier
import glance.schema
LOG = logging.getLogger(__name__)
class ResourceTypeController(object):
def __init__(self, db_api=None, policy_enforcer=None):
self.db_api = db_api or glance.db.get_api()
self.policy = policy_enforcer or policy.Enforcer()
self.gateway = glance.gateway.Gateway(db_api=self.db_api,
policy_enforcer=self.policy)
def index(self, req):
try:
filters = {'namespace': None}
rs_type_repo = self.gateway.get_metadef_resource_type_repo(
req.context)
db_resource_type_list = rs_type_repo.list(filters=filters)
resource_type_list = [ResourceType.to_wsme_model(
resource_type) for resource_type in db_resource_type_list]
resource_types = ResourceTypes()
resource_types.resource_types = resource_type_list
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except Exception as e:
LOG.error(e)
raise webob.exc.HTTPInternalServerError(e)
return resource_types
def show(self, req, namespace):
try:
filters = {'namespace': namespace}
rs_type_repo = self.gateway.get_metadef_resource_type_repo(
req.context)
db_resource_type_list = rs_type_repo.list(filters=filters)
resource_type_list = [ResourceTypeAssociation.to_wsme_model(
resource_type) for resource_type in db_resource_type_list]
resource_types = ResourceTypeAssociations()
resource_types.resource_type_associations = resource_type_list
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except Exception as e:
LOG.error(e)
raise webob.exc.HTTPInternalServerError(e)
return resource_types
def create(self, req, resource_type, namespace):
rs_type_factory = self.gateway.get_metadef_resource_type_factory(
req.context)
rs_type_repo = self.gateway.get_metadef_resource_type_repo(req.context)
try:
new_resource_type = rs_type_factory.new_resource_type(
namespace=namespace, **resource_type.to_dict())
rs_type_repo.add(new_resource_type)
except exception.Forbidden as e:
msg = (_LE("Forbidden to create resource type. "
"Reason: %(reason)s")
% {'reason': encodeutils.exception_to_unicode(e)})
LOG.error(msg)
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except Exception as e:
LOG.error(e)
raise webob.exc.HTTPInternalServerError()
return ResourceTypeAssociation.to_wsme_model(new_resource_type)
def delete(self, req, namespace, resource_type):
rs_type_repo = self.gateway.get_metadef_resource_type_repo(req.context)
try:
filters = {}
found = False
filters['namespace'] = namespace
db_resource_type_list = rs_type_repo.list(filters=filters)
for db_resource_type in db_resource_type_list:
if db_resource_type.name == resource_type:
db_resource_type.delete()
rs_type_repo.remove(db_resource_type)
found = True
if not found:
raise exception.NotFound()
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
msg = (_("Failed to find resource type %(resourcetype)s to "
"delete") % {'resourcetype': resource_type})
LOG.error(msg)
raise webob.exc.HTTPNotFound(explanation=msg)
except Exception as e:
LOG.error(e)
raise webob.exc.HTTPInternalServerError()
class RequestDeserializer(wsgi.JSONRequestDeserializer):
_disallowed_properties = ['created_at', 'updated_at']
def __init__(self, schema=None):
super(RequestDeserializer, self).__init__()
self.schema = schema or get_schema()
def _get_request_body(self, request):
output = super(RequestDeserializer, self).default(request)
if 'body' not in output:
msg = _('Body expected in request.')
raise webob.exc.HTTPBadRequest(explanation=msg)
return output['body']
@classmethod
def _check_allowed(cls, image):
for key in cls._disallowed_properties:
if key in image:
msg = _("Attribute '%s' is read-only.") % key
raise webob.exc.HTTPForbidden(
explanation=encodeutils.exception_to_unicode(msg))
def create(self, request):
body = self._get_request_body(request)
self._check_allowed(body)
try:
self.schema.validate(body)
except exception.InvalidObject as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
resource_type = json.fromjson(ResourceTypeAssociation, body)
return dict(resource_type=resource_type)
class ResponseSerializer(wsgi.JSONResponseSerializer):
def __init__(self, schema=None):
super(ResponseSerializer, self).__init__()
self.schema = schema
def show(self, response, result):
resource_type_json = json.tojson(ResourceTypeAssociations, result)
body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def index(self, response, result):
resource_type_json = json.tojson(ResourceTypes, result)
body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def create(self, response, result):
resource_type_json = json.tojson(ResourceTypeAssociation, result)
response.status_int = 201
body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def delete(self, response, result):
response.status_int = 204
def _get_base_properties():
return {
'name': {
'type': 'string',
'description': _('Resource type names should be aligned with Heat '
'resource types whenever possible: '
'http://docs.openstack.org/developer/heat/'
'template_guide/openstack.html'),
'maxLength': 80,
},
'prefix': {
'type': 'string',
'description': _('Specifies the prefix to use for the given '
'resource type. Any properties in the namespace '
'should be prefixed with this prefix when being '
'applied to the specified resource type. Must '
'include prefix separator (e.g. a colon :).'),
'maxLength': 80,
},
'properties_target': {
'type': 'string',
'description': _('Some resource types allow more than one key / '
'value pair per instance. For example, Cinder '
'allows user and image metadata on volumes. Only '
'the image properties metadata is evaluated by '
'Nova (scheduling or drivers). This property '
'allows a namespace target to remove the '
'ambiguity.'),
'maxLength': 80,
},
"created_at": {
"type": "string",
"readOnly": True,
"description": _("Date and time of resource type association"),
"format": "date-time"
},
"updated_at": {
"type": "string",
"readOnly": True,
"description": _("Date and time of the last resource type "
"association modification"),
"format": "date-time"
}
}
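# Illustrative association body that the schema built below is intended to accept
# (values are made up for demonstration; 'OS::Cinder::Volume' is simply a
# plausible Heat-style resource type name):
#
#   {
#       "name": "OS::Cinder::Volume",
#       "prefix": "cinder_img:",
#       "properties_target": "image"
#   }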
def get_schema():
properties = _get_base_properties()
mandatory_attrs = ResourceTypeAssociation.get_mandatory_attrs()
schema = glance.schema.Schema(
'resource_type_association',
properties,
required=mandatory_attrs,
)
return schema
def get_collection_schema():
resource_type_schema = get_schema()
return glance.schema.CollectionSchema('resource_type_associations',
resource_type_schema)
def create_resource():
"""ResourceTypeAssociation resource factory method"""
schema = get_schema()
deserializer = RequestDeserializer(schema)
serializer = ResponseSerializer(schema)
controller = ResourceTypeController()
return wsgi.Resource(controller, deserializer, serializer)
| apache-2.0 |