repo_name (string, 6-100 chars) | path (string, 4-294 chars) | copies (string, 1-5 chars) | size (string, 4-6 chars) | content (string, 606-896k chars) | license (string, 15 classes) | var_hash (int64, -9,223,186,179,200,150,000 to 9,223,291,175B) | doc_hash (int64, -9,223,304,365,658,930,000 to 9,223,309,051B) | line_mean (float64, 3.5-99.8) | line_max (int64, 13-999) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|
GranPC/linux-asus-flo | scripts/build-all.py | 1182 | 9486 | #! /usr/bin/env python
# Copyright (c) 2009-2011, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Build the kernel for all targets using the Android build environment.
#
# TODO: Accept arguments to indicate what to build.
import errno
import glob
from optparse import OptionParser
import subprocess
import os
import os.path
import shutil
import sys
version = 'build-all.py, version 0.01'
build_dir = '../all-kernels'
make_command = ["vmlinux", "modules"]
make_env = os.environ
make_env.update({
'ARCH': 'arm',
'CROSS_COMPILE': 'arm-none-linux-gnueabi-',
'KCONFIG_NOTIMESTAMP': 'true' })
all_options = {}
def error(msg):
sys.stderr.write("error: %s\n" % msg)
def fail(msg):
"""Fail with a user-printed message"""
error(msg)
sys.exit(1)
def check_kernel():
"""Ensure that PWD is a kernel directory"""
if (not os.path.isfile('MAINTAINERS') or
not os.path.isfile('arch/arm/mach-msm/Kconfig')):
fail("This doesn't seem to be an MSM kernel dir")
def check_build():
"""Ensure that the build directory is present."""
if not os.path.isdir(build_dir):
try:
os.makedirs(build_dir)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def update_config(file, str):
print 'Updating %s with \'%s\'\n' % (file, str)
defconfig = open(file, 'a')
defconfig.write(str + '\n')
defconfig.close()
def scan_configs():
"""Get the full list of defconfigs appropriate for this tree."""
names = {}
for n in glob.glob('arch/arm/configs/[fm]sm[0-9-]*_defconfig'):
names[os.path.basename(n)[:-10]] = n
for n in glob.glob('arch/arm/configs/qsd*_defconfig'):
names[os.path.basename(n)[:-10]] = n
for n in glob.glob('arch/arm/configs/apq*_defconfig'):
names[os.path.basename(n)[:-10]] = n
return names
class Builder:
def __init__(self, logname):
self.logname = logname
self.fd = open(logname, 'w')
def run(self, args):
devnull = open('/dev/null', 'r')
proc = subprocess.Popen(args, stdin=devnull,
env=make_env,
bufsize=0,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
count = 0
# for line in proc.stdout:
rawfd = proc.stdout.fileno()
while True:
line = os.read(rawfd, 1024)
if not line:
break
self.fd.write(line)
self.fd.flush()
if all_options.verbose:
sys.stdout.write(line)
sys.stdout.flush()
else:
for i in range(line.count('\n')):
count += 1
if count == 64:
count = 0
print
sys.stdout.write('.')
sys.stdout.flush()
print
result = proc.wait()
self.fd.close()
return result
failed_targets = []
def build(target):
dest_dir = os.path.join(build_dir, target)
log_name = '%s/log-%s.log' % (build_dir, target)
print 'Building %s in %s log %s' % (target, dest_dir, log_name)
if not os.path.isdir(dest_dir):
os.mkdir(dest_dir)
defconfig = 'arch/arm/configs/%s_defconfig' % target
dotconfig = '%s/.config' % dest_dir
savedefconfig = '%s/defconfig' % dest_dir
shutil.copyfile(defconfig, dotconfig)
devnull = open('/dev/null', 'r')
subprocess.check_call(['make', 'O=%s' % dest_dir,
'%s_defconfig' % target], env=make_env, stdin=devnull)
devnull.close()
if not all_options.updateconfigs:
build = Builder(log_name)
result = build.run(['make', 'O=%s' % dest_dir] + make_command)
if result != 0:
if all_options.keep_going:
failed_targets.append(target)
fail_or_error = error
else:
fail_or_error = fail
fail_or_error("Failed to build %s, see %s" % (target, build.logname))
# Copy the defconfig back.
if all_options.configs or all_options.updateconfigs:
devnull = open('/dev/null', 'r')
subprocess.check_call(['make', 'O=%s' % dest_dir,
'savedefconfig'], env=make_env, stdin=devnull)
devnull.close()
shutil.copyfile(savedefconfig, defconfig)
def build_many(allconf, targets):
print "Building %d target(s)" % len(targets)
for target in targets:
if all_options.updateconfigs:
update_config(allconf[target], all_options.updateconfigs)
build(target)
if failed_targets:
fail('\n '.join(["Failed targets:"] +
[target for target in failed_targets]))
def main():
global make_command
check_kernel()
check_build()
configs = scan_configs()
usage = ("""
%prog [options] all -- Build all targets
%prog [options] target target ... -- List specific targets
%prog [options] perf -- Build all perf targets
%prog [options] noperf -- Build all non-perf targets""")
parser = OptionParser(usage=usage, version=version)
parser.add_option('--configs', action='store_true',
dest='configs',
help="Copy configs back into tree")
parser.add_option('--list', action='store_true',
dest='list',
help='List available targets')
parser.add_option('-v', '--verbose', action='store_true',
dest='verbose',
help='Output to stdout in addition to log file')
parser.add_option('--oldconfig', action='store_true',
dest='oldconfig',
help='Only process "make oldconfig"')
parser.add_option('--updateconfigs',
dest='updateconfigs',
help="Update defconfigs with provided option setting, "
"e.g. --updateconfigs=\'CONFIG_USE_THING=y\'")
parser.add_option('-j', '--jobs', type='int', dest="jobs",
help="Number of simultaneous jobs")
parser.add_option('-l', '--load-average', type='int',
dest='load_average',
help="Don't start multiple jobs unless load is below LOAD_AVERAGE")
parser.add_option('-k', '--keep-going', action='store_true',
dest='keep_going', default=False,
help="Keep building other targets if a target fails")
parser.add_option('-m', '--make-target', action='append',
help='Build the indicated make target (default: %s)' %
' '.join(make_command))
(options, args) = parser.parse_args()
global all_options
all_options = options
if options.list:
print "Available targets:"
for target in configs.keys():
print " %s" % target
sys.exit(0)
if options.oldconfig:
make_command = ["oldconfig"]
elif options.make_target:
make_command = options.make_target
if options.jobs:
make_command.append("-j%d" % options.jobs)
if options.load_average:
make_command.append("-l%d" % options.load_average)
if args == ['all']:
build_many(configs, configs.keys())
elif args == ['perf']:
targets = []
for t in configs.keys():
if "perf" in t:
targets.append(t)
build_many(configs, targets)
elif args == ['noperf']:
targets = []
for t in configs.keys():
if "perf" not in t:
targets.append(t)
build_many(configs, targets)
elif len(args) > 0:
targets = []
for t in args:
if t not in configs.keys():
parser.error("Target '%s' not one of %s" % (t, configs.keys()))
targets.append(t)
build_many(configs, targets)
else:
parser.error("Must specify a target to build, or 'all'")
if __name__ == "__main__":
main()
| gpl-2.0 | 7,418,122,200,342,528,000 | 1,903,170,466,947,049,500 | 34.395522 | 81 | 0.593928 | false |
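The `Builder.run` method in the row above tees raw `make` output into a per-target log while printing progress dots. A minimal standalone sketch of that tee pattern follows; the command and log name in the usage comment are placeholders, not values taken from the script.

```python
import os
import subprocess
import sys

def run_and_log(args, logname):
    """Run a command, copy its raw output to a log file, and print a dot per output line."""
    with open(logname, 'wb') as log, open(os.devnull, 'rb') as devnull:
        proc = subprocess.Popen(args, stdin=devnull,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        while True:
            chunk = proc.stdout.read(1024)
            if not chunk:
                break
            log.write(chunk)
            sys.stdout.write('.' * chunk.count(b'\n'))
            sys.stdout.flush()
        return proc.wait()

# Hypothetical usage; the make target and log name are assumptions:
#   status = run_and_log(['make', '-j4', 'vmlinux'], 'build.log')
```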
ritzk/ansible-modules-core | cloud/openstack/_nova_compute.py | 66 | 22440 | #!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, Benno Joy <[email protected]>
# (c) 2013, John Dewey <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import operator
import os
import time
try:
from novaclient.v1_1 import client as nova_client
from novaclient.v1_1 import floating_ips
from novaclient import exceptions
from novaclient import utils
HAS_NOVACLIENT = True
except ImportError:
HAS_NOVACLIENT = False
DOCUMENTATION = '''
---
module: nova_compute
version_added: "1.2"
deprecated: Deprecated in 2.0. Use os_server instead
short_description: Create/Delete VMs from OpenStack
description:
- Create or Remove virtual machines from Openstack.
options:
login_username:
description:
- login username to authenticate to keystone
required: true
default: admin
login_password:
description:
- Password of login user
required: true
default: 'yes'
login_tenant_name:
description:
- The tenant name of the login user
required: true
default: 'yes'
auth_url:
description:
- The keystone url for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
region_name:
description:
- Name of the region
required: false
default: None
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
name:
description:
- Name that has to be given to the instance
required: true
default: None
image_id:
description:
- The id of the base image to boot. Mutually exclusive with image_name
required: true
default: None
image_name:
description:
- The name of the base image to boot. Mutually exclusive with image_id
required: true
default: None
version_added: "1.8"
image_exclude:
description:
- Text to use to filter image names, for the case, such as HP, where there are multiple image names matching the common identifying portions. image_exclude is a negative match filter - it is text that may not exist in the image name. Defaults to "(deprecated)"
version_added: "1.8"
flavor_id:
description:
- The id of the flavor in which the new VM has to be created. Mutually exclusive with flavor_ram
required: false
default: 1
flavor_ram:
description:
- The minimum amount of ram in MB that the flavor in which the new VM has to be created must have. Mutually exclusive with flavor_id
required: false
default: 1
version_added: "1.8"
flavor_include:
description:
- Text to use to filter flavor names, for the case, such as Rackspace, where there are multiple flavors that have the same ram count. flavor_include is a positive match filter - it must exist in the flavor name.
version_added: "1.8"
key_name:
description:
- The key pair name to be used when creating a VM
required: false
default: None
security_groups:
description:
- The name of the security group to which the VM should be added
required: false
default: None
nics:
description:
- A list of network id's to which the VM's interface should be attached
required: false
default: None
auto_floating_ip:
description:
- Should a floating ip be auto created and assigned
required: false
default: 'no'
version_added: "1.8"
floating_ips:
description:
- list of valid floating IPs that pre-exist to assign to this node
required: false
default: None
version_added: "1.8"
floating_ip_pools:
description:
- list of floating IP pools from which to choose a floating IP
required: false
default: None
version_added: "1.8"
availability_zone:
description:
- Name of the availability zone
required: false
default: None
version_added: "1.8"
meta:
description:
- A list of key value pairs that should be provided as a metadata to the new VM
required: false
default: None
wait:
description:
- If the module should wait for the VM to be created.
required: false
default: 'yes'
wait_for:
description:
- The amount of time the module should wait for the VM to get into active state
required: false
default: 180
config_drive:
description:
- Whether to boot the server with config drive enabled
required: false
default: 'no'
version_added: "1.8"
user_data:
description:
- Opaque blob of data which is made available to the instance
required: false
default: None
version_added: "1.6"
scheduler_hints:
description:
- Arbitrary key/value pairs to the scheduler for custom use
required: false
default: None
version_added: "1.9"
requirements:
- "python >= 2.6"
- "python-novaclient"
'''
EXAMPLES = '''
# Creates a new VM and attaches to a network and passes metadata to the instance
- nova_compute:
state: present
login_username: admin
login_password: admin
login_tenant_name: admin
name: vm1
image_id: 4f905f38-e52a-43d2-b6ec-754a13ffb529
key_name: ansible_key
wait_for: 200
flavor_id: 4
nics:
- net-id: 34605f38-e52a-25d2-b6ec-754a13ffb723
meta:
hostname: test1
group: uge_master
# Creates a new VM in HP Cloud AE1 region availability zone az2 and automatically assigns a floating IP
- name: launch a nova instance
hosts: localhost
tasks:
- name: launch an instance
nova_compute:
state: present
login_username: username
login_password: Equality7-2521
login_tenant_name: username-project1
name: vm1
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
region_name: region-b.geo-1
availability_zone: az2
image_id: 9302692b-b787-4b52-a3a6-daebb79cb498
key_name: test
wait_for: 200
flavor_id: 101
security_groups: default
auto_floating_ip: yes
# Creates a new VM in HP Cloud AE1 region availability zone az2 and assigns a pre-known floating IP
- name: launch a nova instance
hosts: localhost
tasks:
- name: launch an instance
nova_compute:
state: present
login_username: username
login_password: Equality7-2521
login_tenant_name: username-project1
name: vm1
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
region_name: region-b.geo-1
availability_zone: az2
image_id: 9302692b-b787-4b52-a3a6-daebb79cb498
key_name: test
wait_for: 200
flavor_id: 101
        floating_ips:
- 12.34.56.79
# Creates a new VM with 4G of RAM on Ubuntu Trusty, ignoring deprecated images
- name: launch a nova instance
hosts: localhost
tasks:
- name: launch an instance
nova_compute:
name: vm1
state: present
login_username: username
login_password: Equality7-2521
login_tenant_name: username-project1
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
region_name: region-b.geo-1
image_name: Ubuntu Server 14.04
image_exclude: deprecated
flavor_ram: 4096
# Creates a new VM with 4G of RAM on Ubuntu Trusty on a Rackspace Performance node in DFW
- name: launch a nova instance
hosts: localhost
tasks:
- name: launch an instance
nova_compute:
name: vm1
state: present
login_username: username
login_password: Equality7-2521
login_tenant_name: username-project1
auth_url: https://identity.api.rackspacecloud.com/v2.0/
region_name: DFW
image_name: Ubuntu 14.04 LTS (Trusty Tahr) (PVHVM)
flavor_ram: 4096
flavor_include: Performance
'''
def _delete_server(module, nova):
name = None
server_list = None
try:
server_list = nova.servers.list(True, {'name': module.params['name']})
if server_list:
server = [x for x in server_list if x.name == module.params['name']]
nova.servers.delete(server.pop())
except Exception, e:
module.fail_json( msg = "Error in deleting vm: %s" % e.message)
if module.params['wait'] == 'no':
module.exit_json(changed = True, result = "deleted")
expire = time.time() + int(module.params['wait_for'])
while time.time() < expire:
name = nova.servers.list(True, {'name': module.params['name']})
if not name:
module.exit_json(changed = True, result = "deleted")
time.sleep(5)
module.fail_json(msg = "Timed out waiting for server to get deleted, please check manually")
def _add_floating_ip_from_pool(module, nova, server):
# instantiate FloatingIPManager object
floating_ip_obj = floating_ips.FloatingIPManager(nova)
# empty dict and list
usable_floating_ips = {}
pools = []
# user specified
pools = module.params['floating_ip_pools']
# get the list of all floating IPs. Mileage may
# vary according to Nova Compute configuration
# per cloud provider
all_floating_ips = floating_ip_obj.list()
# iterate through all pools of IP address. Empty
# string means all and is the default value
for pool in pools:
# temporary list per pool
pool_ips = []
# loop through all floating IPs
for f_ip in all_floating_ips:
# if not reserved and the correct pool, add
if f_ip.fixed_ip is None and (f_ip.pool == pool):
pool_ips.append(f_ip.ip)
# only need one
break
# if the list is empty, add for this pool
if not pool_ips:
try:
new_ip = nova.floating_ips.create(pool)
except Exception, e:
module.fail_json(msg = "Unable to create floating ip: %s" % (e.message))
pool_ips.append(new_ip.ip)
# Add to the main list
usable_floating_ips[pool] = pool_ips
# finally, add ip(s) to instance for each pool
for pool in usable_floating_ips:
for ip in usable_floating_ips[pool]:
try:
server.add_floating_ip(ip)
# We only need to assign one ip - but there is an inherent
# race condition and some other cloud operation may have
# stolen an available floating ip
break
except Exception, e:
module.fail_json(msg = "Error attaching IP %s to instance %s: %s " % (ip, server.id, e.message))
def _add_floating_ip_list(module, server, ips):
# add ip(s) to instance
for ip in ips:
try:
server.add_floating_ip(ip)
except Exception, e:
module.fail_json(msg = "Error attaching IP %s to instance %s: %s " % (ip, server.id, e.message))
def _add_auto_floating_ip(module, nova, server):
try:
new_ip = nova.floating_ips.create()
except Exception as e:
module.fail_json(msg = "Unable to create floating ip: %s" % (e))
try:
server.add_floating_ip(new_ip)
except Exception as e:
# Clean up - we auto-created this ip, and it's not attached
# to the server, so the cloud will not know what to do with it
        nova.floating_ips.delete(new_ip)
        module.fail_json(msg = "Error attaching IP %s to instance %s: %s " % (new_ip.ip, server.id, e.message))
def _add_floating_ip(module, nova, server):
if module.params['floating_ip_pools']:
_add_floating_ip_from_pool(module, nova, server)
elif module.params['floating_ips']:
_add_floating_ip_list(module, server, module.params['floating_ips'])
elif module.params['auto_floating_ip']:
_add_auto_floating_ip(module, nova, server)
else:
return server
# this may look redundant, but if there is now a
# floating IP, then it needs to be obtained from
# a recent server object if the above code path exec'd
try:
server = nova.servers.get(server.id)
except Exception, e:
module.fail_json(msg = "Error in getting info from instance: %s " % e.message)
return server
def _get_image_id(module, nova):
if module.params['image_name']:
for image in nova.images.list():
if (module.params['image_name'] in image.name and (
not module.params['image_exclude']
or module.params['image_exclude'] not in image.name)):
return image.id
module.fail_json(msg = "Error finding image id from name(%s)" % module.params['image_name'])
return module.params['image_id']
def _get_flavor_id(module, nova):
if module.params['flavor_ram']:
for flavor in sorted(nova.flavors.list(), key=operator.attrgetter('ram')):
if (flavor.ram >= module.params['flavor_ram'] and
(not module.params['flavor_include'] or module.params['flavor_include'] in flavor.name)):
return flavor.id
module.fail_json(msg = "Error finding flavor with %sMB of RAM" % module.params['flavor_ram'])
return module.params['flavor_id']
def _create_server(module, nova):
image_id = _get_image_id(module, nova)
flavor_id = _get_flavor_id(module, nova)
bootargs = [module.params['name'], image_id, flavor_id]
bootkwargs = {
'nics' : module.params['nics'],
'meta' : module.params['meta'],
'security_groups': module.params['security_groups'].split(','),
#userdata is unhyphenated in novaclient, but hyphenated here for consistency with the ec2 module:
'userdata': module.params['user_data'],
'config_drive': module.params['config_drive'],
}
for optional_param in ('region_name', 'key_name', 'availability_zone', 'scheduler_hints'):
if module.params[optional_param]:
bootkwargs[optional_param] = module.params[optional_param]
try:
server = nova.servers.create(*bootargs, **bootkwargs)
server = nova.servers.get(server.id)
except Exception, e:
module.fail_json( msg = "Error in creating instance: %s " % e.message)
if module.params['wait'] == 'yes':
expire = time.time() + int(module.params['wait_for'])
while time.time() < expire:
try:
server = nova.servers.get(server.id)
except Exception, e:
module.fail_json( msg = "Error in getting info from instance: %s" % e.message)
if server.status == 'ACTIVE':
server = _add_floating_ip(module, nova, server)
private = openstack_find_nova_addresses(getattr(server, 'addresses'), 'fixed', 'private')
public = openstack_find_nova_addresses(getattr(server, 'addresses'), 'floating', 'public')
# now exit with info
module.exit_json(changed = True, id = server.id, private_ip=''.join(private), public_ip=''.join(public), status = server.status, info = server._info)
if server.status == 'ERROR':
module.fail_json(msg = "Error in creating the server, please check logs")
time.sleep(2)
module.fail_json(msg = "Timeout waiting for the server to come up.. Please check manually")
if server.status == 'ERROR':
module.fail_json(msg = "Error in creating the server.. Please check manually")
private = openstack_find_nova_addresses(getattr(server, 'addresses'), 'fixed', 'private')
public = openstack_find_nova_addresses(getattr(server, 'addresses'), 'floating', 'public')
    module.exit_json(changed = True, id = server.id, private_ip=''.join(private), public_ip=''.join(public), status = server.status, info = server._info)
def _delete_floating_ip_list(module, nova, server, extra_ips):
for ip in extra_ips:
nova.servers.remove_floating_ip(server=server.id, address=ip)
def _check_floating_ips(module, nova, server):
changed = False
if module.params['floating_ip_pools'] or module.params['floating_ips'] or module.params['auto_floating_ip']:
ips = openstack_find_nova_addresses(server.addresses, 'floating')
if not ips:
# If we're configured to have a floating but we don't have one,
# let's add one
server = _add_floating_ip(module, nova, server)
changed = True
elif module.params['floating_ips']:
# we were configured to have specific ips, let's make sure we have
# those
missing_ips = []
for ip in module.params['floating_ips']:
if ip not in ips:
missing_ips.append(ip)
if missing_ips:
                _add_floating_ip_list(module, server, missing_ips)
changed = True
extra_ips = []
for ip in ips:
if ip not in module.params['floating_ips']:
extra_ips.append(ip)
if extra_ips:
                _delete_floating_ip_list(module, nova, server, extra_ips)
changed = True
return (changed, server)
def _get_server_state(module, nova):
server = None
try:
servers = nova.servers.list(True, {'name': module.params['name']})
if servers:
# the {'name': module.params['name']} will also return servers
# with names that partially match the server name, so we have to
# strictly filter here
servers = [x for x in servers if x.name == module.params['name']]
if servers:
server = servers[0]
except Exception, e:
module.fail_json(msg = "Error in getting the server list: %s" % e.message)
if server and module.params['state'] == 'present':
if server.status != 'ACTIVE':
module.fail_json( msg="The VM is available but not Active. state:" + server.status)
(ip_changed, server) = _check_floating_ips(module, nova, server)
private = openstack_find_nova_addresses(getattr(server, 'addresses'), 'fixed', 'private')
public = openstack_find_nova_addresses(getattr(server, 'addresses'), 'floating', 'public')
module.exit_json(changed = ip_changed, id = server.id, public_ip = public, private_ip = private, info = server._info)
if server and module.params['state'] == 'absent':
return True
if module.params['state'] == 'absent':
module.exit_json(changed = False, result = "not present")
return True
def main():
argument_spec = openstack_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
image_id = dict(default=None),
image_name = dict(default=None),
image_exclude = dict(default='(deprecated)'),
flavor_id = dict(default=1),
flavor_ram = dict(default=None, type='int'),
flavor_include = dict(default=None),
key_name = dict(default=None),
security_groups = dict(default='default'),
nics = dict(default=None),
meta = dict(default=None),
wait = dict(default='yes', choices=['yes', 'no']),
wait_for = dict(default=180),
state = dict(default='present', choices=['absent', 'present']),
user_data = dict(default=None),
config_drive = dict(default=False, type='bool'),
auto_floating_ip = dict(default=False, type='bool'),
floating_ips = dict(default=None),
floating_ip_pools = dict(default=None),
scheduler_hints = dict(default=None),
))
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[
['auto_floating_ip','floating_ips'],
['auto_floating_ip','floating_ip_pools'],
['floating_ips','floating_ip_pools'],
['image_id','image_name'],
['flavor_id','flavor_ram'],
],
)
if not HAS_NOVACLIENT:
module.fail_json(msg='python-novaclient is required for this module')
nova = nova_client.Client(module.params['login_username'],
module.params['login_password'],
module.params['login_tenant_name'],
module.params['auth_url'],
region_name=module.params['region_name'],
service_type='compute')
try:
nova.authenticate()
except exceptions.Unauthorized, e:
module.fail_json(msg = "Invalid OpenStack Nova credentials.: %s" % e.message)
except exceptions.AuthorizationFailure, e:
module.fail_json(msg = "Unable to authorize user: %s" % e.message)
if module.params['state'] == 'present':
if not module.params['image_id'] and not module.params['image_name']:
module.fail_json( msg = "Parameter 'image_id' or `image_name` is required if state == 'present'")
else:
_get_server_state(module, nova)
_create_server(module, nova)
if module.params['state'] == 'absent':
_get_server_state(module, nova)
_delete_server(module, nova)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 | -3,246,284,865,714,475,000 | 7,559,814,215,558,358,000 | 36.462437 | 268 | 0.610695 | false |
amosonn/distributed | distributed/diagnostics/plugin.py | 2 | 1779 | from __future__ import print_function, division, absolute_import
import logging
logger = logging.getLogger(__name__)
class SchedulerPlugin(object):
""" Interface to extend the Scheduler
The scheduler operates by triggering and responding to events like
``task_finished``, ``update_graph``, ``task_erred``, etc..
A plugin enables custom code to run at each of those same events. The
scheduler will run the analogous methods on this class when each event is
triggered. This runs user code within the scheduler thread that can
perform arbitrary operations in synchrony with the scheduler itself.
Plugins are often used for diagnostics and measurement, but have full
access to the scheduler and could in principle affect core scheduling.
To implement a plugin implement some of the methods of this class and add
the plugin to the scheduler with ``Scheduler.add_plugin(myplugin)``.
Examples
--------
>>> class Counter(SchedulerPlugin):
... def __init__(self):
... self.counter = 0
...
... def transition(self, key, start, finish, *args, **kwargs):
... if start == 'processing' and finish == 'memory':
... self.counter += 1
...
... def restart(self, scheduler):
... self.counter = 0
>>> c = Counter()
>>> scheduler.add_plugin(c) # doctest: +SKIP
"""
def update_graph(self, scheduler, dsk=None, keys=None,
restrictions=None, **kwargs):
""" Run when a new graph / tasks enter the scheduler """
pass
def restart(self, scheduler, **kwargs):
""" Run when the scheduler restarts itself """
pass
def transition(self, key, start, finish, *args, **kwargs):
pass
| bsd-3-clause | 896,861,195,776,210,700 | 6,939,237,330,055,789,000 | 33.211538 | 77 | 0.63575 | false |
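The plugin interface in the row above only requires overriding the hooks of interest. As a further illustration in the same style as the docstring's `Counter` example (the class name and attribute here are chosen for illustration), a plugin that counts tasks entering the `erred` state could look like this:

```python
class ErredCounter(SchedulerPlugin):
    """Count tasks that transition into the 'erred' state."""
    def __init__(self):
        self.erred = 0

    def transition(self, key, start, finish, *args, **kwargs):
        if finish == 'erred':
            self.erred += 1

    def restart(self, scheduler, **kwargs):
        self.erred = 0

# Registered the same way as in the docstring: scheduler.add_plugin(ErredCounter())
```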
andrewklau/openshift-tools | openshift/installer/vendored/openshift-ansible-git-2016-04-27/playbooks/common/openshift-cluster/upgrades/library/openshift_upgrade_config.py | 91 | 5294 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
"""Ansible module for modifying OpenShift configs during an upgrade"""
import os
import yaml
DOCUMENTATION = '''
---
module: openshift_upgrade_config
short_description: OpenShift Upgrade Config
author: Jason DeTiberus
requirements: [ ]
'''
EXAMPLES = '''
'''
def modify_api_levels(level_list, remove, ensure, msg_prepend='',
msg_append=''):
""" modify_api_levels """
changed = False
changes = []
if not isinstance(remove, list):
remove = []
if not isinstance(ensure, list):
ensure = []
if not isinstance(level_list, list):
new_list = []
changed = True
changes.append("%s created missing %s" % (msg_prepend, msg_append))
else:
new_list = level_list
for level in remove:
if level in new_list:
new_list.remove(level)
changed = True
changes.append("%s removed %s %s" % (msg_prepend, level, msg_append))
for level in ensure:
if level not in new_list:
new_list.append(level)
changed = True
changes.append("%s added %s %s" % (msg_prepend, level, msg_append))
return {'new_list': new_list, 'changed': changed, 'changes': changes}
def upgrade_master_3_0_to_3_1(ansible_module, config_base, backup):
"""Main upgrade method for 3.0 to 3.1."""
changes = []
# Facts do not get transferred to the hosts where custom modules run,
# need to make some assumptions here.
master_config = os.path.join(config_base, 'master/master-config.yaml')
master_cfg_file = open(master_config, 'r')
config = yaml.safe_load(master_cfg_file.read())
master_cfg_file.close()
# Remove unsupported api versions and ensure supported api versions from
# master config
unsupported_levels = ['v1beta1', 'v1beta2', 'v1beta3']
supported_levels = ['v1']
result = modify_api_levels(config.get('apiLevels'), unsupported_levels,
supported_levels, 'master-config.yaml:', 'from apiLevels')
if result['changed']:
config['apiLevels'] = result['new_list']
changes.append(result['changes'])
if 'kubernetesMasterConfig' in config and 'apiLevels' in config['kubernetesMasterConfig']:
config['kubernetesMasterConfig'].pop('apiLevels')
changes.append('master-config.yaml: removed kubernetesMasterConfig.apiLevels')
# Add masterCA to serviceAccountConfig
if 'serviceAccountConfig' in config and 'masterCA' not in config['serviceAccountConfig']:
config['serviceAccountConfig']['masterCA'] = config['oauthConfig'].get('masterCA', 'ca.crt')
# Add proxyClientInfo to master-config
if 'proxyClientInfo' not in config['kubernetesMasterConfig']:
config['kubernetesMasterConfig']['proxyClientInfo'] = {
'certFile': 'master.proxy-client.crt',
'keyFile': 'master.proxy-client.key'
}
changes.append("master-config.yaml: added proxyClientInfo")
if len(changes) > 0:
if backup:
# TODO: Check success:
ansible_module.backup_local(master_config)
# Write the modified config:
out_file = open(master_config, 'w')
out_file.write(yaml.safe_dump(config, default_flow_style=False))
out_file.close()
return changes
def upgrade_master(ansible_module, config_base, from_version, to_version, backup):
"""Upgrade entry point."""
if from_version == '3.0':
if to_version == '3.1':
return upgrade_master_3_0_to_3_1(ansible_module, config_base, backup)
def main():
""" main """
# disabling pylint errors for global-variable-undefined and invalid-name
# for 'global module' usage, since it is required to use ansible_facts
# pylint: disable=global-variable-undefined, invalid-name,
# redefined-outer-name
global module
module = AnsibleModule(
argument_spec=dict(
config_base=dict(required=True),
from_version=dict(required=True, choices=['3.0']),
to_version=dict(required=True, choices=['3.1']),
role=dict(required=True, choices=['master']),
backup=dict(required=False, default=True, type='bool')
),
supports_check_mode=True,
)
from_version = module.params['from_version']
to_version = module.params['to_version']
role = module.params['role']
backup = module.params['backup']
config_base = module.params['config_base']
try:
changes = []
if role == 'master':
changes = upgrade_master(module, config_base, from_version,
to_version, backup)
changed = len(changes) > 0
return module.exit_json(changed=changed, changes=changes)
# ignore broad-except error to avoid stack trace to ansible user
# pylint: disable=broad-except
except Exception, e:
return module.fail_json(msg=str(e))
# ignore pylint errors related to the module_utils import
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| apache-2.0 | -6,722,433,923,640,206,000 | 4,855,231,476,481,394,000 | 32.506329 | 100 | 0.630525 | false |
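`modify_api_levels` in the row above is a pure helper, so its behavior is easy to show in isolation. The input values below are made up for illustration and mirror the 3.0-to-3.1 upgrade path handled by the module:

```python
result = modify_api_levels(['v1beta3', 'v1'],
                           remove=['v1beta1', 'v1beta2', 'v1beta3'],
                           ensure=['v1'],
                           msg_prepend='master-config.yaml:',
                           msg_append='from apiLevels')
# result == {'new_list': ['v1'],
#            'changed': True,
#            'changes': ['master-config.yaml: removed v1beta3 from apiLevels']}
```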
ddasilva/numpy | numpy/distutils/command/build_scripts.py | 264 | 1731 | """ Modified version of build_scripts that handles building scripts from functions.
"""
from __future__ import division, absolute_import, print_function
from distutils.command.build_scripts import build_scripts as old_build_scripts
from numpy.distutils import log
from numpy.distutils.misc_util import is_string
class build_scripts(old_build_scripts):
def generate_scripts(self, scripts):
new_scripts = []
func_scripts = []
for script in scripts:
if is_string(script):
new_scripts.append(script)
else:
func_scripts.append(script)
if not func_scripts:
return new_scripts
build_dir = self.build_dir
self.mkpath(build_dir)
for func in func_scripts:
script = func(build_dir)
if not script:
continue
if is_string(script):
log.info(" adding '%s' to scripts" % (script,))
new_scripts.append(script)
else:
[log.info(" adding '%s' to scripts" % (s,)) for s in script]
new_scripts.extend(list(script))
return new_scripts
def run (self):
if not self.scripts:
return
self.scripts = self.generate_scripts(self.scripts)
# Now make sure that the distribution object has this list of scripts.
# setuptools' develop command requires that this be a list of filenames,
# not functions.
self.distribution.scripts = self.scripts
return old_build_scripts.run(self)
def get_source_files(self):
from numpy.distutils.misc_util import get_script_files
return get_script_files(self.scripts)
| bsd-3-clause | 995,148,139,395,684,100 | -2,364,483,354,786,778,000 | 32.941176 | 83 | 0.608319 | false |
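The `build_scripts` command in the row above accepts callables as well as filenames in `self.scripts`: a function is called with the build directory and should return a filename, a list of filenames, or a false value. A hedged sketch of such a generator function follows; the file name and contents are invented for illustration, not taken from numpy.

```python
import os

def make_wrapper_script(build_dir):
    """Write a small script into build_dir and return its path."""
    path = os.path.join(build_dir, 'example-wrapper')
    with open(path, 'w') as f:
        f.write('#!/usr/bin/env python\n')
        f.write('print("hello from a generated script")\n')
    return path

# Hypothetical setup() usage: scripts=[make_wrapper_script, 'existing_script.py']
```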
LockScreen/Backend | venv/lib/python2.7/site-packages/boto/codedeploy/layer1.py | 135 | 40600 | # Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import json
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.codedeploy import exceptions
class CodeDeployConnection(AWSQueryConnection):
"""
AWS CodeDeploy **Overview**
This is the AWS CodeDeploy API Reference. This guide provides
descriptions of the AWS CodeDeploy APIs. For additional
information, see the `AWS CodeDeploy User Guide`_.
**Using the APIs**
You can use the AWS CodeDeploy APIs to work with the following
items:
+ Applications , which are unique identifiers that AWS CodeDeploy
uses to ensure that the correct combinations of revisions,
deployment configurations, and deployment groups are being
referenced during deployments. You can work with applications by
calling CreateApplication, DeleteApplication, GetApplication,
ListApplications, BatchGetApplications, and UpdateApplication to
create, delete, and get information about applications, and to
change information about an application, respectively.
+ Deployment configurations , which are sets of deployment rules
and deployment success and failure conditions that AWS CodeDeploy
uses during deployments. You can work with deployment
configurations by calling CreateDeploymentConfig,
DeleteDeploymentConfig, GetDeploymentConfig, and
ListDeploymentConfigs to create, delete, and get information about
deployment configurations, respectively.
+ Deployment groups , which represent groups of Amazon EC2
instances to which application revisions can be deployed. You can
work with deployment groups by calling CreateDeploymentGroup,
DeleteDeploymentGroup, GetDeploymentGroup, ListDeploymentGroups,
and UpdateDeploymentGroup to create, delete, and get information
about single and multiple deployment groups, and to change
information about a deployment group, respectively.
+ Deployment instances (also known simply as instances ), which
represent Amazon EC2 instances to which application revisions are
deployed. Deployment instances are identified by their Amazon EC2
tags or Auto Scaling group names. Deployment instances belong to
deployment groups. You can work with deployment instances by
calling GetDeploymentInstance and ListDeploymentInstances to get
information about single and multiple deployment instances,
respectively.
+ Deployments , which represent the process of deploying revisions
to deployment groups. You can work with deployments by calling
CreateDeployment, GetDeployment, ListDeployments,
BatchGetDeployments, and StopDeployment to create and get
information about deployments, and to stop a deployment,
respectively.
+ Application revisions (also known simply as revisions ), which
are archive files that are stored in Amazon S3 buckets or GitHub
repositories. These revisions contain source content (such as
source code, web pages, executable files, any deployment scripts,
and similar) along with an Application Specification file (AppSpec
file). (The AppSpec file is unique to AWS CodeDeploy; it defines a
series of deployment actions that you want AWS CodeDeploy to
execute.) An application revision is uniquely identified by its
Amazon S3 object key and its ETag, version, or both. Application
revisions are deployed to deployment groups. You can work with
application revisions by calling GetApplicationRevision,
ListApplicationRevisions, and RegisterApplicationRevision to get
information about application revisions and to inform AWS
CodeDeploy about an application revision, respectively.
"""
APIVersion = "2014-10-06"
DefaultRegionName = "us-east-1"
DefaultRegionEndpoint = "codedeploy.us-east-1.amazonaws.com"
ServiceName = "codedeploy"
TargetPrefix = "CodeDeploy_20141006"
ResponseError = JSONResponseError
_faults = {
"InvalidDeploymentIdException": exceptions.InvalidDeploymentIdException,
"InvalidDeploymentGroupNameException": exceptions.InvalidDeploymentGroupNameException,
"DeploymentConfigAlreadyExistsException": exceptions.DeploymentConfigAlreadyExistsException,
"InvalidRoleException": exceptions.InvalidRoleException,
"RoleRequiredException": exceptions.RoleRequiredException,
"DeploymentGroupAlreadyExistsException": exceptions.DeploymentGroupAlreadyExistsException,
"DeploymentConfigLimitExceededException": exceptions.DeploymentConfigLimitExceededException,
"InvalidNextTokenException": exceptions.InvalidNextTokenException,
"InvalidDeploymentConfigNameException": exceptions.InvalidDeploymentConfigNameException,
"InvalidSortByException": exceptions.InvalidSortByException,
"InstanceDoesNotExistException": exceptions.InstanceDoesNotExistException,
"InvalidMinimumHealthyHostValueException": exceptions.InvalidMinimumHealthyHostValueException,
"ApplicationLimitExceededException": exceptions.ApplicationLimitExceededException,
"ApplicationNameRequiredException": exceptions.ApplicationNameRequiredException,
"InvalidEC2TagException": exceptions.InvalidEC2TagException,
"DeploymentDoesNotExistException": exceptions.DeploymentDoesNotExistException,
"DeploymentLimitExceededException": exceptions.DeploymentLimitExceededException,
"InvalidInstanceStatusException": exceptions.InvalidInstanceStatusException,
"RevisionRequiredException": exceptions.RevisionRequiredException,
"InvalidBucketNameFilterException": exceptions.InvalidBucketNameFilterException,
"DeploymentGroupLimitExceededException": exceptions.DeploymentGroupLimitExceededException,
"DeploymentGroupDoesNotExistException": exceptions.DeploymentGroupDoesNotExistException,
"DeploymentConfigNameRequiredException": exceptions.DeploymentConfigNameRequiredException,
"DeploymentAlreadyCompletedException": exceptions.DeploymentAlreadyCompletedException,
"RevisionDoesNotExistException": exceptions.RevisionDoesNotExistException,
"DeploymentGroupNameRequiredException": exceptions.DeploymentGroupNameRequiredException,
"DeploymentIdRequiredException": exceptions.DeploymentIdRequiredException,
"DeploymentConfigDoesNotExistException": exceptions.DeploymentConfigDoesNotExistException,
"BucketNameFilterRequiredException": exceptions.BucketNameFilterRequiredException,
"InvalidTimeRangeException": exceptions.InvalidTimeRangeException,
"ApplicationDoesNotExistException": exceptions.ApplicationDoesNotExistException,
"InvalidRevisionException": exceptions.InvalidRevisionException,
"InvalidSortOrderException": exceptions.InvalidSortOrderException,
"InvalidOperationException": exceptions.InvalidOperationException,
"InvalidAutoScalingGroupException": exceptions.InvalidAutoScalingGroupException,
"InvalidApplicationNameException": exceptions.InvalidApplicationNameException,
"DescriptionTooLongException": exceptions.DescriptionTooLongException,
"ApplicationAlreadyExistsException": exceptions.ApplicationAlreadyExistsException,
"InvalidDeployedStateFilterException": exceptions.InvalidDeployedStateFilterException,
"DeploymentNotStartedException": exceptions.DeploymentNotStartedException,
"DeploymentConfigInUseException": exceptions.DeploymentConfigInUseException,
"InstanceIdRequiredException": exceptions.InstanceIdRequiredException,
"InvalidKeyPrefixFilterException": exceptions.InvalidKeyPrefixFilterException,
"InvalidDeploymentStatusException": exceptions.InvalidDeploymentStatusException,
}
def __init__(self, **kwargs):
region = kwargs.pop('region', None)
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
if 'host' not in kwargs or kwargs['host'] is None:
kwargs['host'] = region.endpoint
super(CodeDeployConnection, self).__init__(**kwargs)
self.region = region
def _required_auth_capability(self):
return ['hmac-v4']
def batch_get_applications(self, application_names=None):
"""
Gets information about one or more applications.
:type application_names: list
:param application_names: A list of application names, with multiple
application names separated by spaces.
"""
params = {}
if application_names is not None:
params['applicationNames'] = application_names
return self.make_request(action='BatchGetApplications',
body=json.dumps(params))
def batch_get_deployments(self, deployment_ids=None):
"""
Gets information about one or more deployments.
:type deployment_ids: list
:param deployment_ids: A list of deployment IDs, with multiple
deployment IDs separated by spaces.
"""
params = {}
if deployment_ids is not None:
params['deploymentIds'] = deployment_ids
return self.make_request(action='BatchGetDeployments',
body=json.dumps(params))
def create_application(self, application_name):
"""
Creates a new application.
:type application_name: string
:param application_name: The name of the application. This name must be
unique within the AWS user account.
"""
params = {'applicationName': application_name, }
return self.make_request(action='CreateApplication',
body=json.dumps(params))
def create_deployment(self, application_name, deployment_group_name=None,
revision=None, deployment_config_name=None,
description=None,
ignore_application_stop_failures=None):
"""
Deploys an application revision to the specified deployment
group.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type deployment_group_name: string
:param deployment_group_name: The deployment group's name.
:type revision: dict
:param revision: The type of revision to deploy, along with information
about the revision's location.
:type deployment_config_name: string
:param deployment_config_name: The name of an existing deployment
configuration within the AWS user account.
If not specified, the value configured in the deployment group will be
used as the default. If the deployment group does not have a
deployment configuration associated with it, then
CodeDeployDefault.OneAtATime will be used by default.
:type description: string
:param description: A comment about the deployment.
:type ignore_application_stop_failures: boolean
:param ignore_application_stop_failures: If set to true, then if the
deployment causes the ApplicationStop deployment lifecycle event to
fail to a specific instance, the deployment will not be considered
to have failed to that instance at that point and will continue on
to the BeforeInstall deployment lifecycle event.
If set to false or not specified, then if the deployment causes the
ApplicationStop deployment lifecycle event to fail to a specific
instance, the deployment will stop to that instance, and the
deployment to that instance will be considered to have failed.
"""
params = {'applicationName': application_name, }
if deployment_group_name is not None:
params['deploymentGroupName'] = deployment_group_name
if revision is not None:
params['revision'] = revision
if deployment_config_name is not None:
params['deploymentConfigName'] = deployment_config_name
if description is not None:
params['description'] = description
if ignore_application_stop_failures is not None:
params['ignoreApplicationStopFailures'] = ignore_application_stop_failures
return self.make_request(action='CreateDeployment',
body=json.dumps(params))
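    # A hedged usage sketch for the two calls above; the credentials, names, and
    # revision payload shape are assumptions for illustration, not values defined
    # in this module:
    #
    #   conn = CodeDeployConnection(aws_access_key_id='...',
    #                               aws_secret_access_key='...')
    #   conn.create_application('DemoApp')
    #   conn.create_deployment('DemoApp',
    #                          deployment_group_name='DemoFleet',
    #                          revision={'revisionType': 'S3',
    #                                    's3Location': {'bucket': 'demo-bucket',
    #                                                   'key': 'app.zip',
    #                                                   'bundleType': 'zip'}})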
def create_deployment_config(self, deployment_config_name,
minimum_healthy_hosts=None):
"""
Creates a new deployment configuration.
:type deployment_config_name: string
:param deployment_config_name: The name of the deployment configuration
to create.
:type minimum_healthy_hosts: dict
:param minimum_healthy_hosts: The minimum number of healthy instances
that should be available at any time during the deployment. There
are two parameters expected in the input: type and value.
The type parameter takes either of the following values:
+ HOST_COUNT: The value parameter represents the minimum number of
healthy instances, as an absolute value.
+ FLEET_PERCENT: The value parameter represents the minimum number of
healthy instances, as a percentage of the total number of instances
in the deployment. If you specify FLEET_PERCENT, then at the start
of the deployment AWS CodeDeploy converts the percentage to the
equivalent number of instances and rounds fractional instances up.
The value parameter takes an integer.
For example, to set a minimum of 95% healthy instances, specify a type
of FLEET_PERCENT and a value of 95.
"""
params = {'deploymentConfigName': deployment_config_name, }
if minimum_healthy_hosts is not None:
params['minimumHealthyHosts'] = minimum_healthy_hosts
return self.make_request(action='CreateDeploymentConfig',
body=json.dumps(params))
def create_deployment_group(self, application_name,
deployment_group_name,
deployment_config_name=None,
ec_2_tag_filters=None,
auto_scaling_groups=None,
service_role_arn=None):
"""
Creates a new deployment group for application revisions to be
deployed to.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type deployment_group_name: string
:param deployment_group_name: The name of an existing deployment group
for the specified application.
:type deployment_config_name: string
:param deployment_config_name: If specified, the deployment
configuration name must be one of the predefined values, or it can
be a custom deployment configuration:
+ CodeDeployDefault.AllAtOnce deploys an application revision to up to
all of the Amazon EC2 instances at once. The overall deployment
succeeds if the application revision deploys to at least one of the
instances. The overall deployment fails after the application
revision fails to deploy to all of the instances. For example, for
9 instances, deploy to up to all 9 instances at once. The overall
deployment succeeds if any of the 9 instances is successfully
deployed to, and it fails if all 9 instances fail to be deployed
to.
+ CodeDeployDefault.HalfAtATime deploys to up to half of the instances
at a time (with fractions rounded down). The overall deployment
succeeds if the application revision deploys to at least half of
the instances (with fractions rounded up); otherwise, the
deployment fails. For example, for 9 instances, deploy to up to 4
instances at a time. The overall deployment succeeds if 5 or more
instances are successfully deployed to; otherwise, the deployment
fails. Note that the deployment may successfully deploy to some
instances, even if the overall deployment fails.
+ CodeDeployDefault.OneAtATime deploys the application revision to only
one of the instances at a time. The overall deployment succeeds if
the application revision deploys to all of the instances. The
overall deployment fails after the application revision first fails
to deploy to any one instance. For example, for 9 instances, deploy
to one instance at a time. The overall deployment succeeds if all 9
instances are successfully deployed to, and it fails if any of one
of the 9 instances fail to be deployed to. Note that the deployment
may successfully deploy to some instances, even if the overall
deployment fails. This is the default deployment configuration if a
configuration isn't specified for either the deployment or the
deployment group.
To create a custom deployment configuration, call the create deployment
configuration operation.
:type ec_2_tag_filters: list
:param ec_2_tag_filters: The Amazon EC2 tags to filter on.
:type auto_scaling_groups: list
:param auto_scaling_groups: A list of associated Auto Scaling groups.
:type service_role_arn: string
:param service_role_arn: A service role ARN that allows AWS CodeDeploy
to act on the user's behalf when interacting with AWS services.
"""
params = {
'applicationName': application_name,
'deploymentGroupName': deployment_group_name,
}
if deployment_config_name is not None:
params['deploymentConfigName'] = deployment_config_name
if ec_2_tag_filters is not None:
params['ec2TagFilters'] = ec_2_tag_filters
if auto_scaling_groups is not None:
params['autoScalingGroups'] = auto_scaling_groups
if service_role_arn is not None:
params['serviceRoleArn'] = service_role_arn
return self.make_request(action='CreateDeploymentGroup',
body=json.dumps(params))
def delete_application(self, application_name):
"""
Deletes an application.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
"""
params = {'applicationName': application_name, }
return self.make_request(action='DeleteApplication',
body=json.dumps(params))
def delete_deployment_config(self, deployment_config_name):
"""
Deletes a deployment configuration.
A deployment configuration cannot be deleted if it is
currently in use. Also, predefined configurations cannot be
deleted.
:type deployment_config_name: string
:param deployment_config_name: The name of an existing deployment
configuration within the AWS user account.
"""
params = {'deploymentConfigName': deployment_config_name, }
return self.make_request(action='DeleteDeploymentConfig',
body=json.dumps(params))
def delete_deployment_group(self, application_name,
deployment_group_name):
"""
Deletes a deployment group.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type deployment_group_name: string
:param deployment_group_name: The name of an existing deployment group
for the specified application.
"""
params = {
'applicationName': application_name,
'deploymentGroupName': deployment_group_name,
}
return self.make_request(action='DeleteDeploymentGroup',
body=json.dumps(params))
def get_application(self, application_name):
"""
Gets information about an application.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
"""
params = {'applicationName': application_name, }
return self.make_request(action='GetApplication',
body=json.dumps(params))
def get_application_revision(self, application_name, revision):
"""
Gets information about an application revision.
:type application_name: string
:param application_name: The name of the application that corresponds
to the revision.
:type revision: dict
:param revision: Information about the application revision to get,
including the revision's type and its location.
"""
params = {
'applicationName': application_name,
'revision': revision,
}
return self.make_request(action='GetApplicationRevision',
body=json.dumps(params))
def get_deployment(self, deployment_id):
"""
Gets information about a deployment.
:type deployment_id: string
:param deployment_id: An existing deployment ID within the AWS user
account.
"""
params = {'deploymentId': deployment_id, }
return self.make_request(action='GetDeployment',
body=json.dumps(params))
def get_deployment_config(self, deployment_config_name):
"""
Gets information about a deployment configuration.
:type deployment_config_name: string
:param deployment_config_name: The name of an existing deployment
configuration within the AWS user account.
"""
params = {'deploymentConfigName': deployment_config_name, }
return self.make_request(action='GetDeploymentConfig',
body=json.dumps(params))
def get_deployment_group(self, application_name, deployment_group_name):
"""
Gets information about a deployment group.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type deployment_group_name: string
:param deployment_group_name: The name of an existing deployment group
for the specified application.
"""
params = {
'applicationName': application_name,
'deploymentGroupName': deployment_group_name,
}
return self.make_request(action='GetDeploymentGroup',
body=json.dumps(params))
def get_deployment_instance(self, deployment_id, instance_id):
"""
Gets information about an Amazon EC2 instance as part of a
deployment.
:type deployment_id: string
:param deployment_id: The unique ID of a deployment.
:type instance_id: string
:param instance_id: The unique ID of an Amazon EC2 instance in the
deployment's deployment group.
"""
params = {
'deploymentId': deployment_id,
'instanceId': instance_id,
}
return self.make_request(action='GetDeploymentInstance',
body=json.dumps(params))
def list_application_revisions(self, application_name, sort_by=None,
sort_order=None, s_3_bucket=None,
s_3_key_prefix=None, deployed=None,
next_token=None):
"""
Lists information about revisions for an application.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type sort_by: string
:param sort_by: The column name to sort the list results by:
+ registerTime: Sort the list results by when the revisions were
registered with AWS CodeDeploy.
+ firstUsedTime: Sort the list results by when the revisions were first
used by in a deployment.
+ lastUsedTime: Sort the list results by when the revisions were last
used in a deployment.
If not specified or set to null, the results will be returned in an
arbitrary order.
:type sort_order: string
:param sort_order: The order to sort the list results by:
+ ascending: Sort the list results in ascending order.
+ descending: Sort the list results in descending order.
If not specified, the results will be sorted in ascending order.
If set to null, the results will be sorted in an arbitrary order.
:type s_3_bucket: string
:param s_3_bucket: A specific Amazon S3 bucket name to limit the search
for revisions.
If set to null, then all of the user's buckets will be searched.
:type s_3_key_prefix: string
:param s_3_key_prefix: A specific key prefix for the set of Amazon S3
objects to limit the search for revisions.
:type deployed: string
:param deployed:
Whether to list revisions based on whether the revision is the target
        revision of a deployment group:
+ include: List revisions that are target revisions of a deployment
group.
+ exclude: Do not list revisions that are target revisions of a
deployment group.
+ ignore: List all revisions, regardless of whether they are target
revisions of a deployment group.
:type next_token: string
:param next_token: An identifier that was returned from the previous
list application revisions call, which can be used to return the
            next set of application revisions in the list.
"""
params = {'applicationName': application_name, }
if sort_by is not None:
params['sortBy'] = sort_by
if sort_order is not None:
params['sortOrder'] = sort_order
if s_3_bucket is not None:
params['s3Bucket'] = s_3_bucket
if s_3_key_prefix is not None:
params['s3KeyPrefix'] = s_3_key_prefix
if deployed is not None:
params['deployed'] = deployed
if next_token is not None:
params['nextToken'] = next_token
return self.make_request(action='ListApplicationRevisions',
body=json.dumps(params))
def list_applications(self, next_token=None):
"""
Lists the applications registered within the AWS user account.
:type next_token: string
:param next_token: An identifier that was returned from the previous
list applications call, which can be used to return the next set of
applications in the list.
"""
params = {}
if next_token is not None:
params['nextToken'] = next_token
return self.make_request(action='ListApplications',
body=json.dumps(params))
def list_deployment_configs(self, next_token=None):
"""
Lists the deployment configurations within the AWS user
account.
:type next_token: string
:param next_token: An identifier that was returned from the previous
list deployment configurations call, which can be used to return
the next set of deployment configurations in the list.
"""
params = {}
if next_token is not None:
params['nextToken'] = next_token
return self.make_request(action='ListDeploymentConfigs',
body=json.dumps(params))
def list_deployment_groups(self, application_name, next_token=None):
"""
Lists the deployment groups for an application registered
within the AWS user account.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type next_token: string
:param next_token: An identifier that was returned from the previous
list deployment groups call, which can be used to return the next
set of deployment groups in the list.
"""
params = {'applicationName': application_name, }
if next_token is not None:
params['nextToken'] = next_token
return self.make_request(action='ListDeploymentGroups',
body=json.dumps(params))
def list_deployment_instances(self, deployment_id, next_token=None,
instance_status_filter=None):
"""
Lists the Amazon EC2 instances for a deployment within the AWS
user account.
:type deployment_id: string
:param deployment_id: The unique ID of a deployment.
:type next_token: string
:param next_token: An identifier that was returned from the previous
list deployment instances call, which can be used to return the
next set of deployment instances in the list.
:type instance_status_filter: list
:param instance_status_filter:
A subset of instances to list, by status:
+ Pending: Include in the resulting list those instances with pending
deployments.
+ InProgress: Include in the resulting list those instances with in-
progress deployments.
+ Succeeded: Include in the resulting list those instances with
succeeded deployments.
+ Failed: Include in the resulting list those instances with failed
deployments.
+ Skipped: Include in the resulting list those instances with skipped
deployments.
+ Unknown: Include in the resulting list those instances with
deployments in an unknown state.
"""
params = {'deploymentId': deployment_id, }
if next_token is not None:
params['nextToken'] = next_token
if instance_status_filter is not None:
params['instanceStatusFilter'] = instance_status_filter
return self.make_request(action='ListDeploymentInstances',
body=json.dumps(params))
def list_deployments(self, application_name=None,
deployment_group_name=None,
include_only_statuses=None, create_time_range=None,
next_token=None):
"""
Lists the deployments under a deployment group for an
application registered within the AWS user account.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type deployment_group_name: string
:param deployment_group_name: The name of an existing deployment group
for the specified application.
:type include_only_statuses: list
:param include_only_statuses: A subset of deployments to list, by
status:
+ Created: Include in the resulting list created deployments.
+ Queued: Include in the resulting list queued deployments.
+ In Progress: Include in the resulting list in-progress deployments.
+ Succeeded: Include in the resulting list succeeded deployments.
+ Failed: Include in the resulting list failed deployments.
+ Aborted: Include in the resulting list aborted deployments.
:type create_time_range: dict
:param create_time_range: A deployment creation start- and end-time
range for returning a subset of the list of deployments.
:type next_token: string
:param next_token: An identifier that was returned from the previous
list deployments call, which can be used to return the next set of
deployments in the list.
"""
params = {}
if application_name is not None:
params['applicationName'] = application_name
if deployment_group_name is not None:
params['deploymentGroupName'] = deployment_group_name
if include_only_statuses is not None:
params['includeOnlyStatuses'] = include_only_statuses
if create_time_range is not None:
params['createTimeRange'] = create_time_range
if next_token is not None:
params['nextToken'] = next_token
return self.make_request(action='ListDeployments',
body=json.dumps(params))
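    # Illustrative usage sketch (the connection object and the application and
    # deployment group names below are hypothetical):
    #   conn.list_deployments(application_name='MyApp',
    #                         deployment_group_name='Prod',
    #                         include_only_statuses=['Succeeded'])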
def register_application_revision(self, application_name, revision,
description=None):
"""
Registers with AWS CodeDeploy a revision for the specified
application.
:type application_name: string
:param application_name: The name of an existing AWS CodeDeploy
application within the AWS user account.
:type description: string
:param description: A comment about the revision.
:type revision: dict
:param revision: Information about the application revision to
register, including the revision's type and its location.
"""
params = {
'applicationName': application_name,
'revision': revision,
}
if description is not None:
params['description'] = description
return self.make_request(action='RegisterApplicationRevision',
body=json.dumps(params))
def stop_deployment(self, deployment_id):
"""
Attempts to stop an ongoing deployment.
:type deployment_id: string
:param deployment_id: The unique ID of a deployment.
"""
params = {'deploymentId': deployment_id, }
return self.make_request(action='StopDeployment',
body=json.dumps(params))
def update_application(self, application_name=None,
new_application_name=None):
"""
Changes an existing application's name.
:type application_name: string
:param application_name: The current name of the application that you
want to change.
:type new_application_name: string
:param new_application_name: The new name that you want to change the
application to.
"""
params = {}
if application_name is not None:
params['applicationName'] = application_name
if new_application_name is not None:
params['newApplicationName'] = new_application_name
return self.make_request(action='UpdateApplication',
body=json.dumps(params))
def update_deployment_group(self, application_name,
current_deployment_group_name,
new_deployment_group_name=None,
deployment_config_name=None,
ec_2_tag_filters=None,
auto_scaling_groups=None,
service_role_arn=None):
"""
Changes information about an existing deployment group.
:type application_name: string
:param application_name: The application name corresponding to the
deployment group to update.
:type current_deployment_group_name: string
:param current_deployment_group_name: The current name of the existing
deployment group.
:type new_deployment_group_name: string
:param new_deployment_group_name: The new name of the deployment group,
if you want to change it.
:type deployment_config_name: string
:param deployment_config_name: The replacement deployment configuration
name to use, if you want to change it.
:type ec_2_tag_filters: list
:param ec_2_tag_filters: The replacement set of Amazon EC2 tags to
filter on, if you want to change them.
:type auto_scaling_groups: list
:param auto_scaling_groups: The replacement list of Auto Scaling groups
to be included in the deployment group, if you want to change them.
:type service_role_arn: string
:param service_role_arn: A replacement service role's ARN, if you want
to change it.
"""
params = {
'applicationName': application_name,
'currentDeploymentGroupName': current_deployment_group_name,
}
if new_deployment_group_name is not None:
params['newDeploymentGroupName'] = new_deployment_group_name
if deployment_config_name is not None:
params['deploymentConfigName'] = deployment_config_name
if ec_2_tag_filters is not None:
params['ec2TagFilters'] = ec_2_tag_filters
if auto_scaling_groups is not None:
params['autoScalingGroups'] = auto_scaling_groups
if service_role_arn is not None:
params['serviceRoleArn'] = service_role_arn
return self.make_request(action='UpdateDeploymentGroup',
body=json.dumps(params))
def make_request(self, action, body):
headers = {
'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
'Host': self.region.endpoint,
'Content-Type': 'application/x-amz-json-1.1',
'Content-Length': str(len(body)),
}
http_request = self.build_base_http_request(
method='POST', path='/', auth_path='/', params={},
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
return json.loads(response_body)
else:
json_body = json.loads(response_body)
fault_name = json_body.get('__type', None)
exception_class = self._faults.get(fault_name, self.ResponseError)
raise exception_class(response.status, response.reason,
body=json_body)
| mit | 9,017,657,333,143,805,000 | -5,104,122,495,154,570,000 | 44.16129 | 102 | 0.654384 | false |
Bauble/bauble.api | bauble/routes/auth.py | 1 | 4072 | """
All routes in Bauble use HTTP basic auth.
"""
from datetime import datetime, timedelta
import os
import smtplib
import bottle
from bottle import request
import sqlalchemy as sa
import bauble
import bauble.config as config
import bauble.db as db
import bauble.email as email
from bauble import app, API_ROOT
from bauble.middleware import basic_auth
from bauble.model import User
from bauble.utils import create_unique_token
def create_access_token():
return create_unique_token(), datetime.now() + timedelta(weeks=2)
def create_password_reset_token():
return create_unique_token(), datetime.now() + timedelta(days=1)
@app.get(API_ROOT + "/login")
def login():
auth = request.auth
if not auth:
bottle.abort(401, "No Authorization header.")
username, password = auth
session = db.Session()
try:
user = session.query(User).filter(sa.func.lower(User.email) == username.lower()).first()
if not user or not user.password == password:
bottle.abort(401) # not authorized
user.access_token, user.access_token_expiration = create_access_token()
user.last_accessed = datetime.now()
session.commit()
user_json = user.json()
finally:
session.close()
return user_json
@app.get(API_ROOT + "/logout")
@basic_auth
def logout():
request.user.access_token = None
request.user.access_token_expiration = None
request.session.commit()
@app.post(API_ROOT + "/forgot-password")
def forgot_password():
user_email = request.params.get('email', None)
    if not user_email or '@' not in user_email:
        bottle.abort(400, "Valid email address required")
session = None
try:
session = db.Session()
user = session.query(User)\
.filter(sa.func.lower(User.email) == user_email.lower())\
.first()
if not user:
bottle.abort(422, "Could not get a user with the requested email address")
token, expiration = create_password_reset_token()
user.password_reset_token = token
user.password_reset_token_expiration = expiration
session.commit()
finally:
if session:
session.close()
app_url = config.get("BAUBLE_APP_URL")
mappings = {'token': token, 'email': user_email, 'app_url': app_url}
try:
email.send_template('reset_password.txt', mappings, **{
'to': user_email,
'from': '[email protected]',
'subject': 'Bauble Password Reset'})
except smtplib.SMTPException as exc:
print(exc)
bottle.abort(500, 'Could not send reset password email.')
@app.post(API_ROOT + "/reset-password")
def reset_password():
session = None
user_email = request.json['email']
try:
session = db.Session()
user = session.query(User).filter(sa.func.lower(User.email) == user_email.lower()).first()
if user is None:
            print('user is None')
# TODO: is this the correct status code?
bottle.abort(422, 'A user could be be found with the provided email')
if request.json['token'] != user.password_reset_token or \
(request.json['token'] == user.password_reset_token and user.password_reset_token_expiration < datetime.now()):
# TODO: is this the correct status code?
bottle.abort(422, 'Invalid password reset token')
# TODO: need to set the expiration
user.password_reset_token = None
user.password_reset_token_expiration = None
user.password = request.json['password']
user.access_token, user.access_token_expiration = create_access_token()
        user.last_accessed = datetime.now()
session.commit()
user_json = user.json()
# except Exception as exc:
# print('type(exc): ', type(exc))
# print(exc)
# bottle.abort(400, "Could not get a user with the requested email address")
finally:
if session:
session.close()
return user_json
| bsd-3-clause | 6,051,561,308,636,403,000 | 5,644,107,449,433,881,000 | 29.38806 | 122 | 0.631139 | false |
lepture/terminal | setup.py | 1 | 1369 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
kwargs = {}
if sys.platform == 'win32':
kwargs['install_requires'] = ['colorama']
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import terminal
from email.utils import parseaddr
author, author_email = parseaddr(terminal.__author__)
setup(
name='terminal',
version=terminal.__version__,
author=author,
author_email=author_email,
url=terminal.__homepage__,
packages=['terminal'],
description=terminal.__doc__,
long_description=open('README.rst').read(),
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: Implementation',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
**kwargs
)
| bsd-3-clause | -5,936,193,968,169,972,000 | -7,110,678,823,095,393,000 | 28.76087 | 70 | 0.615778 | false |
masayukig/tempest | tempest/lib/services/volume/v3/backups_client.py | 2 | 5348 | # Copyright 2017 FiberHome Telecommunication Technologies CO.,LTD
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.lib.common import rest_client
from tempest.lib import exceptions as lib_exc
from tempest.lib.services.volume import base_client
class BackupsClient(base_client.BaseClient):
"""Volume V3 Backups client"""
def create_backup(self, **kwargs):
"""Creates a backup of volume.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/block-storage/v3/index.html#create-a-backup
"""
post_body = json.dumps({'backup': kwargs})
resp, body = self.post('backups', post_body)
body = json.loads(body)
self.expected_success(202, resp.status)
return rest_client.ResponseBody(resp, body)
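    # Illustrative usage (the client instance and the volume id shown here are
    # hypothetical):
    #   backups_client.create_backup(volume_id=volume['id'],
    #                                name='nightly-backup')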
def update_backup(self, backup_id, **kwargs):
"""Updates the specified volume backup.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/block-storage/v3/#update-a-backup
"""
put_body = json.dumps({'backup': kwargs})
resp, body = self.put('backups/%s' % backup_id, put_body)
body = json.loads(body)
self.expected_success(200, resp.status)
return rest_client.ResponseBody(resp, body)
def restore_backup(self, backup_id, **kwargs):
"""Restore volume from backup.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/block-storage/v3/index.html#restore-a-backup
"""
post_body = json.dumps({'restore': kwargs})
resp, body = self.post('backups/%s/restore' % (backup_id), post_body)
body = json.loads(body)
self.expected_success(202, resp.status)
return rest_client.ResponseBody(resp, body)
def delete_backup(self, backup_id):
"""Delete a backup of volume."""
resp, body = self.delete('backups/%s' % backup_id)
self.expected_success(202, resp.status)
return rest_client.ResponseBody(resp, body)
def show_backup(self, backup_id):
"""Returns the details of a single backup."""
url = "backups/%s" % backup_id
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return rest_client.ResponseBody(resp, body)
def list_backups(self, detail=False, **params):
"""List all the tenant's backups.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/block-storage/v3/index.html#list-backups-for-project
https://docs.openstack.org/api-ref/block-storage/v3/index.html#list-backups-with-detail
"""
url = "backups"
if detail:
url += "/detail"
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return rest_client.ResponseBody(resp, body)
def export_backup(self, backup_id):
"""Export backup metadata record."""
url = "backups/%s/export_record" % backup_id
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return rest_client.ResponseBody(resp, body)
def import_backup(self, **kwargs):
"""Import backup metadata record.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/block-storage/v3/index.html#import-a-backup
"""
post_body = json.dumps({'backup-record': kwargs})
resp, body = self.post("backups/import_record", post_body)
body = json.loads(body)
self.expected_success(201, resp.status)
return rest_client.ResponseBody(resp, body)
def reset_backup_status(self, backup_id, status):
"""Reset the specified backup's status."""
post_body = json.dumps({'os-reset_status': {"status": status}})
resp, body = self.post('backups/%s/action' % backup_id, post_body)
self.expected_success(202, resp.status)
return rest_client.ResponseBody(resp, body)
def is_resource_deleted(self, id):
try:
self.show_backup(id)
except lib_exc.NotFound:
return True
return False
@property
def resource_type(self):
"""Returns the primary type of resource this client works with."""
return 'backup'
| apache-2.0 | -9,037,778,682,167,763,000 | -7,502,438,826,478,344,000 | 38.323529 | 95 | 0.645662 | false |
AllenDowney/SoftwareSystems | hw04/wave3/thinkdsp.py | 23 | 31996 | """This file contains code used in "Think DSP",
by Allen B. Downey, available from greenteapress.com
Copyright 2013 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import array
import math
import numpy
import random
import scipy
import scipy.fftpack
import scipy.integrate
import scipy.stats
import struct
import subprocess
import thinkplot
from fractions import gcd
from wave import open as open_wave
import matplotlib.pyplot as pyplot
PI2 = math.pi * 2
def random_seed(x):
"""Initialize the random and numpy.random generators.
x: int seed
"""
random.seed(x)
numpy.random.seed(x)
class UnimplementedMethodException(Exception):
"""Exception if someone calls a method that should be overridden."""
class WavFileWriter(object):
"""Writes wav files."""
def __init__(self, filename='sound.wav', framerate=11025):
"""Opens the file and sets parameters.
filename: string
framerate: samples per second
"""
self.filename = filename
self.framerate = framerate
self.nchannels = 1
self.sampwidth = 2
self.bits = self.sampwidth * 8
self.bound = 2**(self.bits-1) - 1
self.fmt = 'h'
self.dtype = numpy.int16
self.fp = open_wave(self.filename, 'w')
self.fp.setnchannels(self.nchannels)
self.fp.setsampwidth(self.sampwidth)
self.fp.setframerate(self.framerate)
def write(self, wave):
"""Writes a wave.
wave: Wave
"""
zs = wave.quantize(self.bound, self.dtype)
self.fp.writeframes(zs.tostring())
def close(self, duration=0):
"""Closes the file.
duration: how many seconds of silence to append
"""
if duration:
self.write(rest(duration))
self.fp.close()
def read_wave(filename='sound.wav'):
"""Reads a wave file.
filename: string
returns: Wave
"""
fp = open_wave(filename, 'r')
nchannels = fp.getnchannels()
nframes = fp.getnframes()
sampwidth = fp.getsampwidth()
framerate = fp.getframerate()
z_str = fp.readframes(nframes)
fp.close()
dtype_map = {1:numpy.int8, 2:numpy.int16}
assert sampwidth in dtype_map
ys = numpy.fromstring(z_str, dtype=dtype_map[sampwidth])
wave = Wave(ys, framerate)
return wave
def play_wave(filename='sound.wav', player='aplay'):
"""Plays a wave file.
filename: string
player: string name of executable that plays wav files
"""
cmd = '%s %s' % (player, filename)
popen = subprocess.Popen(cmd, shell=True)
popen.communicate()
class _SpectrumParent(object):
"""Contains code common to Spectrum and DCT.
"""
@property
def max_freq(self):
return self.framerate / 2.0
@property
def freq_res(self):
return self.max_freq / (len(self.fs) - 1)
def plot(self, low=0, high=None, **options):
"""Plots amplitude vs frequency.
low: int index to start at
high: int index to end at
"""
thinkplot.plot(self.fs[low:high], self.amps[low:high], **options)
def plot_power(self, low=0, high=None, **options):
"""Plots power vs frequency.
low: int index to start at
high: int index to end at
"""
thinkplot.plot(self.fs[low:high], self.power[low:high], **options)
def estimate_slope(self):
"""Runs linear regression on log power vs log frequency.
returns: slope, inter, r2, p, stderr
"""
x = numpy.log(self.fs[1:])
y = numpy.log(self.power[1:])
t = scipy.stats.linregress(x,y)
return t
def peaks(self):
"""Finds the highest peaks and their frequencies.
returns: sorted list of (amplitude, frequency) pairs
"""
t = zip(self.amps, self.fs)
t.sort(reverse=True)
return t
class Spectrum(_SpectrumParent):
"""Represents the spectrum of a signal."""
def __init__(self, hs, framerate):
self.hs = hs
self.framerate = framerate
n = len(hs)
self.fs = numpy.linspace(0, self.max_freq, n)
def __add__(self, other):
if other == 0:
return self
assert self.framerate == other.framerate
hs = self.hs + other.hs
return Spectrum(hs, self.framerate)
__radd__ = __add__
@property
def real(self):
"""Returns the real part of the hs (read-only property)."""
return numpy.real(self.hs)
@property
def imag(self):
"""Returns the imaginary part of the hs (read-only property)."""
return numpy.imag(self.hs)
@property
def amps(self):
"""Returns a sequence of amplitudes (read-only property)."""
return numpy.absolute(self.hs)
@property
def power(self):
"""Returns a sequence of powers (read-only property)."""
return self.amps ** 2
def low_pass(self, cutoff, factor=0):
"""Attenuate frequencies above the cutoff.
cutoff: frequency in Hz
factor: what to multiply the magnitude by
"""
for i in xrange(len(self.hs)):
if self.fs[i] > cutoff:
self.hs[i] *= factor
def high_pass(self, cutoff, factor=0):
"""Attenuate frequencies below the cutoff.
cutoff: frequency in Hz
factor: what to multiply the magnitude by
"""
for i in xrange(len(self.hs)):
if self.fs[i] < cutoff:
self.hs[i] *= factor
def band_stop(self, low_cutoff, high_cutoff, factor=0):
"""Attenuate frequencies between the cutoffs.
low_cutoff: frequency in Hz
high_cutoff: frequency in Hz
factor: what to multiply the magnitude by
"""
for i in xrange(len(self.hs)):
if low_cutoff < self.fs[i] < high_cutoff:
self.hs[i] = 0
def pink_filter(self, beta=1):
"""Apply a filter that would make white noise pink.
beta: exponent of the pink noise
"""
denom = self.fs ** (beta/2.0)
denom[0] = 1
self.hs /= denom
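    # For example, pink_filter(beta=1) divides each component by sqrt(f), so
    # the power spectrum falls off roughly as 1/f, which is what turns white
    # noise into pink noise.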
def angles(self, i):
"""Computes phase angles in radians.
returns: list of phase angles
"""
return numpy.angle(self.hs)
def make_integrated_spectrum(self):
"""Makes an integrated spectrum.
"""
cs = numpy.cumsum(self.power)
cs /= cs[-1]
return IntegratedSpectrum(cs, self.fs)
def make_wave(self):
"""Transforms to the time domain.
returns: Wave
"""
ys = numpy.fft.irfft(self.hs)
return Wave(ys, self.framerate)
class IntegratedSpectrum(object):
"""Represents the integral of a spectrum."""
def __init__(self, cs, fs):
"""Initializes an integrated spectrum:
cs: sequence of cumulative amplitudes
fs: sequence of frequences
"""
self.cs = cs
self.fs = fs
def plot_power(self, low=0, high=None, expo=False, **options):
"""Plots the integrated spectrum.
low: int index to start at
high: int index to end at
"""
cs = self.cs[low:high]
fs = self.fs[low:high]
if expo:
cs = numpy.exp(cs)
thinkplot.Plot(fs, cs, **options)
def estimate_slope(self, low=1, high=-12000):
"""Runs linear regression on log cumulative power vs log frequency.
returns: slope, inter, r2, p, stderr
"""
#print self.fs[low:high]
#print self.cs[low:high]
x = numpy.log(self.fs[low:high])
y = numpy.log(self.cs[low:high])
t = scipy.stats.linregress(x,y)
return t
class Dct(_SpectrumParent):
"""Represents the spectrum of a signal."""
def __init__(self, amps, framerate):
self.amps = amps
self.framerate = framerate
n = len(amps)
self.fs = numpy.arange(n) / float(n) * self.max_freq
def make_wave(self):
"""Transforms to the time domain.
returns: Wave
"""
ys = scipy.fftpack.dct(self.amps, type=3) / 2
return Wave(ys, self.framerate)
class Spectrogram(object):
"""Represents the spectrum of a signal."""
def __init__(self, spec_map, seg_length, window_func=None):
"""Initialize the spectrogram.
spec_map: map from float time to Spectrum
seg_length: number of samples in each segment
window_func: function that computes the window
"""
self.spec_map = spec_map
self.seg_length = seg_length
self.window_func = window_func
def any_spectrum(self):
"""Returns an arbitrary spectrum from the spectrogram."""
return self.spec_map.itervalues().next()
@property
def time_res(self):
"""Time resolution in seconds."""
spectrum = self.any_spectrum()
return float(self.seg_length) / spectrum.framerate
@property
def freq_res(self):
"""Frequency resolution in Hz."""
return self.any_spectrum().freq_res
def times(self):
"""Sorted sequence of times.
returns: sequence of float times in seconds
"""
ts = sorted(self.spec_map.iterkeys())
return ts
def frequencies(self):
"""Sequence of frequencies.
returns: sequence of float freqencies in Hz.
"""
fs = self.any_spectrum().fs
return fs
def plot(self, low=0, high=None, **options):
"""Make a pseudocolor plot.
low: index of the lowest frequency component to plot
high: index of the highest frequency component to plot
"""
ts = self.times()
fs = self.frequencies()[low:high]
# make the array
size = len(fs), len(ts)
array = numpy.zeros(size, dtype=numpy.float)
# copy amplitude from each spectrum into a column of the array
for i, t in enumerate(ts):
spectrum = self.spec_map[t]
array[:,i] = spectrum.amps[low:high]
thinkplot.pcolor(ts, fs, array, **options)
def make_wave(self):
"""Inverts the spectrogram and returns a Wave.
returns: Wave
"""
res = []
for t, spectrum in sorted(self.spec_map.iteritems()):
wave = spectrum.make_wave()
n = len(wave)
if self.window_func:
window = 1 / self.window_func(n)
wave.window(window)
i = int(round(t * wave.framerate))
start = i - n / 2
end = start + n
res.append((start, end, wave))
starts, ends, waves = zip(*res)
low = min(starts)
high = max(ends)
ys = numpy.zeros(high-low, numpy.float)
for start, end, wave in res:
ys[start:end] = wave.ys
return Wave(ys, wave.framerate)
class Wave(object):
"""Represents a discrete-time waveform.
Note: the ys attribute is a "wave array" which is a numpy
array of floats.
"""
def __init__(self, ys, framerate, start=0):
"""Initializes the wave.
ys: wave array
framerate: samples per second
"""
self.ys = ys
self.framerate = framerate
self.start = start
def __len__(self):
return len(self.ys)
@property
def duration(self):
"""Duration (property).
returns: float duration in seconds
"""
return len(self.ys) / float(self.framerate)
def __or__(self, other):
"""Concatenates two waves.
other: Wave
returns: Wave
"""
if self.framerate != other.framerate:
raise ValueError('Wave.__or__: framerates do not agree')
ys = numpy.concatenate((self.ys, other.ys))
return Wave(ys, self.framerate)
def quantize(self, bound, dtype):
"""Maps the waveform to quanta.
bound: maximum amplitude
dtype: numpy data type or string
returns: quantized signal
"""
return quantize(self.ys, bound, dtype)
def apodize(self, denom=20, duration=0.1):
"""Tapers the amplitude at the beginning and end of the signal.
Tapers either the given duration of time or the given
fraction of the total duration, whichever is less.
denom: float fraction of the segment to taper
duration: float duration of the taper in seconds
"""
self.ys = apodize(self.ys, self.framerate, denom, duration)
def hamming(self):
"""Apply a Hamming window to the wave.
"""
self.ys *= numpy.hamming(len(self.ys))
def window(self, window):
"""Apply a window to the wave.
window: sequence of multipliers, same length as self.ys
"""
self.ys *= window
def normalize(self, amp=1.0):
"""Normalizes the signal to the given amplitude.
amp: float amplitude
"""
self.ys = normalize(self.ys, amp=amp)
def unbias(self):
"""Unbiases the signal.
"""
self.ys = unbias(self.ys)
def segment(self, start=0, duration=None):
"""Extracts a segment.
start: float start time in seconds
duration: float duration in seconds
returns: Wave
"""
i = start * self.framerate
if duration is None:
j = None
else:
j = i + duration * self.framerate
ys = self.ys[i:j]
return Wave(ys, self.framerate)
def make_spectrum(self):
"""Computes the spectrum using FFT.
returns: Spectrum
"""
hs = numpy.fft.rfft(self.ys)
return Spectrum(hs, self.framerate)
def make_dct(self):
amps = scipy.fftpack.dct(self.ys, type=2)
return Dct(amps, self.framerate)
def make_spectrogram(self, seg_length, window_func=numpy.hamming):
"""Computes the spectrogram of the wave.
seg_length: number of samples in each segment
window_func: function used to compute the window
returns: Spectrogram
"""
n = len(self.ys)
window = window_func(seg_length)
start, end, step = 0, seg_length, seg_length / 2
spec_map = {}
while end < n:
ys = self.ys[start:end] * window
hs = numpy.fft.rfft(ys)
t = (start + end) / 2.0 / self.framerate
spec_map[t] = Spectrum(hs, self.framerate)
start += step
end += step
return Spectrogram(spec_map, seg_length, window_func)
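    # Illustrative usage with this module's own classes (values are arbitrary):
    #   wave = SinSignal(freq=440).make_wave(duration=1.0)
    #   spectrogram = wave.make_spectrogram(seg_length=512)
    #   spectrogram.plot(high=60)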
def plot(self, **options):
"""Plots the wave.
"""
n = len(self.ys)
ts = numpy.linspace(0, self.duration, n)
thinkplot.plot(ts, self.ys, **options)
def corr(self, other):
"""Correlation coefficient two waves.
other: Wave
returns: 2x2 covariance matrix
"""
mat = self.cov_mat(other)
corr = mat[0][1] / math.sqrt(mat[0][0] * mat[1][1])
return corr
def cov_mat(self, other):
"""Covariance matrix of two waves.
other: Wave
returns: 2x2 covariance matrix
"""
return numpy.cov(self.ys, other.ys)
def cov(self, other):
"""Covariance of two unbiased waves.
other: Wave
returns: float
"""
total = sum(self.ys * other.ys) / len(self.ys)
return total
def cos_cov(self, k):
"""Covariance with a cosine signal.
freq: freq of the cosine signal in Hz
returns: float covariance
"""
n = len(self.ys)
factor = math.pi * k / n
ys = [math.cos(factor * (i+0.5)) for i in range(n)]
total = 2 * sum(self.ys * ys)
return total
def cos_transform(self):
"""Discrete cosine transform.
returns: list of frequency, cov pairs
"""
n = len(self.ys)
res = []
for k in range(n):
cov = self.cos_cov(k)
res.append((k, cov))
return res
def write(self, filename='sound.wav'):
"""Write a wave file.
filename: string
"""
print 'Writing', filename
wfile = WavFileWriter(filename, self.framerate)
wfile.write(self)
wfile.close()
def play(self, filename='sound.wav'):
"""Plays a wave file.
filename: string
"""
self.write(filename)
play_wave(filename)
def unbias(ys):
"""Shifts a wave array so it has mean 0.
ys: wave array
returns: wave array
"""
return ys - ys.mean()
def normalize(ys, amp=1.0):
"""Normalizes a wave array so the maximum amplitude is +amp or -amp.
ys: wave array
amp: max amplitude (pos or neg) in result
returns: wave array
"""
high, low = abs(max(ys)), abs(min(ys))
return amp * ys / max(high, low)
def quantize(ys, bound, dtype):
"""Maps the waveform to quanta.
ys: wave array
bound: maximum amplitude
dtype: numpy data type of the result
returns: quantized signal
"""
if max(ys) > 1 or min(ys) < -1:
print 'Warning: normalizing before quantizing.'
ys = normalize(ys)
zs = (ys * bound).astype(dtype)
return zs
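# For example, quantize(ys, bound=32767, dtype=numpy.int16), the defaults used
# by WavFileWriter, maps an amplitude of 1.0 to 32767 and -1.0 to -32767.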
def apodize(ys, framerate, denom=20, duration=0.1):
"""Tapers the amplitude at the beginning and end of the signal.
Tapers either the given duration of time or the given
fraction of the total duration, whichever is less.
ys: wave array
framerate: int frames per second
denom: float fraction of the segment to taper
duration: float duration of the taper in seconds
returns: wave array
"""
# a fixed fraction of the segment
n = len(ys)
k1 = n / denom
# a fixed duration of time
k2 = int(duration * framerate)
k = min(k1, k2)
w1 = numpy.linspace(0, 1, k)
w2 = numpy.ones(n - 2*k)
w3 = numpy.linspace(1, 0, k)
window = numpy.concatenate((w1, w2, w3))
return ys * window
class Signal(object):
"""Represents a time-varying signal."""
def __add__(self, other):
"""Adds two signals.
other: Signal
returns: Signal
"""
if other == 0:
return self
return SumSignal(self, other)
__radd__ = __add__
@property
def period(self):
"""Period of the signal in seconds (property).
For non-periodic signals, use the default, 0.1 seconds
returns: float seconds
"""
return 0.1
def plot(self, framerate=11025):
"""Plots the signal.
framerate: samples per second
"""
duration = self.period * 3
wave = self.make_wave(duration, start=0, framerate=framerate)
wave.plot()
def make_wave(self, duration=1, start=0, framerate=11025):
"""Makes a Wave object.
duration: float seconds
start: float seconds
framerate: int frames per second
returns: Wave
"""
dt = 1.0 / framerate
ts = numpy.arange(start, duration, dt)
ys = self.evaluate(ts)
return Wave(ys, framerate=framerate, start=start)
def infer_framerate(ts):
"""Given ts, find the framerate.
Assumes that the ts are equally spaced.
ts: sequence of times in seconds
returns: frames per second
"""
dt = ts[1] - ts[0]
framerate = 1.0 / dt
return framerate
class SumSignal(Signal):
"""Represents the sum of signals."""
def __init__(self, *args):
"""Initializes the sum.
args: tuple of signals
"""
self.signals = args
@property
def period(self):
"""Period of the signal in seconds.
        Note: this is not correct; it's mostly a placeholder.
But it is correct for a harmonic sequence where all
component frequencies are multiples of the fundamental.
returns: float seconds
"""
return max(sig.period for sig in self.signals)
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
return sum(sig.evaluate(ts) for sig in self.signals)
class Sinusoid(Signal):
"""Represents a sinusoidal signal."""
def __init__(self, freq=440, amp=1.0, offset=0, func=numpy.sin):
"""Initializes a sinusoidal signal.
freq: float frequency in Hz
amp: float amplitude, 1.0 is nominal max
offset: float phase offset in radians
func: function that maps phase to amplitude
"""
self.freq = freq
self.amp = amp
self.offset = offset
self.func = func
@property
def period(self):
"""Period of the signal in seconds.
returns: float seconds
"""
return 1.0 / self.freq
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
phases = PI2 * self.freq * ts + self.offset
ys = self.amp * self.func(phases)
return ys
def CosSignal(freq=440, amp=1.0, offset=0):
"""Makes a consine Sinusoid.
freq: float frequency in Hz
amp: float amplitude, 1.0 is nominal max
offset: float phase offset in radians
returns: Sinusoid object
"""
return Sinusoid(freq, amp, offset, func=numpy.cos)
def SinSignal(freq=440, amp=1.0, offset=0):
"""Makes a sine Sinusoid.
freq: float frequency in Hz
amp: float amplitude, 1.0 is nominal max
offset: float phase offset in radians
returns: Sinusoid object
"""
return Sinusoid(freq, amp, offset, func=numpy.sin)
class SquareSignal(Sinusoid):
"""Represents a square signal."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
cycles = self.freq * ts + self.offset / PI2
frac, _ = numpy.modf(cycles)
ys = self.amp * numpy.sign(unbias(frac))
return ys
class SawtoothSignal(Sinusoid):
"""Represents a sawtooth signal."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
cycles = self.freq * ts + self.offset / PI2
frac, _ = numpy.modf(cycles)
ys = normalize(unbias(frac), self.amp)
return ys
class ParabolicSignal(Sinusoid):
"""Represents a parabolic signal."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
cycles = self.freq * ts + self.offset / PI2
frac, _ = numpy.modf(cycles)
ys = frac**2
ys = normalize(unbias(ys), self.amp)
return ys
class GlottalSignal(Sinusoid):
"""Represents a periodic signal that resembles a glottal signal."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
cycles = self.freq * ts + self.offset / PI2
frac, _ = numpy.modf(cycles)
ys = frac**4 * (1-frac)
ys = normalize(unbias(ys), self.amp)
return ys
class TriangleSignal(Sinusoid):
"""Represents a triangle signal."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
cycles = self.freq * ts + self.offset / PI2
frac, _ = numpy.modf(cycles)
ys = numpy.abs(frac - 0.5)
ys = normalize(unbias(ys), self.amp)
return ys
class Chirp(Signal):
"""Represents a signal with variable frequency."""
def __init__(self, start=440, end=880, amp=1.0):
"""Initializes a linear chirp.
start: float frequency in Hz
end: float frequency in Hz
amp: float amplitude, 1.0 is nominal max
"""
self.start = start
self.end = end
self.amp = amp
@property
def period(self):
"""Period of the signal in seconds.
returns: float seconds
"""
return ValueError('Non-periodic signal.')
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
freqs = numpy.linspace(self.start, self.end, len(ts)-1)
return self._evaluate(ts, freqs)
def _evaluate(self, ts, freqs):
"""Helper function that evaluates the signal.
ts: float array of times
freqs: float array of frequencies during each interval
"""
#n = len(freqs)
#print freqs[::n/2]
dts = numpy.diff(ts)
dps = PI2 * freqs * dts
phases = numpy.cumsum(dps)
phases = numpy.insert(phases, 0, 0)
ys = self.amp * numpy.cos(phases)
return ys
class ExpoChirp(Chirp):
"""Represents a signal with varying frequency."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
start, end = math.log10(self.start), math.log10(self.end)
freqs = numpy.logspace(start, end, len(ts)-1)
return self._evaluate(ts, freqs)
class SilentSignal(Signal):
"""Represents silence."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
return numpy.zeros(len(ts))
class _Noise(Signal):
"""Represents a noise signal (abstract parent class)."""
def __init__(self, amp=1.0):
"""Initializes a white noise signal.
amp: float amplitude, 1.0 is nominal max
"""
self.amp = amp
@property
def period(self):
"""Period of the signal in seconds.
returns: float seconds
"""
return ValueError('Non-periodic signal.')
class UncorrelatedUniformNoise(_Noise):
"""Represents uncorrelated uniform noise."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
ys = numpy.random.uniform(-self.amp, self.amp, len(ts))
return ys
class UncorrelatedGaussianNoise(_Noise):
"""Represents uncorrelated gaussian noise."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
ts: float array of times
returns: float wave array
"""
ys = numpy.random.normal(0, 1, len(ts))
ys = normalize(ys, self.amp)
return ys
class BrownianNoise(_Noise):
"""Represents Brownian noise, aka red noise."""
def evaluate(self, ts):
"""Evaluates the signal at the given times.
Computes Brownian noise by taking the cumulative sum of
a uniform random series.
ts: float array of times
returns: float wave array
"""
#dys = numpy.random.normal(0, 1, len(ts))
dys = numpy.random.uniform(-1, 1, len(ts))
#ys = numpy.cumsum(dys)
ys = scipy.integrate.cumtrapz(dys, ts)
ys = normalize(unbias(ys), self.amp)
return ys
class PinkNoise(_Noise):
"""Represents Brownian noise, aka red noise."""
def __init__(self, amp=1.0, beta=1.0):
"""Initializes a pink noise signal.
amp: float amplitude, 1.0 is nominal max
"""
self.amp = amp
self.beta = beta
def make_wave(self, duration=1, start=0, framerate=11025):
"""Makes a Wave object.
duration: float seconds
start: float seconds
framerate: int frames per second
returns: Wave
"""
signal = UncorrelatedUniformNoise()
wave = signal.make_wave(duration, start, framerate)
spectrum = wave.make_spectrum()
spectrum.pink_filter(beta=self.beta)
wave2 = spectrum.make_wave()
wave2.unbias()
wave2.normalize(self.amp)
return wave2
def rest(duration):
"""Makes a rest of the given duration.
duration: float seconds
returns: Wave
"""
signal = SilentSignal()
wave = signal.make_wave(duration)
return wave
def make_note(midi_num, duration, sig_cons=CosSignal, framerate=11025):
"""Make a MIDI note with the given duration.
midi_num: int MIDI note number
duration: float seconds
sig_cons: Signal constructor function
framerate: int frames per second
returns: Wave
"""
freq = midi_to_freq(midi_num)
signal = sig_cons(freq)
wave = signal.make_wave(duration, framerate=framerate)
wave.apodize()
return wave
def make_chord(midi_nums, duration, sig_cons=CosSignal, framerate=11025):
"""Make a chord with the given duration.
midi_nums: sequence of int MIDI note numbers
duration: float seconds
sig_cons: Signal constructor function
framerate: int frames per second
returns: Wave
"""
freqs = [midi_to_freq(num) for num in midi_nums]
signal = sum(sig_cons(freq) for freq in freqs)
wave = signal.make_wave(duration, framerate=framerate)
wave.apodize()
return wave
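# Illustrative usage: an A minor triad (A4, C5, E5) held for one second, the
# same chord used in main() below:
#   wave = make_chord([69, 72, 76], 1)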
def midi_to_freq(midi_num):
"""Converts MIDI note number to frequency.
midi_num: int MIDI note number
returns: float frequency in Hz
"""
x = (midi_num - 69) / 12.0
freq = 440.0 * 2**x
return freq
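# For example, midi_to_freq(69) == 440.0 (A4) and midi_to_freq(81) == 880.0,
# one octave (12 semitones) higher.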
def sin_wave(freq, duration=1, offset=0):
"""Makes a sine wave with the given parameters.
freq: float cycles per second
duration: float seconds
offset: float radians
returns: Wave
"""
signal = SinSignal(freq, offset=offset)
wave = signal.make_wave(duration)
return wave
def cos_wave(freq, duration=1, offset=0):
"""Makes a cosine wave with the given parameters.
freq: float cycles per second
duration: float seconds
offset: float radians
returns: Wave
"""
signal = CosSignal(freq, offset=offset)
wave = signal.make_wave(duration)
return wave
def mag(a):
"""Computes the magnitude of a numpy array.
a: numpy array
returns: float
"""
return numpy.sqrt(numpy.dot(a, a))
def main():
cos_basis = cos_wave(440)
sin_basis = sin_wave(440)
wave = cos_wave(440, offset=math.pi/2)
cos_cov = cos_basis.cov(wave)
sin_cov = sin_basis.cov(wave)
print cos_cov, sin_cov, mag((cos_cov, sin_cov))
return
wfile = WavFileWriter()
for sig_cons in [SinSignal, TriangleSignal, SawtoothSignal,
GlottalSignal, ParabolicSignal, SquareSignal]:
print sig_cons
sig = sig_cons(440)
wave = sig.make_wave(1)
wave.apodize()
wfile.write(wave)
wfile.close()
return
signal = GlottalSignal(440)
signal.plot()
pyplot.show()
return
wfile = WavFileWriter()
for m in range(60, 0, -1):
wfile.write(make_note(m, 0.25))
wfile.close()
return
wave1 = make_note(69, 1)
wave2 = make_chord([69, 72, 76], 1)
wave = wave1 | wave2
wfile = WavFileWriter()
wfile.write(wave)
wfile.close()
return
sig1 = CosSignal(freq=440)
sig2 = CosSignal(freq=523.25)
sig3 = CosSignal(freq=660)
sig4 = CosSignal(freq=880)
sig5 = CosSignal(freq=987)
sig = sig1 + sig2 + sig3 + sig4
#wave = Wave(sig, duration=0.02)
#wave.plot()
wave = sig.make_wave(duration=1)
#wave.normalize()
wfile = WavFileWriter(wave)
wfile.write()
wfile.close()
if __name__ == '__main__':
main()
| gpl-3.0 | -6,860,292,400,440,313,000 | 8,237,503,869,066,414,000 | 23.783888 | 75 | 0.579197 | false |
magfest/mivs | mivs/models.py | 1 | 16621 | from mivs import *
def href(url):
return ('http://' + url) if url and not url.startswith('http') else url
class ReviewMixin:
@property
def video_reviews(self):
return [r for r in self.reviews if r.video_status != c.PENDING]
@property
def game_reviews(self):
return [r for r in self.reviews if r.game_status != c.PENDING]
@Session.model_mixin
class SessionMixin:
def logged_in_studio(self):
try:
return self.indie_studio(cherrypy.session['studio_id'])
except:
raise HTTPRedirect('../mivs_applications/studio')
def logged_in_judge(self):
judge = self.admin_attendee().admin_account.judge
if judge:
return judge
else:
raise HTTPRedirect('../accounts/homepage?message={}', 'You have been given judge access but not had a judge entry created for you - please contact a MIVS admin to correct this.')
def code_for(self, game):
if game.unlimited_code:
return game.unlimited_code
else:
for code in self.logged_in_judge().codes:
if code.game == game:
return code
def delete_screenshot(self, screenshot):
self.delete(screenshot)
try:
os.remove(screenshot.filepath)
except:
pass
self.commit()
def indie_judges(self):
return self.query(IndieJudge).join(IndieJudge.admin_account).join(AdminAccount.attendee).order_by(Attendee.full_name).all()
def indie_games(self):
return self.query(IndieGame).options(joinedload(IndieGame.studio), joinedload(IndieGame.reviews)).order_by('name').all()
@Session.model_mixin
class AdminAccount:
judge = relationship('IndieJudge', uselist=False, backref='admin_account')
@Session.model_mixin
class Group:
studio = relationship('IndieStudio', uselist=False, backref='group')
class IndieJudge(MagModel, ReviewMixin):
admin_id = Column(UUID, ForeignKey('admin_account.id'))
genres = Column(MultiChoice(c.MIVS_INDIE_JUDGE_GENRE_OPTS))
platforms = Column(MultiChoice(c.MIVS_INDIE_PLATFORM_OPTS))
platforms_text = Column(UnicodeText)
staff_notes = Column(UnicodeText)
codes = relationship('IndieGameCode', backref='judge')
reviews = relationship('IndieGameReview', backref='judge')
email_model_name = 'judge'
@property
def judging_complete(self):
return len(self.reviews) == len(self.game_reviews)
@property
def mivs_all_genres(self):
return c.MIVS_ALL_GENRES in self.genres_ints
@property
def attendee(self):
return self.admin_account.attendee
@property
def full_name(self):
return self.attendee.full_name
@property
def email(self):
return self.attendee.email
class IndieStudio(MagModel):
group_id = Column(UUID, ForeignKey('group.id'), nullable=True)
name = Column(UnicodeText, unique=True)
address = Column(UnicodeText)
website = Column(UnicodeText)
twitter = Column(UnicodeText)
facebook = Column(UnicodeText)
status = Column(Choice(c.MIVS_STUDIO_STATUS_OPTS), default=c.NEW, admin_only=True)
staff_notes = Column(UnicodeText, admin_only=True)
registered = Column(UTCDateTime, server_default=utcnow())
games = relationship('IndieGame', backref='studio', order_by='IndieGame.title')
developers = relationship('IndieDeveloper', backref='studio', order_by='IndieDeveloper.last_name')
email_model_name = 'studio'
@property
def confirm_deadline(self):
return sorted([g for g in self.games if g.accepted], key=lambda g: g.accepted)[0].accepted\
+ timedelta(days=c.MIVS_CONFIRM_DEADLINE)
@property
def after_confirm_deadline(self):
return self.confirm_deadline < localized_now()
@property
def website_href(self):
return href(self.website)
@property
def email(self):
return [dev.email for dev in self.developers if dev.primary_contact]
@property
def primary_contact(self):
return [dev for dev in self.developers if dev.primary_contact][0]
@property
def submitted_games(self):
return [g for g in self.games if g.submitted]
@property
def comped_badges(self):
return c.MIVS_INDIE_BADGE_COMPS * len([g for g in self.games if g.status == c.ACCEPTED])
@property
def unclaimed_badges(self):
return max(0, self.comped_badges - len([d for d in self.developers if not d.matching_attendee]))
class IndieDeveloper(MagModel):
studio_id = Column(UUID, ForeignKey('indie_studio.id'))
primary_contact = Column(Boolean, default=False) # just means they receive emails
first_name = Column(UnicodeText)
last_name = Column(UnicodeText)
email = Column(UnicodeText)
cellphone = Column(UnicodeText)
@property
def full_name(self):
return self.first_name + ' ' + self.last_name
@property
def matching_attendee(self):
return self.session.query(Attendee).filter(
func.lower(Attendee.first_name) == self.first_name.lower(),
func.lower(Attendee.last_name) == self.last_name.lower(),
func.lower(Attendee.email) == self.email.lower()
).first()
class IndieGame(MagModel, ReviewMixin):
studio_id = Column(UUID, ForeignKey('indie_studio.id'))
title = Column(UnicodeText)
brief_description = Column(UnicodeText) # 140 max
genres = Column(MultiChoice(c.MIVS_INDIE_GENRE_OPTS))
platforms = Column(MultiChoice(c.MIVS_INDIE_PLATFORM_OPTS))
platforms_text = Column(UnicodeText)
description = Column(UnicodeText) # 500 max
how_to_play = Column(UnicodeText) # 1000 max
link_to_video = Column(UnicodeText)
link_to_game = Column(UnicodeText)
password_to_game = Column(UnicodeText)
code_type = Column(Choice(c.MIVS_CODE_TYPE_OPTS), default=c.NO_CODE)
code_instructions = Column(UnicodeText)
build_status = Column(Choice(c.MIVS_BUILD_STATUS_OPTS), default=c.PRE_ALPHA)
build_notes = Column(UnicodeText) # 500 max
shown_events = Column(UnicodeText)
video_submitted = Column(Boolean, default=False)
submitted = Column(Boolean, default=False)
agreed_liability = Column(Boolean, default=False)
agreed_showtimes = Column(Boolean, default=False)
agreed_reminder1 = Column(Boolean, default=False)
agreed_reminder2 = Column(Boolean, default=False)
alumni_years = Column(MultiChoice(c.PREV_MIVS_YEAR_OPTS))
alumni_update = Column(UnicodeText)
link_to_promo_video = Column(UnicodeText)
link_to_webpage = Column(UnicodeText)
twitter = Column(UnicodeText)
facebook = Column(UnicodeText)
other_social_media = Column(UnicodeText)
tournament_at_event = Column(Boolean, default=False)
tournament_prizes = Column(UnicodeText)
has_multiplayer = Column(Boolean, default=False)
player_count = Column(UnicodeText)
multiplayer_game_length = Column(Integer, nullable=True) # Length in minutes
leaderboard_challenge = Column(Boolean, default=False)
status = Column(Choice(c.MIVS_GAME_STATUS_OPTS), default=c.NEW, admin_only=True)
judge_notes = Column(UnicodeText, admin_only=True)
registered = Column(UTCDateTime, server_default=utcnow())
waitlisted = Column(UTCDateTime, nullable=True)
accepted = Column(UTCDateTime, nullable=True)
codes = relationship('IndieGameCode', backref='game')
reviews = relationship('IndieGameReview', backref='game')
images = relationship(
'IndieGameImage', backref='game', order_by='IndieGameImage.id')
email_model_name = 'game'
@presave_adjustment
def accepted_time(self):
if self.status == c.ACCEPTED and not self.accepted:
self.accepted = datetime.now(UTC)
@presave_adjustment
def waitlisted_time(self):
if self.status == c.WAITLISTED and not self.waitlisted:
self.waitlisted = datetime.now(UTC)
@property
def email(self):
return self.studio.email
@property
def reviews_to_email(self):
return [review for review in self.reviews if review.send_to_studio]
@property
def video_href(self):
return href(self.link_to_video)
@property
def href(self):
return href(self.link_to_game)
@property
def screenshots(self):
return [img for img in self.images if img.is_screenshot]
@property
def best_screenshots(self):
return [img for img in self.images if img.is_screenshot and img.use_in_promo]
def best_screenshot_downloads(self, count=2):
all_images = reversed(sorted(
self.images,
key=lambda img: (
img.is_screenshot and img.use_in_promo,
img.is_screenshot,
img.use_in_promo)))
screenshots = []
for i, screenshot in enumerate(all_images):
if os.path.exists(screenshot.filepath):
screenshots.append(screenshot)
if len(screenshots) >= count:
break
return screenshots
def best_screenshot_download_filenames(self, count=2):
nonchars = re.compile(r'[\W]+')
best_screenshots = self.best_screenshot_downloads(count)
screenshots = []
for i, screenshot in enumerate(best_screenshots):
if os.path.exists(screenshot.filepath):
name = '_'.join([s for s in self.title.lower().split() if s])
name = nonchars.sub('', name)
filename = '{}_{}.{}'.format(name, len(screenshots) + 1, screenshot.extension.lower())
screenshots.append(filename)
if len(screenshots) >= count:
break
return screenshots + ([''] * (count - len(screenshots)))
@property
def promo_image(self):
return next(iter([img for img in self.images if not img.is_screenshot]), None)
@property
def missing_steps(self):
steps = []
if not self.link_to_game:
steps.append('You have not yet included a link to where the judges can access your game')
if self.code_type != c.NO_CODE and self.link_to_game:
if not self.codes:
steps.append('You have not yet attached any codes to this game for our judges to use')
elif not self.unlimited_code and len(self.codes) < c.MIVS_CODES_REQUIRED:
steps.append('You have not attached the {} codes you must provide for our judges'.format(c.MIVS_CODES_REQUIRED))
if not self.agreed_showtimes:
steps.append('You must agree to the showtimes detailed on the game form')
if not self.agreed_liability:
steps.append('You must check the box that agrees to our liability waiver')
return steps
@property
def video_broken(self):
for r in self.reviews:
if r.video_status == c.BAD_LINK:
return True
@property
def unlimited_code(self):
for code in self.codes:
if code.unlimited_use:
return code
@property
def video_submittable(self):
return bool(self.link_to_video)
@property
def submittable(self):
return not self.missing_steps
@property
def scores(self):
return [r.game_score for r in self.reviews if r.game_score]
@property
def score_sum(self):
return sum(self.scores, 0)
@property
def average_score(self):
return (self.score_sum / len(self.scores)) if self.scores else 0
@property
def has_issues(self):
return any(r.has_issues for r in self.reviews)
@property
def confirmed(self):
return self.status == c.ACCEPTED and self.studio and self.studio.group_id
class IndieGameImage(MagModel):
game_id = Column(UUID, ForeignKey('indie_game.id'))
filename = Column(UnicodeText)
content_type = Column(UnicodeText)
extension = Column(UnicodeText)
description = Column(UnicodeText)
use_in_promo = Column(Boolean, default=False)
is_screenshot = Column(Boolean, default=True)
@property
def url(self):
return '../mivs_applications/view_image?id={}'.format(self.id)
@property
def filepath(self):
return os.path.join(c.MIVS_GAME_IMAGE_DIR, str(self.id))
class IndieGameCode(MagModel):
game_id = Column(UUID, ForeignKey('indie_game.id'))
judge_id = Column(UUID, ForeignKey('indie_judge.id'), nullable=True)
code = Column(UnicodeText)
unlimited_use = Column(Boolean, default=False)
judge_notes = Column(UnicodeText, admin_only=True)
@property
def type_label(self):
return 'Unlimited-Use' if self.unlimited_use else 'Single-Person'
class IndieGameReview(MagModel):
game_id = Column(UUID, ForeignKey('indie_game.id'))
judge_id = Column(UUID, ForeignKey('indie_judge.id'))
video_status = Column(Choice(c.MIVS_VIDEO_REVIEW_STATUS_OPTS), default=c.PENDING)
game_status = Column(Choice(c.MIVS_GAME_REVIEW_STATUS_OPTS), default=c.PENDING)
game_content_bad = Column(Boolean, default=False)
video_score = Column(Choice(c.MIVS_VIDEO_REVIEW_OPTS), default=c.PENDING)
game_score = Column(Integer, default=0) # 0 = not reviewed, 1-10 score (10 is best)
video_review = Column(UnicodeText)
game_review = Column(UnicodeText)
developer_response = Column(UnicodeText)
staff_notes = Column(UnicodeText)
send_to_studio = Column(Boolean, default=False)
__table_args__ = (UniqueConstraint('game_id', 'judge_id', name='review_game_judge_uniq'),)
@presave_adjustment
def no_score_if_broken(self):
if self.has_video_issues:
self.video_score = c.PENDING
if self.has_game_issues:
self.game_score = 0
@property
def has_video_issues(self):
return self.video_status in c.MIVS_PROBLEM_STATUSES
@property
def has_game_issues(self):
if self.game_status != c.COULD_NOT_PLAY:
return self.game_status in c.MIVS_PROBLEM_STATUSES
@property
def has_issues(self):
return self.has_video_issues or self.has_game_issues
@on_startup
def add_applicant_restriction():
"""
We use convenience functions for our form handling, e.g. to instantiate an
attendee from an id or from form data we use the session.attendee() method.
This method runs on startup and overrides the methods which are used for the
game application forms to add a new "applicant" parameter. If truthy, this
triggers three additional behaviors:
1) We check that there is currently a logged in studio, and redirect to the
initial application form if there is not.
2) We check that the item being edited belongs to the currently-logged-in
studio and raise an exception if it does not. This check is bypassed for
new things which have not yet been saved to the database.
3) If the model is one with a "studio" relationship, we set that to the
currently-logged-in studio.
We do not perform these kinds of checks for indie judges, for two reasons:
1) We're less concerned about judges abusively editing each other's reviews.
2) There are probably some legitimate use cases for one judge to be able to
edit another's reviews, e.g. to correct typos or reset a review's status
after a link has been fixed, etc.
"""
def override_getter(method_name):
orig_getter = getattr(Session.SessionMixin, method_name)
@wraps(orig_getter)
def with_applicant(self, *args, **kwargs):
applicant = kwargs.pop('applicant', False)
instance = orig_getter(self, *args, **kwargs)
if applicant:
studio = self.logged_in_studio()
if hasattr(instance.__class__, 'game'):
assert instance.is_new or studio == instance.game.studio
else:
assert instance.is_new or studio == instance.studio
instance.studio = studio
return instance
setattr(Session.SessionMixin, method_name, with_applicant)
for name in ['indie_developer', 'indie_game', 'indie_game_code', 'indie_game_image']:
override_getter(name)
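    # Illustrative call (the handler and id are hypothetical, not part of this
    # module): with the override installed, a MIVS page handler can do
    #     game = session.indie_game(params['id'], applicant=True)
    # which checks that a studio is logged in and that the game belongs to that
    # studio before the instance is returned.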
| agpl-3.0 | -1,204,899,819,224,061,000 | -1,202,702,425,137,089,800 | 35.610132 | 190 | 0.64232 | false |
SQbQxeKd3JHD8/simple_ConTeXt | scripts/log.py | 2 | 1073 | from typing import Any, Dict, List, Optional
from . import cite
from . import utilities
def parse(
text: str, script: str, opts: Dict[str, Any], timeout: float = 5,
) -> Dict[str, List[str]]:
result = cite.parse_common_luatex(
text, script, opts, input_as_stdin=True, timeout=timeout,
)
return do_format(result)
def do_format(data: Optional[dict]) -> Dict[str, List[str]]:
result = {"main": [], "errors": []} # type: Dict[str, List[str]]
if not isinstance(data, list):
return result
errors = []
for entry in data:
if not isinstance(entry, list) or not entry:
continue
class_ = entry[1]
if class_.endswith("error"):
errors.append(entry)
else:
result["main"].append(entry)
result["errors"] = utilities.deduplicate_list(errors)
return result
def compile_errors(errors: List[List[str]]) -> str:
result = ""
for err in errors:
if len(err) > 2:
result += "".join(" - line {}, {}: {}\n".format(*err))
return result
| mit | 7,156,856,694,667,036,000 | 1,230,309,897,835,153,400 | 27.236842 | 69 | 0.581547 | false |
ValFadeev/ansible-modules-core | cloud/linode/linode.py | 142 | 18004 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: linode
short_description: create / delete / stop / restart an instance in Linode Public Cloud
description:
- creates / deletes a Linode Public Cloud instance and optionally waits for it to be 'running'.
version_added: "1.3"
options:
state:
description:
- Indicate desired state of the resource
choices: ['present', 'active', 'started', 'absent', 'deleted', 'stopped', 'restarted']
default: present
api_key:
description:
- Linode API key
default: null
name:
description:
- Name to give the instance (alphanumeric, dashes, underscore)
- To keep sanity on the Linode Web Console, name is prepended with LinodeID_
default: null
type: string
linode_id:
description:
- Unique ID of a linode server
aliases: lid
default: null
type: integer
plan:
description:
- plan to use for the instance (Linode plan)
default: null
type: integer
payment_term:
description:
- payment term to use for the instance (payment term in months)
default: 1
type: integer
choices: [1, 12, 24]
password:
description:
- root password to apply to a new server (auto generated if missing)
default: null
type: string
ssh_pub_key:
description:
- SSH public key applied to root user
default: null
type: string
swap:
description:
- swap size in MB
default: 512
type: integer
distribution:
description:
- distribution to use for the instance (Linode Distribution)
default: null
type: integer
datacenter:
description:
- datacenter to create an instance in (Linode Datacenter)
default: null
type: integer
wait:
description:
- wait for the instance to be in state 'running' before returning
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
requirements:
- "python >= 2.6"
- "linode-python"
- "pycurl"
author: "Vincent Viallet (@zbal)"
notes:
- LINODE_API_KEY env variable can be used instead
'''
EXAMPLES = '''
# Create a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Ensure a running server (create if missing)
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Delete a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: absent
# Stop a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: stopped
# Reboot a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: restarted
'''
import time
import os
try:
import pycurl
HAS_PYCURL = True
except ImportError:
HAS_PYCURL = False
try:
from linode import api as linode_api
HAS_LINODE = True
except ImportError:
HAS_LINODE = False
def randompass():
'''
    Generate a long random password that complies with Linode requirements
'''
# Linode API currently requires the following:
# It must contain at least two of these four character classes:
# lower case letters - upper case letters - numbers - punctuation
# we play it safe :)
import random
import string
# as of python 2.4, this reseeds the PRNG from urandom
random.seed()
lower = ''.join(random.choice(string.ascii_lowercase) for x in range(6))
upper = ''.join(random.choice(string.ascii_uppercase) for x in range(6))
number = ''.join(random.choice(string.digits) for x in range(6))
punct = ''.join(random.choice(string.punctuation) for x in range(6))
p = lower + upper + number + punct
return ''.join(random.sample(p, len(p)))
def getInstanceDetails(api, server):
'''
Return the details of an instance, populating IPs, etc.
'''
instance = {'id': server['LINODEID'],
'name': server['LABEL'],
'public': [],
'private': []}
# Populate with ips
for ip in api.linode_ip_list(LinodeId=server['LINODEID']):
if ip['ISPUBLIC'] and 'ipv4' not in instance:
instance['ipv4'] = ip['IPADDRESS']
instance['fqdn'] = ip['RDNS_NAME']
if ip['ISPUBLIC']:
instance['public'].append({'ipv4': ip['IPADDRESS'],
'fqdn': ip['RDNS_NAME'],
'ip_id': ip['IPADDRESSID']})
else:
instance['private'].append({'ipv4': ip['IPADDRESS'],
'fqdn': ip['RDNS_NAME'],
'ip_id': ip['IPADDRESSID']})
return instance
def linodeServers(module, api, state, name, plan, distribution, datacenter, linode_id,
payment_term, password, ssh_pub_key, swap, wait, wait_timeout):
instances = []
changed = False
new_server = False
servers = []
disks = []
configs = []
jobs = []
# See if we can match an existing server details with the provided linode_id
if linode_id:
# For the moment we only consider linode_id as criteria for match
# Later we can use more (size, name, etc.) and update existing
servers = api.linode_list(LinodeId=linode_id)
# Attempt to fetch details about disks and configs only if servers are
# found with linode_id
if servers:
disks = api.linode_disk_list(LinodeId=linode_id)
configs = api.linode_config_list(LinodeId=linode_id)
# Act on the state
if state in ('active', 'present', 'started'):
# TODO: validate all the plan / distribution / datacenter are valid
# Multi step process/validation:
# - need linode_id (entity)
# - need disk_id for linode_id - create disk from distrib
# - need config_id for linode_id - create config (need kernel)
# Any create step triggers a job that need to be waited for.
if not servers:
for arg in ('name', 'plan', 'distribution', 'datacenter'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
# Create linode entity
new_server = True
try:
res = api.linode_create(DatacenterID=datacenter, PlanID=plan,
PaymentTerm=payment_term)
linode_id = res['LinodeID']
# Update linode Label to match name
api.linode_update(LinodeId=linode_id, Label='%s_%s' % (linode_id, name))
# Save server
servers = api.linode_list(LinodeId=linode_id)
except Exception, e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
if not disks:
for arg in ('name', 'linode_id', 'distribution'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
# Create disks (1 from distrib, 1 for SWAP)
new_server = True
try:
if not password:
# Password is required on creation, if not provided generate one
password = randompass()
if not swap:
swap = 512
# Create data disk
size = servers[0]['TOTALHD'] - swap
if ssh_pub_key:
res = api.linode_disk_createfromdistribution(
LinodeId=linode_id, DistributionID=distribution,
rootPass=password, rootSSHKey=ssh_pub_key,
Label='%s data disk (lid: %s)' % (name, linode_id), Size=size)
else:
res = api.linode_disk_createfromdistribution(
LinodeId=linode_id, DistributionID=distribution, rootPass=password,
Label='%s data disk (lid: %s)' % (name, linode_id), Size=size)
jobs.append(res['JobID'])
# Create SWAP disk
res = api.linode_disk_create(LinodeId=linode_id, Type='swap',
Label='%s swap disk (lid: %s)' % (name, linode_id),
Size=swap)
jobs.append(res['JobID'])
except Exception, e:
# TODO: destroy linode ?
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
if not configs:
for arg in ('name', 'linode_id', 'distribution'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
# Check architecture
for distrib in api.avail_distributions():
if distrib['DISTRIBUTIONID'] != distribution:
continue
arch = '32'
if distrib['IS64BIT']:
arch = '64'
break
# Get latest kernel matching arch
for kernel in api.avail_kernels():
if not kernel['LABEL'].startswith('Latest %s' % arch):
continue
kernel_id = kernel['KERNELID']
break
# Get disk list
disks_id = []
for disk in api.linode_disk_list(LinodeId=linode_id):
if disk['TYPE'] == 'ext3':
disks_id.insert(0, str(disk['DISKID']))
continue
disks_id.append(str(disk['DISKID']))
# Trick to get the 9 items in the list
while len(disks_id) < 9:
disks_id.append('')
disks_list = ','.join(disks_id)
# Create config
new_server = True
try:
api.linode_config_create(LinodeId=linode_id, KernelId=kernel_id,
Disklist=disks_list, Label='%s config' % name)
configs = api.linode_config_list(LinodeId=linode_id)
except Exception, e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
# Start / Ensure servers are running
for server in servers:
# Refresh server state
server = api.linode_list(LinodeId=server['LINODEID'])[0]
# Ensure existing servers are up and running, boot if necessary
if server['STATUS'] != 1:
res = api.linode_boot(LinodeId=linode_id)
jobs.append(res['JobID'])
changed = True
# wait here until the instances are up
wait_timeout = time.time() + wait_timeout
while wait and wait_timeout > time.time():
# refresh the server details
server = api.linode_list(LinodeId=server['LINODEID'])[0]
# status:
# -2: Boot failed
# 1: Running
if server['STATUS'] in (-2, 1):
break
time.sleep(5)
if wait and wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = 'Timeout waiting on %s (lid: %s)' %
(server['LABEL'], server['LINODEID']))
# Get a fresh copy of the server details
server = api.linode_list(LinodeId=server['LINODEID'])[0]
if server['STATUS'] == -2:
module.fail_json(msg = '%s (lid: %s) failed to boot' %
(server['LABEL'], server['LINODEID']))
# From now on we know the task is a success
# Build instance report
instance = getInstanceDetails(api, server)
# depending on wait flag select the status
if wait:
instance['status'] = 'Running'
else:
instance['status'] = 'Starting'
# Return the root password if this is a new box and no SSH key
# has been provided
if new_server and not ssh_pub_key:
instance['password'] = password
instances.append(instance)
elif state in ('stopped'):
for arg in ('name', 'linode_id'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
if not servers:
module.fail_json(msg = 'Server %s (lid: %s) not found' % (name, linode_id))
for server in servers:
instance = getInstanceDetails(api, server)
if server['STATUS'] != 2:
try:
res = api.linode_shutdown(LinodeId=linode_id)
except Exception, e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Stopping'
changed = True
else:
instance['status'] = 'Stopped'
instances.append(instance)
elif state in ('restarted'):
for arg in ('name', 'linode_id'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
if not servers:
module.fail_json(msg = 'Server %s (lid: %s) not found' % (name, linode_id))
for server in servers:
instance = getInstanceDetails(api, server)
try:
res = api.linode_reboot(LinodeId=server['LINODEID'])
except Exception, e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Restarting'
changed = True
instances.append(instance)
elif state in ('absent', 'deleted'):
for server in servers:
instance = getInstanceDetails(api, server)
try:
api.linode_delete(LinodeId=server['LINODEID'], skipChecks=True)
except Exception, e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Deleting'
changed = True
instances.append(instance)
# Ease parsing if only 1 instance
if len(instances) == 1:
module.exit_json(changed=changed, instance=instances[0])
module.exit_json(changed=changed, instances=instances)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['active', 'present', 'started',
'deleted', 'absent', 'stopped',
'restarted']),
api_key = dict(),
name = dict(type='str'),
plan = dict(type='int'),
distribution = dict(type='int'),
datacenter = dict(type='int'),
linode_id = dict(type='int', aliases=['lid']),
payment_term = dict(type='int', default=1, choices=[1, 12, 24]),
password = dict(type='str'),
ssh_pub_key = dict(type='str'),
swap = dict(type='int', default=512),
wait = dict(type='bool', default=True),
wait_timeout = dict(default=300),
)
)
if not HAS_PYCURL:
module.fail_json(msg='pycurl required for this module')
if not HAS_LINODE:
module.fail_json(msg='linode-python required for this module')
state = module.params.get('state')
api_key = module.params.get('api_key')
name = module.params.get('name')
plan = module.params.get('plan')
distribution = module.params.get('distribution')
datacenter = module.params.get('datacenter')
linode_id = module.params.get('linode_id')
payment_term = module.params.get('payment_term')
password = module.params.get('password')
ssh_pub_key = module.params.get('ssh_pub_key')
swap = module.params.get('swap')
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
# Setup the api_key
if not api_key:
try:
api_key = os.environ['LINODE_API_KEY']
except KeyError, e:
module.fail_json(msg = 'Unable to load %s' % e.message)
# setup the auth
try:
api = linode_api.Api(api_key)
api.test_echo()
except Exception, e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
linodeServers(module, api, state, name, plan, distribution, datacenter, linode_id,
payment_term, password, ssh_pub_key, swap, wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | -1,324,951,394,297,170,200 | -9,154,805,773,900,149,000 | 34.936128 | 100 | 0.556765 | false |
sjsrey/pysal_core | pysal_core/io/IOHandlers/mtx.py | 2 | 6874 | import os.path
import scipy.io as sio
from .. import FileIO
from ...weights.weights import W, WSP
from ...weights.util import full, full2W
from warnings import warn
__author__ = "Myunghwa Hwang <[email protected]>"
__all__ = ["MtxIO"]
class MtxIO(FileIO.FileIO):
"""
Opens, reads, and writes weights file objects in Matrix Market MTX format.
The Matrix Market MTX format is used to facilitate the exchange of matrix data.
In PySAL, it is being tested as a new file format for delivering
the weights information of a spatial weights matrix.
Although the MTX format supports both full and sparse matrices with different
data types, it is assumed that spatial weights files in the mtx format always
use the sparse (or coordinate) format with real data values.
    For now, no additional assumption (e.g., symmetry) is made about the structure
of a weights matrix.
With the above assumptions,
the structure of a MTX file containing a spatial weights matrix
can be defined as follows:
%%MatrixMarket matrix coordinate real general <--- header 1 (constant)
% Comments starts <---
% .... | 0 or more comment lines
% Comments ends <---
M N L <--- header 2, rows, columns, entries
I1 J1 A(I1,J1) <---
... | L entry lines
IL JL A(IL,JL) <---
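    For illustration only (the values below are made up, not taken from any
    PySAL example dataset), a 3x3 row-standardized weights matrix in which
    observation 1 neighbors observations 2 and 3, and each of those neighbors
    only observation 1, would be stored as:
        %%MatrixMarket matrix coordinate real general
        3 3 4
        1 2 0.5
        1 3 0.5
        2 1 1.0
        3 1 1.0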
    In the MTX format, the indices for rows and columns start at 1.
PySAL uses mtx io tools in scipy.
Thus, it is subject to all limits that scipy currently has.
Reengineering might be required, since scipy currently reads in
    the entire file into memory.
References
----------
MTX format specification
http://math.nist.gov/MatrixMarket/formats.html
scipy matlab io
http://docs.scipy.org/doc/scipy/reference/tutorial/io.html
"""
FORMATS = ['mtx']
MODES = ['r', 'w']
def __init__(self, *args, **kwargs):
FileIO.FileIO.__init__(self, *args, **kwargs)
self.file = open(self.dataPath, self.mode + 'b')
def read(self, n=-1, sparse=False):
"""
sparse: boolean
if true, return pysal WSP object
if false, return pysal W object
"""
self._sparse = sparse
self._complain_ifclosed(self.closed)
return self._read()
def seek(self, pos):
if pos == 0:
self.file.seek(0)
self.pos = 0
def _read(self):
"""Reads MatrixMarket mtx file
Returns a pysal.weights.weights.W or pysal.weights.weights.WSP object
Examples
--------
Type 'dir(w)' at the interpreter to see what methods are supported.
Open a MatrixMarket mtx file and read it into a pysal weights object
>>> f = pysal.open(pysal.examples.get_path('wmat.mtx'),'r')
>>> w = f.read()
Get the number of observations from the header
>>> w.n
49
Get the mean number of neighbors
>>> w.mean_neighbors
4.7346938775510203
Get neighbor weights for a single observation
>>> w[1]
{2: 0.33329999999999999, 5: 0.33329999999999999, 6: 0.33329999999999999}
>>> f.close()
>>> f = pysal.open(pysal.examples.get_path('wmat.mtx'),'r')
>>> wsp = f.read(sparse=True)
Get the number of observations from the header
>>> wsp.n
49
Get row from the weights matrix. Note that the first row in the sparse
matrix (the 0th row) corresponds to ID 1 from the original mtx file
read in.
>>> print wsp.sparse[0].todense()
[[ 0. 0.3333 0. 0. 0.3333 0.3333 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. ]]
"""
if self.pos > 0:
raise StopIteration
mtx = sio.mmread(self.file)
ids = range(1, mtx.shape[0] + 1) # matrix market indexes start at one
wsp = WSP(mtx, ids)
if self._sparse:
w = wsp
else:
w = wsp.to_W()
self.pos += 1
return w
def write(self, obj):
"""
        Write a weights object to the opened mtx file.
        Parameters
        ----------
        obj : pysal.weights.weights.W or pysal.weights.weights.WSP
            spatial weights object to write out
        Returns
        -------
        None
            the weights are written to the opened file in MatrixMarket mtx format
Examples
--------
>>> import tempfile, pysal, os
>>> testfile = pysal.open(pysal.examples.get_path('wmat.mtx'),'r')
>>> w = testfile.read()
Create a temporary file for this example
>>> f = tempfile.NamedTemporaryFile(suffix='.mtx')
Reassign to new var
>>> fname = f.name
Close the temporary named file
>>> f.close()
Open the new file in write mode
>>> o = pysal.open(fname,'w')
Write the Weights object into the open file
>>> o.write(w)
>>> o.close()
Read in the newly created mtx file
>>> wnew = pysal.open(fname,'r').read()
Compare values from old to new
>>> wnew.pct_nonzero == w.pct_nonzero
True
Clean up temporary file created for this example
>>> os.remove(fname)
Go to the beginning of the test file
>>> testfile.seek(0)
Create a sparse weights instance from the test file
>>> wsp = testfile.read(sparse=True)
Open the new file in write mode
>>> o = pysal.open(fname,'w')
Write the sparse weights object into the open file
>>> o.write(wsp)
>>> o.close()
Read in the newly created mtx file
>>> wsp_new = pysal.open(fname,'r').read(sparse=True)
Compare values from old to new
>>> wsp_new.s0 == wsp.s0
True
Clean up temporary file created for this example
>>> os.remove(fname)
"""
self._complain_ifclosed(self.closed)
if issubclass(type(obj), W) or issubclass(type(obj), WSP):
w = obj.sparse
sio.mmwrite(self.file, w, comment='Generated by PySAL',
field='real', precision=7)
self.pos += 1
else:
raise TypeError("Expected a pysal weights object, got: %s" % (
type(obj)))
def close(self):
self.file.close()
FileIO.FileIO.close(self)
| bsd-3-clause | 9,139,495,808,400,297,000 | -4,480,196,323,564,455,000 | 27.641667 | 87 | 0.531568 | false |
sahmed95/sympy | sympy/simplify/ratsimp.py | 49 | 7542 | from __future__ import print_function, division
from sympy.core import symbols, Add, Dummy
from sympy.core.compatibility import combinations_with_replacement
from sympy.core.numbers import Rational
from sympy.polys import cancel, ComputationFailed, parallel_poly_from_expr, reduced, Poly
from sympy.polys.monomials import Monomial, monomial_div
from sympy.polys.polyerrors import DomainError, PolificationFailed
from sympy.utilities.misc import debug
def ratsimp(expr):
"""
Put an expression over a common denominator, cancel and reduce.
Examples
========
>>> from sympy import ratsimp
>>> from sympy.abc import x, y
>>> ratsimp(1/x + 1/y)
(x + y)/(x*y)
"""
f, g = cancel(expr).as_numer_denom()
try:
Q, r = reduced(f, [g], field=True, expand=False)
except ComputationFailed:
return f/g
return Add(*Q) + cancel(r/g)
def ratsimpmodprime(expr, G, *gens, **args):
"""
Simplifies a rational expression ``expr`` modulo the prime ideal
generated by ``G``. ``G`` should be a Groebner basis of the
ideal.
>>> from sympy.simplify.ratsimp import ratsimpmodprime
>>> from sympy.abc import x, y
>>> eq = (x + y**5 + y)/(x - y)
>>> ratsimpmodprime(eq, [x*y**5 - x - y], x, y, order='lex')
(x**2 + x*y + x + y)/(x**2 - x*y)
If ``polynomial`` is False, the algorithm computes a rational
simplification which minimizes the sum of the total degrees of
the numerator and the denominator.
If ``polynomial`` is True, this function just brings numerator and
denominator into a canonical form. This is much faster, but has
potentially worse results.
References
==========
M. Monagan, R. Pearce, Rational Simplification Modulo a Polynomial
Ideal,
http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.163.6984
(specifically, the second algorithm)
"""
from sympy import solve
quick = args.pop('quick', True)
polynomial = args.pop('polynomial', False)
debug('ratsimpmodprime', expr)
# usual preparation of polynomials:
num, denom = cancel(expr).as_numer_denom()
try:
polys, opt = parallel_poly_from_expr([num, denom] + G, *gens, **args)
except PolificationFailed:
return expr
domain = opt.domain
if domain.has_assoc_Field:
opt.domain = domain.get_field()
else:
raise DomainError(
"can't compute rational simplification over %s" % domain)
# compute only once
leading_monomials = [g.LM(opt.order) for g in polys[2:]]
tested = set()
def staircase(n):
"""
        Compute all monomials with total degree at most ``n`` that are
not divisible by any element of ``leading_monomials``.
"""
if n == 0:
return [1]
S = []
for mi in combinations_with_replacement(range(len(opt.gens)), n):
m = [0]*len(opt.gens)
for i in mi:
m[i] += 1
if all([monomial_div(m, lmg) is None for lmg in
leading_monomials]):
S.append(m)
return [Monomial(s).as_expr(*opt.gens) for s in S] + staircase(n - 1)
def _ratsimpmodprime(a, b, allsol, N=0, D=0):
"""
Computes a rational simplification of ``a/b`` which minimizes
the sum of the total degrees of the numerator and the denominator.
The algorithm proceeds by looking at ``a * d - b * c`` modulo
the ideal generated by ``G`` for some ``c`` and ``d`` with degree
less than ``a`` and ``b`` respectively.
The coefficients of ``c`` and ``d`` are indeterminates and thus
the coefficients of the normalform of ``a * d - b * c`` are
linear polynomials in these indeterminates.
If these linear polynomials, considered as system of
equations, have a nontrivial solution, then `\frac{a}{b}
\equiv \frac{c}{d}` modulo the ideal generated by ``G``. So,
by construction, the degree of ``c`` and ``d`` is less than
the degree of ``a`` and ``b``, so a simpler representation
has been found.
After a simpler representation has been found, the algorithm
tries to reduce the degree of the numerator and denominator
and returns the result afterwards.
As an extension, if quick=False, we look at all possible degrees such
that the total degree is less than *or equal to* the best current
solution. We retain a list of all solutions of minimal degree, and try
to find the best one at the end.
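        A concrete instance (taken from the module-level example above): with
        G = [x*y**5 - x - y], a = x + y**5 + y and b = x - y, the search stops
        at c = x**2 + x*y + x + y and d = x**2 - x*y, a pair of total degree
        2 + 2 = 4 for which a*d - b*c lies in the ideal.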
"""
c, d = a, b
steps = 0
maxdeg = a.total_degree() + b.total_degree()
if quick:
bound = maxdeg - 1
else:
bound = maxdeg
while N + D <= bound:
if (N, D) in tested:
break
tested.add((N, D))
M1 = staircase(N)
M2 = staircase(D)
debug('%s / %s: %s, %s' % (N, D, M1, M2))
Cs = symbols("c:%d" % len(M1), cls=Dummy)
Ds = symbols("d:%d" % len(M2), cls=Dummy)
ng = Cs + Ds
c_hat = Poly(
sum([Cs[i] * M1[i] for i in range(len(M1))]), opt.gens + ng)
d_hat = Poly(
sum([Ds[i] * M2[i] for i in range(len(M2))]), opt.gens + ng)
r = reduced(a * d_hat - b * c_hat, G, opt.gens + ng,
order=opt.order, polys=True)[1]
S = Poly(r, gens=opt.gens).coeffs()
sol = solve(S, Cs + Ds, particular=True, quick=True)
if sol and not all([s == 0 for s in sol.values()]):
c = c_hat.subs(sol)
d = d_hat.subs(sol)
                # The "free" variables occurring before as parameters
# might still be in the substituted c, d, so set them
# to the value chosen before:
c = c.subs(dict(list(zip(Cs + Ds, [1] * (len(Cs) + len(Ds))))))
d = d.subs(dict(list(zip(Cs + Ds, [1] * (len(Cs) + len(Ds))))))
c = Poly(c, opt.gens)
d = Poly(d, opt.gens)
if d == 0:
raise ValueError('Ideal not prime?')
allsol.append((c_hat, d_hat, S, Cs + Ds))
if N + D != maxdeg:
allsol = [allsol[-1]]
break
steps += 1
N += 1
D += 1
if steps > 0:
c, d, allsol = _ratsimpmodprime(c, d, allsol, N, D - steps)
c, d, allsol = _ratsimpmodprime(c, d, allsol, N - steps, D)
return c, d, allsol
# preprocessing. this improves performance a bit when deg(num)
# and deg(denom) are large:
num = reduced(num, G, opt.gens, order=opt.order)[1]
denom = reduced(denom, G, opt.gens, order=opt.order)[1]
if polynomial:
return (num/denom).cancel()
c, d, allsol = _ratsimpmodprime(
Poly(num, opt.gens), Poly(denom, opt.gens), [])
if not quick and allsol:
debug('Looking for best minimal solution. Got: %s' % len(allsol))
newsol = []
for c_hat, d_hat, S, ng in allsol:
sol = solve(S, ng, particular=True, quick=False)
newsol.append((c_hat.subs(sol), d_hat.subs(sol)))
c, d = min(newsol, key=lambda x: len(x[0].terms()) + len(x[1].terms()))
if not domain.has_Field:
cn, c = c.clear_denoms(convert=True)
dn, d = d.clear_denoms(convert=True)
r = Rational(cn, dn)
return (c*r.q)/(d*r.p)
| bsd-3-clause | -4,411,790,456,449,431,000 | -7,119,407,690,021,989,000 | 33.59633 | 89 | 0.561124 | false |
allisony/pyspeckit | pyspeckit/spectrum/models/utils/ammonia_offset_calculation.py | 7 | 2955 | from __future__ import print_function
import re
import numpy as np
from astropy import units as u
from astropy import constants
from astroquery.splatalogue import Splatalogue, utils
# Query splatalogue, keeping all of the line strength columns
# Both Lovas and CDMS/JPL can be used
nh3 = Splatalogue.query_lines(20*u.GHz, 40*u.GHz, chemical_name=' NH3 ',
show_upper_degeneracy=True,
line_strengths=['ls1','ls2','ls3','ls4'])
numbers = {1:'one',
2:'two',
3:'three',
4:'four',
5:'five',
6:'six',
7:'seven',
8:'eight',
9:'nine',}
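# For each metastable transition J(J) below, the loops print, in the format used
# by pyspeckit's ammonia model: the hyperfine velocity offsets ('voff_lines',
# km/s, relative to the strongest component), the relative intensity weights
# ('tau_wts'), the summed Einstein A coefficient ('aval'), and the rest
# frequency of the strongest component ('freq', GHz).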
tbls = {}
for line in (1,2,3,4,5,6,7,8,9):
reline = re.compile('^{n}\({n}\).*-{n}'.format(n=line))
tbl = utils.minimize_table(nh3[np.array([bool(reline.search(x))
if bool(x) else False
for x in
nh3['Resolved QNs']],
dtype='bool')],
columns=['Species', 'Chemical Name', 'Resolved QNs',
'Freq-GHz', 'Meas Freq-GHz',
'Log<sub>10</sub> (A<sub>ij</sub>)',
'CDMS/JPL Intensity',
'Linelist',
'E_U (K)', 'Upper State Degeneracy'])
if len(tbl) == 0:
pass
# Select only TopModel lines from CDMS/JPL
tbls[line] = tbl[tbl['Linelist'] == 'TopModel']
for par in ('tau_wts','voff_lines','aval','freq'):
print(par)
for line in (1,2,3,4,5,6,7,8): # 9 not available
tbl = tbls[line]
degeneracyline = tbl['Upper State Degeneracy']
intensityline = 10**tbl['CDMSJPL_Intensity']
main = np.argmax(intensityline)
centerline = tbl['Freq'][main]
voff_linesline = np.array((centerline-tbl['Freq'])/centerline) * constants.c
aval = (10**tbl['log10_Aij']).sum()
weightline = intensityline/intensityline.sum()
if par == 'freq':
print("'{n}{n}': {f},".format(n=numbers[line], f=centerline))
elif par == 'voff_lines':
print("'{n}{n}': [{v}],".format(n=numbers[line],
v=", ".join(str(x)
for x in voff_linesline.to(u.km/u.s).value)))
elif par == 'tau_wts':
#print "'{n}{n}': {d},".format(n=numbers[line], d=np.array(degeneracyline))
print("'{n}{n}': [{d}],".format(n=numbers[line],
d=", ".join(str(x) for x in weightline)))
elif par == 'aval':
print("'{n}{n}': {d:e},".format(n=numbers[line], d=aval))
| mit | -3,097,287,458,223,617,500 | -3,568,298,159,231,523,300 | 37.376623 | 101 | 0.446024 | false |
dusenberrymw/systemml | src/main/python/systemml/converters.py | 8 | 12296 | #-------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#-------------------------------------------------------------
__all__ = [ 'getNumCols', 'convertToMatrixBlock', 'convert_caffemodel', 'convert_lmdb_to_jpeg', 'convertToNumPyArr', 'convertToPandasDF', 'SUPPORTED_TYPES' , 'convertToLabeledDF', 'convertImageToNumPyArr', 'getDatasetMean']
import numpy as np
import pandas as pd
import os
import math
from pyspark.context import SparkContext
from scipy.sparse import coo_matrix, spmatrix, csr_matrix
from .classloader import *
SUPPORTED_TYPES = (np.ndarray, pd.DataFrame, spmatrix)
DATASET_MEAN = {'VGG_ILSVRC_19_2014':[103.939, 116.779, 123.68]}
def getNumCols(numPyArr):
if numPyArr.ndim == 1:
return 1
else:
return numPyArr.shape[1]
def get_pretty_str(key, value):
return '\t"' + key + '": ' + str(value) + ',\n'
def save_tensor_csv(tensor, file_path, shouldTranspose):
    w = tensor.reshape(tensor.shape[0], -1)
if shouldTranspose:
w = w.T
np.savetxt(file_path, w, delimiter=',')
with open(file_path + '.mtd', 'w') as file:
file.write('{\n\t"data_type": "matrix",\n\t"value_type": "double",\n')
file.write(get_pretty_str('rows', w.shape[0]))
file.write(get_pretty_str('cols', w.shape[1]))
file.write(get_pretty_str('nnz', np.count_nonzero(w)))
file.write('\t"format": "csv",\n\t"description": {\n\t\t"author": "SystemML"\n\t}\n}\n')
def convert_caffemodel(sc, deploy_file, caffemodel_file, output_dir, format="binary", is_caffe_installed=False):
"""
Saves the weights and bias in the caffemodel file to output_dir in the specified format.
    This method does not require caffe to be installed.
Parameters
----------
sc: SparkContext
SparkContext
deploy_file: string
Path to the input network file
caffemodel_file: string
Path to the input caffemodel file
output_dir: string
Path to the output directory
format: string
Format of the weights and bias (can be binary, csv or text)
is_caffe_installed: bool
True if caffe is installed
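    Hypothetical invocation (the file paths are placeholders, not files that
    ship with SystemML):
        convert_caffemodel(sc, 'VGG_ILSVRC_19_layers_deploy.prototxt',
                           'VGG_ILSVRC_19_layers.caffemodel',
                           '/tmp/vgg19_weights', format='binary')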
"""
if is_caffe_installed:
if format != 'csv':
raise ValueError('The format ' + str(format) + ' is not supported when caffe is installed. Hint: Please specify format=csv')
import caffe
net = caffe.Net(deploy_file, caffemodel_file, caffe.TEST)
for layerName in net.params.keys():
num_parameters = len(net.params[layerName])
if num_parameters == 0:
continue
elif num_parameters == 2:
# Weights and Biases
layerType = net.layers[list(net._layer_names).index(layerName)].type
shouldTranspose = True if layerType == 'InnerProduct' else False
save_tensor_csv(net.params[layerName][0].data, os.path.join(output_dir, layerName + '_weight.mtx'), shouldTranspose)
save_tensor_csv(net.params[layerName][1].data, os.path.join(output_dir, layerName + '_bias.mtx'), shouldTranspose)
elif num_parameters == 1:
# Only Weight
layerType = net.layers[list(net._layer_names).index(layerName)].type
shouldTranspose = True if layerType == 'InnerProduct' else False
save_tensor_csv(net.params[layerName][0].data, os.path.join(output_dir, layerName + '_weight.mtx'), shouldTranspose)
else:
raise ValueError('Unsupported number of parameters:' + str(num_parameters))
else:
createJavaObject(sc, 'dummy')
utilObj = sc._jvm.org.apache.sysml.api.dl.Utils()
utilObj.saveCaffeModelFile(sc._jsc, deploy_file, caffemodel_file, output_dir, format)
def convert_lmdb_to_jpeg(lmdb_img_file, output_dir):
"""
    Saves the images in the lmdb file as jpeg in the output_dir. This method
    requires caffe to be installed along with the lmdb and cv2 packages.
    To install the cv2 package, run `pip install opencv-python`.
Parameters
----------
lmdb_img_file: string
Path to the input lmdb file
output_dir: string
Output directory for images (local filesystem)
"""
import lmdb, caffe, cv2
    lmdb_cursor = lmdb.open(lmdb_img_file, readonly=True).begin().cursor()
datum = caffe.proto.caffe_pb2.Datum()
i = 1
for _, value in lmdb_cursor:
datum.ParseFromString(value)
data = caffe.io.datum_to_array(datum)
output_file_path = os.path.join(output_dir, 'file_' + str(i) + '.jpg')
image = np.transpose(data, (1,2,0)) # CxHxW to HxWxC in cv2
cv2.imwrite(output_file_path, image)
i = i + 1
def convertToLabeledDF(sparkSession, X, y=None):
from pyspark.ml.feature import VectorAssembler
if y is not None:
pd1 = pd.DataFrame(X)
pd2 = pd.DataFrame(y, columns=['label'])
pdf = pd.concat([pd1, pd2], axis=1)
inputColumns = ['C' + str(i) for i in pd1.columns]
outputColumns = inputColumns + ['label']
else:
pdf = pd.DataFrame(X)
inputColumns = ['C' + str(i) for i in pdf.columns]
outputColumns = inputColumns
assembler = VectorAssembler(inputCols=inputColumns, outputCol='features')
out = assembler.transform(sparkSession.createDataFrame(pdf, outputColumns))
if y is not None:
return out.select('features', 'label')
else:
return out.select('features')
def _convertSPMatrixToMB(sc, src):
src = coo_matrix(src, dtype=np.float64)
numRows = src.shape[0]
numCols = src.shape[1]
data = src.data
row = src.row.astype(np.int32)
col = src.col.astype(np.int32)
nnz = len(src.col)
buf1 = bytearray(data.tostring())
buf2 = bytearray(row.tostring())
buf3 = bytearray(col.tostring())
createJavaObject(sc, 'dummy')
return sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertSciPyCOOToMB(buf1, buf2, buf3, numRows, numCols, nnz)
def _convertDenseMatrixToMB(sc, src):
numCols = getNumCols(src)
numRows = src.shape[0]
arr = src.ravel().astype(np.float64)
buf = bytearray(arr.tostring())
createJavaObject(sc, 'dummy')
return sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertPy4JArrayToMB(buf, numRows, numCols)
def _copyRowBlock(i, sc, ret, src, numRowsPerBlock, rlen, clen):
rowIndex = int(i / numRowsPerBlock)
tmp = src[i:min(i+numRowsPerBlock, rlen),]
mb = _convertSPMatrixToMB(sc, tmp) if isinstance(src, spmatrix) else _convertDenseMatrixToMB(sc, tmp)
sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.copyRowBlocks(mb, rowIndex, ret, numRowsPerBlock, rlen, clen)
return i
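# convertToMatrixBlock transfers a NumPy array or SciPy sparse matrix into a JVM
# MatrixBlock. Inputs whose single transfer would exceed maxSizeBlockInMB are
# copied row block by row block via _copyRowBlock above, so that each individual
# Py4J transfer stays small.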
def convertToMatrixBlock(sc, src, maxSizeBlockInMB=8):
if not isinstance(sc, SparkContext):
raise TypeError('sc needs to be of type SparkContext')
isSparse = True if isinstance(src, spmatrix) else False
src = np.asarray(src, dtype=np.float64) if not isSparse else src
if len(src.shape) != 2:
src_type = str(type(src).__name__)
raise TypeError('Expected 2-dimensional ' + src_type + ', instead passed ' + str(len(src.shape)) + '-dimensional ' + src_type)
# Ignoring sparsity for computing numRowsPerBlock for now
numRowsPerBlock = int(math.ceil((maxSizeBlockInMB*1000000) / (src.shape[1]*8)))
multiBlockTransfer = False if numRowsPerBlock >= src.shape[0] else True
if not multiBlockTransfer:
return _convertSPMatrixToMB(sc, src) if isSparse else _convertDenseMatrixToMB(sc, src)
else:
# Since coo_matrix does not have range indexing
src = csr_matrix(src) if isSparse else src
rlen = int(src.shape[0])
clen = int(src.shape[1])
ret = sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.allocateDenseOrSparse(rlen, clen, isSparse)
[ _copyRowBlock(i, sc, ret, src, numRowsPerBlock, rlen, clen) for i in range(0, src.shape[0], numRowsPerBlock) ]
sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.postProcessAfterCopying(ret)
return ret
def convertToNumPyArr(sc, mb):
if isinstance(sc, SparkContext):
numRows = mb.getNumRows()
numCols = mb.getNumColumns()
createJavaObject(sc, 'dummy')
buf = sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertMBtoPy4JDenseArr(mb)
return np.frombuffer(buf, count=numRows*numCols, dtype=np.float64).reshape((numRows, numCols))
else:
raise TypeError('sc needs to be of type SparkContext') # TODO: We can generalize this by creating py4j gateway ourselves
# Returns the mean of a model if defined otherwise None
def getDatasetMean(dataset_name):
"""
Parameters
----------
    dataset_name: Name of the dataset used to train the model. This is an artificial name based on the dataset the model was trained on.
Returns
-------
mean: Mean value of model if its defined in the list DATASET_MEAN else None.
"""
try:
mean = DATASET_MEAN[dataset_name.upper()]
except:
mean = None
return mean
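# Example: getDatasetMean('VGG_ILSVRC_19_2014') returns [103.939, 116.779, 123.68]
# (the only key defined in DATASET_MEAN above); any other name returns None.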
# Example usage: convertImageToNumPyArr(im, img_shape=(3, 224, 224), add_rotated_images=True, add_mirrored_images=True)
# The above call returns a numpy array of shape (6, 50176) in NCHW format
def convertImageToNumPyArr(im, img_shape=None, add_rotated_images=False, add_mirrored_images=False,
color_mode = 'RGB', mean=None):
## Input Parameters
    # color_mode: VGG models expect image data in BGR order, whereas most other
    #             models expect RGB; pass color_mode='BGR' to reorder the channels.
    # mean: per-channel mean value subtracted from every pixel. Defaults to None,
    #       in which case no mean subtraction is performed.
if img_shape is not None:
num_channels = img_shape[0]
size = (img_shape[1], img_shape[2])
else:
num_channels = 1 if im.mode == 'L' else 3
size = None
if num_channels != 1 and num_channels != 3:
raise ValueError('Expected the number of channels to be either 1 or 3')
from PIL import Image
if size is not None:
im = im.resize(size, Image.LANCZOS)
expected_mode = 'L' if num_channels == 1 else 'RGB'
if expected_mode is not im.mode:
im = im.convert(expected_mode)
def _im2NumPy(im):
if expected_mode == 'L':
return np.asarray(im.getdata()).reshape((1, -1))
else:
im = (np.array(im).astype(np.float))
# (H,W,C) -> (C,H,W)
im = im.transpose(2, 0, 1)
# RGB -> BGR
if color_mode == 'BGR':
im = im[...,::-1]
# Subtract Mean
if mean is not None:
for c in range(3):
im[:, :, c] = im[:, :, c] - mean[c]
# (C,H,W) --> (1, C*H*W)
return im.reshape((1, -1))
ret = _im2NumPy(im)
if add_rotated_images:
ret = np.vstack((ret, _im2NumPy(im.rotate(90)), _im2NumPy(im.rotate(180)), _im2NumPy(im.rotate(270)) ))
if add_mirrored_images:
ret = np.vstack((ret, _im2NumPy(im.transpose(Image.FLIP_LEFT_RIGHT)), _im2NumPy(im.transpose(Image.FLIP_TOP_BOTTOM))))
return ret
def convertToPandasDF(X):
if not isinstance(X, pd.DataFrame):
return pd.DataFrame(X, columns=['C' + str(i) for i in range(getNumCols(X))])
return X
| apache-2.0 | -3,674,486,655,585,421,300 | 6,770,813,311,362,848,000 | 39.986667 | 223 | 0.647365 | false |
jcass77/mopidy | mopidy/mpd/protocol/music_db.py | 2 | 17104 | from __future__ import absolute_import, unicode_literals
import functools
import itertools
from mopidy.models import Track
from mopidy.mpd import exceptions, protocol, translator
_SEARCH_MAPPING = {
'album': 'album',
'albumartist': 'albumartist',
'any': 'any',
'artist': 'artist',
'comment': 'comment',
'composer': 'composer',
'date': 'date',
'file': 'uri',
'filename': 'uri',
'genre': 'genre',
'performer': 'performer',
'title': 'track_name',
'track': 'track_no'}
_LIST_MAPPING = {
'album': 'album',
'albumartist': 'albumartist',
'artist': 'artist',
'composer': 'composer',
'date': 'date',
'genre': 'genre',
'performer': 'performer'}
def _query_from_mpd_search_parameters(parameters, mapping):
query = {}
parameters = list(parameters)
while parameters:
# TODO: does it matter that this is now case insensitive
field = mapping.get(parameters.pop(0).lower())
if not field:
raise exceptions.MpdArgError('incorrect arguments')
if not parameters:
raise ValueError
value = parameters.pop(0)
if value.strip():
query.setdefault(field, []).append(value)
return query
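# Rough illustration (the values are arbitrary): the MPD argument list
# ('artist', 'ABBA', 'album', 'Gold: Greatest Hits') parsed with
# _SEARCH_MAPPING yields {'artist': ['ABBA'], 'album': ['Gold: Greatest Hits']},
# which is the keyword-argument form expected by core.library.find_exact() and
# core.library.search() below.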
def _get_field(field, search_results):
return list(itertools.chain(*[getattr(r, field) for r in search_results]))
_get_albums = functools.partial(_get_field, 'albums')
_get_artists = functools.partial(_get_field, 'artists')
_get_tracks = functools.partial(_get_field, 'tracks')
def _album_as_track(album):
return Track(
uri=album.uri,
name='Album: ' + album.name,
artists=album.artists,
album=album,
date=album.date)
def _artist_as_track(artist):
return Track(
uri=artist.uri,
name='Artist: ' + artist.name,
artists=[artist])
@protocol.commands.add('count')
def count(context, *args):
"""
*musicpd.org, music database section:*
``count {TAG} {NEEDLE}``
Counts the number of songs and their total playtime in the db
matching ``TAG`` exactly.
*GMPC:*
- does not add quotes around the tag argument.
- use multiple tag-needle pairs to make more specific searches.
"""
try:
query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
except ValueError:
raise exceptions.MpdArgError('incorrect arguments')
results = context.core.library.find_exact(**query).get()
result_tracks = _get_tracks(results)
return [
('songs', len(result_tracks)),
('playtime', sum(track.length for track in result_tracks) / 1000),
]
@protocol.commands.add('find')
def find(context, *args):
"""
*musicpd.org, music database section:*
``find {TYPE} {WHAT}``
Finds songs in the db that are exactly ``WHAT``. ``TYPE`` can be any
tag supported by MPD, or one of the two special parameters - ``file``
to search by full path (relative to database root), and ``any`` to
match against all available tags. ``WHAT`` is what to find.
*GMPC:*
- does not add quotes around the field argument.
- also uses ``find album "[ALBUM]" artist "[ARTIST]"`` to list album
tracks.
*ncmpc:*
- does not add quotes around the field argument.
- capitalizes the type argument.
*ncmpcpp:*
- also uses the search type "date".
- uses "file" instead of "filename".
"""
try:
query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
except ValueError:
return
results = context.core.library.find_exact(**query).get()
result_tracks = []
if ('artist' not in query and
'albumartist' not in query and
'composer' not in query and
'performer' not in query):
result_tracks += [_artist_as_track(a) for a in _get_artists(results)]
if 'album' not in query:
result_tracks += [_album_as_track(a) for a in _get_albums(results)]
result_tracks += _get_tracks(results)
return translator.tracks_to_mpd_format(result_tracks)
@protocol.commands.add('findadd')
def findadd(context, *args):
"""
*musicpd.org, music database section:*
``findadd {TYPE} {WHAT}``
Finds songs in the db that are exactly ``WHAT`` and adds them to
current playlist. Parameters have the same meaning as for ``find``.
"""
try:
query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
except ValueError:
return
results = context.core.library.find_exact(**query).get()
context.core.tracklist.add(_get_tracks(results))
@protocol.commands.add('list')
def list_(context, *args):
"""
*musicpd.org, music database section:*
``list {TYPE} [ARTIST]``
Lists all tags of the specified type. ``TYPE`` should be ``album``,
``artist``, ``albumartist``, ``date``, or ``genre``.
``ARTIST`` is an optional parameter when type is ``album``,
``date``, or ``genre``. This filters the result list by an artist.
*Clarifications:*
The musicpd.org documentation for ``list`` is far from complete. The
command also supports the following variant:
``list {TYPE} {QUERY}``
Where ``QUERY`` applies to all ``TYPE``. ``QUERY`` is one or more pairs
of a field name and a value. If the ``QUERY`` consists of more than one
pair, the pairs are AND-ed together to find the result. Examples of
valid queries and what they should return:
``list "artist" "artist" "ABBA"``
List artists where the artist name is "ABBA". Response::
Artist: ABBA
OK
``list "album" "artist" "ABBA"``
Lists albums where the artist name is "ABBA". Response::
Album: More ABBA Gold: More ABBA Hits
Album: Absolute More Christmas
Album: Gold: Greatest Hits
OK
``list "artist" "album" "Gold: Greatest Hits"``
Lists artists where the album name is "Gold: Greatest Hits".
Response::
Artist: ABBA
OK
``list "artist" "artist" "ABBA" "artist" "TLC"``
Lists artists where the artist name is "ABBA" *and* "TLC". Should
never match anything. Response::
OK
``list "date" "artist" "ABBA"``
Lists dates where artist name is "ABBA". Response::
Date:
Date: 1992
Date: 1993
OK
``list "date" "artist" "ABBA" "album" "Gold: Greatest Hits"``
Lists dates where artist name is "ABBA" and album name is "Gold:
Greatest Hits". Response::
Date: 1992
OK
``list "genre" "artist" "The Rolling Stones"``
Lists genres where artist name is "The Rolling Stones". Response::
Genre:
Genre: Rock
OK
*GMPC:*
- does not add quotes around the field argument.
*ncmpc:*
- does not add quotes around the field argument.
- capitalizes the field argument.
"""
parameters = list(args)
if not parameters:
raise exceptions.MpdArgError('incorrect arguments')
field = parameters.pop(0).lower()
if field not in _LIST_MAPPING:
raise exceptions.MpdArgError('incorrect arguments')
if len(parameters) == 1:
if field != 'album':
raise exceptions.MpdArgError('should be "Album" for 3 arguments')
return _list_album(context, {'artist': parameters})
try:
query = _query_from_mpd_search_parameters(parameters, _LIST_MAPPING)
except exceptions.MpdArgError as e:
e.message = 'not able to parse args'
raise
except ValueError:
return
if field == 'artist':
return _list_artist(context, query)
if field == 'albumartist':
return _list_albumartist(context, query)
elif field == 'album':
return _list_album(context, query)
elif field == 'composer':
return _list_composer(context, query)
elif field == 'performer':
return _list_performer(context, query)
elif field == 'date':
return _list_date(context, query)
elif field == 'genre':
return _list_genre(context, query)
def _list_artist(context, query):
artists = set()
results = context.core.library.find_exact(**query).get()
for track in _get_tracks(results):
for artist in track.artists:
if artist.name:
artists.add(('Artist', artist.name))
return artists
def _list_albumartist(context, query):
albumartists = set()
results = context.core.library.find_exact(**query).get()
for track in _get_tracks(results):
if track.album:
for artist in track.album.artists:
if artist.name:
albumartists.add(('AlbumArtist', artist.name))
return albumartists
def _list_album(context, query):
albums = set()
results = context.core.library.find_exact(**query).get()
for track in _get_tracks(results):
if track.album and track.album.name:
albums.add(('Album', track.album.name))
return albums
def _list_composer(context, query):
composers = set()
results = context.core.library.find_exact(**query).get()
for track in _get_tracks(results):
for composer in track.composers:
if composer.name:
composers.add(('Composer', composer.name))
return composers
def _list_performer(context, query):
performers = set()
results = context.core.library.find_exact(**query).get()
for track in _get_tracks(results):
for performer in track.performers:
if performer.name:
performers.add(('Performer', performer.name))
return performers
def _list_date(context, query):
dates = set()
results = context.core.library.find_exact(**query).get()
for track in _get_tracks(results):
if track.date:
dates.add(('Date', track.date))
return dates
def _list_genre(context, query):
genres = set()
results = context.core.library.find_exact(**query).get()
for track in _get_tracks(results):
if track.genre:
genres.add(('Genre', track.genre))
return genres
@protocol.commands.add('listall')
def listall(context, uri=None):
"""
*musicpd.org, music database section:*
``listall [URI]``
Lists all songs and directories in ``URI``.
"""
result = []
for path, track_ref in context.browse(uri, lookup=False):
if not track_ref:
result.append(('directory', path))
else:
result.append(('file', track_ref.uri))
if not result:
raise exceptions.MpdNoExistError('Not found')
return result
@protocol.commands.add('listallinfo')
def listallinfo(context, uri=None):
"""
*musicpd.org, music database section:*
``listallinfo [URI]``
Same as ``listall``, except it also returns metadata info in the
same format as ``lsinfo``.
"""
result = []
for path, lookup_future in context.browse(uri):
if not lookup_future:
result.append(('directory', path))
else:
for track in lookup_future.get():
result.extend(translator.track_to_mpd_format(track))
return result
@protocol.commands.add('lsinfo')
def lsinfo(context, uri=None):
"""
*musicpd.org, music database section:*
``lsinfo [URI]``
Lists the contents of the directory ``URI``.
When listing the root directory, this currently returns the list of
stored playlists. This behavior is deprecated; use
``listplaylists`` instead.
MPD returns the same result, including both playlists and the files and
directories located at the root level, for both ``lsinfo``, ``lsinfo
""``, and ``lsinfo "/"``.
"""
result = []
for path, lookup_future in context.browse(uri, recursive=False):
if not lookup_future:
result.append(('directory', path.lstrip('/')))
else:
tracks = lookup_future.get()
if tracks:
result.extend(translator.track_to_mpd_format(tracks[0]))
if uri in (None, '', '/'):
result.extend(protocol.stored_playlists.listplaylists(context))
return result
@protocol.commands.add('rescan')
def rescan(context, uri=None):
"""
*musicpd.org, music database section:*
``rescan [URI]``
Same as ``update``, but also rescans unmodified files.
"""
return {'updating_db': 0} # TODO
@protocol.commands.add('search')
def search(context, *args):
"""
*musicpd.org, music database section:*
``search {TYPE} {WHAT} [...]``
Searches for any song that contains ``WHAT``. Parameters have the same
meaning as for ``find``, except that search is not case sensitive.
*GMPC:*
- does not add quotes around the field argument.
- uses the undocumented field ``any``.
- searches for multiple words like this::
search any "foo" any "bar" any "baz"
*ncmpc:*
- does not add quotes around the field argument.
- capitalizes the field argument.
*ncmpcpp:*
- also uses the search type "date".
- uses "file" instead of "filename".
"""
try:
query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
except ValueError:
return
results = context.core.library.search(**query).get()
artists = [_artist_as_track(a) for a in _get_artists(results)]
albums = [_album_as_track(a) for a in _get_albums(results)]
tracks = _get_tracks(results)
return translator.tracks_to_mpd_format(artists + albums + tracks)
@protocol.commands.add('searchadd')
def searchadd(context, *args):
"""
*musicpd.org, music database section:*
``searchadd {TYPE} {WHAT} [...]``
Searches for any song that contains ``WHAT`` in tag ``TYPE`` and adds
them to current playlist.
Parameters have the same meaning as for ``find``, except that search is
not case sensitive.
"""
try:
query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
except ValueError:
return
results = context.core.library.search(**query).get()
context.core.tracklist.add(_get_tracks(results))
@protocol.commands.add('searchaddpl')
def searchaddpl(context, *args):
"""
*musicpd.org, music database section:*
``searchaddpl {NAME} {TYPE} {WHAT} [...]``
Searches for any song that contains ``WHAT`` in tag ``TYPE`` and adds
them to the playlist named ``NAME``.
If a playlist by that name doesn't exist it is created.
Parameters have the same meaning as for ``find``, except that search is
not case sensitive.
"""
parameters = list(args)
if not parameters:
raise exceptions.MpdArgError('incorrect arguments')
playlist_name = parameters.pop(0)
try:
query = _query_from_mpd_search_parameters(parameters, _SEARCH_MAPPING)
except ValueError:
return
results = context.core.library.search(**query).get()
playlist = context.lookup_playlist_from_name(playlist_name)
if not playlist:
playlist = context.core.playlists.create(playlist_name).get()
tracks = list(playlist.tracks) + _get_tracks(results)
playlist = playlist.copy(tracks=tracks)
context.core.playlists.save(playlist)
@protocol.commands.add('update')
def update(context, uri=None):
"""
*musicpd.org, music database section:*
``update [URI]``
Updates the music database: find new files, remove deleted files,
update modified files.
``URI`` is a particular directory or song/file to update. If you do
not specify it, everything is updated.
Prints ``updating_db: JOBID`` where ``JOBID`` is a positive number
identifying the update job. You can read the current job id in the
``status`` response.
"""
return {'updating_db': 0} # TODO
# TODO: add at least reflection tests before adding NotImplemented version
# @protocol.commands.add('readcomments')
def readcomments(context, uri):
"""
*musicpd.org, music database section:*
``readcomments [URI]``
Read "comments" (i.e. key-value pairs) from the file specified by
"URI". This "URI" can be a path relative to the music directory or a
URL in the form "file:///foo/bar.ogg".
This command may be used to list metadata of remote files (e.g. URI
beginning with "http://" or "smb://").
The response consists of lines in the form "KEY: VALUE". Comments with
suspicious characters (e.g. newlines) are ignored silently.
The meaning of these depends on the codec, and not all decoder plugins
support it. For example, on Ogg files, this lists the Vorbis comments.
"""
pass
| apache-2.0 | -4,541,680,792,246,713,000 | -2,717,878,102,280,457,000 | 28.849913 | 79 | 0.611728 | false |
nicolasnoble/grpc | examples/python/interceptors/default_value/default_value_client_interceptor.py | 18 | 2058 | # Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interceptor that adds headers to outgoing requests."""
import grpc
class _ConcreteValue(grpc.Future):
def __init__(self, result):
self._result = result
def cancel(self):
return False
def cancelled(self):
return False
def running(self):
return False
def done(self):
return True
def result(self, timeout=None):
return self._result
def exception(self, timeout=None):
return None
def traceback(self, timeout=None):
return None
def add_done_callback(self, fn):
fn(self._result)
class DefaultValueClientInterceptor(grpc.UnaryUnaryClientInterceptor,
grpc.StreamUnaryClientInterceptor):
def __init__(self, value):
self._default = _ConcreteValue(value)
def _intercept_call(self, continuation, client_call_details,
request_or_iterator):
response = continuation(client_call_details, request_or_iterator)
return self._default if response.exception() else response
def intercept_unary_unary(self, continuation, client_call_details, request):
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_unary(self, continuation, client_call_details,
request_iterator):
return self._intercept_call(continuation, client_call_details,
request_iterator)
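# A minimal usage sketch (not part of the original example). The channel
# target and the ``EchoStub``/``echo_pb2`` names below are hypothetical
# placeholders for whatever generated stub the application actually uses.
#
# default_response = echo_pb2.EchoResponse(message='default')
# interceptor = DefaultValueClientInterceptor(default_response)
# channel = grpc.intercept_channel(
#     grpc.insecure_channel('localhost:50051'), interceptor)
# stub = EchoStub(channel)  # failed RPCs now resolve to ``default_response``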
| apache-2.0 | 3,219,438,996,686,002,700 | -1,524,938,002,435,010,600 | 30.181818 | 80 | 0.666181 | false |
takeshineshiro/heat | heat/objects/event.py | 7 | 3074 | # Copyright 2014 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Event object
"""
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from heat.db import api as db_api
from heat.objects import fields as heat_fields
class Event(base.VersionedObject, base.VersionedObjectDictCompat):
fields = {
'id': fields.IntegerField(),
'stack_id': fields.StringField(),
'uuid': fields.StringField(),
'resource_action': fields.StringField(nullable=True),
'resource_status': fields.StringField(nullable=True),
'resource_name': fields.StringField(nullable=True),
'physical_resource_id': fields.StringField(nullable=True),
'resource_status_reason': fields.StringField(nullable=True),
'resource_type': fields.StringField(nullable=True),
'resource_properties': heat_fields.JsonField(nullable=True),
'created_at': fields.DateTimeField(read_only=True),
'updated_at': fields.DateTimeField(nullable=True),
}
@staticmethod
def _from_db_object(context, event, db_event):
for field in event.fields:
event[field] = db_event[field]
event._context = context
event.obj_reset_changes()
return event
@classmethod
def get_by_id(cls, context, event_id):
db_event = db_api.event_get(context, event_id)
return cls._from_db_object(context, cls(context), db_event)
@classmethod
def get_all(cls, context):
return [cls._from_db_object(context, cls(), db_event)
for db_event in db_api.event_get_all(context)]
@classmethod
def get_all_by_tenant(cls, context, **kwargs):
return [cls._from_db_object(context, cls(), db_event)
for db_event in db_api.event_get_all_by_tenant(context,
**kwargs)]
@classmethod
def get_all_by_stack(cls, context, stack_id, **kwargs):
return [cls._from_db_object(context, cls(), db_event)
for db_event in db_api.event_get_all_by_stack(context,
stack_id,
**kwargs)]
@classmethod
def count_all_by_stack(cls, context, stack_id):
return db_api.event_count_all_by_stack(context, stack_id)
@classmethod
def create(cls, context, values):
return cls._from_db_object(context, cls(),
db_api.event_create(context, values))
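# Illustrative usage sketch (identifiers below are hypothetical):
#
# event = Event.get_by_id(context, event_id=42)
# stack_events = Event.get_all_by_stack(context, stack_id)
# total = Event.count_all_by_stack(context, stack_id)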
| apache-2.0 | -3,585,079,171,943,054,300 | 2,344,324,711,458,594,000 | 37.425 | 75 | 0.626545 | false |
capone212/crashtec | src/crashtec/symbolsmngr/bindownloader.py | 1 | 6478 | '''
Created on 27.03.2013
HB! :)
@author: capone
'''
# TODO: Style!!! rewrite it all
import urllib
import os
import socket
import urlparse
import logging
from crashtec.db.provider import routines as dbroutines
from crashtec.db.provider import filter
from crashtec.config import symbolsmngrconfig
from crashtec.utils.exceptions import CtGeneralError
from crashtec.utils.exceptions import CtCriticalError
import unzipBinaries
import dbmodel
import definitions
_logger = logging.getLogger('symbolsmngr')
# Strips dangerous info (like credentials)
def safe_log_url(url):
return url
class Cache(object):
def __init__(self, instance_name):
self.agent_name = instance_name
# Returns appropriate directory path if specified url in cache,
# returns None otherwise.
def lookup_binary_path(self, binary_url):
d = dbmodel
f = filter.FieldFilterFactory
stripped_url = self.strip_url(binary_url)
cursor = dbroutines.select_from(d.SYMBOLS_TABLE, db_filter=(
(f(d.SYMBOLS_URL) == stripped_url) &
(f(d.SYMBOLS_AGENT_ID) == self.agent_name))
)
record = cursor.fetch_one()
if record:
return record[d.SYMBOLS_LOCAL_DIR]
# Makes new record in cache
# Throws on error.
def register_binary(self, url, binary_dirrectory):
stripped_url = self.strip_url(url)
record = dbroutines.Record()
d = dbmodel
record[d.SYMBOLS_TRANSACTION_ID] = definitions.EMPTY_TRANSACTION
record[d.SYMBOLS_URL] = stripped_url
record[d.SYMBOLS_AGENT_ID] = self.agent_name
record[d.SYMBOLS_LOCAL_DIR] = binary_dirrectory
dbroutines.create_new_record(d.SYMBOLS_TABLE, record)
# Strip's all unpersistent (like server address) info from url.
def strip_url(self, binary_url):
parsed_url = urlparse.urlparse(binary_url)
if (not parsed_url):
raise CtCriticalError("Could not parse url: %s" %
safe_log_url(binary_url))
return parsed_url.path
class StorageProvider(object):
# Returns directory where binary may be placed
# the path is unique for passed url
# and guarantied to be empty (at least for first time).
# Throws on errors.
def __init__(self, config = symbolsmngrconfig):
self.config = config
def create_place_for_binary(self, binary_url):
parsed_url = urlparse.urlparse(binary_url)
if (not parsed_url):
raise CtCriticalError("Could not parse url: %s" %
safe_log_url(binary_url))
dirrectory = os.path.normpath(self.config.BINARY_LOCAL_ROOT +
parsed_url.path)
try:
if (not os.path.exists(dirrectory)):
os.makedirs(dirrectory)
except OSError as err:
raise CtGeneralError("Error while creating directory: %s" % err)
return dirrectory
class HttpDownloader(object):
# Downloads specified url to destination folder.
# Returns downloaded file path, throws on errors.
def download_binary(self, url, dest_folder):
self.reset_state()
time_out = socket.getdefaulttimeout()
parsed_url = urlparse.urlparse(url)
file_name = os.path.join(dest_folder, os.path.basename(parsed_url.path))
try:
# TODO: make it configurable
socket.setdefaulttimeout(10)
urllib.urlretrieve(url, file_name, self.reportHook);
except Exception as exc:
raise CtGeneralError("Failed to download %s error: %s" % (url, exc))
finally:
socket.setdefaulttimeout(time_out)
return file_name
def reset_state(self):
self._percents = 0;
def reportHook(self, downloaded, blockSize, totalFileSize):
blocks_amount = totalFileSize / blockSize
if (blocks_amount == 0):
return
percents = (downloaded * 100) / blocks_amount
# report every X percent downloaded
REPORT_EACH_PERCENT = 10
percents = (percents / REPORT_EACH_PERCENT) * REPORT_EACH_PERCENT;
if (percents != self._percents):
_logger.info("Downloaded %s%%", percents)
self._percents = percents
class ZipUnpacker(object):
# Unpacks specified binary package and returns destination folder.
# Throws on errors.
def unpack(self, package_file, destination):
_logger.info("Unzipping binary %s ..." % package_file)
binary_dirrectory = unzipBinaries.unzipBinary(package_file, destination)
if (not binary_dirrectory):
raise CtGeneralError("Can't extract zip file %s" % package_file)
return binary_dirrectory
class BinaryDownloader(object):
def __init__(self, cache, storage, downloader, unpacker):
self.cache = cache
self.storage = storage
self.downloader = downloader
self.unpacker = unpacker
# Downloads binaries from url, unpack them and return
# destination directory.
def download_and_unpack(self, url):
# Lookup cache for binary first.
cached_path = self.cache.lookup_binary_path(url)
if cached_path:
_logger.info("Detected binary dir: %s", cached_path)
return cached_path
_logger.debug("Start processing binary url : %s", safe_log_url(url))
destination_folder = self.storage.create_place_for_binary(url)
package_file = self.downloader.download_binary(url, destination_folder)
unpacked_binaries_folder = self.unpacker.unpack(package_file,
destination_folder)
self.drop_package_file(package_file)
self.cache.register_binary(url, unpacked_binaries_folder)
_logger.debug("Processing binary url finished : %s", safe_log_url(url))
return unpacked_binaries_folder
# Feel free to override it in subclasses
def drop_package_file(self, package_file):
# Delete package_file file
os.remove(package_file)
def craete_default_downloader(instance_name):
return BinaryDownloader(Cache(instance_name),
StorageProvider(),
HttpDownloader(),
ZipUnpacker())
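# Illustrative usage sketch (the agent name and URL are hypothetical):
#
# downloader = craete_default_downloader('symbols-agent-01')
# binary_dir = downloader.download_and_unpack(
#     'http://symbols.example.com/builds/app-1.0.zip')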
| gpl-3.0 | 1,200,889,554,640,104,200 | -5,147,921,495,179,860,000 | 36.662791 | 80 | 0.619636 | false |
kinverarity1/pyexperiment | tests/test_logger.py | 3 | 5395 | """Tests the state module of pyexperiment
Written by Peter Duerr
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
import unittest
import io
import logging
import tempfile
import os
import time
import re
from pyexperiment import log
from pyexperiment import Logger
from pyexperiment.utils.stdout_redirector import stdout_redirector
class TestLogger(unittest.TestCase):
"""Test basic logging
"""
def setUp(self):
"""Setup test fixture
"""
self.log_stream = io.StringIO()
Logger.CONSOLE_STREAM_HANDLER = logging.StreamHandler(self.log_stream)
log.reset_instance()
def tearDown(self):
"""Teardown test fixture
"""
Logger.CONSOLE_STREAM_HANDLER = logging.StreamHandler()
log.close()
log.reset_instance()
def test_fatal_console_logging(self):
"""Test the most basic console logging at the fatal level
"""
log.initialize(console_level=logging.INFO)
log.fatal("Test")
log.close()
# Something should be logged
self.assertNotEqual(len(self.log_stream.getvalue()), 0)
log.initialize(console_level=logging.DEBUG)
log.fatal("Test")
log.close()
# Something should be logged
self.assertNotEqual(len(self.log_stream.getvalue()), 0)
def test_info_console_logging(self):
"""Test the most basic console logging at the fatal level
"""
log.initialize(console_level=logging.FATAL)
log.info("Test")
log.close()
# Something should be logged
self.assertEqual(len(self.log_stream.getvalue()), 0)
log.initialize(console_level=logging.DEBUG)
log.info("Test")
log.close()
# Something should be logged
self.assertNotEqual(len(self.log_stream.getvalue()), 0)
def test_pre_init_logger(self):
"""Test that logging before initializing the logger works
"""
log.fatal("Test")
# Nothing should be logged yet
self.assertEqual(len(self.log_stream.getvalue()), 0)
log.initialize()
# Something should be logged here
self.assertNotEqual(len(self.log_stream.getvalue()), 0)
def test_file_logger_writes_to_file(self):
"""Test logging to file writes something to the log file
"""
with tempfile.NamedTemporaryFile() as temp:
log.initialize(filename=temp.name, no_backups=0)
log.fatal("Test")
log.close()
# Make sure file exists
self.assertTrue(os.path.isfile(temp.name))
lines = temp.readlines()
# There should be exactly one line in the file now
self.assertEqual(len(lines), 1)
def test_timing_logger_logs(self):
"""Test timing code logs a message
"""
# Nothing should be logged yet
self.assertEqual(len(self.log_stream.getvalue()), 0)
log.initialize()
# Still, nothing should be logged yet
self.assertEqual(len(self.log_stream.getvalue()), 0)
with log.timed(level=logging.FATAL):
_ = 1 + 1
log.close()
# Something should be logged
self.assertNotEqual(len(self.log_stream.getvalue()), 0)
def test_print_timings_prints(self):
"""Test timing code and printing really prints a message
"""
buf = io.StringIO()
# Nothing should be logged yet
self.assertEqual(len(self.log_stream.getvalue()), 0)
log.initialize()
# Still, nothing should be logged yet
self.assertEqual(len(self.log_stream.getvalue()), 0)
with log.timed(level=logging.FATAL):
_ = 1 + 1
with stdout_redirector(buf):
log.print_timings()
# Something should be printed
self.assertNotEqual(len(buf.getvalue()), 0)
log.close()
# Something should be logged
self.assertNotEqual(len(self.log_stream.getvalue()), 0)
def test_print_timings_correct(self):
"""Test timing is about right
"""
buf = io.StringIO()
# Nothing should be logged yet
self.assertEqual(len(self.log_stream.getvalue()), 0)
log.initialize()
# Still, nothing should be logged yet
self.assertEqual(len(self.log_stream.getvalue()), 0)
for _ in range(3):
with log.timed("Foo", level=logging.FATAL):
time.sleep(0.01)
with stdout_redirector(buf):
log.print_timings()
# Should print correct stats
self.assertRegexpMatches(buf.getvalue(), r'\'Foo\'')
self.assertRegexpMatches(buf.getvalue(), r'3 times')
self.assertRegexpMatches(buf.getvalue(), r'total = 0.03')
self.assertRegexpMatches(buf.getvalue(), r'median = 0.01')
log.close()
# Correct timings should be logged three times
self.assertRegexpMatches(self.log_stream.getvalue(), r'Foo')
self.assertEqual(len(re.findall(r'Foo',
self.log_stream.getvalue())), 3)
self.assertRegexpMatches(self.log_stream.getvalue(), r'took 0.01')
self.assertEqual(len(re.findall(r'took 0.01',
self.log_stream.getvalue())), 3)
| mit | 7,434,859,055,077,458,000 | 374,648,027,598,117,100 | 30.366279 | 78 | 0.609082 | false |
piiswrong/mxnet | cpp-package/scripts/lint.py | 41 | 7350 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=protected-access, unused-variable, locally-disabled, redefined-variable-type
"""Lint helper to generate lint summary of source.
Copyright by Contributors
"""
import codecs
import sys
import re
import os
import cpplint
from cpplint import _cpplint_state
from pylint import epylint
CXX_SUFFIX = set(['cc', 'c', 'cpp', 'h', 'cu', 'hpp'])
PYTHON_SUFFIX = set(['py'])
class LintHelper(object):
"""Class to help runing the lint and records summary"""
@staticmethod
def _print_summary_map(strm, result_map, ftype):
"""Print summary of certain result map."""
if len(result_map) == 0:
return 0
npass = len([x for k, x in result_map.iteritems() if len(x) == 0])
strm.write('=====%d/%d %s files passed check=====\n' % (npass, len(result_map), ftype))
for fname, emap in result_map.iteritems():
if len(emap) == 0:
continue
strm.write('%s: %d Errors of %d Categories map=%s\n' % (
fname, sum(emap.values()), len(emap), str(emap)))
return len(result_map) - npass
def __init__(self):
self.project_name = None
self.cpp_header_map = {}
self.cpp_src_map = {}
self.python_map = {}
pylint_disable = ['superfluous-parens',
'too-many-instance-attributes',
'too-few-public-methods']
# setup pylint
self.pylint_opts = ['--extension-pkg-whitelist=numpy',
'--disable=' + ','.join(pylint_disable)]
self.pylint_cats = set(['error', 'warning', 'convention', 'refactor'])
# setup cpp lint
cpplint_args = ['.', '--extensions=' + (','.join(CXX_SUFFIX))]
_ = cpplint.ParseArguments(cpplint_args)
cpplint._SetFilters(','.join(['-build/c++11',
'-build/namespaces',
'-build/include',
'-build/header_guard',
'+build/include_what_you_use',
'+build/include_order']))
cpplint._SetCountingStyle('toplevel')
cpplint._line_length = 100
def process_cpp(self, path, suffix):
"""Process a cpp file."""
_cpplint_state.ResetErrorCounts()
cpplint.ProcessFile(str(path), _cpplint_state.verbose_level)
_cpplint_state.PrintErrorCounts()
errors = _cpplint_state.errors_by_category.copy()
if suffix == 'h':
self.cpp_header_map[str(path)] = errors
else:
self.cpp_src_map[str(path)] = errors
def process_python(self, path):
"""Process a python file."""
(pylint_stdout, pylint_stderr) = epylint.py_run(
' '.join([str(path)] + self.pylint_opts), return_std=True)
emap = {}
print pylint_stderr.read()
for line in pylint_stdout:
sys.stderr.write(line)
key = line.split(':')[-1].split('(')[0].strip()
if key not in self.pylint_cats:
continue
if key not in emap:
emap[key] = 1
else:
emap[key] += 1
sys.stderr.write('\n')
self.python_map[str(path)] = emap
def print_summary(self, strm):
"""Print summary of lint."""
nerr = 0
nerr += LintHelper._print_summary_map(strm, self.cpp_header_map, 'cpp-header')
        nerr += LintHelper._print_summary_map(strm, self.cpp_src_map, 'cpp-source')
nerr += LintHelper._print_summary_map(strm, self.python_map, 'python')
if nerr == 0:
strm.write('All passed!\n')
else:
strm.write('%d files failed lint\n' % nerr)
return nerr
# singleton helper for lint check
_HELPER = LintHelper()
def get_header_guard_dmlc(filename):
"""Get Header Guard Convention for DMLC Projects.
For headers in include, directly use the path
For headers in src, use project name plus path
Examples: with project-name = dmlc
      include/dmlc/timer.h -> DMLC_TIMER_H_
src/io/libsvm_parser.h -> DMLC_IO_LIBSVM_PARSER_H_
"""
fileinfo = cpplint.FileInfo(filename)
file_path_from_root = fileinfo.RepositoryName()
inc_list = ['include', 'api', 'wrapper']
if file_path_from_root.find('src/') != -1 and _HELPER.project_name is not None:
idx = file_path_from_root.find('src/')
file_path_from_root = _HELPER.project_name + file_path_from_root[idx + 3:]
else:
for spath in inc_list:
prefix = spath + os.sep
if file_path_from_root.startswith(prefix):
file_path_from_root = re.sub('^' + prefix, '', file_path_from_root)
break
return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_'
cpplint.GetHeaderGuardCPPVariable = get_header_guard_dmlc
def process(fname, allow_type):
"""Process a file."""
fname = str(fname)
# HACK: ignore op.h which is automatically generated
if fname.endswith('op.h'):
return
arr = fname.rsplit('.', 1)
if fname.find('#') != -1 or arr[-1] not in allow_type:
return
if arr[-1] in CXX_SUFFIX:
_HELPER.process_cpp(fname, arr[-1])
if arr[-1] in PYTHON_SUFFIX:
_HELPER.process_python(fname)
def main():
"""Main entry function."""
if len(sys.argv) < 3:
        print('Usage: <project-name> <filetype> <list-of-paths to traverse>')
print('\tfiletype can be python/cpp/all')
exit(-1)
_HELPER.project_name = sys.argv[1]
file_type = sys.argv[2]
allow_type = []
if file_type == 'python' or file_type == 'all':
allow_type += [x for x in PYTHON_SUFFIX]
if file_type == 'cpp' or file_type == 'all':
allow_type += [x for x in CXX_SUFFIX]
allow_type = set(allow_type)
if os.name != 'nt':
sys.stderr = codecs.StreamReaderWriter(sys.stderr,
codecs.getreader('utf8'),
codecs.getwriter('utf8'),
'replace')
for path in sys.argv[3:]:
if os.path.isfile(path):
process(path, allow_type)
else:
for root, dirs, files in os.walk(path):
for name in files:
process(os.path.join(root, name), allow_type)
nerr = _HELPER.print_summary(sys.stderr)
sys.exit(nerr > 0)
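# Example invocation (project name and paths are illustrative):
#   python lint.py mxnet all include src scripts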
if __name__ == '__main__':
main()
| apache-2.0 | -6,601,867,443,183,231,000 | -3,355,820,077,653,228,000 | 37.28125 | 95 | 0.571565 | false |
BMJHayward/django | django/utils/translation/trans_null.py | 467 | 1408 | # These are versions of the functions in django.utils.translation.trans_real
# that don't actually do anything. This is purely for performance, so that
# settings.USE_I18N = False can use this module rather than trans_real.py.
from django.conf import settings
from django.utils.encoding import force_text
def ngettext(singular, plural, number):
if number == 1:
return singular
return plural
ngettext_lazy = ngettext
def ungettext(singular, plural, number):
return force_text(ngettext(singular, plural, number))
def pgettext(context, message):
return ugettext(message)
def npgettext(context, singular, plural, number):
return ungettext(singular, plural, number)
activate = lambda x: None
deactivate = deactivate_all = lambda: None
get_language = lambda: settings.LANGUAGE_CODE
get_language_bidi = lambda: settings.LANGUAGE_CODE in settings.LANGUAGES_BIDI
check_for_language = lambda x: True
def gettext(message):
return message
def ugettext(message):
return force_text(gettext(message))
gettext_noop = gettext_lazy = _ = gettext
def to_locale(language):
p = language.find('-')
if p >= 0:
return language[:p].lower() + '_' + language[p + 1:].upper()
else:
return language.lower()
def get_language_from_request(request, check_path=False):
return settings.LANGUAGE_CODE
def get_language_from_path(request):
return None
| bsd-3-clause | -64,238,705,058,105,060 | 153,410,276,095,928,800 | 23.701754 | 77 | 0.721591 | false |
cloudera/hue | desktop/core/ext-py/boto-2.46.1/boto/services/result.py | 153 | 5596 | #!/usr/bin/env python
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
from datetime import datetime, timedelta
from boto.utils import parse_ts
import boto
class ResultProcessor(object):
LogFileName = 'log.csv'
def __init__(self, batch_name, sd, mimetype_files=None):
self.sd = sd
self.batch = batch_name
self.log_fp = None
self.num_files = 0
self.total_time = 0
self.min_time = timedelta.max
self.max_time = timedelta.min
self.earliest_time = datetime.max
self.latest_time = datetime.min
self.queue = self.sd.get_obj('output_queue')
self.domain = self.sd.get_obj('output_domain')
def calculate_stats(self, msg):
start_time = parse_ts(msg['Service-Read'])
end_time = parse_ts(msg['Service-Write'])
elapsed_time = end_time - start_time
if elapsed_time > self.max_time:
self.max_time = elapsed_time
if elapsed_time < self.min_time:
self.min_time = elapsed_time
self.total_time += elapsed_time.seconds
if start_time < self.earliest_time:
self.earliest_time = start_time
if end_time > self.latest_time:
self.latest_time = end_time
def log_message(self, msg, path):
keys = sorted(msg.keys())
if not self.log_fp:
self.log_fp = open(os.path.join(path, self.LogFileName), 'a')
line = ','.join(keys)
self.log_fp.write(line+'\n')
values = []
for key in keys:
value = msg[key]
if value.find(',') > 0:
value = '"%s"' % value
values.append(value)
line = ','.join(values)
self.log_fp.write(line+'\n')
def process_record(self, record, path, get_file=True):
self.log_message(record, path)
self.calculate_stats(record)
outputs = record['OutputKey'].split(',')
if 'OutputBucket' in record:
bucket = boto.lookup('s3', record['OutputBucket'])
else:
bucket = boto.lookup('s3', record['Bucket'])
for output in outputs:
if get_file:
key_name = output.split(';')[0]
key = bucket.lookup(key_name)
file_name = os.path.join(path, key_name)
print('retrieving file: %s to %s' % (key_name, file_name))
key.get_contents_to_filename(file_name)
self.num_files += 1
def get_results_from_queue(self, path, get_file=True, delete_msg=True):
m = self.queue.read()
while m:
if 'Batch' in m and m['Batch'] == self.batch:
self.process_record(m, path, get_file)
if delete_msg:
self.queue.delete_message(m)
m = self.queue.read()
def get_results_from_domain(self, path, get_file=True):
rs = self.domain.query("['Batch'='%s']" % self.batch)
for item in rs:
self.process_record(item, path, get_file)
def get_results_from_bucket(self, path):
bucket = self.sd.get_obj('output_bucket')
if bucket:
print('No output queue or domain, just retrieving files from output_bucket')
for key in bucket:
file_name = os.path.join(path, key)
print('retrieving file: %s to %s' % (key, file_name))
key.get_contents_to_filename(file_name)
                self.num_files += 1
def get_results(self, path, get_file=True, delete_msg=True):
if not os.path.isdir(path):
os.mkdir(path)
if self.queue:
self.get_results_from_queue(path, get_file)
elif self.domain:
self.get_results_from_domain(path, get_file)
else:
self.get_results_from_bucket(path)
if self.log_fp:
self.log_fp.close()
print('%d results successfully retrieved.' % self.num_files)
if self.num_files > 0:
self.avg_time = float(self.total_time)/self.num_files
print('Minimum Processing Time: %d' % self.min_time.seconds)
print('Maximum Processing Time: %d' % self.max_time.seconds)
print('Average Processing Time: %f' % self.avg_time)
self.elapsed_time = self.latest_time-self.earliest_time
print('Elapsed Time: %d' % self.elapsed_time.seconds)
tput = 1.0 / ((self.elapsed_time.seconds/60.0) / self.num_files)
print('Throughput: %f transactions / minute' % tput)
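# Illustrative usage sketch (the batch name, service definition, and output
# path are hypothetical):
#
# processor = ResultProcessor('batch-2009-06-01', service_def)
# processor.get_results('/tmp/results', get_file=True, delete_msg=True)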
| apache-2.0 | 7,289,633,838,137,786,000 | 1,581,706,853,125,947,000 | 40.451852 | 88 | 0.602395 | false |
ArianaGashi/Techstitution | venv/lib/python2.7/site-packages/jinja2/debug.py | 335 | 11553 | # -*- coding: utf-8 -*-
"""
jinja2.debug
~~~~~~~~~~~~
Implements the debug interface for Jinja. This module does some pretty
ugly stuff with the Python traceback system in order to achieve tracebacks
with correct line numbers, locals and contents.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import traceback
from types import TracebackType, CodeType
from jinja2.utils import missing, internal_code
from jinja2.exceptions import TemplateSyntaxError
from jinja2._compat import iteritems, reraise, PY2
# on pypy we can take advantage of transparent proxies
try:
from __pypy__ import tproxy
except ImportError:
tproxy = None
# how does the raise helper look like?
try:
exec("raise TypeError, 'foo'")
except SyntaxError:
raise_helper = 'raise __jinja_exception__[1]'
except TypeError:
raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
class TracebackFrameProxy(object):
"""Proxies a traceback frame."""
def __init__(self, tb):
self.tb = tb
self._tb_next = None
@property
def tb_next(self):
return self._tb_next
def set_next(self, next):
if tb_set_next is not None:
try:
tb_set_next(self.tb, next and next.tb or None)
except Exception:
# this function can fail due to all the hackery it does
# on various python implementations. We just catch errors
# down and ignore them if necessary.
pass
self._tb_next = next
@property
def is_jinja_frame(self):
return '__jinja_template__' in self.tb.tb_frame.f_globals
def __getattr__(self, name):
return getattr(self.tb, name)
def make_frame_proxy(frame):
proxy = TracebackFrameProxy(frame)
if tproxy is None:
return proxy
def operation_handler(operation, *args, **kwargs):
if operation in ('__getattribute__', '__getattr__'):
return getattr(proxy, args[0])
elif operation == '__setattr__':
proxy.__setattr__(*args, **kwargs)
else:
return getattr(proxy, operation)(*args, **kwargs)
return tproxy(TracebackType, operation_handler)
class ProcessedTraceback(object):
"""Holds a Jinja preprocessed traceback for printing or reraising."""
def __init__(self, exc_type, exc_value, frames):
assert frames, 'no frames for this traceback?'
self.exc_type = exc_type
self.exc_value = exc_value
self.frames = frames
# newly concatenate the frames (which are proxies)
prev_tb = None
for tb in self.frames:
if prev_tb is not None:
prev_tb.set_next(tb)
prev_tb = tb
prev_tb.set_next(None)
def render_as_text(self, limit=None):
"""Return a string with the traceback."""
lines = traceback.format_exception(self.exc_type, self.exc_value,
self.frames[0], limit=limit)
return ''.join(lines).rstrip()
def render_as_html(self, full=False):
"""Return a unicode string with the traceback as rendered HTML."""
from jinja2.debugrenderer import render_traceback
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
@property
def is_template_syntax_error(self):
"""`True` if this is a template syntax error."""
return isinstance(self.exc_value, TemplateSyntaxError)
@property
def exc_info(self):
"""Exception info tuple with a proxy around the frame objects."""
return self.exc_type, self.exc_value, self.frames[0]
@property
def standard_exc_info(self):
"""Standard python exc_info for re-raising"""
tb = self.frames[0]
# the frame will be an actual traceback (or transparent proxy) if
# we are on pypy or a python implementation with support for tproxy
if type(tb) is not TracebackType:
tb = tb.tb
return self.exc_type, self.exc_value, tb
def make_traceback(exc_info, source_hint=None):
"""Creates a processed traceback object from the exc_info."""
exc_type, exc_value, tb = exc_info
if isinstance(exc_value, TemplateSyntaxError):
exc_info = translate_syntax_error(exc_value, source_hint)
initial_skip = 0
else:
initial_skip = 1
return translate_exception(exc_info, initial_skip)
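# Rough usage sketch (mirrors how Jinja's Environment.handle_exception uses
# this module; the try/except body is hypothetical):
#
# try:
#     ...  # render a template
# except Exception:
#     processed = make_traceback(sys.exc_info(), source_hint=None)
#     reraise(*processed.standard_exc_info)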
def translate_syntax_error(error, source=None):
"""Rewrites a syntax error to please traceback systems."""
error.source = source
error.translated = True
exc_info = (error.__class__, error, None)
filename = error.filename
if filename is None:
filename = '<unknown>'
return fake_exc_info(exc_info, filename, error.lineno)
def translate_exception(exc_info, initial_skip=0):
"""If passed an exc_info it will automatically rewrite the exceptions
all the way down to the correct line numbers and frames.
"""
tb = exc_info[2]
frames = []
# skip some internal frames if wanted
for x in range(initial_skip):
if tb is not None:
tb = tb.tb_next
initial_tb = tb
while tb is not None:
# skip frames decorated with @internalcode. These are internal
# calls we can't avoid and that are useless in template debugging
# output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
# save a reference to the next frame if we override the current
# one with a faked one.
next = tb.tb_next
# fake template exceptions
template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
lineno)[2]
frames.append(make_frame_proxy(tb))
tb = next
# if we don't have any exceptions in the frames left, we have to
# reraise it unchanged.
# XXX: can we backup here? when could this happen?
if not frames:
reraise(exc_info[0], exc_info[1], exc_info[2])
return ProcessedTraceback(exc_info[0], exc_info[1], frames)
def fake_exc_info(exc_info, filename, lineno):
"""Helper for `translate_exception`."""
exc_type, exc_value, tb = exc_info
# figure the real context out
if tb is not None:
real_locals = tb.tb_frame.f_locals.copy()
ctx = real_locals.get('context')
if ctx:
locals = ctx.get_all()
else:
locals = {}
for name, value in iteritems(real_locals):
if name.startswith('l_') and value is not missing:
locals[name[2:]] = value
# if there is a local called __jinja_exception__, we get
# rid of it to not break the debug functionality.
locals.pop('__jinja_exception__', None)
else:
locals = {}
    # assemble the fake globals we need
globals = {
'__name__': filename,
'__file__': filename,
'__jinja_exception__': exc_info[:2],
# we don't want to keep the reference to the template around
# to not cause circular dependencies, but we mark it as Jinja
# frame for the ProcessedTraceback
'__jinja_template__': None
}
# and fake the exception
code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
# if it's possible, change the name of the code. This won't work
# on some python environments such as google appengine
try:
if tb is None:
location = 'template'
else:
function = tb.tb_frame.f_code.co_name
if function == 'root':
location = 'top-level template code'
elif function.startswith('block_'):
location = 'block "%s"' % function[6:]
else:
location = 'template'
if PY2:
code = CodeType(0, code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, code.co_consts,
code.co_names, code.co_varnames, filename,
location, code.co_firstlineno,
code.co_lnotab, (), ())
else:
code = CodeType(0, code.co_kwonlyargcount,
code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, code.co_consts,
code.co_names, code.co_varnames, filename,
location, code.co_firstlineno,
code.co_lnotab, (), ())
except Exception as e:
pass
# execute the code and catch the new traceback
try:
exec(code, globals, locals)
except:
exc_info = sys.exc_info()
new_tb = exc_info[2].tb_next
# return without this frame
return exc_info[:2] + (new_tb,)
def _init_ugly_crap():
"""This function implements a few ugly things so that we can patch the
traceback objects. The function returned allows resetting `tb_next` on
    any Python traceback object. Do not attempt to use this on non-CPython
    interpreters.
"""
import ctypes
from types import TracebackType
if PY2:
# figure out size of _Py_ssize_t for Python 2:
if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
_Py_ssize_t = ctypes.c_int64
else:
_Py_ssize_t = ctypes.c_int
else:
# platform ssize_t on Python 3
_Py_ssize_t = ctypes.c_ssize_t
# regular python
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# python with trace
if hasattr(sys, 'getobjects'):
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('_ob_next', ctypes.POINTER(_PyObject)),
('_ob_prev', ctypes.POINTER(_PyObject)),
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
class _Traceback(_PyObject):
pass
_Traceback._fields_ = [
('tb_next', ctypes.POINTER(_Traceback)),
('tb_frame', ctypes.POINTER(_PyObject)),
('tb_lasti', ctypes.c_int),
('tb_lineno', ctypes.c_int)
]
def tb_set_next(tb, next):
"""Set the tb_next attribute of a traceback object."""
if not (isinstance(tb, TracebackType) and
(next is None or isinstance(next, TracebackType))):
raise TypeError('tb_set_next arguments must be traceback objects')
obj = _Traceback.from_address(id(tb))
if tb.tb_next is not None:
old = _Traceback.from_address(id(tb.tb_next))
old.ob_refcnt -= 1
if next is None:
obj.tb_next = ctypes.POINTER(_Traceback)()
else:
next = _Traceback.from_address(id(next))
next.ob_refcnt += 1
obj.tb_next = ctypes.pointer(next)
return tb_set_next
# try to get a tb_set_next implementation if we don't have transparent
# proxies.
tb_set_next = None
if tproxy is None:
try:
tb_set_next = _init_ugly_crap()
except:
pass
del _init_ugly_crap
| cc0-1.0 | -3,678,230,417,984,152,600 | -3,889,917,598,585,294,000 | 32.008571 | 78 | 0.586168 | false |
Wuteyan/VTK | Examples/Modelling/Python/constrainedDelaunay.py | 15 | 4503 | #!/usr/bin/env python
# This example demonstrates how to use a constraint polygon in
# Delaunay triangulation.
import vtk
from vtk.util.colors import peacock
# Generate the input points and constrained edges/polygons.
points = vtk.vtkPoints()
points.InsertPoint(0, 1, 4, 0)
points.InsertPoint(1, 3, 4, 0)
points.InsertPoint(2, 7, 4, 0)
points.InsertPoint(3, 11, 4, 0)
points.InsertPoint(4, 13, 4, 0)
points.InsertPoint(5, 13, 8, 0)
points.InsertPoint(6, 13, 12, 0)
points.InsertPoint(7, 10, 12, 0)
points.InsertPoint(8, 7, 12, 0)
points.InsertPoint(9, 4, 12, 0)
points.InsertPoint(10, 1, 12, 0)
points.InsertPoint(11, 1, 8, 0)
points.InsertPoint(12, 3.5, 5, 0)
points.InsertPoint(13, 4.5, 5, 0)
points.InsertPoint(14, 5.5, 8, 0)
points.InsertPoint(15, 6.5, 8, 0)
points.InsertPoint(16, 6.5, 5, 0)
points.InsertPoint(17, 7.5, 5, 0)
points.InsertPoint(18, 7.5, 8, 0)
points.InsertPoint(19, 9, 8, 0)
points.InsertPoint(20, 9, 5, 0)
points.InsertPoint(21, 10, 5, 0)
points.InsertPoint(22, 10, 7, 0)
points.InsertPoint(23, 11, 5, 0)
points.InsertPoint(24, 12, 5, 0)
points.InsertPoint(25, 10.5, 8, 0)
points.InsertPoint(26, 12, 11, 0)
points.InsertPoint(27, 11, 11, 0)
points.InsertPoint(28, 10, 9, 0)
points.InsertPoint(29, 10, 11, 0)
points.InsertPoint(30, 9, 11, 0)
points.InsertPoint(31, 9, 9, 0)
points.InsertPoint(32, 7.5, 9, 0)
points.InsertPoint(33, 7.5, 11, 0)
points.InsertPoint(34, 6.5, 11, 0)
points.InsertPoint(35, 6.5, 9, 0)
points.InsertPoint(36, 5, 9, 0)
points.InsertPoint(37, 4, 6, 0)
points.InsertPoint(38, 3, 9, 0)
points.InsertPoint(39, 2, 9, 0)
polys = vtk.vtkCellArray()
polys.InsertNextCell(12)
polys.InsertCellPoint(0)
polys.InsertCellPoint(1)
polys.InsertCellPoint(2)
polys.InsertCellPoint(3)
polys.InsertCellPoint(4)
polys.InsertCellPoint(5)
polys.InsertCellPoint(6)
polys.InsertCellPoint(7)
polys.InsertCellPoint(8)
polys.InsertCellPoint(9)
polys.InsertCellPoint(10)
polys.InsertCellPoint(11)
polys.InsertNextCell(28)
polys.InsertCellPoint(39)
polys.InsertCellPoint(38)
polys.InsertCellPoint(37)
polys.InsertCellPoint(36)
polys.InsertCellPoint(35)
polys.InsertCellPoint(34)
polys.InsertCellPoint(33)
polys.InsertCellPoint(32)
polys.InsertCellPoint(31)
polys.InsertCellPoint(30)
polys.InsertCellPoint(29)
polys.InsertCellPoint(28)
polys.InsertCellPoint(27)
polys.InsertCellPoint(26)
polys.InsertCellPoint(25)
polys.InsertCellPoint(24)
polys.InsertCellPoint(23)
polys.InsertCellPoint(22)
polys.InsertCellPoint(21)
polys.InsertCellPoint(20)
polys.InsertCellPoint(19)
polys.InsertCellPoint(18)
polys.InsertCellPoint(17)
polys.InsertCellPoint(16)
polys.InsertCellPoint(15)
polys.InsertCellPoint(14)
polys.InsertCellPoint(13)
polys.InsertCellPoint(12)
polyData = vtk.vtkPolyData()
polyData.SetPoints(points)
polyData.SetPolys(polys)
# Notice this trick. The SetInput() method accepts a vtkPolyData that
# is also the input to the Delaunay filter. The points of the
# vtkPolyData are used to generate the triangulation; the polygons are
# used to create a constraint region. The polygons are very carefully
# created and ordered in the right direction to indicate inside and
# outside of the polygon.
delny = vtk.vtkDelaunay2D()
delny.SetInput(polyData)
delny.SetSource(polyData)
mapMesh = vtk.vtkPolyDataMapper()
mapMesh.SetInputConnection(delny.GetOutputPort())
meshActor = vtk.vtkActor()
meshActor.SetMapper(mapMesh)
# Now we just pretty the mesh up with tubed edges and balls at the
# vertices.
extract = vtk.vtkExtractEdges()
extract.SetInputConnection(delny.GetOutputPort())
tubes = vtk.vtkTubeFilter()
tubes.SetInputConnection(extract.GetOutputPort())
tubes.SetRadius(0.1)
tubes.SetNumberOfSides(6)
mapEdges = vtk.vtkPolyDataMapper()
mapEdges.SetInputConnection(tubes.GetOutputPort())
edgeActor = vtk.vtkActor()
edgeActor.SetMapper(mapEdges)
edgeActor.GetProperty().SetColor(peacock)
edgeActor.GetProperty().SetSpecularColor(1, 1, 1)
edgeActor.GetProperty().SetSpecular(0.3)
edgeActor.GetProperty().SetSpecularPower(20)
edgeActor.GetProperty().SetAmbient(0.2)
edgeActor.GetProperty().SetDiffuse(0.8)
# Create the rendering window, renderer, and interactive renderer
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
ren.AddActor(meshActor)
ren.AddActor(edgeActor)
ren.SetBackground(0, 0, 0)
renWin.SetSize(450, 300)
ren.ResetCamera()
ren.GetActiveCamera().Zoom(2)
iren.Initialize()
renWin.Render()
iren.Start()
| bsd-3-clause | 4,920,181,719,797,961,000 | -5,912,985,003,883,341,000 | 29.02 | 70 | 0.775705 | false |
GitAngel/django | tests/field_subclassing/fields.py | 35 | 2704 | from __future__ import unicode_literals
import json
import warnings
from django.db import models
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text, python_2_unicode_compatible
# Catch warning about subfieldbase -- remove in Django 1.10
warnings.filterwarnings(
'ignore',
'SubfieldBase has been deprecated. Use Field.from_db_value instead.',
RemovedInDjango110Warning
)
@python_2_unicode_compatible
class Small(object):
"""
A simple class to show that non-trivial Python objects can be used as
attributes.
"""
def __init__(self, first, second):
self.first, self.second = first, second
def __str__(self):
return '%s%s' % (force_text(self.first), force_text(self.second))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.first == other.first and self.second == other.second
return False
class SmallField(six.with_metaclass(models.SubfieldBase, models.Field)):
"""
Turns the "Small" class into a Django field. Because of the similarities
with normal character fields and the fact that Small.__unicode__ does
something sensible, we don't need to implement a lot here.
"""
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 2
super(SmallField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'CharField'
def to_python(self, value):
if isinstance(value, Small):
return value
return Small(value[0], value[1])
def get_db_prep_save(self, value, connection):
return six.text_type(value)
def get_prep_lookup(self, lookup_type, value):
if lookup_type == 'exact':
return force_text(value)
if lookup_type == 'in':
return [force_text(v) for v in value]
if lookup_type == 'isnull':
return []
raise TypeError('Invalid lookup type: %r' % lookup_type)
class SmallerField(SmallField):
pass
class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)):
description = ("JSONField automatically serializes and deserializes values to "
"and from JSON.")
def to_python(self, value):
if not value:
return None
if isinstance(value, six.string_types):
value = json.loads(value)
return value
def get_db_prep_save(self, value, connection):
if value is None:
return None
return json.dumps(value)
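# Illustrative model definition using these fields (a sketch only; the real
# test models live in the accompanying models.py):
#
# class MyModel(models.Model):
#     name = models.CharField(max_length=10)
#     data = SmallField('small field')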
class CustomTypedField(models.TextField):
def db_type(self, connection):
return 'custom_field'
| bsd-3-clause | -3,097,145,197,718,090,000 | -6,870,847,057,085,661,000 | 27.765957 | 83 | 0.648299 | false |
slozier/ironpython2 | Src/StdLib/Lib/test/test_wait3.py | 136 | 1062 | """This test checks for correct wait3() behavior.
"""
import os
import time
import unittest
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children
try:
os.fork
except AttributeError:
raise unittest.SkipTest, "os.fork not defined -- skipping test_wait3"
try:
os.wait3
except AttributeError:
raise unittest.SkipTest, "os.wait3 not defined -- skipping test_wait3"
class Wait3Test(ForkWait):
def wait_impl(self, cpid):
for i in range(10):
# wait3() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status, rusage = os.wait3(os.WNOHANG)
if spid == cpid:
break
time.sleep(1.0)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
self.assertTrue(rusage)
def test_main():
run_unittest(Wait3Test)
reap_children()
if __name__ == "__main__":
test_main()
| apache-2.0 | 1,267,016,817,443,522,000 | -6,965,370,442,084,289,000 | 26.230769 | 87 | 0.636535 | false |
nhippenmeyer/django | tests/field_subclassing/tests.py | 214 | 4475 | from __future__ import unicode_literals
import inspect
from django.core import exceptions, serializers
from django.db import connection
from django.test import SimpleTestCase, TestCase
from .fields import CustomTypedField, Small
from .models import ChoicesModel, DataModel, MyModel, OtherModel
class CustomField(TestCase):
def test_refresh(self):
d = DataModel.objects.create(data=[1, 2, 3])
d.refresh_from_db(fields=['data'])
self.assertIsInstance(d.data, list)
self.assertEqual(d.data, [1, 2, 3])
def test_defer(self):
d = DataModel.objects.create(data=[1, 2, 3])
self.assertIsInstance(d.data, list)
d = DataModel.objects.get(pk=d.pk)
self.assertIsInstance(d.data, list)
self.assertEqual(d.data, [1, 2, 3])
d = DataModel.objects.defer("data").get(pk=d.pk)
self.assertIsInstance(d.data, list)
self.assertEqual(d.data, [1, 2, 3])
# Refetch for save
d = DataModel.objects.defer("data").get(pk=d.pk)
d.save()
d = DataModel.objects.get(pk=d.pk)
self.assertIsInstance(d.data, list)
self.assertEqual(d.data, [1, 2, 3])
def test_custom_field(self):
# Creating a model with custom fields is done as per normal.
s = Small(1, 2)
self.assertEqual(str(s), "12")
m = MyModel.objects.create(name="m", data=s)
# Custom fields still have normal field's attributes.
self.assertEqual(m._meta.get_field("data").verbose_name, "small field")
# The m.data attribute has been initialized correctly. It's a Small
# object.
self.assertEqual((m.data.first, m.data.second), (1, 2))
# The data loads back from the database correctly and 'data' has the
# right type.
m1 = MyModel.objects.get(pk=m.pk)
self.assertIsInstance(m1.data, Small)
self.assertEqual(str(m1.data), "12")
# We can do normal filtering on the custom field (and will get an error
# when we use a lookup type that does not make sense).
s1 = Small(1, 3)
s2 = Small("a", "b")
self.assertQuerysetEqual(
MyModel.objects.filter(data__in=[s, s1, s2]), [
"m",
],
lambda m: m.name,
)
self.assertRaises(TypeError, lambda: MyModel.objects.filter(data__lt=s))
# Serialization works, too.
stream = serializers.serialize("json", MyModel.objects.all())
self.assertJSONEqual(stream, [{
"pk": m1.pk,
"model": "field_subclassing.mymodel",
"fields": {"data": "12", "name": "m"}
}])
obj = list(serializers.deserialize("json", stream))[0]
self.assertEqual(obj.object, m)
# Test retrieving custom field data
m.delete()
m1 = MyModel.objects.create(name="1", data=Small(1, 2))
MyModel.objects.create(name="2", data=Small(2, 3))
self.assertQuerysetEqual(
MyModel.objects.all(), [
"12",
"23",
],
lambda m: str(m.data),
ordered=False
)
def test_field_subclassing(self):
o = OtherModel.objects.create(data=Small("a", "b"))
o = OtherModel.objects.get()
self.assertEqual(o.data.first, "a")
self.assertEqual(o.data.second, "b")
def test_subfieldbase_plays_nice_with_module_inspect(self):
"""
Custom fields should play nice with python standard module inspect.
http://users.rcn.com/python/download/Descriptor.htm#properties
"""
# Even when looking for totally different properties, SubfieldBase's
        # non-property-like behavior made inspect crash. Refs #12568.
data = dict(inspect.getmembers(MyModel))
self.assertIn('__module__', data)
self.assertEqual(data['__module__'], 'field_subclassing.models')
def test_validation_of_choices_for_custom_field(self):
# a valid choice
o = ChoicesModel.objects.create(data=Small('a', 'b'))
o.full_clean()
# an invalid choice
o = ChoicesModel.objects.create(data=Small('d', 'e'))
with self.assertRaises(exceptions.ValidationError):
o.full_clean()
class TestDbType(SimpleTestCase):
def test_db_parameters_respects_db_type(self):
f = CustomTypedField()
self.assertEqual(f.db_parameters(connection)['type'], 'custom_field')
| bsd-3-clause | -3,384,138,374,455,747,000 | 2,287,123,740,991,687,400 | 33.423077 | 80 | 0.603575 | false |
theo-l/django | django/views/static.py | 6 | 4553 | """
Views and functions for serving static files. These are only to be used
during development, and SHOULD NOT be used in a production setting.
"""
import mimetypes
import posixpath
import re
from pathlib import Path
from django.http import (
FileResponse, Http404, HttpResponse, HttpResponseNotModified,
)
from django.template import Context, Engine, TemplateDoesNotExist, loader
from django.utils._os import safe_join
from django.utils.http import http_date, parse_http_date
from django.utils.translation import gettext as _, gettext_lazy
def serve(request, path, document_root=None, show_indexes=False):
"""
Serve static files below a given point in the directory structure.
To use, put a URL pattern such as::
from django.views.static import serve
path('<path:path>', serve, {'document_root': '/path/to/my/files/'})
in your URLconf. You must provide the ``document_root`` param. You may
also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
of the directory. This index view will use the template hardcoded below,
but if you'd like to override it, you can create a template called
``static/directory_index.html``.
"""
path = posixpath.normpath(path).lstrip('/')
fullpath = Path(safe_join(document_root, path))
if fullpath.is_dir():
if show_indexes:
return directory_index(path, fullpath)
raise Http404(_("Directory indexes are not allowed here."))
if not fullpath.exists():
raise Http404(_('“%(path)s” does not exist') % {'path': fullpath})
# Respect the If-Modified-Since header.
statobj = fullpath.stat()
if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
statobj.st_mtime, statobj.st_size):
return HttpResponseNotModified()
content_type, encoding = mimetypes.guess_type(str(fullpath))
content_type = content_type or 'application/octet-stream'
response = FileResponse(fullpath.open('rb'), content_type=content_type)
response["Last-Modified"] = http_date(statobj.st_mtime)
if encoding:
response["Content-Encoding"] = encoding
return response
DEFAULT_DIRECTORY_INDEX_TEMPLATE = """
{% load i18n %}
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<meta http-equiv="Content-Language" content="en-us">
<meta name="robots" content="NONE,NOARCHIVE">
<title>{% blocktranslate %}Index of {{ directory }}{% endblocktranslate %}</title>
</head>
<body>
<h1>{% blocktranslate %}Index of {{ directory }}{% endblocktranslate %}</h1>
<ul>
{% if directory != "/" %}
<li><a href="../">../</a></li>
{% endif %}
{% for f in file_list %}
<li><a href="{{ f|urlencode }}">{{ f }}</a></li>
{% endfor %}
</ul>
</body>
</html>
"""
template_translatable = gettext_lazy("Index of %(directory)s")
def directory_index(path, fullpath):
try:
t = loader.select_template([
'static/directory_index.html',
'static/directory_index',
])
except TemplateDoesNotExist:
t = Engine(libraries={'i18n': 'django.templatetags.i18n'}).from_string(DEFAULT_DIRECTORY_INDEX_TEMPLATE)
c = Context()
else:
c = {}
files = []
for f in fullpath.iterdir():
if not f.name.startswith('.'):
url = str(f.relative_to(fullpath))
if f.is_dir():
url += '/'
files.append(url)
c.update({
'directory': path + '/',
'file_list': files,
})
return HttpResponse(t.render(c))
def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches[1])
header_len = matches[3]
if header_len and int(header_len) != size:
raise ValueError
if int(mtime) > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False
| bsd-3-clause | 4,591,788,199,916,836,400 | -1,785,577,415,910,483,700 | 32.696296 | 112 | 0.626291 | false |
ajvpot/CTFd | migrations/versions/75e8ab9a0014_add_fields_and_fieldentries_tables.py | 4 | 1867 | """Add Fields and FieldEntries tables
Revision ID: 75e8ab9a0014
Revises: 0366ba6575ca
Create Date: 2020-08-19 00:36:17.579497
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "75e8ab9a0014"
down_revision = "0366ba6575ca"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"fields",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=True),
sa.Column("type", sa.String(length=80), nullable=True),
sa.Column("field_type", sa.String(length=80), nullable=True),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("required", sa.Boolean(), nullable=True),
sa.Column("public", sa.Boolean(), nullable=True),
sa.Column("editable", sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"field_entries",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("type", sa.String(length=80), nullable=True),
sa.Column("value", sa.JSON(), nullable=True),
sa.Column("field_id", sa.Integer(), nullable=True),
sa.Column("user_id", sa.Integer(), nullable=True),
sa.Column("team_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(["field_id"], ["fields.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["team_id"], ["teams.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("field_entries")
op.drop_table("fields")
# ### end Alembic commands ###
| apache-2.0 | 9,179,930,016,034,198,000 | 7,898,169,932,800,714,000 | 34.226415 | 81 | 0.628281 | false |
WorldViews/Spirals | KinPy/KinOSCWatcher.py | 2 | 5277 |
import os, socket, time
import threading
#import MessageBoard
import traceback
import OSC
OSC_SERVER = None
ALL_JOINTS = {
'HandRight': 'RIGHT_HAND',
'HandLeft': 'LEFT_HAND',
'WristRight': 'RIGHT_WRIST',
'WristLeft': 'LEFT_WRIST',
'ElbowRight': 'RIGHT_ELBOW',
'ElbowLeft': 'LEFT_ELBOW',
'ShoulderRight': 'RIGHT_SHOULDER',
'ShoulderLeft': 'LEFT_SHOULDER',
'Neck': 'NECK',
'Head': 'HEAD',
'SpineMid': 'MID_SPINE',
'SpineBase': 'BASE_SPINE',
'HipRight': 'RIGHT_HIP',
'HipLeft': 'LEFT_HIP',
'KneeRight': 'RIGHT_KNEE',
'KneeLeft': 'LEFT_KNEE',
'AnkleRight': 'RIGHT_ANKLE',
'AnkleLeft': 'LEFT_ANKLE',
'FootRight': 'RIGHT_FOOT',
'FootLeft': 'LEFT_FOOT'
}
JOINTS = {
'HandRight': 'RIGHT_HAND',
'HandLeft': 'LEFT_HAND',
'ElbowRight': 'RIGHT_ELBOW',
'ElbowLeft': 'LEFT_ELBOW',
'Head': 'HEAD'
}
KINECT_CONTROL = None
"""
This is a simple class for holding the message associate
with a body, and some other information such as body num
or timing.
"""
class Body:
numBodies = 0
bodyById = {}
@staticmethod
def getBody(bodyId):
if bodyId in Body.bodyById:
return Body.bodyById[bodyId]
# MyOSCHandler.numPeople += 1
# personNum = MyOSCHandler.numPeople
body = Body(bodyId)
Body.bodyById[bodyId] = body
return body
def __init__(self, id):
Body.numBodies += 1
self.bodyId = id
self.personNum = Body.numBodies
self.msg = None
def setJoint(self, joint, xyz, trackState):
"""
This gets called with a joint position and accumulates
the joint information in a message. When this gets called
with a joint that is already in the message, it is assumed
the message is "complete" (i.e. has a complete set of
the joints being watched) and a single message is sent
with all those joints.
"""
global OSC_SERVER
#print "buildMessage", bodyId, joint, xyz
if JOINTS != None:
jname = JOINTS[joint]
else:
jname = joint
msg = self.msg
if msg != None and jname in msg:
#print "sending message!!!!", msg
if OSC_SERVER.kinSkelHandler:
OSC_SERVER.kinSkelHandler(msg)
msg = None
if msg == None:
msg = {'msgType':'kinect.skeleton.pose',
'personNum': self.personNum}
msg[jname] = xyz
c = .2
if trackState == 'Tracked':
c = 1.0
msg["%s_c" % jname] = c
self.msg = msg
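# Illustrative sketch (editorial addition, not part of the original file):
# how Body accumulates joint updates into one skeleton message. It assumes
# startOSC() below has already set OSC_SERVER with a kinSkelHandler.
#
#   body = Body.getBody("demo-body")
#   body.setJoint('Head', (0.0, 100.0, 50.0), 'Tracked')
#   body.setJoint('HandLeft', (10.0, 20.0, 30.0), 'Inferred')
#   # a second 'Head' update marks the message complete, so kinSkelHandler
#   # receives one dict holding HEAD, LEFT_HAND and their confidence values
#   body.setJoint('Head', (1.0, 101.0, 51.0), 'Tracked')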
class MyOSCHandler(OSC.OSCRequestHandler):
def dispatchMessage(self, pattern, tags, data):
parts = pattern.split("/")
if len(parts) != 5:
print "Unexpected number of parts"
return []
bodyId = parts[2]
if parts[3] == "hands":
if tags != "ss":
print "Unexpected format", tags
print "pattern:", pattern
return []
elif parts[3] == "joints":
joint = parts[4]
if tags != "fffs":
print "Unexpected format", tags
print "pattern:", pattern
return []
if JOINTS and joint not in JOINTS:
return []
#print "data: %s\n" % (data,)
x,y,z,trackState = data
pos = 1000.0*x, 1000.0*y, 1000.0*z
body = Body.getBody(bodyId)
body.setJoint(joint, pos, trackState)
else:
print "Unexpected pattern", pattern
return []
# only "joints" messages define joint/x/y/z; guard them to avoid a NameError on "hands" messages
if parts[3] == "joints" and self.server.kinJointHandler:
body = Body.getBody(bodyId)
msg = {'msgType': 'joint', 'personNum': body.personNum, 'joint': joint,
'bodyId': bodyId, 'pos': [x,y,z]}
self.server.kinJointHandler(msg)
# if SERVER:
# SERVER.sendMessageToAllClients(msg)
return []
#class MyOSCServer(OSC.ThreadingOSCServer):
class MyOSCServer(OSC.OSCServer):
RequestHandlerClass = MyOSCHandler
def bodyMsgHandler(msg):
print msg
OSC_HOST_ADDR = None
OSC_PORT = 12345
def getOSC_ADDR():
global OSC_HOST_ADDR
if not OSC_HOST_ADDR:
host = socket.gethostname()
OSC_HOST_ADDR = socket.gethostbyname(host)
"""
path = "%s.OSC_PARAMS.json"
if os.path.exists(path):
try:
params = json.load(file(path))
return tuple(params['OSC_ADDR'])
except:
traceback.print_exc()
return OSC_ADDR
"""
return OSC_HOST_ADDR, OSC_PORT
def startOSC(kinSkelHandler=None, kinJointHandler=None):
global OSC_SERVER
addr = getOSC_ADDR()
print "Using addr:", addr
s = MyOSCServer(addr)
OSC_SERVER = s
s.kinSkelHandler = kinSkelHandler
s.kinJointHandler = kinJointHandler
#s.app = app
s.addMsgHandler("/bodies", bodyMsgHandler)
t = threading.Thread(target=s.serve_forever)
t.start()
#t.setDaemon(True)
#s.serve_forever()
def kinSkelHandler(msg):
if 0:
print msg
def kinJointHandler(msg):
if 0:
print msg
def run(setupServer=True):
startOSC(kinSkelHandler, kinJointHandler)
while 1:
time.sleep(1)
if __name__ == '__main__':
run()
| mit | 5,120,757,731,373,876,000 | -6,031,096,825,855,588,000 | 25.923469 | 81 | 0.571347 | false |
int19h/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/heapq.py | 14 | 23017 | """Heap queue algorithm (a.k.a. priority queue).
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
Usage:
heap = [] # creates an empty heap
heappush(heap, item) # pushes a new item on the heap
item = heappop(heap) # pops the smallest item from the heap
item = heap[0] # smallest item on the heap without popping it
heapify(x) # transforms list into a heap, in-place, in linear time
item = heapreplace(heap, item) # pops and returns smallest item, and adds
# new item; the heap size is unchanged
Our API differs from textbook heap algorithms as follows:
- We use 0-based indexing. This makes the relationship between the
index for a node and the indexes for its children slightly less
obvious, but is more suitable since Python uses 0-based indexing.
- Our heappop() method returns the smallest item, not the largest.
These two make it possible to view the heap as a regular Python list
without surprises: heap[0] is the smallest item, and heap.sort()
maintains the heap invariant!
"""
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
__about__ = """Heap queues
[explanation by François Pinard]
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
The strange invariant above is meant to be an efficient memory
representation for a tournament. The numbers below are `k', not a[k]:
0
1 2
3 4 5 6
7 8 9 10 11 12 13 14
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
a usual binary tournament we see in sports, each cell is the winner
over the two cells it tops, and we can trace the winner down the tree
to see all opponents s/he had. However, in many computer applications
of such tournaments, we do not need to trace the history of a winner.
To be more memory efficient, when a winner is promoted, we try to
replace it by something else at a lower level, and the rule becomes
that a cell and the two cells it tops contain three different items,
but the top cell "wins" over the two topped cells.
If this heap invariant is protected at all time, index 0 is clearly
the overall winner. The simplest algorithmic way to remove it and
find the "next" winner is to move some loser (let's say cell 30 in the
diagram above) into the 0 position, and then percolate this new 0 down
the tree, exchanging values, until the invariant is re-established.
This is clearly logarithmic on the total number of items in the tree.
By iterating over all items, you get an O(n ln n) sort.
A nice feature of this sort is that you can efficiently insert new
items while the sort is going on, provided that the inserted items are
not "better" than the last 0'th element you extracted. This is
especially useful in simulation contexts, where the tree holds all
incoming events, and the "win" condition means the smallest scheduled
time. When an event schedules other events for execution, they are
scheduled into the future, so they can easily go into the heap. So, a
heap is a good structure for implementing schedulers (this is what I
used for my MIDI sequencer :-).
Various structures for implementing schedulers have been extensively
studied, and heaps are good for this, as they are reasonably speedy,
the speed is almost constant, and the worst case is not much different
than the average case. However, there are other representations which
are more efficient overall, yet the worst cases might be terrible.
Heaps are also very useful in big disk sorts. You most probably all
know that a big sort implies producing "runs" (which are pre-sorted
sequences, whose size is usually related to the amount of CPU memory),
followed by merging passes for these runs, which merging is often
very cleverly organised[1]. It is very important that the initial
sort produces the longest runs possible. Tournaments are a good way
to that. If, using all the memory available to hold a tournament, you
replace and percolate items that happen to fit the current run, you'll
produce runs which are twice the size of the memory for random input,
and much better for input fuzzily ordered.
Moreover, if you output the 0'th item on disk and get an input which
may not fit in the current tournament (because the value "wins" over
the last output value), it cannot fit in the heap, so the size of the
heap decreases. The freed memory could be cleverly reused immediately
for progressively building a second heap, which grows at exactly the
same rate the first heap is melting. When the first heap completely
vanishes, you switch heaps and start a new run. Clever and quite
effective!
In a word, heaps are useful memory structures to know. I use them in
a few applications, and I think it is good to keep a `heap' module
around. :-)
--------------------
[1] The disk balancing algorithms which are current, nowadays, are
more annoying than clever, and this is a consequence of the seeking
capabilities of the disks. On devices which cannot seek, like big
tape drives, the story was quite different, and one had to be very
clever to ensure (far in advance) that each tape movement will be the
most effective possible (that is, will best participate at
"progressing" the merge). Some tapes were even able to read
backwards, and this was also used to avoid the rewinding time.
Believe me, real good tape sorts were quite spectacular to watch!
From all times, sorting has always been a Great Art! :-)
"""
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'merge',
'nlargest', 'nsmallest', 'heappushpop']
def heappush(heap, item):
"""Push item onto heap, maintaining the heap invariant."""
heap.append(item)
_siftdown(heap, 0, len(heap)-1)
def heappop(heap):
"""Pop the smallest item off the heap, maintaining the heap invariant."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup(heap, 0)
return returnitem
return lastelt
def heapreplace(heap, item):
"""Pop and return the current smallest value, and add the new item.
This is more efficient than heappop() followed by heappush(), and can be
more appropriate when using a fixed-size heap. Note that the value
returned may be larger than item! That constrains reasonable uses of
this routine unless written as part of a conditional replacement:
if item > heap[0]:
item = heapreplace(heap, item)
"""
returnitem = heap[0] # raises appropriate IndexError if heap is empty
heap[0] = item
_siftup(heap, 0)
return returnitem
def heappushpop(heap, item):
"""Fast version of a heappush followed by a heappop."""
if heap and heap[0] < item:
item, heap[0] = heap[0], item
_siftup(heap, 0)
return item
def heapify(x):
"""Transform list into a heap, in-place, in O(len(x)) time."""
n = len(x)
# Transform bottom-up. The largest index there's any point to looking at
# is the largest with a child index in-range, so must have 2*i + 1 < n,
# or i < (n-1)/2. If n is even = 2*j, this is (2*j-1)/2 = j-1/2 so
# j-1 is the largest, which is n//2 - 1. If n is odd = 2*j+1, this is
# (2*j+1-1)/2 = j so j-1 is the largest, and that's again n//2-1.
for i in reversed(range(n//2)):
_siftup(x, i)
def _heappop_max(heap):
"""Maxheap version of a heappop."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup_max(heap, 0)
return returnitem
return lastelt
def _heapreplace_max(heap, item):
"""Maxheap version of a heappop followed by a heappush."""
returnitem = heap[0] # raises appropriate IndexError if heap is empty
heap[0] = item
_siftup_max(heap, 0)
return returnitem
def _heapify_max(x):
"""Transform list into a maxheap, in-place, in O(len(x)) time."""
n = len(x)
for i in reversed(range(n//2)):
_siftup_max(x, i)
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
# is the index of a leaf with a possibly out-of-order value. Restore the
# heap invariant.
def _siftdown(heap, startpos, pos):
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if newitem < parent:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
# The child indices of heap index pos are already heaps, and we want to make
# a heap at index pos too. We do this by bubbling the smaller child of
# pos up (and so on with that child's children, etc) until hitting a leaf,
# then using _siftdown to move the oddball originally at index pos into place.
#
# We *could* break out of the loop as soon as we find a pos where newitem <=
# both its children, but turns out that's not a good idea, and despite that
# many books write the algorithm that way. During a heap pop, the last array
# element is sifted in, and that tends to be large, so that comparing it
# against values starting from the root usually doesn't pay (= usually doesn't
# get us out of the loop early). See Knuth, Volume 3, where this is
# explained and quantified in an exercise.
#
# Cutting the # of comparisons is important, since these routines have no
# way to extract "the priority" from an array element, so that intelligence
# is likely to be hiding in custom comparison methods, or in array elements
# storing (priority, record) tuples. Comparisons are thus potentially
# expensive.
#
# On random arrays of length 1000, making this change cut the number of
# comparisons made by heapify() a little, and those made by exhaustive
# heappop() a lot, in accord with theory. Here are typical results from 3
# runs (3 just to demonstrate how small the variance is):
#
# Compares needed by heapify Compares needed by 1000 heappops
# -------------------------- --------------------------------
# 1837 cut to 1663 14996 cut to 8680
# 1855 cut to 1659 14966 cut to 8678
# 1847 cut to 1660 15024 cut to 8703
#
# Building the heap by using heappush() 1000 times instead required
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
# you can use it.
#
# The total compares needed by list.sort() on the same lists were 8627,
# 8627, and 8632 (this should be compared to the sum of heapify() and
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
# for sorting.
def _siftup(heap, pos):
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the smaller child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of smaller child.
rightpos = childpos + 1
if rightpos < endpos and not heap[childpos] < heap[rightpos]:
childpos = rightpos
# Move the smaller child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown(heap, startpos, pos)
def _siftdown_max(heap, startpos, pos):
'Maxheap variant of _siftdown'
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if parent < newitem:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
def _siftup_max(heap, pos):
'Maxheap variant of _siftup'
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the larger child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of larger child.
rightpos = childpos + 1
if rightpos < endpos and not heap[rightpos] < heap[childpos]:
childpos = rightpos
# Move the larger child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown_max(heap, startpos, pos)
def merge(*iterables, key=None, reverse=False):
'''Merge multiple sorted inputs into a single sorted output.
Similar to sorted(itertools.chain(*iterables)) but returns a generator,
does not pull the data into memory all at once, and assumes that each of
the input streams is already sorted (smallest to largest).
>>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
If *key* is not None, applies a key function to each element to determine
its sort order.
>>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len))
['dog', 'cat', 'fish', 'horse', 'kangaroo']
'''
h = []
h_append = h.append
if reverse:
_heapify = _heapify_max
_heappop = _heappop_max
_heapreplace = _heapreplace_max
direction = -1
else:
_heapify = heapify
_heappop = heappop
_heapreplace = heapreplace
direction = 1
if key is None:
for order, it in enumerate(map(iter, iterables)):
try:
next = it.__next__
h_append([next(), order * direction, next])
except StopIteration:
pass
_heapify(h)
while len(h) > 1:
try:
while True:
value, order, next = s = h[0]
yield value
s[0] = next() # raises StopIteration when exhausted
_heapreplace(h, s) # restore heap condition
except StopIteration:
_heappop(h) # remove empty iterator
if h:
# fast case when only a single iterator remains
value, order, next = h[0]
yield value
yield from next.__self__
return
for order, it in enumerate(map(iter, iterables)):
try:
next = it.__next__
value = next()
h_append([key(value), order * direction, value, next])
except StopIteration:
pass
_heapify(h)
while len(h) > 1:
try:
while True:
key_value, order, value, next = s = h[0]
yield value
value = next()
s[0] = key(value)
s[2] = value
_heapreplace(h, s)
except StopIteration:
_heappop(h)
if h:
key_value, order, value, next = h[0]
yield value
yield from next.__self__
# Algorithm notes for nlargest() and nsmallest()
# ==============================================
#
# Make a single pass over the data while keeping the k most extreme values
# in a heap. Memory consumption is limited to keeping k values in a list.
#
# Measured performance for random inputs:
#
# number of comparisons
# n inputs k-extreme values (average of 5 trials) % more than min()
# ------------- ---------------- --------------------- -----------------
# 1,000 100 3,317 231.7%
# 10,000 100 14,046 40.5%
# 100,000 100 105,749 5.7%
# 1,000,000 100 1,007,751 0.8%
# 10,000,000 100 10,009,401 0.1%
#
# Theoretical number of comparisons for k smallest of n random inputs:
#
# Step Comparisons Action
# ---- -------------------------- ---------------------------
# 1 1.66 * k heapify the first k-inputs
# 2 n - k compare remaining elements to top of heap
# 3 k * (1 + lg2(k)) * ln(n/k) replace the topmost value on the heap
# 4 k * lg2(k) - (k/2) final sort of the k most extreme values
#
# Combining and simplifying for a rough estimate gives:
#
# comparisons = n + k * (log(k, 2) * log(n/k) + log(k, 2) + log(n/k))
#
# Computing the number of comparisons for step 3:
# -----------------------------------------------
# * For the i-th new value from the iterable, the probability of being in the
# k most extreme values is k/i. For example, the probability of the 101st
# value seen being in the 100 most extreme values is 100/101.
# * If the value is a new extreme value, the cost of inserting it into the
# heap is 1 + log(k, 2).
# * The probability times the cost gives:
# (k/i) * (1 + log(k, 2))
# * Summing across the remaining n-k elements gives:
# sum((k/i) * (1 + log(k, 2)) for i in range(k+1, n+1))
# * This reduces to:
# (H(n) - H(k)) * k * (1 + log(k, 2))
# * Where H(n) is the n-th harmonic number estimated by:
# gamma = 0.5772156649
# H(n) = log(n, e) + gamma + 1 / (2 * n)
# http://en.wikipedia.org/wiki/Harmonic_series_(mathematics)#Rate_of_divergence
# * Substituting the H(n) formula:
# comparisons = k * (1 + log(k, 2)) * (log(n/k, e) + (1/n - 1/k) / 2)
#
# Worst-case for step 3:
# ----------------------
# In the worst case, the input data is reversed sorted so that every new element
# must be inserted in the heap:
#
# comparisons = 1.66 * k + log(k, 2) * (n - k)
#
# Alternative Algorithms
# ----------------------
# Other algorithms were not used because they:
# 1) Took much more auxiliary memory,
# 2) Made multiple passes over the data.
# 3) Made more comparisons in common cases (small k, large n, semi-random input).
# See the more detailed comparison of approach at:
# http://code.activestate.com/recipes/577573-compare-algorithms-for-heapqsmallest
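# Illustrative sketch (editorial addition, not part of the stdlib module):
# the "keep the k most extreme values in a heap" idea measured above, written
# against the public API for clarity (assumes k >= 1 and
# `from itertools import islice`):
#
#   def k_smallest(iterable, k):
#       it = iter(iterable)
#       heap = [-x for x in islice(it, k)]   # max-heap of the k best, via negation
#       heapify(heap)
#       for x in it:
#           if x < -heap[0]:                 # beats the current worst kept value
#               heapreplace(heap, -x)
#       return sorted(-v for v in heap)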
def nsmallest(n, iterable, key=None):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable, key=key)[:n]
"""
# Short-cut for n==1 is to use min()
if n == 1:
it = iter(iterable)
sentinel = object()
if key is None:
result = min(it, default=sentinel)
else:
result = min(it, default=sentinel, key=key)
return [] if result is sentinel else [result]
# When n>=size, it's faster to use sorted()
try:
size = len(iterable)
except (TypeError, AttributeError):
pass
else:
if n >= size:
return sorted(iterable, key=key)[:n]
# When key is none, use simpler decoration
if key is None:
it = iter(iterable)
# put the range(n) first so that zip() doesn't
# consume one too many elements from the iterator
result = [(elem, i) for i, elem in zip(range(n), it)]
if not result:
return result
_heapify_max(result)
top = result[0][0]
order = n
_heapreplace = _heapreplace_max
for elem in it:
if elem < top:
_heapreplace(result, (elem, order))
top, _order = result[0]
order += 1
result.sort()
return [elem for (elem, order) in result]
# General case, slowest method
it = iter(iterable)
result = [(key(elem), i, elem) for i, elem in zip(range(n), it)]
if not result:
return result
_heapify_max(result)
top = result[0][0]
order = n
_heapreplace = _heapreplace_max
for elem in it:
k = key(elem)
if k < top:
_heapreplace(result, (k, order, elem))
top, _order, _elem = result[0]
order += 1
result.sort()
return [elem for (k, order, elem) in result]
def nlargest(n, iterable, key=None):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
"""
# Short-cut for n==1 is to use max()
if n == 1:
it = iter(iterable)
sentinel = object()
if key is None:
result = max(it, default=sentinel)
else:
result = max(it, default=sentinel, key=key)
return [] if result is sentinel else [result]
# When n>=size, it's faster to use sorted()
try:
size = len(iterable)
except (TypeError, AttributeError):
pass
else:
if n >= size:
return sorted(iterable, key=key, reverse=True)[:n]
# When key is none, use simpler decoration
if key is None:
it = iter(iterable)
result = [(elem, i) for i, elem in zip(range(0, -n, -1), it)]
if not result:
return result
heapify(result)
top = result[0][0]
order = -n
_heapreplace = heapreplace
for elem in it:
if top < elem:
_heapreplace(result, (elem, order))
top, _order = result[0]
order -= 1
result.sort(reverse=True)
return [elem for (elem, order) in result]
# General case, slowest method
it = iter(iterable)
result = [(key(elem), i, elem) for i, elem in zip(range(0, -n, -1), it)]
if not result:
return result
heapify(result)
top = result[0][0]
order = -n
_heapreplace = heapreplace
for elem in it:
k = key(elem)
if top < k:
_heapreplace(result, (k, order, elem))
top, _order, _elem = result[0]
order -= 1
result.sort(reverse=True)
return [elem for (k, order, elem) in result]
# If available, use C implementation
try:
from _heapq import *
except ImportError:
pass
try:
from _heapq import _heapreplace_max
except ImportError:
pass
try:
from _heapq import _heapify_max
except ImportError:
pass
try:
from _heapq import _heappop_max
except ImportError:
pass
if __name__ == "__main__":
import doctest
print(doctest.testmod())
| apache-2.0 | -5,036,424,080,158,213,000 | 6,290,767,295,970,075,000 | 36.917628 | 81 | 0.617006 | false |
xlqian/navitia | release/script_release.py | 1 | 16196 | # -*- coding: utf-8 -*-
# Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, division
import os
os.environ['LC_ALL'] = 'en_US'
os.environ['GIT_PYTHON_TRACE'] = '1' # can be 0 (no trace), 1 (git commands) or full (git commands + git output)
from git import *
from datetime import datetime
import subprocess
import re
from sys import exit, argv
from shutil import copyfile
from os import remove, stat
import codecs
import requests
import logging
def get_tag_name(version):
return "v{maj}.{min}.{hf}".format(maj=version[0], min=version[1], hf=version[2])
class ReleaseManager:
def __init__(self, release_type, remote_name="canalTP"):
self.directory = ".."
self.changelog_filename = self.directory + "/debian/changelog"
self.data_version_filename = self.directory + "/source/type/data.cpp"
self.release_type = release_type
self.repo = Repo(self.directory)
self.git = self.repo.git
# we fetch latest version from remote
self.remote_name = remote_name
print("fetching from {}...".format(remote_name))
self.repo.remote(remote_name).fetch("--tags")
# and we update dev and release branches
print("rebasing dev and release...")
# TODO quit on error
self.git.rebase(remote_name + "/dev", "dev")
self.dev_data_version = self.get_data_version()
remote_release = remote_name + "/release"
try:
self.git.checkout("-B release ", remote_release)
except Exception as e:
print("Cannot checkout 'release':{}, creating from distant branch".format(str(e)))
self.git.checkout("-b", "release", remote_release)
print("checking that release was merged into dev...")
unmerged = self.git.branch("--no-merged", "dev", '--no-color')
is_release_unmerged = re.search(" release(\n|$)", unmerged)
if is_release_unmerged:
print(is_release_unmerged.group(0))
print("ABORTING: {rem}/release branch was not merged in {rem}/dev".format(rem=remote_name))
print("This is required before releasing. You may use (be careful):")
print("git checkout dev; git submodule update --recursive")
print("git merge release")
exit(1)
print("current branch: {}".format(self.repo.active_branch))
self.version = None
self.str_version = ""
self.latest_tag = ""
# if API rate limit exceeded use, get 'personal access token' on github then provide:
# self.auth = ('user', 'pass')
self.auth = None
def get_data_version(self):
f_data_version = codecs.open(self.data_version_filename, 'r', 'utf-8')
version = None
for line in f_data_version:
res = re.search('^ *const .*data_version *= *([0-9]+) *;.*$', line)
if res:
version = res.group(1)
break
if version is None:
print("ABORTING: data_version could not be retrieved from {f}".format(f=self.data_version_filename))
exit(1)
print("Current data_version is " + version)
try:
return int(version)
except ValueError:
print("ABORTING: data_version {d} is not an Integer".format(d=version))
exit(1)
def get_new_version_number(self):
latest_version = None
last_tag = self.git.describe('--tags', abbrev=0)
version = re.search('.*(\d+\.\d+\.\d+).*', last_tag)
if version:
latest_version = version.group(1)
if not latest_version:
print("no latest version found")
exit(1)
version_n = latest_version.split('.')
print("latest version is {}".format(version_n))
self.version = [int(i) for i in version_n]
self.latest_tag = get_tag_name(self.version)
print("last tag is " + self.latest_tag)
if self.release_type == "regular":
if self.version[0] > self.dev_data_version:
print(
"ABORTING: data_version {d} is < to latest tag {t}".format(
d=self.dev_data_version, t=self.latest_tag
)
)
exit(1)
elif self.version[0] < self.dev_data_version: # major version
self.version[0] = self.dev_data_version
self.version[1] = self.version[2] = 0
else: # versions equal: minor version
self.version[0] = self.dev_data_version
self.version[1] += 1
self.version[2] = 0
elif self.release_type == "major":
self.version[0] += 1
self.version[1] = self.version[2] = 0
elif self.release_type == "minor":
self.version[1] += 1
self.version[2] = 0
elif self.release_type == "hotfix":
self.version[2] += 1
else:
exit(5)
if self.version[0] > self.dev_data_version:
print(
"ABORTING: data_version {d} is < to tag {t} to be published".format(
d=self.dev_data_version, t=self.latest_tag
)
)
exit(1)
self.str_version = "{maj}.{min}.{hf}".format(
maj=self.version[0], min=self.version[1], hf=self.version[2]
)
print("New version is {}".format(self.str_version))
return self.str_version
def checkout_parent_branch(self):
parent = ""
if self.release_type == "hotfix":
parent = "release"
else:
parent = "dev"
self.git.checkout(parent)
self.git.submodule('update', '--recursive')
print("current branch {}".format(self.repo.active_branch))
def closed_pr_generator(self):
# lazy get all closed PR ordered by last updated
closed_pr = []
page = 1
while True:
query = (
"https://api.github.com/repos/CanalTP/navitia/"
"pulls?state=closed&base=dev&sort=updated&direction=desc&page={page}".format(page=page)
)
print("query github api: " + query)
github_response = requests.get(query, auth=self.auth)
if github_response.status_code != 200:
message = github_response.json()['message']
print(u' * Impossible to retrieve PR\n * ' + message)
return
closed_pr = github_response.json()
if not closed_pr:
print("Reached end of PR list")
return
for pr in closed_pr:
yield pr
page += 1
def get_merged_pullrequest(self):
lines = []
nb_successive_merged_pr = 0
for pr in self.closed_pr_generator():
title = pr['title']
url = pr['html_url']
pr_head_sha = pr['head']['sha']
# test if PR was merged (not simply closed)
# and if distant/release contains HEAD of PR
# (stops after 10 successive merged PR)
if pr['merged_at']:
branches = []
try:
branches = self.git.branch('-r', '--contains', pr_head_sha, '--no-color') + '\n'
except:
print(
"ERROR while searching for commit in release branch: "
"Following PR added to changelog, remove it if needed.\n"
)
# adding separators before and after to match only branch name
release_branch_name = ' ' + self.remote_name + '/release\n'
if release_branch_name in branches:
nb_successive_merged_pr += 1
if nb_successive_merged_pr >= 10:
break
else:
# doing the label search as late as possible to save api calls
has_excluded_label = False
label_query = pr['_links']['issue']['href'] + '/labels'
labels = requests.get(label_query, auth=self.auth).json()
if any(label['name'] in ("hotfix", "not_in_changelog") for label in labels):
has_excluded_label = True
if not has_excluded_label:
lines.append(u' * {title} <{url}>\n'.format(title=title, url=url))
print(lines[-1])
nb_successive_merged_pr = 0
return lines
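# Illustrative sketch (editorial addition, not part of the original script):
# the lazy pagination pattern used by closed_pr_generator() above, reduced to
# its core; fetch_page(n) is a hypothetical stand-in for the GitHub API call.
#
#   def paginated(fetch_page):
#       page = 1
#       while True:
#           items = fetch_page(page)
#           if not items:
#               return
#           for item in items:
#               yield item
#           page += 1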
def create_changelog(self):
write_lines = [u'navitia2 (%s) unstable; urgency=low\n' % self.str_version, u'\n']
if self.release_type != "hotfix":
pullrequests = self.get_merged_pullrequest()
write_lines.extend(pullrequests)
else:
write_lines.append(u' * \n')
author_name = self.git.config('user.name')
author_mail = self.git.config('user.email')
write_lines.extend(
[
u'\n',
u' -- {name} <{mail}> {now} +0100\n'.format(
name=author_name, mail=author_mail, now=datetime.now().strftime("%a, %d %b %Y %H:%M:%S")
),
u'\n',
]
)
return write_lines
def update_changelog(self):
print("updating changelog")
changelog = self.create_changelog()
f_changelog = None
back_filename = self.changelog_filename + "~"
try:
f_changelog = codecs.open(self.changelog_filename, 'r', 'utf-8')
except IOError:
print("Unable to open file: " + self.changelog_filename)
exit(1)
f_changelogback = codecs.open(back_filename, "w", "utf-8")
for line in changelog:
f_changelogback.write(line)
for line in f_changelog:
f_changelogback.write(line)
f_changelog.close()
f_changelogback.close()
last_modified = stat(back_filename)
(stdout, stderr) = subprocess.Popen(
["vim", back_filename, "--nofork"], stderr=subprocess.PIPE
).communicate()
after = stat(back_filename)
if last_modified == after:
print("No changes made, we stop")
remove(back_filename)
exit(2)
copyfile(back_filename, self.changelog_filename)
self.git.add(os.path.abspath(self.changelog_filename))
def get_modified_changelog(self):
# the changelog might have been modified by the user, so we have to read it again
f_changelog = codecs.open(self.changelog_filename, 'r', 'utf-8')
lines = []
nb_version = 0
for line in f_changelog:
# each version are separated by a line like
# navitia2 (0.94.1) unstable; urgency=low
if line.startswith("navitia2 "):
nb_version += 1
continue
if nb_version >= 2:
break # we can stop
if nb_version == 0:
continue
lines.append(line + u'\n')
f_changelog.close()
return lines
def publish_release(self, temp_branch):
self.git.checkout("release")
self.git.submodule('update', '--recursive')
# merge with the release branch
self.git.merge(temp_branch, "release", '--no-ff')
print("current branch {}".format(self.repo.active_branch))
# we tag the release
tag_message = u'Version {}\n'.format(self.str_version)
changelog = self.get_modified_changelog()
for change in changelog:
tag_message += change
print("tag: " + tag_message)
self.repo.create_tag(get_tag_name(self.version), message=tag_message)
# and we merge back the release branch to dev (at least for the tag in release)
self.git.merge("release", "dev", '--no-ff')
print("publishing the release")
print("Check the release, you will probably want to merge release in dev:")
print(" git checkout dev; git submodule update --recursive")
print(" git merge release")
print("And when you're happy do:")
print(" git push {} release dev --tags".format(self.remote_name))
# TODO: when we'll be confident, we will do that automaticaly
def release_the_kraken(self, new_version):
tmp_name = "release_%s" % new_version
self.checkout_parent_branch()
# we then create a new temporary branch
print("creating temporary release branch {}".format(tmp_name))
self.git.checkout(b=tmp_name)
print("current branch {}".format(self.repo.active_branch))
self.update_changelog()
self.git.commit(m="Version %s" % self.str_version)
if self.release_type == "hotfix":
print("now time to do your actual hotfix! (cherry-pick commits)")
print("PLEASE check that \"release\" COMPILES and TESTS!")
print("Note: you'll have to merge/tag/push manually after your fix:")
print(" git checkout release")
print(" git merge --no-ff {tmp_branch}".format(tmp_branch=tmp_name))
print(
" git tag -a {} #then add message on Version and mention concerned PRs".format(
get_tag_name(self.version)
)
)
print(" git checkout dev")
print(" git merge --ff release")
print(" git push {} release dev --tags".format(self.remote_name))
# TODO2 try to script that (put 2 hotfix param, like hotfix init and hotfix publish ?)
exit(0)
self.publish_release(tmp_name)
def get_release_type():
if raw_input("Do you need a binarization ? [Y/n] ").lower() == "y":
return "major"
if raw_input("Have you changed the API or Data interface ? [Y/n] ").lower() == "y":
return "major"
if raw_input("Are the changes backward compatible ? [Y/n] ").lower() == "y":
return "minor"
if raw_input("Are you hotfixing ? [Y/n] ").lower() == "y":
return "hotfix"
raise RuntimeError("Couldn't find out the release type")
if __name__ == '__main__':
# argv[0] is the script name; the release type is asked interactively,
# so the only (optional) positional argument is the remote
if len(argv) > 2:
print("usage: script_release.py [remote]   (remote defaults to CanalTP)")
exit(5)
logging.basicConfig(level=logging.INFO)
release_type = get_release_type()
remote = argv[1] if len(argv) >= 2 else "CanalTP"
manager = ReleaseManager(release_type, remote_name=remote)
new_version = manager.get_new_version_number()
print("Release type: {}".format(release_type))
print("Release version: {}".format(new_version))
if raw_input("Shall we proceed ? [Y/n] ").lower() != "y":
exit(6)
manager.release_the_kraken(new_version)
| agpl-3.0 | -1,983,759,797,826,636,000 | 6,716,248,643,792,137,000 | 35.151786 | 113 | 0.566066 | false |
pycrystem/pycrystem | pyxem/signals/tensor_field.py | 1 | 3246 | # -*- coding: utf-8 -*-
# Copyright 2016-2020 The pyXem developers
#
# This file is part of pyXem.
#
# pyXem is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyXem is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyXem. If not, see <http://www.gnu.org/licenses/>.
from hyperspy.signals import Signal2D
import numpy as np
from scipy.linalg import polar
from hyperspy.utils import stack
import math
from pyxem.signals.strain_map import StrainMap
"""
Signal class for Tensor Fields
"""
def _polar_decomposition(image, side):
"""Perform a polar decomposition of a second rank tensor.
Parameters
----------
image : np.array()
Matrix on which to form polar decomposition.
side : str
'left' or 'right' the side on which to perform polar decomposition.
Returns
-------
U, R : np.array()
Stretch and rotation matrices obtained by polar decomposition.
"""
return np.array(polar(image, side=side))
def _get_rotation_angle(matrix):
"""Find the rotation angle associated with a given rotation matrix.
Parameters
----------
matrix : np.array()
A rotation matrix.
Returns
-------
angle : np.array()
Rotation angle associated with matrix.
"""
return np.array(-math.asin(matrix[1, 0]))
class DisplacementGradientMap(Signal2D):
_signal_type = "tensor_field"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Check that the signal dimensions are (3,3) for it to be a valid
# TensorField
def polar_decomposition(self):
"""Perform polar decomposition on the second rank tensors describing
the TensorField. The polar decomposition is right handed and given by
:math:`D = RU`
Returns
-------
R : TensorField
The orthogonal matrix describing the rotation field.
U : TensorField
The strain tensor field.
"""
RU = self.map(_polar_decomposition, side="right", inplace=False)
return RU.isig[:, :, 0], RU.isig[:, :, 1]
def get_strain_maps(self):
"""Obtain strain maps from the displacement gradient tensor at each
navigation position in the small strain approximation.
Returns
-------
strain_results : BaseSignal
Signal of shape < 4 | , > , navigation order is e11,e22,e12,theta
"""
R, U = self.polar_decomposition()
e11 = -U.isig[0, 0].T + 1
e12 = U.isig[0, 1].T
e21 = U.isig[1, 0].T
e22 = -U.isig[1, 1].T + 1
theta = R.map(_get_rotation_angle, inplace=False)
theta.axes_manager.set_signal_dimension(2)
strain_results = stack([e11, e22, e12, theta])
return StrainMap(strain_results)
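# Editorial note (not part of the original module): in the small-strain
# approximation used above, the strain components are read off the right
# stretch tensor U as e11 = 1 - U[0, 0], e22 = 1 - U[1, 1], e12 = U[0, 1],
# and the rotation angle comes from R via theta = -asin(R[1, 0]).
# A minimal usage sketch, assuming `dgm` is a DisplacementGradientMap holding
# a 2x2 displacement-gradient tensor at each navigation position:
#
#   R, U = dgm.polar_decomposition()     # rotation field and stretch field
#   strain_map = dgm.get_strain_maps()   # stacks e11, e22, e12, theta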
| gpl-3.0 | 1,714,106,052,389,648,000 | -6,878,778,619,072,376,000 | 27.725664 | 77 | 0.637708 | false |
srm912/servo | tests/wpt/web-platform-tests/html/tools/update_html5lib_tests.py | 125 | 5358 | import sys
import os
import hashlib
import urllib
import itertools
import re
import json
import glob
import shutil
try:
import genshi
from genshi.template import MarkupTemplate
from html5lib.tests import support
except ImportError:
print """This script requires the Genshi templating library and html5lib source
It is recommended that these are installed in a virtualenv:
virtualenv venv
source venv/bin/activate
pip install genshi
cd venv
git clone [email protected]:html5lib/html5lib-python.git html5lib
cd html5lib
git submodule init
git submodule update
pip install -e ./
Then run this script again, with the virtual environment still active.
When you are done, type "deactivate" to deactivate the virtual environment.
"""
TESTS_PATH = "html/syntax/parsing/"
def get_paths():
script_path = os.path.split(os.path.abspath(__file__))[0]
repo_base = get_repo_base(script_path)
tests_path = os.path.join(repo_base, TESTS_PATH)
return script_path, tests_path
def get_repo_base(path):
while path:
if os.path.exists(os.path.join(path, ".git")):
return path
else:
path = os.path.split(path)[0]
def get_expected(data):
data = "#document\n" + data
return data
def get_hash(data, container=None):
if container == None:
container = ""
return hashlib.sha1("#container%s#data%s"%(container.encode("utf8"),
data.encode("utf8"))).hexdigest()
def make_tests(script_dir, out_dir, input_file_name, test_data):
tests = []
innerHTML_tests = []
ids_seen = {}
print input_file_name
for test in test_data:
if "script-off" in test:
continue
is_innerHTML = "document-fragment" in test
data = test["data"]
container = test["document-fragment"] if is_innerHTML else None
assert test["document"], test
expected = get_expected(test["document"])
test_list = innerHTML_tests if is_innerHTML else tests
test_id = get_hash(data, container)
if test_id in ids_seen:
print "WARNING: id %s seen multiple times in file %s this time for test (%s, %s) before for test %s, skipping"%(test_id, input_file_name, container, data, ids_seen[test_id])
continue
ids_seen[test_id] = (container, data)
test_list.append({'string_uri_encoded_input':"\"%s\""%urllib.quote(data.encode("utf8")),
'input':data,
'expected':expected,
'string_escaped_expected':json.dumps(urllib.quote(expected.encode("utf8"))),
'id':test_id,
'container':container
})
path_normal = None
if tests:
path_normal = write_test_file(script_dir, out_dir,
tests, "html5lib_%s"%input_file_name,
"html5lib_test.xml")
path_innerHTML = None
if innerHTML_tests:
path_innerHTML = write_test_file(script_dir, out_dir,
innerHTML_tests, "html5lib_innerHTML_%s"%input_file_name,
"html5lib_test_fragment.xml")
return path_normal, path_innerHTML
def write_test_file(script_dir, out_dir, tests, file_name, template_file_name):
file_name = os.path.join(out_dir, file_name + ".html")
short_name = os.path.split(file_name)[1]
with open(os.path.join(script_dir, template_file_name)) as f:
template = MarkupTemplate(f)
stream = template.generate(file_name=short_name, tests=tests)
with open(file_name, "w") as f:
f.write(stream.render('html', doctype='html5',
encoding="utf8"))
return file_name
def escape_js_string(in_data):
return in_data.encode("utf8").encode("string-escape")
def serialize_filenames(test_filenames):
return "[" + ",\n".join("\"%s\""%item for item in test_filenames) + "]"
def main():
script_dir, out_dir = get_paths()
test_files = []
inner_html_files = []
if len(sys.argv) > 2:
test_iterator = itertools.izip(
itertools.repeat(False),
sorted(os.path.abspath(item) for item in
glob.glob(os.path.join(sys.argv[2], "*.dat"))))
else:
test_iterator = itertools.chain(
itertools.izip(itertools.repeat(False),
sorted(support.get_data_files("tree-construction"))),
itertools.izip(itertools.repeat(True),
sorted(support.get_data_files(
os.path.join("tree-construction", "scripted")))))
for (scripted, test_file) in test_iterator:
input_file_name = os.path.splitext(os.path.split(test_file)[1])[0]
if scripted:
input_file_name = "scripted_" + input_file_name
test_data = support.TestData(test_file)
test_filename, inner_html_file_name = make_tests(script_dir, out_dir,
input_file_name, test_data)
if test_filename is not None:
test_files.append(test_filename)
if inner_html_file_name is not None:
inner_html_files.append(inner_html_file_name)
if __name__ == "__main__":
main()
| mpl-2.0 | 1,100,577,346,861,616,000 | 5,212,437,489,933,208,000 | 34.25 | 185 | 0.592945 | false |
NSAmelchev/ignite | modules/platforms/python/pyignite/cache.py | 11 | 22098 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Iterable, Optional, Union
from .datatypes import prop_codes
from .exceptions import (
CacheCreationError, CacheError, ParameterError, SQLError,
)
from .utils import cache_id, is_wrapped, status_to_exception, unwrap_binary
from .api.cache_config import (
cache_create, cache_create_with_config,
cache_get_or_create, cache_get_or_create_with_config,
cache_destroy, cache_get_configuration,
)
from .api.key_value import (
cache_get, cache_put, cache_get_all, cache_put_all, cache_replace,
cache_clear, cache_clear_key, cache_clear_keys,
cache_contains_key, cache_contains_keys,
cache_get_and_put, cache_get_and_put_if_absent, cache_put_if_absent,
cache_get_and_remove, cache_get_and_replace,
cache_remove_key, cache_remove_keys, cache_remove_all,
cache_remove_if_equals, cache_replace_if_equals, cache_get_size,
)
from .api.sql import scan, scan_cursor_get_page, sql, sql_cursor_get_page
PROP_CODES = set([
getattr(prop_codes, x)
for x in dir(prop_codes)
if x.startswith('PROP_')
])
CACHE_CREATE_FUNCS = {
True: {
True: cache_get_or_create_with_config,
False: cache_create_with_config,
},
False: {
True: cache_get_or_create,
False: cache_create,
},
}
class Cache:
"""
Ignite cache abstraction. Users should never use this class directly,
but construct its instances with
:py:meth:`~pyignite.client.Client.create_cache`,
:py:meth:`~pyignite.client.Client.get_or_create_cache` or
:py:meth:`~pyignite.client.Client.get_cache` methods instead. See
:ref:`this example <create_cache>` on how to do it.
"""
_cache_id = None
_name = None
_client = None
_settings = None
@staticmethod
def _validate_settings(
settings: Union[str, dict]=None, get_only: bool=False,
):
if any([
not settings,
type(settings) not in (str, dict),
type(settings) is dict and prop_codes.PROP_NAME not in settings,
]):
raise ParameterError('You should supply at least cache name')
if all([
type(settings) is dict,
not set(settings).issubset(PROP_CODES),
]):
raise ParameterError('One or more settings was not recognized')
if get_only and type(settings) is dict and len(settings) != 1:
raise ParameterError('Only cache name allowed as a parameter')
def __init__(
self, client: 'Client', settings: Union[str, dict]=None,
with_get: bool=False, get_only: bool=False,
):
"""
Initialize cache object.
:param client: Ignite client,
:param settings: cache settings. Can be a string (cache name) or a dict
of cache properties and their values. In this case PROP_NAME is
mandatory,
:param with_get: (optional) do not raise exception, if the cache
already exists. Defaults to False,
:param get_only: (optional) do not communicate with Ignite server
at all, only create Cache instance. Defaults to False.
"""
self._client = client
self._validate_settings(settings)
if type(settings) == str:
self._name = settings
else:
self._name = settings[prop_codes.PROP_NAME]
if not get_only:
func = CACHE_CREATE_FUNCS[type(settings) is dict][with_get]
result = func(client, settings)
if result.status != 0:
raise CacheCreationError(result.message)
self._cache_id = cache_id(self._name)
@property
def settings(self) -> Optional[dict]:
"""
Lazy Cache settings. See the :ref:`example <sql_cache_read>`
of reading this property.
All cache properties are documented here: :ref:`cache_props`.
:return: dict of cache properties and their values.
"""
if self._settings is None:
config_result = cache_get_configuration(self._client, self._cache_id)
if config_result.status == 0:
self._settings = config_result.value
else:
raise CacheError(config_result.message)
return self._settings
@property
def name(self) -> str:
"""
Lazy cache name.
:return: cache name string.
"""
if self._name is None:
self._name = self.settings[prop_codes.PROP_NAME]
return self._name
@property
def client(self) -> 'Client':
"""
Ignite :class:`~pyignite.client.Client` object.
:return: Client object, through which the cache is accessed.
"""
return self._client
@property
def cache_id(self) -> int:
"""
Cache ID.
:return: integer value of the cache ID.
"""
return self._cache_id
def _process_binary(self, value: Any) -> Any:
"""
Detects and recursively unwraps Binary Object.
:param value: anything that could be a Binary Object,
:return: the result of the Binary Object unwrapping with all other data
left intact.
"""
if is_wrapped(value):
return unwrap_binary(self._client, value)
return value
@status_to_exception(CacheError)
def destroy(self):
"""
Destroys cache with a given name.
"""
return cache_destroy(self._client, self._cache_id)
@status_to_exception(CacheError)
def get(self, key, key_hint: object=None) -> Any:
"""
Retrieves a value from cache by key.
:param key: key for the cache entry. Can be of any supported type,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:return: value retrieved.
"""
result = cache_get(self._client, self._cache_id, key, key_hint=key_hint)
result.value = self._process_binary(result.value)
return result
@status_to_exception(CacheError)
def put(self, key, value, key_hint: object=None, value_hint: object=None):
"""
Puts a value with a given key to cache (overwriting existing value
if any).
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given
value should be converted.
"""
return cache_put(
self._client, self._cache_id, key, value,
key_hint=key_hint, value_hint=value_hint
)
@status_to_exception(CacheError)
def get_all(self, keys: list) -> list:
"""
Retrieves multiple key-value pairs from cache.
:param keys: list of keys or tuples of (key, key_hint),
:return: a dict of key-value pairs.
"""
result = cache_get_all(self._client, self._cache_id, keys)
if result.value:
for key, value in result.value.items():
result.value[key] = self._process_binary(value)
return result
@status_to_exception(CacheError)
def put_all(self, pairs: dict):
"""
Puts multiple key-value pairs to cache (overwriting existing
associations if any).
:param pairs: dictionary type parameters, contains key-value pairs
to save. Each key or value can be an item of representable
Python type or a tuple of (item, hint),
"""
return cache_put_all(self._client, self._cache_id, pairs)
@status_to_exception(CacheError)
def replace(
self, key, value, key_hint: object=None, value_hint: object=None
):
"""
Puts a value with a given key to cache only if the key already exist.
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given
value should be converted.
"""
result = cache_replace(
self._client, self._cache_id, key, value,
key_hint=key_hint, value_hint=value_hint
)
result.value = self._process_binary(result.value)
return result
@status_to_exception(CacheError)
def clear(self, keys: Optional[list]=None):
"""
Clears the cache without notifying listeners or cache writers.
:param keys: (optional) list of cache keys or (key, key type
hint) tuples to clear (default: clear all).
"""
if keys:
return cache_clear_keys(self._client, self._cache_id, keys)
else:
return cache_clear(self._client, self._cache_id)
@status_to_exception(CacheError)
def clear_key(self, key, key_hint: object=None):
"""
Clears the cache key without notifying listeners or cache writers.
:param key: key for the cache entry,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
"""
return cache_clear_key(
self._client, self._cache_id, key, key_hint=key_hint
)
@status_to_exception(CacheError)
def contains_key(self, key, key_hint=None) -> bool:
"""
Returns a value indicating whether given key is present in cache.
:param key: key for the cache entry. Can be of any supported type,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:return: boolean `True` when key is present, `False` otherwise.
"""
return cache_contains_key(
self._client, self._cache_id, key, key_hint=key_hint
)
@status_to_exception(CacheError)
def contains_keys(self, keys: Iterable) -> bool:
"""
Returns a value indicating whether all given keys are present in cache.
:param keys: a list of keys or (key, type hint) tuples,
:return: boolean `True` when all keys are present, `False` otherwise.
"""
return cache_contains_keys(self._client, self._cache_id, keys)
@status_to_exception(CacheError)
def get_and_put(self, key, value, key_hint=None, value_hint=None) -> Any:
"""
Puts a value with a given key to cache, and returns the previous value
for that key, or null value if there was not such key.
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given
value should be converted.
:return: old value or None.
"""
result = cache_get_and_put(
self._client, self._cache_id, key, value, key_hint, value_hint
)
result.value = self._process_binary(result.value)
return result
@status_to_exception(CacheError)
def get_and_put_if_absent(
self, key, value, key_hint=None, value_hint=None
):
"""
Puts a value with a given key to cache only if the key does not
already exist.
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given
value should be converted,
:return: old value or None.
"""
result = cache_get_and_put_if_absent(
self._client, self._cache_id, key, value, key_hint, value_hint
)
result.value = self._process_binary(result.value)
return result
@status_to_exception(CacheError)
def put_if_absent(self, key, value, key_hint=None, value_hint=None):
"""
Puts a value with a given key to cache only if the key does not
already exist.
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given
value should be converted.
"""
return cache_put_if_absent(
self._client, self._cache_id, key, value, key_hint, value_hint
)
@status_to_exception(CacheError)
def get_and_remove(self, key, key_hint=None) -> Any:
"""
Removes the cache entry with specified key, returning the value.
:param key: key for the cache entry. Can be of any supported type,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:return: old value or None.
"""
result = cache_get_and_remove(
self._client, self._cache_id, key, key_hint
)
result.value = self._process_binary(result.value)
return result
@status_to_exception(CacheError)
def get_and_replace(
self, key, value, key_hint=None, value_hint=None
) -> Any:
"""
Puts a value with a given key to cache, returning previous value
for that key, if and only if there is a value currently mapped
for that key.
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given
value should be converted.
:return: old value or None.
"""
result = cache_get_and_replace(
self._client, self._cache_id, key, value, key_hint, value_hint
)
result.value = self._process_binary(result.value)
return result
@status_to_exception(CacheError)
def remove_key(self, key, key_hint=None):
"""
Clears the cache key without notifying listeners or cache writers.
:param key: key for the cache entry,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
"""
return cache_remove_key(self._client, self._cache_id, key, key_hint)
@status_to_exception(CacheError)
def remove_keys(self, keys: list):
"""
Removes cache entries by given list of keys, notifying listeners
and cache writers.
:param keys: list of keys or tuples of (key, key_hint) to remove.
"""
return cache_remove_keys(self._client, self._cache_id, keys)
@status_to_exception(CacheError)
def remove_all(self):
"""
Removes all cache entries, notifying listeners and cache writers.
"""
return cache_remove_all(self._client, self._cache_id)
@status_to_exception(CacheError)
def remove_if_equals(self, key, sample, key_hint=None, sample_hint=None):
"""
Removes an entry with a given key if provided value is equal to
actual value, notifying listeners and cache writers.
:param key: key for the cache entry,
:param sample: a sample to compare the stored value with,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
        :param sample_hint: (optional) Ignite data type, for which
the given sample should be converted.
"""
return cache_remove_if_equals(
self._client, self._cache_id, key, sample, key_hint, sample_hint
)
@status_to_exception(CacheError)
def replace_if_equals(
self, key, sample, value,
key_hint=None, sample_hint=None, value_hint=None
) -> Any:
"""
Puts a value with a given key to cache only if the key already exists
and value equals provided sample.
:param key: key for the cache entry,
:param sample: a sample to compare the stored value with,
:param value: new value for the given key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
        :param sample_hint: (optional) Ignite data type, for which
         the given sample should be converted,
:param value_hint: (optional) Ignite data type, for which the given
value should be converted,
:return: boolean `True` when key is present, `False` otherwise.
"""
result = cache_replace_if_equals(
self._client, self._cache_id, key, sample, value,
key_hint, sample_hint, value_hint
)
result.value = self._process_binary(result.value)
return result
@status_to_exception(CacheError)
def get_size(self, peek_modes=0):
"""
Gets the number of entries in cache.
:param peek_modes: (optional) limit count to near cache partition
(PeekModes.NEAR), primary cache (PeekModes.PRIMARY), or backup cache
(PeekModes.BACKUP). Defaults to all cache partitions (PeekModes.ALL),
:return: integer number of cache entries.
"""
return cache_get_size(self._client, self._cache_id, peek_modes)
def scan(self, page_size: int=1, partitions: int=-1, local: bool=False):
"""
Returns all key-value pairs from the cache, similar to `get_all`, but
with internal pagination, which is slower, but safer.
:param page_size: (optional) page size. Default size is 1 (slowest
and safest),
:param partitions: (optional) number of partitions to query
(negative to query entire cache),
:param local: (optional) pass True if this query should be executed
on local node only. Defaults to False,
:return: generator with key-value pairs.
"""
result = scan(self._client, self._cache_id, page_size, partitions, local)
if result.status != 0:
raise CacheError(result.message)
cursor = result.value['cursor']
for k, v in result.value['data'].items():
k = self._process_binary(k)
v = self._process_binary(v)
yield k, v
while result.value['more']:
result = scan_cursor_get_page(self._client, cursor)
if result.status != 0:
raise CacheError(result.message)
for k, v in result.value['data'].items():
k = self._process_binary(k)
v = self._process_binary(v)
yield k, v
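    # Hypothetical usage sketch (an assumption, not part of the original file):
    # iterating the scan() generator pulls further pages lazily through
    # scan_cursor_get_page(). The client/cache setup and the cache name below
    # are illustrative only.
    #
    #     cache = client.get_or_create_cache('my_cache')
    #     for key, value in cache.scan(page_size=100):
    #         print(key, value)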
def select_row(
self, query_str: str, page_size: int=1,
query_args: Optional[list]=None, distributed_joins: bool=False,
replicated_only: bool=False, local: bool=False, timeout: int=0
):
"""
Executes a simplified SQL SELECT query over data stored in the cache.
The query returns the whole record (key and value).
:param query_str: SQL query string,
:param page_size: (optional) cursor page size. Default is 1, which
means that client makes one server call per row,
:param query_args: (optional) query arguments,
:param distributed_joins: (optional) distributed joins. Defaults
to False,
:param replicated_only: (optional) whether query contains only
replicated tables or not. Defaults to False,
:param local: (optional) pass True if this query should be executed
on local node only. Defaults to False,
:param timeout: (optional) non-negative timeout value in ms. Zero
disables timeout (default),
:return: generator with key-value pairs.
"""
def generate_result(value):
cursor = value['cursor']
more = value['more']
for k, v in value['data'].items():
k = self._process_binary(k)
v = self._process_binary(v)
yield k, v
while more:
inner_result = sql_cursor_get_page(self._client, cursor)
                if inner_result.status != 0:
                    raise SQLError(inner_result.message)
more = inner_result.value['more']
for k, v in inner_result.value['data'].items():
k = self._process_binary(k)
v = self._process_binary(v)
yield k, v
type_name = self.settings[
prop_codes.PROP_QUERY_ENTITIES
][0]['value_type_name']
if not type_name:
raise SQLError('Value type is unknown')
result = sql(
self._client,
self._cache_id,
type_name,
query_str,
page_size,
query_args,
distributed_joins,
replicated_only,
local,
timeout
)
if result.status != 0:
raise SQLError(result.message)
return generate_result(result.value)
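    # Hypothetical usage sketch (an assumption, not part of the original file):
    # select_row() requires the cache to be configured with a query entity,
    # otherwise it raises SQLError('Value type is unknown'). Table and column
    # names below are made up for illustration.
    #
    #     for key, row in cache.select_row(
    #             'SELECT * FROM Person WHERE age > ?',
    #             query_args=[21], page_size=100):
    #         print(key, row)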
| apache-2.0 | 5,364,420,798,698,580,000 | 5,065,223,320,304,120,000 | 36.139496 | 81 | 0.609648 | false |
mabushadi/dpxdt | dpxdt/tools/diff_my_urls.py | 7 | 6027 | #!/usr/bin/env python
# Copyright 2014 Brett Slatkin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for diffing a set of URL pairs defined in a config file.
Example usage:
./dpxdt/tools/diff_my_urls.py \
--upload_build_id=1234 \
--release_server_prefix=https://my-dpxdt-apiserver.example.com/api \
--release_client_id=<your api key> \
--release_client_secret=<your api secret> \
--upload_release_name="My release name" \
    --release_cut_url=http://example.com/path/to/my/release/tool/for/this/cut \
--tests_json_path=my_url_tests.json
Example input file "my_url_tests.json". One entry per test:
[
{
"name": "My homepage",
"run_url": "http://localhost:5000/static/dummy/dummy_page1.html",
"run_config": {
"viewportSize": {
"width": 1024,
"height": 768
},
"injectCss": "#foobar { background-color: lime",
"injectJs": "document.getElementById('foobar').innerText = 'bar';",
},
"ref_url": "http://localhost:5000/static/dummy/dummy_page1.html",
"ref_config": {
"viewportSize": {
"width": 1024,
"height": 768
},
"injectCss": "#foobar { background-color: goldenrod; }",
"injectJs": "document.getElementById('foobar').innerText = 'foo';",
}
},
...
]
See README.md for documentation of config parameters.
"""
import datetime
import json
import logging
import sys
# Local Libraries
import gflags
FLAGS = gflags.FLAGS
# Local modules
from dpxdt.client import fetch_worker
from dpxdt.client import release_worker
from dpxdt.client import workers
import flags
class Test(object):
"""Represents the JSON of a single test."""
def __init__(self, name=None, run_url=None, run_config=None,
ref_url=None, ref_config=None):
self.name = name
self.run_url = run_url
self.run_config_data = json.dumps(run_config) if run_config else None
self.ref_url = ref_url
self.ref_config_data = json.dumps(ref_config) if ref_config else None
def load_tests(data):
"""Loads JSON data and returns a list of Test objects it contains."""
test_list = json.loads(data)
results = []
for test_json in test_list:
results.append(Test(**test_json))
return results
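# Hypothetical usage sketch (not part of the original file): load_tests() maps
# each JSON object onto a Test instance, so a config file shaped like the one
# in the module docstring can be loaded as follows; this mirrors what
# real_main() does with the --tests_json_path flag.
#
#     tests = load_tests(open('my_url_tests.json').read())
#     print(tests[0].name, tests[0].run_url)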
class DiffMyUrls(workers.WorkflowItem):
"""Workflow for diffing a set of URL pairs defined in a config file.
Args:
release_url: URL of the newest and best version of the page.
tests: List of Test objects to test.
upload_build_id: Optional. Build ID of the site being compared. When
supplied a new release will be cut for this build comparing it
to the last good release.
upload_release_name: Optional. Release name to use for the build. When
not supplied, a new release based on the current time will be
created.
heartbeat: Function to call with progress status.
"""
def run(self,
release_url,
tests,
upload_build_id,
upload_release_name,
heartbeat=None):
if not upload_release_name:
upload_release_name = str(datetime.datetime.utcnow())
yield heartbeat('Creating release %s' % upload_release_name)
release_number = yield release_worker.CreateReleaseWorkflow(
upload_build_id, upload_release_name, release_url)
pending_uploads = []
for test in tests:
item = release_worker.RequestRunWorkflow(
upload_build_id, upload_release_name, release_number,
test.name, url=test.run_url, config_data=test.run_config_data,
ref_url=test.ref_url, ref_config_data=test.ref_config_data)
pending_uploads.append(item)
yield heartbeat('Requesting %d runs' % len(pending_uploads))
yield pending_uploads
yield heartbeat('Marking runs as complete')
release_url = yield release_worker.RunsDoneWorkflow(
upload_build_id, upload_release_name, release_number)
yield heartbeat('Results viewable at: %s' % release_url)
def real_main(release_url=None,
tests_json_path=None,
upload_build_id=None,
upload_release_name=None):
"""Runs diff_my_urls."""
coordinator = workers.get_coordinator()
fetch_worker.register(coordinator)
coordinator.start()
data = open(FLAGS.tests_json_path).read()
tests = load_tests(data)
item = DiffMyUrls(
release_url,
tests,
upload_build_id,
upload_release_name,
heartbeat=workers.PrintWorkflow)
item.root = True
coordinator.input_queue.put(item)
coordinator.wait_one()
coordinator.stop()
coordinator.join()
def main(argv):
try:
argv = FLAGS(argv)
except gflags.FlagsError, e:
print '%s\nUsage: %s ARGS\n%s' % (e, sys.argv[0], FLAGS)
sys.exit(1)
assert FLAGS.release_cut_url
assert FLAGS.release_server_prefix
assert FLAGS.tests_json_path
assert FLAGS.upload_build_id
if FLAGS.verbose:
logging.getLogger().setLevel(logging.DEBUG)
real_main(
release_url=FLAGS.release_cut_url,
tests_json_path=FLAGS.tests_json_path,
upload_build_id=FLAGS.upload_build_id,
upload_release_name=FLAGS.upload_release_name)
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 | -5,258,471,309,593,403,000 | 1,127,395,197,201,959,000 | 30.554974 | 79 | 0.632653 | false |
Osmose/kitsune | kitsune/sumo/tests/test_utils.py | 11 | 7012 | # -*- coding: utf8 -*-
import json
from django.contrib.auth.models import Permission
from django.test.client import RequestFactory
from mock import patch, Mock
from nose.tools import eq_
from kitsune.journal.models import Record
from kitsune.sumo.utils import (
chunked, get_next_url, is_ratelimited, smart_int, truncated_json_dumps, get_browser)
from kitsune.sumo.tests import TestCase
from kitsune.users.tests import profile
class SmartIntTestCase(TestCase):
def test_sanity(self):
eq_(10, smart_int('10'))
eq_(10, smart_int('10.5'))
def test_int(self):
eq_(10, smart_int(10))
def test_invalid_string(self):
eq_(0, smart_int('invalid'))
def test_empty_string(self):
eq_(0, smart_int(''))
def test_wrong_type(self):
eq_(0, smart_int(None))
eq_(10, smart_int([], 10))
def test_large_values(self):
"""Makes sure ints that would cause an overflow result in fallback."""
eq_(0, smart_int('1' * 1000))
class GetNextUrlTests(TestCase):
def setUp(self):
super(GetNextUrlTests, self).setUp()
self.r = RequestFactory()
self.patcher = patch('django.contrib.sites.models.Site.objects')
mock = self.patcher.start()
mock.get_current.return_value.domain = 'su.mo.com'
def tearDown(self):
self.patcher.stop()
super(GetNextUrlTests, self).tearDown()
def test_query_string(self):
"""Query-strings remain intact."""
r = self.r.get('/', {'next': '/new?f=b'})
eq_('/new?f=b', get_next_url(r))
def test_good_host_https(self):
"""Full URLs work with valid hosts."""
r = self.r.post('/users/login',
{'next': 'https://su.mo.com/kb/new'})
eq_('https://su.mo.com/kb/new', get_next_url(r))
def test_post(self):
"""'next' in POST overrides GET."""
r = self.r.post('/?next=/foo', {'next': '/bar'})
eq_('/bar', get_next_url(r))
def test_get(self):
"""'next' can be a query-string parameter."""
r = self.r.get('/users/login', {'next': '/kb/new'})
eq_('/kb/new', get_next_url(r))
def test_referer(self):
"""Use HTTP referer if nothing else."""
r = self.r.get('/')
r.META['HTTP_REFERER'] = 'http://su.mo.com/new'
eq_('http://su.mo.com/new', get_next_url(r))
def test_bad_host_https(self):
r = self.r.get('/', {'next': 'https://example.com'})
eq_(None, get_next_url(r))
def test_bad_host_protocol_relative(self):
"""Protocol-relative URLs do not let bad hosts through."""
r = self.r.get('/', {'next': '//example.com'})
eq_(None, get_next_url(r))
class JSONTests(TestCase):
def test_truncated_noop(self):
"""Make sure short enough things are unmodified."""
d = {'foo': 'bar'}
trunc = truncated_json_dumps(d, 1000, 'foo')
eq_(json.dumps(d), trunc)
def test_truncated_key(self):
"""Make sure truncation works as expected."""
d = {'foo': 'a long string that should be truncated'}
trunc = truncated_json_dumps(d, 30, 'foo')
obj = json.loads(trunc)
eq_(obj['foo'], 'a long string that ')
eq_(len(trunc), 30)
def test_unicode(self):
"""Unicode should not be treated as longer than it is."""
d = {'formula': u'A=πr²'}
trunc = truncated_json_dumps(d, 25, 'formula')
eq_(json.dumps(d, ensure_ascii=False), trunc)
class ChunkedTests(TestCase):
def test_chunked(self):
# chunking nothing yields nothing.
eq_(list(chunked([], 1)), [])
# chunking list where len(list) < n
eq_(list(chunked([1], 10)), [[1]])
# chunking a list where len(list) == n
eq_(list(chunked([1, 2], 2)), [[1, 2]])
# chunking list where len(list) > n
eq_(list(chunked([1, 2, 3, 4, 5], 2)),
[[1, 2], [3, 4], [5]])
# passing in a length overrides the real len(list)
eq_(list(chunked([1, 2, 3, 4, 5, 6, 7], 2, length=4)),
[[1, 2], [3, 4]])
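# Hedged sketch (an assumption, not part of the original file): an
# implementation of chunked() consistent with the assertions above, including
# the optional `length` override that caps how many items are consumed. The
# real helper lives in kitsune.sumo.utils.
#
#     def chunked(items, n, length=None):
#         if length is None:
#             length = len(items)
#         for start in range(0, length, n):
#             chunk = items[start:start + n]
#             if chunk:
#                 yield chunk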
class IsRatelimitedTest(TestCase):
def test_ratelimited(self):
u = profile().user
request = Mock()
request.user = u
request.limited = False
request.method = 'POST'
# One call to the rate limit won't trigger it.
eq_(is_ratelimited(request, 'test-ratelimited', '1/min'), False)
# But two will
eq_(is_ratelimited(request, 'test-ratelimited', '1/min'), True)
def test_ratelimit_bypass(self):
u = profile().user
bypass = Permission.objects.get(codename='bypass_ratelimit')
u.user_permissions.add(bypass)
request = Mock()
request.user = u
request.limited = False
request.method = 'POST'
# One call to the rate limit won't trigger it.
eq_(is_ratelimited(request, 'test-ratelimited', '1/min'), False)
# And a second one still won't, because the user has the bypass permission.
eq_(is_ratelimited(request, 'test-ratelimited', '1/min'), False)
def test_ratelimit_logging(self):
u = profile().user
request = Mock()
request.user = u
request.limited = False
request.method = 'POST'
eq_(Record.objects.count(), 0)
# Two calls will trigger the ratelimit once.
is_ratelimited(request, 'test-ratelimited', '1/min')
is_ratelimited(request, 'test-ratelimited', '1/min')
eq_(Record.objects.count(), 1)
class GetBrowserNameTest(TestCase):
def test_firefox(self):
"""Test with User Agent of Firefox"""
user_agent = 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'
# Check Firefox is returning
eq_(get_browser(user_agent), 'Firefox')
def test_chrome(self):
"""Test with User Agent of Chrome"""
user_agent = ('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/41.0.2228.0 Safari/537.36')
# Check Chrome is returning
eq_(get_browser(user_agent), 'Chrome')
def test_internet_explorer(self):
"""Test with User Agent of Internet Explorer"""
# Check with default User Agent of IE 11
user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko'
eq_(get_browser(user_agent), 'Trident')
# Check with Compatibility View situation user Agent of IE11
user_agent = ('Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; '
'Trident/7.0; rv:11.0) like Gecko')
eq_(get_browser(user_agent), 'MSIE')
def test_safari(self):
"""Test with User Agent of Safari"""
user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14'
'(KHTML, like Gecko) Version/7.0.3 Safari/7046A194A')
# Check Safari is returning
eq_(get_browser(user_agent), 'Safari')
| bsd-3-clause | 5,859,623,999,982,607,000 | -6,832,485,545,185,157,000 | 32.54067 | 95 | 0.579743 | false |
rpm-software-management/librepo | examples/python/download_packages_with_fastestmirror.py | 1 | 1645 | #!/usr/bin/env python3
"""
librepo - download packages
"""
import os
import os.path
import time
import librepo
CACHE = "fastestmirror.cache"
LIBREPOPKG = "librepo-1.2.1-2.fc20.x86_64.rpm"
LAMEPKG = "lame-3.99.5-2.fc19.x86_64.rpm"
if __name__ == "__main__":
# Setup logging
def debug_function(msg, _):
print(msg)
#librepo.set_debug_log_handler(debug_function)
# Remove packages if already exists
def remove_pkg(filename):
if os.path.exists(filename):
os.remove(filename)
remove_pkg(LIBREPOPKG)
remove_pkg(LAMEPKG)
# Prepare list of targets
packages = []
# Prepare first target
h1 = librepo.Handle()
h1.metalinkurl = "https://mirrors.fedoraproject.org/metalink?repo=fedora-20&arch=x86_64"
h1.repotype = librepo.YUMREPO
h1.fastestmirror = True
h1.fastestmirrorcache = CACHE
target = librepo.PackageTarget("Packages/l/"+LIBREPOPKG, handle=h1)
packages.append(target)
# Prepare second target
h2 = librepo.Handle()
h2.mirrorlisturl = "http://mirrors.rpmfusion.org/mirrorlist?repo=free-fedora-19&arch=x86_64"
h2.repotype = librepo.YUMREPO
h2.fastestmirror = True
h2.fastestmirrorcache = CACHE
target = librepo.PackageTarget(LAMEPKG, handle=h2)
packages.append(target)
t = time.time()
librepo.download_packages(packages)
print("Download duration: {0}s\n".format((time.time() - t)))
for target in packages:
print("### %s: %s" % (target.local_path, target.err or "OK"))
print("Local path: ", target.local_path)
print("Error: ", target.err)
print()
| lgpl-2.1 | -7,295,027,021,262,362,000 | 8,331,179,148,953,373,000 | 26.416667 | 96 | 0.649848 | false |
mizuy/mizwiki | mizwiki/utils/conv_pukiwiki.py | 1 | 4421 | # -*- coding: utf-8 -*-
import codecs, os, cStringIO as StringIO, re, sys
class IStreamBuffer:
@staticmethod
def _conv(v):
return v.rstrip(u'\n\r')
def __init__(self,inputfile):
self.input = codecs.getreader('utf-8')(inputfile)
self.stack = []
def __iter__(self):
return self
def next(self):
if len(self.stack)>0:
return self._conv(self.stack.pop())
return self._conv(self.input.next())
def push(self,line):
self.stack.append(self._conv(line))
def eof(self):
if len(self.stack)==0:
try:
self.push(self.input.next())
except StopIteration:
return True
return False
def top(self):
assert not self.eof()
if len(self.stack)==0:
self.push(self.input.next())
return self.stack[-1]
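# Hedged usage note (not part of the original file): IStreamBuffer wraps a
# UTF-8 stream and lets the parser peek ahead and push lines back, e.g.
#
#     buf = IStreamBuffer(open('body.txt', 'rb'))  # hypothetical input file
#     line = buf.next()   # consume one line
#     buf.push(line)      # put it back; buf.top() now shows it again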
def conv(inputs,os):
os = codecs.getwriter('utf-8')(os)
istr = IStreamBuffer(inputs)
for l in istr:
l = l.rstrip('~')
assert type(l)==unicode
if l.startswith('{{{'):
os.write(l+'\n')
for ll in istr:
os.write(ll+'\n')
if ll.startswith('}}}'):
break
continue
if l.startswith(' '):
istr.push(l)
parse_quote(istr,os)
continue
if l.strip().startswith('----') and l.replace('-',' ').strip()=='':
os.write('====\n')
continue
parse_inline(os,l)
os.write('\n')
def parse_quote(istr,os):
os.write('{{{\n')
for l in istr:
if l.startswith(' '):
os.write(l[1:]+'\n')
else:
break
os.write('}}}\n')
wikilabel = re.compile(ur'\[\[([^\]]+)>([\w_/\.\-]+)\]\]',re.U)
namelabel = re.compile(ur'\[\[([^\]]+)>#([_a-zA-Z0-9]+)\]\]',re.U)
areaedit = re.compile(ur'&areaedit\([^\)]*\){([^}]*)};', re.U)
new = re.compile(ur'&new{([^}]*)};', re.U)
pre = re.compile(ur"\[|&",re.U)
def parse_inline(doc, src):
assert type(src)==unicode
pos = 0
while pos<len(src):
m = pre.search(src,pos)
if not m:
doc.write(src[pos:])
return
doc.write(src[pos:m.start()])
pos = m.start()
if src[pos]=='[':
m = wikilabel.match(src,pos)
if m:
pos += len(m.group(0))
name = m.group(1)
url = m.group(2)
doc.write('[[%s:%s]]'%(name,url))
continue
m = namelabel.match(src,pos)
if m:
pos += len(m.group(0))
name = m.group(1)
url = m.group(2)
doc.write('[[%s:#%s]]'%(name,url))
continue
if src[pos]=='&':
m = areaedit.match(src,pos)
if m:
pos += len(m.group(0))
doc.write(m.group(1))
continue
m = new.match(src,pos)
if m:
pos += len(m.group(0))
doc.write(m.group(1))
continue
doc.write(src[pos])
pos += 1
class iterdir(object):
def __init__(self, path, deep=False):
self._root = path
self._files = None
self.deep = deep
def __iter__(self):
return self
def next(self):
if self._files:
join = os.path.join
d = self._files.pop()
r = join(self._root, d)
if self.deep and os.path.isdir(r):
self._files += [join(d,n) for n in os.listdir(r)]
elif self._files is None:
self._files = os.listdir(self._root)
if self._files:
return self._files[-1]
else:
raise StopIteration
if __name__=='__main__':
sin = codecs.getreader('utf-8')(sys.stdin)
sout = codecs.getwriter('utf-8')(sys.stdout)
it = iterdir('.',4)
for x in it:
p = os.path.basename(x)
if p == 'body.txt':
print x
f = open(x,'r')
try:
out = StringIO.StringIO()
conv(f,out)
out.seek(0)
f.close()
f = open(x, 'w')
f.write(out.read())
finally:
f.close()
| mit | 8,080,553,884,427,415,000 | 3,047,934,326,742,056,400 | 25.793939 | 75 | 0.434291 | false |
fabian4/trove | trove/guestagent/strategies/backup/experimental/mongo_impl.py | 1 | 4132 | # Copyright (c) 2014 eBay Software Foundation
# Copyright 2015 Hewlett-Packard Development Company, L.P. and Tesora, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_log import log as logging
from trove.common import cfg
from trove.common import exception
from trove.common.i18n import _
from trove.common import utils
from trove.guestagent.common import operating_system
from trove.guestagent.datastore.experimental.mongodb import (
service as mongo_service)
from trove.guestagent.datastore.experimental.mongodb import (
system as mongo_system)
from trove.guestagent.strategies.backup import base
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
MONGODB_DBPATH = CONF.mongodb.mount_point
MONGO_DUMP_DIR = MONGODB_DBPATH + "/dump"
LARGE_TIMEOUT = 1200
class MongoDump(base.BackupRunner):
"""Implementation of Backup Strategy for MongoDump."""
__strategy_name__ = 'mongodump'
backup_cmd = 'mongodump --out ' + MONGO_DUMP_DIR
def __init__(self, *args, **kwargs):
self.app = mongo_service.MongoDBApp()
super(MongoDump, self).__init__(*args, **kwargs)
def _run_pre_backup(self):
"""Create archival contents in dump dir"""
try:
est_dump_size = self.estimate_dump_size()
avail = operating_system.get_bytes_free_on_fs(MONGODB_DBPATH)
if est_dump_size > avail:
self.cleanup()
# TODO(atomic77) Though we can fully recover from this error
# BackupRunner will leave the trove instance in a BACKUP state
raise OSError(_("Need more free space to run mongodump, "
"estimated %(est_dump_size)s"
" and found %(avail)s bytes free ") %
{'est_dump_size': est_dump_size,
'avail': avail})
operating_system.create_directory(MONGO_DUMP_DIR, as_root=True)
operating_system.chown(MONGO_DUMP_DIR, mongo_system.MONGO_USER,
"nogroup", as_root=True)
# high timeout here since mongodump can take a long time
utils.execute_with_timeout(
'mongodump', '--out', MONGO_DUMP_DIR,
*(self.app.admin_cmd_auth_params()),
run_as_root=True, root_helper='sudo',
timeout=LARGE_TIMEOUT
)
except exception.ProcessExecutionError as e:
LOG.debug("Caught exception when creating the dump")
self.cleanup()
raise e
@property
def cmd(self):
"""Tars and streams the dump dir contents to
the stdout
"""
cmd = 'sudo tar cPf - ' + MONGO_DUMP_DIR
return cmd + self.zip_cmd + self.encrypt_cmd
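    # Illustrative note (an assumption, not from the original file): with the
    # default mount point and gzip enabled, the assembled pipeline is roughly
    #
    #     sudo tar cPf - /var/lib/mongodb/dump | gzip
    #
    # zip_cmd and encrypt_cmd come from base.BackupRunner, so the exact
    # command depends on the configured backup options.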
def cleanup(self):
operating_system.remove(MONGO_DUMP_DIR, force=True, as_root=True)
def _run_post_backup(self):
self.cleanup()
def estimate_dump_size(self):
"""
Estimate the space that the mongodump will take based on the output of
db.stats().dataSize. This seems to be conservative, as the actual bson
output in many cases is a fair bit smaller.
"""
dbs = self.app.list_all_dbs()
# mongodump does not dump the content of the local database
dbs.remove('local')
dbstats = dict([(d, 0) for d in dbs])
for d in dbstats:
dbstats[d] = self.app.db_data_size(d)
LOG.debug("Estimated size for databases: " + str(dbstats))
return sum(dbstats.values())
| apache-2.0 | 1,198,138,286,589,472,300 | -640,567,534,280,371,200 | 37.259259 | 78 | 0.623427 | false |
eciis/web | backend/handlers/resend_invite_handler.py | 1 | 1157 | # -*- coding: utf-8 -*-
"""Resend Invite Handler."""
import json
from util import login_required
from utils import json_response
from utils import Utils
from custom_exceptions import NotAuthorizedException
from . import BaseHandler
from google.appengine.ext import ndb
__all__ = ['ResendInviteHandler']
class ResendInviteHandler(BaseHandler):
"""Resend Invite Handler."""
@json_response
@login_required
def post(self, user, invite_key):
"""Handle POST Requests."""
body = json.loads(self.request.body)
host = self.request.host
invite = ndb.Key(urlsafe=invite_key).get()
Utils._assert(invite.status != 'sent',
"The invite has already been used", NotAuthorizedException)
user.check_permission("invite_members",
"User is not allowed to send invites",
invite.institution_key.urlsafe())
institution = invite.institution_key.get()
Utils._assert(not institution.is_active(),
"This institution is not active", NotAuthorizedException)
invite.send_invite(host, user.current_institution)
| gpl-3.0 | -8,114,919,943,871,442,000 | 949,258,657,085,027,200 | 31.138889 | 81 | 0.65255 | false |
marcusmartins/compose | compose/cli/verbose_proxy.py | 67 | 1691 |
import functools
from itertools import chain
import logging
import pprint
import six
def format_call(args, kwargs):
args = (repr(a) for a in args)
kwargs = ("{0!s}={1!r}".format(*item) for item in six.iteritems(kwargs))
return "({0})".format(", ".join(chain(args, kwargs)))
def format_return(result, max_lines):
if isinstance(result, (list, tuple, set)):
return "({0} with {1} items)".format(type(result).__name__, len(result))
if result:
lines = pprint.pformat(result).split('\n')
extra = '\n...' if len(lines) > max_lines else ''
return '\n'.join(lines[:max_lines]) + extra
return result
class VerboseProxy(object):
"""Proxy all function calls to another class and log method name, arguments
and return values for each call.
"""
def __init__(self, obj_name, obj, log_name=None, max_lines=10):
self.obj_name = obj_name
self.obj = obj
self.max_lines = max_lines
self.log = logging.getLogger(log_name or __name__)
def __getattr__(self, name):
attr = getattr(self.obj, name)
if not six.callable(attr):
return attr
return functools.partial(self.proxy_callable, name)
def proxy_callable(self, call_name, *args, **kwargs):
self.log.info("%s %s <- %s",
self.obj_name,
call_name,
format_call(args, kwargs))
result = getattr(self.obj, call_name)(*args, **kwargs)
self.log.info("%s %s -> %s",
self.obj_name,
call_name,
format_return(result, self.max_lines))
return result
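# Hypothetical usage sketch (not part of the original file): wrap any client
# object to log each method call and a truncated view of its return value.
# The wrapped docker client below is an assumption.
#
#     client = VerboseProxy('docker', docker_client, log_name='compose.cli')
#     client.containers(all=True)   # logs "docker containers <- (all=True)"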
| apache-2.0 | -3,666,302,790,307,750,000 | -3,941,793,555,006,772,000 | 28.155172 | 80 | 0.566529 | false |
ritchyteam/odoo | addons/purchase/wizard/purchase_line_invoice.py | 205 | 5419 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class purchase_line_invoice(osv.osv_memory):
""" To create invoice for purchase order line"""
_name = 'purchase.order.line_invoice'
_description = 'Purchase Order Line Make Invoice'
def makeInvoices(self, cr, uid, ids, context=None):
"""
To get Purchase Order line and create Invoice
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
         @return : return view of Invoice
"""
if context is None:
context={}
record_ids = context.get('active_ids',[])
if record_ids:
res = False
invoices = {}
invoice_obj = self.pool.get('account.invoice')
purchase_obj = self.pool.get('purchase.order')
purchase_line_obj = self.pool.get('purchase.order.line')
invoice_line_obj = self.pool.get('account.invoice.line')
account_jrnl_obj = self.pool.get('account.journal')
def multiple_order_invoice_notes(orders):
notes = ""
for order in orders:
notes += "%s \n" % order.notes
return notes
def make_invoice_by_partner(partner, orders, lines_ids):
"""
create a new invoice for one supplier
@param partner : The object partner
@param orders : The set of orders to add in the invoice
@param lines : The list of line's id
"""
name = orders and orders[0].name or ''
journal_id = account_jrnl_obj.search(cr, uid, [('type', '=', 'purchase')], context=None)
journal_id = journal_id and journal_id[0] or False
a = partner.property_account_payable.id
inv = {
'name': name,
'origin': name,
'type': 'in_invoice',
'journal_id':journal_id,
'reference' : partner.ref,
'account_id': a,
'partner_id': partner.id,
'invoice_line': [(6,0,lines_ids)],
'currency_id' : orders[0].currency_id.id,
'comment': multiple_order_invoice_notes(orders),
'payment_term': orders[0].payment_term_id.id,
'fiscal_position': partner.property_account_position.id
}
inv_id = invoice_obj.create(cr, uid, inv)
for order in orders:
order.write({'invoice_ids': [(4, inv_id)]})
return inv_id
for line in purchase_line_obj.browse(cr, uid, record_ids, context=context):
if (not line.invoiced) and (line.state not in ('draft', 'cancel')):
if not line.partner_id.id in invoices:
invoices[line.partner_id.id] = []
acc_id = purchase_obj._choose_account_from_po_line(cr, uid, line, context=context)
inv_line_data = purchase_obj._prepare_inv_line(cr, uid, acc_id, line, context=context)
inv_line_data.update({'origin': line.order_id.name})
inv_id = invoice_line_obj.create(cr, uid, inv_line_data, context=context)
purchase_line_obj.write(cr, uid, [line.id], {'invoiced': True, 'invoice_lines': [(4, inv_id)]})
invoices[line.partner_id.id].append((line,inv_id))
res = []
for result in invoices.values():
il = map(lambda x: x[1], result)
orders = list(set(map(lambda x : x[0].order_id, result)))
res.append(make_invoice_by_partner(orders[0].partner_id, orders, il))
return {
'domain': "[('id','in', ["+','.join(map(str,res))+"])]",
'name': _('Supplier Invoices'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'account.invoice',
'view_id': False,
'context': "{'type':'in_invoice', 'journal_type': 'purchase'}",
'type': 'ir.actions.act_window'
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -4,905,455,877,429,030,000 | 7,826,620,878,052,239,000 | 42.007937 | 115 | 0.527219 | false |
runefriborg/pycsp | test/unix/multiprocesstest.py | 1 | 7496 | """
Copyright (c) 2009 John Markus Bjoerndalen <[email protected]>,
Brian Vinter <[email protected]>, Rune M. Friborg <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software. THE
SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
sys.path.insert(0, "../..")
from pycsp.parallel import *
import check
import time
import random
@choice
def action(assertCheck, id, channel_input=None):
if assertCheck:
assertCheck(id)
@multiprocess
def reader(cin, id, sleeper, assertCheck=None):
while True:
if sleeper: sleeper()
got = cin()
if assertCheck:
assertCheck(id)
@multiprocess
def writer(cout, id, cnt, sleeper):
for i in range(cnt):
if sleeper: sleeper()
cout((id, i))
retire(cout)
@multiprocess
def par_reader(cin1,cin2,cin3,cin4, cnt, sleeper, assertCheck=None):
while True:
if sleeper: sleeper()
AltSelect(
InputGuard(cin1, action(assertCheck, 0)),
InputGuard(cin2, action(assertCheck, 1)),
InputGuard(cin3, action(assertCheck, 2)),
InputGuard(cin4, action(assertCheck, 3))
)
@multiprocess
def par_fair_reader(cin1,cin2,cin3,cin4, cnt, sleeper, assertCheck=None):
while True:
if sleeper: sleeper()
FairSelect(
InputGuard(cin1, action(assertCheck, 0)),
InputGuard(cin2, action(assertCheck, 1)),
InputGuard(cin3, action(assertCheck, 2)),
InputGuard(cin4, action(assertCheck, 3))
)
@multiprocess
def par_pri_reader(cin1,cin2,cin3,cin4, cnt, sleeper, assertCheck=None):
while True:
if sleeper: sleeper()
PriSelect(
InputGuard(cin1, action(assertCheck, 0)),
InputGuard(cin2, action(assertCheck, 1)),
InputGuard(cin3, action(assertCheck, 2)),
InputGuard(cin4, action(assertCheck, 3))
)
@multiprocess
def return_msg(cin, sleeper):
if sleeper: sleeper()
return cin()
@io
def sleep_one():
time.sleep(0.01)
@io
def sleep_random():
time.sleep(random.random()/100)
def Parallel_Test(sleeper):
c1=Channel()
L= Parallel(writer(c1.writer(), 0, 10, sleeper), 10 * return_msg(c1.reader(), sleeper))
if L and len(L) == 11 and L[0] == None and not None in L[1:]:
print(("OK - MultiProcess_Parallel_Test"+str(sleeper)))
else:
print(("Error - MultiProcess_Parallel_Test"+str(sleeper)))
print((str(L)))
def Sequence_Test(sleeper):
c1=Channel()
Spawn(writer(c1.writer(), 0, 10, sleeper))
L= Sequence(10 * return_msg(c1.reader(), sleeper))
if L and len(L) == 10 and not None in L:
print(("OK - MultiProcess_Sequence_Test"+str(sleeper)))
else:
print(("Error - MultiProcess_Sequence_Test"+str(sleeper)))
print((str(L)))
def One2One_Test(read_sleeper, write_sleeper):
x = Channel()
Spawn(check.Assert(x.reader(), "MultiProcess_One2One_Test"+str(read_sleeper)+str(write_sleeper), count=10, vocabulary=[0]))
c1=Channel()
Parallel(reader(c1.reader(), 0 , read_sleeper, x.writer()), writer(c1.writer(),1,10, write_sleeper))
def Any2One_Alting_Test(read_sleeper, write_sleeper):
x = Channel()
Spawn(check.Assert(x.reader(), "MultiProcess_Any2One_Alting_Test"+str(read_sleeper)+str(write_sleeper), count=40, minimum=10, vocabulary=[0,1,2,3], quit_on_count=True))
c1=Channel()
c2=Channel()
c3=Channel()
c4=Channel()
cnt = 10
Parallel(par_reader(c1.reader(), c2.reader(), c3.reader(), c4.reader(),cnt, read_sleeper, x.writer()),
writer(c1.writer(),0,cnt, write_sleeper),
writer(c2.writer(),1,cnt, write_sleeper),
writer(c3.writer(),2,cnt, write_sleeper),
writer(c4.writer(),3,cnt, write_sleeper))
def Any2One_FairAlting_Test(read_sleeper, write_sleeper):
x = Channel()
Spawn(check.Assert(x.reader(), "MultiProcess_Any2One_FairAlting_Test"+str(read_sleeper)+str(write_sleeper), count=40, minimum=10, vocabulary=[0,1,2,3], quit_on_count=True))
c1=Channel()
c2=Channel()
c3=Channel()
c4=Channel()
cnt = 10
Parallel(par_fair_reader(c1.reader(), c2.reader(), c3.reader(), c4.reader(),cnt, read_sleeper, x.writer()),
writer(c1.writer(),0,cnt, write_sleeper),
writer(c2.writer(),1,cnt, write_sleeper),
writer(c3.writer(),2,cnt, write_sleeper),
writer(c4.writer(),3,cnt, write_sleeper))
def Any2One_PriAlting_Test(read_sleeper, write_sleeper):
x = Channel()
Spawn(check.Assert(x.reader(), "MultiProcess_Any2One_PriAlting_Test"+str(read_sleeper)+str(write_sleeper), count=40, minimum=10, vocabulary=[0,1,2,3], quit_on_count=True))
c1=Channel()
c2=Channel()
c3=Channel()
c4=Channel()
cnt = 10
Parallel(par_pri_reader(c1.reader(), c2.reader(), c3.reader(), c4.reader(),cnt, read_sleeper, x.writer()),
writer(c1.writer(),0,cnt, write_sleeper),
writer(c2.writer(),1,cnt, write_sleeper),
writer(c3.writer(),2,cnt, write_sleeper),
writer(c4.writer(),3,cnt, write_sleeper))
def Any2Any_Test(read_sleeper, write_sleeper):
x = Channel()
Spawn(check.Assert(x.reader(), "MultiProcess_Any2Any_Test"+str(read_sleeper)+str(write_sleeper), count=40, vocabulary=[0,1,2,3]))
c1=Channel()
cnt = 10
Parallel(reader(c1.reader(),0, read_sleeper, x.writer()), writer(c1.writer(),0,cnt, write_sleeper),
reader(c1.reader(),1, read_sleeper, x.writer()), writer(c1.writer(),1,cnt, write_sleeper),
reader(c1.reader(),2, read_sleeper, x.writer()), writer(c1.writer(),2,cnt, write_sleeper),
reader(c1.reader(),3, read_sleeper, x.writer()), writer(c1.writer(),3,cnt, write_sleeper))
def autotest():
for read_sleep in [('Zero', None), ('One',sleep_one), ('Random',sleep_random)]:
Sequence_Test(read_sleep[1])
Parallel_Test(read_sleep[1])
for write_sleep in [('Zero', None), ('One',sleep_one), ('Random',sleep_random)]:
rname, rsleep = read_sleep
wname, wsleep = write_sleep
if not rsleep==wsleep==sleep_one:
One2One_Test(rsleep, wsleep)
Any2One_Alting_Test(rsleep, wsleep)
Any2One_FairAlting_Test(rsleep, wsleep)
Any2One_PriAlting_Test(rsleep, wsleep)
Any2Any_Test(rsleep, wsleep)
if __name__ == '__main__':
autotest()
shutdown()
| mit | 7,900,706,319,140,222,000 | 4,070,024,634,007,122,000 | 33.703704 | 176 | 0.636873 | false |
alvarolopez/nova | nova/cmd/network.py | 27 | 2415 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Starter script for Nova Network."""
import sys
import traceback
from oslo_config import cfg
from oslo_log import log as logging
from nova.conductor import rpcapi as conductor_rpcapi
from nova import config
import nova.db.api
from nova import exception
from nova.i18n import _LE
from nova import objects
from nova.objects import base as objects_base
from nova.openstack.common.report import guru_meditation_report as gmr
from nova import service
from nova import utils
from nova import version
CONF = cfg.CONF
CONF.import_opt('network_topic', 'nova.network.rpcapi')
CONF.import_opt('use_local', 'nova.conductor.api', group='conductor')
def block_db_access():
class NoDB(object):
def __getattr__(self, attr):
return self
def __call__(self, *args, **kwargs):
stacktrace = "".join(traceback.format_stack())
LOG = logging.getLogger('nova.network')
LOG.error(_LE('No db access allowed in nova-network: %s'),
stacktrace)
raise exception.DBNotAllowed('nova-network')
nova.db.api.IMPL = NoDB()
def main():
config.parse_args(sys.argv)
logging.setup(CONF, "nova")
utils.monkey_patch()
objects.register_all()
gmr.TextGuruMeditation.setup_autorun(version)
if not CONF.conductor.use_local:
block_db_access()
objects_base.NovaObject.indirection_api = \
conductor_rpcapi.ConductorAPI()
server = service.Service.create(binary='nova-network',
topic=CONF.network_topic,
db_allowed=CONF.conductor.use_local)
service.serve(server)
service.wait()
| apache-2.0 | -7,859,091,543,804,930,000 | -3,802,106,702,709,693,400 | 31.635135 | 78 | 0.684058 | false |
quinot/ansible-modules-core | cloud/amazon/iam_cert.py | 20 | 11703 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: iam_cert
short_description: Manage server certificates for use on ELBs and CloudFront
description:
- Allows for the management of server certificates
version_added: "2.0"
options:
name:
description:
- Name of certificate to add, update or remove.
required: true
aliases: []
new_name:
description:
- When present, this will update the name of the cert with the value passed here.
required: false
aliases: []
new_path:
description:
- When present, this will update the path of the cert with the value passed here.
required: false
aliases: []
state:
description:
- Whether to create, delete certificate. When present is specified it will attempt to make an update if new_path or new_name is specified.
required: true
default: null
choices: [ "present", "absent" ]
aliases: []
path:
description:
- When creating or updating, specify the desired path of the certificate
required: false
default: "/"
aliases: []
cert_chain:
description:
- The path to the CA certificate chain in PEM encoded format.
required: false
default: null
aliases: []
cert:
description:
- The path to the certificate body in PEM encoded format.
required: false
aliases: []
key:
description:
- The path to the private key of the certificate in PEM encoded format.
dup_ok:
description:
      - By default the module will not upload a certificate that is already uploaded into AWS. If set to True, it will upload the certificate as long as the name is unique.
required: false
default: False
aliases: []
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_secret_key', 'secret_key' ]
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
requirements: [ "boto" ]
author: Jonathan I. Davila
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Basic server certificate upload
tasks:
- name: Upload Certificate
iam_cert:
name: very_ssl
state: present
cert: somecert.pem
key: privcertkey
cert_chain: myverytrustedchain
'''
import json
import sys
try:
import boto
import boto.iam
import boto.ec2
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def boto_exception(err):
'''generic error message handler'''
if hasattr(err, 'error_message'):
error = err.error_message
elif hasattr(err, 'message'):
error = err.message
else:
error = '%s: %s' % (Exception, err)
return error
def cert_meta(iam, name):
opath = iam.get_server_certificate(name).get_server_certificate_result.\
server_certificate.\
server_certificate_metadata.\
path
ocert = iam.get_server_certificate(name).get_server_certificate_result.\
server_certificate.\
certificate_body
ocert_id = iam.get_server_certificate(name).get_server_certificate_result.\
server_certificate.\
server_certificate_metadata.\
server_certificate_id
upload_date = iam.get_server_certificate(name).get_server_certificate_result.\
server_certificate.\
server_certificate_metadata.\
upload_date
exp = iam.get_server_certificate(name).get_server_certificate_result.\
server_certificate.\
server_certificate_metadata.\
expiration
return opath, ocert, ocert_id, upload_date, exp
def dup_check(module, iam, name, new_name, cert, orig_cert_names, orig_cert_bodies, dup_ok):
update=False
if any(ct in orig_cert_names for ct in [name, new_name]):
for i_name in [name, new_name]:
if i_name is None:
continue
if cert is not None:
try:
c_index=orig_cert_names.index(i_name)
except NameError:
continue
else:
if orig_cert_bodies[c_index] == cert:
update=True
break
elif orig_cert_bodies[c_index] != cert:
module.fail_json(changed=False, msg='A cert with the name %s already exists and'
' has a different certificate body associated'
' with it. Certifcates cannot have the same name')
else:
update=True
break
elif cert in orig_cert_bodies and not dup_ok:
for crt_name, crt_body in zip(orig_cert_names, orig_cert_bodies):
if crt_body == cert:
module.fail_json(changed=False, msg='This certificate already'
' exists under the name %s' % crt_name)
return update
def cert_action(module, iam, name, cpath, new_name, new_path, state,
cert, key, chain, orig_cert_names, orig_cert_bodies, dup_ok):
if state == 'present':
update = dup_check(module, iam, name, new_name, cert, orig_cert_names,
orig_cert_bodies, dup_ok)
if update:
opath, ocert, ocert_id, upload_date, exp = cert_meta(iam, name)
changed=True
if new_name and new_path:
iam.update_server_cert(name, new_cert_name=new_name, new_path=new_path)
module.exit_json(changed=changed, original_name=name, new_name=new_name,
original_path=opath, new_path=new_path, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
elif new_name and not new_path:
iam.update_server_cert(name, new_cert_name=new_name)
module.exit_json(changed=changed, original_name=name, new_name=new_name,
cert_path=opath, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
elif not new_name and new_path:
iam.update_server_cert(name, new_path=new_path)
module.exit_json(changed=changed, name=new_name,
original_path=opath, new_path=new_path, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
else:
changed=False
module.exit_json(changed=changed, name=name, cert_path=opath, cert_body=ocert,
upload_date=upload_date, expiration_date=exp,
msg='No new path or name specified. No changes made')
else:
changed=True
iam.upload_server_cert(name, cert, key, cert_chain=chain, path=cpath)
opath, ocert, ocert_id, upload_date, exp = cert_meta(iam, name)
module.exit_json(changed=changed, name=name, cert_path=opath, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
elif state == 'absent':
if name in orig_cert_names:
changed=True
iam.delete_server_cert(name)
module.exit_json(changed=changed, deleted_cert=name)
else:
changed=False
module.exit_json(changed=changed, msg='Certifcate with the name %s already absent' % name)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(
default=None, required=True, choices=['present', 'absent']),
name=dict(default=None, required=False),
cert=dict(default=None, required=False),
key=dict(default=None, required=False),
cert_chain=dict(default=None, required=False),
new_name=dict(default=None, required=False),
path=dict(default='/', required=False),
new_path=dict(default=None, required=False),
dup_ok=dict(default=False, required=False, choices=[False, True])
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[],
)
if not HAS_BOTO:
module.fail_json(msg="Boto is required for this module")
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
try:
if region:
iam = boto.iam.connect_to_region(region, **aws_connect_kwargs)
else:
iam = boto.iam.connection.IAMConnection(**aws_connect_kwargs)
except boto.exception.NoAuthHandlerFound, e:
module.fail_json(msg=str(e))
state = module.params.get('state')
name = module.params.get('name')
path = module.params.get('path')
new_name = module.params.get('new_name')
new_path = module.params.get('new_path')
cert_chain = module.params.get('cert_chain')
dup_ok = module.params.get('dup_ok')
if state == 'present':
cert = open(module.params.get('cert'), 'r').read().rstrip()
key = open(module.params.get('key'), 'r').read().rstrip()
if cert_chain is not None:
cert_chain = open(module.params.get('cert_chain'), 'r').read()
else:
key=cert=chain=None
orig_certs = [ctb['server_certificate_name'] for ctb in \
iam.get_all_server_certs().\
list_server_certificates_result.\
server_certificate_metadata_list]
orig_bodies = [iam.get_server_certificate(thing).\
get_server_certificate_result.\
certificate_body \
for thing in orig_certs]
if new_name == name:
new_name = None
if new_path == path:
new_path = None
changed = False
try:
cert_action(module, iam, name, path, new_name, new_path, state,
cert, key, cert_chain, orig_certs, orig_bodies, dup_ok)
except boto.exception.BotoServerError, err:
module.fail_json(changed=changed, msg=str(err), debug=[cert,key])
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 | 6,057,035,950,341,978,000 | 1,039,897,925,715,084,400 | 38.40404 | 170 | 0.569256 | false |
gnowxilef/Wox | PythonHome/Lib/site-packages/chardet/langbulgarianmodel.py | 2965 | 12784 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# this table is modified base on win1251BulgarianCharToOrderMap, so
# only number <64 is sure valid
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences:3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
Latin5BulgarianModel = {
'charToOrderMap': Latin5_BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
Win1251BulgarianModel = {
'charToOrderMap': win1251BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
# flake8: noqa
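# How these tables are typically consumed (a rough sketch, not code from this
# module): the charToOrderMap turns each input byte into a frequency "order",
# and BulgarianLangModel acts as a flattened 64x64 matrix of sequence classes
# (3 = positive ... 0 = negative), indexed roughly as
#
#   SAMPLE_SIZE = 64
#   cls = BulgarianLangModel[prev_order * SAMPLE_SIZE + cur_order]
#
# mTypicalPositiveRatio then feeds the prober's confidence estimate; the actual
# prober implementation lives elsewhere in the package.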
| mit | -8,479,788,386,484,872,000 | -2,025,793,909,222,657,000 | 54.825328 | 70 | 0.553113 | false |
huijunwu/heron | heron/shell/src/python/handlers/pmaphandler.py | 5 | 1352 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
''' pmaphandler.py '''
import json
import tornado.web
from heron.shell.src.python import utils
class PmapHandler(tornado.web.RequestHandler):
"""
Responsible for reporting memory map of a process given its pid.
"""
# pylint: disable=attribute-defined-outside-init
@tornado.web.asynchronous
def get(self, pid):
''' get method '''
body = utils.str_cmd(['pmap', '-pXX', pid], None, None)
self.content_type = 'application/json'
self.write(json.dumps(body))
self.finish()
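# Illustrative use, assuming a URL route like the one below is registered for
# this handler elsewhere in the shell (the exact path is an assumption):
#
#   GET http://<shell-host>:<shell-port>/pmap/<pid>
#   -> JSON-encoded string containing the output of `pmap -pXX <pid>`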
| apache-2.0 | 8,040,664,786,302,979,000 | -8,496,280,954,676,820,000 | 32.8 | 66 | 0.724112 | false |
NixaSoftware/CVis | venv/lib/python2.7/site-packages/pygments/lexers/_vim_builtins.py | 31 | 57090 | # -*- coding: utf-8 -*-
"""
pygments.lexers._vim_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file is autogenerated by scripts/get_vimkw.py
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# Split up in multiple functions so it's importable by jython, which has a
# per-method size limit.
def _getauto():
var = (
('BufAdd','BufAdd'),
('BufCreate','BufCreate'),
('BufDelete','BufDelete'),
('BufEnter','BufEnter'),
('BufFilePost','BufFilePost'),
('BufFilePre','BufFilePre'),
('BufHidden','BufHidden'),
('BufLeave','BufLeave'),
('BufNew','BufNew'),
('BufNewFile','BufNewFile'),
('BufRead','BufRead'),
('BufReadCmd','BufReadCmd'),
('BufReadPost','BufReadPost'),
('BufReadPre','BufReadPre'),
('BufUnload','BufUnload'),
('BufWinEnter','BufWinEnter'),
('BufWinLeave','BufWinLeave'),
('BufWipeout','BufWipeout'),
('BufWrite','BufWrite'),
('BufWriteCmd','BufWriteCmd'),
('BufWritePost','BufWritePost'),
('BufWritePre','BufWritePre'),
('Cmd','Cmd'),
('CmdwinEnter','CmdwinEnter'),
('CmdwinLeave','CmdwinLeave'),
('ColorScheme','ColorScheme'),
('CompleteDone','CompleteDone'),
('CursorHold','CursorHold'),
('CursorHoldI','CursorHoldI'),
('CursorMoved','CursorMoved'),
('CursorMovedI','CursorMovedI'),
('EncodingChanged','EncodingChanged'),
('FileAppendCmd','FileAppendCmd'),
('FileAppendPost','FileAppendPost'),
('FileAppendPre','FileAppendPre'),
('FileChangedRO','FileChangedRO'),
('FileChangedShell','FileChangedShell'),
('FileChangedShellPost','FileChangedShellPost'),
('FileEncoding','FileEncoding'),
('FileReadCmd','FileReadCmd'),
('FileReadPost','FileReadPost'),
('FileReadPre','FileReadPre'),
('FileType','FileType'),
('FileWriteCmd','FileWriteCmd'),
('FileWritePost','FileWritePost'),
('FileWritePre','FileWritePre'),
('FilterReadPost','FilterReadPost'),
('FilterReadPre','FilterReadPre'),
('FilterWritePost','FilterWritePost'),
('FilterWritePre','FilterWritePre'),
('FocusGained','FocusGained'),
('FocusLost','FocusLost'),
('FuncUndefined','FuncUndefined'),
('GUIEnter','GUIEnter'),
('GUIFailed','GUIFailed'),
('InsertChange','InsertChange'),
('InsertCharPre','InsertCharPre'),
('InsertEnter','InsertEnter'),
('InsertLeave','InsertLeave'),
('MenuPopup','MenuPopup'),
('QuickFixCmdPost','QuickFixCmdPost'),
('QuickFixCmdPre','QuickFixCmdPre'),
('QuitPre','QuitPre'),
('RemoteReply','RemoteReply'),
('SessionLoadPost','SessionLoadPost'),
('ShellCmdPost','ShellCmdPost'),
('ShellFilterPost','ShellFilterPost'),
('SourceCmd','SourceCmd'),
('SourcePre','SourcePre'),
('SpellFileMissing','SpellFileMissing'),
('StdinReadPost','StdinReadPost'),
('StdinReadPre','StdinReadPre'),
('SwapExists','SwapExists'),
('Syntax','Syntax'),
('TabEnter','TabEnter'),
('TabLeave','TabLeave'),
('TermChanged','TermChanged'),
('TermResponse','TermResponse'),
('TextChanged','TextChanged'),
('TextChangedI','TextChangedI'),
('User','User'),
('UserGettingBored','UserGettingBored'),
('VimEnter','VimEnter'),
('VimLeave','VimLeave'),
('VimLeavePre','VimLeavePre'),
('VimResized','VimResized'),
('WinEnter','WinEnter'),
('WinLeave','WinLeave'),
('event','event'),
)
return var
auto = _getauto()
def _getcommand():
var = (
('a','a'),
('ab','ab'),
('abc','abclear'),
('abo','aboveleft'),
('al','all'),
('ar','ar'),
('ar','args'),
('arga','argadd'),
('argd','argdelete'),
('argdo','argdo'),
('arge','argedit'),
('argg','argglobal'),
('argl','arglocal'),
('argu','argument'),
('as','ascii'),
('au','au'),
('b','buffer'),
('bN','bNext'),
('ba','ball'),
('bad','badd'),
('bd','bdelete'),
('bel','belowright'),
('bf','bfirst'),
('bl','blast'),
('bm','bmodified'),
('bn','bnext'),
('bo','botright'),
('bp','bprevious'),
('br','br'),
('br','brewind'),
('brea','break'),
('breaka','breakadd'),
('breakd','breakdel'),
('breakl','breaklist'),
('bro','browse'),
('bu','bu'),
('buf','buf'),
('bufdo','bufdo'),
('buffers','buffers'),
('bun','bunload'),
('bw','bwipeout'),
('c','c'),
('c','change'),
('cN','cN'),
('cN','cNext'),
('cNf','cNf'),
('cNf','cNfile'),
('cabc','cabclear'),
('cad','cad'),
('cad','caddexpr'),
('caddb','caddbuffer'),
('caddf','caddfile'),
('cal','call'),
('cat','catch'),
('cb','cbuffer'),
('cc','cc'),
('ccl','cclose'),
('cd','cd'),
('ce','center'),
('cex','cexpr'),
('cf','cfile'),
('cfir','cfirst'),
('cg','cgetfile'),
('cgetb','cgetbuffer'),
('cgete','cgetexpr'),
('changes','changes'),
('chd','chdir'),
('che','checkpath'),
('checkt','checktime'),
('cl','cl'),
('cl','clist'),
('cla','clast'),
('clo','close'),
('cmapc','cmapclear'),
('cn','cn'),
('cn','cnext'),
('cnew','cnewer'),
('cnf','cnf'),
('cnf','cnfile'),
('co','copy'),
('col','colder'),
('colo','colorscheme'),
('com','com'),
('comc','comclear'),
('comp','compiler'),
('con','con'),
('con','continue'),
('conf','confirm'),
('cope','copen'),
('cp','cprevious'),
('cpf','cpfile'),
('cq','cquit'),
('cr','crewind'),
('cs','cs'),
('cscope','cscope'),
('cstag','cstag'),
('cuna','cunabbrev'),
('cw','cwindow'),
('d','d'),
('d','delete'),
('de','de'),
('debug','debug'),
('debugg','debuggreedy'),
('del','del'),
('delc','delcommand'),
('delel','delel'),
('delep','delep'),
('deletel','deletel'),
('deletep','deletep'),
('deletl','deletl'),
('deletp','deletp'),
('delf','delf'),
('delf','delfunction'),
('dell','dell'),
('delm','delmarks'),
('delp','delp'),
('dep','dep'),
('di','di'),
('di','display'),
('diffg','diffget'),
('diffo','diffoff'),
('diffp','diffpatch'),
('diffpu','diffput'),
('diffs','diffsplit'),
('difft','diffthis'),
('diffu','diffupdate'),
('dig','dig'),
('dig','digraphs'),
('dir','dir'),
('dj','djump'),
('dl','dl'),
('dli','dlist'),
('do','do'),
('doau','doau'),
('dp','dp'),
('dr','drop'),
('ds','dsearch'),
('dsp','dsplit'),
('e','e'),
('e','edit'),
('ea','ea'),
('earlier','earlier'),
('ec','ec'),
('echoe','echoerr'),
('echom','echomsg'),
('echon','echon'),
('el','else'),
('elsei','elseif'),
('em','emenu'),
('en','en'),
('en','endif'),
('endf','endf'),
('endf','endfunction'),
('endfo','endfor'),
('endfun','endfun'),
('endt','endtry'),
('endw','endwhile'),
('ene','enew'),
('ex','ex'),
('exi','exit'),
('exu','exusage'),
('f','f'),
('f','file'),
('files','files'),
('filet','filet'),
('filetype','filetype'),
('fin','fin'),
('fin','find'),
('fina','finally'),
('fini','finish'),
('fir','first'),
('fix','fixdel'),
('fo','fold'),
('foldc','foldclose'),
('foldd','folddoopen'),
('folddoc','folddoclosed'),
('foldo','foldopen'),
('for','for'),
('fu','fu'),
('fu','function'),
('fun','fun'),
('g','g'),
('go','goto'),
('gr','grep'),
('grepa','grepadd'),
('gui','gui'),
('gvim','gvim'),
('h','h'),
('h','help'),
('ha','hardcopy'),
('helpf','helpfind'),
('helpg','helpgrep'),
('helpt','helptags'),
('hi','hi'),
('hid','hide'),
('his','history'),
('i','i'),
('ia','ia'),
('iabc','iabclear'),
('if','if'),
('ij','ijump'),
('il','ilist'),
('imapc','imapclear'),
('in','in'),
('intro','intro'),
('is','isearch'),
('isp','isplit'),
('iuna','iunabbrev'),
('j','join'),
('ju','jumps'),
('k','k'),
('kee','keepmarks'),
('keepa','keepa'),
('keepalt','keepalt'),
('keepj','keepjumps'),
('keepp','keeppatterns'),
('l','l'),
('l','list'),
('lN','lN'),
('lN','lNext'),
('lNf','lNf'),
('lNf','lNfile'),
('la','la'),
('la','last'),
('lad','lad'),
('lad','laddexpr'),
('laddb','laddbuffer'),
('laddf','laddfile'),
('lan','lan'),
('lan','language'),
('lat','lat'),
('later','later'),
('lb','lbuffer'),
('lc','lcd'),
('lch','lchdir'),
('lcl','lclose'),
('lcs','lcs'),
('lcscope','lcscope'),
('le','left'),
('lefta','leftabove'),
('lex','lexpr'),
('lf','lfile'),
('lfir','lfirst'),
('lg','lgetfile'),
('lgetb','lgetbuffer'),
('lgete','lgetexpr'),
('lgr','lgrep'),
('lgrepa','lgrepadd'),
('lh','lhelpgrep'),
('ll','ll'),
('lla','llast'),
('lli','llist'),
('lmak','lmake'),
('lmapc','lmapclear'),
('lne','lne'),
('lne','lnext'),
('lnew','lnewer'),
('lnf','lnf'),
('lnf','lnfile'),
('lo','lo'),
('lo','loadview'),
('loadk','loadk'),
('loadkeymap','loadkeymap'),
('loc','lockmarks'),
('lockv','lockvar'),
('lol','lolder'),
('lop','lopen'),
('lp','lprevious'),
('lpf','lpfile'),
('lr','lrewind'),
('ls','ls'),
('lt','ltag'),
('lua','lua'),
('luado','luado'),
('luafile','luafile'),
('lv','lvimgrep'),
('lvimgrepa','lvimgrepadd'),
('lw','lwindow'),
('m','move'),
('ma','ma'),
('ma','mark'),
('mak','make'),
('marks','marks'),
('mat','match'),
('menut','menut'),
('menut','menutranslate'),
('mes','mes'),
('messages','messages'),
('mk','mk'),
('mk','mkexrc'),
('mks','mksession'),
('mksp','mkspell'),
('mkv','mkv'),
('mkv','mkvimrc'),
('mkvie','mkview'),
('mo','mo'),
('mod','mode'),
('mz','mz'),
('mz','mzscheme'),
('mzf','mzfile'),
('n','n'),
('n','next'),
('nb','nbkey'),
('nbc','nbclose'),
('nbs','nbstart'),
('ne','ne'),
('new','new'),
('nmapc','nmapclear'),
('noa','noa'),
('noautocmd','noautocmd'),
('noh','nohlsearch'),
('nu','number'),
('o','o'),
('o','open'),
('ol','oldfiles'),
('omapc','omapclear'),
('on','only'),
('opt','options'),
('ownsyntax','ownsyntax'),
('p','p'),
('p','print'),
('pc','pclose'),
('pe','pe'),
('pe','perl'),
('ped','pedit'),
('perld','perldo'),
('po','pop'),
('popu','popu'),
('popu','popup'),
('pp','ppop'),
('pr','pr'),
('pre','preserve'),
('prev','previous'),
('pro','pro'),
('prof','profile'),
('profd','profdel'),
('promptf','promptfind'),
('promptr','promptrepl'),
('ps','psearch'),
('ptN','ptN'),
('ptN','ptNext'),
('pta','ptag'),
('ptf','ptfirst'),
('ptj','ptjump'),
('ptl','ptlast'),
('ptn','ptn'),
('ptn','ptnext'),
('ptp','ptprevious'),
('ptr','ptrewind'),
('pts','ptselect'),
('pu','put'),
('pw','pwd'),
('py','py'),
('py','python'),
('py3','py3'),
('py3','py3'),
('py3do','py3do'),
('pydo','pydo'),
('pyf','pyfile'),
('python3','python3'),
('q','q'),
('q','quit'),
('qa','qall'),
('quita','quitall'),
('r','r'),
('r','read'),
('re','re'),
('rec','recover'),
('red','red'),
('red','redo'),
('redi','redir'),
('redr','redraw'),
('redraws','redrawstatus'),
('reg','registers'),
('res','resize'),
('ret','retab'),
('retu','return'),
('rew','rewind'),
('ri','right'),
('rightb','rightbelow'),
('ru','ru'),
('ru','runtime'),
('rub','ruby'),
('rubyd','rubydo'),
('rubyf','rubyfile'),
('rundo','rundo'),
('rv','rviminfo'),
('sN','sNext'),
('sa','sargument'),
('sal','sall'),
('san','sandbox'),
('sav','saveas'),
('sb','sbuffer'),
('sbN','sbNext'),
('sba','sball'),
('sbf','sbfirst'),
('sbl','sblast'),
('sbm','sbmodified'),
('sbn','sbnext'),
('sbp','sbprevious'),
('sbr','sbrewind'),
('scrip','scrip'),
('scrip','scriptnames'),
('scripte','scriptencoding'),
('scs','scs'),
('scscope','scscope'),
('se','set'),
('setf','setfiletype'),
('setg','setglobal'),
('setl','setlocal'),
('sf','sfind'),
('sfir','sfirst'),
('sh','shell'),
('si','si'),
('sig','sig'),
('sign','sign'),
('sil','silent'),
('sim','simalt'),
('sl','sl'),
('sl','sleep'),
('sla','slast'),
('sm','smagic'),
('sm','smap'),
('sme','sme'),
('smenu','smenu'),
('sn','snext'),
('sni','sniff'),
('sno','snomagic'),
('snoreme','snoreme'),
('snoremenu','snoremenu'),
('so','so'),
('so','source'),
('sor','sort'),
('sp','split'),
('spe','spe'),
('spe','spellgood'),
('spelld','spelldump'),
('spelli','spellinfo'),
('spellr','spellrepall'),
('spellu','spellundo'),
('spellw','spellwrong'),
('spr','sprevious'),
('sre','srewind'),
('st','st'),
('st','stop'),
('sta','stag'),
('star','star'),
('star','startinsert'),
('start','start'),
('startg','startgreplace'),
('startr','startreplace'),
('stj','stjump'),
('stopi','stopinsert'),
('sts','stselect'),
('sun','sunhide'),
('sunme','sunme'),
('sunmenu','sunmenu'),
('sus','suspend'),
('sv','sview'),
('sw','swapname'),
('sy','sy'),
('syn','syn'),
('sync','sync'),
('syncbind','syncbind'),
('syntime','syntime'),
('t','t'),
('tN','tN'),
('tN','tNext'),
('ta','ta'),
('ta','tag'),
('tab','tab'),
('tabN','tabN'),
('tabN','tabNext'),
('tabc','tabclose'),
('tabd','tabdo'),
('tabe','tabedit'),
('tabf','tabfind'),
('tabfir','tabfirst'),
('tabl','tablast'),
('tabm','tabmove'),
('tabn','tabnext'),
('tabnew','tabnew'),
('tabo','tabonly'),
('tabp','tabprevious'),
('tabr','tabrewind'),
('tabs','tabs'),
('tags','tags'),
('tc','tcl'),
('tcld','tcldo'),
('tclf','tclfile'),
('te','tearoff'),
('tf','tfirst'),
('th','throw'),
('tj','tjump'),
('tl','tlast'),
('tm','tm'),
('tm','tmenu'),
('tn','tn'),
('tn','tnext'),
('to','topleft'),
('tp','tprevious'),
('tr','tr'),
('tr','trewind'),
('try','try'),
('ts','tselect'),
('tu','tu'),
('tu','tunmenu'),
('u','u'),
('u','undo'),
('un','un'),
('una','unabbreviate'),
('undoj','undojoin'),
('undol','undolist'),
('unh','unhide'),
('unl','unl'),
('unlo','unlockvar'),
('uns','unsilent'),
('up','update'),
('v','v'),
('ve','ve'),
('ve','version'),
('verb','verbose'),
('vert','vertical'),
('vi','vi'),
('vi','visual'),
('vie','view'),
('vim','vimgrep'),
('vimgrepa','vimgrepadd'),
('viu','viusage'),
('vmapc','vmapclear'),
('vne','vnew'),
('vs','vsplit'),
('w','w'),
('w','write'),
('wN','wNext'),
('wa','wall'),
('wh','while'),
('win','win'),
('win','winsize'),
('winc','wincmd'),
('windo','windo'),
('winp','winpos'),
('wn','wnext'),
('wp','wprevious'),
('wq','wq'),
('wqa','wqall'),
('ws','wsverb'),
('wundo','wundo'),
('wv','wviminfo'),
('x','x'),
('x','xit'),
('xa','xall'),
('xmapc','xmapclear'),
('xme','xme'),
('xmenu','xmenu'),
('xnoreme','xnoreme'),
('xnoremenu','xnoremenu'),
('xunme','xunme'),
('xunmenu','xunmenu'),
('xwininfo','xwininfo'),
('y','yank'),
)
return var
command = _getcommand()
def _getoption():
var = (
('acd','acd'),
('ai','ai'),
('akm','akm'),
('al','al'),
('aleph','aleph'),
('allowrevins','allowrevins'),
('altkeymap','altkeymap'),
('ambiwidth','ambiwidth'),
('ambw','ambw'),
('anti','anti'),
('antialias','antialias'),
('ar','ar'),
('arab','arab'),
('arabic','arabic'),
('arabicshape','arabicshape'),
('ari','ari'),
('arshape','arshape'),
('autochdir','autochdir'),
('autoindent','autoindent'),
('autoread','autoread'),
('autowrite','autowrite'),
('autowriteall','autowriteall'),
('aw','aw'),
('awa','awa'),
('background','background'),
('backspace','backspace'),
('backup','backup'),
('backupcopy','backupcopy'),
('backupdir','backupdir'),
('backupext','backupext'),
('backupskip','backupskip'),
('balloondelay','balloondelay'),
('ballooneval','ballooneval'),
('balloonexpr','balloonexpr'),
('bdir','bdir'),
('bdlay','bdlay'),
('beval','beval'),
('bex','bex'),
('bexpr','bexpr'),
('bg','bg'),
('bh','bh'),
('bin','bin'),
('binary','binary'),
('biosk','biosk'),
('bioskey','bioskey'),
('bk','bk'),
('bkc','bkc'),
('bl','bl'),
('bomb','bomb'),
('breakat','breakat'),
('brk','brk'),
('browsedir','browsedir'),
('bs','bs'),
('bsdir','bsdir'),
('bsk','bsk'),
('bt','bt'),
('bufhidden','bufhidden'),
('buflisted','buflisted'),
('buftype','buftype'),
('casemap','casemap'),
('cb','cb'),
('cc','cc'),
('ccv','ccv'),
('cd','cd'),
('cdpath','cdpath'),
('cedit','cedit'),
('cf','cf'),
('cfu','cfu'),
('ch','ch'),
('charconvert','charconvert'),
('ci','ci'),
('cin','cin'),
('cindent','cindent'),
('cink','cink'),
('cinkeys','cinkeys'),
('cino','cino'),
('cinoptions','cinoptions'),
('cinw','cinw'),
('cinwords','cinwords'),
('clipboard','clipboard'),
('cmdheight','cmdheight'),
('cmdwinheight','cmdwinheight'),
('cmp','cmp'),
('cms','cms'),
('co','co'),
('cocu','cocu'),
('cole','cole'),
('colorcolumn','colorcolumn'),
('columns','columns'),
('com','com'),
('comments','comments'),
('commentstring','commentstring'),
('compatible','compatible'),
('complete','complete'),
('completefunc','completefunc'),
('completeopt','completeopt'),
('concealcursor','concealcursor'),
('conceallevel','conceallevel'),
('confirm','confirm'),
('consk','consk'),
('conskey','conskey'),
('copyindent','copyindent'),
('cot','cot'),
('cp','cp'),
('cpo','cpo'),
('cpoptions','cpoptions'),
('cpt','cpt'),
('crb','crb'),
('cryptmethod','cryptmethod'),
('cscopepathcomp','cscopepathcomp'),
('cscopeprg','cscopeprg'),
('cscopequickfix','cscopequickfix'),
('cscoperelative','cscoperelative'),
('cscopetag','cscopetag'),
('cscopetagorder','cscopetagorder'),
('cscopeverbose','cscopeverbose'),
('cspc','cspc'),
('csprg','csprg'),
('csqf','csqf'),
('csre','csre'),
('cst','cst'),
('csto','csto'),
('csverb','csverb'),
('cuc','cuc'),
('cul','cul'),
('cursorbind','cursorbind'),
('cursorcolumn','cursorcolumn'),
('cursorline','cursorline'),
('cwh','cwh'),
('debug','debug'),
('deco','deco'),
('def','def'),
('define','define'),
('delcombine','delcombine'),
('dex','dex'),
('dg','dg'),
('dict','dict'),
('dictionary','dictionary'),
('diff','diff'),
('diffexpr','diffexpr'),
('diffopt','diffopt'),
('digraph','digraph'),
('dip','dip'),
('dir','dir'),
('directory','directory'),
('display','display'),
('dy','dy'),
('ea','ea'),
('ead','ead'),
('eadirection','eadirection'),
('eb','eb'),
('ed','ed'),
('edcompatible','edcompatible'),
('ef','ef'),
('efm','efm'),
('ei','ei'),
('ek','ek'),
('enc','enc'),
('encoding','encoding'),
('endofline','endofline'),
('eol','eol'),
('ep','ep'),
('equalalways','equalalways'),
('equalprg','equalprg'),
('errorbells','errorbells'),
('errorfile','errorfile'),
('errorformat','errorformat'),
('esckeys','esckeys'),
('et','et'),
('eventignore','eventignore'),
('ex','ex'),
('expandtab','expandtab'),
('exrc','exrc'),
('fcl','fcl'),
('fcs','fcs'),
('fdc','fdc'),
('fde','fde'),
('fdi','fdi'),
('fdl','fdl'),
('fdls','fdls'),
('fdm','fdm'),
('fdn','fdn'),
('fdo','fdo'),
('fdt','fdt'),
('fen','fen'),
('fenc','fenc'),
('fencs','fencs'),
('fex','fex'),
('ff','ff'),
('ffs','ffs'),
('fic','fic'),
('fileencoding','fileencoding'),
('fileencodings','fileencodings'),
('fileformat','fileformat'),
('fileformats','fileformats'),
('fileignorecase','fileignorecase'),
('filetype','filetype'),
('fillchars','fillchars'),
('fk','fk'),
('fkmap','fkmap'),
('flp','flp'),
('fml','fml'),
('fmr','fmr'),
('fo','fo'),
('foldclose','foldclose'),
('foldcolumn','foldcolumn'),
('foldenable','foldenable'),
('foldexpr','foldexpr'),
('foldignore','foldignore'),
('foldlevel','foldlevel'),
('foldlevelstart','foldlevelstart'),
('foldmarker','foldmarker'),
('foldmethod','foldmethod'),
('foldminlines','foldminlines'),
('foldnestmax','foldnestmax'),
('foldopen','foldopen'),
('foldtext','foldtext'),
('formatexpr','formatexpr'),
('formatlistpat','formatlistpat'),
('formatoptions','formatoptions'),
('formatprg','formatprg'),
('fp','fp'),
('fs','fs'),
('fsync','fsync'),
('ft','ft'),
('gcr','gcr'),
('gd','gd'),
('gdefault','gdefault'),
('gfm','gfm'),
('gfn','gfn'),
('gfs','gfs'),
('gfw','gfw'),
('ghr','ghr'),
('go','go'),
('gp','gp'),
('grepformat','grepformat'),
('grepprg','grepprg'),
('gtl','gtl'),
('gtt','gtt'),
('guicursor','guicursor'),
('guifont','guifont'),
('guifontset','guifontset'),
('guifontwide','guifontwide'),
('guiheadroom','guiheadroom'),
('guioptions','guioptions'),
('guipty','guipty'),
('guitablabel','guitablabel'),
('guitabtooltip','guitabtooltip'),
('helpfile','helpfile'),
('helpheight','helpheight'),
('helplang','helplang'),
('hf','hf'),
('hh','hh'),
('hi','hi'),
('hid','hid'),
('hidden','hidden'),
('highlight','highlight'),
('history','history'),
('hk','hk'),
('hkmap','hkmap'),
('hkmapp','hkmapp'),
('hkp','hkp'),
('hl','hl'),
('hlg','hlg'),
('hls','hls'),
('hlsearch','hlsearch'),
('ic','ic'),
('icon','icon'),
('iconstring','iconstring'),
('ignorecase','ignorecase'),
('im','im'),
('imactivatefunc','imactivatefunc'),
('imactivatekey','imactivatekey'),
('imaf','imaf'),
('imak','imak'),
('imc','imc'),
('imcmdline','imcmdline'),
('imd','imd'),
('imdisable','imdisable'),
('imi','imi'),
('iminsert','iminsert'),
('ims','ims'),
('imsearch','imsearch'),
('imsf','imsf'),
('imstatusfunc','imstatusfunc'),
('inc','inc'),
('include','include'),
('includeexpr','includeexpr'),
('incsearch','incsearch'),
('inde','inde'),
('indentexpr','indentexpr'),
('indentkeys','indentkeys'),
('indk','indk'),
('inex','inex'),
('inf','inf'),
('infercase','infercase'),
('inoremap','inoremap'),
('insertmode','insertmode'),
('invacd','invacd'),
('invai','invai'),
('invakm','invakm'),
('invallowrevins','invallowrevins'),
('invaltkeymap','invaltkeymap'),
('invanti','invanti'),
('invantialias','invantialias'),
('invar','invar'),
('invarab','invarab'),
('invarabic','invarabic'),
('invarabicshape','invarabicshape'),
('invari','invari'),
('invarshape','invarshape'),
('invautochdir','invautochdir'),
('invautoindent','invautoindent'),
('invautoread','invautoread'),
('invautowrite','invautowrite'),
('invautowriteall','invautowriteall'),
('invaw','invaw'),
('invawa','invawa'),
('invbackup','invbackup'),
('invballooneval','invballooneval'),
('invbeval','invbeval'),
('invbin','invbin'),
('invbinary','invbinary'),
('invbiosk','invbiosk'),
('invbioskey','invbioskey'),
('invbk','invbk'),
('invbl','invbl'),
('invbomb','invbomb'),
('invbuflisted','invbuflisted'),
('invcf','invcf'),
('invci','invci'),
('invcin','invcin'),
('invcindent','invcindent'),
('invcompatible','invcompatible'),
('invconfirm','invconfirm'),
('invconsk','invconsk'),
('invconskey','invconskey'),
('invcopyindent','invcopyindent'),
('invcp','invcp'),
('invcrb','invcrb'),
('invcscoperelative','invcscoperelative'),
('invcscopetag','invcscopetag'),
('invcscopeverbose','invcscopeverbose'),
('invcsre','invcsre'),
('invcst','invcst'),
('invcsverb','invcsverb'),
('invcuc','invcuc'),
('invcul','invcul'),
('invcursorbind','invcursorbind'),
('invcursorcolumn','invcursorcolumn'),
('invcursorline','invcursorline'),
('invdeco','invdeco'),
('invdelcombine','invdelcombine'),
('invdg','invdg'),
('invdiff','invdiff'),
('invdigraph','invdigraph'),
('invea','invea'),
('inveb','inveb'),
('inved','inved'),
('invedcompatible','invedcompatible'),
('invek','invek'),
('invendofline','invendofline'),
('inveol','inveol'),
('invequalalways','invequalalways'),
('inverrorbells','inverrorbells'),
('invesckeys','invesckeys'),
('invet','invet'),
('invex','invex'),
('invexpandtab','invexpandtab'),
('invexrc','invexrc'),
('invfen','invfen'),
('invfic','invfic'),
('invfileignorecase','invfileignorecase'),
('invfk','invfk'),
('invfkmap','invfkmap'),
('invfoldenable','invfoldenable'),
('invgd','invgd'),
('invgdefault','invgdefault'),
('invguipty','invguipty'),
('invhid','invhid'),
('invhidden','invhidden'),
('invhk','invhk'),
('invhkmap','invhkmap'),
('invhkmapp','invhkmapp'),
('invhkp','invhkp'),
('invhls','invhls'),
('invhlsearch','invhlsearch'),
('invic','invic'),
('invicon','invicon'),
('invignorecase','invignorecase'),
('invim','invim'),
('invimc','invimc'),
('invimcmdline','invimcmdline'),
('invimd','invimd'),
('invimdisable','invimdisable'),
('invincsearch','invincsearch'),
('invinf','invinf'),
('invinfercase','invinfercase'),
('invinsertmode','invinsertmode'),
('invis','invis'),
('invjoinspaces','invjoinspaces'),
('invjs','invjs'),
('invlazyredraw','invlazyredraw'),
('invlbr','invlbr'),
('invlinebreak','invlinebreak'),
('invlisp','invlisp'),
('invlist','invlist'),
('invloadplugins','invloadplugins'),
('invlpl','invlpl'),
('invlz','invlz'),
('invma','invma'),
('invmacatsui','invmacatsui'),
('invmagic','invmagic'),
('invmh','invmh'),
('invml','invml'),
('invmod','invmod'),
('invmodeline','invmodeline'),
('invmodifiable','invmodifiable'),
('invmodified','invmodified'),
('invmore','invmore'),
('invmousef','invmousef'),
('invmousefocus','invmousefocus'),
('invmousehide','invmousehide'),
('invnu','invnu'),
('invnumber','invnumber'),
('invodev','invodev'),
('invopendevice','invopendevice'),
('invpaste','invpaste'),
('invpi','invpi'),
('invpreserveindent','invpreserveindent'),
('invpreviewwindow','invpreviewwindow'),
('invprompt','invprompt'),
('invpvw','invpvw'),
('invreadonly','invreadonly'),
('invrelativenumber','invrelativenumber'),
('invremap','invremap'),
('invrestorescreen','invrestorescreen'),
('invrevins','invrevins'),
('invri','invri'),
('invrightleft','invrightleft'),
('invrl','invrl'),
('invrnu','invrnu'),
('invro','invro'),
('invrs','invrs'),
('invru','invru'),
('invruler','invruler'),
('invsb','invsb'),
('invsc','invsc'),
('invscb','invscb'),
('invscrollbind','invscrollbind'),
('invscs','invscs'),
('invsecure','invsecure'),
('invsft','invsft'),
('invshellslash','invshellslash'),
('invshelltemp','invshelltemp'),
('invshiftround','invshiftround'),
('invshortname','invshortname'),
('invshowcmd','invshowcmd'),
('invshowfulltag','invshowfulltag'),
('invshowmatch','invshowmatch'),
('invshowmode','invshowmode'),
('invsi','invsi'),
('invsm','invsm'),
('invsmartcase','invsmartcase'),
('invsmartindent','invsmartindent'),
('invsmarttab','invsmarttab'),
('invsmd','invsmd'),
('invsn','invsn'),
('invsol','invsol'),
('invspell','invspell'),
('invsplitbelow','invsplitbelow'),
('invsplitright','invsplitright'),
('invspr','invspr'),
('invsr','invsr'),
('invssl','invssl'),
('invsta','invsta'),
('invstartofline','invstartofline'),
('invstmp','invstmp'),
('invswapfile','invswapfile'),
('invswf','invswf'),
('invta','invta'),
('invtagbsearch','invtagbsearch'),
('invtagrelative','invtagrelative'),
('invtagstack','invtagstack'),
('invtbi','invtbi'),
('invtbidi','invtbidi'),
('invtbs','invtbs'),
('invtermbidi','invtermbidi'),
('invterse','invterse'),
('invtextauto','invtextauto'),
('invtextmode','invtextmode'),
('invtf','invtf'),
('invtgst','invtgst'),
('invtildeop','invtildeop'),
('invtimeout','invtimeout'),
('invtitle','invtitle'),
('invto','invto'),
('invtop','invtop'),
('invtr','invtr'),
('invttimeout','invttimeout'),
('invttybuiltin','invttybuiltin'),
('invttyfast','invttyfast'),
('invtx','invtx'),
('invudf','invudf'),
('invundofile','invundofile'),
('invvb','invvb'),
('invvisualbell','invvisualbell'),
('invwa','invwa'),
('invwarn','invwarn'),
('invwb','invwb'),
('invweirdinvert','invweirdinvert'),
('invwfh','invwfh'),
('invwfw','invwfw'),
('invwic','invwic'),
('invwildignorecase','invwildignorecase'),
('invwildmenu','invwildmenu'),
('invwinfixheight','invwinfixheight'),
('invwinfixwidth','invwinfixwidth'),
('invwiv','invwiv'),
('invwmnu','invwmnu'),
('invwrap','invwrap'),
('invwrapscan','invwrapscan'),
('invwrite','invwrite'),
('invwriteany','invwriteany'),
('invwritebackup','invwritebackup'),
('invws','invws'),
('is','is'),
('isf','isf'),
('isfname','isfname'),
('isi','isi'),
('isident','isident'),
('isk','isk'),
('iskeyword','iskeyword'),
('isp','isp'),
('isprint','isprint'),
('joinspaces','joinspaces'),
('js','js'),
('key','key'),
('keymap','keymap'),
('keymodel','keymodel'),
('keywordprg','keywordprg'),
('km','km'),
('kmp','kmp'),
('kp','kp'),
('langmap','langmap'),
('langmenu','langmenu'),
('laststatus','laststatus'),
('lazyredraw','lazyredraw'),
('lbr','lbr'),
('lcs','lcs'),
('linebreak','linebreak'),
('lines','lines'),
('linespace','linespace'),
('lisp','lisp'),
('lispwords','lispwords'),
('list','list'),
('listchars','listchars'),
('lm','lm'),
('lmap','lmap'),
('loadplugins','loadplugins'),
('lpl','lpl'),
('ls','ls'),
('lsp','lsp'),
('lw','lw'),
('lz','lz'),
('ma','ma'),
('macatsui','macatsui'),
('magic','magic'),
('makeef','makeef'),
('makeprg','makeprg'),
('mat','mat'),
('matchpairs','matchpairs'),
('matchtime','matchtime'),
('maxcombine','maxcombine'),
('maxfuncdepth','maxfuncdepth'),
('maxmapdepth','maxmapdepth'),
('maxmem','maxmem'),
('maxmempattern','maxmempattern'),
('maxmemtot','maxmemtot'),
('mco','mco'),
('mef','mef'),
('menuitems','menuitems'),
('mfd','mfd'),
('mh','mh'),
('mis','mis'),
('mkspellmem','mkspellmem'),
('ml','ml'),
('mls','mls'),
('mm','mm'),
('mmd','mmd'),
('mmp','mmp'),
('mmt','mmt'),
('mod','mod'),
('modeline','modeline'),
('modelines','modelines'),
('modifiable','modifiable'),
('modified','modified'),
('more','more'),
('mouse','mouse'),
('mousef','mousef'),
('mousefocus','mousefocus'),
('mousehide','mousehide'),
('mousem','mousem'),
('mousemodel','mousemodel'),
('mouses','mouses'),
('mouseshape','mouseshape'),
('mouset','mouset'),
('mousetime','mousetime'),
('mp','mp'),
('mps','mps'),
('msm','msm'),
('mzq','mzq'),
('mzquantum','mzquantum'),
('nf','nf'),
('nnoremap','nnoremap'),
('noacd','noacd'),
('noai','noai'),
('noakm','noakm'),
('noallowrevins','noallowrevins'),
('noaltkeymap','noaltkeymap'),
('noanti','noanti'),
('noantialias','noantialias'),
('noar','noar'),
('noarab','noarab'),
('noarabic','noarabic'),
('noarabicshape','noarabicshape'),
('noari','noari'),
('noarshape','noarshape'),
('noautochdir','noautochdir'),
('noautoindent','noautoindent'),
('noautoread','noautoread'),
('noautowrite','noautowrite'),
('noautowriteall','noautowriteall'),
('noaw','noaw'),
('noawa','noawa'),
('nobackup','nobackup'),
('noballooneval','noballooneval'),
('nobeval','nobeval'),
('nobin','nobin'),
('nobinary','nobinary'),
('nobiosk','nobiosk'),
('nobioskey','nobioskey'),
('nobk','nobk'),
('nobl','nobl'),
('nobomb','nobomb'),
('nobuflisted','nobuflisted'),
('nocf','nocf'),
('noci','noci'),
('nocin','nocin'),
('nocindent','nocindent'),
('nocompatible','nocompatible'),
('noconfirm','noconfirm'),
('noconsk','noconsk'),
('noconskey','noconskey'),
('nocopyindent','nocopyindent'),
('nocp','nocp'),
('nocrb','nocrb'),
('nocscoperelative','nocscoperelative'),
('nocscopetag','nocscopetag'),
('nocscopeverbose','nocscopeverbose'),
('nocsre','nocsre'),
('nocst','nocst'),
('nocsverb','nocsverb'),
('nocuc','nocuc'),
('nocul','nocul'),
('nocursorbind','nocursorbind'),
('nocursorcolumn','nocursorcolumn'),
('nocursorline','nocursorline'),
('nodeco','nodeco'),
('nodelcombine','nodelcombine'),
('nodg','nodg'),
('nodiff','nodiff'),
('nodigraph','nodigraph'),
('noea','noea'),
('noeb','noeb'),
('noed','noed'),
('noedcompatible','noedcompatible'),
('noek','noek'),
('noendofline','noendofline'),
('noeol','noeol'),
('noequalalways','noequalalways'),
('noerrorbells','noerrorbells'),
('noesckeys','noesckeys'),
('noet','noet'),
('noex','noex'),
('noexpandtab','noexpandtab'),
('noexrc','noexrc'),
('nofen','nofen'),
('nofic','nofic'),
('nofileignorecase','nofileignorecase'),
('nofk','nofk'),
('nofkmap','nofkmap'),
('nofoldenable','nofoldenable'),
('nogd','nogd'),
('nogdefault','nogdefault'),
('noguipty','noguipty'),
('nohid','nohid'),
('nohidden','nohidden'),
('nohk','nohk'),
('nohkmap','nohkmap'),
('nohkmapp','nohkmapp'),
('nohkp','nohkp'),
('nohls','nohls'),
('nohlsearch','nohlsearch'),
('noic','noic'),
('noicon','noicon'),
('noignorecase','noignorecase'),
('noim','noim'),
('noimc','noimc'),
('noimcmdline','noimcmdline'),
('noimd','noimd'),
('noimdisable','noimdisable'),
('noincsearch','noincsearch'),
('noinf','noinf'),
('noinfercase','noinfercase'),
('noinsertmode','noinsertmode'),
('nois','nois'),
('nojoinspaces','nojoinspaces'),
('nojs','nojs'),
('nolazyredraw','nolazyredraw'),
('nolbr','nolbr'),
('nolinebreak','nolinebreak'),
('nolisp','nolisp'),
('nolist','nolist'),
('noloadplugins','noloadplugins'),
('nolpl','nolpl'),
('nolz','nolz'),
('noma','noma'),
('nomacatsui','nomacatsui'),
('nomagic','nomagic'),
('nomh','nomh'),
('noml','noml'),
('nomod','nomod'),
('nomodeline','nomodeline'),
('nomodifiable','nomodifiable'),
('nomodified','nomodified'),
('nomore','nomore'),
('nomousef','nomousef'),
('nomousefocus','nomousefocus'),
('nomousehide','nomousehide'),
('nonu','nonu'),
('nonumber','nonumber'),
('noodev','noodev'),
('noopendevice','noopendevice'),
('nopaste','nopaste'),
('nopi','nopi'),
('nopreserveindent','nopreserveindent'),
('nopreviewwindow','nopreviewwindow'),
('noprompt','noprompt'),
('nopvw','nopvw'),
('noreadonly','noreadonly'),
('norelativenumber','norelativenumber'),
('noremap','noremap'),
('norestorescreen','norestorescreen'),
('norevins','norevins'),
('nori','nori'),
('norightleft','norightleft'),
('norl','norl'),
('nornu','nornu'),
('noro','noro'),
('nors','nors'),
('noru','noru'),
('noruler','noruler'),
('nosb','nosb'),
('nosc','nosc'),
('noscb','noscb'),
('noscrollbind','noscrollbind'),
('noscs','noscs'),
('nosecure','nosecure'),
('nosft','nosft'),
('noshellslash','noshellslash'),
('noshelltemp','noshelltemp'),
('noshiftround','noshiftround'),
('noshortname','noshortname'),
('noshowcmd','noshowcmd'),
('noshowfulltag','noshowfulltag'),
('noshowmatch','noshowmatch'),
('noshowmode','noshowmode'),
('nosi','nosi'),
('nosm','nosm'),
('nosmartcase','nosmartcase'),
('nosmartindent','nosmartindent'),
('nosmarttab','nosmarttab'),
('nosmd','nosmd'),
('nosn','nosn'),
('nosol','nosol'),
('nospell','nospell'),
('nosplitbelow','nosplitbelow'),
('nosplitright','nosplitright'),
('nospr','nospr'),
('nosr','nosr'),
('nossl','nossl'),
('nosta','nosta'),
('nostartofline','nostartofline'),
('nostmp','nostmp'),
('noswapfile','noswapfile'),
('noswf','noswf'),
('nota','nota'),
('notagbsearch','notagbsearch'),
('notagrelative','notagrelative'),
('notagstack','notagstack'),
('notbi','notbi'),
('notbidi','notbidi'),
('notbs','notbs'),
('notermbidi','notermbidi'),
('noterse','noterse'),
('notextauto','notextauto'),
('notextmode','notextmode'),
('notf','notf'),
('notgst','notgst'),
('notildeop','notildeop'),
('notimeout','notimeout'),
('notitle','notitle'),
('noto','noto'),
('notop','notop'),
('notr','notr'),
('nottimeout','nottimeout'),
('nottybuiltin','nottybuiltin'),
('nottyfast','nottyfast'),
('notx','notx'),
('noudf','noudf'),
('noundofile','noundofile'),
('novb','novb'),
('novisualbell','novisualbell'),
('nowa','nowa'),
('nowarn','nowarn'),
('nowb','nowb'),
('noweirdinvert','noweirdinvert'),
('nowfh','nowfh'),
('nowfw','nowfw'),
('nowic','nowic'),
('nowildignorecase','nowildignorecase'),
('nowildmenu','nowildmenu'),
('nowinfixheight','nowinfixheight'),
('nowinfixwidth','nowinfixwidth'),
('nowiv','nowiv'),
('nowmnu','nowmnu'),
('nowrap','nowrap'),
('nowrapscan','nowrapscan'),
('nowrite','nowrite'),
('nowriteany','nowriteany'),
('nowritebackup','nowritebackup'),
('nows','nows'),
('nrformats','nrformats'),
('nu','nu'),
('number','number'),
('numberwidth','numberwidth'),
('nuw','nuw'),
('odev','odev'),
('oft','oft'),
('ofu','ofu'),
('omnifunc','omnifunc'),
('opendevice','opendevice'),
('operatorfunc','operatorfunc'),
('opfunc','opfunc'),
('osfiletype','osfiletype'),
('pa','pa'),
('para','para'),
('paragraphs','paragraphs'),
('paste','paste'),
('pastetoggle','pastetoggle'),
('patchexpr','patchexpr'),
('patchmode','patchmode'),
('path','path'),
('pdev','pdev'),
('penc','penc'),
('pex','pex'),
('pexpr','pexpr'),
('pfn','pfn'),
('ph','ph'),
('pheader','pheader'),
('pi','pi'),
('pm','pm'),
('pmbcs','pmbcs'),
('pmbfn','pmbfn'),
('popt','popt'),
('preserveindent','preserveindent'),
('previewheight','previewheight'),
('previewwindow','previewwindow'),
('printdevice','printdevice'),
('printencoding','printencoding'),
('printexpr','printexpr'),
('printfont','printfont'),
('printheader','printheader'),
('printmbcharset','printmbcharset'),
('printmbfont','printmbfont'),
('printoptions','printoptions'),
('prompt','prompt'),
('pt','pt'),
('pumheight','pumheight'),
('pvh','pvh'),
('pvw','pvw'),
('qe','qe'),
('quoteescape','quoteescape'),
('rdt','rdt'),
('re','re'),
('readonly','readonly'),
('redrawtime','redrawtime'),
('regexpengine','regexpengine'),
('relativenumber','relativenumber'),
('remap','remap'),
('report','report'),
('restorescreen','restorescreen'),
('revins','revins'),
('ri','ri'),
('rightleft','rightleft'),
('rightleftcmd','rightleftcmd'),
('rl','rl'),
('rlc','rlc'),
('rnu','rnu'),
('ro','ro'),
('rs','rs'),
('rtp','rtp'),
('ru','ru'),
('ruf','ruf'),
('ruler','ruler'),
('rulerformat','rulerformat'),
('runtimepath','runtimepath'),
('sb','sb'),
('sbo','sbo'),
('sbr','sbr'),
('sc','sc'),
('scb','scb'),
('scr','scr'),
('scroll','scroll'),
('scrollbind','scrollbind'),
('scrolljump','scrolljump'),
('scrolloff','scrolloff'),
('scrollopt','scrollopt'),
('scs','scs'),
('sect','sect'),
('sections','sections'),
('secure','secure'),
('sel','sel'),
('selection','selection'),
('selectmode','selectmode'),
('sessionoptions','sessionoptions'),
('sft','sft'),
('sh','sh'),
('shcf','shcf'),
('shell','shell'),
('shellcmdflag','shellcmdflag'),
('shellpipe','shellpipe'),
('shellquote','shellquote'),
('shellredir','shellredir'),
('shellslash','shellslash'),
('shelltemp','shelltemp'),
('shelltype','shelltype'),
('shellxescape','shellxescape'),
('shellxquote','shellxquote'),
('shiftround','shiftround'),
('shiftwidth','shiftwidth'),
('shm','shm'),
('shortmess','shortmess'),
('shortname','shortname'),
('showbreak','showbreak'),
('showcmd','showcmd'),
('showfulltag','showfulltag'),
('showmatch','showmatch'),
('showmode','showmode'),
('showtabline','showtabline'),
('shq','shq'),
('si','si'),
('sidescroll','sidescroll'),
('sidescrolloff','sidescrolloff'),
('siso','siso'),
('sj','sj'),
('slm','slm'),
('sm','sm'),
('smartcase','smartcase'),
('smartindent','smartindent'),
('smarttab','smarttab'),
('smc','smc'),
('smd','smd'),
('sn','sn'),
('so','so'),
('softtabstop','softtabstop'),
('sol','sol'),
('sp','sp'),
('spc','spc'),
('spell','spell'),
('spellcapcheck','spellcapcheck'),
('spellfile','spellfile'),
('spelllang','spelllang'),
('spellsuggest','spellsuggest'),
('spf','spf'),
('spl','spl'),
('splitbelow','splitbelow'),
('splitright','splitright'),
('spr','spr'),
('sps','sps'),
('sr','sr'),
('srr','srr'),
('ss','ss'),
('ssl','ssl'),
('ssop','ssop'),
('st','st'),
('sta','sta'),
('stal','stal'),
('startofline','startofline'),
('statusline','statusline'),
('stl','stl'),
('stmp','stmp'),
('sts','sts'),
('su','su'),
('sua','sua'),
('suffixes','suffixes'),
('suffixesadd','suffixesadd'),
('sw','sw'),
('swapfile','swapfile'),
('swapsync','swapsync'),
('swb','swb'),
('swf','swf'),
('switchbuf','switchbuf'),
('sws','sws'),
('sxe','sxe'),
('sxq','sxq'),
('syn','syn'),
('synmaxcol','synmaxcol'),
('syntax','syntax'),
('t_AB','t_AB'),
('t_AF','t_AF'),
('t_AL','t_AL'),
('t_CS','t_CS'),
('t_CV','t_CV'),
('t_Ce','t_Ce'),
('t_Co','t_Co'),
('t_Cs','t_Cs'),
('t_DL','t_DL'),
('t_EI','t_EI'),
('t_F1','t_F1'),
('t_F2','t_F2'),
('t_F3','t_F3'),
('t_F4','t_F4'),
('t_F5','t_F5'),
('t_F6','t_F6'),
('t_F7','t_F7'),
('t_F8','t_F8'),
('t_F9','t_F9'),
('t_IE','t_IE'),
('t_IS','t_IS'),
('t_K1','t_K1'),
('t_K3','t_K3'),
('t_K4','t_K4'),
('t_K5','t_K5'),
('t_K6','t_K6'),
('t_K7','t_K7'),
('t_K8','t_K8'),
('t_K9','t_K9'),
('t_KA','t_KA'),
('t_KB','t_KB'),
('t_KC','t_KC'),
('t_KD','t_KD'),
('t_KE','t_KE'),
('t_KF','t_KF'),
('t_KG','t_KG'),
('t_KH','t_KH'),
('t_KI','t_KI'),
('t_KJ','t_KJ'),
('t_KK','t_KK'),
('t_KL','t_KL'),
('t_RI','t_RI'),
('t_RV','t_RV'),
('t_SI','t_SI'),
('t_Sb','t_Sb'),
('t_Sf','t_Sf'),
('t_WP','t_WP'),
('t_WS','t_WS'),
('t_ZH','t_ZH'),
('t_ZR','t_ZR'),
('t_al','t_al'),
('t_bc','t_bc'),
('t_cd','t_cd'),
('t_ce','t_ce'),
('t_cl','t_cl'),
('t_cm','t_cm'),
('t_cs','t_cs'),
('t_da','t_da'),
('t_db','t_db'),
('t_dl','t_dl'),
('t_fs','t_fs'),
('t_k1','t_k1'),
('t_k2','t_k2'),
('t_k3','t_k3'),
('t_k4','t_k4'),
('t_k5','t_k5'),
('t_k6','t_k6'),
('t_k7','t_k7'),
('t_k8','t_k8'),
('t_k9','t_k9'),
('t_kB','t_kB'),
('t_kD','t_kD'),
('t_kI','t_kI'),
('t_kN','t_kN'),
('t_kP','t_kP'),
('t_kb','t_kb'),
('t_kd','t_kd'),
('t_ke','t_ke'),
('t_kh','t_kh'),
('t_kl','t_kl'),
('t_kr','t_kr'),
('t_ks','t_ks'),
('t_ku','t_ku'),
('t_le','t_le'),
('t_mb','t_mb'),
('t_md','t_md'),
('t_me','t_me'),
('t_mr','t_mr'),
('t_ms','t_ms'),
('t_nd','t_nd'),
('t_op','t_op'),
('t_se','t_se'),
('t_so','t_so'),
('t_sr','t_sr'),
('t_te','t_te'),
('t_ti','t_ti'),
('t_ts','t_ts'),
('t_u7','t_u7'),
('t_ue','t_ue'),
('t_us','t_us'),
('t_ut','t_ut'),
('t_vb','t_vb'),
('t_ve','t_ve'),
('t_vi','t_vi'),
('t_vs','t_vs'),
('t_xs','t_xs'),
('ta','ta'),
('tabline','tabline'),
('tabpagemax','tabpagemax'),
('tabstop','tabstop'),
('tag','tag'),
('tagbsearch','tagbsearch'),
('taglength','taglength'),
('tagrelative','tagrelative'),
('tags','tags'),
('tagstack','tagstack'),
('tal','tal'),
('tb','tb'),
('tbi','tbi'),
('tbidi','tbidi'),
('tbis','tbis'),
('tbs','tbs'),
('tenc','tenc'),
('term','term'),
('termbidi','termbidi'),
('termencoding','termencoding'),
('terse','terse'),
('textauto','textauto'),
('textmode','textmode'),
('textwidth','textwidth'),
('tf','tf'),
('tgst','tgst'),
('thesaurus','thesaurus'),
('tildeop','tildeop'),
('timeout','timeout'),
('timeoutlen','timeoutlen'),
('title','title'),
('titlelen','titlelen'),
('titleold','titleold'),
('titlestring','titlestring'),
('tl','tl'),
('tm','tm'),
('to','to'),
('toolbar','toolbar'),
('toolbariconsize','toolbariconsize'),
('top','top'),
('tpm','tpm'),
('tr','tr'),
('ts','ts'),
('tsl','tsl'),
('tsr','tsr'),
('ttimeout','ttimeout'),
('ttimeoutlen','ttimeoutlen'),
('ttm','ttm'),
('tty','tty'),
('ttybuiltin','ttybuiltin'),
('ttyfast','ttyfast'),
('ttym','ttym'),
('ttymouse','ttymouse'),
('ttyscroll','ttyscroll'),
('ttytype','ttytype'),
('tw','tw'),
('tx','tx'),
('uc','uc'),
('udf','udf'),
('udir','udir'),
('ul','ul'),
('undodir','undodir'),
('undofile','undofile'),
('undolevels','undolevels'),
('undoreload','undoreload'),
('updatecount','updatecount'),
('updatetime','updatetime'),
('ur','ur'),
('ut','ut'),
('vb','vb'),
('vbs','vbs'),
('vdir','vdir'),
('ve','ve'),
('verbose','verbose'),
('verbosefile','verbosefile'),
('vfile','vfile'),
('vi','vi'),
('viewdir','viewdir'),
('viewoptions','viewoptions'),
('viminfo','viminfo'),
('virtualedit','virtualedit'),
('visualbell','visualbell'),
('vnoremap','vnoremap'),
('vop','vop'),
('wa','wa'),
('wak','wak'),
('warn','warn'),
('wb','wb'),
('wc','wc'),
('wcm','wcm'),
('wd','wd'),
('weirdinvert','weirdinvert'),
('wfh','wfh'),
('wfw','wfw'),
('wh','wh'),
('whichwrap','whichwrap'),
('wi','wi'),
('wic','wic'),
('wig','wig'),
('wildchar','wildchar'),
('wildcharm','wildcharm'),
('wildignore','wildignore'),
('wildignorecase','wildignorecase'),
('wildmenu','wildmenu'),
('wildmode','wildmode'),
('wildoptions','wildoptions'),
('wim','wim'),
('winaltkeys','winaltkeys'),
('window','window'),
('winfixheight','winfixheight'),
('winfixwidth','winfixwidth'),
('winheight','winheight'),
('winminheight','winminheight'),
('winminwidth','winminwidth'),
('winwidth','winwidth'),
('wiv','wiv'),
('wiw','wiw'),
('wm','wm'),
('wmh','wmh'),
('wmnu','wmnu'),
('wmw','wmw'),
('wop','wop'),
('wrap','wrap'),
('wrapmargin','wrapmargin'),
('wrapscan','wrapscan'),
('write','write'),
('writeany','writeany'),
('writebackup','writebackup'),
('writedelay','writedelay'),
('ws','ws'),
('ww','ww'),
)
return var
option = _getoption()
| apache-2.0 | 344,166,896,329,131,460 | 5,893,222,706,470,773,000 | 28.443012 | 74 | 0.424172 | false |
tudorvio/nova | nova/api/openstack/compute/schemas/v3/image_metadata.py | 95 | 1177 | # Copyright 2014 IBM Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from nova.api.validation import parameter_types
create = {
'type': 'object',
'properties': {
'metadata': parameter_types.metadata
},
'required': ['metadata'],
'additionalProperties': False,
}
single_metadata = copy.deepcopy(parameter_types.metadata)
single_metadata.update({
'minProperties': 1,
'maxProperties': 1
})
update = {
'type': 'object',
'properties': {
'meta': single_metadata
},
'required': ['meta'],
'additionalProperties': False,
}
update_all = create
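# Illustrative request bodies these schemas accept (the key names are made-up
# examples, not values defined by this module):
#   create / update_all: {"metadata": {"kernel_id": "123", "ramdisk_id": "456"}}
#   update:              {"meta": {"kernel_id": "123"}}   # exactly one key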
| apache-2.0 | -5,729,566,991,466,898 | 1,392,178,096,718,051,300 | 25.75 | 78 | 0.679694 | false |
Asquera/bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Tool/packaging/msi.py | 61 | 20261 | """SCons.Tool.packaging.msi
The msi packager.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/packaging/msi.py 5134 2010/08/16 23:02:40 bdeegan"
import os
import SCons
from SCons.Action import Action
from SCons.Builder import Builder
from xml.dom.minidom import *
from xml.sax.saxutils import escape
from SCons.Tool.packaging import stripinstallbuilder
#
# Utility functions
#
def convert_to_id(s, id_set):
""" Some parts of .wxs need an Id attribute (for example: The File and
Directory directives. The charset is limited to A-Z, a-z, digits,
underscores, periods. Each Id must begin with a letter or with a
underscore. Google for "CNDL0015" for information about this.
Requirements:
* the string created must only contain chars from the target charset.
* the string created must have a minimal editing distance from the
original string.
* the string created must be unique for the whole .wxs file.
Observation:
* There are 62 chars in the charset.
Idea:
* filter out forbidden characters. Check for a collision with the help
          of the id_set. Add the number of the collision at the end of the
          created string. Furthermore, take care of a correct start of
the string.
"""
    charset = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_.'
if s[0] in '0123456789.':
s += '_'+s
    # join the filtered characters back into a string so the result can be
    # used as a dictionary key in id_set below
    id = ''.join([c for c in s if c in charset])
# did we already generate an id for this file?
try:
return id_set[id][s]
except KeyError:
# no we did not so initialize with the id
if id not in id_set: id_set[id] = { s : id }
# there is a collision, generate an id which is unique by appending
# the collision number
else: id_set[id][s] = id + str(len(id_set[id]))
return id_set[id][s]
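# A rough usage sketch (inputs are made up):
#   id_set = {}
#   convert_to_id('foo-bar.txt', id_set)  # -> 'foobar.txt'  ('-' is filtered out)
#   convert_to_id('foo_bar.txt', id_set)  # -> 'foo_bar.txt'
#   convert_to_id('foobar.txt',  id_set)  # -> 'foobar.txt1' (collision counter appended)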
def is_dos_short_file_name(file):
""" examine if the given file is in the 8.3 form.
"""
fname, ext = os.path.splitext(file)
proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot
proper_fname = file.isupper() and len(fname) <= 8
return proper_ext and proper_fname
def gen_dos_short_file_name(file, filename_set):
""" see http://support.microsoft.com/default.aspx?scid=kb;en-us;Q142982
        These are not complete 8.3 DOS short names. The ~ char is missing and
replaced with one character from the filename. WiX warns about such
filenames, since a collision might occur. Google for "CNDL1014" for
more information.
"""
# guard this to not confuse the generation
if is_dos_short_file_name(file):
return file
fname, ext = os.path.splitext(file) # ext contains the dot
# first try if it suffices to convert to upper
file = file.upper()
if is_dos_short_file_name(file):
return file
# strip forbidden characters.
forbidden = '."/[]:;=, '
    fname = ''.join([c for c in fname if c not in forbidden])  # keep fname a string
# check if we already generated a filename with the same number:
# thisis1.txt, thisis2.txt etc.
duplicate, num = not None, 1
while duplicate:
shortname = "%s%s" % (fname[:8-len(str(num))].upper(),\
str(num))
if len(ext) >= 2:
shortname = "%s%s" % (shortname, ext[:4].upper())
duplicate, num = shortname in filename_set, num+1
assert( is_dos_short_file_name(shortname) ), 'shortname is %s, longname is %s' % (shortname, file)
filename_set.append(shortname)
return shortname
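# A rough usage sketch (inputs are made up):
#   filename_set = []
#   gen_dos_short_file_name('README.TXT', filename_set)         # -> 'README.TXT' (already 8.3)
#   gen_dos_short_file_name('long document.txt', filename_set)  # -> 'LONGDOC1.TXT'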
def create_feature_dict(files):
""" X_MSI_FEATURE and doc FileTag's can be used to collect files in a
hierarchy. This function collects the files into this hierarchy.
"""
dict = {}
def add_to_dict( feature, file ):
if not SCons.Util.is_List( feature ):
feature = [ feature ]
for f in feature:
if f not in dict:
dict[ f ] = [ file ]
else:
dict[ f ].append( file )
for file in files:
if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ):
add_to_dict(file.PACKAGING_X_MSI_FEATURE, file)
elif hasattr( file, 'PACKAGING_DOC' ):
add_to_dict( 'PACKAGING_DOC', file )
else:
add_to_dict( 'default', file )
return dict
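# A rough sketch of the returned mapping (node names are made up): given one
# source node tagged X_MSI_FEATURE='editor', one tagged DOC and one untagged,
# the result looks like
#   { 'editor':        [<node vim.exe>],
#     'PACKAGING_DOC': [<node manual.pdf>],
#     'default':       [<node readme.txt>] }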
def generate_guids(root):
""" generates globally unique identifiers for parts of the xml which need
them.
Component tags have a special requirement. Their UUID is only allowed to
change if the list of their contained resources has changed. This allows
for clean removal and proper updates.
    To handle this requirement, the uuid is generated by md5-hashing the
    whole subtree of an xml node.
"""
from hashlib import md5
# specify which tags need a guid and in which attribute this should be stored.
needs_id = { 'Product' : 'Id',
'Package' : 'Id',
'Component' : 'Guid',
}
# find all XMl nodes matching the key, retrieve their attribute, hash their
# subtree, convert hash to string and add as a attribute to the xml node.
for (key,value) in needs_id.items():
node_list = root.getElementsByTagName(key)
attribute = value
for node in node_list:
hash = md5(node.toxml()).hexdigest()
hash_str = '%s-%s-%s-%s-%s' % ( hash[:8], hash[8:12], hash[12:16], hash[16:20], hash[20:] )
node.attributes[attribute] = hash_str
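# A rough sketch of the effect (the hash value below is made up): a node such as
#   <Component Id="file_1">...</Component>
# gets a stable pseudo-GUID built from the md5 of its own subtree, formatted as
# 8-4-4-4-12 hex digits, e.g. Guid="0cc175b9-c0f1-b6a8-31c3-99e269772661",
# so the value only changes when the component's contents change.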
def string_wxsfile(target, source, env):
return "building WiX file %s"%( target[0].path )
def build_wxsfile(target, source, env):
""" compiles a .wxs file from the keywords given in env['msi_spec'] and
by analyzing the tree of source nodes and their tags.
"""
file = open(target[0].abspath, 'w')
try:
# Create a document with the Wix root tag
doc = Document()
root = doc.createElement( 'Wix' )
root.attributes['xmlns']='http://schemas.microsoft.com/wix/2003/01/wi'
doc.appendChild( root )
filename_set = [] # this is to circumvent duplicates in the shortnames
id_set = {} # this is to circumvent duplicates in the ids
# Create the content
build_wxsfile_header_section(root, env)
build_wxsfile_file_section(root, source, env['NAME'], env['VERSION'], env['VENDOR'], filename_set, id_set)
generate_guids(root)
build_wxsfile_features_section(root, source, env['NAME'], env['VERSION'], env['SUMMARY'], id_set)
build_wxsfile_default_gui(root)
build_license_file(target[0].get_dir(), env)
# write the xml to a file
file.write( doc.toprettyxml() )
# call a user specified function
if 'CHANGE_SPECFILE' in env:
env['CHANGE_SPECFILE'](target, source)
except KeyError, e:
raise SCons.Errors.UserError( '"%s" package field for MSI is missing.' % e.args[0] )
#
# setup function
#
def create_default_directory_layout(root, NAME, VERSION, VENDOR, filename_set):
""" Create the wix default target directory layout and return the innermost
directory.
We assume that the XML tree delivered in the root argument already contains
the Product tag.
Everything is put under the PFiles directory property defined by WiX.
After that a directory with the 'VENDOR' tag is placed and then a
directory with the name of the project and its VERSION. This leads to the
following TARGET Directory Layout:
C:\<PFiles>\<Vendor>\<Projectname-Version>\
Example: C:\Programme\Company\Product-1.2\
"""
doc = Document()
d1 = doc.createElement( 'Directory' )
d1.attributes['Id'] = 'TARGETDIR'
d1.attributes['Name'] = 'SourceDir'
d2 = doc.createElement( 'Directory' )
d2.attributes['Id'] = 'ProgramFilesFolder'
d2.attributes['Name'] = 'PFiles'
d3 = doc.createElement( 'Directory' )
d3.attributes['Id'] = 'VENDOR_folder'
d3.attributes['Name'] = escape( gen_dos_short_file_name( VENDOR, filename_set ) )
d3.attributes['LongName'] = escape( VENDOR )
d4 = doc.createElement( 'Directory' )
project_folder = "%s-%s" % ( NAME, VERSION )
d4.attributes['Id'] = 'MY_DEFAULT_FOLDER'
d4.attributes['Name'] = escape( gen_dos_short_file_name( project_folder, filename_set ) )
d4.attributes['LongName'] = escape( project_folder )
d1.childNodes.append( d2 )
d2.childNodes.append( d3 )
d3.childNodes.append( d4 )
root.getElementsByTagName('Product')[0].childNodes.append( d1 )
return d4
#
# mandatory and optional file tags
#
def build_wxsfile_file_section(root, files, NAME, VERSION, VENDOR, filename_set, id_set):
""" builds the Component sections of the wxs file with their included files.
Files need to be specified in 8.3 format and in the long name format, long
filenames will be converted automatically.
    Features are specified with the 'X_MSI_FEATURE' or 'DOC' FileTag.
"""
root = create_default_directory_layout( root, NAME, VERSION, VENDOR, filename_set )
components = create_feature_dict( files )
factory = Document()
def get_directory( node, dir ):
""" returns the node under the given node representing the directory.
Returns the component node if dir is None or empty.
"""
if dir == '' or not dir:
return node
Directory = node
dir_parts = dir.split(os.path.sep)
# to make sure that our directory ids are unique, the parent folders are
# consecutively added to upper_dir
upper_dir = ''
# walk down the xml tree finding parts of the directory
dir_parts = [d for d in dir_parts if d != '']
for d in dir_parts[:]:
already_created = [c for c in Directory.childNodes
if c.nodeName == 'Directory'
and c.attributes['LongName'].value == escape(d)]
if already_created != []:
Directory = already_created[0]
dir_parts.remove(d)
upper_dir += d
else:
break
for d in dir_parts:
nDirectory = factory.createElement( 'Directory' )
nDirectory.attributes['LongName'] = escape( d )
nDirectory.attributes['Name'] = escape( gen_dos_short_file_name( d, filename_set ) )
upper_dir += d
nDirectory.attributes['Id'] = convert_to_id( upper_dir, id_set )
Directory.childNodes.append( nDirectory )
Directory = nDirectory
return Directory
for file in files:
drive, path = os.path.splitdrive( file.PACKAGING_INSTALL_LOCATION )
filename = os.path.basename( path )
dirname = os.path.dirname( path )
h = {
# tagname : default value
'PACKAGING_X_MSI_VITAL' : 'yes',
'PACKAGING_X_MSI_FILEID' : convert_to_id(filename, id_set),
'PACKAGING_X_MSI_LONGNAME' : filename,
'PACKAGING_X_MSI_SHORTNAME' : gen_dos_short_file_name(filename, filename_set),
'PACKAGING_X_MSI_SOURCE' : file.get_path(),
}
# fill in the default tags given above.
for k,v in [ (k, v) for (k,v) in h.items() if not hasattr(file, k) ]:
setattr( file, k, v )
File = factory.createElement( 'File' )
File.attributes['LongName'] = escape( file.PACKAGING_X_MSI_LONGNAME )
File.attributes['Name'] = escape( file.PACKAGING_X_MSI_SHORTNAME )
File.attributes['Source'] = escape( file.PACKAGING_X_MSI_SOURCE )
File.attributes['Id'] = escape( file.PACKAGING_X_MSI_FILEID )
File.attributes['Vital'] = escape( file.PACKAGING_X_MSI_VITAL )
# create the <Component> Tag under which this file should appear
Component = factory.createElement('Component')
Component.attributes['DiskId'] = '1'
Component.attributes['Id'] = convert_to_id( filename, id_set )
# hang the component node under the root node and the file node
# under the component node.
Directory = get_directory( root, dirname )
Directory.childNodes.append( Component )
Component.childNodes.append( File )
#
# additional functions
#
def build_wxsfile_features_section(root, files, NAME, VERSION, SUMMARY, id_set):
""" This function creates the <features> tag based on the supplied xml tree.
This is achieved by finding all <component>s and adding them to a default target.
    It should be called after the tree has been built completely. We assume
    that a MY_DEFAULT_FOLDER Property is defined in the wxs file tree.
    Furthermore a top-level Feature with the name and VERSION of the software
    will be created.
    A PACKAGING_X_MSI_FEATURE can either be a string, in which case the feature
    DESCRIPTION will be the same as its title, or a tuple, where the first
    part will be its title and the second its DESCRIPTION.
"""
factory = Document()
Feature = factory.createElement('Feature')
Feature.attributes['Id'] = 'complete'
Feature.attributes['ConfigurableDirectory'] = 'MY_DEFAULT_FOLDER'
Feature.attributes['Level'] = '1'
Feature.attributes['Title'] = escape( '%s %s' % (NAME, VERSION) )
Feature.attributes['Description'] = escape( SUMMARY )
Feature.attributes['Display'] = 'expand'
for (feature, files) in create_feature_dict(files).items():
SubFeature = factory.createElement('Feature')
SubFeature.attributes['Level'] = '1'
if SCons.Util.is_Tuple(feature):
SubFeature.attributes['Id'] = convert_to_id( feature[0], id_set )
SubFeature.attributes['Title'] = escape(feature[0])
SubFeature.attributes['Description'] = escape(feature[1])
else:
SubFeature.attributes['Id'] = convert_to_id( feature, id_set )
if feature=='default':
SubFeature.attributes['Description'] = 'Main Part'
SubFeature.attributes['Title'] = 'Main Part'
elif feature=='PACKAGING_DOC':
SubFeature.attributes['Description'] = 'Documentation'
SubFeature.attributes['Title'] = 'Documentation'
else:
SubFeature.attributes['Description'] = escape(feature)
SubFeature.attributes['Title'] = escape(feature)
        # build the componentrefs. As one of the design decisions is that every
        # file is also a component, we walk the list of files and create a
        # reference for each one.
for f in files:
ComponentRef = factory.createElement('ComponentRef')
ComponentRef.attributes['Id'] = convert_to_id( os.path.basename(f.get_path()), id_set )
SubFeature.childNodes.append(ComponentRef)
Feature.childNodes.append(SubFeature)
root.getElementsByTagName('Product')[0].childNodes.append(Feature)
def build_wxsfile_default_gui(root):
""" this function adds a default GUI to the wxs file
"""
factory = Document()
Product = root.getElementsByTagName('Product')[0]
UIRef = factory.createElement('UIRef')
UIRef.attributes['Id'] = 'WixUI_Mondo'
Product.childNodes.append(UIRef)
UIRef = factory.createElement('UIRef')
UIRef.attributes['Id'] = 'WixUI_ErrorProgressText'
Product.childNodes.append(UIRef)
def build_license_file(directory, spec):
""" creates a License.rtf file with the content of "X_MSI_LICENSE_TEXT"
in the given directory
"""
name, text = '', ''
try:
name = spec['LICENSE']
text = spec['X_MSI_LICENSE_TEXT']
except KeyError:
pass # ignore this as X_MSI_LICENSE_TEXT is optional
if name!='' or text!='':
file = open( os.path.join(directory.get_path(), 'License.rtf'), 'w' )
file.write('{\\rtf')
if text!='':
file.write(text.replace('\n', '\\par '))
else:
file.write(name+'\\par\\par')
file.write('}')
file.close()
#
# mandatory and optional package tags
#
def build_wxsfile_header_section(root, spec):
""" Adds the xml file node which define the package meta-data.
"""
# Create the needed DOM nodes and add them at the correct position in the tree.
factory = Document()
Product = factory.createElement( 'Product' )
Package = factory.createElement( 'Package' )
root.childNodes.append( Product )
Product.childNodes.append( Package )
# set "mandatory" default values
if 'X_MSI_LANGUAGE' not in spec:
spec['X_MSI_LANGUAGE'] = '1033' # select english
# mandatory sections, will throw a KeyError if the tag is not available
Product.attributes['Name'] = escape( spec['NAME'] )
Product.attributes['Version'] = escape( spec['VERSION'] )
Product.attributes['Manufacturer'] = escape( spec['VENDOR'] )
Product.attributes['Language'] = escape( spec['X_MSI_LANGUAGE'] )
Package.attributes['Description'] = escape( spec['SUMMARY'] )
    # now the optional tags, for which we avoid the KeyError exception
if 'DESCRIPTION' in spec:
Package.attributes['Comments'] = escape( spec['DESCRIPTION'] )
if 'X_MSI_UPGRADE_CODE' in spec:
Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE'] )
# We hardcode the media tag as our current model cannot handle it.
Media = factory.createElement('Media')
Media.attributes['Id'] = '1'
Media.attributes['Cabinet'] = 'default.cab'
Media.attributes['EmbedCab'] = 'yes'
root.getElementsByTagName('Product')[0].childNodes.append(Media)
# this builder is the entry-point for .wxs file compiler.
wxs_builder = Builder(
action = Action( build_wxsfile, string_wxsfile ),
ensure_suffix = '.wxs' )
def package(env, target, source, PACKAGEROOT, NAME, VERSION,
DESCRIPTION, SUMMARY, VENDOR, X_MSI_LANGUAGE, **kw):
# make sure that the Wix Builder is in the environment
SCons.Tool.Tool('wix').generate(env)
    # gather the keywords for the specfile compiler. These are the arguments
    # given to the package function and all optional ones stored in kw, minus
    # the source, target and env ones.
loc = locals()
del loc['kw']
kw.update(loc)
del kw['source'], kw['target'], kw['env']
# strip the install builder from the source files
target, source = stripinstallbuilder(target, source, env)
# put the arguments into the env and call the specfile builder.
env['msi_spec'] = kw
specfile = wxs_builder(* [env, target, source], **kw)
# now call the WiX Tool with the built specfile added as a source.
msifile = env.WiX(target, specfile)
# return the target and source tuple.
return (msifile, source+[specfile])
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 | 3,053,118,605,341,912,000 | 5,033,150,013,231,043,000 | 37.44592 | 114 | 0.637086 | false |
johnkit/vtk-dev | Filters/Hybrid/Testing/Python/TestGridWarp3D.py | 20 | 2154 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Image pipeline
reader = vtk.vtkImageReader()
reader.ReleaseDataFlagOff()
reader.SetDataByteOrderToLittleEndian()
reader.SetDataExtent(0,63,0,63,1,93)
reader.SetDataSpacing(3.2,3.2,1.5)
reader.SetDataOrigin(-100.8,-100.8,-69)
reader.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter")
reader.SetDataMask(0x7fff)
reader.Update()
p1 = vtk.vtkPoints()
p2 = vtk.vtkPoints()
p1.InsertNextPoint(0,0,0)
p2.InsertNextPoint(-60,10,20)
p1.InsertNextPoint(-100,-100,-50)
p2.InsertNextPoint(-100,-100,-50)
p1.InsertNextPoint(-100,-100,50)
p2.InsertNextPoint(-100,-100,50)
p1.InsertNextPoint(-100,100,-50)
p2.InsertNextPoint(-100,100,-50)
p1.InsertNextPoint(-100,100,50)
p2.InsertNextPoint(-100,100,50)
p1.InsertNextPoint(100,-100,-50)
p2.InsertNextPoint(100,-100,-50)
p1.InsertNextPoint(100,-100,50)
p2.InsertNextPoint(100,-100,50)
p1.InsertNextPoint(100,100,-50)
p2.InsertNextPoint(100,100,-50)
p1.InsertNextPoint(100,100,50)
p2.InsertNextPoint(100,100,50)
transform = vtk.vtkThinPlateSplineTransform()
transform.SetSourceLandmarks(p1)
transform.SetTargetLandmarks(p2)
transform.SetBasisToR()
gridThinPlate = vtk.vtkTransformToGrid()
gridThinPlate.SetInput(transform)
gridThinPlate.SetGridExtent(0,64,0,64,0,50)
gridThinPlate.SetGridSpacing(3.2,3.2,3.0)
gridThinPlate.SetGridOrigin(-102.4,-102.4,-75)
gridThinPlate.SetGridScalarTypeToUnsignedChar()
gridThinPlate.Update()
gridTransform = vtk.vtkGridTransform()
gridTransform.SetDisplacementGridData(gridThinPlate.GetOutput())
gridTransform.SetDisplacementShift(gridThinPlate.GetDisplacementShift())
gridTransform.SetDisplacementScale(gridThinPlate.GetDisplacementScale())
reslice = vtk.vtkImageReslice()
reslice.SetInputConnection(reader.GetOutputPort())
reslice.SetResliceTransform(gridTransform)
reslice.SetInterpolationModeToLinear()
reslice.SetOutputSpacing(1,1,1)
viewer = vtk.vtkImageViewer()
viewer.SetInputConnection(reslice.GetOutputPort())
viewer.SetZSlice(70)
viewer.SetColorWindow(2000)
viewer.SetColorLevel(1000)
viewer.Render()
# --- end of script --
| bsd-3-clause | 8,277,313,221,663,431,000 | 9,209,922,777,584,592,000 | 33.190476 | 72 | 0.805942 | false |
cossacklabs/acra | wrappers/python/acrawriter/django/__init__.py | 1 | 3334 | # Copyright 2016, Cossack Labs Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
from django.core import validators
from django.db import models
from django import forms
from django.utils import six
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import acrawriter
__all__ = ('CharField', 'EmailField', 'TextField')
class CharField(models.CharField):
def __init__(self, public_key=None, encoding='utf-8',
encoding_errors='ignore', *args, **kwargs):
super(CharField, self).__init__(*args, **kwargs)
self._encoding = encoding
self._encoding_errors = encoding_errors
if not (public_key or settings.ACRA_SERVER_PUBLIC_KEY):
raise ValueError("Set public key arg or settings.ACRA_SERVER_PUBLIC_KEY")
self._public_key = public_key or settings.ACRA_SERVER_PUBLIC_KEY
def from_db_value(self, value, *args, **kwargs):
if isinstance(value, memoryview):
value = value.tobytes()
if isinstance(value, six.binary_type):
return value.decode(self._encoding, errors=self._encoding_errors)
else:
return value
def get_db_prep_value(self, value, connection, prepared=False):
value = super(CharField, self).get_db_prep_value(
value, connection, prepared)
if value == '':
return b''
elif value is None:
return None
else:
return acrawriter.create_acrastruct(value.encode(self._encoding), self._public_key)
def get_internal_type(self):
return 'BinaryField'
def to_python(self, value):
value = super(CharField, self).to_python(value)
if isinstance(value, six.binary_type):
return value.decode(self._encoding, errors=self._encoding_errors)
else:
return value
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("Email address")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = kwargs.get('max_length', 254)
super(EmailField, self).__init__(*args, **kwargs)
class TextField(CharField):
description = _("Text")
def __init__(self, *args, **kwargs):
super(TextField, self).__init__(*args, **kwargs)
self.validators = []
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
defaults = {'max_length': self.max_length, 'widget': forms.Textarea}
defaults.update(kwargs)
return super(TextField, self).formfield(**defaults)
def check(self, **kwargs):
return []
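# --- Illustrative usage (editor's addition, not part of the package) ---
# A hypothetical Django model using these encrypting fields; it assumes
# settings.ACRA_SERVER_PUBLIC_KEY (or an explicit public_key=...) is configured.
#
#     from django.db import models
#     from acrawriter.django import CharField, EmailField, TextField
#
#     class Customer(models.Model):
#         name = CharField(max_length=120)
#         email = EmailField()
#         notes = TextField(blank=True)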
| apache-2.0 | -6,248,262,301,061,941,000 | -1,102,708,617,276,280,800 | 36.044444 | 95 | 0.658668 | false |
ds-hwang/chromium-crosswalk | tools/android/loading/resource_sack_display.py | 3 | 4182 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for displaying a ResourceSack.
When run standalone, takes traces on the command line and produces a dot file to
stdout.
"""
def ToDot(sack, output, prune=-1, long_edge_msec=2000):
"""Output as a dot file.
Args:
sack: (ResourceSack) the sack to convert to dot.
output: a file-like output stream.
prune: if positive, prune & coalesce nodes under the specified threshold
of repeated views, as fraction node views / total graphs. All pruned
nodes are represented by a single node, and an edge is connected only if
the view count is greater than 1.
long_edge_msec: if positive, the definition of a long edge. Long edges are
      distinguished in the graph.
"""
output.write("""digraph dependencies {
rankdir = LR;
""")
pruned = set()
num_graphs = len(sack.graph_info)
for bag in sack.bags:
if prune > 0 and float(len(bag.graphs)) / num_graphs < prune:
pruned.add(bag)
continue
output.write('%d [label="%s (%d)\n(%d, %d)\n(%.2f, %.2f)" shape=%s; '
'style=filled; fillcolor=%s];\n' % (
bag.Index(), bag.label, len(bag.graphs),
min(bag.total_costs), max(bag.total_costs),
min(bag.relative_costs), max(bag.relative_costs),
_CriticalToShape(bag),
_AmountToNodeColor(len(bag.graphs), num_graphs)))
if pruned:
pruned_index = num_graphs
output.write('%d [label="Pruned at %.0f%%\n(%d)"; '
'shape=polygon; style=dotted];\n' %
(pruned_index, 100 * prune, len(pruned)))
for bag in sack.bags:
if bag in pruned:
for succ in bag.Successors():
if succ not in pruned:
output.write('%d -> %d [style=dashed];\n' % (
pruned_index, succ.Index()))
for succ in bag.Successors():
if succ in pruned:
if len(bag.successor_sources[succ]) > 1:
output.write('%d -> %d [label="%d"; style=dashed];\n' % (
bag.Index(), pruned_index, len(bag.successor_sources[succ])))
else:
num_succ = len(bag.successor_sources[succ])
num_long = 0
for graph, source, target in bag.successor_sources[succ]:
if graph.EdgeCost(source, target) > long_edge_msec:
num_long += 1
if num_long > 0:
long_frac = float(num_long) / num_succ
long_edge_style = '; penwidth=%f' % (2 + 6.0 * long_frac)
if long_frac < 0.75:
long_edge_style += '; style=dashed'
else:
long_edge_style = ''
min_edge = min(bag.successor_edge_costs[succ])
max_edge = max(bag.successor_edge_costs[succ])
output.write('%d -> %d [label="%d\n(%f,%f)"; color=%s %s];\n' % (
bag.Index(), succ.Index(), num_succ, min_edge, max_edge,
_AmountToEdgeColor(num_succ, len(bag.graphs)),
long_edge_style))
output.write('}')
def _CriticalToShape(bag):
frac = float(bag.num_critical) / bag.num_nodes
if frac < 0.4:
return 'oval'
elif frac < 0.7:
return 'polygon'
elif frac < 0.9:
return 'trapezium'
return 'box'
def _AmountToNodeColor(numer, denom):
if denom <= 0:
return 'grey72'
ratio = 1.0 * numer / denom
if ratio < .3:
return 'white'
elif ratio < .6:
return 'yellow'
elif ratio < .8:
return 'orange'
return 'green'
def _AmountToEdgeColor(numer, denom):
color = _AmountToNodeColor(numer, denom)
if color == 'white' or color == 'grey72':
return 'black'
return color
def _Main():
import json
import logging
import sys
import loading_model
import loading_trace
import resource_sack
sack = resource_sack.GraphSack()
for fname in sys.argv[1:]:
trace = loading_trace.LoadingTrace.FromJsonDict(
json.load(open(fname)))
logging.info('Making graph from %s', fname)
model = loading_model.ResourceGraph(trace, content_lens=None)
sack.ConsumeGraph(model)
logging.info('Finished %s', fname)
ToDot(sack, sys.stdout, prune=.1)
if __name__ == '__main__':
_Main()
| bsd-3-clause | -6,625,389,834,066,218,000 | 7,183,396,604,714,847,000 | 29.977778 | 80 | 0.610234 | false |
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/vm/manual/custom.py | 1 | 1298 | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
def sshkey_create(client,
resource_group_name,
ssh_public_key_name,
location,
tags=None,
public_key=None):
parameters = {
'location': location,
'tags': tags,
'public_key': public_key
}
client.create(resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name,
parameters=parameters)
if public_key is None: # Generate one if public key is None
client.generate_key_pair(resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name)
return client.get(resource_group_name=resource_group_name,
ssh_public_key_name=ssh_public_key_name)
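# Illustrative CLI usage (editor's note; resource names are hypothetical):
#   az sshkey create --resource-group my-rg --name my-key --location westus2
# Omitting --public-key takes the generate_key_pair() branch above, letting the
# service create the key pair before the final get() returns it.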
| mit | 1,908,823,595,602,574,600 | -2,418,642,890,979,725,300 | 40.870968 | 76 | 0.544684 | false |
neiudemo1/django | docs/conf.py | 54 | 11938 | # -*- coding: utf-8 -*-
#
# Django documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 27 09:06:53 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import unicode_literals
import sys
from os.path import abspath, dirname, join
# Make sure we get the version of this copy of Django
sys.path.insert(1, dirname(dirname(abspath(__file__))))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(abspath(join(dirname(__file__), "_ext")))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"djangodocs",
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
"ticket_role",
]
# Spelling check needs an additional module that is not installed by default.
# Add it only if spelling check is requested so docs can be generated without it.
if 'spelling' in sys.argv:
extensions.append("sphinxcontrib.spelling")
# Spelling language.
spelling_lang = 'en_US'
# Location of word list.
spelling_word_list_filename = 'spelling_wordlist'
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General substitutions.
project = 'Django'
copyright = 'Django Software Foundation and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.10'
# The full version, including alpha/beta/rc tags.
try:
from django import VERSION, get_version
except ImportError:
release = version
else:
def django_release():
pep386ver = get_version()
if VERSION[3:5] == ('alpha', 0) and 'dev' not in pep386ver:
return pep386ver + '.dev'
return pep386ver
release = django_release()
# The "development version" of Django
django_next_version = '1.10'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# Location for .po/.mo translation files used when language is set
locale_dirs = ['locale/']
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Links to Python's docs should reference the most recent version of the 3.x
# branch, which is located at this URL.
intersphinx_mapping = {
'python': ('https://docs.python.org/3/', None),
'sphinx': ('http://sphinx-doc.org/', None),
'six': ('http://pythonhosted.org/six/', None),
'formtools': ('http://django-formtools.readthedocs.org/en/latest/', None),
'psycopg2': ('http://initd.org/psycopg/docs/', None),
}
# Python's docs don't change every week.
intersphinx_cache_limit = 90 # days
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "djangodocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_theme"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# HTML translator class for the builder
html_translator_class = "djangodocs.DjangoHTMLTranslator"
# Content template for the index page.
# html_index = ''
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Djangodoc'
modindex_common_prefix = ["django."]
# Appended to every page
rst_epilog = """
.. |django-users| replace:: :ref:`django-users <django-users-mailing-list>`
.. |django-core-mentorship| replace:: :ref:`django-core-mentorship <django-core-mentorship-mailing-list>`
.. |django-developers| replace:: :ref:`django-developers <django-developers-mailing-list>`
.. |django-announce| replace:: :ref:`django-announce <django-announce-mailing-list>`
.. |django-updates| replace:: :ref:`django-updates <django-updates-mailing-list>`
"""
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
'preamble': ('\\DeclareUnicodeCharacter{2264}{\\ensuremath{\\le}}'
'\\DeclareUnicodeCharacter{2265}{\\ensuremath{\\ge}}')
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# latex_documents = []
latex_documents = [
('contents', 'django.tex', 'Django Documentation',
'Django Software Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(
'ref/django-admin',
'django-admin',
'Utility script for the Django Web framework',
['Django Software Foundation'],
1
), ]
# -- Options for Texinfo output ------------------------------------------------
# List of tuples (startdocname, targetname, title, author, dir_entry,
# description, category, toctree_only)
texinfo_documents = [(
master_doc, "django", "", "", "Django",
"Documentation of the Django framework", "Web development", False
)]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = 'Django Software Foundation'
epub_publisher = 'Django Software Foundation'
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = 'Django'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
epub_theme = 'djangodocs-epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
epub_cover = ('', 'epub-cover.html')
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
# epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
# epub_fix_images = False
# Scale large images.
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# epub_show_urls = 'inline'
# If false, no index is generated.
# epub_use_index = True
# -- ticket options ------------------------------------------------------------
ticket_url = 'https://code.djangoproject.com/ticket/%s'
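# With ticket_url above and the "ticket_role" extension loaded, the docs can
# reference Trac tickets inline, e.g. (editor's note) :ticket:`19689` renders as
# a link to https://code.djangoproject.com/ticket/19689.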
| bsd-3-clause | 6,155,085,750,073,063,000 | -5,532,446,195,508,607,000 | 31.796703 | 105 | 0.697437 | false |
oskopek/devassistant | test/test_actions.py | 1 | 9352 | import os
import subprocess
import pytest
from flexmock import flexmock
from devassistant import actions, exceptions
from devassistant.dapi import dapicli
from test.logger import LoggingHandler
class TestActions(object):
def setup_class(self):
self.ha = actions.HelpAction
def test_get_help_contains_task_keywords(self):
gh = self.ha().get_help()
assert 'crt' in gh
assert 'twk' in gh
assert 'prep' in gh
assert 'extra' in gh
def test_get_help_contains_action_name(self):
a = actions.Action()
a.name = 'foobar_action_name'
a.description = 'foobar_action_description'
actions.register_action(a)
assert 'foobar_action_name' in self.ha().get_help()
assert 'foobar_action_description' in self.ha().get_help()
def test_format_text_returns_original_text_on_bogus_formatting(self):
assert self.ha().format_text('aaa', 'foo', 'bar') == 'aaa'
assert self.ha().format_text('', 'foo', 'bar') == ''
def test_format_text_returns_bold(self):
assert self.ha().format_text('aaa', 'bold', 'ascii') == '\033[1maaa\033[0m'
def test_version_action(self, capsys):
va = actions.VersionAction()
from devassistant import __version__ as VERSION
va.run()
assert VERSION in capsys.readouterr()[0]
class TestDocAction(object):
def setup_method(self, method):
self.da = actions.DocAction
self.tlh = LoggingHandler.create_fresh_handler()
def test_no_docs(self):
self.da(dap='f').run()
assert ('INFO', 'DAP f has no documentation.') in self.tlh.msgs
def test_lists_docs(self):
self.da(dap='c').run()
assert self.tlh.msgs == [
('INFO', 'DAP c has these docs:'),
('INFO', 'LICENSE'),
('INFO', 'doc1'),
('INFO', 'something/foo/doc2'),
('INFO', 'Use "da doc c <DOC>" to see a specific document')
]
def test_displays_docs(self):
# we only test displaying without "less" - e.g. simple logging
flexmock(subprocess).should_receive('check_call').\
and_raise(subprocess.CalledProcessError, None, None)
self.da(dap='c', doc='doc1').run()
assert ('INFO', 'Bar!\n') in self.tlh.msgs
class TestPkgSearchAction(object):
@pytest.mark.parametrize('exc', [exceptions.DapiCommError, exceptions.DapiLocalError])
def test_raising_exceptions(self, exc):
flexmock(dapicli).should_receive('print_search').and_raise(exc)
with pytest.raises(exceptions.ExecutionException):
actions.PkgSearchAction(query='foo', noassistants=False, unstable=False,
deactivated=False, minrank=0, mincount=0,
allplatforms=False).run()
class TestPkgInstallAction(object):
def setup_class(self):
self.pkg = 'foo'
self.exc_string = 'bar'
@pytest.mark.parametrize(('isfile', 'method'), [
(True, 'install_dap_from_path'),
(False, 'install_dap')
])
def test_pkg_install(self, isfile, method):
flexmock(os.path).should_receive('isfile').with_args(self.pkg)\
.and_return(isfile).at_least().once()
flexmock(dapicli).should_receive(method)\
.and_return([self.pkg]).at_least().once()
# Install from path, everything goes well
actions.PkgInstallAction(package=[self.pkg], force=False,
reinstall=False, nodeps=False).run()
def test_pkg_install_fails(self):
flexmock(os.path).should_receive('isfile').with_args(self.pkg)\
.and_return(True).at_least().once()
flexmock(dapicli).should_receive('install_dap_from_path')\
.and_raise(exceptions.DapiLocalError(self.exc_string)).at_least().once()
with pytest.raises(exceptions.ExecutionException) as excinfo:
actions.PkgInstallAction(package=[self.pkg], force=False,
reinstall=False, nodeps=False).run()
assert self.exc_string in str(excinfo.value)
class TestPkgUpdateAction(object):
def test_pkg_update_all(self):
        '''Run update without args to update all, but everything is up to date'''
flexmock(dapicli).should_receive('get_installed_daps')\
.and_return(['foo']).at_least().once()
flexmock(dapicli).should_receive('install_dap')\
.and_return([]).at_least().once()
# Update all, everything is up to date
actions.PkgUpdateAction(force=False, allpaths=False).run()
def test_pkg_update_no_dapi(self):
'''Run update of package that is not on Dapi'''
flexmock(dapicli).should_receive('metadap')\
.and_return(None).at_least().once()
with pytest.raises(exceptions.ExecutionException) as excinfo:
actions.PkgUpdateAction(package=['foo'], force=False, allpaths=False).run()
assert 'foo not found' in str(excinfo.value)
def test_pkg_update_no_installed(self):
'''Run update of package that is not installed'''
flexmock(dapicli).should_receive('_get_metadap_dap')\
.and_return(({}, {'version': '0.0.1'})).at_least().once()
flexmock(dapicli).should_receive('get_installed_version_of')\
.and_return(None).at_least().once()
with pytest.raises(exceptions.ExecutionException) as excinfo:
actions.PkgUpdateAction(package=['foo'], force=False, allpaths=False).run()
assert 'Cannot update not yet installed DAP' in str(excinfo.value)
@pytest.mark.parametrize('action', [
actions.PkgUninstallAction,
actions.PkgRemoveAction
])
class TestPkgUninstallAction(object):
def test_pkg_uninstall_dependent(self, action):
'''Uninstall two packages, but the first depend on the latter'''
flexmock(dapicli).should_receive('uninstall_dap')\
.and_return(['first', 'second']).at_least().once()
action(package=['first', 'second'], force=True, allpaths=False).run()
def test_pkg_uninstall_not_installed(self, action):
'''Uninstall package that is not installed'''
flexmock(dapicli).should_receive('get_installed_daps')\
.and_return(['bar']).at_least().once()
with pytest.raises(exceptions.ExecutionException) as excinfo:
action(package=['foo'], force=True, allpaths=False).run()
assert 'Cannot uninstall DAP foo' in str(excinfo.value)
class TestAutoCompleteAction(object):
def setup_class(self):
self.aca = actions.AutoCompleteAction
self.fake_desc = [flexmock(name=n,
get_subassistants=lambda: [],
args=[]) for n in ['foo', 'bar', 'baz']]
self.fake_arg = flexmock(flags=('--qux',), kwargs=dict())
self.fake_crt = flexmock(name='crt',
get_subassistants=lambda: self.fake_desc,
args=[self.fake_arg])
@pytest.mark.parametrize('path', ['', '--debug', '__debug'])
def test_root_path(self, path, capsys):
expected = set(['--debug', '--help', 'create', 'doc', 'extra', 'help',
'pkg', 'prepare', 'tweak', 'version'])
self.aca(path=path).run()
stdout, _ = capsys.readouterr()
assert stdout
assert expected.issubset(set(stdout.split()))
@pytest.mark.parametrize('obj', [
flexmock(get_subassistants=lambda: []),
flexmock(get_subactions=lambda: [])
])
def test_get_descendants(self, obj):
self.aca._get_descendants(obj)
@pytest.mark.parametrize('obj', [
flexmock(get_subassistants=''),
flexmock()
])
def test_get_descendants_fails(self, obj):
with pytest.raises(TypeError):
self.aca._get_descendants(obj)
@pytest.mark.parametrize('path', ['crt', 'crt --qux'])
def test_assistants(self, path, capsys):
aca = self.aca(path=path)
flexmock(aca).should_receive('_assistants').and_return([self.fake_crt])
aca.run()
stdout, _ = capsys.readouterr()
assert not _
assert set([a.name for a in self.fake_desc] + \
[f for f in self.fake_arg.flags]).issubset(set(stdout.split()))
@pytest.mark.parametrize(('long_name', 'short_name'), [
('create', 'crt'),
('tweak', 'twk'),
('twk', 'mod'),
('prepare', 'prep'),
('extra', 'task'),
])
def test_aliases(self, long_name, short_name, capsys):
self.aca(path=long_name).run()
long_stdout, _ = capsys.readouterr()
assert long_stdout
self.aca(path=short_name).run()
short_stdout, _ = capsys.readouterr()
assert short_stdout
assert long_stdout == short_stdout
def test_filenames(self, capsys):
self.aca(path='pkg info').run()
stdout, _ = capsys.readouterr()
assert '_FILENAMES' in stdout.split()
def test_bad_input(self, capsys):
self.aca(path='foo bar baz').run()
stdout, _ = capsys.readouterr()
assert not stdout.split()
| gpl-2.0 | -2,408,615,541,897,554,400 | 687,463,092,750,539,300 | 35.108108 | 97 | 0.590997 | false |
jjingrong/PONUS-1.2 | venv/build/django/django/db/models/options.py | 104 | 24269 | from __future__ import unicode_literals
import re
from bisect import bisect
import warnings
from django.conf import settings
from django.db.models.fields.related import ManyToManyRel
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.proxy import OrderWrt
from django.db.models.loading import get_models, app_cache_ready
from django.utils import six
from django.utils.functional import cached_property
from django.utils.datastructures import SortedDict
from django.utils.encoding import force_text, smart_text, python_2_unicode_compatible
from django.utils.translation import activate, deactivate_all, get_language, string_concat
# Calculate the verbose_name by converting from InitialCaps to "lowercase with spaces".
get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', ' \\1', class_name).lower().strip()
DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering',
'unique_together', 'permissions', 'get_latest_by',
'order_with_respect_to', 'app_label', 'db_tablespace',
'abstract', 'managed', 'proxy', 'swappable', 'auto_created',
'index_together', 'select_on_save')
@python_2_unicode_compatible
class Options(object):
def __init__(self, meta, app_label=None):
self.local_fields, self.local_many_to_many = [], []
self.virtual_fields = []
self.model_name, self.verbose_name = None, None
self.verbose_name_plural = None
self.db_table = ''
self.ordering = []
self.unique_together = []
self.index_together = []
self.select_on_save = False
self.permissions = []
self.object_name, self.app_label = None, app_label
self.get_latest_by = None
self.order_with_respect_to = None
self.db_tablespace = settings.DEFAULT_TABLESPACE
self.meta = meta
self.pk = None
self.has_auto_field, self.auto_field = False, None
self.abstract = False
self.managed = True
self.proxy = False
# For any class that is a proxy (including automatically created
# classes for deferred object loading), proxy_for_model tells us
# which class this model is proxying. Note that proxy_for_model
# can create a chain of proxy models. For non-proxy models, the
# variable is always None.
self.proxy_for_model = None
# For any non-abstract class, the concrete class is the model
# in the end of the proxy_for_model chain. In particular, for
# concrete models, the concrete_model is always the class itself.
self.concrete_model = None
self.swappable = None
self.parents = SortedDict()
self.auto_created = False
# To handle various inheritance situations, we need to track where
# managers came from (concrete or abstract base classes).
self.abstract_managers = []
self.concrete_managers = []
# List of all lookups defined in ForeignKey 'limit_choices_to' options
# from *other* models. Needed for some admin checks. Internal use only.
self.related_fkey_lookups = []
def contribute_to_class(self, cls, name):
from django.db import connection
from django.db.backends.util import truncate_name
cls._meta = self
self.model = cls
self.installed = re.sub('\.models$', '', cls.__module__) in settings.INSTALLED_APPS
# First, construct the default values for these options.
self.object_name = cls.__name__
self.model_name = self.object_name.lower()
self.verbose_name = get_verbose_name(self.object_name)
# Next, apply any overridden values from 'class Meta'.
if self.meta:
meta_attrs = self.meta.__dict__.copy()
for name in self.meta.__dict__:
# Ignore any private attributes that Django doesn't care about.
# NOTE: We can't modify a dictionary's contents while looping
# over it, so we loop over the *original* dictionary instead.
if name.startswith('_'):
del meta_attrs[name]
for attr_name in DEFAULT_NAMES:
if attr_name in meta_attrs:
setattr(self, attr_name, meta_attrs.pop(attr_name))
elif hasattr(self.meta, attr_name):
setattr(self, attr_name, getattr(self.meta, attr_name))
# unique_together can be either a tuple of tuples, or a single
# tuple of two strings. Normalize it to a tuple of tuples, so that
# calling code can uniformly expect that.
ut = meta_attrs.pop('unique_together', self.unique_together)
if ut and not isinstance(ut[0], (tuple, list)):
ut = (ut,)
self.unique_together = ut
# verbose_name_plural is a special case because it uses a 's'
# by default.
if self.verbose_name_plural is None:
self.verbose_name_plural = string_concat(self.verbose_name, 's')
# Any leftover attributes must be invalid.
if meta_attrs != {}:
raise TypeError("'class Meta' got invalid attribute(s): %s" % ','.join(meta_attrs.keys()))
else:
self.verbose_name_plural = string_concat(self.verbose_name, 's')
del self.meta
# If the db_table wasn't provided, use the app_label + model_name.
if not self.db_table:
self.db_table = "%s_%s" % (self.app_label, self.model_name)
self.db_table = truncate_name(self.db_table, connection.ops.max_name_length())
@property
def module_name(self):
"""
This property has been deprecated in favor of `model_name`. refs #19689
"""
warnings.warn(
"Options.module_name has been deprecated in favor of model_name",
PendingDeprecationWarning, stacklevel=2)
return self.model_name
def _prepare(self, model):
if self.order_with_respect_to:
self.order_with_respect_to = self.get_field(self.order_with_respect_to)
self.ordering = ('_order',)
model.add_to_class('_order', OrderWrt())
else:
self.order_with_respect_to = None
if self.pk is None:
if self.parents:
# Promote the first parent link in lieu of adding yet another
# field.
field = next(six.itervalues(self.parents))
# Look for a local field with the same name as the
# first parent link. If a local field has already been
# created, use it instead of promoting the parent
already_created = [fld for fld in self.local_fields if fld.name == field.name]
if already_created:
field = already_created[0]
field.primary_key = True
self.setup_pk(field)
else:
auto = AutoField(verbose_name='ID', primary_key=True,
auto_created=True)
model.add_to_class('id', auto)
def add_field(self, field):
# Insert the given field in the order in which it was created, using
# the "creation_counter" attribute of the field.
# Move many-to-many related fields from self.fields into
# self.many_to_many.
if field.rel and isinstance(field.rel, ManyToManyRel):
self.local_many_to_many.insert(bisect(self.local_many_to_many, field), field)
if hasattr(self, '_m2m_cache'):
del self._m2m_cache
else:
self.local_fields.insert(bisect(self.local_fields, field), field)
self.setup_pk(field)
if hasattr(self, '_field_cache'):
del self._field_cache
del self._field_name_cache
# The fields, concrete_fields and local_concrete_fields are
# implemented as cached properties for performance reasons.
        # The attrs will not exist if the cached property isn't
# accessed yet, hence the try-excepts.
try:
del self.fields
except AttributeError:
pass
try:
del self.concrete_fields
except AttributeError:
pass
try:
del self.local_concrete_fields
except AttributeError:
pass
if hasattr(self, '_name_map'):
del self._name_map
def add_virtual_field(self, field):
self.virtual_fields.append(field)
def setup_pk(self, field):
if not self.pk and field.primary_key:
self.pk = field
field.serialize = False
def pk_index(self):
"""
Returns the index of the primary key field in the self.concrete_fields
list.
"""
return self.concrete_fields.index(self.pk)
def setup_proxy(self, target):
"""
Does the internal setup so that the current model is a proxy for
"target".
"""
self.pk = target._meta.pk
self.proxy_for_model = target
self.db_table = target._meta.db_table
def __repr__(self):
return '<Options for %s>' % self.object_name
def __str__(self):
return "%s.%s" % (smart_text(self.app_label), smart_text(self.model_name))
def verbose_name_raw(self):
"""
There are a few places where the untranslated verbose name is needed
(so that we get the same value regardless of currently active
locale).
"""
lang = get_language()
deactivate_all()
raw = force_text(self.verbose_name)
activate(lang)
return raw
verbose_name_raw = property(verbose_name_raw)
def _swapped(self):
"""
Has this model been swapped out for another? If so, return the model
name of the replacement; otherwise, return None.
For historical reasons, model name lookups using get_model() are
case insensitive, so we make sure we are case insensitive here.
"""
if self.swappable:
model_label = '%s.%s' % (self.app_label, self.model_name)
swapped_for = getattr(settings, self.swappable, None)
if swapped_for:
try:
swapped_label, swapped_object = swapped_for.split('.')
except ValueError:
# setting not in the format app_label.model_name
# raising ImproperlyConfigured here causes problems with
# test cleanup code - instead it is raised in get_user_model
# or as part of validation.
return swapped_for
if '%s.%s' % (swapped_label, swapped_object.lower()) not in (None, model_label):
return swapped_for
return None
swapped = property(_swapped)
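    # Illustrative (editor's note): auth.User declares swappable = 'AUTH_USER_MODEL',
    # so with AUTH_USER_MODEL = 'myapp.CustomUser' in settings,
    # User._meta.swapped returns 'myapp.CustomUser'; for the active model it is None.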
@cached_property
def fields(self):
"""
The getter for self.fields. This returns the list of field objects
available to this model (including through parent models).
Callers are not permitted to modify this list, since it's a reference
to this instance (not a copy).
"""
try:
self._field_name_cache
except AttributeError:
self._fill_fields_cache()
return self._field_name_cache
@cached_property
def concrete_fields(self):
return [f for f in self.fields if f.column is not None]
@cached_property
def local_concrete_fields(self):
return [f for f in self.local_fields if f.column is not None]
def get_fields_with_model(self):
"""
Returns a sequence of (field, model) pairs for all fields. The "model"
element is None for fields on the current model. Mostly of use when
constructing queries so that we know which model a field belongs to.
"""
try:
self._field_cache
except AttributeError:
self._fill_fields_cache()
return self._field_cache
def get_concrete_fields_with_model(self):
return [(field, model) for field, model in self.get_fields_with_model() if
field.column is not None]
def _fill_fields_cache(self):
cache = []
for parent in self.parents:
for field, model in parent._meta.get_fields_with_model():
if model:
cache.append((field, model))
else:
cache.append((field, parent))
cache.extend([(f, None) for f in self.local_fields])
self._field_cache = tuple(cache)
self._field_name_cache = [x for x, _ in cache]
def _many_to_many(self):
try:
self._m2m_cache
except AttributeError:
self._fill_m2m_cache()
return list(self._m2m_cache)
many_to_many = property(_many_to_many)
def get_m2m_with_model(self):
"""
The many-to-many version of get_fields_with_model().
"""
try:
self._m2m_cache
except AttributeError:
self._fill_m2m_cache()
return list(six.iteritems(self._m2m_cache))
def _fill_m2m_cache(self):
cache = SortedDict()
for parent in self.parents:
for field, model in parent._meta.get_m2m_with_model():
if model:
cache[field] = model
else:
cache[field] = parent
for field in self.local_many_to_many:
cache[field] = None
self._m2m_cache = cache
def get_field(self, name, many_to_many=True):
"""
Returns the requested field by name. Raises FieldDoesNotExist on error.
"""
to_search = (self.fields + self.many_to_many) if many_to_many else self.fields
for f in to_search:
if f.name == name:
return f
raise FieldDoesNotExist('%s has no field named %r' % (self.object_name, name))
def get_field_by_name(self, name):
"""
Returns the (field_object, model, direct, m2m), where field_object is
the Field instance for the given name, model is the model containing
this field (None for local fields), direct is True if the field exists
on this model, and m2m is True for many-to-many relations. When
'direct' is False, 'field_object' is the corresponding RelatedObject
for this field (since the field doesn't have an instance associated
with it).
Uses a cache internally, so after the first access, this is very fast.
"""
try:
try:
return self._name_map[name]
except AttributeError:
cache = self.init_name_map()
return cache[name]
except KeyError:
raise FieldDoesNotExist('%s has no field named %r'
% (self.object_name, name))
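    # Illustrative (editor's note): for a locally declared, non-m2m field this
    # typically returns something like
    #   SomeModel._meta.get_field_by_name('title') -> (<Field: title>, None, True, False)
    # where None means the field lives on this model rather than a parent.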
def get_all_field_names(self):
"""
Returns a list of all field names that are possible for this model
(including reverse relation names). This is used for pretty printing
debugging output (a list of choices), so any internal-only field names
are not included.
"""
try:
cache = self._name_map
except AttributeError:
cache = self.init_name_map()
names = sorted(cache.keys())
# Internal-only names end with "+" (symmetrical m2m related names being
# the main example). Trim them.
return [val for val in names if not val.endswith('+')]
def init_name_map(self):
"""
Initialises the field name -> field object mapping.
"""
cache = {}
# We intentionally handle related m2m objects first so that symmetrical
# m2m accessor names can be overridden, if necessary.
for f, model in self.get_all_related_m2m_objects_with_model():
cache[f.field.related_query_name()] = (f, model, False, True)
for f, model in self.get_all_related_objects_with_model():
cache[f.field.related_query_name()] = (f, model, False, False)
for f, model in self.get_m2m_with_model():
cache[f.name] = (f, model, True, True)
for f, model in self.get_fields_with_model():
cache[f.name] = (f, model, True, False)
for f in self.virtual_fields:
if hasattr(f, 'related'):
cache[f.name] = (f.related, None if f.model == self.model else f.model, True, False)
if app_cache_ready():
self._name_map = cache
return cache
def get_add_permission(self):
"""
This method has been deprecated in favor of
`django.contrib.auth.get_permission_codename`. refs #20642
"""
warnings.warn(
"`Options.get_add_permission` has been deprecated in favor "
"of `django.contrib.auth.get_permission_codename`.",
PendingDeprecationWarning, stacklevel=2)
return 'add_%s' % self.model_name
def get_change_permission(self):
"""
This method has been deprecated in favor of
`django.contrib.auth.get_permission_codename`. refs #20642
"""
warnings.warn(
"`Options.get_change_permission` has been deprecated in favor "
"of `django.contrib.auth.get_permission_codename`.",
PendingDeprecationWarning, stacklevel=2)
return 'change_%s' % self.model_name
def get_delete_permission(self):
"""
This method has been deprecated in favor of
`django.contrib.auth.get_permission_codename`. refs #20642
"""
warnings.warn(
"`Options.get_delete_permission` has been deprecated in favor "
"of `django.contrib.auth.get_permission_codename`.",
PendingDeprecationWarning, stacklevel=2)
return 'delete_%s' % self.model_name
def get_all_related_objects(self, local_only=False, include_hidden=False,
include_proxy_eq=False):
return [k for k, v in self.get_all_related_objects_with_model(
local_only=local_only, include_hidden=include_hidden,
include_proxy_eq=include_proxy_eq)]
def get_all_related_objects_with_model(self, local_only=False,
include_hidden=False,
include_proxy_eq=False):
"""
Returns a list of (related-object, model) pairs. Similar to
get_fields_with_model().
"""
try:
self._related_objects_cache
except AttributeError:
self._fill_related_objects_cache()
predicates = []
if local_only:
predicates.append(lambda k, v: not v)
if not include_hidden:
predicates.append(lambda k, v: not k.field.rel.is_hidden())
cache = (self._related_objects_proxy_cache if include_proxy_eq
else self._related_objects_cache)
return [t for t in cache.items() if all(p(*t) for p in predicates)]
def _fill_related_objects_cache(self):
cache = SortedDict()
parent_list = self.get_parent_list()
for parent in self.parents:
for obj, model in parent._meta.get_all_related_objects_with_model(include_hidden=True):
if (obj.field.creation_counter < 0 or obj.field.rel.parent_link) and obj.model not in parent_list:
continue
if not model:
cache[obj] = parent
else:
cache[obj] = model
# Collect also objects which are in relation to some proxy child/parent of self.
proxy_cache = cache.copy()
for klass in get_models(include_auto_created=True, only_installed=False):
if not klass._meta.swapped:
for f in klass._meta.local_fields:
if f.rel and not isinstance(f.rel.to, six.string_types) and f.generate_reverse_relation:
if self == f.rel.to._meta:
cache[f.related] = None
proxy_cache[f.related] = None
elif self.concrete_model == f.rel.to._meta.concrete_model:
proxy_cache[f.related] = None
self._related_objects_cache = cache
self._related_objects_proxy_cache = proxy_cache
def get_all_related_many_to_many_objects(self, local_only=False):
try:
cache = self._related_many_to_many_cache
except AttributeError:
cache = self._fill_related_many_to_many_cache()
if local_only:
return [k for k, v in cache.items() if not v]
return list(cache)
def get_all_related_m2m_objects_with_model(self):
"""
Returns a list of (related-m2m-object, model) pairs. Similar to
get_fields_with_model().
"""
try:
cache = self._related_many_to_many_cache
except AttributeError:
cache = self._fill_related_many_to_many_cache()
return list(six.iteritems(cache))
def _fill_related_many_to_many_cache(self):
cache = SortedDict()
parent_list = self.get_parent_list()
for parent in self.parents:
for obj, model in parent._meta.get_all_related_m2m_objects_with_model():
if obj.field.creation_counter < 0 and obj.model not in parent_list:
continue
if not model:
cache[obj] = parent
else:
cache[obj] = model
for klass in get_models(only_installed=False):
if not klass._meta.swapped:
for f in klass._meta.local_many_to_many:
if (f.rel
and not isinstance(f.rel.to, six.string_types)
and self == f.rel.to._meta):
cache[f.related] = None
if app_cache_ready():
self._related_many_to_many_cache = cache
return cache
def get_base_chain(self, model):
"""
        Returns a list of parent classes leading to 'model' (ordered from closest
        to most distant ancestor). This has to handle the case where 'model' is
        a grandparent or even more distant relation.
"""
if not self.parents:
return None
if model in self.parents:
return [model]
for parent in self.parents:
res = parent._meta.get_base_chain(model)
if res:
res.insert(0, parent)
return res
return None
def get_parent_list(self):
"""
        Returns a set of all the ancestors of this model. Useful for
        determining if something is an ancestor, regardless of lineage.
"""
result = set()
for parent in self.parents:
result.add(parent)
result.update(parent._meta.get_parent_list())
return result
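    # Illustrative sketch (hypothetical models, for clarity only): given
    #   class Place(models.Model): ...
    #   class Restaurant(Place): ...
    #   class Cafeteria(Restaurant): ...
    # Cafeteria._meta.get_parent_list() would contain {Restaurant, Place}, and
    # Cafeteria._meta.get_base_chain(Place) would return [Restaurant, Place]
    # (closest ancestor first).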
def get_ancestor_link(self, ancestor):
"""
Returns the field on the current model which points to the given
"ancestor". This is possible an indirect link (a pointer to a parent
model, which points, eventually, to the ancestor). Used when
constructing table joins for model inheritance.
Returns None if the model isn't an ancestor of this one.
"""
if ancestor in self.parents:
return self.parents[ancestor]
for parent in self.parents:
# Tries to get a link field from the immediate parent
parent_link = parent._meta.get_ancestor_link(ancestor)
if parent_link:
                # In the case of a proxied model, the first link in the
                # chain to the ancestor is that parent's link.
return self.parents[parent] or parent_link
| mit | 1,197,905,991,700,716,000 | 7,178,042,400,468,610,000 | 40.203735 | 122 | 0.579505 | false |
jwalgran/otm-core | opentreemap/treemap/lib/user.py | 4 | 8019 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.db.models import Q
from treemap.audit import Audit, Authorizable, get_auditable_class
from treemap.models import Instance, MapFeature, InstanceUser, User
from treemap.util import get_filterable_audit_models
from treemap.lib.object_caches import udf_defs
from treemap.udf import UDFModel
def _instance_ids_edited_by(user):
return Audit.objects.filter(user=user)\
.values_list('instance_id', flat=True)\
.exclude(instance_id=None)\
.distinct()
PAGE_DEFAULT = 20
ALLOWED_MODELS = get_filterable_audit_models()
def get_audits(logged_in_user, instance, query_vars, user=None,
models=ALLOWED_MODELS, model_id=None, start_id=None,
prev_start_ids=[], page_size=PAGE_DEFAULT, exclude_pending=True,
should_count=False):
if instance:
if instance.is_accessible_by(logged_in_user):
instances = Instance.objects.filter(pk=instance.pk)
else:
instances = Instance.objects.none()
# If we didn't specify an instance we only want to
# show audits where the user has permission
else:
instances = Instance.objects\
.filter(user_accessible_instance_filter(logged_in_user))
if user:
instances = instances.filter(pk__in=_instance_ids_edited_by(user))
instances = instances.distinct()
if not instances.exists():
# Force no results
return {'audits': Audit.objects.none(),
'total_count': 0,
'next_page': None,
'prev_page': None}
map_feature_models = set(MapFeature.subclass_dict().keys())
model_filter = Q()
# We only want to show the TreePhoto's image, not other fields
# and we want to do it automatically if 'Tree' was specified as
# a model. The same goes for MapFeature(s) <-> MapFeaturePhoto
# There is no need to check permissions, because photos are always visible
if 'Tree' in models:
model_filter = model_filter | Q(model='TreePhoto', field='image')
if map_feature_models.intersection(models):
model_filter = model_filter | Q(model='MapFeaturePhoto', field='image')
for inst in instances:
eligible_models = ({'Tree', 'TreePhoto', 'MapFeaturePhoto'} |
set(inst.map_feature_types)) & set(models)
if logged_in_user == user:
eligible_udfs = {'udf:%s' % udf.id for udf in udf_defs(inst)
if udf.model_type in eligible_models
and udf.iscollection}
# The logged-in user can see all their own edits
model_filter = model_filter | Q(
instance=inst, model__in=(eligible_models | eligible_udfs))
else:
# Filter other users' edits by their visibility to the
# logged-in user
for model in eligible_models:
ModelClass = get_auditable_class(model)
fake_model = ModelClass(instance=inst)
if issubclass(ModelClass, Authorizable):
visible_fields = fake_model.visible_fields(logged_in_user)
model_filter = model_filter |\
Q(model=model, field__in=visible_fields, instance=inst)
else:
model_filter = model_filter | Q(model=model, instance=inst)
if issubclass(ModelClass, UDFModel):
model_collection_udfs_audit_names = (
fake_model.visible_collection_udfs_audit_names(
logged_in_user))
model_filter = model_filter | (
Q(model__in=model_collection_udfs_audit_names))
udf_bookkeeping_fields = Q(
model__startswith='udf:',
field__in=('id', 'model_id', 'field_definition'))
audits = (Audit.objects
.filter(model_filter)
.filter(instance__in=instances)
.select_related('instance')
.exclude(udf_bookkeeping_fields)
.exclude(user=User.system_user())
.order_by('-pk'))
if user:
audits = audits.filter(user=user)
if model_id:
audits = audits.filter(model_id=model_id)
if exclude_pending:
audits = audits.exclude(requires_auth=True, ref__isnull=True)
# Slicing the QuerySet uses a SQL Limit, which has proven to be quite slow.
    # By relying on the fact that our list is ordered by primary key from newest
# to oldest, we can rely on the index on the primary key, which is faster.
if start_id is not None:
audits = audits.filter(pk__lte=start_id)
total_count = audits.count() if should_count else 0
audits = audits[:page_size]
# Coerce the queryset into a list so we can get the last audit row on the
# current page
audits = list(audits)
# We are using len(audits) instead of audits.count() because we
# have already realized the queryset at this point
if len(audits) == page_size:
query_vars.setlist('prev', prev_start_ids + [audits[0].pk])
query_vars['start'] = audits[-1].pk - 1
next_page = "?" + query_vars.urlencode()
else:
next_page = None
if prev_start_ids:
if len(prev_start_ids) == 1:
del query_vars['prev']
del query_vars['start']
else:
prev_start_id = prev_start_ids.pop()
query_vars.setlist('prev', prev_start_ids)
query_vars['start'] = prev_start_id
prev_page = "?" + query_vars.urlencode()
else:
prev_page = None
return {'audits': audits,
'total_count': total_count,
'next_page': next_page,
'prev_page': prev_page}
def get_audits_params(request):
PAGE_MAX = 100
r = request.GET
page_size = min(int(r.get('page_size', PAGE_DEFAULT)), PAGE_MAX)
start_id = r.get('start', None)
if start_id is not None:
start_id = int(start_id)
prev_start_ids = [int(pk) for pk in r.getlist('prev')]
models = r.getlist('models', default=ALLOWED_MODELS)
if models:
for model in models:
if model not in ALLOWED_MODELS:
raise Exception("Invalid model: %s" % model)
model_id = r.get('model_id', None)
if model_id is not None and len(models) != 1:
raise Exception("You must specific one and only model "
"when looking up by id")
exclude_pending = r.get('exclude_pending', "false") == "true"
return {'start_id': start_id, 'prev_start_ids': prev_start_ids,
'page_size': page_size, 'models': models, 'model_id': model_id,
'exclude_pending': exclude_pending}
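# Illustrative parse of a hypothetical request (parameter values are examples only,
# and 'Tree' is assumed to be one of the allowed models):
#   GET ...?page_size=50&start=1200&prev=900&prev=600&models=Tree&exclude_pending=true
# would yield roughly:
#   {'start_id': 1200, 'prev_start_ids': [900, 600], 'page_size': 50,
#    'models': ['Tree'], 'model_id': None, 'exclude_pending': True}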
def user_accessible_instance_filter(logged_in_user):
public = Q(is_public=True)
if logged_in_user is not None and not logged_in_user.is_anonymous():
private_with_access = Q(instanceuser__user=logged_in_user)
instance_filter = public | private_with_access
else:
instance_filter = public
return instance_filter
def get_user_instances(logged_in_user, user, current_instance=None):
# Which instances can the logged-in user see?
instance_filter = (user_accessible_instance_filter(logged_in_user))
user_instance_ids = (InstanceUser.objects
.filter(user_id=user.pk)
.values_list('instance_id', flat=True))
instance_filter = Q(instance_filter, Q(pk__in=user_instance_ids))
# The logged-in user should see the current instance in their own list
if current_instance and logged_in_user == user:
instance_filter = instance_filter | Q(pk=current_instance.id)
return (Instance.objects
.filter(instance_filter)
.distinct()
.order_by('name'))
| gpl-3.0 | -1,007,933,840,928,631,600 | 5,625,840,747,217,021,000 | 36.125 | 79 | 0.599451 | false |
kienpham2000/ansible-modules-core | packaging/rpm_key.py | 60 | 7339 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Ansible module to import third party repo keys to your rpm db
# (c) 2013, Héctor Acosta <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rpm_key
author: Hector Acosta <[email protected]>
short_description: Adds or removes a gpg key from the rpm db
description:
- Adds or removes (rpm --import) a gpg key to your rpm database.
version_added: "1.3"
options:
key:
required: true
default: null
aliases: []
description:
- Key that will be modified. Can be a url, a file, or a keyid if the key already exists in the database.
state:
required: false
default: "present"
choices: [present, absent]
description:
      - Whether the key will be imported or removed from the rpm db.
validate_certs:
description:
- If C(no) and the C(key) is a url starting with https, SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
'''
EXAMPLES = '''
# Example action to import a key from a url
- rpm_key: state=present key=http://apt.sw.be/RPM-GPG-KEY.dag.txt
# Example action to import a key from a file
- rpm_key: state=present key=/path/to/key.gpg
# Example action to ensure a key is not present in the db
- rpm_key: state=absent key=DEADB33F
'''
import syslog
import os.path
import re
import tempfile
def is_pubkey(string):
"""Verifies if string is a pubkey"""
pgp_regex = ".*?(-----BEGIN PGP PUBLIC KEY BLOCK-----.*?-----END PGP PUBLIC KEY BLOCK-----).*"
return re.match(pgp_regex, string, re.DOTALL)
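# For illustration (hypothetical input): is_pubkey() matches ASCII-armored keys, i.e. a
# string containing "-----BEGIN PGP PUBLIC KEY BLOCK----- ... -----END PGP PUBLIC KEY BLOCK-----",
# and returns None for anything else (such as an HTML error page fetched by mistake).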
class RpmKey:
def __init__(self, module):
self.syslogging = False
# If the key is a url, we need to check if it's present to be idempotent,
# to do that, we need to check the keyid, which we can get from the armor.
keyfile = None
should_cleanup_keyfile = False
self.module = module
self.rpm = self.module.get_bin_path('rpm', True)
state = module.params['state']
key = module.params['key']
if '://' in key:
keyfile = self.fetch_key(key)
keyid = self.getkeyid(keyfile)
should_cleanup_keyfile = True
elif self.is_keyid(key):
keyid = key
elif os.path.isfile(key):
keyfile = key
keyid = self.getkeyid(keyfile)
else:
self.module.fail_json(msg="Not a valid key %s" % key)
keyid = self.normalize_keyid(keyid)
if state == 'present':
if self.is_key_imported(keyid):
module.exit_json(changed=False)
else:
if not keyfile:
self.module.fail_json(msg="When importing a key, a valid file must be given")
self.import_key(keyfile, dryrun=module.check_mode)
if should_cleanup_keyfile:
self.module.cleanup(keyfile)
module.exit_json(changed=True)
else:
if self.is_key_imported(keyid):
self.drop_key(keyid, dryrun=module.check_mode)
module.exit_json(changed=True)
else:
module.exit_json(changed=False)
def fetch_key(self, url):
"""Downloads a key from url, returns a valid path to a gpg key"""
try:
rsp, info = fetch_url(self.module, url)
key = rsp.read()
if not is_pubkey(key):
self.module.fail_json(msg="Not a public key: %s" % url)
tmpfd, tmpname = tempfile.mkstemp()
tmpfile = os.fdopen(tmpfd, "w+b")
tmpfile.write(key)
tmpfile.close()
return tmpname
except urllib2.URLError, e:
self.module.fail_json(msg=str(e))
def normalize_keyid(self, keyid):
"""Ensure a keyid doesn't have a leading 0x, has leading or trailing whitespace, and make sure is lowercase"""
ret = keyid.strip().lower()
if ret.startswith('0x'):
return ret[2:]
elif ret.startswith('0X'):
return ret[2:]
else:
return ret
def getkeyid(self, keyfile):
gpg = self.module.get_bin_path('gpg', True)
stdout, stderr = self.execute_command([gpg, '--no-tty', '--batch', '--with-colons', '--fixed-list-mode', '--list-packets', keyfile])
for line in stdout.splitlines():
line = line.strip()
if line.startswith(':signature packet:'):
# We want just the last 8 characters of the keyid
keyid = line.split()[-1].strip()[8:]
return keyid
            self.module.fail_json(msg="Unexpected gpg output")
def is_keyid(self, keystr):
"""Verifies if a key, as provided by the user is a keyid"""
return re.match('(0x)?[0-9a-f]{8}', keystr, flags=re.IGNORECASE)
def execute_command(self, cmd):
if self.syslogging:
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Command %s' % '|'.join(cmd))
rc, stdout, stderr = self.module.run_command(cmd)
if rc != 0:
self.module.fail_json(msg=stderr)
return stdout, stderr
def is_key_imported(self, keyid):
stdout, stderr = self.execute_command([self.rpm, '-qa', 'gpg-pubkey'])
for line in stdout.splitlines():
line = line.strip()
if not line:
continue
match = re.match('gpg-pubkey-([0-9a-f]+)-([0-9a-f]+)', line)
if not match:
self.module.fail_json(msg="rpm returned unexpected output [%s]" % line)
else:
if keyid == match.group(1):
return True
return False
def import_key(self, keyfile, dryrun=False):
if not dryrun:
self.execute_command([self.rpm, '--import', keyfile])
def drop_key(self, key, dryrun=False):
if not dryrun:
self.execute_command([self.rpm, '--erase', '--allmatches', "gpg-pubkey-%s" % key])
def main():
module = AnsibleModule(
argument_spec = dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
key=dict(required=True, type='str'),
validate_certs=dict(default='yes', type='bool'),
),
supports_check_mode=True
)
RpmKey(module)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
| gpl-3.0 | -7,002,517,579,855,384,000 | 899,388,719,358,469,400 | 34.621359 | 140 | 0.591714 | false |
bartQu9/fallenmua | resolvers.py | 1 | 3740 | from urllib.error import URLError, HTTPError
from xml.dom import minidom
from dns import resolver
import urllib.request
import logging
def parse_thunderbird_autoconfig(xml_autoconfig):
mx_servers = []
dom_tree = minidom.parseString(xml_autoconfig)
c_nodes = dom_tree.childNodes
for i in c_nodes[0].getElementsByTagName("outgoingServer"):
try:
curr_hostname = i.getElementsByTagName("hostname")[0].childNodes[0].toxml().lower()
curr_port = int(i.getElementsByTagName("port")[0].childNodes[0].toxml())
curr_sock_type = i.getElementsByTagName("socketType")[0].childNodes[0].toxml().lower()
curr_username_type = i.getElementsByTagName("username")[0].childNodes[0].toxml()
curr_auth_method = i.getElementsByTagName("authentication")[0].childNodes[0].toxml().lower()
except IndexError:
logging.error("Bad autoconfiguration file in ISPDB")
return None
mx_servers.append({'hostname': curr_hostname, 'port': curr_port, 'sock_type': curr_sock_type,
'username_type': curr_username_type, 'auth_method': curr_auth_method})
if mx_servers:
return mx_servers
else:
return None
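# A sketch of the autoconfig XML shape this parser expects (abbreviated, hypothetical
# values; real files come from the Mozilla ISPDB / ISP autoconfig endpoints):
#
#   <clientConfig version="1.1">
#     <emailProvider id="example.com">
#       <outgoingServer type="smtp">
#         <hostname>smtp.example.com</hostname>
#         <port>587</port>
#         <socketType>STARTTLS</socketType>
#         <username>%EMAILADDRESS%</username>
#         <authentication>password-cleartext</authentication>
#       </outgoingServer>
#     </emailProvider>
#   </clientConfig>
#
# parse_thunderbird_autoconfig(xml_bytes) would then return something like:
#   [{'hostname': 'smtp.example.com', 'port': 587, 'sock_type': 'starttls',
#     'username_type': '%EMAILADDRESS%', 'auth_method': 'password-cleartext'}]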
def get_mx_from_ispdb(domain, _timeout=2):
"""
Search for MX servers in Mozilla ISPDB.
:param _timeout: resource connection timeout
:param domain: a str FQDN
    :return: list of dicts describing MX servers (hostname, port, socket type, username type, auth method)
"""
try:
logging.debug("Connecting to the Mozilla ISPDB")
xml_config = urllib.request.urlopen("https://autoconfig.thunderbird.net/autoconfig/v1.1/{0}".
format(domain), timeout=_timeout).read()
logging.debug("Fetched autoconfigure XML file from Mozilla ISPDB")
except HTTPError:
logging.info("No data for domain {0} in the Mozilla ISPDB".format(domain))
return None
except URLError as err:
logging.warning("Unable to connect with the Mozilla ISPDB, reason: {0}".format(err))
return None
mx_servers = parse_thunderbird_autoconfig(xml_config)
hostnames = [mx['hostname'] for mx in mx_servers]
logging.debug("MX servers from Mozilla ISPDB: {0}".format(hostnames))
return mx_servers
def get_mx_from_isp(domain, _timeout=4):
try:
logging.debug("Connecting to the ISP autoconfig")
xml_config = urllib.request.urlopen("http://autoconfig.{0}/mail/config-v1.1.xml".format(domain),
timeout=_timeout).read()
logging.debug("Fetched autoconfigure XML file from autoconfig.{0}/mail/config-v1.1.xml".format(domain))
except (HTTPError, URLError):
logging.info("No data on autoconfig.{0}".format(domain))
return None
mx_servers = parse_thunderbird_autoconfig(xml_config)
hostnames = [mx['hostname'] for mx in mx_servers]
logging.debug("MX servers from autoconfig.{0}: {1}".format(domain, hostnames))
return mx_servers
def get_mx_from_dns(domain):
mx_servers = []
try:
_tmp_mx = []
for mx in resolver.query(domain, "MX"):
_tmp_mx.append(mx.to_text().split(" "))
logging.info("Found {0} MX servers in DNS zone".format(len(_tmp_mx)))
_tmp_mx.sort() # sort MX's by priority
except resolver.NXDOMAIN:
logging.error("Cannot resolve domain name ".format(domain))
return None
for mx in _tmp_mx:
for port in (587, 465, 25): # Adding commonly known SMTP ports
mx_servers.append({'hostname': mx[1], 'port': port, 'sock_type': None, 'username_type': None,
'auth_method': None})
return mx_servers
| gpl-3.0 | 7,698,984,100,565,145,000 | 4,479,584,843,412,848,000 | 37.163265 | 111 | 0.634225 | false |
AnimeshSinha1309/WebsiteEdunet | WebsiteEdunet/env/Lib/site-packages/django/shortcuts.py | 135 | 7957 | """
This module collects helper functions and classes that "span" multiple levels
of MVC. In other words, these functions/classes introduce controlled coupling
for convenience's sake.
"""
import warnings
from django.core import urlresolvers
from django.db.models.base import ModelBase
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.http import (
Http404, HttpResponse, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.template import RequestContext, loader
from django.template.context import _current_app_undefined
from django.template.engine import (
_context_instance_undefined, _dictionary_undefined, _dirs_undefined,
)
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
from django.utils.functional import Promise
def render_to_response(template_name, context=None,
context_instance=_context_instance_undefined,
content_type=None, status=None, dirs=_dirs_undefined,
dictionary=_dictionary_undefined, using=None):
"""
Returns a HttpResponse whose content is filled with the result of calling
django.template.loader.render_to_string() with the passed arguments.
"""
if (context_instance is _context_instance_undefined
and dirs is _dirs_undefined
and dictionary is _dictionary_undefined):
# No deprecated arguments were passed - use the new code path
content = loader.render_to_string(template_name, context, using=using)
else:
# Some deprecated arguments were passed - use the legacy code path
content = loader.render_to_string(
template_name, context, context_instance, dirs, dictionary,
using=using)
return HttpResponse(content, content_type, status)
def render(request, template_name, context=None,
context_instance=_context_instance_undefined,
content_type=None, status=None, current_app=_current_app_undefined,
dirs=_dirs_undefined, dictionary=_dictionary_undefined,
using=None):
"""
Returns a HttpResponse whose content is filled with the result of calling
django.template.loader.render_to_string() with the passed arguments.
Uses a RequestContext by default.
"""
if (context_instance is _context_instance_undefined
and current_app is _current_app_undefined
and dirs is _dirs_undefined
and dictionary is _dictionary_undefined):
# No deprecated arguments were passed - use the new code path
# In Django 1.10, request should become a positional argument.
content = loader.render_to_string(
template_name, context, request=request, using=using)
else:
# Some deprecated arguments were passed - use the legacy code path
if context_instance is not _context_instance_undefined:
if current_app is not _current_app_undefined:
raise ValueError('If you provide a context_instance you must '
'set its current_app before calling render()')
else:
context_instance = RequestContext(request)
if current_app is not _current_app_undefined:
warnings.warn(
"The current_app argument of render is deprecated. "
"Set the current_app attribute of request instead.",
RemovedInDjango110Warning, stacklevel=2)
request.current_app = current_app
# Directly set the private attribute to avoid triggering the
# warning in RequestContext.__init__.
context_instance._current_app = current_app
content = loader.render_to_string(
template_name, context, context_instance, dirs, dictionary,
using=using)
return HttpResponse(content, content_type, status)
def redirect(to, *args, **kwargs):
"""
Returns an HttpResponseRedirect to the appropriate URL for the arguments
passed.
The arguments could be:
* A model: the model's `get_absolute_url()` function will be called.
* A view name, possibly with arguments: `urlresolvers.reverse()` will
be used to reverse-resolve the name.
* A URL, which will be used as-is for the redirect location.
By default issues a temporary redirect; pass permanent=True to issue a
permanent redirect
"""
if kwargs.pop('permanent', False):
redirect_class = HttpResponsePermanentRedirect
else:
redirect_class = HttpResponseRedirect
return redirect_class(resolve_url(to, *args, **kwargs))
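# Illustrative uses of redirect() (hypothetical names, shown for clarity only):
#
#   return redirect(post)                                   # model with get_absolute_url()
#   return redirect('article-detail', pk=article.pk)        # view name, reversed
#   return redirect('https://example.com/', permanent=True) # plain URL, permanent redirect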
def _get_queryset(klass):
"""
Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
get_object_or_404 and get_list_or_404 more DRY.
Raises a ValueError if klass is not a Model, Manager, or QuerySet.
"""
if isinstance(klass, QuerySet):
return klass
elif isinstance(klass, Manager):
manager = klass
elif isinstance(klass, ModelBase):
manager = klass._default_manager
else:
if isinstance(klass, type):
klass__name = klass.__name__
else:
klass__name = klass.__class__.__name__
raise ValueError("Object is of type '%s', but must be a Django Model, "
"Manager, or QuerySet" % klass__name)
return manager.all()
def get_object_or_404(klass, *args, **kwargs):
"""
Uses get() to return an object, or raises a Http404 exception if the object
does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
    Note: Like with get(), a MultipleObjectsReturned will be raised if more than one
object is found.
"""
queryset = _get_queryset(klass)
try:
return queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
def get_list_or_404(klass, *args, **kwargs):
"""
Uses filter() to return a list of objects, or raise a Http404 exception if
the list is empty.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the filter() query.
"""
queryset = _get_queryset(klass)
obj_list = list(queryset.filter(*args, **kwargs))
if not obj_list:
raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
return obj_list
def resolve_url(to, *args, **kwargs):
"""
Return a URL appropriate for the arguments passed.
The arguments could be:
* A model: the model's `get_absolute_url()` function will be called.
* A view name, possibly with arguments: `urlresolvers.reverse()` will
be used to reverse-resolve the name.
* A URL, which will be returned as-is.
"""
# If it's a model, use get_absolute_url()
if hasattr(to, 'get_absolute_url'):
return to.get_absolute_url()
if isinstance(to, Promise):
# Expand the lazy instance, as it can cause issues when it is passed
# further to some Python functions like urlparse.
to = force_text(to)
if isinstance(to, six.string_types):
# Handle relative URLs
if to.startswith(('./', '../')):
return to
# Next try a reverse URL resolution.
try:
return urlresolvers.reverse(to, args=args, kwargs=kwargs)
except urlresolvers.NoReverseMatch:
# If this is a callable, re-raise.
if callable(to):
raise
# If this doesn't "feel" like a URL, re-raise.
if '/' not in to and '.' not in to:
raise
# Finally, fall back and assume it's a URL
return to
| mit | 8,426,718,994,742,958,000 | -3,048,886,230,096,700,400 | 36.182243 | 90 | 0.657032 | false |
aperigault/ansible | lib/ansible/modules/cloud/azure/azure_rm_sqlserver.py | 24 | 10519 | #!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_sqlserver
version_added: "2.5"
short_description: Manage SQL Server instance
description:
- Create, update and delete instance of SQL Server.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
name:
description:
- The name of the server.
required: True
location:
description:
- Resource location.
admin_username:
description:
- Administrator username for the server. Once created it cannot be changed.
admin_password:
description:
- The administrator login password (required for server creation).
version:
description:
- The version of the server. For example C(12.0).
identity:
description:
- The identity type. Set this to C(SystemAssigned) in order to automatically create and assign an Azure Active Directory principal for the resource.
- Possible values include C(SystemAssigned).
state:
description:
- State of the SQL server. Use C(present) to create or update a server and use C(absent) to delete a server.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Create (or update) SQL Server
azure_rm_sqlserver:
resource_group: myResourceGroup
name: server_name
location: westus
admin_username: mylogin
admin_password: Testpasswordxyz12!
'''
RETURN = '''
id:
description:
- Resource ID.
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Sql/servers/sqlcrudtest-4645
version:
description:
- The version of the server.
returned: always
type: str
sample: 12.0
state:
description:
- The state of the server.
returned: always
type: str
sample: state
fully_qualified_domain_name:
description:
- The fully qualified domain name of the server.
returned: always
type: str
sample: sqlcrudtest-4645.database.windows.net
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from azure.mgmt.sql import SqlManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
NoAction, Create, Update, Delete = range(4)
class AzureRMSqlServer(AzureRMModuleBase):
"""Configuration class for an Azure RM SQL Server resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
location=dict(
type='str'
),
admin_username=dict(
type='str'
),
admin_password=dict(
type='str',
no_log=True
),
version=dict(
type='str'
),
identity=dict(
type='str'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.name = None
self.parameters = dict()
self.tags = None
self.results = dict(changed=False)
self.state = None
self.to_do = Actions.NoAction
super(AzureRMSqlServer, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
if hasattr(self, key):
setattr(self, key, kwargs[key])
elif kwargs[key] is not None:
if key == "location":
self.parameters.update({"location": kwargs[key]})
elif key == "admin_username":
self.parameters.update({"administrator_login": kwargs[key]})
elif key == "admin_password":
self.parameters.update({"administrator_login_password": kwargs[key]})
elif key == "version":
self.parameters.update({"version": kwargs[key]})
elif key == "identity":
self.parameters.update({"identity": {"type": kwargs[key]}})
old_response = None
response = None
results = dict()
resource_group = self.get_resource_group(self.resource_group)
if "location" not in self.parameters:
self.parameters["location"] = resource_group.location
old_response = self.get_sqlserver()
if not old_response:
self.log("SQL Server instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log("SQL Server instance already exists")
if self.state == 'absent':
self.to_do = Actions.Delete
elif self.state == 'present':
self.log("Need to check if SQL Server instance has to be deleted or may be updated")
update_tags, newtags = self.update_tags(old_response.get('tags', dict()))
if update_tags:
self.tags = newtags
self.to_do = Actions.Update
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log("Need to Create / Update the SQL Server instance")
if self.check_mode:
self.results['changed'] = True
return self.results
self.parameters['tags'] = self.tags
response = self.create_update_sqlserver()
response.pop('administrator_login_password', None)
if not old_response:
self.results['changed'] = True
else:
self.results['changed'] = old_response.__ne__(response)
self.log("Creation / Update done")
elif self.to_do == Actions.Delete:
self.log("SQL Server instance deleted")
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_sqlserver()
# make sure instance is actually deleted, for some Azure resources, instance is hanging around
# for some time after deletion -- this should be really fixed in Azure
while self.get_sqlserver():
time.sleep(20)
else:
self.log("SQL Server instance unchanged")
self.results['changed'] = False
response = old_response
if response:
self.results["id"] = response["id"]
self.results["version"] = response["version"]
self.results["state"] = response["state"]
self.results["fully_qualified_domain_name"] = response["fully_qualified_domain_name"]
return self.results
def create_update_sqlserver(self):
'''
Creates or updates SQL Server with the specified configuration.
:return: deserialized SQL Server instance state dictionary
'''
self.log("Creating / Updating the SQL Server instance {0}".format(self.name))
try:
response = self.sql_client.servers.create_or_update(self.resource_group,
self.name,
self.parameters)
if isinstance(response, LROPoller):
response = self.get_poller_result(response)
except CloudError as exc:
self.log('Error attempting to create the SQL Server instance.')
self.fail("Error creating the SQL Server instance: {0}".format(str(exc)))
return response.as_dict()
def delete_sqlserver(self):
'''
Deletes specified SQL Server instance in the specified subscription and resource group.
:return: True
'''
self.log("Deleting the SQL Server instance {0}".format(self.name))
try:
response = self.sql_client.servers.delete(self.resource_group,
self.name)
except CloudError as e:
self.log('Error attempting to delete the SQL Server instance.')
self.fail("Error deleting the SQL Server instance: {0}".format(str(e)))
return True
def get_sqlserver(self):
'''
Gets the properties of the specified SQL Server.
:return: deserialized SQL Server instance state dictionary
'''
self.log("Checking if the SQL Server instance {0} is present".format(self.name))
found = False
try:
response = self.sql_client.servers.get(self.resource_group,
self.name)
found = True
self.log("Response : {0}".format(response))
self.log("SQL Server instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the SQL Server instance.')
if found is True:
return response.as_dict()
return False
def main():
"""Main execution"""
AzureRMSqlServer()
if __name__ == '__main__':
main()
| gpl-3.0 | -2,005,250,184,755,953,700 | 6,146,914,426,829,618,000 | 31.871875 | 160 | 0.563647 | false |
britcey/ansible | lib/ansible/modules/network/dellos9/dellos9_command.py | 46 | 7781 | #!/usr/bin/python
#
# (c) 2015 Peter Sprygada, <[email protected]>
#
# Copyright (c) 2016 Dell Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = """
---
module: dellos9_command
version_added: "2.2"
author: "Dhivya P (@dhivyap)"
short_description: Run commands on remote devices running Dell OS9
description:
- Sends arbitrary commands to a Dell OS9 node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(dellos9_config) to configure Dell OS9 devices.
extends_documentation_fragment: dellos9
options:
commands:
description:
- List of commands to send to the remote dellos9 device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of I(retries), the task fails.
See examples.
required: false
default: null
retries:
description:
      - Specifies the number of times a command should be retried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
notes:
- This module requires Dell OS9 version 9.10.0.1P13 or above.
- This module requires to increase the ssh connection rate limit.
Use the following command I(ip ssh connection-rate-limit 60)
to configure the same. This can be done via M(dellos9_config) module
as well.
"""
EXAMPLES = """
# Note: examples below use the following provider dict to handle
# transport and authentication to the node.
vars:
cli:
host: "{{ inventory_hostname }}"
username: admin
password: admin
transport: cli
tasks:
- name: run show version on remote devices
dellos9_command:
commands: show version
provider: "{{ cli }}"
- name: run show version and check to see if output contains OS9
dellos9_command:
commands: show version
wait_for: result[0] contains OS9
provider: "{{ cli }}"
- name: run multiple commands on remote nodes
dellos9_command:
commands:
- show version
- show interfaces
provider: "{{ cli }}"
- name: run multiple commands and evaluate the output
dellos9_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains OS9
- result[1] contains Loopback
provider: "{{ cli }}"
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always apart from low level errors (such as action plugin)
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always apart from low level errors (such as action plugin)
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
warnings:
description: The list of warnings (if any) generated by module based on arguments
returned: always
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.dellos9 import run_commands
from ansible.module_utils.dellos9 import dellos9_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network_common import ComplexList
from ansible.module_utils.netcli import Conditional
def to_lines(stdout):
for item in stdout:
if isinstance(item, basestring):
item = str(item).split('\n')
yield item
def parse_commands(module, warnings):
command = ComplexList(dict(
command=dict(key=True),
prompt=dict(),
answer=dict()
), module)
commands = command(module.params['commands'])
for index, item in enumerate(commands):
if module.check_mode and not item['command'].startswith('show'):
warnings.append(
'only show commands are supported when using check mode, not '
'executing `%s`' % item['command']
)
elif item['command'].startswith('conf'):
module.fail_json(
msg='dellos9_command does not support running config mode '
'commands. Please use dellos9_config instead'
)
return commands
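# Example shapes accepted for the `commands` option (hypothetical values): plain
# strings or dicts with prompt/answer handling, e.g. in a playbook:
#   commands:
#     - show version
#     - {command: 'show processes', prompt: 'continue?', answer: 'y'}
# (config-mode commands are rejected above; use dellos9_config for those.)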
def main():
"""main entry point for module execution
"""
argument_spec = dict(
# { command: <str>, prompt: <str>, response: <str> }
commands=dict(type='list', required=True),
wait_for=dict(type='list', aliases=['waitfor']),
match=dict(default='all', choices=['all', 'any']),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
argument_spec.update(dellos9_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
result = {'changed': False}
warnings = list()
check_args(module, warnings)
commands = parse_commands(module, warnings)
result['warnings'] = warnings
wait_for = module.params['wait_for'] or list()
conditionals = [Conditional(c) for c in wait_for]
retries = module.params['retries']
interval = module.params['interval']
match = module.params['match']
while retries > 0:
responses = run_commands(module, commands)
for item in list(conditionals):
if item(responses):
if match == 'any':
conditionals = list()
break
conditionals.remove(item)
if not conditionals:
break
time.sleep(interval)
retries -= 1
if conditionals:
failed_conditions = [item.raw for item in conditionals]
        msg = 'One or more conditional statements have not been satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions)
result = {
'changed': False,
'stdout': responses,
'stdout_lines': list(to_lines(responses))
}
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -1,013,231,051,478,195,200 | 3,225,659,115,817,871,400 | 30.248996 | 83 | 0.652872 | false |
emonty/vhd-util | tools/python/logging/logging-0.4.9.2/test/log_test11.py | 42 | 2993 | #!/usr/bin/env python
#
# Copyright 2001-2002 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# This file is part of the Python logging distribution. See
# http://www.red-dove.com/python_logging.html
#
"""Test harness for the logging module. Tests BufferingSMTPHandler, an alternative implementation
of SMTPHandler.
Copyright (C) 2001-2002 Vinay Sajip. All Rights Reserved.
"""
import string, logging, logging.handlers
MAILHOST = 'beta'
FROM = '[email protected]'
TO = ['arkadi_renko']
SUBJECT = 'Test Logging email from Python logging module (buffering)'
class BufferingSMTPHandler(logging.handlers.BufferingHandler):
def __init__(self, mailhost, fromaddr, toaddrs, subject, capacity):
logging.handlers.BufferingHandler.__init__(self, capacity)
self.mailhost = mailhost
self.mailport = None
self.fromaddr = fromaddr
self.toaddrs = toaddrs
self.subject = subject
self.setFormatter(logging.Formatter("%(asctime)s %(levelname)-5s %(message)s"))
def flush(self):
if len(self.buffer) > 0:
try:
import smtplib
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n" % (self.fromaddr, string.join(self.toaddrs, ","), self.subject)
for record in self.buffer:
s = self.format(record)
print s
msg = msg + s + "\r\n"
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except:
self.handleError(None) # no particular record
self.buffer = []
def test():
logger = logging.getLogger("")
logger.setLevel(logging.DEBUG)
logger.addHandler(BufferingSMTPHandler(MAILHOST, FROM, TO, SUBJECT, 10))
for i in xrange(102):
logger.info("Info index = %d", i)
logging.shutdown()
if __name__ == "__main__":
test() | gpl-2.0 | 9,081,862,369,599,044,000 | 1,563,746,647,001,400,000 | 40.583333 | 129 | 0.663882 | false |
kooksee/TIOT | test/project/src/app/proto/protocol/XBeeProtocol.py | 1 | 11235 | # encoding=utf-8
import binascii
import json
from twisted.internet.protocol import Protocol
from app.proto.controller.XbeeController import XBeeController
class XBeeProtocol(Protocol):
def __init__(self):
self.ip = ''
self.port = ''
def connectionMade(self):
#import socket
#self.transport.socket._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.ip = str(self.transport.client[0])
self.port = str(self.transport.client[1])
self.factory.numProtocols += 1
print 'conn build From ip:' + self.ip + ' port:' + self.port
print 'current conn num is ' + str(self.factory.numProtocols) + "\n"
self.divName = self.ip +":"+ self.port+"##"+self.__class__.__name__
# self.divName = repr(self) + "##" + self.__class__.__name__
self.factory.controller.add_client(self.divName, self.transport)
return
def connectionLost(self, reason):
print 'conn lost reason --> '+str(reason)
self.factory.numProtocols -= 1
print 'conn lost. ip:' + self.ip + ' port:' + self.port
print 'current conn num is ' + str(self.factory.numProtocols) + "\n"
self.factory.controller.del_client(self.divName)
return
def dataReceived(self, data):
xbeeController = XBeeController()
# print 'recv data from'+self.divName + "\n" + binascii.b2a_hex(data)
print 'recv data from ip:' + self.ip + ' port:' + self.port + ' data:' + "\n" + binascii.b2a_hex(data)
kdiv = self.factory.controller.online_session
for div in kdiv:
if div == self.divName:
print "设备" + div + "正在把数据-->"
data1 = binascii.b2a_hex(data)
print data1
data2 = xbeeController.getPackets(data1).get_import_data()
for div in kdiv:
#print binascii.b2a_hex(data)
# print div.split("##")[-1]," ",self.__class__.__name__
if div.split("##")[-1] == "LightProtocol":
if data2[0].get("hot") or data2[0].get("smog"):
data_hex = ' 7e 00 16 10 00 00 7d 33 a2 00 40 71 54 0a ff fe 00 00 01 00 00 03 00 00 00 00 2a'
data_hex = str(bytearray.fromhex(data_hex))
data_hex1 = '7e 00 16 10 00 00 7d 33 a2 00 40 71 53 bc ff fe 00 00 01 00 00 03 00 00 00 00 79'
data_hex1 = str(bytearray.fromhex(data_hex1))
print data_hex
kdiv[div].write(data_hex)
kdiv[div].write(data_hex1)
if div.split("##")[-1] == self.__class__.__name__:
# data = xbeeController.getPackets(
# "7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 66 66 A6 41 02 00 02 00 1F 85 17 42 44").get_import_data()
# print data
str_data = json.dumps(data2)
print str_data
kdiv[div].write(str_data)
print "传递给:" + div
print "\n"
return
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 B8 1E 0A 42 E9
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 00 9F 41 11 22 33 44 11 22 33 44 26
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 80 9E 41 11 22 33 44 11 22 33 44 A7
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 80 9F 41 11 22 33 44 11 22 33 44 A6
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 B8 1E 0A 42 E9
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 00 9E 41 11 22 33 44 11 22 33 44 27
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 00 9F 41 11 22 33 44 11 22 33 44 26
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 AE 47 0A 42 CA
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 00 9E 41 11 22 33 44 11 22 33 44 27
# 7E 00 13 20 19 01 00 00 02 11 00 03 50 01 0E 00 0E 00 00 00 80 3F 83
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 80 9E 41 11 22 33 44 11 22 33 44 A7
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 80 9F 41 11 22 33 44 11 22 33 44 A6
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 CB 88 BB 54 DD
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 AE 47 0A 42 CA
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 00 BC 41 11 22 33 44 11 22 33 44 09
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 80 BE 41 11 22 33 44 11 22 33 44 87
# 7E 00 13 20 19 01 00 00 02 11 00 03 50 01 0E 00 0E 00 00 00 80 3F 83
# 7E 00 13 20 19 01 00 00 02 11 00 03 50 01 0E 00 0E 00 00 00 80 3F 83
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 00 B4 41 11 22 33 44 11 22 33 44 11
# 7E 00 1B 20 19 01 00 00 02 11 00 03 50 03 01 00 01 00 00 00 B7 41 11 22 33 44 11 22 33 44 0E
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 AE 47 0A 42 CA
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 AE 47 0A 42 CA
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 E9 88 BB 54 BF
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AA 42 82
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AA 42 82
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 B8 1E 0A 42 E9
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B2 41 7B
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 AE 47 0A 42 CA
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AD 41 80
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 B2 41 FB
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 8A C1 23
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 07 89 BB 54 A0
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AE 41 FF
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 FA 40 34
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 B5 41 F8
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AE 41 FF
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AF 41 7D 5E
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AF 41 7D 5E
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 25 89 BB 54 82
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AC 41 01
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AF 41 7D 5E
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AD 41 80
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AF 41 7D 5E
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AC 41 01
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AF 41 FE
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AD 41 80
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AF 41 FE
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 43 89 BB 54 64
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AF 41 FE
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AF 41 FE
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AD 41 80
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 61 89 BB 54 46
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 66 66 0A 42 F3
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AE 41 7F
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 5C 8F 0A 42 D4
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AE 41 7F
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 14 AE 0A 42 FD
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 7F 89 BB 54 28
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 B0 41 FD
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 AE 41 7F
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 B0 41 7D 5D
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 28 41 05
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 14 AE 0A 42 FD
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 A8 C1 85
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 5C 8F 0A 42 D4
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 A1 C1 8C
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 A3 C1 8A
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 9D 89 BB 54 0A
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 14 AE 0A 42 FD
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 9D C1 90
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 80 AD 41 00
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 14 AE 0A 42 FD
# 7E 00 13 20 08 00 00 00 02 11 00 03 50 01 01 00 01 00 00 00 A5 C1 08
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 14 AE 0A 42 FD
# 7E 00 0F 22 4C 4F 49 54 02 04 00 00 60 00 BB 89 BB 54 EC
# 7E 00 1B 20 1B 00 60 03 02 11 00 03 50 01 01 00 01 00 00 00 A8 41 02 00 02 00 14 AE 0A 42 FD
| gpl-2.0 | -79,922,462,028,317,260 | -2,271,502,676,929,535,000 | 58.951087 | 135 | 0.621846 | false |
ruthger/Archipel | ArchipelAgent/archipel-agent-action-scheduler/archipelagentactionscheduler/__init__.py | 5 | 2236 | # -*- coding: utf-8 -*-
#
# __init__.py
#
# Copyright (C) 2010 Antoine Mercadal <[email protected]>
# Copyright, 2011 - Franck Villaume <[email protected]>
# This file is part of ArchipelProject
# http://archipelproject.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import actionscheduler
def make_archipel_plugin(configuration, entity, group):
"""
    This function is the plugin factory. It will be called by the object you want
    to be plugged in. It must return a list with at least one dictionary containing
    a key for the plugin information, and a key for the plugin object.
@type configuration: Config Object
@param configuration: the general configuration object
@type entity: L{TNArchipelEntity}
@param entity: the entity that has load the plugin
@type group: string
@param group: the entry point group name in which the plugin has been loaded
@rtype: array
@return: array of dictionary containing the plugins informations and objects
"""
return [{"info": actionscheduler.TNActionScheduler.plugin_info(),
"plugin": actionscheduler.TNActionScheduler(configuration, entity, group)}]
def version():
"""
This function can be called runarchipel -v in order to get the version of the
installed plugin. You only should have to change the egg name.
    @rtype: tuple
    @return: tuple containing the package name and the version
"""
import pkg_resources
return (__name__, pkg_resources.get_distribution("archipel-agent-action-scheduler").version, [actionscheduler.TNActionScheduler.plugin_info()]) | agpl-3.0 | 6,379,112,893,619,545,000 | 6,388,979,807,856,881,000 | 42.862745 | 147 | 0.738819 | false |
LeandroRoberto/sapl | sapl/relatorios/templates/pdf_pauta_sessao_preparar_pysc.py | 1 | 9023 | import os
request=context.REQUEST
response=request.RESPONSE
session= request.SESSION
if context.REQUEST['data']!='':
dat_inicio_sessao = context.REQUEST['data']
    pauta = [] # list holding the order-of-the-day agenda to be printed
    data = context.pysc.data_converter_pysc(dat_inicio_sessao) # converts the date to yyyy/mm/dd format
codigo = context.REQUEST['cod_sessao_plen']
    # select the matters that make up the agenda on the chosen date
for sessao in context.zsql.sessao_plenaria_obter_zsql(dat_inicio_sessao=data, cod_sessao_plen=codigo, ind_excluido=0):
        inf_basicas_dic = {} # dictionary that will store the basic information of the plenary session
        # select the plenary session type
tipo_sessao = context.zsql.tipo_sessao_plenaria_obter_zsql(tip_sessao=sessao.tip_sessao,ind_excluido=0)[0]
inf_basicas_dic["nom_sessao"] = tipo_sessao.nom_sessao
inf_basicas_dic["num_sessao_plen"] = sessao.num_sessao_plen
inf_basicas_dic["nom_sessao"] = tipo_sessao.nom_sessao
inf_basicas_dic["num_legislatura"] = sessao.num_legislatura
inf_basicas_dic["num_sessao_leg"] = sessao.num_sessao_leg
inf_basicas_dic["dat_inicio_sessao"] = sessao.dat_inicio_sessao
inf_basicas_dic["hr_inicio_sessao"] = sessao.hr_inicio_sessao
inf_basicas_dic["dat_fim_sessao"] = sessao.dat_fim_sessao
inf_basicas_dic["hr_fim_sessao"] = sessao.hr_fim_sessao
        # List of the Expediente matters, including the processing status
lst_expediente_materia=[]
for expediente_materia in context.zsql.votacao_expediente_materia_obter_zsql(dat_ordem=data,cod_sessao_plen=codigo,ind_excluido=0):
            # select the details of a matter
materia = context.zsql.materia_obter_zsql(cod_materia=expediente_materia.cod_materia)[0]
dic_expediente_materia = {}
dic_expediente_materia["num_ordem"] = expediente_materia.num_ordem
dic_expediente_materia["id_materia"] = materia.sgl_tipo_materia+" - "+materia.des_tipo_materia+" No. "+str(materia.num_ident_basica)+"/"+str(materia.ano_ident_basica)
dic_expediente_materia["txt_ementa"] = materia.txt_ementa
dic_expediente_materia["ordem_observacao"] = expediente_materia.ordem_observacao
dic_expediente_materia["des_numeracao"]=""
numeracao = context.zsql.numeracao_obter_zsql(cod_materia=expediente_materia.cod_materia)
if len(numeracao):
numeracao = numeracao[0]
dic_expediente_materia["des_numeracao"] = str(numeracao.num_materia)+"/"+str(numeracao.ano_materia)
dic_expediente_materia["nom_autor"] = ''
autoria = context.zsql.autoria_obter_zsql(cod_materia=expediente_materia.cod_materia, ind_primeiro_autor=1)
            if len(autoria) > 0: # if an author exists
autoria = autoria[0]
autor = context.zsql.autor_obter_zsql(cod_autor=autoria.cod_autor)
if len(autor) > 0:
autor = autor[0]
if autor.des_tipo_autor == "Parlamentar":
parlamentar = context.zsql.parlamentar_obter_zsql(cod_parlamentar=autor.cod_parlamentar)[0]
dic_expediente_materia["nom_autor"] = parlamentar.nom_parlamentar
elif autor.des_tipo_autor == "Comissao":
comissao = context.zsql.comissao_obter_zsql(cod_comissao=autor.cod_comissao)[0]
dic_expediente_materia["nom_autor"] = comissao.nom_comissao
else:
dic_expediente_materia["nom_autor"] = autor.nom_autor
dic_expediente_materia["des_turno"]=""
dic_expediente_materia["des_situacao"] = ""
tramitacao = context.zsql.tramitacao_obter_zsql(cod_materia=expediente_materia.cod_materia, ind_ult_tramitacao=1)
if len(tramitacao):
tramitacao = tramitacao[0]
if tramitacao.sgl_turno != "":
for turno in [("P","Primeiro"), ("S","Segundo"), ("U","Único"), ("F","Final"), ("L","Suplementar"), ("A","Votação Única em Regime de Urgência"), ("B","1ª Votação"), ("C","2ª e 3ª Votações")]:
if tramitacao.sgl_turno == turno[0]:
dic_expediente_materia["des_turno"] = turno[1]
dic_expediente_materia["des_situacao"] = tramitacao.des_status
if dic_expediente_materia["des_situacao"]==None:
dic_expediente_materia["des_situacao"] = " "
lst_expediente_materia.append(dic_expediente_materia)
        # List of the Ordem do Dia (order of the day) matters, including the processing status
lst_votacao=[]
for votacao in context.zsql.votacao_ordem_dia_obter_zsql(dat_ordem=data,cod_sessao_plen=codigo,ind_excluido=0):
            # select the details of a matter
materia = context.zsql.materia_obter_zsql(cod_materia=votacao.cod_materia)[0]
dic_votacao = {}
dic_votacao["num_ordem"] = votacao.num_ordem
dic_votacao["id_materia"] = materia.sgl_tipo_materia+" - "+materia.des_tipo_materia+" No. "+str(materia.num_ident_basica)+"/"+str(materia.ano_ident_basica)
dic_votacao["txt_ementa"] = materia.txt_ementa
dic_votacao["ordem_observacao"] = votacao.ordem_observacao
dic_votacao["des_numeracao"]=""
numeracao = context.zsql.numeracao_obter_zsql(cod_materia=votacao.cod_materia)
if len(numeracao):
numeracao = numeracao[0]
dic_votacao["des_numeracao"] = str(numeracao.num_materia)+"/"+str(numeracao.ano_materia)
dic_votacao["nom_autor"] = ''
autoria = context.zsql.autoria_obter_zsql(cod_materia=votacao.cod_materia, ind_primeiro_autor=1)
            if len(autoria) > 0: # if an author exists
autoria = autoria[0]
autor = context.zsql.autor_obter_zsql(cod_autor=autoria.cod_autor)
if len(autor) > 0:
autor = autor[0]
if autor.des_tipo_autor == "Parlamentar":
parlamentar = context.zsql.parlamentar_obter_zsql(cod_parlamentar=autor.cod_parlamentar)[0]
dic_votacao["nom_autor"] = parlamentar.nom_parlamentar
elif autor.des_tipo_autor == "Comissao":
comissao = context.zsql.comissao_obter_zsql(cod_comissao=autor.cod_comissao)[0]
dic_votacao["nom_autor"] = comissao.nom_comissao
else:
dic_votacao["nom_autor"] = autor.nom_autor
dic_votacao["des_turno"]=""
dic_votacao["des_situacao"] = ""
tramitacao = context.zsql.tramitacao_obter_zsql(cod_materia=votacao.cod_materia, ind_ult_tramitacao=1)
if len(tramitacao):
tramitacao = tramitacao[0]
if tramitacao.sgl_turno != "":
for turno in [("P","Primeiro"), ("S","Segundo"), ("U","Único"), ("L","Suplementar"), ("A","Votação Única em Regime de Urgência"), ("B","1ª Votação"), ("C","2ª e 3ª Votações")]:
if tramitacao.sgl_turno == turno[0]:
dic_votacao["des_turno"] = turno[1]
dic_votacao["des_situacao"] = tramitacao.des_status
if dic_votacao["des_situacao"]==None:
dic_votacao["des_situacao"] = " "
lst_votacao.append(dic_votacao)
        # get the legislative house properties to build the page header and footer
cabecalho={}
        # try to fetch the house logo LOGO_CASA
if hasattr(context.sapl_documentos.props_sapl,'logo_casa.gif'):
imagem = context.sapl_documentos.props_sapl['logo_casa.gif'].absolute_url()
else:
imagem = context.imagens.absolute_url() + "/brasao_transp.gif"
        # Below, the page footer dict is generated (line 7)
casa={}
aux=context.sapl_documentos.props_sapl.propertyItems()
for item in aux:
casa[item[0]]=item[1]
localidade=context.zsql.localidade_obter_zsql(cod_localidade=casa["cod_localidade"])
data_emissao= DateTime().strftime("%d/%m/%Y")
rodape= casa
rodape['data_emissao']= data_emissao
inf_basicas_dic['nom_camara']= casa['nom_casa']
REQUEST=context.REQUEST
for local in context.zsql.localidade_obter_zsql(cod_localidade = casa['cod_localidade']):
rodape['nom_localidade']= " "+local.nom_localidade
rodape['sgl_uf']= local.sgl_uf
# return lst_votacao
sessao=session.id
caminho = context.pdf_pauta_sessao_gerar(rodape, sessao, imagem, inf_basicas_dic, lst_votacao, lst_expediente_materia)
if caminho=='aviso':
return response.redirect('mensagem_emitir_proc')
else:
response.redirect(caminho)
| gpl-3.0 | 8,665,497,993,650,787,000 | -7,976,247,496,827,260,000 | 54.432099 | 211 | 0.611693 | false |
dwaynebailey/pootle | pootle/apps/pootle_word/utils.py | 5 | 2811 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
import re
import Levenshtein
import translate
from django.utils.functional import cached_property
from pootle.core.delegate import stemmer, stopwords
class Stopwords(object):
@cached_property
def words(self):
ttk_path = translate.__path__[0]
fpath = (
os.path.join(ttk_path, "share", "stoplist-en")
if "share" in os.listdir(ttk_path)
else os.path.join(ttk_path, "..", "share", "stoplist-en"))
words = set()
with open(fpath) as f:
for line in f.read().split("\n"):
if not line:
continue
if line[0] in "<>=@":
words.add(line[1:].strip().lower())
return words
class TextStemmer(object):
def __init__(self, context):
self.context = context
def split(self, words):
return re.split(u"[^\w'-]+", words)
@property
def stopwords(self):
return stopwords.get().words
@property
def tokens(self):
return [
t.lower()
for t
in self.split(self.text)
if (len(t) > 2
and t.lower() not in self.stopwords)]
@property
def text(self):
return self.context.source_f
@property
def stemmer(self):
return stemmer.get()
@property
def stems(self):
return self.get_stems(self.tokens)
def get_stems(self, tokens):
return set(self.stemmer(t) for t in tokens)
class TextComparison(TextStemmer):
@property
def text(self):
return self.context
def jaccard_similarity(self, other):
return (
len(other.stems.intersection(self.stems))
/ float(len(set(other.stems).union(self.stems))))
def levenshtein_distance(self, other):
return (
Levenshtein.distance(self.text, other.text)
/ max(len(self.text), len(other.text)))
def tokens_present(self, other):
return (
len(set(self.tokens).intersection(other.tokens))
/ float(len(other.tokens)))
def stems_present(self, other):
return (
len(set(self.stems).intersection(other.stems))
/ float(len(other.stems)))
def similarity(self, other):
other = self.__class__(other)
return (
(self.jaccard_similarity(other)
+ self.levenshtein_distance(other)
+ self.tokens_present(other)
+ self.stems_present(other))
/ 4)
| gpl-3.0 | 123,692,360,357,058,130 | -2,596,957,472,570,382,000 | 24.788991 | 77 | 0.573106 | false |
yize/grunt-tps | tasks/lib/python/Lib/python2.7/distutils/command/upload.py | 176 | 7002 | """distutils.command.upload
Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
import os
import socket
import platform
from urllib2 import urlopen, Request, HTTPError
from base64 import standard_b64encode
import urlparse
import cStringIO as StringIO
from hashlib import md5
from distutils.errors import DistutilsOptionError
from distutils.core import PyPIRCCommand
from distutils.spawn import spawn
from distutils import log
class upload(PyPIRCCommand):
description = "upload binary package to PyPI"
user_options = PyPIRCCommand.user_options + [
('sign', 's',
'sign files to upload using gpg'),
('identity=', 'i', 'GPG identity used to sign files'),
]
boolean_options = PyPIRCCommand.boolean_options + ['sign']
def initialize_options(self):
PyPIRCCommand.initialize_options(self)
self.username = ''
self.password = ''
self.show_response = 0
self.sign = False
self.identity = None
def finalize_options(self):
PyPIRCCommand.finalize_options(self)
if self.identity and not self.sign:
raise DistutilsOptionError(
"Must use --sign for --identity to have meaning"
)
config = self._read_pypirc()
if config != {}:
self.username = config['username']
self.password = config['password']
self.repository = config['repository']
self.realm = config['realm']
# getting the password from the distribution
# if previously set by the register command
if not self.password and self.distribution.password:
self.password = self.distribution.password
def run(self):
if not self.distribution.dist_files:
raise DistutilsOptionError("No dist file created in earlier command")
for command, pyversion, filename in self.distribution.dist_files:
self.upload_file(command, pyversion, filename)
def upload_file(self, command, pyversion, filename):
# Makes sure the repository URL is compliant
schema, netloc, url, params, query, fragments = \
urlparse.urlparse(self.repository)
if params or query or fragments:
raise AssertionError("Incompatible url %s" % self.repository)
if schema not in ('http', 'https'):
raise AssertionError("unsupported schema " + schema)
# Sign if requested
if self.sign:
gpg_args = ["gpg", "--detach-sign", "-a", filename]
if self.identity:
gpg_args[2:2] = ["--local-user", self.identity]
spawn(gpg_args,
dry_run=self.dry_run)
# Fill in the data - send all the meta-data in case we need to
# register a new release
f = open(filename,'rb')
try:
content = f.read()
finally:
f.close()
meta = self.distribution.metadata
data = {
# action
':action': 'file_upload',
'protcol_version': '1',
# identify release
'name': meta.get_name(),
'version': meta.get_version(),
# file content
'content': (os.path.basename(filename),content),
'filetype': command,
'pyversion': pyversion,
'md5_digest': md5(content).hexdigest(),
# additional meta-data
'metadata_version' : '1.0',
'summary': meta.get_description(),
'home_page': meta.get_url(),
'author': meta.get_contact(),
'author_email': meta.get_contact_email(),
'license': meta.get_licence(),
'description': meta.get_long_description(),
'keywords': meta.get_keywords(),
'platform': meta.get_platforms(),
'classifiers': meta.get_classifiers(),
'download_url': meta.get_download_url(),
# PEP 314
'provides': meta.get_provides(),
'requires': meta.get_requires(),
'obsoletes': meta.get_obsoletes(),
}
comment = ''
if command == 'bdist_rpm':
dist, version, id = platform.dist()
if dist:
comment = 'built for %s %s' % (dist, version)
elif command == 'bdist_dumb':
comment = 'built for %s' % platform.platform(terse=1)
data['comment'] = comment
if self.sign:
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
open(filename+".asc").read())
# set up the authentication
auth = "Basic " + standard_b64encode(self.username + ":" +
self.password)
# Build up the MIME payload for the POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = StringIO.StringIO()
for key, value in data.items():
# handle multiple entries for the same name
if not isinstance(value, list):
value = [value]
for value in value:
if isinstance(value, tuple):
fn = ';filename="%s"' % value[0]
value = value[1]
else:
fn = ""
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"'%key)
body.write(fn)
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
body.write('\n') # write an extra newline (lurve Macs)
body.write(end_boundary)
body.write("\n")
body = body.getvalue()
self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
# build the Request
headers = {'Content-type':
'multipart/form-data; boundary=%s' % boundary,
'Content-length': str(len(body)),
'Authorization': auth}
request = Request(self.repository, data=body,
headers=headers)
# send the data
try:
result = urlopen(request)
status = result.getcode()
reason = result.msg
if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
self.announce(msg, log.INFO)
except socket.error, e:
self.announce(str(e), log.ERROR)
return
except HTTPError, e:
status = e.code
reason = e.msg
if status == 200:
self.announce('Server response (%s): %s' % (status, reason),
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (status, reason),
log.ERROR)
| mit | 6,492,222,329,891,841,000 | 1,598,560,541,843,423,500 | 35.092784 | 84 | 0.537846 | false |
fitermay/intellij-community | python/lib/Lib/_threading_local.py | 91 | 6946 | """Thread-local objects.
(Note that this module provides a Python version of the threading.local
class. Depending on the version of Python you're using, there may be a
faster one available. You should always import the `local` class from
`threading`.)
Thread-local objects support the management of thread-local data.
If you have data that you want to be local to a thread, simply create
a thread-local object and use its attributes:
>>> mydata = local()
>>> mydata.number = 42
>>> mydata.number
42
You can also access the local-object's dictionary:
>>> mydata.__dict__
{'number': 42}
>>> mydata.__dict__.setdefault('widgets', [])
[]
>>> mydata.widgets
[]
What's important about thread-local objects is that their data are
local to a thread. If we access the data in a different thread:
>>> log = []
>>> def f():
... items = mydata.__dict__.items()
... items.sort()
... log.append(items)
... mydata.number = 11
... log.append(mydata.number)
>>> import threading
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[], 11]
we get different data. Furthermore, changes made in the other thread
don't affect data seen in this thread:
>>> mydata.number
42
Of course, values you get from a local object, including a __dict__
attribute, are for whatever thread was current at the time the
attribute was read. For that reason, you generally don't want to save
these values across threads, as they apply only to the thread they
came from.
You can create custom local objects by subclassing the local class:
>>> class MyLocal(local):
... number = 2
... initialized = False
... def __init__(self, **kw):
... if self.initialized:
... raise SystemError('__init__ called too many times')
... self.initialized = True
... self.__dict__.update(kw)
... def squared(self):
... return self.number ** 2
This can be useful to support default values, methods and
initialization. Note that if you define an __init__ method, it will be
called each time the local object is used in a separate thread. This
is necessary to initialize each thread's dictionary.
Now if we create a local object:
>>> mydata = MyLocal(color='red')
Now we have a default number:
>>> mydata.number
2
an initial color:
>>> mydata.color
'red'
>>> del mydata.color
And a method that operates on the data:
>>> mydata.squared()
4
As before, we can access the data in a separate thread:
>>> log = []
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[('color', 'red'), ('initialized', True)], 11]
without affecting this thread's data:
>>> mydata.number
2
>>> mydata.color
Traceback (most recent call last):
...
AttributeError: 'MyLocal' object has no attribute 'color'
Note that subclasses can define slots, but they are not thread
local. They are shared across threads:
>>> class MyLocal(local):
... __slots__ = 'number'
>>> mydata = MyLocal()
>>> mydata.number = 42
>>> mydata.color = 'red'
So, the separate thread:
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
affects what we see:
>>> mydata.number
11
>>> del mydata
"""
__all__ = ["local"]
# We need to use objects from the threading module, but the threading
# module may also want to use our `local` class, if support for locals
# isn't compiled in to the `thread` module. This creates potential problems
# with circular imports. For that reason, we don't import `threading`
# until the bottom of this file (a hack sufficient to worm around the
# potential problems). Note that almost all platforms do have support for
# locals in the `thread` module, and there is no circular import problem
# then, so problems introduced by fiddling the order of imports here won't
# manifest on most boxes.
class _localbase(object):
__slots__ = '_local__key', '_local__args', '_local__lock'
def __new__(cls, *args, **kw):
self = object.__new__(cls)
key = '_local__key', 'thread.local.' + str(id(self))
object.__setattr__(self, '_local__key', key)
object.__setattr__(self, '_local__args', (args, kw))
object.__setattr__(self, '_local__lock', RLock())
        if (args or kw) and (cls.__init__ is object.__init__):
raise TypeError("Initialization arguments are not supported")
# We need to create the thread dict in anticipation of
# __init__ being called, to make sure we don't call it
# again ourselves.
dict = object.__getattribute__(self, '__dict__')
currentThread().__dict__[key] = dict
return self
def _patch(self):
key = object.__getattribute__(self, '_local__key')
d = currentThread().__dict__.get(key)
if d is None:
d = {}
currentThread().__dict__[key] = d
object.__setattr__(self, '__dict__', d)
# we have a new instance dict, so call out __init__ if we have
# one
cls = type(self)
if cls.__init__ is not object.__init__:
args, kw = object.__getattribute__(self, '_local__args')
cls.__init__(self, *args, **kw)
else:
object.__setattr__(self, '__dict__', d)
class local(_localbase):
def __getattribute__(self, name):
lock = object.__getattribute__(self, '_local__lock')
lock.acquire()
try:
_patch(self)
return object.__getattribute__(self, name)
finally:
lock.release()
def __setattr__(self, name, value):
lock = object.__getattribute__(self, '_local__lock')
lock.acquire()
try:
_patch(self)
return object.__setattr__(self, name, value)
finally:
lock.release()
def __delattr__(self, name):
lock = object.__getattribute__(self, '_local__lock')
lock.acquire()
try:
_patch(self)
return object.__delattr__(self, name)
finally:
lock.release()
def __del__(self):
import threading
key = object.__getattribute__(self, '_local__key')
try:
threads = list(threading.enumerate())
except:
# If enumerate fails, as it seems to do during
# shutdown, we'll skip cleanup under the assumption
# that there is nothing to clean up.
return
for thread in threads:
try:
__dict__ = thread.__dict__
except AttributeError:
# Thread is dying, rest in peace.
continue
if key in __dict__:
try:
del __dict__[key]
except KeyError:
pass # didn't have anything in this thread
from threading import currentThread, RLock
| apache-2.0 | -168,200,446,611,563,700 | -210,457,493,222,097,900 | 27.821577 | 76 | 0.600346 | false |
virgree/odoo | addons/l10n_uy/__openerp__.py | 260 | 1807 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Openerp.uy <[email protected]>
#    OpenERP localization project for Uruguay
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Uruguay - Chart of Accounts',
'version': '0.1',
'author': 'Uruguay l10n Team & Guillem Barba',
'category': 'Localization/Account Charts',
'website': 'https://launchpad.net/openerp-uruguay',
'description': """
General Chart of Accounts.
==========================
Provide Templates for Chart of Accounts, Taxes for Uruguay.
""",
'license': 'AGPL-3',
'depends': ['account'],
'data': [
'account_types.xml',
'taxes_code_template.xml',
'account_chart_template.xml',
'taxes_template.xml',
'l10n_uy_wizard.xml',
],
'demo': [],
'auto_install': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -2,412,884,556,799,121,000 | -34,954,224,724,616,856 | 34.411765 | 78 | 0.594131 | false |
simonwydooghe/ansible | test/units/modules/storage/netapp/test_na_ontap_nvme_namespace.py | 48 | 7361 | # (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit tests ONTAP Ansible module: na_ontap_nvme_namespace'''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_nvme_namespace \
import NetAppONTAPNVMENamespace as my_module
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
''' mock server connection to ONTAP host '''
def __init__(self, kind=None):
''' save arguments '''
self.type = kind
self.xml_in = None
self.xml_out = None
def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
''' mock invoke_successfully returning xml data '''
self.xml_in = xml
if self.type == 'namespace':
xml = self.build_namespace_info()
elif self.type == 'quota_fail':
raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
self.xml_out = xml
return xml
@staticmethod
def build_namespace_info():
''' build xml data for namespace-info '''
xml = netapp_utils.zapi.NaElement('xml')
data = {'num-records': 2,
'attributes-list': [{'nvme-namespace-info': {'path': 'abcd/vol'}},
{'nvme-namespace-info': {'path': 'xyz/vol'}}]}
xml.translate_struct(data)
return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
self.server = MockONTAPConnection()
self.onbox = False
def set_default_args(self):
if self.onbox:
hostname = '10.193.75.3'
username = 'admin'
password = 'netapp1!'
vserver = 'ansible'
ostype = 'linux'
path = 'abcd/vol'
size = 20
else:
hostname = 'hostname'
username = 'username'
password = 'password'
vserver = 'vserver'
ostype = 'linux'
path = 'abcd/vol'
size = 20
return dict({
'hostname': hostname,
'username': username,
'password': password,
'ostype': ostype,
'vserver': vserver,
'path': path,
'size': size
})
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
my_module()
print('Info: %s' % exc.value.args[0]['msg'])
def test_ensure_get_called(self):
''' test get_namespace() for non-existent namespace'''
set_module_args(self.set_default_args())
my_obj = my_module()
my_obj.server = self.server
assert my_obj.get_namespace() is None
def test_ensure_get_called_existing(self):
''' test get_namespace() for existing namespace'''
set_module_args(self.set_default_args())
my_obj = my_module()
my_obj.server = MockONTAPConnection(kind='namespace')
assert my_obj.get_namespace()
@patch('ansible.modules.storage.netapp.na_ontap_nvme_namespace.NetAppONTAPNVMENamespace.create_namespace')
def test_successful_create(self, create_namespace):
''' creating namespace and testing idempotency '''
set_module_args(self.set_default_args())
my_obj = my_module()
if not self.onbox:
my_obj.server = self.server
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert exc.value.args[0]['changed']
create_namespace.assert_called_with()
# to reset na_helper from remembering the previous 'changed' value
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('namespace')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_nvme_namespace.NetAppONTAPNVMENamespace.delete_namespace')
def test_successful_delete(self, delete_namespace):
''' deleting namespace and testing idempotency '''
data = self.set_default_args()
data['state'] = 'absent'
set_module_args(data)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('namespace')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert exc.value.args[0]['changed']
delete_namespace.assert_called_with()
# to reset na_helper from remembering the previous 'changed' value
my_obj = my_module()
if not self.onbox:
my_obj.server = self.server
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert not exc.value.args[0]['changed']
def test_if_all_methods_catch_exception(self):
module_args = {}
module_args.update(self.set_default_args())
set_module_args(module_args)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('quota_fail')
with pytest.raises(AnsibleFailJson) as exc:
my_obj.get_namespace()
assert 'Error fetching namespace info:' in exc.value.args[0]['msg']
with pytest.raises(AnsibleFailJson) as exc:
my_obj.create_namespace()
assert 'Error creating namespace for path' in exc.value.args[0]['msg']
with pytest.raises(AnsibleFailJson) as exc:
my_obj.delete_namespace()
assert 'Error deleting namespace for path' in exc.value.args[0]['msg']
| gpl-3.0 | -7,701,812,992,768,447,000 | -8,637,653,910,392,769,000 | 36.365482 | 110 | 0.612688 | false |
icereval/osf.io | api/requests/serializers.py | 1 | 3573 | from django.db import IntegrityError
from rest_framework import exceptions
from rest_framework import serializers as ser
from api.base.exceptions import Conflict
from api.base.utils import absolute_reverse, get_user_auth
from api.base.serializers import JSONAPISerializer, LinksField, VersionedDateTimeField, RelationshipField
from osf.models import NodeRequest
from osf.utils.workflows import DefaultStates, RequestTypes
class NodeRequestSerializer(JSONAPISerializer):
class Meta:
type_ = 'node-requests'
filterable_fields = frozenset([
'creator',
'request_type',
'machine_state',
'created',
'id'
])
id = ser.CharField(source='_id', read_only=True)
request_type = ser.ChoiceField(read_only=True, required=False, choices=RequestTypes.choices())
machine_state = ser.ChoiceField(read_only=True, required=False, choices=DefaultStates.choices())
comment = ser.CharField(required=False, allow_blank=True, max_length=65535)
created = VersionedDateTimeField(read_only=True)
modified = VersionedDateTimeField(read_only=True)
date_last_transitioned = VersionedDateTimeField(read_only=True)
target = RelationshipField(
read_only=True,
related_view='nodes:node-detail',
related_view_kwargs={'node_id': '<target._id>'},
filter_key='target___id',
)
creator = RelationshipField(
read_only=True,
related_view='users:user-detail',
related_view_kwargs={'user_id': '<creator._id>'},
filter_key='creator___id',
)
links = LinksField({
'self': 'get_absolute_url',
'target': 'get_target_url'
})
def get_absolute_url(self, obj):
return absolute_reverse('requests:node-request-detail', kwargs={'request_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version']})
def get_target_url(self, obj):
return absolute_reverse('nodes:node-detail', kwargs={'node_id': obj.target._id, 'version': self.context['request'].parser_context['kwargs']['version']})
def create(self, validated_data):
raise NotImplementedError()
class NodeRequestCreateSerializer(NodeRequestSerializer):
request_type = ser.ChoiceField(required=True, choices=RequestTypes.choices())
def create(self, validated_data):
auth = get_user_auth(self.context['request'])
if not auth.user:
raise exceptions.PermissionDenied
try:
node = self.context['view'].get_node()
except exceptions.PermissionDenied:
node = self.context['view'].get_node(check_object_permissions=False)
if auth.user in node.contributors:
raise exceptions.PermissionDenied('You cannot request access to a node you contribute to.')
raise
comment = validated_data.pop('comment', '')
request_type = validated_data.pop('request_type', None)
if not request_type:
raise exceptions.ValidationError('You must specify a valid request_type.')
try:
node_request = NodeRequest.objects.create(
target=node,
creator=auth.user,
comment=comment,
machine_state=DefaultStates.INITIAL.value,
request_type=request_type
)
node_request.save()
except IntegrityError:
raise Conflict('Users may not have more than one {} request per node.'.format(request_type))
node_request.run_submit(auth.user)
return node_request
| apache-2.0 | 5,703,426,101,346,257,000 | 7,646,756,712,607,338,000 | 37.419355 | 167 | 0.65799 | false |
shsingh/ansible | lib/ansible/modules/database/postgresql/postgresql_ext.py | 2 | 13576 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: postgresql_ext
short_description: Add or remove PostgreSQL extensions from a database
description:
- Add or remove PostgreSQL extensions from a database.
version_added: '1.9'
options:
name:
description:
- Name of the extension to add or remove.
required: true
type: str
aliases:
- ext
db:
description:
- Name of the database to add or remove the extension to/from.
required: true
type: str
aliases:
- login_db
schema:
description:
- Name of the schema to add the extension to.
version_added: '2.8'
type: str
session_role:
description:
- Switch to session_role after connecting.
- The specified session_role must be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though the session_role were the one that had logged in originally.
type: str
version_added: '2.8'
state:
description:
- The database extension state.
default: present
choices: [ absent, present ]
type: str
cascade:
description:
- Automatically install/remove any extensions that this extension depends on
that are not already installed/removed (supported since PostgreSQL 9.6).
type: bool
default: no
version_added: '2.8'
login_unix_socket:
description:
- Path to a Unix domain socket for local connections.
type: str
version_added: '2.8'
ssl_mode:
description:
- Determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
- See U(https://www.postgresql.org/docs/current/static/libpq-ssl.html) for more information on the modes.
- Default of C(prefer) matches libpq default.
type: str
default: prefer
choices: [ allow, disable, prefer, require, verify-ca, verify-full ]
version_added: '2.8'
ca_cert:
description:
- Specifies the name of a file containing SSL certificate authority (CA) certificate(s).
- If the file exists, the server's certificate will be verified to be signed by one of these authorities.
type: str
aliases: [ ssl_rootcert ]
version_added: '2.8'
version:
description:
- Extension version to add or update to. Has effect with I(state=present) only.
- If not specified, the latest extension version will be created.
- It can't downgrade an extension version.
      When a version downgrade is needed, remove the extension and create a new one with the appropriate version.
- Set I(version=latest) to update the extension to the latest available version.
type: str
version_added: '2.9'
seealso:
- name: PostgreSQL extensions
description: General information about PostgreSQL extensions.
link: https://www.postgresql.org/docs/current/external-extensions.html
- name: CREATE EXTENSION reference
description: Complete reference of the CREATE EXTENSION command documentation.
link: https://www.postgresql.org/docs/current/sql-createextension.html
- name: ALTER EXTENSION reference
description: Complete reference of the ALTER EXTENSION command documentation.
link: https://www.postgresql.org/docs/current/sql-alterextension.html
- name: DROP EXTENSION reference
description: Complete reference of the DROP EXTENSION command documentation.
  link: https://www.postgresql.org/docs/current/sql-dropextension.html
notes:
- The default authentication assumes that you are either logging in as
or sudo'ing to the C(postgres) account on the host.
- This module uses I(psycopg2), a Python PostgreSQL database adapter.
- You must ensure that C(psycopg2) is installed on the host before using this module.
- If the remote host is the PostgreSQL server (which is the default case),
then PostgreSQL must also be installed on the remote host.
- For Ubuntu-based systems, install the C(postgresql), C(libpq-dev),
and C(python-psycopg2) packages on the remote host before using this module.
requirements: [ psycopg2 ]
author:
- Daniel Schep (@dschep)
- Thomas O'Donnell (@andytom)
- Sandro Santilli (@strk)
- Andrew Klychkov (@Andersson007)
extends_documentation_fragment: postgres
'''
EXAMPLES = r'''
- name: Adds postgis extension to the database acme in the schema foo
postgresql_ext:
name: postgis
db: acme
schema: foo
- name: Removes postgis extension to the database acme
postgresql_ext:
name: postgis
db: acme
state: absent
- name: Adds earthdistance extension to the database template1 cascade
postgresql_ext:
name: earthdistance
db: template1
cascade: true
# In the example below, if earthdistance extension is installed,
# it will be removed too because it depends on cube:
- name: Removes cube extension from the database acme cascade
postgresql_ext:
name: cube
db: acme
cascade: yes
state: absent
- name: Create extension foo of version 1.2 or update it if it's already created
postgresql_ext:
db: acme
name: foo
version: 1.2
- name: Assuming extension foo is created, update it to the latest version
postgresql_ext:
db: acme
name: foo
version: latest
'''
RETURN = r'''
query:
description: List of executed queries.
returned: always
type: list
sample: ["DROP EXTENSION \"acme\""]
'''
import traceback
from distutils.version import LooseVersion
try:
from psycopg2.extras import DictCursor
except ImportError:
# psycopg2 is checked by connect_to_db()
# from ansible.module_utils.postgres
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.postgres import (
connect_to_db,
get_conn_params,
postgres_common_argument_spec,
)
from ansible.module_utils._text import to_native
executed_queries = []
class NotSupportedError(Exception):
pass
# ===========================================
# PostgreSQL module specific support methods.
#
def ext_exists(cursor, ext):
query = "SELECT * FROM pg_extension WHERE extname=%(ext)s"
cursor.execute(query, {'ext': ext})
return cursor.rowcount == 1
def ext_delete(cursor, ext, cascade):
if ext_exists(cursor, ext):
query = "DROP EXTENSION \"%s\"" % ext
if cascade:
query += " CASCADE"
cursor.execute(query)
executed_queries.append(query)
return True
else:
return False
def ext_update_version(cursor, ext, version):
"""Update extension version.
Return True if success.
Args:
cursor (cursor) -- cursor object of psycopg2 library
ext (str) -- extension name
version (str) -- extension version
"""
if version != 'latest':
query = ("ALTER EXTENSION \"%s\"" % ext)
cursor.execute(query + " UPDATE TO %(ver)s", {'ver': version})
executed_queries.append(cursor.mogrify(query + " UPDATE TO %(ver)s", {'ver': version}))
else:
query = ("ALTER EXTENSION \"%s\" UPDATE" % ext)
cursor.execute(query)
executed_queries.append(query)
return True
def ext_create(cursor, ext, schema, cascade, version):
query = "CREATE EXTENSION \"%s\"" % ext
if schema:
query += " WITH SCHEMA \"%s\"" % schema
if version:
query += " VERSION %(ver)s"
if cascade:
query += " CASCADE"
if version:
cursor.execute(query, {'ver': version})
executed_queries.append(cursor.mogrify(query, {'ver': version}))
else:
cursor.execute(query)
executed_queries.append(query)
return True
def ext_get_versions(cursor, ext):
"""
Get the current created extension version and available versions.
Return tuple (current_version, [list of available versions]).
Note: the list of available versions contains only versions
that higher than the current created version.
If the extension is not created, this list will contain all
available versions.
Args:
cursor (cursor) -- cursor object of psycopg2 library
ext (str) -- extension name
"""
# 1. Get the current extension version:
query = ("SELECT extversion FROM pg_catalog.pg_extension "
"WHERE extname = %(ext)s")
current_version = '0'
cursor.execute(query, {'ext': ext})
res = cursor.fetchone()
if res:
current_version = res[0]
# 2. Get available versions:
query = ("SELECT version FROM pg_available_extension_versions "
"WHERE name = %(ext)s")
cursor.execute(query, {'ext': ext})
res = cursor.fetchall()
available_versions = []
if res:
# Make the list of available versions:
for line in res:
if LooseVersion(line[0]) > LooseVersion(current_version):
available_versions.append(line['version'])
if current_version == '0':
current_version = False
return (current_version, available_versions)
# ===========================================
# Module execution.
#
def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
db=dict(type="str", required=True, aliases=["login_db"]),
ext=dict(type="str", required=True, aliases=["name"]),
schema=dict(type="str"),
state=dict(type="str", default="present", choices=["absent", "present"]),
cascade=dict(type="bool", default=False),
session_role=dict(type="str"),
version=dict(type="str"),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
ext = module.params["ext"]
schema = module.params["schema"]
state = module.params["state"]
cascade = module.params["cascade"]
version = module.params["version"]
changed = False
if version and state == 'absent':
module.warn("Parameter version is ignored when state=absent")
conn_params = get_conn_params(module, module.params)
db_connection = connect_to_db(module, conn_params, autocommit=True)
cursor = db_connection.cursor(cursor_factory=DictCursor)
try:
# Get extension info and available versions:
curr_version, available_versions = ext_get_versions(cursor, ext)
if state == "present":
if version == 'latest':
if available_versions:
version = available_versions[-1]
else:
version = ''
if version:
# If the specific version is passed and it is not available for update:
if version not in available_versions:
if not curr_version:
module.fail_json(msg="Passed version '%s' is not available" % version)
elif LooseVersion(curr_version) == LooseVersion(version):
changed = False
else:
module.fail_json(msg="Passed version '%s' is lower than "
"the current created version '%s' or "
"the passed version is not available" % (version, curr_version))
# If the specific version is passed and it is higher that the current version:
if curr_version and version:
if LooseVersion(curr_version) < LooseVersion(version):
if module.check_mode:
changed = True
else:
changed = ext_update_version(cursor, ext, version)
# If the specific version is passed and it is created now:
if curr_version == version:
changed = False
# If the ext doesn't exist and installed:
elif not curr_version and available_versions:
if module.check_mode:
changed = True
else:
changed = ext_create(cursor, ext, schema, cascade, version)
# If version is not passed:
else:
if not curr_version:
# If the ext doesn't exist and it's installed:
if available_versions:
if module.check_mode:
changed = True
else:
changed = ext_create(cursor, ext, schema, cascade, version)
# If the ext doesn't exist and not installed:
else:
module.fail_json(msg="Extension %s is not installed" % ext)
elif state == "absent":
if curr_version:
if module.check_mode:
changed = True
else:
changed = ext_delete(cursor, ext, cascade)
else:
changed = False
except Exception as e:
db_connection.close()
module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())
db_connection.close()
module.exit_json(changed=changed, db=module.params["db"], ext=ext, queries=executed_queries)
if __name__ == '__main__':
main()
| gpl-3.0 | 4,887,789,334,779,251,000 | 8,300,287,217,754,827,000 | 31.713253 | 129 | 0.625516 | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20160323B.py | 1 | 5877 | """
[2016-03-23] Challenge #259 [Intermediate] Mahjong Hands
https://www.reddit.com/r/dailyprogrammer/comments/4bmdwz/20160323_challenge_259_intermediate_mahjong_hands/
# Description
You are the biggest, baddest mahjong player around. Your enemies tremble at your presence on the battlefield, and you
can barely walk ten steps before a fan begs you for an autograph.
However, you have a dark secret that would ruin you if it ever came to light. You're terrible at determining whether a
hand is a winning hand. For now, you've been able to bluff and bluster your way, but you know that one day you won't be
able to get away with it.
As such, you've decided to write a program to assist you!
## Further Details
Mahjong (not to be confused with [mahjong solitaire](http://en.wikipedia.org/wiki/Mahjong_solitaire)) is a game where
hands are composed from combinations of tiles. There are a number of variants of mahjong, but for this challenge, we
will consider a simplified variant of Japanese Mahjong which is also known as Riichi Mahjong.
## Basic Version
There are three suits in this variant, "Bamboo", "Circle" and "Character". Every tile that belongs to these suits has a
value that ranges from 1 - 9.
To complete a hand, tiles are organised into groups. If every tile in a hand belongs to a single group (and each tile
can only be used once), the hand is a winning hand.
For now, we shall consider the groups "Pair", "Set" and "Sequence". They are composed as follows:
Pair - Two tiles with the same suit and value
Set - Three tiles with the same suit and value
Sequence - Three tiles with the same suit, and which increment in value, such as "Circle 2, Circle 3, Circle 4". There
is no value wrapping so "Circle 9, Circle 1, Circle 2" would not be considered valid.
A hand is composed of 14 tiles.
## Bonus 1 - Adding Quads
There is actually a fourth group called a "Quad". It is just like a pair and a set, except it is composed of four tiles.
What makes this group special is that a hand containing quads will actually be larger than 14 tiles, 1 extra tile for
every quad. This is fine, as long as there is *1, and only 1 pair*.
## Bonus 2 - Adding Honour Tiles
In addition to the tiles belonging to the three suits, there are 7 additional tiles. These tiles have no value, and are
collectively known as "honour" tiles.
As they have no value, they cannot be members of a sequence. Furthermore, they can only be part of a set or pair with
tiles that are exactly the same. For example, "Red Dragon, Red Dragon, Red Dragon" would be a valid set, but "Red
Dragon, Green Dragon, Red Dragon" would not.
These additional tiles are:
* Green Dragon
* Red Dragon
* White Dragon
* North Wind
* East Wind
* South Wind
* West Wind
## Bonus 3 - Seven Pairs
There are a number of special hands that are an exception to the above rules. One such hand is "Seven Pairs". As the
name suggests, it is a hand composed of seven pairs.
# Formal Inputs & Outputs
## Input description
### Basic
You will be provided with N on a single line, followed by N lines of the following format:
<tile suit>,<value>
### Bonus 2
In addition, the lines may be of the format:
<honour tile>
## Output description
You should output whether the hand is a winning hand or not.
# Sample Inputs and Outputs
## Sample Input (Standard)
14
Circle,4
Circle,5
Circle,6
Bamboo,1
Bamboo,2
Bamboo,3
Character,2
Character,2
Character,2
Circle,1
Circle,1
Bamboo,7
Bamboo,8
Bamboo,9
## Sample Output (Standard)
Winning hand
## Sample Input (Standard)
14
Circle,4
Bamboo,1
Circle,5
Bamboo,2
Character,2
Bamboo,3
Character,2
Circle,6
Character,2
Circle,1
Bamboo,8
Circle,1
Bamboo,7
Bamboo,9
## Sample Output (Standard)
Winning hand
## Sample Input (Standard)
14
Circle,4
Circle,5
Circle,6
Circle,4
Circle,5
Circle,6
Circle,1
Circle,1
Bamboo,7
Bamboo,8
Bamboo,9
Circle,4
Circle,5
Circle,6
## Sample Output (Standard)
Winning hand
## Sample Input (Bonus 1)
15
Circle,4
Circle,5
Circle,6
Bamboo,1
Bamboo,2
Bamboo,3
Character,2
Character,2
Character,2
Character,2
Circle,1
Circle,1
Bamboo,7
Bamboo,8
Bamboo,9
## Sample Output (Bonus 1)
Winning hand
## Sample Input (Bonus 1)
16
Circle,4
Circle,5
Circle,6
Bamboo,1
Bamboo,2
Bamboo,3
Character,2
Character,2
Character,2
Character,2
Circle,1
Circle,1
Circle,1
Bamboo,7
Bamboo,8
Bamboo,9
## Sample Output (Bonus 1)
Not a winning hand
## Sample Input (Bonus 2)
14
Circle,4
Circle,5
Circle,6
Bamboo,1
Bamboo,2
Bamboo,3
Red Dragon
Red Dragon
Red Dragon
Circle,1
Circle,1
Bamboo,7
Bamboo,8
Bamboo,9
## Sample Output (Bonus 2)
Winning hand
## Sample Input (Bonus 2)
14
Circle,4
Circle,5
Circle,6
Bamboo,1
Bamboo,2
Bamboo,3
Red Dragon
Green Dragon
White Dragon
Circle,1
Circle,1
Bamboo,7
Bamboo,8
Bamboo,9
## Sample Output (Bonus 2)
Not a winning hand
## Sample Input (Bonus 3)
14
Circle,4
Circle,4
Character,5
Character,5
Bamboo,5
Bamboo,5
Circle,5
Circle,5
Circle,7
Circle,7
Circle,9
Circle,9
Circle,9
Circle,9
## Sample Output (Bonus 3)
Winning hand
# Notes
None of the bonus components depend on each other, and can be implemented in any order. The test cases do not presume
completion of earlier bonus components. The order is just the recommended implementation order.
Many thanks to Redditor /u/oketa for this submission to /r/dailyprogrammer_ideas. If you have any ideas, please submit
them there!
"""
def main():
pass
if __name__ == "__main__":
main()
| mit | 5,363,391,328,340,372,000 | 6,684,037,468,069,034,000 | 25.713636 | 120 | 0.693041 | false |
waldocarter/p2pool | nattraverso/pynupnp/soap.py | 288 | 3547 | """
This module is a SOAP client using twisted's deferreds.
It uses the SOAPpy package.
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{[email protected]<mailto:[email protected]>}
@version: 0.1.0
"""
__revision__ = "$id"
import SOAPpy, logging
from SOAPpy.Config import Config
from twisted.web import client, error
#General config
Config.typed = False
class SoapError(Exception):
"""
This is a SOAP error message, not an HTTP error message.
The content of this error is a SOAPpy structure representing the
SOAP error message.
"""
pass
class SoapProxy:
"""
Proxy for an url to which we send SOAP rpc calls.
"""
def __init__(self, url, prefix):
"""
Init the proxy, it will connect to the given url, using the
given soap namespace.
@param url: The url of the remote host to call
@param prefix: The namespace prefix to use, eg.
'urn:schemas-upnp-org:service:WANIPConnection:1'
"""
logging.debug("Soap Proxy: '%s', prefix: '%s'", url, prefix)
self._url = url
self._prefix = prefix
def call(self, method, **kwargs):
"""
Call the given remote method with the given arguments, as keywords.
Returns a deferred, called with SOAPpy structure representing
the soap response.
@param method: The method name to call, eg. 'GetExternalIP'
@param kwargs: The parameters of the call, as keywords
        @return: A deferred called with the parsed SOAP response structure
@rtype: L{twisted.internet.defer.Deferred}
"""
payload = SOAPpy.buildSOAP(method=method, config=Config, namespace=self._prefix, kw=kwargs)
# Here begins the nasty hack
payload = payload.replace(
# Upnp wants s: instead of SOAP-ENV
'SOAP-ENV','s').replace(
# Doesn't seem to like these encoding stuff
'xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/"', '').replace(
'SOAP-ENC:root="1"', '').replace(
# And it wants u: instead of ns1 namespace for arguments..
'ns1','u')
logging.debug("SOAP Payload:\n%s", payload)
return client.getPage(self._url, postdata=payload, method="POST",
headers={'content-type': 'text/xml', 'SOAPACTION': '%s#%s' % (self._prefix, method)}
).addCallbacks(self._got_page, self._got_error)
def _got_page(self, result):
"""
The http POST command was successful, we parse the SOAP
answer, and return it.
@param result: the xml content
"""
parsed = SOAPpy.parseSOAPRPC(result)
logging.debug("SOAP Answer:\n%s", result)
logging.debug("SOAP Parsed Answer: %r", parsed)
return parsed
def _got_error(self, res):
"""
The HTTP POST command did not succeed, depending on the error type:
- it's a SOAP error, we parse it and return a L{SoapError}.
- it's another type of error (http, other), we raise it as is
"""
logging.debug("SOAP Error:\n%s", res)
if isinstance(res.value, error.Error):
try:
logging.debug("SOAP Error content:\n%s", res.value.response)
raise SoapError(SOAPpy.parseSOAPRPC(res.value.response)["detail"])
except:
raise
raise Exception(res.value)
| gpl-3.0 | 7,275,849,996,519,336,000 | 2,638,047,949,614,780,400 | 33.105769 | 103 | 0.595715 | false |
tiagofrepereira2012/tensorflow | tensorflow/python/debug/cli/readline_ui_test.py | 81 | 5646 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests of the readline-based CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import tempfile
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import readline_ui
from tensorflow.python.debug.cli import ui_factory
from tensorflow.python.framework import test_util
from tensorflow.python.platform import gfile
from tensorflow.python.platform import googletest
class MockReadlineUI(readline_ui.ReadlineUI):
"""Test subclass of ReadlineUI that bypasses terminal manipulations."""
def __init__(self, on_ui_exit=None, command_sequence=None):
readline_ui.ReadlineUI.__init__(self, on_ui_exit=on_ui_exit)
self._command_sequence = command_sequence
self._command_counter = 0
self.observers = {"screen_outputs": []}
def _get_user_command(self):
command = self._command_sequence[self._command_counter]
self._command_counter += 1
return command
def _display_output(self, screen_output):
self.observers["screen_outputs"].append(screen_output)
class CursesTest(test_util.TensorFlowTestCase):
def _babble(self, args, screen_info=None):
ap = argparse.ArgumentParser(
description="Do babble.", usage=argparse.SUPPRESS)
ap.add_argument(
"-n",
"--num_times",
dest="num_times",
type=int,
default=60,
help="How many times to babble")
parsed = ap.parse_args(args)
lines = ["bar"] * parsed.num_times
return debugger_cli_common.RichTextLines(lines)
def testUIFactoryCreatesReadlineUI(self):
ui = ui_factory.get_ui("readline")
self.assertIsInstance(ui, readline_ui.ReadlineUI)
def testUIFactoryRaisesExceptionOnInvalidUIType(self):
with self.assertRaisesRegexp(ValueError, "Invalid ui_type: 'foobar'"):
ui_factory.get_ui("foobar")
def testUIFactoryRaisesExceptionOnInvalidUITypeGivenAvailable(self):
with self.assertRaisesRegexp(ValueError, "Invalid ui_type: 'readline'"):
ui_factory.get_ui("readline", available_ui_types=["curses"])
def testRunUIExitImmediately(self):
"""Make sure that the UI can exit properly after launch."""
ui = MockReadlineUI(command_sequence=["exit"])
ui.run_ui()
# No screen output should have happened.
self.assertEqual(0, len(ui.observers["screen_outputs"]))
def testRunUIEmptyCommand(self):
"""Issue an empty command then exit."""
ui = MockReadlineUI(command_sequence=["", "exit"])
ui.run_ui()
self.assertEqual(1, len(ui.observers["screen_outputs"]))
def testRunUIWithInitCmd(self):
"""Run UI with an initial command specified."""
ui = MockReadlineUI(command_sequence=["exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui(init_command="babble")
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(1, len(screen_outputs))
self.assertEqual(["bar"] * 60, screen_outputs[0].lines)
def testRunUIWithValidUsersCommands(self):
"""Run UI with an initial command specified."""
ui = MockReadlineUI(command_sequence=["babble -n 3", "babble -n 6", "exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(2, len(screen_outputs))
self.assertEqual(["bar"] * 3, screen_outputs[0].lines)
self.assertEqual(["bar"] * 6, screen_outputs[1].lines)
def testRunUIWithInvalidUsersCommands(self):
"""Run UI with an initial command specified."""
ui = MockReadlineUI(command_sequence=["babble -n 3", "wobble", "exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(2, len(screen_outputs))
self.assertEqual(["bar"] * 3, screen_outputs[0].lines)
self.assertEqual(["ERROR: Invalid command prefix \"wobble\""],
screen_outputs[1].lines)
def testRunUIWithOnUIExitCallback(self):
observer = {"callback_invoked": False}
def callback_for_test():
observer["callback_invoked"] = True
ui = MockReadlineUI(on_ui_exit=callback_for_test, command_sequence=["exit"])
self.assertFalse(observer["callback_invoked"])
ui.run_ui()
self.assertEqual(0, len(ui.observers["screen_outputs"]))
self.assertTrue(observer["callback_invoked"])
def testIncompleteRedirectWorks(self):
output_path = tempfile.mktemp()
ui = MockReadlineUI(
command_sequence=["babble -n 2 > %s" % output_path, "exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(1, len(screen_outputs))
self.assertEqual(["bar"] * 2, screen_outputs[0].lines)
with gfile.Open(output_path, "r") as f:
self.assertEqual("bar\nbar\n", f.read())
if __name__ == "__main__":
googletest.main()
| apache-2.0 | 3,910,877,535,808,641,500 | -7,964,532,495,898,826,000 | 33.012048 | 80 | 0.687389 | false |
RaRe-Technologies/gensim | gensim/test/test_lee.py | 5 | 4277 | #!/usr/bin/env python
# encoding: utf-8
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated test to reproduce the results of Lee et al. (2005)
Lee et al. (2005) compares different models for semantic
similarity and verifies the results with similarity judgements from humans.
As a validation of the gensim implementation we reproduced the results
of Lee et al. (2005) in this test.
Many thanks to Michael D. Lee ([email protected]) who provided us
with his corpus and similarity data.
If you need to reference this dataset, please cite:
Lee, M., Pincombe, B., & Welsh, M. (2005).
An empirical evaluation of models of text document similarity.
Proceedings of the 27th Annual Conference of the Cognitive Science Society
"""
from __future__ import with_statement
import logging
import os.path
import unittest
from functools import partial
import numpy as np
from gensim import corpora, models, utils, matutils
from gensim.parsing.preprocessing import preprocess_documents, preprocess_string, DEFAULT_FILTERS
bg_corpus = None
corpus = None
human_sim_vector = None
class TestLeeTest(unittest.TestCase):
def setUp(self):
"""setup lee test corpora"""
global bg_corpus, corpus, human_sim_vector, bg_corpus2, corpus2
pre_path = os.path.join(os.path.dirname(__file__), 'test_data')
bg_corpus_file = 'lee_background.cor'
corpus_file = 'lee.cor'
sim_file = 'similarities0-1.txt'
# read in the corpora
latin1 = partial(utils.to_unicode, encoding='latin1')
with utils.open(os.path.join(pre_path, bg_corpus_file), 'rb') as f:
bg_corpus = preprocess_documents(latin1(line) for line in f)
with utils.open(os.path.join(pre_path, corpus_file), 'rb') as f:
corpus = preprocess_documents(latin1(line) for line in f)
with utils.open(os.path.join(pre_path, bg_corpus_file), 'rb') as f:
bg_corpus2 = [preprocess_string(latin1(s), filters=DEFAULT_FILTERS[:-1]) for s in f]
with utils.open(os.path.join(pre_path, corpus_file), 'rb') as f:
corpus2 = [preprocess_string(latin1(s), filters=DEFAULT_FILTERS[:-1]) for s in f]
# read the human similarity data
sim_matrix = np.loadtxt(os.path.join(pre_path, sim_file))
sim_m_size = np.shape(sim_matrix)[0]
human_sim_vector = sim_matrix[np.triu_indices(sim_m_size, 1)]
def test_corpus(self):
"""availability and integrity of corpus"""
documents_in_bg_corpus = 300
documents_in_corpus = 50
len_sim_vector = 1225
self.assertEqual(len(bg_corpus), documents_in_bg_corpus)
self.assertEqual(len(corpus), documents_in_corpus)
self.assertEqual(len(human_sim_vector), len_sim_vector)
def test_lee(self):
"""correlation with human data > 0.6
(this is the value which was achieved in the original paper)
"""
global bg_corpus, corpus
# create a dictionary and corpus (bag of words)
dictionary = corpora.Dictionary(bg_corpus)
bg_corpus = [dictionary.doc2bow(text) for text in bg_corpus]
corpus = [dictionary.doc2bow(text) for text in corpus]
# transform the bag of words with log_entropy normalization
log_ent = models.LogEntropyModel(bg_corpus)
bg_corpus_ent = log_ent[bg_corpus]
# initialize an LSI transformation from background corpus
lsi = models.LsiModel(bg_corpus_ent, id2word=dictionary, num_topics=200)
# transform small corpus to lsi bow->log_ent->fold-in-lsi
corpus_lsi = lsi[log_ent[corpus]]
# compute pairwise similarity matrix and extract upper triangular
res = np.zeros((len(corpus), len(corpus)))
for i, par1 in enumerate(corpus_lsi):
for j, par2 in enumerate(corpus_lsi):
res[i, j] = matutils.cossim(par1, par2)
flat = res[np.triu_indices(len(corpus), 1)]
cor = np.corrcoef(flat, human_sim_vector)[0, 1]
logging.info("LSI correlation coefficient is %s", cor)
self.assertTrue(cor > 0.6)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
| lgpl-2.1 | -7,867,504,508,343,990,000 | -7,524,612,663,327,617,000 | 36.849558 | 97 | 0.666121 | false |
vodik/pytest | testing/acceptance_test.py | 4 | 23849 | import sys
import _pytest._code
import py
import pytest
from _pytest.main import EXIT_NOTESTSCOLLECTED, EXIT_USAGEERROR
class TestGeneralUsage:
def test_config_error(self, testdir):
testdir.makeconftest("""
def pytest_configure(config):
import pytest
raise pytest.UsageError("hello")
""")
result = testdir.runpytest(testdir.tmpdir)
assert result.ret != 0
result.stderr.fnmatch_lines([
'*ERROR: hello'
])
def test_root_conftest_syntax_error(self, testdir):
testdir.makepyfile(conftest="raise SyntaxError\n")
result = testdir.runpytest()
result.stderr.fnmatch_lines(["*raise SyntaxError*"])
assert result.ret != 0
def test_early_hook_error_issue38_1(self, testdir):
testdir.makeconftest("""
def pytest_sessionstart():
0 / 0
""")
result = testdir.runpytest(testdir.tmpdir)
assert result.ret != 0
# tracestyle is native by default for hook failures
result.stdout.fnmatch_lines([
'*INTERNALERROR*File*conftest.py*line 2*',
'*0 / 0*',
])
result = testdir.runpytest(testdir.tmpdir, "--fulltrace")
assert result.ret != 0
# tracestyle is native by default for hook failures
result.stdout.fnmatch_lines([
'*INTERNALERROR*def pytest_sessionstart():*',
'*INTERNALERROR*0 / 0*',
])
def test_early_hook_configure_error_issue38(self, testdir):
testdir.makeconftest("""
def pytest_configure():
0 / 0
""")
result = testdir.runpytest(testdir.tmpdir)
assert result.ret != 0
# here we get it on stderr
result.stderr.fnmatch_lines([
'*INTERNALERROR*File*conftest.py*line 2*',
'*0 / 0*',
])
def test_file_not_found(self, testdir):
result = testdir.runpytest("asd")
assert result.ret != 0
result.stderr.fnmatch_lines(["ERROR: file not found*asd"])
def test_file_not_found_unconfigure_issue143(self, testdir):
testdir.makeconftest("""
def pytest_configure():
print("---configure")
def pytest_unconfigure():
print("---unconfigure")
""")
result = testdir.runpytest("-s", "asd")
assert result.ret == 4 # EXIT_USAGEERROR
result.stderr.fnmatch_lines(["ERROR: file not found*asd"])
result.stdout.fnmatch_lines([
"*---configure",
"*---unconfigure",
])
def test_config_preparse_plugin_option(self, testdir):
testdir.makepyfile(pytest_xyz="""
def pytest_addoption(parser):
parser.addoption("--xyz", dest="xyz", action="store")
""")
testdir.makepyfile(test_one="""
def test_option(pytestconfig):
assert pytestconfig.option.xyz == "123"
""")
result = testdir.runpytest("-p", "pytest_xyz", "--xyz=123", syspathinsert=True)
assert result.ret == 0
result.stdout.fnmatch_lines([
'*1 passed*',
])
def test_assertion_magic(self, testdir):
p = testdir.makepyfile("""
def test_this():
x = 0
assert x
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"> assert x",
"E assert 0",
])
assert result.ret == 1
def test_nested_import_error(self, testdir):
p = testdir.makepyfile("""
import import_fails
def test_this():
assert import_fails.a == 1
""")
testdir.makepyfile(import_fails="import does_not_work")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
#XXX on jython this fails: "> import import_fails",
"E ImportError: No module named *does_not_work*",
])
assert result.ret == 1
def test_not_collectable_arguments(self, testdir):
p1 = testdir.makepyfile("")
p2 = testdir.makefile(".pyc", "123")
result = testdir.runpytest(p1, p2)
assert result.ret
result.stderr.fnmatch_lines([
"*ERROR: not found:*%s" %(p2.basename,)
])
def test_issue486_better_reporting_on_conftest_load_failure(self, testdir):
testdir.makepyfile("")
testdir.makeconftest("import qwerty")
result = testdir.runpytest("--help")
result.stdout.fnmatch_lines("""
*--version*
*warning*conftest.py*
""")
result = testdir.runpytest()
result.stderr.fnmatch_lines("""
*ERROR*could not load*conftest.py*
""")
def test_early_skip(self, testdir):
testdir.mkdir("xyz")
testdir.makeconftest("""
import pytest
def pytest_collect_directory():
pytest.skip("early")
""")
result = testdir.runpytest()
assert result.ret == EXIT_NOTESTSCOLLECTED
result.stdout.fnmatch_lines([
"*1 skip*"
])
def test_issue88_initial_file_multinodes(self, testdir):
testdir.makeconftest("""
import pytest
class MyFile(pytest.File):
def collect(self):
return [MyItem("hello", parent=self)]
def pytest_collect_file(path, parent):
return MyFile(path, parent)
class MyItem(pytest.Item):
pass
""")
p = testdir.makepyfile("def test_hello(): pass")
result = testdir.runpytest(p, "--collect-only")
result.stdout.fnmatch_lines([
"*MyFile*test_issue88*",
"*Module*test_issue88*",
])
def test_issue93_initialnode_importing_capturing(self, testdir):
testdir.makeconftest("""
import sys
print ("should not be seen")
sys.stderr.write("stder42\\n")
""")
result = testdir.runpytest()
assert result.ret == EXIT_NOTESTSCOLLECTED
assert "should not be seen" not in result.stdout.str()
assert "stderr42" not in result.stderr.str()
def test_conftest_printing_shows_if_error(self, testdir):
testdir.makeconftest("""
print ("should be seen")
assert 0
""")
result = testdir.runpytest()
assert result.ret != 0
assert "should be seen" in result.stdout.str()
@pytest.mark.skipif(not hasattr(py.path.local, 'mksymlinkto'),
reason="symlink not available on this platform")
def test_chdir(self, testdir):
testdir.tmpdir.join("py").mksymlinkto(py._pydir)
p = testdir.tmpdir.join("main.py")
p.write(_pytest._code.Source("""
import sys, os
sys.path.insert(0, '')
import py
print (py.__file__)
print (py.__path__)
os.chdir(os.path.dirname(os.getcwd()))
print (py.log)
"""))
result = testdir.runpython(p)
assert not result.ret
def test_issue109_sibling_conftests_not_loaded(self, testdir):
sub1 = testdir.tmpdir.mkdir("sub1")
sub2 = testdir.tmpdir.mkdir("sub2")
sub1.join("conftest.py").write("assert 0")
result = testdir.runpytest(sub2)
assert result.ret == EXIT_NOTESTSCOLLECTED
sub2.ensure("__init__.py")
p = sub2.ensure("test_hello.py")
result = testdir.runpytest(p)
assert result.ret == EXIT_NOTESTSCOLLECTED
result = testdir.runpytest(sub1)
assert result.ret == EXIT_USAGEERROR
def test_directory_skipped(self, testdir):
testdir.makeconftest("""
import pytest
def pytest_ignore_collect():
pytest.skip("intentional")
""")
testdir.makepyfile("def test_hello(): pass")
result = testdir.runpytest()
assert result.ret == EXIT_NOTESTSCOLLECTED
result.stdout.fnmatch_lines([
"*1 skipped*"
])
def test_multiple_items_per_collector_byid(self, testdir):
c = testdir.makeconftest("""
import pytest
class MyItem(pytest.Item):
def runtest(self):
pass
class MyCollector(pytest.File):
def collect(self):
return [MyItem(name="xyz", parent=self)]
def pytest_collect_file(path, parent):
if path.basename.startswith("conftest"):
return MyCollector(path, parent)
""")
result = testdir.runpytest(c.basename+"::"+"xyz")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*1 pass*",
])
def test_skip_on_generated_funcarg_id(self, testdir):
testdir.makeconftest("""
import pytest
def pytest_generate_tests(metafunc):
metafunc.addcall({'x': 3}, id='hello-123')
def pytest_runtest_setup(item):
print (item.keywords)
if 'hello-123' in item.keywords:
pytest.skip("hello")
assert 0
""")
p = testdir.makepyfile("""def test_func(x): pass""")
res = testdir.runpytest(p)
assert res.ret == 0
res.stdout.fnmatch_lines(["*1 skipped*"])
def test_direct_addressing_selects(self, testdir):
p = testdir.makepyfile("""
def pytest_generate_tests(metafunc):
metafunc.addcall({'i': 1}, id="1")
metafunc.addcall({'i': 2}, id="2")
def test_func(i):
pass
""")
res = testdir.runpytest(p.basename + "::" + "test_func[1]")
assert res.ret == 0
res.stdout.fnmatch_lines(["*1 passed*"])
def test_direct_addressing_notfound(self, testdir):
p = testdir.makepyfile("""
def test_func():
pass
""")
res = testdir.runpytest(p.basename + "::" + "test_notfound")
assert res.ret
res.stderr.fnmatch_lines(["*ERROR*not found*"])
def test_docstring_on_hookspec(self):
from _pytest import hookspec
for name, value in vars(hookspec).items():
if name.startswith("pytest_"):
assert value.__doc__, "no docstring for %s" % name
def test_initialization_error_issue49(self, testdir):
testdir.makeconftest("""
def pytest_configure():
x
""")
result = testdir.runpytest()
assert result.ret == 3 # internal error
result.stderr.fnmatch_lines([
"INTERNAL*pytest_configure*",
"INTERNAL*x*",
])
assert 'sessionstarttime' not in result.stderr.str()
@pytest.mark.parametrize('lookfor', ['test_fun.py', 'test_fun.py::test_a'])
def test_issue134_report_syntaxerror_when_collecting_member(self, testdir, lookfor):
testdir.makepyfile(test_fun="""
def test_a():
pass
def""")
result = testdir.runpytest(lookfor)
result.stdout.fnmatch_lines(['*SyntaxError*'])
if '::' in lookfor:
result.stderr.fnmatch_lines([
'*ERROR*',
])
assert result.ret == 4 # usage error only if item not found
def test_report_all_failed_collections_initargs(self, testdir):
testdir.makepyfile(test_a="def", test_b="def")
result = testdir.runpytest("test_a.py::a", "test_b.py::b")
result.stderr.fnmatch_lines([
"*ERROR*test_a.py::a*",
"*ERROR*test_b.py::b*",
])
def test_namespace_import_doesnt_confuse_import_hook(self, testdir):
# Ref #383. Python 3.3's namespace package messed with our import hooks
# Importing a module that didn't exist, even if the ImportError was
# gracefully handled, would make our test crash.
testdir.mkdir('not_a_package')
p = testdir.makepyfile("""
try:
from not_a_package import doesnt_exist
except ImportError:
# We handle the import error gracefully here
pass
def test_whatever():
pass
""")
res = testdir.runpytest(p.basename)
assert res.ret == 0
def test_unknown_option(self, testdir):
result = testdir.runpytest("--qwlkej")
result.stderr.fnmatch_lines("""
*unrecognized*
""")
def test_getsourcelines_error_issue553(self, testdir, monkeypatch):
monkeypatch.setattr("inspect.getsourcelines", None)
p = testdir.makepyfile("""
def raise_error(obj):
raise IOError('source code not available')
import inspect
inspect.getsourcelines = raise_error
def test_foo(invalid_fixture):
pass
""")
res = testdir.runpytest(p)
res.stdout.fnmatch_lines([
"*source code not available*",
"*fixture 'invalid_fixture' not found",
])
def test_plugins_given_as_strings(self, tmpdir, monkeypatch):
"""test that str values passed to main() as `plugins` arg
are interpreted as module names to be imported and registered.
#855.
"""
with pytest.raises(ImportError) as excinfo:
pytest.main([str(tmpdir)], plugins=['invalid.module'])
assert 'invalid' in str(excinfo.value)
p = tmpdir.join('test_test_plugins_given_as_strings.py')
p.write('def test_foo(): pass')
mod = py.std.types.ModuleType("myplugin")
monkeypatch.setitem(sys.modules, 'myplugin', mod)
assert pytest.main(args=[str(tmpdir)], plugins=['myplugin']) == 0
def test_parameterized_with_bytes_regex(self, testdir):
p = testdir.makepyfile("""
import re
import pytest
@pytest.mark.parametrize('r', [re.compile(b'foo')])
def test_stuff(r):
pass
"""
)
res = testdir.runpytest(p)
res.stdout.fnmatch_lines([
'*1 passed*'
])
class TestInvocationVariants:
def test_earlyinit(self, testdir):
p = testdir.makepyfile("""
import pytest
assert hasattr(pytest, 'mark')
""")
result = testdir.runpython(p)
assert result.ret == 0
@pytest.mark.xfail("sys.platform.startswith('java')")
def test_pydoc(self, testdir):
for name in ('py.test', 'pytest'):
result = testdir.runpython_c("import %s;help(%s)" % (name, name))
assert result.ret == 0
s = result.stdout.str()
assert 'MarkGenerator' in s
def test_import_star_py_dot_test(self, testdir):
p = testdir.makepyfile("""
from py.test import *
#collect
#cmdline
#Item
#assert collect.Item is Item
#assert collect.Collector is Collector
main
skip
xfail
""")
result = testdir.runpython(p)
assert result.ret == 0
def test_import_star_pytest(self, testdir):
p = testdir.makepyfile("""
from pytest import *
#Item
#File
main
skip
xfail
""")
result = testdir.runpython(p)
assert result.ret == 0
def test_double_pytestcmdline(self, testdir):
p = testdir.makepyfile(run="""
import pytest
pytest.main()
pytest.main()
""")
testdir.makepyfile("""
def test_hello():
pass
""")
result = testdir.runpython(p)
result.stdout.fnmatch_lines([
"*1 passed*",
"*1 passed*",
])
def test_python_minus_m_invocation_ok(self, testdir):
p1 = testdir.makepyfile("def test_hello(): pass")
res = testdir.run(py.std.sys.executable, "-m", "pytest", str(p1))
assert res.ret == 0
def test_python_minus_m_invocation_fail(self, testdir):
p1 = testdir.makepyfile("def test_fail(): 0/0")
res = testdir.run(py.std.sys.executable, "-m", "pytest", str(p1))
assert res.ret == 1
def test_python_pytest_package(self, testdir):
p1 = testdir.makepyfile("def test_pass(): pass")
res = testdir.run(py.std.sys.executable, "-m", "pytest", str(p1))
assert res.ret == 0
res.stdout.fnmatch_lines(["*1 passed*"])
def test_equivalence_pytest_pytest(self):
assert pytest.main == py.test.cmdline.main
def test_invoke_with_string(self, capsys):
retcode = pytest.main("-h")
assert not retcode
out, err = capsys.readouterr()
assert "--help" in out
pytest.raises(ValueError, lambda: pytest.main(0))
def test_invoke_with_path(self, tmpdir, capsys):
retcode = pytest.main(tmpdir)
assert retcode == EXIT_NOTESTSCOLLECTED
out, err = capsys.readouterr()
def test_invoke_plugin_api(self, testdir, capsys):
class MyPlugin:
def pytest_addoption(self, parser):
parser.addoption("--myopt")
pytest.main(["-h"], plugins=[MyPlugin()])
out, err = capsys.readouterr()
assert "--myopt" in out
def test_pyargs_importerror(self, testdir, monkeypatch):
monkeypatch.delenv('PYTHONDONTWRITEBYTECODE', False)
path = testdir.mkpydir("tpkg")
path.join("test_hello.py").write('raise ImportError')
result = testdir.runpytest("--pyargs", "tpkg.test_hello")
assert result.ret != 0
# FIXME: It would be more natural to match NOT
# "ERROR*file*or*package*not*found*".
result.stdout.fnmatch_lines([
"*collected 0 items*"
])
def test_cmdline_python_package(self, testdir, monkeypatch):
monkeypatch.delenv('PYTHONDONTWRITEBYTECODE', False)
path = testdir.mkpydir("tpkg")
path.join("test_hello.py").write("def test_hello(): pass")
path.join("test_world.py").write("def test_world(): pass")
result = testdir.runpytest("--pyargs", "tpkg")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*2 passed*"
])
result = testdir.runpytest("--pyargs", "tpkg.test_hello")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*1 passed*"
])
def join_pythonpath(what):
cur = py.std.os.environ.get('PYTHONPATH')
if cur:
return str(what) + ':' + cur
return what
empty_package = testdir.mkpydir("empty_package")
monkeypatch.setenv('PYTHONPATH', join_pythonpath(empty_package))
result = testdir.runpytest("--pyargs", ".")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*2 passed*"
])
monkeypatch.setenv('PYTHONPATH', join_pythonpath(testdir))
path.join('test_hello.py').remove()
result = testdir.runpytest("--pyargs", "tpkg.test_hello")
assert result.ret != 0
result.stderr.fnmatch_lines([
"*not*found*test_hello*",
])
def test_cmdline_python_package_not_exists(self, testdir):
result = testdir.runpytest("--pyargs", "tpkgwhatv")
assert result.ret
result.stderr.fnmatch_lines([
"ERROR*file*or*package*not*found*",
])
@pytest.mark.xfail(reason="decide: feature or bug")
def test_noclass_discovery_if_not_testcase(self, testdir):
testpath = testdir.makepyfile("""
import unittest
class TestHello(object):
def test_hello(self):
assert self.attr
class RealTest(unittest.TestCase, TestHello):
attr = 42
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=1)
def test_doctest_id(self, testdir):
testdir.makefile('.txt', """
>>> x=3
>>> x
4
""")
result = testdir.runpytest("-rf")
lines = result.stdout.str().splitlines()
for line in lines:
if line.startswith("FAIL "):
testid = line[5:].strip()
break
result = testdir.runpytest(testid, '-rf')
result.stdout.fnmatch_lines([
line,
"*1 failed*",
])
def test_core_backward_compatibility(self):
"""Test backward compatibility for get_plugin_manager function. See #787."""
import _pytest.config
assert type(_pytest.config.get_plugin_manager()) is _pytest.config.PytestPluginManager
def test_has_plugin(self, request):
"""Test hasplugin function of the plugin manager (#932)."""
assert request.config.pluginmanager.hasplugin('python')
class TestDurations:
source = """
import time
frag = 0.002
def test_something():
pass
def test_2():
time.sleep(frag*5)
def test_1():
time.sleep(frag)
def test_3():
time.sleep(frag*10)
"""
def test_calls(self, testdir):
testdir.makepyfile(self.source)
result = testdir.runpytest("--durations=10")
assert result.ret == 0
result.stdout.fnmatch_lines_random([
"*durations*",
"*call*test_3*",
"*call*test_2*",
"*call*test_1*",
])
def test_calls_show_2(self, testdir):
testdir.makepyfile(self.source)
result = testdir.runpytest("--durations=2")
assert result.ret == 0
lines = result.stdout.get_lines_after("*slowest*durations*")
assert "4 passed" in lines[2]
def test_calls_showall(self, testdir):
testdir.makepyfile(self.source)
result = testdir.runpytest("--durations=0")
assert result.ret == 0
for x in "123":
for y in 'call',: #'setup', 'call', 'teardown':
for line in result.stdout.lines:
if ("test_%s" % x) in line and y in line:
break
else:
raise AssertionError("not found %s %s" % (x,y))
def test_with_deselected(self, testdir):
testdir.makepyfile(self.source)
result = testdir.runpytest("--durations=2", "-k test_1")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*durations*",
"*call*test_1*",
])
def test_with_failing_collection(self, testdir):
testdir.makepyfile(self.source)
testdir.makepyfile(test_collecterror="""xyz""")
result = testdir.runpytest("--durations=2", "-k test_1")
assert result.ret != 0
result.stdout.fnmatch_lines([
"*durations*",
"*call*test_1*",
])
def test_with_not(self, testdir):
testdir.makepyfile(self.source)
result = testdir.runpytest("-k not 1")
assert result.ret == 0
class TestDurationWithFixture:
source = """
import time
frag = 0.001
def setup_function(func):
time.sleep(frag * 3)
def test_1():
time.sleep(frag*2)
def test_2():
time.sleep(frag)
"""
def test_setup_function(self, testdir):
testdir.makepyfile(self.source)
result = testdir.runpytest("--durations=10")
assert result.ret == 0
result.stdout.fnmatch_lines_random("""
*durations*
* setup *test_1*
* call *test_1*
""")
| mit | -5,901,813,663,205,925,000 | -5,163,917,276,376,386,000 | 33.07 | 94 | 0.548409 | false |
vasyarv/edx-platform | lms/djangoapps/django_comment_client/management/commands/assign_role.py | 251 | 1144 | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django_comment_common.models import Role
from django.contrib.auth.models import User
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--remove',
action='store_true',
dest='remove',
default=False,
help='Remove the role instead of adding it'),
)
args = '<user|email> <role> <course_id>'
help = 'Assign a discussion forum role to a user '
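    # A hedged usage sketch (user, role and course id values below are
    # hypothetical, not taken from this repository):
    #
    #   python manage.py assign_role [email protected] Moderator MITx/6.002x/2012_Fall
    #   python manage.py assign_role --remove jdoe Moderator MITx/6.002x/2012_Fall
    #
    # handle() below requires that a Role row with that name already exists for
    # the given course_id; otherwise Role.objects.get() raises DoesNotExist.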
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError('Usage is assign_role {0}'.format(self.args))
name_or_email, role, course_id = args
role = Role.objects.get(name=role, course_id=course_id)
if '@' in name_or_email:
user = User.objects.get(email=name_or_email)
else:
user = User.objects.get(username=name_or_email)
if options['remove']:
user.roles.remove(role)
else:
user.roles.add(role)
print 'Success!'
| agpl-3.0 | -2,953,013,591,288,219,000 | -5,295,749,148,543,825,000 | 29.105263 | 76 | 0.587413 | false |
thepaul/uftrace | tests/t217_no_libcall_dump.py | 1 | 1525 | #!/usr/bin/env python
from runtest import TestBase
import subprocess as sp
TDIR='xxx'
class TestCase(TestBase):
def __init__(self):
TestBase.__init__(self, 'signal', """
uftrace file header: magic = 4674726163652100
uftrace file header: version = 4
uftrace file header: header size = 40
uftrace file header: endian = 1 (little)
uftrace file header: class = 2 (64 bit)
uftrace file header: features = 0x363 (PLTHOOK | TASK_SESSION | SYM_REL_ADDR | MAX_STACK | PERF_EVENT | AUTO_ARGS)
uftrace file header: info = 0x3bff
reading 73755.dat
50895.869952000 73755: [entry] main(400787) depth: 0
50895.869952297 73755: [entry] foo(40071f) depth: 1
50895.869952533 73755: [exit ] foo(40071f) depth: 1
50895.869966333 73755: [entry] sighandler(400750) depth: 2
50895.869966473 73755: [entry] bar(400734) depth: 3
50895.869966617 73755: [exit ] bar(400734) depth: 3
50895.869967067 73755: [exit ] sighandler(400750) depth: 2
50895.869969790 73755: [entry] foo(40071f) depth: 1
50895.869969907 73755: [exit ] foo(40071f) depth: 1
50895.869970227 73755: [exit ] main(400787) depth: 0
""", sort='dump')
def pre(self):
record_cmd = '%s record -d %s %s' % (TestBase.uftrace_cmd, TDIR, 't-' + self.name)
sp.call(record_cmd.split())
return TestBase.TEST_SUCCESS
def runcmd(self):
return '%s dump --no-libcall -d %s' % (TestBase.uftrace_cmd, TDIR)
def post(self, ret):
sp.call(['rm', '-rf', TDIR])
return ret
| gpl-2.0 | 6,153,869,330,502,645,000 | -1,431,441,819,475,537,000 | 35.309524 | 119 | 0.658361 | false |
datakortet/django-cms | cms/plugins/teaser/models.py | 1 | 1148 | from django.core.cache import cache
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models import CMSPlugin, Page
class Teaser(CMSPlugin):
"""
A Teaser
"""
title = models.CharField(_("title"), max_length=255)
image = models.ImageField(_("image"), upload_to=CMSPlugin.get_media_path, blank=True, null=True)
page_link = models.ForeignKey(
Page,
verbose_name=_("page"),
help_text=_("If present image will be clickable"),
blank=True,
null=True,
limit_choices_to={'publisher_is_draft': True}
)
url = models.CharField(_("link"), max_length=255, blank=True, null=True, help_text=_("If present image will be clickable."))
description = models.TextField(_("description"), blank=True, null=True)
@property
def _cache_key(self):
return "%s_id_%d" % (self.__class__.__name__, self.id)
def save(self, *args, **kwargs):
super(Teaser, self).save(*args, **kwargs)
cache.delete(self._cache_key)
def __unicode__(self):
return self.title
search_fields = ('description',)
| bsd-3-clause | 3,452,850,748,140,146,000 | 1,839,439,507,217,345,800 | 32.764706 | 128 | 0.625436 | false |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.0/Lib/dos-8x3/test_win.py | 11 | 5449 | # Test the windows specific win32reg module.
# Only win32reg functions not hit here: FlushKey, LoadKey and SaveKey
from _winreg import *
import os, sys
test_key_name = "SOFTWARE\\Python Registry Test Key - Delete Me"
test_data = [
("Int Value", 45, REG_DWORD),
("String Val", "A string value", REG_SZ,),
(u"Unicode Val", u"A Unicode value", REG_SZ,),
("StringExpand", "The path is %path%", REG_EXPAND_SZ),
("UnicodeExpand", u"The path is %path%", REG_EXPAND_SZ),
("Multi-string", ["Lots", "of", "string", "values"], REG_MULTI_SZ),
("Multi-unicode", [u"Lots", u"of", u"unicode", u"values"], REG_MULTI_SZ),
("Multi-mixed", [u"Unicode", u"and", "string", "values"],REG_MULTI_SZ),
("Raw Data", ("binary"+chr(0)+"data"), REG_BINARY),
]
def WriteTestData(root_key):
# Set the default value for this key.
SetValue(root_key, test_key_name, REG_SZ, "Default value")
key = CreateKey(root_key, test_key_name)
# Create a sub-key
sub_key = CreateKey(key, "sub_key")
# Give the sub-key some named values
for value_name, value_data, value_type in test_data:
SetValueEx(sub_key, value_name, 0, value_type, value_data)
# Check we wrote as many items as we thought.
nkeys, nvalues, since_mod = QueryInfoKey(key)
assert nkeys==1, "Not the correct number of sub keys"
assert nvalues==1, "Not the correct number of values"
nkeys, nvalues, since_mod = QueryInfoKey(sub_key)
assert nkeys==0, "Not the correct number of sub keys"
assert nvalues==len(test_data), "Not the correct number of values"
# Close this key this way...
# (but before we do, copy the key as an integer - this allows
# us to test that the key really gets closed).
int_sub_key = int(sub_key)
CloseKey(sub_key)
try:
QueryInfoKey(int_sub_key)
raise RuntimeError, "It appears the CloseKey() function does not close the actual key!"
except EnvironmentError:
pass
# ... and close that key that way :-)
int_key = int(key)
key.Close()
try:
QueryInfoKey(int_key)
raise RuntimeError, "It appears the key.Close() function does not close the actual key!"
except EnvironmentError:
pass
def ReadTestData(root_key):
# Check we can get default value for this key.
val = QueryValue(root_key, test_key_name)
assert val=="Default value", "Registry didn't give back the correct value"
key = OpenKey(root_key, test_key_name)
# Read the sub-keys
sub_key = OpenKey(key, "sub_key")
# Check I can enumerate over the values.
index = 0
while 1:
try:
data = EnumValue(sub_key, index)
except EnvironmentError:
break
assert data in test_data, "Didn't read back the correct test data"
index = index + 1
assert index==len(test_data), "Didn't read the correct number of items"
# Check I can directly access each item
for value_name, value_data, value_type in test_data:
read_val, read_typ = QueryValueEx(sub_key, value_name)
assert read_val==value_data and read_typ == value_type, \
"Could not directly read the value"
sub_key.Close()
# Enumerate our main key.
read_val = EnumKey(key, 0)
assert read_val == "sub_key", "Read subkey value wrong"
try:
EnumKey(key, 1)
assert 0, "Was able to get a second key when I only have one!"
except EnvironmentError:
pass
key.Close()
def DeleteTestData(root_key):
key = OpenKey(root_key, test_key_name, 0, KEY_ALL_ACCESS)
sub_key = OpenKey(key, "sub_key", 0, KEY_ALL_ACCESS)
# It is not necessary to delete the values before deleting
# the key (although subkeys must not exist). We delete them
# manually just to prove we can :-)
for value_name, value_data, value_type in test_data:
DeleteValue(sub_key, value_name)
nkeys, nvalues, since_mod = QueryInfoKey(sub_key)
assert nkeys==0 and nvalues==0, "subkey not empty before delete"
sub_key.Close()
DeleteKey(key, "sub_key")
try:
        # Shouldn't be able to delete it twice!
DeleteKey(key, "sub_key")
assert 0, "Deleting the key twice succeeded"
except EnvironmentError:
pass
key.Close()
DeleteKey(root_key, test_key_name)
# Opening should now fail!
try:
key = OpenKey(root_key, test_key_name)
assert 0, "Could open the non-existent key"
except WindowsError: # Use this error name this time
pass
def TestAll(root_key):
WriteTestData(root_key)
ReadTestData(root_key)
DeleteTestData(root_key)
# Test on my local machine.
TestAll(HKEY_CURRENT_USER)
print "Local registry tests worked"
try:
remote_name = sys.argv[sys.argv.index("--remote")+1]
except (IndexError, ValueError):
remote_name = None
if remote_name is not None:
try:
remote_key = ConnectRegistry(remote_name, HKEY_CURRENT_USER)
except EnvironmentError, exc:
print "Could not connect to the remote machine -", exc.strerror
remote_key = None
if remote_key is not None:
TestAll(remote_key)
print "Remote registry tests worked"
else:
print "Remote registry calls can be tested using",
print "'test_winreg.py --remote \\\\machine_name'"
| mit | -6,141,855,976,602,919,000 | 6,184,622,652,601,505,000 | 36.068027 | 96 | 0.628923 | false |
JshWright/home-assistant | tests/components/switch/test_command_line.py | 25 | 7031 | """The tests for the Command line switch platform."""
import json
import os
import tempfile
import unittest
from homeassistant.setup import setup_component
from homeassistant.const import STATE_ON, STATE_OFF
import homeassistant.components.switch as switch
import homeassistant.components.switch.command_line as command_line
from tests.common import get_test_home_assistant
# pylint: disable=invalid-name
class TestCommandSwitch(unittest.TestCase):
"""Test the command switch."""
def setUp(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self):
"""Stop everything that was started."""
self.hass.stop()
def test_state_none(self):
"""Test with none state."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, 'switch_status')
test_switch = {
'command_on': 'echo 1 > {}'.format(path),
'command_off': 'echo 0 > {}'.format(path),
}
self.assertTrue(setup_component(self.hass, switch.DOMAIN, {
'switch': {
'platform': 'command_line',
'switches': {
'test': test_switch
}
}
}))
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
switch.turn_on(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
switch.turn_off(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
def test_state_value(self):
"""Test with state value."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, 'switch_status')
test_switch = {
'command_state': 'cat {}'.format(path),
'command_on': 'echo 1 > {}'.format(path),
'command_off': 'echo 0 > {}'.format(path),
'value_template': '{{ value=="1" }}'
}
self.assertTrue(setup_component(self.hass, switch.DOMAIN, {
'switch': {
'platform': 'command_line',
'switches': {
'test': test_switch
}
}
}))
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
switch.turn_on(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
switch.turn_off(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
def test_state_json_value(self):
"""Test with state JSON value."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, 'switch_status')
oncmd = json.dumps({'status': 'ok'})
offcmd = json.dumps({'status': 'nope'})
test_switch = {
'command_state': 'cat {}'.format(path),
'command_on': 'echo \'{}\' > {}'.format(oncmd, path),
'command_off': 'echo \'{}\' > {}'.format(offcmd, path),
'value_template': '{{ value_json.status=="ok" }}'
}
self.assertTrue(setup_component(self.hass, switch.DOMAIN, {
'switch': {
'platform': 'command_line',
'switches': {
'test': test_switch
}
}
}))
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
switch.turn_on(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
switch.turn_off(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
def test_state_code(self):
"""Test with state code."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, 'switch_status')
test_switch = {
'command_state': 'cat {}'.format(path),
'command_on': 'echo 1 > {}'.format(path),
'command_off': 'echo 0 > {}'.format(path),
}
self.assertTrue(setup_component(self.hass, switch.DOMAIN, {
'switch': {
'platform': 'command_line',
'switches': {
'test': test_switch
}
}
}))
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
switch.turn_on(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
switch.turn_off(self.hass, 'switch.test')
self.hass.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
def test_assumed_state_should_be_true_if_command_state_is_none(self):
"""Test with state value."""
# args: hass, device_name, friendly_name, command_on, command_off,
# command_state, value_template
init_args = [
self.hass,
"test_device_name",
"Test friendly name!",
"echo 'on command'",
"echo 'off command'",
None,
None,
]
no_state_device = command_line.CommandSwitch(*init_args)
self.assertTrue(no_state_device.assumed_state)
# Set state command
init_args[-2] = 'cat {}'
state_device = command_line.CommandSwitch(*init_args)
self.assertFalse(state_device.assumed_state)
def test_entity_id_set_correctly(self):
"""Test that entity_id is set correctly from object_id."""
init_args = [
self.hass,
"test_device_name",
"Test friendly name!",
"echo 'on command'",
"echo 'off command'",
False,
None,
]
test_switch = command_line.CommandSwitch(*init_args)
self.assertEqual(test_switch.entity_id, 'switch.test_device_name')
self.assertEqual(test_switch.name, 'Test friendly name!')
| apache-2.0 | -5,703,389,944,818,872,000 | 6,262,159,361,678,940,000 | 34.331658 | 74 | 0.526952 | false |
cloudfoundry/php-buildpack-legacy | builds/runtimes/python-2.7.6/lib/python2.7/rexec.py | 228 | 20148 | """Restricted execution facilities.
The class RExec exports methods r_exec(), r_eval(), r_execfile(), and
r_import(), which correspond roughly to the built-in operations
exec, eval(), execfile() and import, but executing the code in an
environment that only exposes those built-in operations that are
deemed safe. To this end, a modest collection of 'fake' modules is
created which mimics the standard modules by the same names. It is a
policy decision which built-in modules and operations are made
available; this module provides a reasonable default, but derived
classes can change the policies e.g. by overriding or extending class
variables like ok_builtin_modules or methods like make_sys().
XXX To do:
- r_open should allow writing tmp dir
- r_exec etc. with explicit globals/locals? (Use rexec("exec ... in ...")?)
"""
from warnings import warnpy3k
warnpy3k("the rexec module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
import sys
import __builtin__
import os
import ihooks
import imp
__all__ = ["RExec"]
class FileBase:
ok_file_methods = ('fileno', 'flush', 'isatty', 'read', 'readline',
'readlines', 'seek', 'tell', 'write', 'writelines', 'xreadlines',
'__iter__')
class FileWrapper(FileBase):
# XXX This is just like a Bastion -- should use that!
def __init__(self, f):
for m in self.ok_file_methods:
if not hasattr(self, m) and hasattr(f, m):
setattr(self, m, getattr(f, m))
def close(self):
self.flush()
TEMPLATE = """
def %s(self, *args):
return getattr(self.mod, self.name).%s(*args)
"""
class FileDelegate(FileBase):
def __init__(self, mod, name):
self.mod = mod
self.name = name
for m in FileBase.ok_file_methods + ('close',):
exec TEMPLATE % (m, m)
class RHooks(ihooks.Hooks):
def __init__(self, *args):
# Hacks to support both old and new interfaces:
# old interface was RHooks(rexec[, verbose])
# new interface is RHooks([verbose])
verbose = 0
rexec = None
if args and type(args[-1]) == type(0):
verbose = args[-1]
args = args[:-1]
if args and hasattr(args[0], '__class__'):
rexec = args[0]
args = args[1:]
if args:
raise TypeError, "too many arguments"
ihooks.Hooks.__init__(self, verbose)
self.rexec = rexec
def set_rexec(self, rexec):
# Called by RExec instance to complete initialization
self.rexec = rexec
def get_suffixes(self):
return self.rexec.get_suffixes()
def is_builtin(self, name):
return self.rexec.is_builtin(name)
def init_builtin(self, name):
m = __import__(name)
return self.rexec.copy_except(m, ())
def init_frozen(self, name): raise SystemError, "don't use this"
def load_source(self, *args): raise SystemError, "don't use this"
def load_compiled(self, *args): raise SystemError, "don't use this"
def load_package(self, *args): raise SystemError, "don't use this"
def load_dynamic(self, name, filename, file):
return self.rexec.load_dynamic(name, filename, file)
def add_module(self, name):
return self.rexec.add_module(name)
def modules_dict(self):
return self.rexec.modules
def default_path(self):
return self.rexec.modules['sys'].path
# XXX Backwards compatibility
RModuleLoader = ihooks.FancyModuleLoader
RModuleImporter = ihooks.ModuleImporter
class RExec(ihooks._Verbose):
"""Basic restricted execution framework.
Code executed in this restricted environment will only have access to
modules and functions that are deemed safe; you can subclass RExec to
add or remove capabilities as desired.
The RExec class can prevent code from performing unsafe operations like
reading or writing disk files, or using TCP/IP sockets. However, it does
not protect against code using extremely large amounts of memory or
processor time.
"""
ok_path = tuple(sys.path) # That's a policy decision
ok_builtin_modules = ('audioop', 'array', 'binascii',
'cmath', 'errno', 'imageop',
'marshal', 'math', 'md5', 'operator',
'parser', 'select',
'sha', '_sre', 'strop', 'struct', 'time',
'_weakref')
ok_posix_names = ('error', 'fstat', 'listdir', 'lstat', 'readlink',
'stat', 'times', 'uname', 'getpid', 'getppid',
'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid')
ok_sys_names = ('byteorder', 'copyright', 'exit', 'getdefaultencoding',
'getrefcount', 'hexversion', 'maxint', 'maxunicode',
'platform', 'ps1', 'ps2', 'version', 'version_info')
nok_builtin_names = ('open', 'file', 'reload', '__import__')
ok_file_types = (imp.C_EXTENSION, imp.PY_SOURCE)
def __init__(self, hooks = None, verbose = 0):
"""Returns an instance of the RExec class.
The hooks parameter is an instance of the RHooks class or a subclass
of it. If it is omitted or None, the default RHooks class is
instantiated.
Whenever the RExec module searches for a module (even a built-in one)
or reads a module's code, it doesn't actually go out to the file
system itself. Rather, it calls methods of an RHooks instance that
was passed to or created by its constructor. (Actually, the RExec
object doesn't make these calls --- they are made by a module loader
object that's part of the RExec object. This allows another level of
flexibility, which can be useful when changing the mechanics of
import within the restricted environment.)
By providing an alternate RHooks object, we can control the file
system accesses made to import a module, without changing the
actual algorithm that controls the order in which those accesses are
made. For instance, we could substitute an RHooks object that
passes all filesystem requests to a file server elsewhere, via some
RPC mechanism such as ILU. Grail's applet loader uses this to support
importing applets from a URL for a directory.
If the verbose parameter is true, additional debugging output may be
sent to standard output.
"""
raise RuntimeError, "This code is not secure in Python 2.2 and later"
ihooks._Verbose.__init__(self, verbose)
# XXX There's a circular reference here:
self.hooks = hooks or RHooks(verbose)
self.hooks.set_rexec(self)
self.modules = {}
self.ok_dynamic_modules = self.ok_builtin_modules
list = []
for mname in self.ok_builtin_modules:
if mname in sys.builtin_module_names:
list.append(mname)
self.ok_builtin_modules = tuple(list)
self.set_trusted_path()
self.make_builtin()
self.make_initial_modules()
# make_sys must be last because it adds the already created
# modules to its builtin_module_names
self.make_sys()
self.loader = RModuleLoader(self.hooks, verbose)
self.importer = RModuleImporter(self.loader, verbose)
def set_trusted_path(self):
# Set the path from which dynamic modules may be loaded.
# Those dynamic modules must also occur in ok_builtin_modules
self.trusted_path = filter(os.path.isabs, sys.path)
def load_dynamic(self, name, filename, file):
if name not in self.ok_dynamic_modules:
raise ImportError, "untrusted dynamic module: %s" % name
if name in sys.modules:
src = sys.modules[name]
else:
src = imp.load_dynamic(name, filename, file)
dst = self.copy_except(src, [])
return dst
def make_initial_modules(self):
self.make_main()
self.make_osname()
# Helpers for RHooks
def get_suffixes(self):
return [item # (suff, mode, type)
for item in imp.get_suffixes()
if item[2] in self.ok_file_types]
def is_builtin(self, mname):
return mname in self.ok_builtin_modules
# The make_* methods create specific built-in modules
def make_builtin(self):
m = self.copy_except(__builtin__, self.nok_builtin_names)
m.__import__ = self.r_import
m.reload = self.r_reload
m.open = m.file = self.r_open
def make_main(self):
self.add_module('__main__')
def make_osname(self):
osname = os.name
src = __import__(osname)
dst = self.copy_only(src, self.ok_posix_names)
dst.environ = e = {}
for key, value in os.environ.items():
e[key] = value
def make_sys(self):
m = self.copy_only(sys, self.ok_sys_names)
m.modules = self.modules
m.argv = ['RESTRICTED']
m.path = map(None, self.ok_path)
m.exc_info = self.r_exc_info
m = self.modules['sys']
l = self.modules.keys() + list(self.ok_builtin_modules)
l.sort()
m.builtin_module_names = tuple(l)
# The copy_* methods copy existing modules with some changes
def copy_except(self, src, exceptions):
dst = self.copy_none(src)
for name in dir(src):
setattr(dst, name, getattr(src, name))
for name in exceptions:
try:
delattr(dst, name)
except AttributeError:
pass
return dst
def copy_only(self, src, names):
dst = self.copy_none(src)
for name in names:
try:
value = getattr(src, name)
except AttributeError:
continue
setattr(dst, name, value)
return dst
def copy_none(self, src):
m = self.add_module(src.__name__)
m.__doc__ = src.__doc__
return m
# Add a module -- return an existing module or create one
def add_module(self, mname):
m = self.modules.get(mname)
if m is None:
self.modules[mname] = m = self.hooks.new_module(mname)
m.__builtins__ = self.modules['__builtin__']
return m
# The r* methods are public interfaces
def r_exec(self, code):
"""Execute code within a restricted environment.
The code parameter must either be a string containing one or more
lines of Python code, or a compiled code object, which will be
executed in the restricted environment's __main__ module.
"""
m = self.add_module('__main__')
exec code in m.__dict__
def r_eval(self, code):
"""Evaluate code within a restricted environment.
The code parameter must either be a string containing a Python
expression, or a compiled code object, which will be evaluated in
the restricted environment's __main__ module. The value of the
expression or code object will be returned.
"""
m = self.add_module('__main__')
return eval(code, m.__dict__)
def r_execfile(self, file):
"""Execute the Python code in the file in the restricted
environment's __main__ module.
"""
m = self.add_module('__main__')
execfile(file, m.__dict__)
def r_import(self, mname, globals={}, locals={}, fromlist=[]):
"""Import a module, raising an ImportError exception if the module
is considered unsafe.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
return self.importer.import_module(mname, globals, locals, fromlist)
def r_reload(self, m):
"""Reload the module object, re-parsing and re-initializing it.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
return self.importer.reload(m)
def r_unload(self, m):
"""Unload the module.
Removes it from the restricted environment's sys.modules dictionary.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
return self.importer.unload(m)
# The s_* methods are similar but also swap std{in,out,err}
def make_delegate_files(self):
s = self.modules['sys']
self.delegate_stdin = FileDelegate(s, 'stdin')
self.delegate_stdout = FileDelegate(s, 'stdout')
self.delegate_stderr = FileDelegate(s, 'stderr')
self.restricted_stdin = FileWrapper(sys.stdin)
self.restricted_stdout = FileWrapper(sys.stdout)
self.restricted_stderr = FileWrapper(sys.stderr)
def set_files(self):
if not hasattr(self, 'save_stdin'):
self.save_files()
if not hasattr(self, 'delegate_stdin'):
self.make_delegate_files()
s = self.modules['sys']
s.stdin = self.restricted_stdin
s.stdout = self.restricted_stdout
s.stderr = self.restricted_stderr
sys.stdin = self.delegate_stdin
sys.stdout = self.delegate_stdout
sys.stderr = self.delegate_stderr
def reset_files(self):
self.restore_files()
s = self.modules['sys']
self.restricted_stdin = s.stdin
self.restricted_stdout = s.stdout
self.restricted_stderr = s.stderr
def save_files(self):
self.save_stdin = sys.stdin
self.save_stdout = sys.stdout
self.save_stderr = sys.stderr
def restore_files(self):
sys.stdin = self.save_stdin
sys.stdout = self.save_stdout
sys.stderr = self.save_stderr
def s_apply(self, func, args=(), kw={}):
self.save_files()
try:
self.set_files()
r = func(*args, **kw)
finally:
self.restore_files()
return r
def s_exec(self, *args):
"""Execute code within a restricted environment.
Similar to the r_exec() method, but the code will be granted access
to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
The code parameter must either be a string containing one or more
lines of Python code, or a compiled code object, which will be
executed in the restricted environment's __main__ module.
"""
return self.s_apply(self.r_exec, args)
def s_eval(self, *args):
"""Evaluate code within a restricted environment.
Similar to the r_eval() method, but the code will be granted access
to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
The code parameter must either be a string containing a Python
expression, or a compiled code object, which will be evaluated in
the restricted environment's __main__ module. The value of the
expression or code object will be returned.
"""
return self.s_apply(self.r_eval, args)
def s_execfile(self, *args):
"""Execute the Python code in the file in the restricted
environment's __main__ module.
Similar to the r_execfile() method, but the code will be granted
access to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
"""
return self.s_apply(self.r_execfile, args)
def s_import(self, *args):
"""Import a module, raising an ImportError exception if the module
is considered unsafe.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
Similar to the r_import() method, but has access to restricted
versions of the standard I/O streams sys.stdin, sys.stderr, and
sys.stdout.
"""
return self.s_apply(self.r_import, args)
def s_reload(self, *args):
"""Reload the module object, re-parsing and re-initializing it.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
Similar to the r_reload() method, but has access to restricted
versions of the standard I/O streams sys.stdin, sys.stderr, and
sys.stdout.
"""
return self.s_apply(self.r_reload, args)
def s_unload(self, *args):
"""Unload the module.
Removes it from the restricted environment's sys.modules dictionary.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
Similar to the r_unload() method, but has access to restricted
versions of the standard I/O streams sys.stdin, sys.stderr, and
sys.stdout.
"""
return self.s_apply(self.r_unload, args)
# Restricted open(...)
def r_open(self, file, mode='r', buf=-1):
"""Method called when open() is called in the restricted environment.
The arguments are identical to those of the open() function, and a
file object (or a class instance compatible with file objects)
should be returned. RExec's default behaviour is allow opening
any file for reading, but forbidding any attempt to write a file.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
mode = str(mode)
if mode not in ('r', 'rb'):
raise IOError, "can't open files for writing in restricted mode"
return open(file, mode, buf)
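    # A sketch of relaxing this policy along the lines of the module-level TODO
    # ("r_open should allow writing tmp dir"); subclass name and path check are
    # illustrative only:
    #
    #   class TmpWritingRExec(RExec):
    #       def r_open(self, file, mode='r', buf=-1):
    #           import tempfile
    #           if str(file).startswith(tempfile.gettempdir() + os.sep):
    #               return open(file, mode, buf)  # any mode inside the tmp dir
    #           return RExec.r_open(self, file, mode, buf)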
# Restricted version of sys.exc_info()
def r_exc_info(self):
ty, va, tr = sys.exc_info()
tr = None
return ty, va, tr
def test():
import getopt, traceback
opts, args = getopt.getopt(sys.argv[1:], 'vt:')
verbose = 0
trusted = []
for o, a in opts:
if o == '-v':
verbose = verbose+1
if o == '-t':
trusted.append(a)
r = RExec(verbose=verbose)
if trusted:
r.ok_builtin_modules = r.ok_builtin_modules + tuple(trusted)
if args:
r.modules['sys'].argv = args
r.modules['sys'].path.insert(0, os.path.dirname(args[0]))
else:
r.modules['sys'].path.insert(0, "")
fp = sys.stdin
if args and args[0] != '-':
try:
fp = open(args[0])
except IOError, msg:
print "%s: can't open file %r" % (sys.argv[0], args[0])
return 1
if fp.isatty():
try:
import readline
except ImportError:
pass
import code
class RestrictedConsole(code.InteractiveConsole):
def runcode(self, co):
self.locals['__builtins__'] = r.modules['__builtin__']
r.s_apply(code.InteractiveConsole.runcode, (self, co))
try:
RestrictedConsole(r.modules['__main__'].__dict__).interact()
except SystemExit, n:
return n
else:
text = fp.read()
fp.close()
c = compile(text, fp.name, 'exec')
try:
r.s_exec(c)
except SystemExit, n:
return n
except:
traceback.print_exc()
return 1
if __name__ == '__main__':
sys.exit(test())
| mit | 8,994,966,638,179,122,000 | 5,306,098,846,265,378,000 | 33.265306 | 78 | 0.612418 | false |
c0hen/django-venv | lib/python3.4/site-packages/django/contrib/admin/helpers.py | 27 | 15048 | from __future__ import unicode_literals
import json
import warnings
from django import forms
from django.conf import settings
from django.contrib.admin.utils import (
display_for_field, flatten_fieldsets, help_text_for_field, label_for_field,
lookup_field,
)
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.fields.related import ManyToManyRel
from django.forms.utils import flatatt
from django.template.defaultfilters import capfirst, linebreaksbr
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text, smart_text
from django.utils.html import conditional_escape, format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext, ugettext_lazy as _
ACTION_CHECKBOX_NAME = '_selected_action'
class ActionForm(forms.Form):
action = forms.ChoiceField(label=_('Action:'))
select_across = forms.BooleanField(
label='',
required=False,
initial=0,
widget=forms.HiddenInput({'class': 'select-across'}),
)
checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False)
class AdminForm(object):
def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None):
self.form, self.fieldsets = form, fieldsets
self.prepopulated_fields = [{
'field': form[field_name],
'dependencies': [form[f] for f in dependencies]
} for field_name, dependencies in prepopulated_fields.items()]
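        # For illustration, prepopulated_fields={'slug': ('title',)} becomes
        # [{'field': form['slug'], 'dependencies': [form['title']]}].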
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
def __iter__(self):
for name, options in self.fieldsets:
yield Fieldset(
self.form, name,
readonly_fields=self.readonly_fields,
model_admin=self.model_admin,
**options
)
def _media(self):
media = self.form.media
for fs in self:
media = media + fs.media
return media
media = property(_media)
class Fieldset(object):
def __init__(self, form, name=None, readonly_fields=(), fields=(), classes=(),
description=None, model_admin=None):
self.form = form
self.name, self.fields = name, fields
self.classes = ' '.join(classes)
self.description = description
self.model_admin = model_admin
self.readonly_fields = readonly_fields
def _media(self):
if 'collapse' in self.classes:
extra = '' if settings.DEBUG else '.min'
js = [
'vendor/jquery/jquery%s.js' % extra,
'jquery.init.js',
'collapse%s.js' % extra,
]
return forms.Media(js=['admin/js/%s' % url for url in js])
return forms.Media()
media = property(_media)
def __iter__(self):
for field in self.fields:
yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)
class Fieldline(object):
def __init__(self, form, field, readonly_fields=None, model_admin=None):
self.form = form # A django.forms.Form instance
if not hasattr(field, "__iter__") or isinstance(field, six.text_type):
self.fields = [field]
else:
self.fields = field
self.has_visible_field = not all(
field in self.form.fields and self.form.fields[field].widget.is_hidden
for field in self.fields
)
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
def __iter__(self):
for i, field in enumerate(self.fields):
if field in self.readonly_fields:
yield AdminReadonlyField(self.form, field, is_first=(i == 0), model_admin=self.model_admin)
else:
yield AdminField(self.form, field, is_first=(i == 0))
def errors(self):
return mark_safe(
'\n'.join(
self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields
).strip('\n')
)
class AdminField(object):
def __init__(self, form, field, is_first):
self.field = form[field] # A django.forms.BoundField instance
self.is_first = is_first # Whether this field is first on the line
self.is_checkbox = isinstance(self.field.field.widget, forms.CheckboxInput)
self.is_readonly = False
def label_tag(self):
classes = []
contents = conditional_escape(force_text(self.field.label))
if self.is_checkbox:
classes.append('vCheckboxLabel')
if self.field.field.required:
classes.append('required')
if not self.is_first:
classes.append('inline')
attrs = {'class': ' '.join(classes)} if classes else {}
# checkboxes should not have a label suffix as the checkbox appears
# to the left of the label.
return self.field.label_tag(
contents=mark_safe(contents), attrs=attrs,
label_suffix='' if self.is_checkbox else None,
)
def errors(self):
return mark_safe(self.field.errors.as_ul())
class AdminReadonlyField(object):
def __init__(self, form, field, is_first, model_admin=None):
# Make self.field look a little bit like a field. This means that
# {{ field.name }} must be a useful class name to identify the field.
# For convenience, store other field-related data here too.
if callable(field):
class_name = field.__name__ if field.__name__ != '<lambda>' else ''
else:
class_name = field
if form._meta.labels and class_name in form._meta.labels:
label = form._meta.labels[class_name]
else:
label = label_for_field(field, form._meta.model, model_admin)
if form._meta.help_texts and class_name in form._meta.help_texts:
help_text = form._meta.help_texts[class_name]
else:
help_text = help_text_for_field(class_name, form._meta.model)
self.field = {
'name': class_name,
'label': label,
'help_text': help_text,
'field': field,
}
self.form = form
self.model_admin = model_admin
self.is_first = is_first
self.is_checkbox = False
self.is_readonly = True
self.empty_value_display = model_admin.get_empty_value_display()
def label_tag(self):
attrs = {}
if not self.is_first:
attrs["class"] = "inline"
label = self.field['label']
return format_html('<label{}>{}:</label>',
flatatt(attrs),
capfirst(force_text(label)))
def contents(self):
from django.contrib.admin.templatetags.admin_list import _boolean_icon
field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin
try:
f, attr, value = lookup_field(field, obj, model_admin)
except (AttributeError, ValueError, ObjectDoesNotExist):
result_repr = self.empty_value_display
else:
if f is None:
boolean = getattr(attr, "boolean", False)
if boolean:
result_repr = _boolean_icon(value)
else:
if hasattr(value, "__html__"):
result_repr = value
else:
result_repr = smart_text(value)
if getattr(attr, "allow_tags", False):
warnings.warn(
"Deprecated allow_tags attribute used on %s. "
"Use django.utils.html.format_html(), format_html_join(), "
"or django.utils.safestring.mark_safe() instead." % attr,
RemovedInDjango20Warning
)
result_repr = mark_safe(value)
else:
result_repr = linebreaksbr(result_repr)
else:
if isinstance(f.remote_field, ManyToManyRel) and value is not None:
result_repr = ", ".join(map(six.text_type, value.all()))
else:
result_repr = display_for_field(value, f, self.empty_value_display)
result_repr = linebreaksbr(result_repr)
return conditional_escape(result_repr)
class InlineAdminFormSet(object):
"""
A wrapper around an inline formset for use in the admin system.
"""
def __init__(self, inline, formset, fieldsets, prepopulated_fields=None,
readonly_fields=None, model_admin=None):
self.opts = inline
self.formset = formset
self.fieldsets = fieldsets
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
if prepopulated_fields is None:
prepopulated_fields = {}
self.prepopulated_fields = prepopulated_fields
self.classes = ' '.join(inline.classes) if inline.classes else ''
def __iter__(self):
for form, original in zip(self.formset.initial_forms, self.formset.get_queryset()):
view_on_site_url = self.opts.get_view_on_site_url(original)
yield InlineAdminForm(
self.formset, form, self.fieldsets, self.prepopulated_fields,
original, self.readonly_fields, model_admin=self.opts,
view_on_site_url=view_on_site_url,
)
for form in self.formset.extra_forms:
yield InlineAdminForm(
self.formset, form, self.fieldsets, self.prepopulated_fields,
None, self.readonly_fields, model_admin=self.opts,
)
yield InlineAdminForm(
self.formset, self.formset.empty_form,
self.fieldsets, self.prepopulated_fields, None,
self.readonly_fields, model_admin=self.opts,
)
def fields(self):
fk = getattr(self.formset, "fk", None)
for i, field_name in enumerate(flatten_fieldsets(self.fieldsets)):
if fk and fk.name == field_name:
continue
if field_name in self.readonly_fields:
yield {
'label': label_for_field(field_name, self.opts.model, self.opts),
'widget': {'is_hidden': False},
'required': False,
'help_text': help_text_for_field(field_name, self.opts.model),
}
else:
form_field = self.formset.empty_form.fields[field_name]
label = form_field.label
if label is None:
label = label_for_field(field_name, self.opts.model, self.opts)
yield {
'label': label,
'widget': form_field.widget,
'required': form_field.required,
'help_text': form_field.help_text,
}
def inline_formset_data(self):
verbose_name = self.opts.verbose_name
return json.dumps({
'name': '#%s' % self.formset.prefix,
'options': {
'prefix': self.formset.prefix,
'addText': ugettext('Add another %(verbose_name)s') % {
'verbose_name': capfirst(verbose_name),
},
'deleteText': ugettext('Remove'),
}
})
def _media(self):
media = self.opts.media + self.formset.media
for fs in self:
media = media + fs.media
return media
media = property(_media)
class InlineAdminForm(AdminForm):
"""
A wrapper around an inline form for use in the admin system.
"""
def __init__(self, formset, form, fieldsets, prepopulated_fields, original,
readonly_fields=None, model_admin=None, view_on_site_url=None):
self.formset = formset
self.model_admin = model_admin
self.original = original
self.show_url = original and view_on_site_url is not None
self.absolute_url = view_on_site_url
super(InlineAdminForm, self).__init__(form, fieldsets, prepopulated_fields, readonly_fields, model_admin)
def __iter__(self):
for name, options in self.fieldsets:
yield InlineFieldset(
self.formset, self.form, name, self.readonly_fields,
model_admin=self.model_admin, **options
)
def needs_explicit_pk_field(self):
# Auto fields are editable (oddly), so need to check for auto or non-editable pk
if self.form._meta.model._meta.has_auto_field or not self.form._meta.model._meta.pk.editable:
return True
# Also search any parents for an auto field. (The pk info is propagated to child
# models so that does not need to be checked in parents.)
for parent in self.form._meta.model._meta.get_parent_list():
if parent._meta.has_auto_field:
return True
return False
def pk_field(self):
return AdminField(self.form, self.formset._pk_field.name, False)
def fk_field(self):
fk = getattr(self.formset, "fk", None)
if fk:
return AdminField(self.form, fk.name, False)
else:
return ""
def deletion_field(self):
from django.forms.formsets import DELETION_FIELD_NAME
return AdminField(self.form, DELETION_FIELD_NAME, False)
def ordering_field(self):
from django.forms.formsets import ORDERING_FIELD_NAME
return AdminField(self.form, ORDERING_FIELD_NAME, False)
class InlineFieldset(Fieldset):
def __init__(self, formset, *args, **kwargs):
self.formset = formset
super(InlineFieldset, self).__init__(*args, **kwargs)
def __iter__(self):
fk = getattr(self.formset, "fk", None)
for field in self.fields:
if fk and fk.name == field:
continue
yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)
class AdminErrorList(forms.utils.ErrorList):
"""
Stores all errors for the form/formsets in an add/change stage view.
"""
def __init__(self, form, inline_formsets):
super(AdminErrorList, self).__init__()
if form.is_bound:
self.extend(form.errors.values())
for inline_formset in inline_formsets:
self.extend(inline_formset.non_form_errors())
for errors_in_inline_form in inline_formset.errors:
self.extend(errors_in_inline_form.values())
| gpl-3.0 | -6,127,885,314,637,526,000 | -1,176,436,604,434,012,000 | 37.683805 | 113 | 0.57908 | false |
hoangt/gem5v | src/mem/ruby/network/garnet/fixed-pipeline/GarnetLink_d.py | 18 | 3743 | # Copyright (c) 2008 Princeton University
# Copyright (c) 2009 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
# Brad Beckmann
from m5.params import *
from m5.proxy import *
from m5.SimObject import SimObject
from BasicLink import BasicIntLink, BasicExtLink
class NetworkLink_d(SimObject):
type = 'NetworkLink_d'
link_id = Param.Int(Parent.link_id, "link id")
link_latency = Param.Int(Parent.latency, "link latency")
vcs_per_vnet = Param.Int(Parent.vcs_per_vnet,
"virtual channels per virtual network")
virt_nets = Param.Int(Parent.number_of_virtual_networks,
"number of virtual networks")
channel_width = Param.Int(Parent.bandwidth_factor,
"channel width == bw factor")
class CreditLink_d(NetworkLink_d):
type = 'CreditLink_d'
# Interior fixed pipeline links between routers
class GarnetIntLink_d(BasicIntLink):
type = 'GarnetIntLink_d'
    # The detailed fixed pipeline bi-directional link includes two main
# forward links and two backward flow-control links, one per direction
nls = []
# In uni-directional link
nls.append(NetworkLink_d());
# Out uni-directional link
nls.append(NetworkLink_d());
network_links = VectorParam.NetworkLink_d(nls, "forward links")
cls = []
# In uni-directional link
cls.append(CreditLink_d());
# Out uni-directional link
cls.append(CreditLink_d());
credit_links = VectorParam.CreditLink_d(cls, "backward flow-control links")
# Exterior fixed pipeline links between a router and a controller
class GarnetExtLink_d(BasicExtLink):
type = 'GarnetExtLink_d'
    # The detailed fixed pipeline bi-directional link includes two main
# forward links and two backward flow-control links, one per direction
nls = []
# In uni-directional link
nls.append(NetworkLink_d());
# Out uni-directional link
nls.append(NetworkLink_d());
network_links = VectorParam.NetworkLink_d(nls, "forward links")
cls = []
# In uni-directional link
cls.append(CreditLink_d());
# Out uni-directional link
cls.append(CreditLink_d());
credit_links = VectorParam.CreditLink_d(cls, "backward flow-control links")
| bsd-3-clause | 5,932,831,953,198,550,000 | 7,583,608,332,297,409,000 | 42.523256 | 79 | 0.728293 | false |
cryptickp/troposphere | examples/CloudFront_S3.py | 22 | 1622 | # Converted from CloudFront_S3.template located at:
# http://aws.amazon.com/cloudformation/aws-cloudformation-templates/
from troposphere import GetAtt, Join, Output
from troposphere import Parameter, Ref, Template
from troposphere.cloudfront import Distribution, DistributionConfig
from troposphere.cloudfront import Origin, DefaultCacheBehavior
from troposphere.cloudfront import ForwardedValues
t = Template()
t.add_description(
"AWS CloudFormation Sample Template CloudFront_S3: Sample template "
"showing how to create an Amazon CloudFront distribution using an "
"S3 origin. "
"**WARNING** This template creates a CloudFront distribution. "
"You will be billed for the AWS resources used if you create "
"a stack from this template.")
s3dnsname = t.add_parameter(Parameter(
"S3DNSNAme",
Description="The DNS name of an existing S3 bucket to use as the "
"Cloudfront distribution origin",
Type="String",
))
myDistribution = t.add_resource(Distribution(
"myDistribution",
DistributionConfig=DistributionConfig(
Origins=[Origin(Id="Origin 1", DomainName=Ref(s3dnsname))],
DefaultCacheBehavior=DefaultCacheBehavior(
TargetOriginId="Origin 1",
ForwardedValues=ForwardedValues(
QueryString=False
),
ViewerProtocolPolicy="allow-all"),
Enabled=True
)
))
t.add_output([
Output("DistributionId", Value=Ref(myDistribution)),
Output(
"DistributionName",
Value=Join("", ["http://", GetAtt(myDistribution, "DomainName")])),
])
print(t.to_json())
| bsd-2-clause | 730,249,534,173,315,800 | 6,418,197,840,705,816,000 | 32.102041 | 75 | 0.702219 | false |
nelmiux/CarnotKE | jyhton/Lib/test/clamp.py | 12 | 2254 | import java
import os
import os.path
from java.lang.reflect import Modifier
from org.python.util import CodegenUtils
from org.python.compiler import CustomMaker, ProxyCodeHelpers
__all__ = ["PackageProxy", "SerializableProxies"]
class SerializableProxies(CustomMaker):
# NOTE: SerializableProxies is itself a java proxy, but it's not a custom one!
serialized_path = None
def doConstants(self):
self.classfile.addField("serialVersionUID",
CodegenUtils.ci(java.lang.Long.TYPE), Modifier.PUBLIC | Modifier.STATIC | Modifier.FINAL)
code = self.classfile.addMethod("<clinit>", ProxyCodeHelpers.makeSig("V"), Modifier.STATIC)
code.visitLdcInsn(java.lang.Long(1))
code.putstatic(self.classfile.name, "serialVersionUID", CodegenUtils.ci(java.lang.Long.TYPE))
code.return_()
def saveBytes(self, bytes):
if self.serialized_path:
path = os.path.join(self.serialized_path, os.path.join(*self.myClass.split(".")) + ".class")
parent = os.path.dirname(path)
try:
os.makedirs(parent)
except OSError:
pass # Directory exists
with open(path, "wb") as f:
f.write(bytes.toByteArray())
def makeClass(self):
try:
# If already defined on CLASSPATH, simply return this class
cls = java.lang.Class.forName(self.myClass)
print "Class defined on CLASSPATH", cls
except:
# Otherwise build it
cls = CustomMaker.makeClass(self)
return cls
class PackageProxy(object):
def __init__(self, package):
self.package = package
def __call__(self, superclass, interfaces, className, pythonModuleName, fullProxyName, mapping):
"""Constructs a usable proxy name that does not depend on ordering"""
if "." in pythonModuleName:
            # work around being called differently from regrtest (test.module instead of module)
pythonModuleName = pythonModuleName.split(".")[-1]
return SerializableProxies(superclass, interfaces, className, pythonModuleName, self.package + "." + pythonModuleName + "." + className, mapping)
| apache-2.0 | -4,432,527,701,405,570,000 | -7,694,282,089,465,254,000 | 36.566667 | 153 | 0.643744 | false |
minifirocks/nifi-minifi-cpp | thirdparty/rocksdb/buckifier/targets_cfg.py | 6 | 3002 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
rocksdb_target_header = """
import os
TARGETS_PATH = os.path.dirname(__file__)
REPO_PATH = "rocksdb/src/"
BUCK_BINS = "buck-out/gen/" + REPO_PATH
TEST_RUNNER = REPO_PATH + "buckifier/rocks_test_runner.sh"
rocksdb_compiler_flags = [
"-fno-builtin-memcmp",
"-DROCKSDB_PLATFORM_POSIX",
"-DROCKSDB_LIB_IO_POSIX",
"-DROCKSDB_FALLOCATE_PRESENT",
"-DROCKSDB_MALLOC_USABLE_SIZE",
"-DROCKSDB_RANGESYNC_PRESENT",
"-DROCKSDB_SCHED_GETCPU_PRESENT",
"-DROCKSDB_SUPPORT_THREAD_LOCAL",
"-DOS_LINUX",
# Flags to enable libs we include
"-DSNAPPY",
"-DZLIB",
"-DBZIP2",
"-DLZ4",
"-DZSTD",
"-DGFLAGS=gflags",
"-DNUMA",
"-DTBB",
# Needed to compile in fbcode
"-Wno-expansion-to-defined",
]
rocksdb_external_deps = [
('bzip2', None, 'bz2'),
('snappy', None, "snappy"),
('zlib', None, 'z'),
('gflags', None, 'gflags'),
('lz4', None, 'lz4'),
('zstd', None),
('tbb', None),
("numa", None, "numa"),
("googletest", None, "gtest"),
]
rocksdb_preprocessor_flags = [
# Directories with files for #include
"-I" + REPO_PATH + "include/",
"-I" + REPO_PATH,
]
rocksdb_arch_preprocessor_flags = {
"x86_64": ["-DHAVE_SSE42"],
}
"""
library_template = """
cpp_library(
name = "%s",
headers = %s,
srcs = [%s],
deps = [%s],
preprocessor_flags = rocksdb_preprocessor_flags,
arch_preprocessor_flags = rocksdb_arch_preprocessor_flags,
compiler_flags = rocksdb_compiler_flags,
external_deps = rocksdb_external_deps,
)
"""
binary_template = """
cpp_binary(
name = "%s",
srcs = [%s],
deps = [%s],
preprocessor_flags = rocksdb_preprocessor_flags,
arch_preprocessor_flags = rocksdb_arch_preprocessor_flags,
compiler_flags = rocksdb_compiler_flags,
external_deps = rocksdb_external_deps,
)
"""
unittests_template = """
# [test_name, test_src, test_type]
ROCKS_TESTS = %s
# Generate a test rule for each entry in ROCKS_TESTS
for test_cfg in ROCKS_TESTS:
test_name = test_cfg[0]
test_cc = test_cfg[1]
ttype = "gtest" if test_cfg[2] == "parallel" else "simple"
test_bin = test_name + "_bin"
cpp_binary (
name = test_bin,
srcs = [test_cc],
deps = [":rocksdb_test_lib"],
preprocessor_flags = rocksdb_preprocessor_flags,
arch_preprocessor_flags = rocksdb_arch_preprocessor_flags,
compiler_flags = rocksdb_compiler_flags,
external_deps = rocksdb_external_deps,
)
custom_unittest(
name = test_name,
type = ttype,
deps = [":" + test_bin],
command = [TEST_RUNNER, BUCK_BINS + test_bin]
)
custom_unittest(
name = "make_rocksdbjavastatic",
type = "simple",
command = ["internal_repo_rocksdb/make_rocksdbjavastatic.sh"],
)
custom_unittest(
name = "make_rocksdb_lite_release",
type = "simple",
command = ["internal_repo_rocksdb/make_rocksdb_lite_release.sh"],
)
"""
| apache-2.0 | 2,273,778,098,954,722,800 | 2,177,972,895,294,113,500 | 23.209677 | 69 | 0.634577 | false |
YufeiZhang/Principles-of-Programming-Python-3 | Lectures/Lecture_6/k_means_clustering.py | 1 | 10239 | # Written by Eric Martin for COMP9021
import tkinter as tk
import tkinter.messagebox
class KMeansClustering(tk.Tk):
def __init__(self):
tk.Tk.__init__(self)
self.title('k-means clustering')
menubar = tk.Menu()
help_menu = tk.Menu(menubar)
menubar.add_cascade(label = 'k-means Clustering Help', menu = help_menu)
help_menu.add_command(label = 'Principle', command = self.principle_help)
help_menu.add_command(label = 'Clearing', command = self.clearing_help)
help_menu.add_command(label = 'Creating points and initial centroids',
command = self.creating_points_and_initial_centroids_help)
self.config(menu = menubar)
self.space = Space()
buttons = tk.Frame(bd = 20)
self.configure_space_or_cluster_button = tk.Button(buttons, text = 'Cluster', width = 5,
command = self.configure_space_or_cluster)
self.configure_space_or_cluster_button.pack(padx = 30, side = tk.LEFT)
self.clear_or_iterate_button = tk.Button(buttons, text = 'Clear', width = 5,
command = self.clear_or_iterate)
self.clear_or_iterate_button.pack(padx = 30)
buttons.pack()
self.space.pack()
self.clustering = False
def principle_help(self):
tkinter.messagebox.showinfo('Principle',
'k, a positive integer which here can only be at most equal to 6, represents '
'the number of clusters to be created.\n\n'
'After the user has created a number of (round) points, the button displaying "Cluster" '
'can be clicked, and then the user can create k (square) points, or "centroids", '
'displayed in different colors.\n'
'Clicking the button displaying "Iterate" gives each point the colour of the closest '
'centroid, making that point a member of the cluster associated with that colour.\n\n'
'The centre of gravity of each cluster then becomes the new centroid. '
'The same computation can be done again by clicking the button displaying "Iterate", '
'until the clusters do not change any more, in which case the button labels change and '
'the user is in a position to run another experiment.\n\n'
'The user can also click the button displaying "Stop" to get back to that position, and '
'change her mind by clicking again on the button displaying "Cluster".')
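    # Illustrative sketch (not used by the GUI): one assignment + update step
    # of k-means on plain 2-D tuples, mirroring the behaviour described in the
    # help text above and implemented on canvas points in Space.iterate().
    @staticmethod
    def _kmeans_step_sketch(points, centroids):
        clusters = {c: [] for c in centroids}
        for p in points:
            closest = min(centroids,
                          key=lambda c: (p[0] - c[0]) ** 2 + (p[1] - c[1]) ** 2)
            clusters[closest].append(p)
        # New centroid = centre of gravity of its cluster; an empty cluster
        # keeps its previous centroid.
        return [(sum(x for x, _ in ps) / len(ps),
                 sum(y for _, y in ps) / len(ps)) if ps else c
                for c, ps in clusters.items()]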
def clearing_help(self):
tkinter.messagebox.showinfo('Clearing',
'In case centroids are displayed, clicking the "Clear" button deletes the centroids, and '
'if the points are coloured because they have been clustered, then they lose their '
'colour.\n\n'
'In case no centroid is displayed, possibly because the "Clear" button has just been '
'clicked, then clicking the "Clear" button deletes all points.')
def creating_points_and_initial_centroids_help(self):
tkinter.messagebox.showinfo('Creating points and initial centroids',
'Points and initial centroids are created simply by clicking in the grey area.\n'
'Clicking on an existing point or initial centroid deletes it.\n'
'No point or centroid is created when it is too close to an existing point or centroid, '
'respectively.\n\n'
'There can be at most 6 centroids. Trying to create more will have no effect.')
def configure_space_or_cluster(self):
if self.clustering:
self.configure_space_or_cluster_button.config(text = 'Cluster')
self.clear_or_iterate_button.config(text = 'Clear')
self.clustering = False
self.space.clustering = False
self.space.nb_of_clusters = 0
else:
self.configure_space_or_cluster_button.config(text = 'Stop')
self.clear_or_iterate_button.config(text = 'Iterate')
self.clustering = True
self.space.clustering = True
def clear_or_iterate(self):
if self.clustering:
if not self.space.iterate():
self.configure_space_or_cluster()
else:
self.space.clear()
class Space(tk.Frame):
space_dim = 600
space_colour = '#F5F5F5'
point_colour = '#808080'
def __init__(self):
tk.Frame.__init__(self, padx = 20, pady = 20)
self.space = tk.Canvas(self, width = self.space_dim, height = self.space_dim, bg = self.space_colour)
self.space.bind('<1>', self.act_on_click)
self.space.pack()
self.points = {}
self.centroids = {}
self.colours = 'red', 'green', 'blue', 'cyan', 'black', 'magenta'
self.available_colours = list(self.colours)
self.clustering = False
def clear(self):
if self.centroids:
for centroid_coordinates in self.centroids:
self.space.itemconfig(self.centroids[centroid_coordinates].drawn_point, fill = '',
outline = '')
self.centroids.clear()
for point_coordinates in self.points:
self.points[point_coordinates].colour = self.point_colour
self.space.itemconfig(self.points[point_coordinates].drawn_point, fill = self.point_colour,
outline = self.point_colour)
self.available_colours = list(self.colours)
else:
for point_coordinates in self.points:
self.space.itemconfig(self.points[point_coordinates].drawn_point, fill = '',
outline = '')
self.points.clear()
def act_on_click(self, event):
x = self.space.canvasx(event.x)
        y = self.space.canvasy(event.y)
if x < 10 or x > self.space_dim - 5 or y < 10 or y > self.space_dim - 5:
return
coordinates = x, y
if self.clustering:
if (self.request_point_otherwise_delete_or_ignore(coordinates, self.centroids, 8) and
self.available_colours):
colour = self.available_colours.pop()
self.centroids[coordinates] = Point(self.draw_centroid(x, y, colour), colour)
else:
if self.request_point_otherwise_delete_or_ignore(coordinates, self.points, 25):
self.points[coordinates] = Point(self.space.create_oval(x - 2, y - 2, x + 2, y + 2,
fill = self.point_colour,
outline = self.point_colour),
self.point_colour)
def request_point_otherwise_delete_or_ignore(self, coordinates, points, size):
for point_coordinates in points:
if self.square_of_distance(coordinates, point_coordinates) < size:
self.space.itemconfig(points[point_coordinates].drawn_point, fill = '', outline = '')
colour = points[point_coordinates].colour
if colour != self.point_colour:
self.available_colours.append(colour)
del points[point_coordinates]
return False
if any(self.square_of_distance(coordinates, point_coordinates) < 4 * size
for point_coordinates in points):
return False
return True
def square_of_distance(self, coordinates_1, coordinates_2):
return (coordinates_1[0] - coordinates_2[0]) ** 2 + (coordinates_1[1] - coordinates_2[1]) ** 2
def iterate(self):
clusters = {centroid_coordinates: [] for centroid_coordinates in self.centroids}
if not clusters:
return
different_clustering = False
for point_coordinates in self.points:
min_square_of_distance = float('inf')
for centroid_coordinates in self.centroids:
square_of_distance = self.square_of_distance(point_coordinates, centroid_coordinates)
if square_of_distance < min_square_of_distance:
min_square_of_distance = square_of_distance
closest_centroid_coordinates = centroid_coordinates
colour = self.centroids[closest_centroid_coordinates].colour
if self.points[point_coordinates].colour != colour:
self.points[point_coordinates].colour = colour
self.space.itemconfig(self.points[point_coordinates].drawn_point, fill = colour,
outline = colour)
different_clustering = True
clusters[closest_centroid_coordinates].append(point_coordinates)
for centroid_coordinates in clusters:
nb_of_points = len(clusters[centroid_coordinates])
if nb_of_points:
x, y = tuple(map(sum, zip(*clusters[centroid_coordinates])))
clusters[centroid_coordinates] = x / nb_of_points, y / nb_of_points
for centroid_coordinates in self.centroids:
self.space.itemconfig(self.centroids[centroid_coordinates].drawn_point, fill = '',
outline = '')
updated_centroids = {}
for centroid_coordinates in clusters:
if clusters[centroid_coordinates]:
colour = self.centroids[centroid_coordinates].colour
x, y = clusters[centroid_coordinates]
updated_centroids[(x, y)] = Point(self.draw_centroid(x, y, colour), colour)
self.centroids = updated_centroids
return different_clustering
def draw_centroid(self, x, y, colour):
return self.space.create_rectangle(x - 1, y - 1, x + 1, y + 1, fill = colour, outline = colour)
class Point:
def __init__(self, drawn_point, colour):
self.drawn_point = drawn_point
self.colour = colour
if __name__ == '__main__':
KMeansClustering().mainloop()
| gpl-3.0 | -7,594,628,259,311,744,000 | 3,722,597,427,618,233,000 | 49.940299 | 109 | 0.585604 | false |
aerval/blast_comparison | main.py | 1 | 16353 | #!/bin/env python
##############################################
# CompareBLASTs #
# A tool to compare the found hits from two #
# BLAST searches with the same search query. #
# #
# by Philipp B. Rentzsch #
# BCCDC Vancouver, BC #
# October 2014 - January 2015 #
# License: MIT #
##############################################
from __future__ import print_function
from time import strptime # convert string into time object
import optparse # commandline parsing
from blast_hit import * # BlastHit.py file
import string # for valid letters in filename
def load_blasthits(file):
'''
Read a tabular BLAST file into a list of BlastHits.
file = (string) filename of tabular blast result file
'''
blastfile = open(file).readlines()
hits = []
    # We cannot extract every line of the tabular file into exactly one hit
    # since some lines correspond to multiple such hits
for hit in blastfile:
h = hit.split('\t')
if h[1] == h[12]:
hits.append(BlastHit(hit))
else:
# When multiple gene ids contribute to the same alignment, they
# can be summarized to one hit. In the following we split these
            # up because we want to check all hits separately.
subhits = h[12].split(';')
for sub in subhits:
h[1] = sub
hits.append(BlastHit('\t'.join(h)))
return hits
class CompareBLASTs(object):
def __init__(self, old_hits, new_hits, email, name):
'''
Initialize the comparison object.
old_hits = List of BlastHits from the older BLAST Search
new_hits = List of BlastHits from the newer, second BLAST Search
        email = Your email address, needed for use of NCBI's Entrez to prevent
misuse of their service
        name = Query name that led to the BlastHits, used to identify them later
'''
self.input_old_hits = old_hits
self.input_new_hits = new_hits
self.email = email
self.name = name
def compare(self):
'''
Compares the two lists of BlastHits for more or less similar elements
        and extracts those elements from both lists that have no companion in
each other.
'''
# Compare for exact (or similar) hits.
self.new_hits, self.old_hits = compare_blasts(self.input_new_hits,
self.input_old_hits)
        # Retrieve basic information of corresponding genes for all old hits.
self.oldGeneIDs = get_idlist(self.old_hits['all'], self.email)
        # Check, for all the old hits without a copy in the new hit list, what
        # happened to their associated gene (whether it still exists, was
        # updated (=replaced) or deleted (=suppressed)).
oldOnly = {'live': [], 'replaced': [], 'suppressed': []}
        # A bit confusing: live and lost are handled as equivalent here, since a
# hit that is live (=still existing in the db) but not found in the
# new BLAST search was 'lost' at some point.
for hit in self.old_hits['unknown']:
for ID in hit.ids:
if ID.db == 'gi':
oldOnly[self.oldGeneIDs[ID.num]['Status']].append(hit)
hit.status = self.oldGeneIDs[ID.num]['Status']
break
self.new_hits['replacement'] = [] # Equivalent to old_hits 'replaced'
self.old_hits['lost'] = oldOnly['live']
self.old_hits['suppressed'] = oldOnly['suppressed']
self.old_hits['replacement'] = []
# Check the old hits with a known replacement tag, whether a replacing
# hit can be found within the new hits.
for num, hit in enumerate(oldOnly['replaced']):
for ID in hit.ids:
if ID.db == 'gi':
new_id = self.oldGeneIDs[ID.num]['ReplacedBy']
found = False
for num2, hit2 in enumerate(self.new_hits['unknown']):
if new_id in [ID.num for ID in hit2.ids]:
same, differences = hit.compare_hit(hit2, check_ids=False)
if same:
rep = self.new_hits['unknown'].pop(num2)
rep.status = 'replacement'
self.new_hits['replacement'].append(rep)
self.old_hits['replacement'].append(
oldOnly['replaced'][num])
found = True
break
if not found:
# Hit can be replaced but the replacement was nevertheless not
# found in the new Blast Search => lost/live.
self.old_hits['lost'].append(oldOnly['replaced'][num])
oldOnly['replaced'][num].status = 'live'
        # Get the basic info for those hits in the new search that have no
        # known relative in the old search.
self.newGeneIDs = get_idlist(self.new_hits['unknown'], self.email)
# Estimate the time of the old BLAST (or last used database update)
        # search by looking for the creation of the youngest entry that matches
# to the old hits.
date_oldsearch = max([strptime(record['CreateDate'], '%Y/%m/%d')
for record in self.oldGeneIDs.values()])
        # Check whether all new hits with no relative in the old Search are
        # indeed new (therefore created after the last of the old Hits). I
# never had this case but one can never know ...
self.new_hits['new'] = []
self.new_hits['old'] = []
for hit in self.new_hits['unknown']:
if strptime(self.newGeneIDs[hit.ids[0].num]['CreateDate'],
'%Y/%m/%d') < date_oldsearch:
self.new_hits['old'].append(hit)
hit.status = 'strange'
else:
self.new_hits['new'].append(hit)
hit.status = 'new'
def output_comparison(self, output_types=[lambda x: print(x)], top=0,
long_output=False, adaptive=True):
'''
        Prints (and/or writes to a file) the output of the BLAST comparison.
output_types = List of output lambdas like 'lambda x: print(x)' and
'lambda x: output_file.write(''.join([x, '\n']))'
top = The number of Hits (from the top score) that are of interest for
        the comparison (0 = all)
        long_output = A longer, more descriptive output
        adaptive = In adaptive mode only those categories are displayed that
        actually appear (e.g. if there are no new hits in the second BLAST,
        that category is not displayed)
'''
# Determine the number of hits (in the interested interval) that
# belong to each category.
hits_per_category = {'equal': 0, 'similar': 0, 'live': 0,
'replaced': 0, 'suppressed': 0, 'new': 0,
'strange': 0}
if top == 0: # Count all hits
top_old = len(self.old_hits['all'])
top_new = len(self.new_hits['all'])
else: # Count only the specified fraction of hits
top_old = min(top, len(self.old_hits['all']))
top_new = min(top, len(self.new_hits['all']))
for hit in self.old_hits['all'][:top_old]:
hits_per_category[hit.status] += 1
for hit in self.new_hits['all'][:top_new]:
if hit.status in ['new', 'strange']:
hits_per_category[hit.status] += 1
if long_output:
category_names = {
'equal': 'Found in both BLASTs results:\t%i',
'similar': 'Found in both BLASTs results with slight \
changes:\t%i',
'live': 'Not showing up for unknown reasons in the second \
BLAST (probably low scores):\t%i',
'replaced': 'Replaced/updated before the second BLAST:\t%i',
'suppressed': 'Deleted/suppressed before the second BLAST:\t\
%i',
'new': 'New hits added to the database for the second BLAST:\t\
%i',
'strange': 'Hits that do only appear in the second BLAST \
that should have appeared in the first:\t%i'}
else:
category_names = {
'equal': 'Equal Hits:\t%i',
'similar': 'Changed Hits\t%i',
'live': 'Lost Hits\t%i',
'replaced': 'Replaced Hits:\t%i',
'suppressed': 'Deleted Hits:\t%i',
'new': 'New Hits:\t%i',
'strange': 'New appearing Hits:\t%i'}
# For the different output channels (write to file or print).
for output in output_types:
# Always print the query name as more than one query can be found
# in a single BLAST.
if self.name:
output('Query:\t%s' % self.name)
if long_output:
output('Total hits in old search:\t%i' %
len(self.old_hits['all']))
output('Total hits in new search:\t%i' %
len(self.new_hits['all']))
if top_old != len(self.old_hits['all']) and \
top_new != len(self.new_hits['all']):
output('Among the top %i hits were:' % top)
else:
output('From all hits were:')
for key in ['equal', 'similar', 'live', 'replaced', 'suppressed',
'new', 'strange']:
if not adaptive or hits_per_category[key] > 0:
# In (default) adaptive mode, only those hit categories
# are displayed that appear (example: if there is no
# replaced hit, the replaced hits column is not displayed.
output(category_names[key] % hits_per_category[key])
# separate from following queries
output('\n')
def export_hit_categories(self, categories, path=''):
'''
Exports the given categories into files (format similar to the input
.blast format with a status column added at the end).
categories = String with comma ',' delimited categories (e.g: new,
all_old to export all new Hits and all the hits from the old search)
path = file path to the exported files
'''
categories = categories.split(',')
# Generate valid filenames:
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
name = ''.join(c for c in self.name if c in valid_chars)
for category in categories:
hits = None
if category == 'new':
hits = self.new_hits['new']
if category == 'equal':
hits = self.old_hits['same']
if category == 'similar':
hits = self.old_hits['similar']
if category == 'live':
hits = self.old_hits['lost']
if category == 'replaced':
hits = self.old_hits['replacement']
if category == 'suppressed':
hits = self.old_hits['suppressed']
if category == 'all_old':
hits = self.old_hits['all']
if category == 'all_new':
hits = self.new_hits['all']
if category == 'strange':
hits = self.new_hits['old']
if hits:
with open(path + name + '_' + category + '.blast', 'w+') as f:
                    # The query name and category specify the file name
# (e.g. Query7_all_new.blast).
for hit in hits:
f.write(str(hit) + '\n')
else:
print("Unknown export category %s" % category)
def perform_comparison(opts):
'''
The main function that compares two BLAST files against the same Query
Sequence
opts = parsed OptionsParser
'''
new_hits = {}
old_hits = {}
# Load the hits from the two input files.
new_hits_all = load_blasthits(opts.new_Blast)
old_hits_all = load_blasthits(opts.old_Blast)
    # Sort all hits by their respective query (as one BLAST file can contain
    # multiple queries).
for hit in new_hits_all:
if hit.name in new_hits.keys():
new_hits[hit.name].append(hit)
else:
new_hits[hit.name] = [hit]
for hit in old_hits_all:
if hit.name in old_hits.keys():
old_hits[hit.name].append(hit)
else:
old_hits[hit.name] = [hit]
    # Make sure that both files were run against the same queries.
assert old_hits.keys() == new_hits.keys()
# Define how to output the (general) results (print to console and/or save
# to file).
output_types = []
if opts.verbose:
output_types.append(lambda x: print(x))
if opts.save_output:
output_file = open(opts.output_path + opts.save_output, 'w+')
output_types.append(lambda x: output_file.write(''.join([x, '\n'])))
# Somewhat complicated expression because file.write does not
# automatically add a line end character.
for key in old_hits.keys():
blastComparison = CompareBLASTs(old_hits[key], new_hits[key],
opts.email, key)
blastComparison.compare()
blastComparison.output_comparison(output_types, opts.top,
opts.long_output, opts.adaptive)
# Export specified hit categories to file.
if opts.export:
blastComparison.export_hit_categories(opts.export,
path=opts.output_path)
if opts.save_output:
output_file.close()
if __name__ == '__main__':
# General description of the program
usage = '''
%prog [options]
    Necessary to provide are the two tabular BLAST files old (-o) and new
(-n)
'''
op = optparse.OptionParser(usage=usage)
op.add_option('-o', '--old', default=None, dest='old_Blast',
help='the older tabular BLAST file (24 columns)')
op.add_option('-n', '--new', default=None, dest='new_Blast',
help='the newer BLAST file')
op.add_option('-t', '--top', type='int', default=0,
help='specify when only the top X (integer value) hits for \
each query are of interest')
op.add_option('-v', '--verbose', action='store_true', dest='verbose',
default=True, help='print everything')
op.add_option('-q', '--quiet', action='store_false', dest='verbose',
help='stay quiet')
op.add_option('-s', '--save', default=None, dest='save_output',
help='file where the output is saved')
op.add_option('-p', '--put', default='', dest='output_path',
help='the path where the saved output and/or exported hit \
files are stored')
op.add_option('-l', '--longOutput', action='store_true',
dest='long_output', default=False,
help='enable long names in the output')
op.add_option('-a', '--adaptive', action='store_true',
dest='adaptive', default=True,
help='only display those hit classes, that have elements')
op.add_option('-A', '--notAdaptive', action='store_false',
dest='adaptive', help='display all elements')
op.add_option('-e', '--email', default='[email protected]',
help='email address of the user to send him/her notice of \
excess use')
op.add_option('-x', '--export', default=None,
help='export specified hit categories (Example: \
"-x new,old_all,suppressed", Categories: "equal, similar, \
live, replaced, suppressed, new, strange, all_old and \
all_new)"')
opts, args = op.parse_args()
assert opts.old_Blast and opts.new_Blast
# Executes the analysing program
perform_comparison(opts)
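    # Example invocation (file names are made up):
    #   python main.py -o run_2014.blast -n run_2015.blast -t 50 \
    #       -s summary.txt -x new,suppressed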
| mit | 3,695,388,466,441,639,400 | 2,572,118,752,261,110,000 | 41.365285 | 79 | 0.538494 | false |
makinacorpus/pygal | pygal/test/test_interpolate.py | 4 | 3200 | # -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2014 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
from pygal.test import make_data
def test_cubic(Chart, datas):
chart = Chart(interpolate='cubic')
chart = make_data(chart, datas)
assert chart.render()
def test_cubic_prec(Chart, datas):
chart = Chart(interpolate='cubic', interpolation_precision=200)
chart = make_data(chart, datas)
chart_low = Chart(interpolate='cubic', interpolation_precision=5)
    chart_low = make_data(chart_low, datas)
assert len(chart.render()) >= len(chart_low.render())
def test_quadratic(Chart, datas):
chart = Chart(interpolate='quadratic')
chart = make_data(chart, datas)
assert chart.render()
def test_lagrange(Chart, datas):
chart = Chart(interpolate='lagrange')
chart = make_data(chart, datas)
assert chart.render()
def test_trigonometric(Chart, datas):
chart = Chart(interpolate='trigonometric')
chart = make_data(chart, datas)
assert chart.render()
def test_hermite(Chart, datas):
chart = Chart(interpolate='hermite')
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_finite(Chart, datas):
chart = Chart(interpolate='hermite',
interpolation_parameters={'type': 'finite_difference'})
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_cardinal(Chart, datas):
chart = Chart(interpolate='hermite',
interpolation_parameters={'type': 'cardinal', 'c': .75})
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_catmull_rom(Chart, datas):
chart = Chart(interpolate='hermite',
interpolation_parameters={'type': 'catmull_rom'})
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_kochanek_bartels(Chart, datas):
chart = Chart(interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels', 'b': -1, 'c': 1, 't': 1})
chart = make_data(chart, datas)
assert chart.render()
chart = Chart(interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels', 'b': -1, 'c': -8, 't': 0})
chart = make_data(chart, datas)
assert chart.render()
chart = Chart(interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels', 'b': 0, 'c': 10, 't': -1})
chart = make_data(chart, datas)
assert chart.render()
| lgpl-3.0 | -5,594,919,251,115,478,000 | 5,738,839,702,995,925,000 | 30.99 | 79 | 0.657393 | false |
jit/pyew | pymsasid/decode.py | 16 | 31254 | # -----------------------------------------------------------------------------
# decode.py
#
# author: [email protected]
# Mainly rewritten from udis86 -- Vivek Mohan <[email protected]>
# -----------------------------------------------------------------------------
from common import DecodeException, VENDOR_INTEL, VENDOR_AMD
from inst import Inst, Operand, Ptr, ie_invalid, ie_pause, ie_nop
# this is intended: hundreds of constants used
from itab import *
from operand import *
# Extracts instruction prefixes.
def get_prefixes(u, inst):
have_pfx = 1
# if in error state, bail out
if u.error:
return -1
# keep going as long as there are prefixes available
i = 0
while have_pfx:
# Get next byte.
u.input.next()
if u.error:
return -1
curr = u.input.current()
# rex prefixes in 64bit mode
if u.dis_mode == 64 and (curr & 0xF0) == 0x40:
inst.pfx.rex = curr
else:
if curr == 0x2E:
inst.pfx.seg = 'cs'
inst.pfx.rex = 0
elif curr == 0x36:
inst.pfx.seg = 'ss'
inst.pfx.rex = 0
elif curr == 0x3E:
inst.pfx.seg = 'ds'
inst.pfx.rex = 0
elif curr == 0x26:
inst.pfx.seg = 'es'
inst.pfx.rex = 0
elif curr == 0x64:
inst.pfx.seg = 'fs'
inst.pfx.rex = 0
elif curr == 0x65:
inst.pfx.seg = 'gs'
inst.pfx.rex = 0
            elif curr == 0x67: # address-size override prefix
inst.pfx.adr = 0x67
inst.pfx.rex = 0
elif curr == 0xF0:
inst.pfx.lock = 0xF0
inst.pfx.rex = 0
elif curr == 0x66:
# the 0x66 sse prefix is only effective if no other sse prefix
# has already been specified.
if inst.pfx.insn == 0:
inst.pfx.insn = 0x66
inst.pfx.opr = 0x66
inst.pfx.rex = 0
elif curr == 0xF2:
inst.pfx.insn = 0xF2
inst.pfx.repne = 0xF2
inst.pfx.rex = 0
elif curr == 0xF3:
inst.pfx.insn = 0xF3
inst.pfx.rep = 0xF3
inst.pfx.repe = 0xF3
inst.pfx.rex = 0
else:
# No more prefixes
have_pfx = 0
# check if we reached max instruction length
if(i + 1) == MAX_INSN_LENGTH:
u.error = 1
i += 1
# return status
if u.error:
return -1
# rewind back one byte in stream, since the above loop
# stops with a non-prefix byte.
u.input.back()
# speculatively determine the effective operand mode,
# based on the prefixes and the current disassembly
# mode. This may be inaccurate, but useful for mode
# dependent decoding.
if u.dis_mode == 64:
if REX_W(inst.pfx.rex):
inst.opr_mode = 64
elif inst.pfx.opr:
inst.opr_mode = 16
elif(P_DEF64(inst.itab_entry.prefix)):
inst.opr_mode = 64
else:
inst.opr_mode = 32
if inst.pfx.adr:
inst.adr_mode = 32
else:
inst.adr_mode = 64
elif u.dis_mode == 32:
if inst.pfx.opr:
inst.opr_mode = 16
else:
inst.opr_mode = 32
if inst.pfx.adr:
inst.adr_mode = 16
else:
inst.adr_mode = 32
elif u.dis_mode == 16:
if inst.pfx.opr:
inst.opr_mode = 32
else:
inst.opr_mode = 16
if inst.pfx.adr:
inst.adr_mode = 32
else:
inst.adr_mode = 16
return 0
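# Illustrative helper (not used by the decoder): a simplified mirror of the
# speculative operand-size logic above, on plain values instead of the
# disassembler state; 'def64' stands in for the P_DEF64 itab flag.
def _effective_opr_mode_sketch(dis_mode, has_opr_pfx, rex_w, def64=False):
    if dis_mode == 64:
        if rex_w:
            return 64
        if has_opr_pfx:
            return 16
        return 64 if def64 else 32
    if dis_mode == 32:
        return 16 if has_opr_pfx else 32
    return 32 if has_opr_pfx else 16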
# Searches the instruction tables for the right entry.
def search_itab(u, inst):
# if in state of error, return
did_peek = 0
if u.error:
return -1
# get first byte of opcode
u.input.next()
if u.error:
return -1
curr = u.input.current()
    if curr is None:
inst.itab_entry = ie_invalid
inst.operator = inst.itab_entry.operator
return 0
    # resolve xchg, nop, pause craziness
if 0x90 == curr:
if not(u.dis_mode == 64 and REX_B(inst.pfx.rex)):
if(inst.pfx.rep):
inst.pfx.rep = 0
e = ie_pause
else:
e = ie_nop
inst.itab_entry = e
inst.operator = inst.itab_entry.operator
return 0
# get top-level table
elif 0x0F == curr:
table = ITAB__0F
curr = u.input.next()
if u.error:
return -1
# 2byte opcodes can be modified by 0x66, F3, and F2 prefixes
if 0x66 == inst.pfx.insn:
if itab_list[ITAB__PFX_SSE66__0F][curr].operator != 'invalid':
table = ITAB__PFX_SSE66__0F
inst.pfx.opr = 0
elif 0xF2 == inst.pfx.insn:
if itab_list[ITAB__PFX_SSEF2__0F][curr].operator != 'invalid':
table = ITAB__PFX_SSEF2__0F
inst.pfx.repne = 0
elif 0xF3 == inst.pfx.insn:
if itab_list[ITAB__PFX_SSEF3__0F][curr].operator != 'invalid':
table = ITAB__PFX_SSEF3__0F
inst.pfx.repe = 0
inst.pfx.rep = 0
# pick an instruction from the 1byte table
else:
table = ITAB__1BYTE
index = curr
while True:
e = itab_list[ table ][ index ]
# if operator constant is a standard instruction constant
# our search is over.
if e.operator in operator:
if e.operator == 'invalid':
if did_peek:
u.input.next()
if u.input.error:
raise DecodeException('error')
#return -1
inst.itab_entry = e
inst.operator = inst.itab_entry.operator
return 0
table = e.prefix
if e.operator == 'grp_reg':
peek = u.input.peek()
did_peek = 1
index = MODRM_REG(peek)
elif e.operator == 'grp_mod':
peek = u.input.peek()
did_peek = 1
index = MODRM_MOD(peek)
if index == 3:
index = ITAB__MOD_INDX__11
else:
index = ITAB__MOD_INDX__NOT_11
elif e.operator == 'grp_rm':
curr = u.input.next()
did_peek = 0
if u.error:
return -1
index = MODRM_RM(curr)
elif e.operator == 'grp_x87':
curr = u.input.next()
did_peek = 0
if u.error:
return -1
index = curr - 0xC0
elif e.operator == 'grp_osize':
if inst.opr_mode == 64:
index = ITAB__MODE_INDX__64
elif inst.opr_mode == 32:
index = ITAB__MODE_INDX__32
else:
index = ITAB__MODE_INDX__16
elif e.operator == 'grp_asize':
if inst.adr_mode == 64:
index = ITAB__MODE_INDX__64
elif inst.adr_mode == 32:
index = ITAB__MODE_INDX__32
else:
index = ITAB__MODE_INDX__16
elif e.operator == 'grp_mode':
if u.dis_mode == 64:
index = ITAB__MODE_INDX__64
elif u.dis_mode == 32:
index = ITAB__MODE_INDX__32
else:
index = ITAB__MODE_INDX__16
elif e.operator == 'grp_vendor':
if u.vendor == VENDOR_INTEL:
index = ITAB__VENDOR_INDX__INTEL
elif u.vendor == VENDOR_AMD:
index = ITAB__VENDOR_INDX__AMD
else:
raise DecodeException('unrecognized vendor id')
elif e.operator == 'd3vil':
raise DecodeException('invalid instruction operator constant Id3vil')
else:
raise DecodeException('invalid instruction operator constant')
inst.itab_entry = e
inst.operator = inst.itab_entry.operator
return 0
def resolve_operand_size(u, inst, s):
if s == SZ_V:
return inst.opr_mode
elif s == SZ_Z:
if inst.opr_mode == 16:
return 16
else:
return 32
elif s == SZ_P:
if inst.opr_mode == 16:
return SZ_WP
else:
return SZ_DP
elif s == SZ_MDQ:
if inst.opr_mode == 16:
return 32
else:
return inst.opr_mode
elif s == SZ_RDQ:
if u.dis_mode == 64:
return 64
else:
return 32
else:
return s
def resolve_operator(u, inst):
# far/near flags
inst.branch_dist = None
# readjust operand sizes for call/jmp instrcutions
if inst.operator == 'call' or inst.operator == 'jmp':
# WP: 16bit pointer
if inst.operand[0].size == SZ_WP:
inst.operand[0].size = 16
inst.branch_dist = 'far'
# DP: 32bit pointer
elif inst.operand[0].size == SZ_DP:
inst.operand[0].size = 32
inst.branch_dist = 'far'
elif inst.operand[0].size == 8:
inst.branch_dist = 'near'
# resolve 3dnow weirdness
elif inst.operator == '3dnow':
inst.operator = itab_list[ITAB__3DNOW][u.input.current()].operator
# SWAPGS is only valid in 64bits mode
if inst.operator == 'swapgs' and u.dis_mode != 64:
u.error = 1
return -1
return 0
def decode_a(u, inst, op):
"""Decodes operands of the type seg:offset."""
if inst.opr_mode == 16:
# seg16:off16
op.type = 'OP_PTR'
op.size = 32
op.lval = Ptr(u.input.read(16), u.input.read(16))
else:
# seg16:off32
op.type = 'OP_PTR'
op.size = 48
op.lval = Ptr(u.input.read(32), u.input.read(16))
def decode_gpr(u, inst, s, rm):
"""Returns decoded General Purpose Register."""
s = resolve_operand_size(u, inst, s)
if s == 64:
return GPR[64][rm]
elif s == SZ_DP or s == 32:
return GPR[32][rm]
elif s == SZ_WP or s == 16:
return GPR[16][rm]
elif s == 8:
if u.dis_mode == 64 and inst.pfx.rex:
if rm >= 4:
return GPR[8][rm+4]
return GPR[8][rm]
else:
return GPR[8][rm]
else:
return None
def resolve_gpr64(u, inst, gpr_op):
"""64bit General Purpose Register-Selection."""
if gpr_op in range(OP_rAXr8, OP_rDIr15) :
index = (gpr_op - OP_rAXr8) |(REX_B(inst.pfx.rex) << 3)
else:
index = gpr_op - OP_rAX
if inst.opr_mode == 16:
return GPR[16][index]
elif u.dis_mode == 32 or not(inst.opr_mode == 32 and REX_W(inst.pfx.rex) == 0):
return GPR[32][index]
return GPR[64][index]
def resolve_gpr32(u, inst, gpr_op):
"""32bit General Purpose Register-Selection."""
index = gpr_op - OP_eAX
if(inst.opr_mode == 16):
return GPR[16][index]
return GPR[32][index]
def resolve_reg(regtype, i):
"""Resolves the register type."""
return GPR[regtype][i]
def decode_imm(u, inst, s, op):
"""Decodes Immediate values."""
op.size = resolve_operand_size(u, inst, s)
op.type = 'OP_IMM'
op.lval = u.input.read(op.size)
def decode_modrm(u, inst, op, s, rm_type, opreg, reg_size, reg_type):
"""Decodes ModRM Byte."""
u.input.next()
# get mod, r/m and reg fields
mod = MODRM_MOD(u.input.current())
rm = (REX_B(inst.pfx.rex) << 3) | MODRM_RM(u.input.current())
reg = (REX_R(inst.pfx.rex) << 3) | MODRM_REG(u.input.current())
op.size = resolve_operand_size(u, inst, s)
# if mod is 11b, then the m specifies a gpr/mmx/sse/control/debug
if mod == 3:
op.type = 'OP_REG'
if rm_type == 'T_GPR':
op.base = decode_gpr(u, inst, op.size, rm)
else:
op.base = resolve_reg(rm_type, (REX_B(inst.pfx.rex) << 3) |(rm&7))
# else its memory addressing
else:
op.type = 'OP_MEM'
op.seg = inst.pfx.seg
# 64bit addressing
if inst.adr_mode == 64:
op.base = GPR[64][rm]
# get offset type
if mod == 1:
op.offset = 8
elif mod == 2:
op.offset = 32
elif mod == 0 and(rm & 7) == 5:
op.base = 'rip'
op.offset = 32
else:
op.offset = 0
# Scale-Index-Base(SIB)
if rm & 7 == 4:
u.input.next()
op.scale = (1 << SIB_S(u.input.current())) & ~1
op.index = GPR[64][(SIB_I(u.input.current()) |(REX_X(inst.pfx.rex) << 3))]
op.base = GPR[64][(SIB_B(u.input.current()) |(REX_B(inst.pfx.rex) << 3))]
# special conditions for base reference
if op.index == 'rsp':
op.index = None
op.scale = 0
if op.base == 'rbp' or op.base == 'r13':
if mod == 0:
op.base = None
if mod == 1:
op.offset = 8
else:
op.offset = 32
# 32-Bit addressing mode
elif inst.adr_mode == 32:
# get base
op.base = GPR[16][rm]
# get offset type
if mod == 1:
op.offset = 8
elif mod == 2:
op.offset = 32
elif mod == 0 and rm == 5:
op.base = None
op.offset = 32
else:
op.offset = 0
# Scale-Index-Base(SIB)
if(rm & 7) == 4:
u.input.next()
op.scale = (1 << SIB_S(u.input.current())) & ~1
op.index = GPR[32][SIB_I(u.input.current()) |(REX_X(inst.pfx.rex) << 3)]
op.base = GPR[32][SIB_B(u.input.current()) |(REX_B(inst.pfx.rex) << 3)]
if op.index == 'esp':
op.index = None
op.scale = 0
# special condition for base reference
if op.base == 'ebp':
if mod == 0:
op.base = None
if mod == 1:
op.offset = 8
else:
op.offset = 32
# 16bit addressing mode
else:
if rm == 0:
op.base = 'bx'
op.index = 'si'
elif rm == 1:
op.base = 'bx'
op.index = 'di'
elif rm == 2:
op.base = 'bp'
op.index = 'si'
elif rm == 3:
op.base = 'bp'
op.index = 'di'
elif rm == 4:
op.base = 'si'
elif rm == 5:
op.base = 'di'
elif rm == 6:
op.base = 'bp'
elif rm == 7:
op.base = 'bx'
if mod == 0 and rm == 6:
op.offset = 16
op.base = None
elif mod == 1:
op.offset = 8
elif mod == 2:
op.offset = 16
# extract offset, if any
if op.offset in [8, 16, 32, 64]:
op.lval = u.input.read(op.offset)
bound = pow(2, op.offset - 1)
if op.lval > bound:
op.lval = -(((2 * bound) - op.lval) % bound)
# resolve register encoded in reg field
if opreg:
opreg.type = 'OP_REG'
opreg.size = resolve_operand_size(u, inst, reg_size)
if reg_type == 'T_GPR':
opreg.base = decode_gpr(u, inst, opreg.size, reg)
else:
opreg.base = resolve_reg(reg_type, reg)
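# Worked example (illustrative only, not used above): a ModRM byte packs
# mod into bits 7-6, reg into bits 5-3 and rm into bits 2-0, and a SIB byte
# packs scale/index/base the same way; this mirrors what the MODRM_* and
# SIB_* helpers imported from operand are expected to compute.
def _split_modrm_sketch(byte):
    return (byte >> 6) & 0x3, (byte >> 3) & 0x7, byte & 0x7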
def decode_o(u, inst, s, op):
"""Decodes offset."""
op.seg = inst.pfx.seg
op.offset = inst.adr_mode
op.lval = u.input.read(inst.adr_mode)
op.type = 'OP_MEM'
op.size = resolve_operand_size(u, inst, s)
def disasm_operands(u, inst):
"""Disassembles Operands."""
# get type
def get_mopt(x): return x.type
mopt = map(get_mopt, inst.itab_entry.operand)
# get size
def get_mops(x): return x.size
mops = map(get_mops, inst.itab_entry.operand)
if mopt[2] != OP_NONE:
inst.operand = [Operand(), Operand(), Operand()]
elif mopt[1] != OP_NONE:
inst.operand = [Operand(), Operand()]
elif mopt[0] != OP_NONE:
inst.operand = [Operand()]
# iop = instruction operand
#iop = inst.operand
if mopt[0] == OP_A:
decode_a(u, inst, inst.operand[0])
# M[b] ...
# E, G/P/V/I/CL/1/S
elif mopt[0] == OP_M or mopt[0] == OP_E:
if mopt[0] == OP_M and MODRM_MOD(u.input.peek()) == 3:
u.error = 1
if mopt[1] == OP_G:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_GPR', inst.operand[1], mops[1], 'T_GPR')
if mopt[2] == OP_I:
decode_imm(u, inst, mops[2], inst.operand[2])
elif mopt[2] == OP_CL:
inst.operand[2].type = 'OP_REG'
inst.operand[2].base = 'cl'
inst.operand[2].size = 8
elif mopt[1] == OP_P:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_GPR', inst.operand[1], mops[1], 'T_MMX')
elif mopt[1] == OP_V:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_GPR', inst.operand[1], mops[1], 'T_XMM')
elif mopt[1] == OP_S:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_GPR', inst.operand[1], mops[1], 'T_SEG')
else:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_GPR', NULL, 0, 'T_NONE')
if mopt[1] == OP_CL:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = 'cl'
inst.operand[1].size = 8
elif mopt[1] == OP_I1:
inst.operand[1].type = 'OP_IMM'
inst.operand[1].lval = 1
elif mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
# G, E/PR[,I]/VR
elif mopt[0] == OP_G:
if mopt[1] == OP_M:
if MODRM_MOD(u.input.peek()) == 3:
u.error = 1
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_GPR')
elif mopt[1] == OP_E:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_GPR')
if mopt[2] == OP_I:
decode_imm(u, inst, mops[2], inst.operand[2])
elif mopt[1] == OP_PR:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_MMX', inst.operand[0], mops[0], 'T_GPR')
if mopt[2] == OP_I:
decode_imm(u, inst, mops[2], inst.operand[2])
elif mopt[1] == OP_VR:
if MODRM_MOD(u.input.peek()) != 3:
u.error = 1
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_XMM', inst.operand[0], mops[0], 'T_GPR')
elif mopt[1] == OP_W:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_XMM', inst.operand[0], mops[0], 'T_GPR')
# AL..BH, I/O/DX
elif mopt[0] in [OP_AL, OP_CL, OP_DL, OP_BL,
OP_AH, OP_CH, OP_DH, OP_BH]:
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = GPR[8][mopt[0] - OP_AL]
inst.operand[0].size = 8
if mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
elif mopt[1] == OP_DX:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = 'dx'
inst.operand[1].size = 16
elif mopt[1] == OP_O:
decode_o(u, inst, mops[1], inst.operand[1])
# rAX[r8]..rDI[r15], I/rAX..rDI/O
elif mopt[0] in [OP_rAXr8, OP_rCXr9, OP_rDXr10, OP_rBXr11,
OP_rSPr12, OP_rBPr13, OP_rSIr14, OP_rDIr15,
OP_rAX, OP_rCX, OP_rDX, OP_rBX,
OP_rSP, OP_rBP, OP_rSI, OP_rDI]:
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = resolve_gpr64(u, inst, mopt[0])
if mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
elif mopt[1] in [OP_rAX, OP_rCX, OP_rDX, OP_rBX,
OP_rSP, OP_rBP, OP_rSI, OP_rDI]:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = resolve_gpr64(u, inst, mopt[1])
elif mopt[1] == OP_O:
decode_o(u, inst, mops[1], inst.operand[1])
inst.operand[0].size = resolve_operand_size(u, inst, mops[1])
elif mopt[0] in [OP_ALr8b, OP_CLr9b, OP_DLr10b, OP_BLr11b,
OP_AHr12b, OP_CHr13b, OP_DHr14b, OP_BHr15b]:
        gpr = (mopt[0] - OP_ALr8b) + (REX_B(inst.pfx.rex) << 3)
        # With a REX prefix, encodings 4-7 select spl/bpl/sil/dil instead of
        # ah/ch/dh/bh.  This assumes GPR[8] follows the usual udis86 ordering
        # (al..bh, spl..dil, r8b..r15b), so the index is shifted up by 4.
        if gpr >= 4 and inst.pfx.rex:
            gpr = gpr + 4
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = GPR[8][gpr]
if mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
# eAX..eDX, DX/I
elif mopt[0] in [OP_eAX, OP_eCX, OP_eDX, OP_eBX,
OP_eSP, OP_eBP, OP_eSI, OP_eDI]:
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = resolve_gpr32(u, inst, mopt[0])
if mopt[1] == OP_DX:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = 'dx'
inst.operand[1].size = 16
elif mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
# ES..GS
elif mopt[0] in [OP_ES, OP_CS, OP_DS,
OP_SS, OP_FS, OP_GS]:
# in 64bits mode, only fs and gs are allowed
if u.dis_mode == 64:
if mopt[0] != OP_FS and mopt[0] != OP_GS:
u.error = 1
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = GPR['T_SEG'][mopt[0] - OP_ES]
inst.operand[0].size = 16
# J
elif mopt[0] == OP_J:
decode_imm(u, inst, mops[0], inst.operand[0])
        # take care of sign: convert the raw immediate to a signed value
bound = pow(2, inst.operand[0].size - 1)
if inst.operand[0].lval > bound:
inst.operand[0].lval = -(((2 * bound) - inst.operand[0].lval) % bound)
inst.operand[0].type = 'OP_JIMM'
# PR, I
elif mopt[0] == OP_PR:
if MODRM_MOD(u.input.peek()) != 3:
u.error = 1
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_MMX', NULL, 0, 'T_NONE')
if mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
# VR, I
elif mopt[0] == OP_VR:
if MODRM_MOD(u.input.peek()) != 3:
u.error = 1
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_XMM', NULL, 0, 'T_NONE')
if mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
# P, Q[,I]/W/E[,I],VR
elif mopt[0] == OP_P:
if mopt[1] == OP_Q:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_MMX', inst.operand[0], mops[0], 'T_MMX')
if mopt[2] == OP_I:
decode_imm(u, inst, mops[2], inst.operand[2])
elif mopt[1] == OP_W:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_XMM', inst.operand[0], mops[0], 'T_MMX')
elif mopt[1] == OP_VR:
if MODRM_MOD(u.input.peek()) != 3:
u.error = 1
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_XMM', inst.operand[0], mops[0], 'T_MMX')
elif mopt[1] == OP_E:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_MMX')
if mopt[2] == OP_I:
decode_imm(u, inst, mops[2], inst.operand[2])
# R, C/D
elif mopt[0] == OP_R:
if mopt[1] == OP_C:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_GPR', inst.operand[1], mops[1], 'T_CRG')
elif mopt[1] == OP_D:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_GPR', inst.operand[1], mops[1], 'T_DBG')
# C, R
elif mopt[0] == OP_C:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_CRG')
# D, R
elif mopt[0] == OP_D:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_DBG')
# Q, P
elif mopt[0] == OP_Q:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_MMX', inst.operand[1], mops[1], 'T_MMX')
# S, E
elif mopt[0] == OP_S:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_SEG')
# W, V
elif mopt[0] == OP_W:
decode_modrm(u, inst, inst.operand[0], mops[0], 'T_XMM', inst.operand[1], mops[1], 'T_XMM')
# V, W[,I]/Q/M/E
elif mopt[0] == OP_V:
if mopt[1] == OP_W:
# special cases for movlps and movhps
if MODRM_MOD(u.input.peek()) == 3:
if inst.operator == 'movlps':
inst.operator = 'movhlps'
elif inst.operator == 'movhps':
inst.operator = 'movlhps'
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_XMM', inst.operand[0], mops[0], 'T_XMM')
if mopt[2] == OP_I:
decode_imm(u, inst, mops[2], inst.operand[2])
elif mopt[1] == OP_Q:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_MMX', inst.operand[0], mops[0], 'T_XMM')
elif mopt[1] == OP_M:
if MODRM_MOD(u.input.peek()) == 3:
u.error = 1
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_XMM')
elif mopt[1] == OP_E:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_GPR', inst.operand[0], mops[0], 'T_XMM')
elif mopt[1] == OP_PR:
decode_modrm(u, inst, inst.operand[1], mops[1], 'T_MMX', inst.operand[0], mops[0], 'T_XMM')
# DX, eAX/AL
elif mopt[0] == OP_DX:
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = 'dx'
inst.operand[0].size = 16
if mopt[1] == OP_eAX:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = resolve_gpr32(u, inst, mopt[1])
elif mopt[1] == OP_AL:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = 'al'
inst.operand[1].size = 8
# I, I/AL/eAX
elif mopt[0] == OP_I:
decode_imm(u, inst, mops[0], inst.operand[0])
if mopt[1] == OP_I:
decode_imm(u, inst, mops[1], inst.operand[1])
elif mopt[1] == OP_AL:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = 'al'
            inst.operand[1].size = 8  # AL is an 8-bit register
elif mopt[1] == OP_eAX:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = resolve_gpr32(u, inst, mopt[1])
# O, AL/eAX
elif mopt[0] == OP_O:
decode_o(u, inst, mops[0], inst.operand[0])
inst.operand[1].type = 'OP_REG'
inst.operand[1].size = resolve_operand_size(u, inst, mops[0])
if mopt[1] == OP_AL:
inst.operand[1].base = 'al'
elif mopt[1] == OP_eAX:
inst.operand[1].base = resolve_gpr32(u, inst, mopt[1])
elif mopt[1] == OP_rAX:
inst.operand[1].base = resolve_gpr64(u, inst, mopt[1])
# 3
elif mopt[0] == OP_I3:
inst.operand[0].type = 'OP_IMM'
inst.operand[0].lval = 3
# ST(n), ST(n)
elif mopt[0] in [OP_ST0, OP_ST1, OP_ST2, OP_ST3,
OP_ST4, OP_ST5, OP_ST6, OP_ST7]:
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = GPR['T_ST'][mopt[0] - OP_ST0]
inst.operand[0].size = 0
if mopt[1] in [OP_ST0, OP_ST1, OP_ST2, OP_ST3,
OP_ST4, OP_ST5, OP_ST6, OP_ST7]:
inst.operand[1].type = 'OP_REG'
inst.operand[1].base = GPR['T_ST'][mopt[1] - OP_ST0]
inst.operand[1].size = 0
# AX
elif mopt[0] == OP_AX:
inst.operand[0].type = 'OP_REG'
inst.operand[0].base = 'ax'
inst.operand[0].size = 16
# none
else:
for op in inst.operand:
op.type = None
return 0
def do_mode(u, inst):
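    """Compute the effective operand and address sizes for inst from the
    disassembly mode and any prefixes."""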
# if in error state, bail out
if u.error:
return -1
    # propagate prefix effects
if u.dis_mode == 64: # set 64bit-mode flags
        # check that the instruction is valid in 64-bit mode
if P_INV64(inst.itab_entry.prefix):
u.error = 1
return -1
        # the effective rex prefix keeps only the REX bits that the opcode-map
        # entry for this instruction allows (plus the base 0x40 bit)
inst.pfx.rex = ((inst.pfx.rex & 0x40)
|(inst.pfx.rex & REX_PFX_MASK(inst.itab_entry.prefix)))
# calculate effective operand size
if REX_W(inst.pfx.rex) or P_DEF64(inst.itab_entry.prefix):
inst.opr_mode = 64
elif inst.pfx.opr:
inst.opr_mode = 16
else:
inst.opr_mode = 32
# calculate effective address size
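        # (a 67h address-size prefix selects 32-bit addressing in long mode)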
if inst.pfx.adr:
inst.adr_mode = 32
else:
inst.adr_mode = 64
elif u.dis_mode == 32: # set 32bit-mode flags
if inst.pfx.opr:
inst.opr_mode = 16
else:
inst.opr_mode = 32
if inst.pfx.adr:
inst.adr_mode = 16
else:
inst.adr_mode = 32
elif u.dis_mode == 16: # set 16bit-mode flags
if inst.pfx.opr:
inst.opr_mode = 32
else:
inst.opr_mode = 16
if inst.pfx.adr:
inst.adr_mode = 32
else:
inst.adr_mode = 16
# These flags determine which operand to apply the operand size
# cast to.
cast = [P_C0, P_C1, P_C2]
for i in range(len(inst.operand)):
inst.operand[i].cast = cast[i](inst.itab_entry.prefix)
return 0
    def decode(self):
        """Decode a single instruction and return the resulting Inst object."""
inst = Inst(myInput = self.input, add = self.pc, mode = self.dis_mode, syntax = self.syntax)
self.error = 0
self.input.start ()
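        # decode pipeline: prefixes -> opcode-table lookup -> mode flags ->
        # operands -> operator; decoding stops at the first stage that
        # reports an error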
if get_prefixes(self, inst) != 0:
pass # ;print('prefixes error') # error
elif search_itab(self, inst) != 0:
pass #; print('itab error') # error
elif do_mode(self, inst) != 0:
pass #; print('mode error') # error
elif disasm_operands(self, inst) != 0:
pass #; print('operand error') # error
elif resolve_operator(self, inst) != 0:
pass #; print('operator error') # error
# Handle decode error.
if self.error:
inst.clear()
inst.size = self.input.ctr + 1
inst.raw = self.input.buffer[0:inst.size]
inst.set_pc(inst.add + inst.size)
return inst
| gpl-2.0 | -7,384,994,103,494,279,000 | -3,214,245,553,475,315,000 | 32.788108 | 103 | 0.476323 | false |
jemofthewest/mykoans | python2/libs/colorama/win32.py | 86 | 2730 |
# from winbase.h
STDOUT = -11
STDERR = -12
try:
from ctypes import windll
except ImportError:
windll = None
SetConsoleTextAttribute = lambda *_: None
else:
from ctypes import (
byref, Structure, c_char, c_short, c_uint32, c_ushort
)
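    # console handles for stdout and stderr, fetched once via GetStdHandle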
handles = {
STDOUT: windll.kernel32.GetStdHandle(STDOUT),
STDERR: windll.kernel32.GetStdHandle(STDERR),
}
SHORT = c_short
WORD = c_ushort
DWORD = c_uint32
TCHAR = c_char
class COORD(Structure):
"""struct in wincon.h"""
_fields_ = [
('X', SHORT),
('Y', SHORT),
]
class SMALL_RECT(Structure):
"""struct in wincon.h."""
_fields_ = [
("Left", SHORT),
("Top", SHORT),
("Right", SHORT),
("Bottom", SHORT),
]
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
"""struct in wincon.h."""
_fields_ = [
("dwSize", COORD),
("dwCursorPosition", COORD),
("wAttributes", WORD),
("srWindow", SMALL_RECT),
("dwMaximumWindowSize", COORD),
]
def GetConsoleScreenBufferInfo(stream_id):
handle = handles[stream_id]
csbi = CONSOLE_SCREEN_BUFFER_INFO()
success = windll.kernel32.GetConsoleScreenBufferInfo(
handle, byref(csbi))
# This fails when imported via setup.py when installing using 'pip'
# presumably the fix is that running setup.py should not trigger all
# this activity.
# assert success
return csbi
def SetConsoleTextAttribute(stream_id, attrs):
handle = handles[stream_id]
success = windll.kernel32.SetConsoleTextAttribute(handle, attrs)
assert success
def SetConsoleCursorPosition(stream_id, position):
handle = handles[stream_id]
position = COORD(*position)
success = windll.kernel32.SetConsoleCursorPosition(handle, position)
assert success
def FillConsoleOutputCharacter(stream_id, char, length, start):
handle = handles[stream_id]
char = TCHAR(char)
length = DWORD(length)
start = COORD(*start)
num_written = DWORD(0)
# AttributeError: function 'FillConsoleOutputCharacter' not found
# could it just be that my types are wrong?
success = windll.kernel32.FillConsoleOutputCharacter(
handle, char, length, start, byref(num_written))
assert success
return num_written.value
if __name__ == '__main__':
x = GetConsoleScreenBufferInfo(STDOUT)
print(x.dwSize)
print(x.dwCursorPosition)
print(x.wAttributes)
print(x.srWindow)
print(x.dwMaximumWindowSize)
| mit | 5,910,495,903,060,479,000 | 7,894,396,676,737,501,000 | 27.736842 | 76 | 0.595604 | false |
ztrautt/tutorials | TEM-image-simple/mdcs/explore.py | 10 | 1530 | #! /usr/bin/env python
import requests
from collections import OrderedDict
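# Thin wrappers around an MDCS REST "explore" API: each helper issues an HTTP
# request with basic auth and returns the parsed JSON response (or a status
# string for deletions).
# Hypothetical usage (host, credentials and cert path are placeholders):
#   records = select_all('https://mdcs.example.org', 'user', 'secret', cert='ca.pem')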
def select_all(host,user,pswd,cert=None,format=None):
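    """Return all records, optionally converted to the given data format."""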
url = host + "/rest/explore/select/all"
params = dict()
if format: params['dataformat'] = format
r = requests.get(url, params=params, auth=(user, pswd), verify=cert)
return r.json(object_pairs_hook=OrderedDict)
def select(host,user,pswd,cert=None,format=None,ID=None,template=None,title=None):
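    """Return records matching the given id, template/schema or title."""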
url = host + "/rest/explore/select"
params = dict()
if format: params['dataformat'] = format
if ID: params['id'] = ID
if template: params['schema'] = template
if title: params['title'] = title
r = requests.get(url, params=params, auth=(user, pswd), verify=cert)
return r.json(object_pairs_hook=OrderedDict)
def delete(ID,host,user,pswd,cert=None):
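    """Delete the record with the given id and return a status message."""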
url = host + "/rest/explore/delete"
params = dict()
params['id']=ID
r = requests.delete(url, params=params, auth=(user, pswd), verify=cert)
if int(r.status_code)==204:
return "Successful deletion of: "+ID
else:
return r.json()
def query(host,user,pswd,cert=None,format=None,query=None,repositories=None):
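    """Query records by example, optionally limited to the given repositories."""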
url = host + "/rest/explore/query-by-example"
data = dict()
if format: data['dataformat'] = format
if query: data['query'] = query
if repositories: data['repositories'] = repositories
r = requests.post(url, data=data, auth=(user, pswd), verify=cert)
return r.json(object_pairs_hook=OrderedDict) | cc0-1.0 | 7,817,183,333,625,104,000 | -88,313,764,930,277,400 | 38.25641 | 82 | 0.656863 | false |