repo_name (string, 5-100 chars) | path (string, 4-375 chars) | copies (991 classes) | size (string, 4-7 chars) | content (string, 666-1M chars) | license (15 classes) |
---|---|---|---|---|---|
xutian/avocado-vt | virttest/bootstrap.py | 5 | 40674 | from distutils import dir_util # virtualenv problem pylint: disable=E0611
import logging
import os
import glob
import shutil
import sys
import re
from avocado.utils import distro
from avocado.utils import genio
from avocado.utils import linux_modules
from avocado.utils import path as utils_path
from avocado.utils import process
from . import data_dir
from . import asset
from . import cartesian_config
from . import utils_selinux
from . import defaults
from . import arch
from .compat_52lts import results_stdout_52lts
LOG = logging.getLogger("avocado.app")
basic_program_requirements = ['xz', 'tcpdump', 'nc', 'ip', 'arping']
recommended_programs = {'qemu': [('qemu-kvm', 'kvm'), ('qemu-img',),
('qemu-io',)],
'spice': [('qemu-kvm', 'kvm'), ('qemu-img',),
('qemu-io',)],
'libvirt': [('virsh',), ('virt-install',),
('fakeroot',), ('semanage',),
('getfattr',), ('restorecon',)],
'openvswitch': [],
                        'lvsb': [('semanage',), ('getfattr',), ('restorecon',), ('virt-sandbox',)],
'v2v': [],
'libguestfs': [('perl',)]}
mandatory_programs = {'qemu': basic_program_requirements + ['gcc'],
'spice': basic_program_requirements + ['gcc'],
'libvirt': basic_program_requirements,
'openvswitch': basic_program_requirements,
'lvsb': ['virt-sandbox', 'virt-sandbox-service', 'virsh'],
'v2v': basic_program_requirements,
'libguestfs': basic_program_requirements}
mandatory_headers = {'qemu': ['Python.h', 'types.h', 'socket.h', 'unistd.h'],
'spice': [],
'libvirt': [],
'openvswitch': [],
'v2v': [],
'lvsb': [],
'libguestfs': []}
first_subtest = {'qemu': ['unattended_install', 'steps'],
'spice': ['unattended_install', 'steps'],
'libvirt': ['unattended_install'],
'openvswitch': ['unattended_install'],
'v2v': ['unattended_install'],
'libguestfs': ['unattended_install'],
'lvsb': []}
last_subtest = {'qemu': ['shutdown'],
'spice': ['shutdown'],
'libvirt': ['shutdown', 'remove_guest'],
'openvswitch': ['shutdown'],
'v2v': ['shutdown'],
'libguestfs': ['shutdown'],
'lvsb': []}
test_filter = ['__init__', 'cfg', 'dropin.py']
def get_guest_os_info_list(test_name, guest_os):
"""
Returns a list of matching assets compatible with the specified test name
and guest OS
"""
os_info_list = []
cartesian_parser = cartesian_config.Parser()
cartesian_parser.parse_file(
data_dir.get_backend_cfg_path(test_name, 'guest-os.cfg'))
cartesian_parser.only_filter(guest_os)
dicts = cartesian_parser.get_dicts()
for params in dicts:
image_name = params.get('image_name', 'image').split('/')[-1]
shortname = params.get('shortname', guest_os)
os_info_list.append({'asset': image_name, 'variant': shortname})
if not os_info_list:
LOG.error("Could not find any assets compatible with %s for %s",
guest_os, test_name)
raise ValueError("Missing compatible assets for %s" % guest_os)
return os_info_list
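# Illustrative usage (hypothetical values, not part of the original module):
#     get_guest_os_info_list('qemu', 'Linux.CentOS.7')
# might return entries such as
#     [{'asset': 'centos7-x86_64', 'variant': 'Linux.CentOS.7.x86_64'}]
# depending on what the backend's guest-os.cfg defines.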
def get_config_filter():
config_filter = ['__init__', ]
for provider_subdir in asset.get_test_provider_subdirs():
config_filter.append(os.path.join('%s' % provider_subdir, 'cfg'))
return config_filter
def verify_recommended_programs(t_type):
cmds = recommended_programs[t_type]
found = False
for cmd_aliases in cmds:
for cmd in cmd_aliases:
found = None
try:
found = utils_path.find_command(cmd)
LOG.debug('%s OK', found)
break
except utils_path.CmdNotFoundError:
pass
if not found:
if len(cmd_aliases) == 1:
LOG.info("Recommended command %s missing. You may "
"want to install it if not building from "
"source.", cmd_aliases[0])
else:
LOG.info("Recommended command missing. You may "
"want to install it if not building it from "
"source. Aliases searched: %s", cmd_aliases)
def verify_mandatory_programs(t_type, guest_os):
failed_cmds = []
cmds = mandatory_programs[t_type]
for cmd in cmds:
try:
LOG.debug('%s OK', utils_path.find_command(cmd))
except utils_path.CmdNotFoundError:
LOG.error("Required command %s is missing. You must "
"install it", cmd)
failed_cmds.append(cmd)
includes = mandatory_headers[t_type]
available_includes = glob.glob('/usr/include/*/*')
for include in available_includes:
include_basename = os.path.basename(include)
if include_basename in includes:
LOG.debug('%s OK', include)
            includes.remove(include_basename)
if includes:
for include in includes:
LOG.error("Required include %s is missing. You may have to "
"install it", include)
failures = failed_cmds + includes
if failures:
raise ValueError('Missing (cmds/includes): %s' % " ".join(failures))
def write_subtests_files(config_file_list, output_file_object, test_type=None):
"""
Writes a collection of individual subtests config file to one output file
    Optionally, for tests whose type is known, write the 'virt_test_type'
configuration automatically.
"""
if test_type is not None:
output_file_object.write(" - @type_specific:\n")
output_file_object.write(" variants subtest:\n")
for provider_name, config_path in config_file_list:
config_file = open(config_path, 'r')
write_test_type_line = False
write_provider_line = False
for line in config_file.readlines():
if line.startswith('- ') and provider_name is not None:
name, deps = line.split(":")
name = name[1:].strip()
if name[0] == "@":
name = name[1:]
line = "- %s.%s:%s" % (provider_name, name, deps)
# special virt_test_type line output
if test_type is not None:
if write_test_type_line:
type_line = (" virt_test_type = %s\n" %
test_type)
output_file_object.write(type_line)
provider_line = (" provider = %s\n" %
provider_name)
output_file_object.write(provider_line)
write_test_type_line = False
elif line.startswith('- '):
write_test_type_line = True
output_file_object.write(" %s" % line)
else:
if write_provider_line:
provider_line = (" provider = %s\n" %
provider_name)
output_file_object.write(provider_line)
write_provider_line = False
elif line.startswith('- '):
write_provider_line = True
# regular line output
output_file_object.write(" %s" % line)
config_file.close()
def get_directory_structure(rootdir, guest_file, first_variant=None):
rootdir = rootdir.rstrip(os.sep)
start = rootdir.rfind(os.sep) + 1
previous_indent = 0
indent = 0
number_variants = 0
first_variant_offset = 0
if first_variant:
guest_file.write("variants:\n")
guest_file.write(" - %s:\n" % first_variant)
first_variant_offset = 2
for path, subdirs, files in os.walk(rootdir):
folders = path[start:].split(os.sep)
folders = folders[1:]
indent = len(folders)
if indent > previous_indent:
offset = first_variant_offset + indent + number_variants - 1
guest_file.write("%svariants:\n" % (4 * offset * " "))
number_variants += 1
elif indent < previous_indent:
number_variants = indent
indent += number_variants
try:
base_folder = folders[-1]
except IndexError:
            base_folder = ""
base_cfg = "%s.cfg" % base_folder
base_cfg_path = os.path.join(os.path.dirname(path), base_cfg)
if os.path.isfile(base_cfg_path):
base_file = open(base_cfg_path, 'r')
for line in base_file.readlines():
offset = first_variant_offset + indent - 1
guest_file.write("%s%s" % ((4 * offset * " "), line))
else:
if base_folder:
offset = first_variant_offset + indent - 1
guest_file.write("%s- %s:\n" %
((4 * offset * " "), base_folder))
variant_printed = False
if files:
files.sort()
for f in files:
if f.endswith(".cfg"):
bf = f[:len(f) - 4]
if bf not in subdirs:
if not variant_printed:
offset = first_variant_offset + indent
guest_file.write("%svariants:\n" %
((4 * offset * " ")))
variant_printed = True
base_file = open(os.path.join(path, f), 'r')
for line in base_file.readlines():
offset = first_variant_offset + indent + 1
guest_file.write("%s%s"
% ((4 * offset * " "), line))
indent -= number_variants
previous_indent = indent
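# Illustrative mapping (hypothetical tree, not part of the original module):
# a directory layout such as
#     guest-os/Linux/Fedora.cfg
#     guest-os/Linux/Fedora/21.cfg
# is flattened into nested Cartesian variants roughly like
#     variants:
#         - Linux:
#             variants:
#                 - Fedora:
#                     <contents of Fedora.cfg>
#                     variants:
#                         - 21: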
def sync_download_dir(interactive):
base_download_dir = data_dir.get_base_download_dir()
download_dir = data_dir.get_download_dir()
LOG.debug("Copying downloadable assets file definitions from %s "
"into %s", base_download_dir, download_dir)
download_file_list = glob.glob(os.path.join(base_download_dir,
"*.ini"))
for src_file in download_file_list:
dst_file = os.path.join(download_dir,
os.path.basename(src_file))
if not os.path.isfile(dst_file):
shutil.copyfile(src_file, dst_file)
else:
diff_cmd = "diff -Naur %s %s" % (dst_file, src_file)
diff_result = process.run(
diff_cmd, ignore_status=True, verbose=False)
if diff_result.exit_status != 0:
LOG.debug("%s result:\n %s",
diff_result.command,
results_stdout_52lts(diff_result))
answer = genio.ask('Download file "%s" differs from "%s". '
'Overwrite?' % (dst_file, src_file),
auto=not interactive)
if answer == "y":
LOG.debug("Restoring download file %s from sample",
dst_file)
shutil.copyfile(src_file, dst_file)
else:
LOG.debug("Preserving existing %s file", dst_file)
else:
LOG.debug('Download file %s exists, not touching',
dst_file)
def create_guest_os_cfg(t_type):
root_dir = data_dir.get_root_dir()
guest_os_cfg_dir = os.path.join(root_dir, 'shared', 'cfg', 'guest-os')
guest_os_cfg_path = data_dir.get_backend_cfg_path(t_type, 'guest-os.cfg')
guest_os_cfg_file = open(guest_os_cfg_path, 'w')
get_directory_structure(guest_os_cfg_dir, guest_os_cfg_file, "Guest")
LOG.debug("Config file %s auto generated from guest OS samples",
guest_os_cfg_path)
def host_os_get_distro_name(options, detected):
"""
    Gets the distro name, either from the command line or from auto detection.
    Well-known distros ('rhel', 'fedora') are returned with conventional
    capitalization; any other detected distro is prefixed with 'Host_'.
:param options: parsed command line arguments results
:type options: :class:`argparse.Namespace`
:param detected: result of :class:`avocado.utils.distro.detect`
:type detected: :class:`avocado.utils.distro.LinuxDistro`
"""
if options.vt_host_distro_name:
return options.vt_host_distro_name
if detected.name == 'rhel':
return 'RHEL'
elif detected.name == 'fedora':
return 'Fedora'
return "Host_%s" % detected.name
def create_host_os_cfg(options):
def _forced_or_detected(forced, detected):
if forced:
return forced
else:
return detected
host_os_cfg_path = data_dir.get_backend_cfg_path(options.vt_type, 'host-os.cfg')
with open(host_os_cfg_path, 'w') as cfg:
detected = distro.detect()
name = host_os_get_distro_name(options, detected)
version = _forced_or_detected(options.vt_host_distro_version,
"m%s" % detected.version)
release = _forced_or_detected(options.vt_host_distro_release,
"u%s" % detected.release)
arch = _forced_or_detected(options.vt_host_distro_arch,
"Host_arch_%s" % detected.arch)
cfg.write("variants:\n")
cfg.write(" - @Host:\n")
cfg.write(" variants:\n")
cfg.write(" - @%s:\n" % name)
cfg.write(" variants:\n")
cfg.write(" - @%s:\n" % version)
cfg.write(" variants:\n")
cfg.write(" - @%s:\n" % release)
cfg.write(" variants:\n")
cfg.write(" - @%s:\n" % arch)
count = [options.vt_host_distro_name,
options.vt_host_distro_version,
options.vt_host_distro_release,
options.vt_host_distro_arch].count(None)
if count == 4:
source = "distro detection"
elif count == 0:
source = "command line parameters"
else:
source = "distro detection and command line parameters"
LOG.debug("Config file %s generated from %s", host_os_cfg_path, source)
def create_subtests_cfg(t_type):
specific_test_list = []
specific_file_list = []
specific_subdirs = asset.get_test_provider_subdirs(t_type)
provider_names_specific = asset.get_test_provider_names(t_type)
config_filter = get_config_filter()
provider_info_specific = []
for specific_provider in provider_names_specific:
provider_info_specific.append(
asset.get_test_provider_info(specific_provider))
for subdir in specific_subdirs:
specific_test_list += data_dir.SubdirGlobList(subdir,
'*.py',
test_filter)
specific_file_list += data_dir.SubdirGlobList(subdir,
'*.cfg',
config_filter)
shared_test_list = []
shared_file_list = []
shared_subdirs = asset.get_test_provider_subdirs('generic')
shared_subdirs += asset.get_test_provider_subdirs('multi_host_migration')
provider_names_shared = asset.get_test_provider_names('generic')
provider_names_shared += asset.get_test_provider_names('multi_host_migration')
provider_info_shared = []
for shared_provider in provider_names_shared:
provider_info_shared.append(
asset.get_test_provider_info(shared_provider))
    if t_type != 'lvsb':
for subdir in shared_subdirs:
shared_test_list += data_dir.SubdirGlobList(subdir,
'*.py',
test_filter)
shared_file_list += data_dir.SubdirGlobList(subdir,
'*.cfg',
config_filter)
all_specific_test_list = []
for test in specific_test_list:
for p in provider_info_specific:
provider_base_path = p['backends'][t_type]['path']
if provider_base_path in test:
provider_name = p['name']
break
basename = os.path.basename(test)
if basename != "__init__.py":
all_specific_test_list.append("%s.%s" %
(provider_name,
basename.split(".")[0]))
all_shared_test_list = []
for test in shared_test_list:
for p in provider_info_shared:
if 'generic' in p['backends']:
provider_base_path = p['backends']['generic']['path']
if provider_base_path in test:
provider_name = p['name']
break
if 'multi_host_migration' in p['backends']:
provider_base_path = p['backends']['multi_host_migration']['path']
if provider_base_path in test:
provider_name = p['name']
break
basename = os.path.basename(test)
if basename != "__init__.py":
all_shared_test_list.append("%s.%s" %
(provider_name,
basename.split(".")[0]))
all_specific_test_list.sort()
all_shared_test_list.sort()
first_subtest_file = []
last_subtest_file = []
non_dropin_tests = []
tmp = []
for shared_file in shared_file_list:
provider_name = None
        for p in provider_info_shared:
            if 'generic' in p['backends']:
                provider_base_path = p['backends']['generic']['path']
                if provider_base_path in shared_file:
                    provider_name = p['name']
                    break
            if 'multi_host_migration' in p['backends']:
                provider_base_path = p['backends']['multi_host_migration']['path']
                if provider_base_path in shared_file:
                    provider_name = p['name']
                    break
shared_file_obj = open(shared_file, 'r')
for line in shared_file_obj.readlines():
line = line.strip()
if re.match("type\s*=.*", line):
cartesian_parser = cartesian_config.Parser()
cartesian_parser.parse_string(line)
td = next(cartesian_parser.get_dicts())
values = td['type'].split(" ")
for value in values:
                    if value not in non_dropin_tests:
non_dropin_tests.append("%s.%s" %
(provider_name, value))
shared_file_name = os.path.basename(shared_file)
shared_file_name = shared_file_name.split(".")[0]
if shared_file_name in first_subtest[t_type]:
if [provider_name, shared_file] not in first_subtest_file:
first_subtest_file.append([provider_name, shared_file])
elif shared_file_name in last_subtest[t_type]:
if [provider_name, shared_file] not in last_subtest_file:
last_subtest_file.append([provider_name, shared_file])
else:
if [provider_name, shared_file] not in tmp:
tmp.append([provider_name, shared_file])
shared_file_list = tmp
tmp = []
for shared_file in specific_file_list:
provider_name = None
for p in provider_info_specific:
provider_base_path = p['backends'][t_type]['path']
if provider_base_path in shared_file:
provider_name = p['name']
break
shared_file_obj = open(shared_file, 'r')
for line in shared_file_obj.readlines():
line = line.strip()
if re.match("type\s*=.*", line):
cartesian_parser = cartesian_config.Parser()
cartesian_parser.parse_string(line)
td = next(cartesian_parser.get_dicts())
values = td['type'].split(" ")
for value in values:
if value not in non_dropin_tests:
non_dropin_tests.append("%s.%s" %
(provider_name, value))
shared_file_name = os.path.basename(shared_file)
shared_file_name = shared_file_name.split(".")[0]
if shared_file_name in first_subtest[t_type]:
if [provider_name, shared_file] not in first_subtest_file:
first_subtest_file.append([provider_name, shared_file])
elif shared_file_name in last_subtest[t_type]:
if [provider_name, shared_file] not in last_subtest_file:
last_subtest_file.append([provider_name, shared_file])
else:
if [provider_name, shared_file] not in tmp:
tmp.append([provider_name, shared_file])
specific_file_list = tmp
subtests_cfg = os.path.join(data_dir.get_backend_dir(t_type), 'cfg',
'subtests.cfg')
subtests_file = open(subtests_cfg, 'w')
subtests_file.write(
"# Do not edit, auto generated file from subtests config\n")
subtests_file.write("variants subtest:\n")
write_subtests_files(first_subtest_file, subtests_file)
write_subtests_files(specific_file_list, subtests_file, t_type)
write_subtests_files(shared_file_list, subtests_file)
write_subtests_files(last_subtest_file, subtests_file)
subtests_file.close()
LOG.debug("Config file %s auto generated from subtest samples",
subtests_cfg)
def create_config_files(test_dir, shared_dir, interactive, t_type, step=None,
force_update=False):
def is_file_tracked(fl):
tracked_result = process.run("git ls-files %s --error-unmatch" % fl,
ignore_status=True, verbose=False)
return tracked_result.exit_status == 0
if step is None:
step = 0
LOG.info("")
step += 1
LOG.info("%d - Generating config set", step)
config_file_list = data_dir.SubdirGlobList(os.path.join(test_dir, "cfg"),
"*.cfg",
get_config_filter())
config_file_list = [cf for cf in config_file_list if is_file_tracked(cf)]
config_file_list_shared = glob.glob(os.path.join(shared_dir, "cfg",
"*.cfg"))
provider_info_specific = []
provider_names_specific = asset.get_test_provider_names(t_type)
for specific_provider in provider_names_specific:
provider_info_specific.append(
asset.get_test_provider_info(specific_provider))
specific_subdirs = asset.get_test_provider_subdirs(t_type)
for subdir in specific_subdirs:
for p in provider_info_specific:
if 'cartesian_configs' in p['backends'][t_type]:
for c in p['backends'][t_type]['cartesian_configs']:
cfg = os.path.join(subdir, "cfg", c)
config_file_list.append(cfg)
# Handle overrides of cfg files. Let's say a test provides its own
# subtest.cfg.sample, this file takes precedence over the shared
# subtest.cfg.sample. So, yank this file from the cfg file list.
config_file_list_shared_keep = []
for cf in config_file_list_shared:
basename = os.path.basename(cf)
target = os.path.join(test_dir, "cfg", basename)
if target not in config_file_list:
config_file_list_shared_keep.append(cf)
config_file_list += config_file_list_shared_keep
for config_file in config_file_list:
src_file = config_file
dst_file = os.path.join(test_dir, "cfg", os.path.basename(config_file))
if not os.path.isfile(dst_file):
LOG.debug("Creating config file %s from sample", dst_file)
shutil.copyfile(src_file, dst_file)
else:
diff_cmd = "diff -Naur %s %s" % (dst_file, src_file)
diff_result = process.run(
diff_cmd, ignore_status=True, verbose=False)
if diff_result.exit_status != 0:
LOG.info("%s result:\n %s",
diff_result.command,
results_stdout_52lts(diff_result))
answer = genio.ask("Config file %s differs from %s."
"Overwrite?" % (dst_file, src_file),
auto=force_update or not interactive)
if answer == "y":
LOG.debug("Restoring config file %s from sample",
dst_file)
shutil.copyfile(src_file, dst_file)
else:
LOG.debug("Preserving existing %s file", dst_file)
else:
if force_update:
update_msg = 'Config file %s exists, equal to sample'
else:
update_msg = 'Config file %s exists, not touching'
LOG.debug(update_msg, dst_file)
return step
def haz_defcon(datadir, imagesdir, isosdir, tmpdir):
"""
    Check whether all four directories carry the expected default SELinux
    context types, falling back to the on-disk type where a path already
    matches its default
"""
# Searching through default contexts is very slow.
# Exploit restorecon -n to find any defaults
try:
# First element is list, third tuple item is desired context
data_type = utils_selinux.diff_defcon(datadir, False)[0][2]
except IndexError: # object matches default, get current on-disk context
data_type = utils_selinux.get_context_of_file(datadir)
# Extract just the type component
data_type = utils_selinux.get_type_from_context(data_type)
try:
# Do not descend, we want to know the base-dir def. context
images_type = utils_selinux.diff_defcon(imagesdir, False)[0][2]
except IndexError:
images_type = utils_selinux.get_context_of_file(imagesdir)
images_type = utils_selinux.get_type_from_context(images_type)
try:
isos_type = utils_selinux.diff_defcon(isosdir, False)[0][2]
except IndexError:
isos_type = utils_selinux.get_context_of_file(isosdir)
isos_type = utils_selinux.get_type_from_context(isos_type)
try:
tmp_type = utils_selinux.diff_defcon(tmpdir, False)[0][2]
except IndexError:
tmp_type = utils_selinux.get_context_of_file(tmpdir)
tmp_type = utils_selinux.get_type_from_context(tmp_type)
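    # Illustrative diff_defcon result (hypothetical values): a non-empty list
    # such as [('/var/lib/avocado/data', 'user_home_t', 'virt_var_lib_t')]
    # means the on-disk context differs from the default; item [2] of the
    # first tuple is the desired (default) context used above.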
    # hard-coded values b/c only four of them and widely-used
if data_type == 'virt_var_lib_t':
if images_type == 'virt_image_t':
if isos_type == 'virt_content_t':
if tmp_type == 'user_tmp_t':
return True # No changes needed
return False
def set_defcon(datadir, imagesdir, isosdir, tmpdir):
"""
    Tries to set the default contexts for the data directories; returns True if changed
"""
made_changes = False
try:
# Returns list of tuple(pathname, from, to) of context differences
# between on-disk and defaults. Only interested in top-level
# object [0] and the context it would change to [2]
data_type = utils_selinux.diff_defcon(datadir, False)[0][2]
        # Extract only the type
existing_data = utils_selinux.get_type_from_context(data_type)
except IndexError:
existing_data = None
try:
images_type = utils_selinux.diff_defcon(imagesdir, False)[0][2]
existing_images = utils_selinux.get_type_from_context(images_type)
except IndexError:
existing_images = None
try:
isos_type = utils_selinux.diff_defcon(isosdir, False)[0][2]
existing_isos = utils_selinux.get_type_from_context(isos_type)
except IndexError:
existing_isos = None
try:
tmp_type = utils_selinux.diff_defcon(tmpdir, False)[0][2]
existing_tmp = utils_selinux.get_type_from_context(tmp_type)
except IndexError:
existing_tmp = None
# Only print slow info message one time
could_be_slow = False
msg = "Defining default contexts, this could take a few seconds..."
# Changing default contexts is *slow*, avoid it if not necessary
    if existing_data is None or existing_data != 'virt_var_lib_t':
        # semanage gives errors if we don't treat /usr & /usr/local the same
data_regex = utils_selinux.transmogrify_usr_local(datadir)
LOG.info(msg)
could_be_slow = True
# This applies only to datadir symlink, not sub-directories!
utils_selinux.set_defcon('virt_var_lib_t', data_regex)
made_changes = True
    if existing_images is None or existing_images != 'virt_image_t':
# Applies to imagesdir and everything below
images_regex = utils_selinux.transmogrify_usr_local(imagesdir)
images_regex = utils_selinux.transmogrify_sub_dirs(images_regex)
if not could_be_slow:
LOG.info(msg)
could_be_slow = True
utils_selinux.set_defcon('virt_image_t', images_regex)
made_changes = True
    if existing_isos is None or existing_isos != 'virt_content_t':
# Applies to isosdir and everything below
isos_regex = utils_selinux.transmogrify_usr_local(isosdir)
isos_regex = utils_selinux.transmogrify_sub_dirs(isos_regex)
if not could_be_slow:
LOG.info(msg)
could_be_slow = True
utils_selinux.set_defcon('virt_content_t', isos_regex)
made_changes = True
    if existing_tmp is None or existing_tmp != 'user_tmp_t':
tmp_regex = utils_selinux.transmogrify_usr_local(tmpdir)
tmp_regex = utils_selinux.transmogrify_sub_dirs(tmp_regex)
if not could_be_slow:
LOG.info(msg)
could_be_slow = True
utils_selinux.set_defcon('user_tmp_t', tmp_regex)
made_changes = True
return made_changes
def verify_selinux(datadir, imagesdir, isosdir, tmpdir,
interactive, selinux=False):
"""
Verify/Set/Warn about SELinux and default file contexts for testing.
:param datadir: Abs. path to data-directory symlink
:param imagesdir: Abs. path to data/images directory
:param isosdir: Abs. path to data/isos directory
:param tmpdir: Abs. path to avocado-vt tmp dir
:param interactive: True if running from console
    :param selinux: Whether to set up SELinux contexts for shared/data
"""
# datadir can be a symlink, but these must not have any
imagesdir = os.path.realpath(imagesdir)
isosdir = os.path.realpath(isosdir)
tmpdir = os.path.realpath(tmpdir)
needs_relabel = None
try:
# Raise SeCmdError if selinux not installed
if utils_selinux.get_status() == 'enforcing':
# Check if default contexts are set
if not haz_defcon(datadir, imagesdir, isosdir, tmpdir):
if selinux:
answer = "y"
else:
answer = genio.ask("Setup all undefined default SE"
"Linux contexts for shared/data/?",
auto=not interactive)
else:
answer = "n"
if answer.lower() == "y":
# Assume relabeling is needed if changes made
needs_relabel = set_defcon(datadir, imagesdir, isosdir, tmpdir)
# Only relabel if files/dirs don't match default
labels_ok = utils_selinux.verify_defcon(datadir, False)
labels_ok &= utils_selinux.verify_defcon(imagesdir, True)
labels_ok &= utils_selinux.verify_defcon(isosdir, True)
labels_ok &= utils_selinux.verify_defcon(tmpdir, True)
if labels_ok:
needs_relabel = False
else:
LOG.warning("On-disk SELinux labels do not match defaults")
needs_relabel = True
# Disabled or Permissive mode is same result as not installed
else:
LOG.info("SELinux in permissive or disabled, testing"
"in enforcing mode is highly encourraged.")
except utils_selinux.SemanageError:
LOG.info("Could not set default SELinux contexts. Please")
LOG.info("consider installing the semanage program then ")
LOG.info("verifying and/or running running:")
# Paths must be transmogrified (changed) into regular expressions
LOG.info("semanage fcontext --add -t virt_var_lib_t '%s'",
utils_selinux.transmogrify_usr_local(datadir))
LOG.info("semanage fcontext --add -t virt_image_t '%s'",
utils_selinux.transmogrify_usr_local(
utils_selinux.transmogrify_sub_dirs(imagesdir)))
LOG.info("semanage fcontext --add -t virt_content_t '%s'",
utils_selinux.transmogrify_usr_local(
utils_selinux.transmogrify_sub_dirs(isosdir)))
LOG.info("semanage fcontext --add -t user_tmp_t '%s'",
utils_selinux.transmogrify_usr_local(
utils_selinux.transmogrify_sub_dirs(tmpdir)))
needs_relabel = None # Next run will catch if relabeling needed
except utils_selinux.SelinuxError: # Catchall SELinux related
LOG.info("SELinux not available, or error in command/setup.")
LOG.info("Please manually verify default file contexts before")
LOG.info("testing with SELinux enabled and enforcing.")
if needs_relabel:
if selinux:
answer = "y"
else:
answer = genio.ask("Relabel from default contexts?",
auto=not interactive)
if answer.lower() == 'y':
changes = utils_selinux.apply_defcon(datadir, False)
changes += utils_selinux.apply_defcon(imagesdir, True)
changes += utils_selinux.apply_defcon(isosdir, True)
changes += utils_selinux.apply_defcon(tmpdir, True)
LOG.info("Corrected contexts on %d files/dirs",
len(changes))
def bootstrap(options, interactive=False):
"""
Common virt test assistant module.
:param options: Command line options.
:param interactive: Whether to ask for confirmation.
"""
if options.yes_to_all:
interactive = False
LOG.info("Running bootstrap for %s", options.vt_type)
step = 0
LOG.info("")
step += 1
LOG.info("%d - Checking the mandatory programs and headers", step)
guest_os = options.vt_guest_os or defaults.DEFAULT_GUEST_OS
try:
verify_mandatory_programs(options.vt_type, guest_os)
except Exception as details:
LOG.debug(details)
        LOG.debug('Install the missing programs and/or headers and '
                  're-run bootstrap')
sys.exit(1)
LOG.info("")
step += 1
LOG.info("%d - Checking the recommended programs", step)
verify_recommended_programs(options.vt_type)
LOG.info("")
step += 1
LOG.info("%d - Updating test providers repo configuration from local copy", step)
tp_base_dir = data_dir.get_base_test_providers_dir()
tp_local_dir = data_dir.get_test_providers_dir()
dir_util.copy_tree(tp_base_dir, tp_local_dir)
not_downloaded = asset.test_providers_not_downloaded()
if not_downloaded:
action = "Downloading"
else:
action = "Updating"
if not options.vt_no_downloads:
LOG.info("")
step += 1
LOG.info("%d - %s the test providers from remote repos", step, action)
asset.download_all_test_providers(options.vt_update_providers)
else:
if not_downloaded:
LOG.warn("The following test providers have not been downloaded: %s",
", ".join(not_downloaded))
LOG.info("")
step += 1
LOG.info("%d - Verifying directories", step)
datadir = data_dir.get_data_dir()
shared_dir = data_dir.get_shared_dir()
sub_dir_list = ["images", "isos", "steps_data", "gpg", "downloads"]
for sub_dir in sub_dir_list:
sub_dir_path = os.path.join(datadir, sub_dir)
if not os.path.isdir(sub_dir_path):
LOG.debug("Creating %s", sub_dir_path)
os.makedirs(sub_dir_path)
else:
LOG.debug("Dir %s exists, not creating",
sub_dir_path)
base_backend_dir = data_dir.get_base_backend_dir()
local_backend_dir = data_dir.get_local_backend_dir()
LOG.info("")
step += 1
LOG.info("%d - Syncing backend dirs %s -> %s", step, base_backend_dir,
local_backend_dir)
dir_util.copy_tree(base_backend_dir, local_backend_dir)
sync_download_dir(interactive)
test_dir = data_dir.get_backend_dir(options.vt_type)
if options.vt_type == 'libvirt':
step = create_config_files(test_dir, shared_dir, interactive,
options.vt_type, step,
force_update=options.vt_update_config)
create_subtests_cfg(options.vt_type)
create_guest_os_cfg(options.vt_type)
# Don't bother checking if changes can't be made
if os.getuid() == 0:
verify_selinux(datadir,
os.path.join(datadir, 'images'),
os.path.join(datadir, 'isos'),
data_dir.get_tmp_dir(),
interactive, options.vt_selinux_setup)
# lvsb test doesn't use any shared configs
elif options.vt_type == 'lvsb':
create_subtests_cfg(options.vt_type)
if os.getuid() == 0:
# Don't bother checking if changes can't be made
verify_selinux(datadir,
os.path.join(datadir, 'images'),
os.path.join(datadir, 'isos'),
data_dir.get_tmp_dir(),
interactive, options.vt_selinux_setup)
else: # Some other test
step = create_config_files(test_dir, shared_dir, interactive,
options.vt_type, step,
force_update=options.vt_update_config)
create_subtests_cfg(options.vt_type)
create_guest_os_cfg(options.vt_type)
create_host_os_cfg(options)
if not (options.vt_no_downloads or options.vt_skip_verify_download_assets):
LOG.info("")
step += 1
LOG.info("%s - Verifying (and possibly downloading) guest image",
step)
try:
for os_info in get_guest_os_info_list(options.vt_type, guest_os):
os_asset = os_info['asset']
try:
asset.download_asset(os_asset, interactive=interactive,
restore_image=True)
except AssertionError:
pass # Not all files are managed via asset
except ValueError as details:
LOG.error(details)
sys.exit(1)
check_modules = []
if options.vt_type == "qemu":
check_modules = arch.get_kvm_module_list()
elif options.vt_type == "openvswitch":
check_modules = ["openvswitch"]
if check_modules:
LOG.info("")
step += 1
LOG.info("%d - Checking for modules %s", step,
", ".join(check_modules))
for module in check_modules:
if not linux_modules.module_is_loaded(module):
LOG.warning("Module %s is not loaded. You might want to "
"load it", module)
else:
LOG.debug("Module %s loaded", module)
LOG.info("")
LOG.info("VT-BOOTSTRAP FINISHED")
LOG.debug("You may take a look at the following online docs for more info:")
LOG.debug(" - http://avocado-vt.readthedocs.org/")
LOG.debug(" - http://avocado-framework.readthedocs.org/")
| gpl-2.0 |
shownomercy/django | django/contrib/sites/migrations/0001_initial.py | 276 | 1096 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.contrib.sites.models import _simple_domain_name_validator
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('domain', models.CharField(max_length=100, verbose_name='domain name', validators=[_simple_domain_name_validator])),
('name', models.CharField(max_length=50, verbose_name='display name')),
],
options={
'ordering': ('domain',),
'db_table': 'django_site',
'verbose_name': 'site',
'verbose_name_plural': 'sites',
},
bases=(models.Model,),
managers=[
('objects', django.contrib.sites.models.SiteManager()),
],
),
]
| bsd-3-clause |
sserrot/champion_relationships | venv/Lib/site-packages/adodbapi/test/test_adodbapi_dbapi20.py | 1 | 5822 | print("This module depends on the dbapi20 compliance tests created by Stuart Bishop")
print("(see db-sig mailing list history for info)")
import platform
import unittest
import sys
import dbapi20
import setuptestframework
testfolder = setuptestframework.maketemp()
if '--package' in sys.argv:
pth = setuptestframework.makeadopackage(testfolder)
sys.argv.remove('--package')
else:
pth = setuptestframework.find_ado_path()
if pth not in sys.path:
sys.path.insert(1,pth)
# function to clean up the temporary folder -- calling program must run this function before exit.
cleanup = setuptestframework.getcleanupfunction()
import adodbapi
import adodbapi.is64bit as is64bit
db = adodbapi
if '--verbose' in sys.argv:
db.adodbapi.verbose = 3
print((adodbapi.version))
print(("Tested with dbapi20 %s" % dbapi20.__version__))
try:
onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python
except:
onWindows = False
node = platform.node()
conn_kws = {}
host = 'testsql.2txt.us,1430' # if None, will use macro to fill in node name
instance = r'%s\SQLEXPRESS'
conn_kws['name'] = 'adotest'
conn_kws['user'] = 'adotestuser' # None implies Windows security
conn_kws['password'] = "Sq1234567"
# macro definition for keyword "security" using macro "auto_security"
conn_kws['macro_auto_security'] = 'security'
if host is None:
conn_kws['macro_getnode'] = ['host', instance]
else:
conn_kws['host'] = host
conn_kws['provider'] = 'Provider=MSOLEDBSQL;DataTypeCompatibility=80;MARS Connection=True;'
connStr = "%(provider)s; %(security)s; Initial Catalog=%(name)s;Data Source=%(host)s"
if onWindows and node != "z-PC":
pass # default should make a local SQL Server connection
elif node == "xxx": # try Postgres database
_computername = "25.223.161.222"
_databasename='adotest'
_username = 'adotestuser'
_password = '12345678'
_driver="PostgreSQL Unicode"
_provider = ''
connStr = '%sDriver={%s};Server=%s;Database=%s;uid=%s;pwd=%s;' % \
(_provider,_driver,_computername,_databasename,_username,_password)
elif node == "yyy": # ACCESS data base is known to fail some tests.
if is64bit.Python():
driver = "Microsoft.ACE.OLEDB.12.0"
else:
driver = "Microsoft.Jet.OLEDB.4.0"
testmdb = setuptestframework.makemdb(testfolder)
connStr = r"Provider=%s;Data Source=%s" % (driver, testmdb)
else: # try a remote connection to an SQL server
conn_kws['proxy_host'] = '25.44.77.176'
import adodbapi.remote
db = adodbapi.remote
print(('Using Connection String like=%s' % connStr))
print(('Keywords=%s' % repr(conn_kws)))
class test_adodbapi(dbapi20.DatabaseAPI20Test):
driver = db
connect_args = (connStr,)
connect_kw_args = conn_kws
def __init__(self,arg):
dbapi20.DatabaseAPI20Test.__init__(self,arg)
def testMethodName(self):
return self.id().split('.')[-1]
def setUp(self):
# Call superclass setUp In case this does something in the
# future
dbapi20.DatabaseAPI20Test.setUp(self)
if self.testMethodName()=='test_callproc':
con = self._connect()
engine = con.dbms_name
## print('Using database Engine=%s' % engine) ##
if engine != 'MS Jet':
sql="""
create procedure templower
@theData varchar(50)
as
select lower(@theData)
"""
else: # Jet
sql="""
create procedure templower
(theData varchar(50))
as
select lower(theData);
"""
cur = con.cursor()
try:
cur.execute(sql)
con.commit()
except:
pass
cur.close()
con.close()
self.lower_func='templower'
def tearDown(self):
if self.testMethodName()=='test_callproc':
con = self._connect()
cur = con.cursor()
try:
cur.execute("drop procedure templower")
except:
pass
con.commit()
dbapi20.DatabaseAPI20Test.tearDown(self)
def help_nextset_setUp(self,cur):
'Should create a procedure called deleteme '
'that returns two result sets, first the number of rows in booze then "name from booze"'
sql="""
create procedure deleteme as
begin
select count(*) from %sbooze
select name from %sbooze
end
""" %(self.table_prefix,self.table_prefix)
cur.execute(sql)
def help_nextset_tearDown(self,cur):
'If cleaning up is needed after nextSetTest'
try:
cur.execute("drop procedure deleteme")
except:
pass
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
stmts=[self.ddl1] + self._populate()
for sql in stmts:
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== 6
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
try:
self.help_nextset_tearDown(cur)
finally:
con.close()
def test_setoutputsize(self): pass
if __name__ == '__main__':
unittest.main()
cleanup(testfolder, None)
| mit |
grovesdixon/metaTranscriptomes | scripts/parse_codeml_likelihoods.py | 1 | 1593 | #!/usr/bin/env python
##parse_codeml_likelihoods.py
##written 12/18/14 by Groves Dixon
ProgramName = 'parse_codeml_likelihoods.py'
LastUpdated = '12/18/14'
By = 'Groves Dixon'
VersionNumber = '1.0'
print "\nRunning Program {}...".format(ProgramName)
VersionString = '{} version {} Last Updated {} by {}'.format(ProgramName, VersionNumber, LastUpdated, By)
Description = '''
Description:
Parse outputs from codeml files.
'''
##Import Modules
from sys import argv
from sys import exit
null = argv[1]
alt = argv[2]
outfile = argv[3]
def read(fileName):
geneList = []
likeList = []
npList = []
with open(fileName, 'r') as infile:
for line in infile:
line = line.strip("\n").split()
gene = line[0]
np = line[4].split(")")[0]
like = line[5]
geneList.append(gene)
likeList.append(like)
npList.append(np)
return geneList, likeList, npList
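# Illustrative input line (hypothetical; e.g. the result of grepping 'lnL'
# out of codeml output files):
#     gene1.codeml  lnL(ntime: 7  np: 9):  -2021.348300  +0.000000
# from which read() extracts gene='gene1.codeml', np='9', like='-2021.348300'.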
nullGenes, nullLikes, nullNps = read(null)
altGenes, altLikes, altNps = read(alt)
with open(outfile, 'w') as out:
out.write("EST\tnullLike\taltLike\tnullNP\taltNP")
for i in range(len(nullGenes)):
        # str.strip(".codeml") would strip any of those characters from both
        # ends rather than the suffix, so remove the extension explicitly.
        nullGene = nullGenes[i].replace(".codeml", "")
        altGene = altGenes[i].replace(".codeml", "")
nullLike = nullLikes[i]
altLike = altLikes[i]
nullNp = nullNps[i]
altNp = altNps[i]
if nullGene != altGene:
exit("genes don't match!")
outstring = "\n{}\t{}\t{}\t{}\t{}".format(nullGene, nullLike, altLike, nullNp, altNp)
out.write(outstring)
| mit |
jigarkb/CTCI | CTCI/chapter_2/2.3.py | 2 | 1156 | # Delete Middle Node: Implement an algorithm to delete a node in the middle of
# a singly linked list, given only access to that node
from LinkedList import Node, LinkedList
def delete_middle_node(node_to_delete):
if node_to_delete.next is None:
raise Exception("Invalid node to delete")
node_to_delete.data = node_to_delete.next.data
node_to_delete.next = node_to_delete.next.next
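# Illustrative trace (not part of the original solution): for a -> b -> c -> d
# with node_to_delete = b, c's data is copied into b and c is unlinked,
# leaving a -> c -> d. The tail has no successor to copy from, hence the guard.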
if __name__ == '__main__':
ll = LinkedList()
continue_ans = raw_input("Do you want to add new node? (y/n): ")
to_delete = "n"
while continue_ans == 'y':
data = raw_input("Enter data for the new node: ")
data_node = Node(data)
ll.append_node(data_node)
if to_delete == 'n':
to_delete = raw_input("Is this the one you want to remove? (y/n): ")
if to_delete == 'y':
node_to_delete = data_node
continue_ans = raw_input("Do you want to add new node? (y/n)?")
print "Initial linked list: {}".format(ll)
print "Middle node to delete: {}".format(node_to_delete)
delete_middle_node(node_to_delete)
print "Linked list after deletion: {}".format(ll)
| mit |
zorroz/microblog | flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/charsetprober.py | 3127 | 1902 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import re
class CharSetProber:
def __init__(self):
pass
def reset(self):
self._mState = constants.eDetecting
def get_charset_name(self):
return None
def feed(self, aBuf):
pass
def get_state(self):
return self._mState
def get_confidence(self):
return 0.0
def filter_high_bit_only(self, aBuf):
aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
return aBuf
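    # Illustrative behaviour (hypothetical input, not in the original source):
    #     filter_high_bit_only(b'abc\xe4\xb8\xadxyz') -> b' \xe4\xb8\xad '
    # i.e. every run of 7-bit bytes collapses to a single space, leaving only
    # the high-bit bytes for frequency analysis.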
def filter_without_english_letters(self, aBuf):
aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
return aBuf
def filter_with_english_letters(self, aBuf):
# TODO
return aBuf
| bsd-3-clause |
daivietpda/M7WLJ-5.0.2 | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <[email protected]>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human
view of the call stack, drawn as a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for some time, but not too much; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
        has the name given by func. If no such parent exists,
        create the function as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
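# Illustrative input (hypothetical function-tracer line in the format the
# regex above expects):
#     bash-4073  [000]  1460.647350: mutex_unlock <-tracing_open
# which parseLine() turns into ('1460.647350', 'mutex_unlock', 'tracing_open').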
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
| gpl-2.0 |
timebackzhou/ogc_server | bayesian/test/test_factor_graph_verify.py | 2 | 3768 | import pytest
from bayesian.factor_graph import *
def pytest_funcarg__x1(request):
x1 = VariableNode('x1')
return x1
def pytest_funcarg__x2(request):
x2 = VariableNode('x2')
return x2
def pytest_funcarg__fA_node(request):
def fA(x1):
return 0.5
fA_node = FactorNode('fA', fA)
return fA_node
def pytest_funcarg__simple_valid_graph(request):
def fA(x1):
return 0.5
fA_node = FactorNode('fA', fA)
x1 = VariableNode('x1')
connect(fA_node, x1)
graph = FactorGraph([fA_node, x1])
return graph
def pytest_funcarg__graph_with_function_as_node(request):
'''
A common error is to instantiate the
graph with the function instead of
the function node wrapper.
'''
def fA(x1):
return 0.5
fA_node = FactorNode('fA', fA)
x1 = VariableNode('x1')
connect(fA_node, x1)
graph = FactorGraph([fA, x1])
return graph
def pytest_funcarg__graph_with_empty_func_domains(request):
def fA(x1):
return 0.5
fA_node = FactorNode('fA', fA)
x1 = VariableNode('x1')
connect(fA_node, x1)
graph = FactorGraph([fA_node, x1])
fA_node.func.domains = {}
return graph
def pytest_funcarg__graph_with_missing_func_domains(request):
def fA(x1):
return 0.5
fA_node = FactorNode('fA', fA)
x1 = VariableNode('x1')
connect(fA_node, x1)
graph = FactorGraph([fA_node, x1])
delattr(fA_node.func, 'domains')
return graph
def pytest_funcarg__graph_with_cycle(request):
'''
This graph looks like this BBN:
x1 x2----+
| | |
+----+----+ |
| |
x3 |
| |
+-----+----+
|
x4
'''
def fA(x1):
return 0.5
def fB(x2):
return 0.5
def fC(x1, x2, x3):
return 0.5
def fD(x2, x3, x4):
return 0.5
graph = build_graph(fA, fB, fC, fD)
return graph
class TestVerify():
def test_verify_variable_node_neighbour_type(self, x1, fA_node):
connect(fA_node, x1)
assert fA_node.verify_neighbour_types() is True
assert x1.verify_neighbour_types() is True
def test_verify_variable_node_neighbour_type_symmetry(self, x1, fA_node):
connect(x1, fA_node)
assert fA_node.verify_neighbour_types() is True
assert x1.verify_neighbour_types() is True
def test_verify_variable_node_wrong_neighbour_type(self, x1, x2):
connect(x1, x2)
assert x1.verify_neighbour_types() is False
assert x2.verify_neighbour_types() is False
def test_nodes_of_correct_type(self, simple_valid_graph):
assert simple_valid_graph.verify() is True
def test_broken_graph_bad_factor_node(self, graph_with_function_as_node):
'''
Make sure exception is raised for
broken graph.
'''
with pytest.raises(InvalidGraphException):
graph_with_function_as_node.verify()
def test_broken_graph_empty_factor_domains(
self, graph_with_empty_func_domains):
"""Ensure exception is raised for broken graph."""
with pytest.raises(InvalidGraphException):
graph_with_empty_func_domains.verify()
def test_broken_graph_missing_factor_domains(
self, graph_with_missing_func_domains):
"""Ensureexception is raised for broken graph."""
with pytest.raises(InvalidGraphException):
graph_with_missing_func_domains.verify()
def test_graph_has_no_cycles(self, simple_valid_graph):
assert simple_valid_graph.has_cycles() is False
def test_graph_has_cycles(self, graph_with_cycle):
assert graph_with_cycle.has_cycles() is True
| mit |
brakhane/python-mode | pymode/rope.py | 6 | 25526 | """ Rope support in pymode. """
from __future__ import absolute_import, print_function
import multiprocessing
import os.path
import re
import site
import sys
from rope.base import project, libutils, exceptions, change, worder # noqa
from rope.base.fscommands import FileSystemCommands # noqa
from rope.base.taskhandle import TaskHandle # noqa
from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa
from rope.refactor import ModuleToPackage, ImportOrganizer, rename, extract, inline, usefunction, move, change_signature, importutils # noqa
from ._compat import StringIO
from .environment import env
def look_ropeproject(path):
""" Search for ropeproject in current and parent dirs.
    :return str: The found project root (falls back to the given path)
"""
env.debug('Look project', path)
p = os.path.abspath(path)
while True:
if '.ropeproject' in os.listdir(p):
return p
new_p = os.path.abspath(os.path.join(p, ".."))
if new_p == p:
return path
p = new_p
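# Illustrative walk (hypothetical paths): starting from /home/user/prj/src,
# the loop above checks /home/user/prj/src, /home/user/prj, /home/user, ...
# and returns the first directory containing '.ropeproject', falling back to
# the original path once the filesystem root is reached.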
@env.catch_exceptions
def completions():
""" Search completions.
:return None:
"""
row, col = env.cursor
if env.var('a:findstart', True):
count = 0
for char in reversed(env.current.line[:col]):
if not re.match(r'[\w\d]', char):
break
count += 1
env.debug('Complete find start', (col - count))
return env.stop(col - count)
base = env.var('a:base')
source, offset = env.get_offset_params((row, col), base)
proposals = get_proporsals(source, offset, base)
return env.stop(proposals)
FROM_RE = re.compile(r'^\s*from\s+[\.\w\d_]+$')
@env.catch_exceptions
def complete(dot=False):
""" Ctrl+Space completion.
:return bool: success
"""
row, col = env.cursor
source, offset = env.get_offset_params()
cline = env.current.line[:col]
env.debug('dot completion', cline)
    if FROM_RE.match(cline) or cline.endswith('..') or cline.endswith(r'\.'):
return env.stop("")
proposals = get_proporsals(source, offset, dot=dot)
if not proposals:
return False
prefix = proposals[0]['word']
# Find common part
for p in proposals:
common = len([
c1 for c1, c2 in zip(prefix, p['word']) if c1 == c2 and c1 != ' '
])
prefix = prefix[:common]
s_offset = codeassist.starting_offset(source, offset)
p_prefix = prefix[offset - s_offset:]
line = env.lines[row - 1]
cline = line[:col] + p_prefix + line[col:]
if cline != line:
env.curbuf[row - 1] = env.prepare_value(cline, dumps=False)
env.current.window.cursor = (row, col + len(p_prefix))
env.run('complete', col - len(prefix) + len(p_prefix) + 1, proposals)
return True
def get_proporsals(source, offset, base='', dot=False):
""" Code assist.
    :return list: a list of completion proposal dicts
"""
with RopeContext() as ctx:
try:
proposals = codeassist.code_assist(
ctx.project, source, offset, ctx.resource, maxfixes=3,
later_locals=False)
except exceptions.ModuleSyntaxError:
proposals = []
proposals = sorted(proposals, key=_sort_proporsals)
out = []
preview = 'preview' in ctx.options.get('completeopt')
for p in proposals:
out.append(dict(
word=p.name,
menu=p.type,
kind=p.scope + ':',
info=p.get_doc() or "No docs." if preview else "",
))
out = _get_autoimport_proposals(out, ctx, source, offset, dot=dot)
return out
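# Illustrative proposal entry (field values are hypothetical):
#     {'word': 'isfile', 'menu': 'function', 'kind': 'imported:',
#      'info': 'isfile(path)\n\nTest whether a path is a regular file.'}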
@env.catch_exceptions
def goto():
""" Goto definition. """
with RopeContext() as ctx:
source, offset = env.get_offset_params()
found_resource, line = codeassist.get_definition_location(
ctx.project, source, offset, ctx.resource, maxfixes=3)
if not found_resource:
env.error('Definition not found')
return
env.goto_file(
found_resource.real_path,
cmd=ctx.options.get('goto_definition_cmd'))
env.goto_line(line)
@env.catch_exceptions
def show_doc():
""" Show documentation. """
with RopeContext() as ctx:
source, offset = env.get_offset_params()
try:
doc = codeassist.get_doc(
ctx.project, source, offset, ctx.resource, maxfixes=3)
if not doc:
raise exceptions.BadIdentifierError
env.let('l:output', doc.split('\n'))
except exceptions.BadIdentifierError:
env.error("No documentation found.")
def find_it():
""" Find occurrences. """
with RopeContext() as ctx:
_, offset = env.get_offset_params()
try:
occurrences = findit.find_occurrences(
ctx.project, ctx.resource, offset)
except exceptions.BadIdentifierError:
occurrences = []
lst = []
for oc in occurrences:
lst.append(dict(
filename=oc.resource.path,
text=env.lines[oc.lineno - 1] if oc.resource.real_path == env.curbuf.name else "", # noqa
lnum=oc.lineno,
))
env.let('loclist._loclist', lst)
def update_python_path(paths):
""" Update sys.path and make sure the new items come first. """
old_sys_path_items = list(sys.path)
for path in paths:
# see if it is a site dir
if path.find('site-packages') != -1:
site.addsitedir(path)
else:
sys.path.insert(0, path)
# Reorder sys.path so new directories at the front.
new_sys_path_items = set(sys.path) - set(old_sys_path_items)
sys.path = list(new_sys_path_items) + old_sys_path_items
def organize_imports():
""" Organize imports in current file. """
with RopeContext() as ctx:
organizer = ImportOrganizer(ctx.project)
changes = organizer.organize_imports(ctx.resource)
if changes is not None:
progress = ProgressHandler('Organize imports')
ctx.project.do(changes, task_handle=progress.handle)
reload_changes(changes)
@env.catch_exceptions
def regenerate():
""" Clear cache. """
with RopeContext() as ctx:
ctx.project.pycore._invalidate_resource_cache(ctx.resource) # noqa
ctx.importer.generate_cache(resources=[ctx.resource])
ctx.project.sync()
def new():
""" Create a new project. """
root = None
if env.var('a:0') != '0':
root = env.var('a:1')
else:
default = env.var('g:pymode_rope_project_root')
if not default:
default = env.var('getcwd()')
root = env.var('input("Enter project root: ", "%s")' % default)
ropefolder = env.var('g:pymode_rope_ropefolder')
prj = project.Project(projectroot=root, ropefolder=ropefolder)
prj.close()
env.message("Project is opened: %s" % root)
def undo():
""" Undo last changes.
:return bool:
"""
with RopeContext() as ctx:
changes = ctx.project.history.tobe_undone
if changes is None:
env.error('Nothing to undo!')
return False
if env.user_confirm('Undo [%s]?' % str(changes)):
progress = ProgressHandler('Undo %s' % str(changes))
for c in ctx.project.history.undo(task_handle=progress.handle):
reload_changes(c)
def redo():
""" Redo last changes.
:return bool:
"""
with RopeContext() as ctx:
changes = ctx.project.history.tobe_redone
if changes is None:
env.error('Nothing to redo!')
return False
if env.user_confirm('Redo [%s]?' % str(changes)):
progress = ProgressHandler('Redo %s' % str(changes))
for c in ctx.project.history.redo(task_handle=progress.handle):
reload_changes(c)
def cache_project(cls):
""" Cache projects.
:return func:
"""
projects = dict()
resources = dict()
def get_ctx(*args, **kwargs):
path = env.curbuf.name
if resources.get(path):
return resources.get(path)
project_path = env.var('g:pymode_rope_project_root')
if not project_path:
project_path = env.curdir
env.debug('Look ctx', project_path)
if env.var('g:pymode_rope_lookup_project', True):
project_path = look_ropeproject(project_path)
if not os.path.exists(project_path):
env.error("Rope project root not exist: %s" % project_path)
ctx = None
else:
ctx = projects.get(project_path)
if not ctx:
projects[project_path] = ctx = cls(path, project_path)
resources[path] = ctx
return ctx
return get_ctx
def autoimport():
""" Autoimport modules.
:return bool:
"""
word = env.var('a:word')
if not word:
env.error("Should be word under cursor.")
return False
with RopeContext() as ctx:
if not ctx.importer.names:
ctx.generate_autoimport_cache()
modules = ctx.importer.get_modules(word)
if not modules:
env.message('Global name %s not found.' % word)
return False
if len(modules) == 1:
_insert_import(word, modules[0], ctx)
else:
module = env.user_input_choices(
'Which module to import:', *modules)
_insert_import(word, module, ctx)
return True
@cache_project
class RopeContext(object):
""" A context manager to have a rope project context. """
def __init__(self, path, project_path):
""" Init Rope context. """
self.path = path
self.project = project.Project(
project_path, fscommands=FileSystemCommands())
self.importer = rope_autoimport.AutoImport(
project=self.project, observe=False)
update_python_path(self.project.prefs.get('python_path', []))
self.resource = None
self.current = None
self.options = dict(
completeopt=env.var('&completeopt'),
autoimport=env.var('g:pymode_rope_autoimport', True),
autoimport_modules=env.var('g:pymode_rope_autoimport_modules'),
goto_definition_cmd=env.var('g:pymode_rope_goto_definition_cmd'),
)
if os.path.exists("%s/__init__.py" % project_path):
sys.path.append(project_path)
if self.options.get('autoimport') == '1':
self.generate_autoimport_cache()
env.debug('Context init', project_path)
env.message('Init Rope project: %s' % project_path)
def __enter__(self):
""" Enter to Rope ctx. """
env.let('g:pymode_rope_current', self.project.root.real_path)
self.project.validate(self.project.root)
self.resource = libutils.path_to_resource(
self.project, env.curbuf.name, 'file')
if not self.resource.exists() or os.path.isdir(
self.resource.real_path):
self.resource = None
else:
env.debug('Found resource', self.resource.path)
return self
def __exit__(self, t, value, traceback):
""" Exit from Rope ctx. """
if t is None:
self.project.close()
def generate_autoimport_cache(self):
""" Update autoimport cache. """
env.message('Regenerate autoimport cache.')
modules = self.options.get('autoimport_modules', [])
def _update_cache(importer, modules=None):
importer.generate_cache()
if modules:
importer.generate_modules_cache(modules)
importer.project.sync()
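        # Silence stdout/stderr while spawning the worker so cache generation doesn't write into Vim's UI; streams are restored right after start().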
sys.stdout, stdout_ = StringIO.StringIO(), sys.stdout
sys.stderr, stderr_ = StringIO.StringIO(), sys.stderr
process = multiprocessing.Process(target=_update_cache, args=(
self.importer, modules))
process.start()
sys.stdout, sys.stderr = stdout_, stderr_
class ProgressHandler(object):
""" Handle task progress. """
def __init__(self, msg):
""" Init progress handler. """
self.handle = TaskHandle(name="refactoring_handle")
self.handle.add_observer(self)
self.message = msg
def __call__(self):
""" Show current progress. """
percent_done = self.handle.current_jobset().get_percent_done()
env.message('%s - done %s%%' % (self.message, percent_done))
_scope_weight = {
'local': 10, 'attribute': 20, 'global': 30, 'imported': 40, 'builtin': 50}
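# Rank proposals by scope, then put private (underscore-prefixed) names after public ones, then sort by name.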
def _sort_proporsals(p):
return (
_scope_weight.get(p.scope, 100), int(p.name.startswith('_')), p.name)
class Refactoring(object): # noqa
""" Base class for refactor operations. """
def run(self):
""" Run refactoring.
:return bool:
"""
with RopeContext() as ctx:
if not ctx.resource:
env.error("You should save the file before refactoring.")
return None
try:
env.message(self.__doc__)
refactor = self.get_refactor(ctx)
input_str = self.get_input_str(refactor, ctx)
if not input_str:
return False
changes = self.get_changes(refactor, input_str)
action = env.user_input_choices(
'Choose what to do:', 'perform', 'preview')
if not action:
return False
if action == 'preview':
print("\n ")
print("-------------------------------")
print("\n%s\n" % changes.get_description())
print("-------------------------------\n\n")
if not env.user_confirm('Do the changes?'):
return False
progress = ProgressHandler('Apply changes ...')
ctx.project.do(changes, task_handle=progress.handle)
reload_changes(changes)
except exceptions.RefactoringError as e:
env.error(str(e))
except Exception as e:
env.error('Unhandled exception in Pymode: %s' % e)
@staticmethod
def get_refactor(ctx):
""" Get refactor object. """
raise NotImplementedError
@staticmethod
def get_input_str(refactor, ctx):
""" Get user input. Skip by default.
:return bool: True
"""
return True
@staticmethod
def get_changes(refactor, input_str):
""" Get changes.
:return Changes:
"""
progress = ProgressHandler('Calculate changes ...')
return refactor.get_changes(
input_str, task_handle=progress.handle)
class RenameRefactoring(Refactoring):
""" Rename var/function/method/class. """
def __init__(self, module=False):
self.module = module
super(RenameRefactoring, self).__init__()
def get_refactor(self, ctx):
""" Function description.
:return Rename:
"""
offset = None
if not self.module:
_, offset = env.get_offset_params()
env.debug('Prepare rename', offset)
return rename.Rename(ctx.project, ctx.resource, offset)
def get_input_str(self, refactor, ctx):
""" Return user input. """
oldname = str(refactor.get_old_name())
msg = 'Renaming method/variable. New name:'
if self.module:
msg = 'Renaming module. New name:'
newname = env.user_input(msg, oldname)
if newname == oldname:
env.message("Nothing to do.")
return False
return newname
class ExtractMethodRefactoring(Refactoring):
""" Extract method. """
@staticmethod
def get_input_str(refactor, ctx):
""" Return user input. """
return env.user_input('New method name:')
@staticmethod
def get_refactor(ctx):
""" Function description.
:return Rename:
"""
cursor1, cursor2 = env.curbuf.mark('<'), env.curbuf.mark('>')
_, offset1 = env.get_offset_params(cursor1)
_, offset2 = env.get_offset_params(cursor2)
return extract.ExtractMethod(
ctx.project, ctx.resource, offset1, offset2)
@staticmethod
def get_changes(refactor, input_str):
""" Get changes.
:return Changes:
"""
return refactor.get_changes(input_str)
class ExtractVariableRefactoring(Refactoring):
""" Extract variable. """
@staticmethod
def get_input_str(refactor, ctx):
""" Return user input. """
return env.user_input('New variable name:')
@staticmethod
def get_refactor(ctx):
""" Function description.
:return Rename:
"""
cursor1, cursor2 = env.curbuf.mark('<'), env.curbuf.mark('>')
_, offset1 = env.get_offset_params(cursor1)
_, offset2 = env.get_offset_params(cursor2)
return extract.ExtractVariable(
ctx.project, ctx.resource, offset1, offset2)
@staticmethod
def get_changes(refactor, input_str):
""" Get changes.
:return Changes:
"""
return refactor.get_changes(input_str)
class InlineRefactoring(Refactoring):
""" Inline variable/method. """
@staticmethod
def get_refactor(ctx):
""" Function description.
:return Rename:
"""
_, offset = env.get_offset_params()
return inline.create_inline(ctx.project, ctx.resource, offset)
@staticmethod
def get_changes(refactor, input_str):
""" Get changes.
:return Changes:
"""
progress = ProgressHandler('Calculate changes ...')
return refactor.get_changes(task_handle=progress.handle)
class UseFunctionRefactoring(Refactoring):
""" Use selected function as possible. """
@staticmethod
def get_refactor(ctx):
""" Function description.
:return Rename:
"""
_, offset = env.get_offset_params()
return usefunction.UseFunction(ctx.project, ctx.resource, offset)
@staticmethod
def get_changes(refactor, input_str):
""" Get changes.
:return Changes:
"""
progress = ProgressHandler('Calculate changes ...')
return refactor.get_changes(
resources=[refactor.resource], task_handle=progress.handle)
class ModuleToPackageRefactoring(Refactoring):
""" Convert module to package. """
@staticmethod
def get_refactor(ctx):
""" Function description.
:return Rename:
"""
return ModuleToPackage(ctx.project, ctx.resource)
@staticmethod
def get_changes(refactor, input_str):
""" Get changes.
:return Changes:
"""
return refactor.get_changes()
class MoveRefactoring(Refactoring):
""" Move method/module to other class/global. """
@staticmethod
def get_input_str(refactor, ctx):
""" Get destination.
:return str:
"""
return env.user_input('Enter destination:')
@staticmethod
def get_refactor(ctx):
""" Function description.
:return Rename:
"""
_, offset = env.get_offset_params()
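        # An offset of 0 is treated as "move the whole module" (rope expects None for module moves).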
if offset == 0:
offset = None
return move.create_move(ctx.project, ctx.resource, offset)
class ChangeSignatureRefactoring(Refactoring):
""" Change function signature (add/remove/sort arguments). """
@staticmethod
def get_input_str(refactor, ctx):
""" Get destination.
:return str:
"""
args = refactor.get_args()
default = ', '.join(a[0] for a in args)
return env.user_input('Change the signature:', default)
@staticmethod
def get_refactor(ctx):
""" Function description.
:return Rename:
"""
_, offset = env.get_offset_params()
return change_signature.ChangeSignature(
ctx.project, ctx.resource, offset)
def get_changes(self, refactor, input_string):
""" Function description.
:return Rope.changes:
"""
args = re.sub(r'[\s\(\)]+', '', input_string).split(',')
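        # Diff the old argument list against the user's input: remove, add, then reorder.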
olds = [arg[0] for arg in refactor.get_args()]
changers = []
for arg in [a for a in olds if a not in args]:
changers.append(change_signature.ArgumentRemover(olds.index(arg)))
olds.remove(arg)
order = []
for index, arg in enumerate(args):
if arg not in olds:
changers.append(change_signature.ArgumentAdder(index, arg))
olds.insert(index, arg)
order.append(olds.index(arg))
changers.append(change_signature.ArgumentReorderer(
order, autodef='None'))
return refactor.get_changes(changers)
class GenerateElementRefactoring(Refactoring):
""" Class description. """
def __init__(self, kind, *args, **kwargs):
""" Function description. """
self.kind = kind
super(GenerateElementRefactoring, self).__init__(*args, **kwargs)
def get_refactor(self, ctx):
""" Function description.
:return Rename:
"""
_, offset = env.get_offset_params()
return generate.create_generate(
self.kind, ctx.project, ctx.resource, offset)
def get_changes(self, refactor, input_str):
""" Function description.
:return Rope.changes:
"""
return refactor.get_changes()
@env.catch_exceptions
def reload_changes(changes):
""" Reload changed buffers. """
resources = changes.get_changed_resources()
moved = _get_moved_resources(changes) # noqa
current = env.curbuf.number
for f in resources:
bufnr = env.var('bufnr("%s")' % f.real_path)
env.goto_buffer(bufnr)
path = env.curbuf.name
if f in moved:
path = moved[f].real_path
env.debug('Reload', f.real_path, path, bufnr)
env.goto_file(path, 'e!', force=True)
env.message("%s has been changed." % f.real_path, history=True)
env.goto_buffer(current)
def _get_moved_resources(changes):
moved = dict()
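    # A ChangeSet can nest other changes, so recurse to collect every MoveResource.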
if isinstance(changes, change.ChangeSet):
for c in changes.changes:
moved.update(_get_moved_resources(c))
if isinstance(changes, change.MoveResource):
moved[changes.resource] = changes.new_resource
return moved
def _get_autoimport_proposals(out, ctx, source, offset, dot=False):
if not ctx.options.get('autoimport') or dot:
return out
if '.' in codeassist.starting_expression(source, offset):
return out
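    # Walk backwards over identifier characters to recover the partial name being completed.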
current_offset = offset - 1
while current_offset > 0 and (
source[current_offset].isalnum() or source[current_offset] == '_'):
current_offset -= 1
starting = source[current_offset:offset]
starting = starting.strip()
if not starting:
return out
for assist in ctx.importer.import_assist(starting):
out.append(dict(
abbr=' : '.join(assist),
word=assist[0],
kind='autoimport:',
))
return out
@env.catch_exceptions
def complete_check():
""" Function description.
:return bool:
"""
row, column = env.cursor
line = env.lines[row - 1]
word_finder = worder.Worder(line, True)
parent, name, _ = word_finder.get_splitted_primary_before(column - 1)
if parent:
return False
with RopeContext() as ctx:
modules = ctx.importer.get_modules(name)
if not modules:
return False
if name in ctx.project.pycore.resource_to_pyobject(ctx.resource):
return False
if not env.user_confirm("Import %s?" % name, True):
return False
if len(modules) == 1:
_insert_import(name, modules[0], ctx)
else:
            module = env.user_input_choices('Which module to import:', *modules)
if module:
_insert_import(name, module, ctx)
def _insert_import(name, module, ctx):
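    # With no saved resource, edit the buffer directly; otherwise let rope's import tools rewrite the module.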
if not ctx.resource:
source, _ = env.get_offset_params()
lineno = ctx.importer.find_insertion_line(source)
line = 'from %s import %s' % (module, name)
env.curbuf[lineno - 1:lineno - 1] = [
env.prepare_value(line, dumps=False)]
return True
pyobject = ctx.project.pycore.resource_to_pyobject(ctx.resource)
import_tools = importutils.ImportTools(ctx.project.pycore)
module_imports = import_tools.module_imports(pyobject)
new_import = importutils.FromImport(module, 0, [[name, None]])
module_imports.add_import(new_import)
changes = change.ChangeContents(
ctx.resource, module_imports.get_changed_source())
action = env.user_input_choices(
'Choose what to do:', 'perform', 'preview')
if not action:
return False
if action == 'preview':
print("\n ")
print("-------------------------------")
print("\n%s\n" % changes.get_description())
print("-------------------------------\n\n")
if not env.user_confirm('Do the changes?'):
return False
progress = ProgressHandler('Apply changes ...')
ctx.project.do(changes, task_handle=progress.handle)
reload_changes(changes)
# pylama:ignore=W1401,E1120,D
| lgpl-3.0 |
rkashapov/buildbot | www/waterfall_view/setup.py | 10 | 1565 | #!/usr/bin/env python
#
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
try:
from buildbot_pkg import setup_www_plugin
except ImportError:
import sys
print("Please install buildbot_pkg module in order to install that package, or use the pre-build .whl modules available on pypi", file=sys.stderr)
sys.exit(1)
setup_www_plugin(
name='buildbot-waterfall-view',
description='Buildbot Waterfall View plugin',
author=u'Pierre Tardy',
author_email=u'[email protected]',
url='http://buildbot.net/',
license='GNU GPL',
packages=['buildbot_waterfall_view'],
package_data={
'': [
'VERSION',
'static/*'
]
},
entry_points="""
[buildbot.www]
waterfall_view = buildbot_waterfall_view:ep
""",
)
| gpl-2.0 |
jsamoocha/pysweat | tests/test_transform_activities_moving_averages.py | 1 | 5107 | import unittest
import numpy as np
import pandas as pd
from pysweat.transformation.activities import weighted_average, compute_moving_averages
from pysweat.transformation.general import get_observations_without_feature
from pysweat.transformation.windows import select_activity_window
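# Shared fixture: three activities on consecutive days ending 2015-05-01; only the first has a precomputed 28-day average speed.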
test_activities = pd.DataFrame().from_dict({
'start_date_local': [np.datetime64(ts) for ts in pd.date_range(end='2015-05-01', periods=3).tolist()],
'test_var': [1, 2, 3.5],
'distance': [1, 1, 2],
'average_speed': [18, 22, 12],
'average_speed_28': [18, np.NaN, np.NaN]
})
class ActivityMovingAverageTransformationTest(unittest.TestCase):
mock_athletes = [
{'id': 123},
{'id': 456}
]
def setUp(self):
self.test_activities = pd.DataFrame.copy(test_activities)
def test_select_window_end_ts_and_window_size_within_data(self):
"""Should return dataframe with complete window"""
selected_activities = select_activity_window(self.test_activities, self.test_activities.start_date_local[2], 2)
self.assertEqual(2, len(selected_activities))
self.assertEqual(2, selected_activities.test_var.values[0])
self.assertEqual(3.5, selected_activities.test_var.values[1])
def test_select_window_end_ts_after_last_activity_window_size_within_data(self):
"""Should return last activity"""
selected_activities = select_activity_window(self.test_activities, pd.Timestamp('2015-05-02'), 2)
self.assertEqual(1, len(selected_activities))
self.assertEqual(3.5, selected_activities.test_var.values[0])
def test_select_window_end_ts_after_last_activity_window_size_outside_data(self):
"""Should return empty"""
selected_activities = select_activity_window(self.test_activities, pd.Timestamp('2015-05-05'), 2)
self.assertEqual(0, len(selected_activities))
def test_select_window_end_ts_before_last_activity_window_size_outside_data(self):
"""Should return first activity"""
selected_activities = select_activity_window(self.test_activities, pd.Timestamp('2015-04-29'), 2)
self.assertEqual(1, len(selected_activities))
self.assertEqual(1, selected_activities.test_var.values[0])
def test_weighted_average(self):
"""Should return average speed weighted by distance"""
self.assertEqual(16, weighted_average(self.test_activities, feature='average_speed', weight_feature='distance'))
def test_get_activities_without_feature_all_activities_with_feature(self):
"""Should return all-false boolean index if all activities have the feature"""
self.assertEqual([False, False, False],
list(get_observations_without_feature(self.test_activities, 'average_speed')))
def test_get_activities_without_feature_no_activities_with_feature(self):
"""Should return all-true boolean index if no activities have the feature"""
self.assertEqual([True, True, True],
list(get_observations_without_feature(self.test_activities, 'non_existing_feature')))
def test_get_activities_without_feature_first_activity_has_feature(self):
"""Should return all-true boolean index except for first activity that has the feature"""
self.assertEqual([False, True, True],
list(get_observations_without_feature(self.test_activities, 'average_speed_28')))
def test_compute_moving_averages_retains_original_data(self):
"""Should compute moving average for given feature retaining existing features and observations"""
self.assertEqual(3, len(compute_moving_averages(self.test_activities, feature_name='test_var', window_days=2)))
self.assertEqual(len(test_activities.columns) + 1,
len(compute_moving_averages(
self.test_activities, feature_name='test_var', window_days=2).columns))
def test_compute_moving_averages_no_new_column_for_existing_moving_averages(self):
"""Should not add new column if one or more moving averages were computed for the given feature"""
self.assertEqual(len(test_activities.columns),
len(compute_moving_averages(
self.test_activities, feature_name='average_speed', window_days=28).columns))
def test_compute_moving_averages_adds_column_for_given_feature(self):
"""Should create new column with name [original_feature_name]_[window_size] as name"""
self.assertIn('test_var_3',
compute_moving_averages(self.test_activities, feature_name='test_var', window_days=3).columns)
def test_compute_moving_averages_computes_moving_averages(self):
"""Should compute moving averages for given feature and window"""
self.assertEqual([1, 1.5, 3],
list(compute_moving_averages(self.test_activities,
feature_name='test_var',
window_days=2).test_var_2))
| apache-2.0 |
sfroment/collectd-elasticsearch | tools/generate_defaults.py | 3 | 3690 | #! /usr/bin/env python
import json
def load_file(file):
""" Converts an array of file paths into an array of json defined objects
:param file: An array of filepath strings
:return: An array of loaded json objects
"""
CONFIGS = []
    with open(file, 'r') as f:
        j = json.load(f)
    CONFIGS.append(j)
return CONFIGS
def process_json_minimal(conf):
""" Processes an array of SignalFx Default Dashboard json objects
:param conf: An array of json loaded objects
:return: A string representation of a python dictionary named "DEFAULTS"
"""
d = set()
DEFAULTS = "DEFAULTS = {\n"
DEFAULTS += " # AUTOMATICALLY GENERATED METRIC NAMES\n"
DEFAULTS += " # TO INCLUDE BY DEFAULT\n"
# Iterate over each file passed in
for file in conf:
# Iterate each element in the first level array
for a in file:
# ? a.sf_chart
if 'sf_chart' in a.keys():
# ? a.sf_uiModel
if 'sf_uiModel' in a.keys():
# ? a.sf_uiModel.allPlots
if 'allPlots' in a['sf_uiModel'].keys():
# Iterate over each plot in a.sf_uiModel.allPlots
for b in a['sf_uiModel']['allPlots']:
# ? a.sf_uiModel.allPlots[i].seriesData
if 'seriesData' in b.keys():
# ? a.sf_uiModel.allPlots[i].seriesData.metric
if 'metric' in b['seriesData'].keys():
# temporarily store the metric name
metric = b['seriesData']['metric']
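                                    # strip the plugin prefix (up to the first '.') before collecting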
d.add(metric[metric.find('.')+1:])
for elem in d:
DEFAULTS += ' "' + elem + '",\n'
DEFAULTS += '}\n'
return DEFAULTS
def save_file(text, file):
"""Saves the supplied string to a file
:param text: The string that should be written out
:param file: The path and file name to write out
"""
f = open(file, 'w')
f.write(text)
f.close()
def run(files):
"""Main function of the script.
"""
config = []
for file in files:
try:
# Load the json files
config += load_file(file)
print "LOADED: %s" % file
except Exception as e:
print "Failed to load the following file '%s' due to: %s" % \
(file, e)
if len(config) > 0:
# Process the array of loaded json
defaults = process_json_minimal(config)
# Save the file to the working directory
save_file(defaults, "DEFAULTS_DICT.py")
# Load the generated defaults from the python file that was written out
from DEFAULTS_DICT import DEFAULTS
print DEFAULTS
else:
# There is nothing to process, so we print the usage message
print """
generate_defaults.py
USAGE:
$ ./generate_defaults.py <json file> <json file>...<json file>*
$ python generate_defaults.py <json file> <json file>...<json file>*
INPUT:
SignalFx Default Dashboard JSON files for Elasticsearch:
Page_Elasticsearch.json
Page_Elasticsearch_Index.json
OUTPUT:
CONSOLE: Prints the generated set
FILE: Generates a DEFAULTS_DICT.py file in the working directory
NOTE:
While the DEFAULTS_DICT.py file can be imported,
the elasticsearch_collectd.py file does not import this file.
You MUST copy the DEFAULTS dictionary into the elasticsearch_collectd.py
file when you wish to update the default dashboard.
"""
if __name__ == '__main__':
import sys
run(sys.argv[1:])
| apache-2.0 |
bgroff/kala-app | django_kala/api/basecamp_classic/people/renderers.py | 1 | 12868 | from io import StringIO
from django.db.models import QuerySet
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.encoding import smart_text
from rest_framework.renderers import BaseRenderer
class XMLPeopleRenderer(BaseRenderer):
"""
Renderer which serializes to XML.
"""
media_type = 'application/xml'
format = 'xml'
charset = 'utf-8'
item_tag_name = 'person'
root_tag_name = 'people'
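    # render() expects a dict with 'users' (a QuerySet, list, or single user) and 'request_user'; anything else is serialized as validation errors.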
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders `data` into serialized XML.
"""
if data is None:
return ''
stream = StringIO()
xml = SimplerXMLGenerator(stream, self.charset)
xml.startDocument()
# If we do not have users or request_user then we have errors
if not data.get('users', False) and not data.get('request_user', False):
self._to_errors(data, xml)
xml.endDocument()
return stream.getvalue()
# If users are a list, deal with that
        if isinstance(data['users'], (QuerySet, list)):
xml.startElement('people', {'type': 'array'})
self._to_xml(data['users'], data['request_user'], xml)
xml.endElement('people')
# Otherwise just render a person
else:
self.render_person(data['users'], data['request_user'], xml)
xml.endDocument()
return stream.getvalue()
def _to_xml(self, users, request_user, xml):
for user in users:
self.render_person(user, request_user, xml)
def render_person(self, user, request_user, xml):
xml.startElement('person', {})
xml.startElement('id', {'type': 'integer'})
xml.characters(smart_text(user.id))
xml.endElement('id')
xml.startElement('uuid', {'type': 'uuid'})
xml.characters(smart_text(user.uuid))
xml.endElement('uuid')
xml.startElement('created-at', {'type': 'datetime'})
xml.characters(smart_text(user.date_joined.isoformat()))
xml.endElement('created-at')
xml.startElement('first-name', {})
xml.characters(smart_text(user.first_name))
xml.endElement('first-name')
xml.startElement('last-name', {})
xml.characters(smart_text(user.last_name))
xml.endElement('last-name')
xml.startElement('title', {})
try:
xml.characters(smart_text(user.title if user.title else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('title')
xml.startElement('email-address', {})
xml.characters(smart_text(user.email))
xml.endElement('email-address')
xml.startElement('im-handle', {})
try:
xml.characters(smart_text(user.im_handle if user.im_handle else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('im-handle')
xml.startElement('im-service', {})
try:
xml.characters(smart_text(user.im_service if user.im_service else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('im-service')
xml.startElement('phone-number-office', {})
try:
xml.characters(smart_text(user.phone if user.phone else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('phone-number-office')
xml.startElement('phone-number-office-ext', {})
try:
xml.characters(smart_text(user.ext if user.ext else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('phone-number-office-ext')
xml.startElement('phone-number-mobile', {})
try:
xml.characters(smart_text(user.phone_mobile if user.phone_mobile else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('phone-number-mobile')
xml.startElement('phone-number-home', {})
try:
xml.characters(smart_text(user.phone_home if user.phone_home else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('phone-number-home')
xml.startElement('phone-number-fax', {})
try:
xml.characters(smart_text(user.phone_fax if user.phone_fax else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('phone-number-fax')
xml.startElement('company-id', {'type': 'integer'})
try:
xml.characters(smart_text(user.organizations.first().id if user.organizations.first() else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('company-id')
xml.startElement('client-id', {'type': 'integer'})
try:
xml.characters(smart_text(user.client.id if user.client.id else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('client-id')
xml.startElement('avatar-url', {})
try:
xml.characters(smart_text(user.avatar_url if user.avatar_url else ''))
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('avatar-url')
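        # The remaining fields (username, admin/deleted flags, project access) are only exposed to superusers.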
if request_user.is_superuser:
xml.startElement('user-name', {})
xml.characters(smart_text(user.username))
xml.endElement('user-name')
xml.startElement('administrator', {'type': 'boolean'})
xml.characters(smart_text(str(user.is_superuser).lower()))
xml.endElement('administrator')
xml.startElement('deleted', {'type': 'boolean'})
xml.characters(smart_text(str(not user.is_active)).lower())
xml.endElement('deleted')
xml.startElement('has-access-to-new-projects', {'type': 'boolean'})
try:
xml.characters(
smart_text(
str(user.access_new_projects).lower() if user.access_new_projects else str(False).lower()
)
)
except AttributeError:
xml.characters(smart_text(''))
xml.endElement('has-access-to-new-projects')
xml.endElement('person')
def _to_errors(self, data, xml):
xml.startElement('errors', {})
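        # Each key in data maps a field to a list of validation messages; emit one <error> element per message.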
if data.get('id', False):
xml.startElement('id', {'type': 'integer'})
for error in data['id']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('id')
if data.get('uuid', False):
xml.startElement('uuid', {'type': 'uuid'})
for error in data['uuid']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('uuid')
if data.get('username', False):
xml.startElement('user-name', {})
for error in data['username']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('user-name')
if data.get('first_name', False):
xml.startElement('first-name', {})
for error in data['first_name']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('first-name')
if data.get('last_name', False):
xml.startElement('last-name', {})
for error in data['last_name']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('last-name')
if data.get('title', False):
xml.startElement('title', {})
for error in data['title']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('title')
if data.get('email', False):
xml.startElement('email-address', {})
for error in data['email']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('email-address')
        if data.get('im_handle', False):
            xml.startElement('im-handle', {})
            for error in data['im_handle']:
                xml.startElement('error', {})
                xml.characters(smart_text(error))
                xml.endElement('error')
            xml.endElement('im-handle')
if data.get('im_service', False):
xml.startElement('im-service', {})
for error in data['im_service']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('im-service')
if data.get('phone', False):
xml.startElement('phone-number-office', {})
for error in data['phone']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('phone-number-office')
if data.get('phone_ext', False):
xml.startElement('phone-number-office-ext', {})
            for error in data['phone_ext']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('phone-number-office-ext')
if data.get('mobile', False):
xml.startElement('phone-number-mobile', {})
for error in data['mobile']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('phone-number-mobile')
if data.get('home', False):
xml.startElement('phone-number-home', {})
for error in data['home']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('phone-number-home')
if data.get('fax', False):
xml.startElement('phone-number-fax', {})
for error in data['fax']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('phone-number-fax')
if data.get('company', False):
xml.startElement('company-id', {})
            for error in data['company']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('company-id')
if data.get('client', False):
xml.startElement('client-id', {})
for error in data['client']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('client-id')
if data.get('avatar_url', False):
xml.startElement('avatar-url', {})
for error in data['avatar_url']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('avatar-url')
if data.get('is_superuser', False):
xml.startElement('administrator', {})
for error in data['is_superuser']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('administrator')
if data.get('access_new_projects', False):
xml.startElement('has-access-to-new-projects', {})
for error in data['access_new_projects']:
xml.startElement('error', {})
xml.characters(smart_text(error))
xml.endElement('error')
xml.endElement('has-access-to-new-projects')
xml.endElement('errors')
| mit |
persandstrom/home-assistant | tests/components/test_spaceapi.py | 4 | 3476 | """The tests for the Home Assistant SpaceAPI component."""
# pylint: disable=protected-access
from unittest.mock import patch
import pytest
from tests.common import mock_coro
from homeassistant.components.spaceapi import (
DOMAIN, SPACEAPI_VERSION, URL_API_SPACEAPI)
from homeassistant.setup import async_setup_component
CONFIG = {
DOMAIN: {
'space': 'Home',
'logo': 'https://home-assistant.io/logo.png',
'url': 'https://home-assistant.io',
'location': {'address': 'In your Home'},
'contact': {'email': '[email protected]'},
'issue_report_channels': ['email'],
'state': {
'entity_id': 'test.test_door',
'icon_open': 'https://home-assistant.io/open.png',
'icon_closed': 'https://home-assistant.io/close.png',
},
'sensors': {
'temperature': ['test.temp1', 'test.temp2'],
'humidity': ['test.hum1'],
}
}
}
SENSOR_OUTPUT = {
'temperature': [
{
'location': 'Home',
'name': 'temp1',
'unit': '°C',
'value': '25'
},
{
'location': 'Home',
'name': 'temp2',
'unit': '°C',
'value': '23'
},
],
'humidity': [
{
'location': 'Home',
'name': 'hum1',
'unit': '%',
'value': '88'
},
]
}
@pytest.fixture
def mock_client(hass, aiohttp_client):
"""Start the Home Assistant HTTP component."""
with patch('homeassistant.components.spaceapi',
return_value=mock_coro(True)):
hass.loop.run_until_complete(
async_setup_component(hass, 'spaceapi', CONFIG))
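    # Seed sensor states so the endpoint has temperature and humidity values to report.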
hass.states.async_set('test.temp1', 25,
attributes={'unit_of_measurement': '°C'})
hass.states.async_set('test.temp2', 23,
attributes={'unit_of_measurement': '°C'})
hass.states.async_set('test.hum1', 88,
attributes={'unit_of_measurement': '%'})
return hass.loop.run_until_complete(aiohttp_client(hass.http.app))
async def test_spaceapi_get(hass, mock_client):
"""Test response after start-up Home Assistant."""
resp = await mock_client.get(URL_API_SPACEAPI)
assert resp.status == 200
data = await resp.json()
assert data['api'] == SPACEAPI_VERSION
assert data['space'] == 'Home'
assert data['contact']['email'] == '[email protected]'
assert data['location']['address'] == 'In your Home'
assert data['location']['latitude'] == 32.87336
assert data['location']['longitude'] == -117.22743
assert data['state']['open'] == 'null'
assert data['state']['icon']['open'] == \
'https://home-assistant.io/open.png'
assert data['state']['icon']['close'] == \
'https://home-assistant.io/close.png'
async def test_spaceapi_state_get(hass, mock_client):
"""Test response if the state entity was set."""
hass.states.async_set('test.test_door', True)
resp = await mock_client.get(URL_API_SPACEAPI)
assert resp.status == 200
data = await resp.json()
assert data['state']['open'] == bool(1)
async def test_spaceapi_sensors_get(hass, mock_client):
"""Test the response for the sensors."""
resp = await mock_client.get(URL_API_SPACEAPI)
assert resp.status == 200
data = await resp.json()
assert data['sensors'] == SENSOR_OUTPUT
| apache-2.0 |
kubevirt/client-python | kubevirt/models/v1_access_credential.py | 1 | 4455 | # coding: utf-8
"""
KubeVirt API
    This is the KubeVirt API, an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1AccessCredential(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'ssh_public_key': 'V1SSHPublicKeyAccessCredential',
'user_password': 'V1UserPasswordAccessCredential'
}
attribute_map = {
'ssh_public_key': 'sshPublicKey',
'user_password': 'userPassword'
}
def __init__(self, ssh_public_key=None, user_password=None):
"""
V1AccessCredential - a model defined in Swagger
"""
self._ssh_public_key = None
self._user_password = None
if ssh_public_key is not None:
self.ssh_public_key = ssh_public_key
if user_password is not None:
self.user_password = user_password
@property
def ssh_public_key(self):
"""
Gets the ssh_public_key of this V1AccessCredential.
SSHPublicKey represents the source and method of applying a ssh public key into a guest virtual machine.
:return: The ssh_public_key of this V1AccessCredential.
:rtype: V1SSHPublicKeyAccessCredential
"""
return self._ssh_public_key
@ssh_public_key.setter
def ssh_public_key(self, ssh_public_key):
"""
Sets the ssh_public_key of this V1AccessCredential.
SSHPublicKey represents the source and method of applying a ssh public key into a guest virtual machine.
:param ssh_public_key: The ssh_public_key of this V1AccessCredential.
:type: V1SSHPublicKeyAccessCredential
"""
self._ssh_public_key = ssh_public_key
@property
def user_password(self):
"""
Gets the user_password of this V1AccessCredential.
UserPassword represents the source and method for applying a guest user's password
:return: The user_password of this V1AccessCredential.
:rtype: V1UserPasswordAccessCredential
"""
return self._user_password
@user_password.setter
def user_password(self, user_password):
"""
Sets the user_password of this V1AccessCredential.
UserPassword represents the source and method for applying a guest user's password
:param user_password: The user_password of this V1AccessCredential.
:type: V1UserPasswordAccessCredential
"""
self._user_password = user_password
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1AccessCredential):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| apache-2.0 |
SeanTater/tscan | tscan/test_cli.py | 1 | 1962 | from unittest import TestCase
from mock import Mock, call
from cli import Plugin, Argument
from argparse import Namespace
class TestArgument(TestCase):
def test_create(self):
arg = Argument('--abcd', '--efgh', foo='bar')
assert arg.flags == ('--abcd', '--efgh')
assert arg.names == ['abcd', 'efgh']
assert arg.name == 'abcd'
assert arg.tags == dict(foo='bar')
def test_add_to_argparse(self):
arg = Argument('--abcd', '--efgh', foo='bar')
m = Mock()
arg.add_to_argparse(m)
assert m.add_argument.mock_calls == [call('--abcd', '--efgh', foo='bar')]
class TestPlugin(TestCase):
    def test_register_subclass(self):
        class Example(Plugin):
            pass
        assert not Plugin.all_plugins
        Plugin.register(Example)
        assert Example in Plugin.all_plugins
def test_register(self):
fake_plugin = Mock()
assert Plugin.register(fake_plugin) is fake_plugin
fake_plugin._args = [
Argument('required', tag='value'),
Argument('--optional', tag='value')]
Plugin.register(fake_plugin)
assert fake_plugin in Plugin.all_plugins
def init_from_cli(self):
# Documented GIGO: argparse handles missing required arguments
class Example(Plugin):
__init__ = Mock()
pass
fake_plugin = Example()
fake_plugin.init_from_cli(Namespace())
fake_plugin.init_from_cli(Namespace(required='foo'))
fake_plugin.init_from_cli(Namespace(required='foo', optional='bar'))
fake_plugin.init_from_cli(Namespace(required='foo', optional='bar', garbage='baz'))
assert fake_plugin.__init__.mock_calls == [
call(),
call(required='foo'),
call(required='foo', optional='bar'),
call(required='foo', optional='bar')] | gpl-3.0 |
nhenezi/kuma | kuma/wiki/migrations/0019_disallow_add_attachment.py | 5 | 17057 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
permissions = (
("disallow_add_attachment", "Cannot upload attachment"),
)
def forwards(self, orm):
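        # Flush pending model-creation signals so the 'attachments' content type exists before permissions are attached to it.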
db.send_pending_create_signals()
ct = orm['contenttypes.ContentType'].objects.get(
app_label='attachments', model='attachment')
for p in self.permissions:
perm, created = orm['auth.permission'].objects.get_or_create(
content_type=ct, codename=p[0], defaults=dict(name=p[1]))
def backwards(self, orm):
ct = orm['contenttypes.ContentType'].objects.get(
            app_label='attachments', model='attachment')
for p in self.permissions:
orm['auth.permission'].objects.filter(content_type=ct,
codename=p[0]).delete()
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tidings.watch': {
'Meta': {'object_name': 'Watch'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'attachments.attachment': {
'Meta': {'object_name': 'Attachment', 'db_table': "'wiki_attachment'"},
'current_revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'current_rev'", 'null': 'True', 'to': "orm['attachments.AttachmentRevision']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mindtouch_attachment_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'attachments.attachmentrevision': {
'Meta': {'object_name': 'AttachmentRevision', 'db_table': "'wiki_attachmentrevision'"},
'attachment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['attachments.Attachment']"}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_attachment_revisions'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '500'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_mindtouch_migration': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'mindtouch_old_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'})
},
'wiki.document': {
'Meta': {'unique_together': "(('parent', 'locale'), ('slug', 'locale'))", 'object_name': 'Document'},
'category': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'current_revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'current_for+'", 'null': 'True', 'to': "orm['wiki.Revision']"}),
'defer_rendering': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'html': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_localizable': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_template': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'last_rendered_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'locale': ('kuma.core.fields.LocaleField', [], {'default': "'en-US'", 'max_length': '7', 'db_index': 'True'}),
'mindtouch_page_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'translations'", 'null': 'True', 'to': "orm['wiki.Document']"}),
'parent_topic': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['wiki.Document']"}),
'related_documents': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['wiki.Document']", 'through': "orm['wiki.RelatedDocument']", 'symmetrical': 'False'}),
'render_scheduled_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'render_started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'rendered_errors': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rendered_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'wiki.documenttag': {
'Meta': {'object_name': 'DocumentTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'wiki.editortoolbar': {
'Meta': {'object_name': 'EditorToolbar'},
'code': ('django.db.models.fields.TextField', [], {'max_length': '2000'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_toolbars'", 'to': "orm['auth.User']"}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'wiki.firefoxversion': {
'Meta': {'unique_together': "(('item_id', 'document'),)", 'object_name': 'FirefoxVersion'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'firefox_version_set'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.IntegerField', [], {})
},
'wiki.helpfulvote': {
'Meta': {'object_name': 'HelpfulVote'},
'anonymous_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'null': 'True', 'to': "orm['auth.User']"}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'to': "orm['wiki.Document']"}),
'helpful': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user_agent': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'wiki.operatingsystem': {
'Meta': {'unique_together': "(('item_id', 'document'),)", 'object_name': 'OperatingSystem'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'operating_system_set'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.IntegerField', [], {})
},
'wiki.relateddocument': {
'Meta': {'ordering': "['-in_common']", 'object_name': 'RelatedDocument'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_from'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_common': ('django.db.models.fields.IntegerField', [], {}),
'related': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_to'", 'to': "orm['wiki.Document']"})
},
'wiki.reviewtag': {
'Meta': {'object_name': 'ReviewTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'wiki.reviewtaggedrevision': {
'Meta': {'object_name': 'ReviewTaggedRevision'},
'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Revision']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.ReviewTag']"})
},
'wiki.revision': {
'Meta': {'object_name': 'Revision'},
'based_on': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Revision']", 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'content': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_revisions'", 'to': "orm['auth.User']"}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_mindtouch_migration': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'mindtouch_old_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'reviewed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'reviewer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reviewed_revisions'", 'null': 'True', 'to': "orm['auth.User']"}),
'show_toc': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'significance': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'})
},
'wiki.taggeddocument': {
'Meta': {'object_name': 'TaggedDocument'},
'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.DocumentTag']"})
}
}
complete_apps = ['wiki']
| mpl-2.0 |
verycumbersome/the-blue-alliance | controllers/account_controller.py | 4 | 24016 | import os
import logging
import datetime
import random
import string
from collections import defaultdict
from google.appengine.ext import ndb
from base_controller import LoggedInHandler
from consts.account_permissions import AccountPermissions
from consts.auth_type import AuthType
from consts.client_type import ClientType
from consts.model_type import ModelType
from consts.notification_type import NotificationType
from helpers.event_helper import EventHelper
from helpers.match_helper import MatchHelper
from helpers.mytba_helper import MyTBAHelper
from helpers.notification_helper import NotificationHelper
from helpers.validation_helper import ValidationHelper
from models.account import Account
from models.api_auth_access import ApiAuthAccess
from models.event import Event
from models.favorite import Favorite
from models.match import Match
from models.sitevar import Sitevar
from models.subscription import Subscription
from models.suggestion import Suggestion
from models.team import Team
from template_engine import jinja2_engine
import tba_config
class AccountOverview(LoggedInHandler):
def get(self):
self._require_registration()
push_sitevar = Sitevar.get_by_id('notifications.enable')
if push_sitevar is None or not push_sitevar.values_json == "true":
ping_enabled = "disabled"
else:
ping_enabled = ""
# Compute myTBA statistics
user = self.user_bundle.account.key
num_favorites = Favorite.query(ancestor=user).count()
num_subscriptions = Subscription.query(ancestor=user).count()
# Compute suggestion statistics
        submissions_pending = Suggestion.query(Suggestion.review_state == Suggestion.REVIEW_PENDING, Suggestion.author == user).count()
        submissions_accepted = Suggestion.query(Suggestion.review_state == Suggestion.REVIEW_ACCEPTED, Suggestion.author == user).count()
# Suggestion review statistics
review_permissions = False
num_reviewed = 0
total_pending = 0
if self.user_bundle.account.permissions:
review_permissions = True
            num_reviewed = Suggestion.query(Suggestion.reviewer == user).count()
            total_pending = Suggestion.query(Suggestion.review_state == Suggestion.REVIEW_PENDING).count()
# Fetch trusted API keys
api_keys = ApiAuthAccess.query(ApiAuthAccess.owner == user).fetch()
write_keys = filter(lambda key: key.is_write_key, api_keys)
read_keys = filter(lambda key: key.is_read_key, api_keys)
self.template_values['status'] = self.request.get('status')
self.template_values['webhook_verification_success'] = self.request.get('webhook_verification_success')
self.template_values['ping_enabled'] = ping_enabled
self.template_values['num_favorites'] = num_favorites
self.template_values['num_subscriptions'] = num_subscriptions
self.template_values['submissions_pending'] = submissions_pending
self.template_values['submissions_accepted'] = submissions_accepted
self.template_values['review_permissions'] = review_permissions
self.template_values['num_reviewed'] = num_reviewed
self.template_values['total_pending'] = total_pending
self.template_values['read_keys'] = read_keys
self.template_values['write_keys'] = write_keys
self.template_values['auth_write_type_names'] = AuthType.write_type_names
self.response.out.write(jinja2_engine.render('account_overview.html', self.template_values))
class AccountEdit(LoggedInHandler):
def get(self):
self._require_registration()
self.response.out.write(jinja2_engine.render('account_edit.html', self.template_values))
def post(self):
self._require_registration()
# Check to make sure that they aren't trying to edit another user
real_account_id = self.user_bundle.account.key.id()
check_account_id = self.request.get('account_id')
if check_account_id == real_account_id:
user = Account.get_by_id(self.user_bundle.account.key.id())
user.display_name = self.request.get('display_name')
user.put()
self.redirect('/account?status=account_edit_success')
else:
self.redirect('/account?status=account_edit_failure')
class AccountRegister(LoggedInHandler):
def get(self):
self._require_login()
# Redirects if already registered
redirect = self.request.get('redirect')
if self.user_bundle.account.registered:
if redirect:
self.redirect(redirect, abort=True)
else:
self.redirect('/account', abort=True)
self.template_values['redirect'] = redirect
self.template_values['logout_url'] = self.user_bundle.create_logout_url(redirect)
self.response.out.write(jinja2_engine.render('account_register.html', self.template_values))
def post(self):
self._require_login()
if self.user_bundle.account.registered:
self.redirect('/account', abort=True)
# Check to make sure that they aren't trying to edit another user
real_account_id = self.user_bundle.account.key.id()
check_account_id = self.request.get('account_id')
if check_account_id == real_account_id:
account = Account.get_by_id(self.user_bundle.account.key.id())
account.display_name = self.request.get('display_name')
account.registered = True
account.put()
redirect = self.request.get('redirect')
if redirect:
self.redirect(redirect, abort=True)
else:
self.redirect('/account', abort=True)
else:
self.redirect('/')
class AccountLogin(LoggedInHandler):
def get(self):
if self.user_bundle.user:
self.redirect('/account', abort=True)
redirect = self.request.get('redirect')
if redirect:
url = self._get_login_url(redirect)
else:
url = self._get_login_url('/account')
self.redirect(url, abort=True)
class AccountLoginRequired(LoggedInHandler):
def get(self):
self.template_values['redirect'] = self.request.get('redirect')
self.response.out.write(jinja2_engine.render('account_login_required.html', self.template_values))
class AccountLogout(LoggedInHandler):
def get(self):
if os.environ.get('SERVER_SOFTWARE', '').startswith('Development/'):
self.redirect(self.user_bundle.logout_url)
return
# Deletes the session cookies pertinent to TBA without touching Google session(s)
# Reference: http://ptspts.blogspot.ca/2011/12/how-to-log-out-from-appengine-app-only.html
response = self.redirect('/')
response.delete_cookie('ACSID')
response.delete_cookie('SACSID')
return response
class AccountAPIReadKeyAdd(LoggedInHandler):
def post(self):
self._require_registration()
description = self.request.get('description')
if description:
ApiAuthAccess(
                id=''.join(random.choice(string.ascii_letters + string.digits) for _ in range(64)),
owner=self.user_bundle.account.key,
auth_types_enum=[AuthType.READ_API],
description=description,
).put()
self.redirect('/account?status=read_key_add_success')
else:
self.redirect('/account?status=read_key_add_no_description')
class AccountAPIReadKeyDelete(LoggedInHandler):
def post(self):
self._require_registration()
key_id = self.request.get('key_id')
auth = ApiAuthAccess.get_by_id(key_id)
if auth and auth.owner == self.user_bundle.account.key:
auth.key.delete()
self.redirect('/account?status=read_key_delete_success')
else:
self.redirect('/account?status=read_key_delete_failure')
class MyTBAController(LoggedInHandler):
def get(self):
self._require_registration()
user = self.user_bundle.account.key
favorites = Favorite.query(ancestor=user).fetch()
subscriptions = Subscription.query(ancestor=user).fetch()
team_keys = set()
team_fav = {}
team_subs = {}
event_keys = set()
event_fav = {}
event_subs = {}
events = []
match_keys = set()
match_event_keys = set()
match_fav = {}
match_subs = {}
for item in favorites + subscriptions:
if item.model_type == ModelType.TEAM:
team_keys.add(ndb.Key(Team, item.model_key))
if type(item) == Favorite:
team_fav[item.model_key] = item
elif type(item) == Subscription:
team_subs[item.model_key] = item
elif item.model_type == ModelType.MATCH:
match_keys.add(ndb.Key(Match, item.model_key))
match_event_keys.add(ndb.Key(Event, item.model_key.split('_')[0]))
if type(item) == Favorite:
match_fav[item.model_key] = item
elif type(item) == Subscription:
match_subs[item.model_key] = item
elif item.model_type == ModelType.EVENT:
if item.model_key.endswith('*'): # All year events wildcard
event_year = int(item.model_key[:-1])
events.append(Event( # add fake event for rendering
id=item.model_key,
short_name='ALL EVENTS',
event_short=item.model_key,
year=event_year,
start_date=datetime.datetime(event_year, 1, 1),
end_date=datetime.datetime(event_year, 1, 1)
))
else:
event_keys.add(ndb.Key(Event, item.model_key))
if type(item) == Favorite:
event_fav[item.model_key] = item
elif type(item) == Subscription:
event_subs[item.model_key] = item
team_futures = ndb.get_multi_async(team_keys)
event_futures = ndb.get_multi_async(event_keys)
match_futures = ndb.get_multi_async(match_keys)
match_event_futures = ndb.get_multi_async(match_event_keys)
teams = sorted([team_future.get_result() for team_future in team_futures], key=lambda x: x.team_number)
team_fav_subs = []
for team in teams:
fav = team_fav.get(team.key.id(), None)
subs = team_subs.get(team.key.id(), None)
team_fav_subs.append((team, fav, subs))
events += [event_future.get_result() for event_future in event_futures]
EventHelper.sort_events(events)
event_fav_subs = []
for event in events:
fav = event_fav.get(event.key.id(), None)
subs = event_subs.get(event.key.id(), None)
event_fav_subs.append((event, fav, subs))
matches = [match_future.get_result() for match_future in match_futures]
match_events = [match_event_future.get_result() for match_event_future in match_event_futures]
MatchHelper.natural_sort_matches(matches)
match_fav_subs_by_event = {}
for event in match_events:
match_fav_subs_by_event[event.key.id()] = (event, [])
for match in matches:
event_key = match.key.id().split('_')[0]
fav = match_fav.get(match.key.id(), None)
subs = match_subs.get(match.key.id(), None)
match_fav_subs_by_event[event_key][1].append((match, fav, subs))
event_match_fav_subs = sorted(match_fav_subs_by_event.values(), key=lambda x: EventHelper.distantFutureIfNoStartDate(x[0]))
event_match_fav_subs = sorted(event_match_fav_subs, key=lambda x: EventHelper.distantFutureIfNoEndDate(x[0]))
self.template_values['team_fav_subs'] = team_fav_subs
self.template_values['event_fav_subs'] = event_fav_subs
self.template_values['event_match_fav_subs'] = event_match_fav_subs
self.template_values['status'] = self.request.get('status')
self.template_values['year'] = datetime.datetime.now().year
self.response.out.write(jinja2_engine.render('mytba.html', self.template_values))
class myTBAAddHotMatchesController(LoggedInHandler):
def get(self, event_key=None):
self._require_registration()
if event_key is None:
events = EventHelper.getEventsWithinADay()
EventHelper.sort_events(events)
self.template_values['events'] = events
self.response.out.write(jinja2_engine.render('mytba_add_hot_matches_base.html', self.template_values))
return
event = Event.get_by_id(event_key)
if not event:
self.abort(404)
subscriptions_future = Subscription.query(
Subscription.model_type==ModelType.MATCH,
Subscription.notification_types==NotificationType.UPCOMING_MATCH,
ancestor=self.user_bundle.account.key).fetch_async(projection=[Subscription.model_key])
matches = []
if event.details and event.details.predictions and event.details.predictions['match_predictions']:
match_predictions = dict(
event.details.predictions['match_predictions']['qual'].items() +
event.details.predictions['match_predictions']['playoff'].items())
max_hotness = 0
min_hotness = float('inf')
for match in event.matches:
if not match.has_been_played and match.key.id() in match_predictions:
prediction = match_predictions[match.key.id()]
red_score = prediction['red']['score']
blue_score = prediction['blue']['score']
if red_score > blue_score:
winner_score = red_score
loser_score = blue_score
else:
winner_score = blue_score
loser_score = red_score
hotness = winner_score + 2.0*loser_score # Favor close high scoring matches
max_hotness = max(max_hotness, hotness)
min_hotness = min(min_hotness, hotness)
match.hotness = hotness
matches.append(match)
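                # Worked example (illustrative numbers): a predicted
                # 50-45 match scores 50 + 2.0*45 = 140, while a 90-10
                # blowout scores 90 + 2.0*10 = 110, so the close,
                # high-scoring match ranks hotter before normalization.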
existing_subscriptions = set()
for sub in subscriptions_future.get_result():
existing_subscriptions.add(sub.model_key)
hot_matches = []
        hotness_range = max_hotness - min_hotness
        for match in matches:
            match.hotness = 100 * (match.hotness - min_hotness) / hotness_range if hotness_range else 100.0
match.already_subscribed = match.key.id() in existing_subscriptions
hot_matches.append(match)
hot_matches = sorted(hot_matches, key=lambda match: -match.hotness)
matches_dict = {'qm': hot_matches[:25]}
self.template_values['event'] = event
self.template_values['matches'] = matches_dict
self.response.out.write(jinja2_engine.render('mytba_add_hot_matches.html', self.template_values))
def post(self, event_key):
self._require_registration()
current_user_id = self.user_bundle.account.key.id()
event = Event.get_by_id(event_key)
subscribed_matches = set(self.request.get_all('subscribed_matches'))
for match in event.matches:
if not match.has_been_played:
match_key = match.key.id()
if match.key.id() in subscribed_matches:
sub = Subscription(
parent=ndb.Key(Account, current_user_id),
user_id=current_user_id,
model_type=ModelType.MATCH,
model_key=match_key,
notification_types=[NotificationType.UPCOMING_MATCH]
)
MyTBAHelper.add_subscription(sub)
else:
MyTBAHelper.remove_subscription(current_user_id, match_key, ModelType.MATCH)
        self.redirect('/account/mytba?status=match_updated#my-matches')
class MyTBAEventController(LoggedInHandler):
def get(self, event_key):
self._require_registration()
# Handle wildcard for all events in a year
event = None
is_wildcard = False
if event_key.endswith('*'):
try:
year = int(event_key[:-1])
            except ValueError:
year = None
            if year and 1992 <= year <= tba_config.MAX_YEAR:
event = Event( # fake event for rendering
name="ALL {} EVENTS".format(year),
year=year,
)
is_wildcard = True
else:
event = Event.get_by_id(event_key)
if not event:
self.abort(404)
user = self.user_bundle.account.key
favorite = Favorite.query(Favorite.model_key==event_key, Favorite.model_type==ModelType.EVENT, ancestor=user).get()
        subscription = Subscription.query(Subscription.model_key==event_key, Subscription.model_type==ModelType.EVENT, ancestor=user).get()
if not favorite and not subscription: # New entry; default to being a favorite
is_favorite = True
else:
is_favorite = favorite is not None
enabled_notifications = [(en, NotificationType.render_names[en]) for en in NotificationType.enabled_event_notifications]
self.template_values['event'] = event
self.template_values['is_wildcard'] = is_wildcard
self.template_values['is_favorite'] = is_favorite
self.template_values['subscription'] = subscription
self.template_values['enabled_notifications'] = enabled_notifications
self.response.out.write(jinja2_engine.render('mytba_event.html', self.template_values))
def post(self, event_key):
self._require_registration()
current_user_id = self.user_bundle.account.key.id()
if self.request.get('favorite'):
favorite = Favorite(
parent=ndb.Key(Account, current_user_id),
user_id=current_user_id,
model_type=ModelType.EVENT,
model_key=event_key
)
MyTBAHelper.add_favorite(favorite)
else:
MyTBAHelper.remove_favorite(current_user_id, event_key, ModelType.EVENT)
subs = self.request.get_all('notification_types')
if subs:
subscription = Subscription(
parent=ndb.Key(Account, current_user_id),
user_id=current_user_id,
model_type=ModelType.EVENT,
model_key=event_key,
notification_types=[int(s) for s in subs]
)
MyTBAHelper.add_subscription(subscription)
else:
MyTBAHelper.remove_subscription(current_user_id, event_key, ModelType.EVENT)
self.redirect('/account/mytba?status=event_updated#my-events')
class MyTBAMatchController(LoggedInHandler):
def get(self, match_key):
self._require_registration()
match = Match.get_by_id(match_key)
if not match:
self.abort(404)
user = self.user_bundle.account.key
favorite = Favorite.query(Favorite.model_key==match_key, Favorite.model_type==ModelType.MATCH, ancestor=user).get()
        subscription = Subscription.query(Subscription.model_key==match_key, Subscription.model_type==ModelType.MATCH, ancestor=user).get()
if not favorite and not subscription: # New entry; default to being a favorite
is_favorite = True
else:
is_favorite = favorite is not None
enabled_notifications = [(en, NotificationType.render_names[en]) for en in NotificationType.enabled_match_notifications]
self.template_values['match'] = match
self.template_values['is_favorite'] = is_favorite
self.template_values['subscription'] = subscription
self.template_values['enabled_notifications'] = enabled_notifications
self.response.out.write(jinja2_engine.render('mytba_match.html', self.template_values))
def post(self, match_key):
self._require_registration()
current_user_id = self.user_bundle.account.key.id()
match = Match.get_by_id(match_key)
if self.request.get('favorite'):
favorite = Favorite(
parent=ndb.Key(Account, current_user_id),
user_id=current_user_id,
model_type=ModelType.MATCH,
model_key=match_key
)
MyTBAHelper.add_favorite(favorite)
else:
MyTBAHelper.remove_favorite(current_user_id, match_key, ModelType.MATCH)
subs = self.request.get_all('notification_types')
if subs:
subscription = Subscription(
parent=ndb.Key(Account, current_user_id),
user_id=current_user_id,
model_type=ModelType.MATCH,
model_key=match_key,
notification_types=[int(s) for s in subs]
)
MyTBAHelper.add_subscription(subscription)
else:
MyTBAHelper.remove_subscription(current_user_id, match_key, ModelType.MATCH)
self.redirect('/account/mytba?status=match_updated#my-matches')
class MyTBATeamController(LoggedInHandler):
def get(self, team_number):
self._require_registration()
team_key = 'frc{}'.format(team_number)
team = Team.get_by_id(team_key)
if not team:
self.abort(404)
user = self.user_bundle.account.key
favorite = Favorite.query(Favorite.model_key==team_key, Favorite.model_type==ModelType.TEAM, ancestor=user).get()
        subscription = Subscription.query(Subscription.model_key==team_key, Subscription.model_type==ModelType.TEAM, ancestor=user).get()
if not favorite and not subscription: # New entry; default to being a favorite
is_favorite = True
else:
is_favorite = favorite is not None
enabled_notifications = [(en, NotificationType.render_names[en]) for en in NotificationType.enabled_team_notifications]
self.template_values['team'] = team
self.template_values['is_favorite'] = is_favorite
self.template_values['subscription'] = subscription
self.template_values['enabled_notifications'] = enabled_notifications
self.response.out.write(jinja2_engine.render('mytba_team.html', self.template_values))
def post(self, team_number):
self._require_registration()
current_user_id = self.user_bundle.account.key.id()
team_key = 'frc{}'.format(team_number)
if self.request.get('favorite'):
favorite = Favorite(
parent=ndb.Key(Account, current_user_id),
user_id=current_user_id,
model_type=ModelType.TEAM,
model_key=team_key
)
MyTBAHelper.add_favorite(favorite)
else:
MyTBAHelper.remove_favorite(current_user_id, team_key, ModelType.TEAM)
subs = self.request.get_all('notification_types')
if subs:
subscription = Subscription(
parent=ndb.Key(Account, current_user_id),
user_id=current_user_id,
model_type=ModelType.TEAM,
model_key=team_key,
notification_types=[int(s) for s in subs]
)
MyTBAHelper.add_subscription(subscription)
else:
MyTBAHelper.remove_subscription(current_user_id, team_key, ModelType.TEAM)
self.redirect('/account/mytba?status=team_updated#my-teams')
| mit |
joegomes/deepchem | examples/sampl/sampl_datasets.py | 2 | 1395 | """
SAMPL dataset loader.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import numpy as np
import shutil
import deepchem as dc
def load_sampl(featurizer='ECFP', split='index'):
"""Load SAMPL datasets."""
# Featurize SAMPL dataset
print("About to featurize SAMPL dataset.")
current_dir = os.path.dirname(os.path.realpath(__file__))
dataset_file = os.path.join(
current_dir, "./SAMPL.csv")
SAMPL_tasks = ['expt']
if featurizer == 'ECFP':
featurizer = dc.feat.CircularFingerprint(size=1024)
elif featurizer == 'GraphConv':
featurizer = dc.feat.ConvMolFeaturizer()
loader = dc.data.CSVLoader(
tasks=SAMPL_tasks, smiles_field="smiles", featurizer=featurizer)
dataset = loader.featurize(
dataset_file, shard_size=8192)
# Initialize transformers
transformers = [
dc.trans.NormalizationTransformer(transform_y=True, dataset=dataset)]
print("About to transform data")
for transformer in transformers:
dataset = transformer.transform(dataset)
splitters = {'index': dc.splits.IndexSplitter(),
'random': dc.splits.RandomSplitter(),
'scaffold': dc.splits.ScaffoldSplitter()}
splitter = splitters[split]
train, valid, test = splitter.train_valid_test_split(dataset)
return SAMPL_tasks, (train, valid, test), transformers
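# Illustrative usage sketch (an assumption, not part of the original
# file; requires deepchem and a SAMPL.csv next to this script):
#
#     tasks, (train, valid, test), transformers = load_sampl(
#         featurizer='GraphConv', split='scaffold')
#     print(tasks)  # ['expt']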
| mit |
djordon/queueing-tool | queueing_tool/queues/agents.py | 1 | 6408 | from numpy import infty
from numpy.random import uniform
from queueing_tool.queues.choice import _choice, _argmin
class Agent(object):
"""The base class for an agent.
``Agents`` are the objects that move throughout the network.
``Agents`` are instantiated by a queue, and once serviced the
``Agent`` moves on to another queue in the network. Each ``Agent``
    *decides* where in the network it wants to arrive at next by
choosing amongst its options randomly. The probabilities are
specified in :class:`QueueNetwork's<.QueueNetwork>` transition
matrix. See :meth:`.set_transitions` for changing the routing
probabilities.
Parameters
----------
agent_id : tuple (optional, default: ``(0, 0)``)
A unique identifier for an agent. Is set automatically by the
:class:`.QueueServer` that instantiates the ``Agent``. The
first slot is the :class:`QueueServer's<.QueueServer>` edge
index and the second slot is the ``Agent's``
instantiation number for that queue.
**kwargs :
Unused.
Attributes
----------
agent_id : tuple
A unique identifier for an agent.
blocked : int
Specifies how many times an agent has been blocked by a finite
capacity queue.
"""
def __init__(self, agent_id=(0, 0), **kwargs):
self.agent_id = agent_id
self.blocked = 0
self._time = 0 # The agents arrival or departure time
def __repr__(self):
return "Agent; agent_id:{0}. time: {1}".format(self.agent_id, round(self._time, 3))
def __lt__(self, b):
return self._time < b._time
def __gt__(self, b):
return self._time > b._time
def __eq__(self, b):
return self._time == b._time
def __le__(self, b):
return self._time <= b._time
def __ge__(self, b):
return self._time >= b._time
def add_loss(self, *args, **kwargs):
"""Adds one to the number of times the agent has been blocked
from entering a queue.
"""
self.blocked += 1
def desired_destination(self, network, edge):
"""Returns the agents next destination given their current
location on the network.
An ``Agent`` chooses one of the out edges at random. The
probability that the ``Agent`` will travel along a specific
edge is specified in the :class:`QueueNetwork's<.QueueNetwork>`
transition matrix.
Parameters
----------
network : :class:`.QueueNetwork`
The :class:`.QueueNetwork` where the Agent resides.
edge : tuple
A 4-tuple indicating which edge this agent is located at.
The first two slots indicate the current edge's source and
            target vertices, while the third slot indicates this edge's
            ``edge_index``. The last slot indicates the edge type of
            that edge.
Returns
-------
out : int
            Returns the edge index corresponding to the agent's next
edge to visit in the network.
See Also
--------
:meth:`.transitions` : :class:`QueueNetwork's<.QueueNetwork>`
method that returns the transition probabilities for each
edge in the graph.
"""
n = len(network.out_edges[edge[1]])
if n <= 1:
return network.out_edges[edge[1]][0]
u = uniform()
pr = network._route_probs[edge[1]]
k = _choice(pr, u, n)
# _choice returns an integer between 0 and n-1 where the
# probability of k being selected is equal to pr[k].
return network.out_edges[edge[1]][k]
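    # Illustrative pure-Python sketch of the compiled ``_choice`` helper
    # used above (an assumption about its behaviour, not its real source):
    #
    #     def _choice(pr, u, n):
    #         total = 0.0
    #         for k in range(n):
    #             total += pr[k]
    #             if u < total:
    #                 return k
    #         return n - 1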
def queue_action(self, queue, *args, **kwargs):
"""A method that acts on the queue the Agent is at. This method
is called when the Agent arrives at the queue (where
``args[0] == 0``), when service starts for the Agent (where
``args[0] == 1``), and when the Agent departs from the queue
(where ``args[0] == 2``). By default, this method does nothing
to the queue, but is here if the Agent class is extended and
this method is overwritten.
"""
pass
class GreedyAgent(Agent):
"""An agent that chooses the queue with the shortest line as their
next destination.
Notes
-----
If there are any ties, the ``GreedyAgent`` chooses the first queue
with the shortest line (where the ordering is given by
:class:`QueueNetwork's<.QueueNetwork>` ``out_edges`` attribute).
"""
def __init__(self, agent_id=(0, 0)):
Agent.__init__(self, agent_id)
def __repr__(self):
msg = "GreedyAgent; agent_id:{0}. time: {1}"
return msg.format(self.agent_id, round(self._time, 3))
def desired_destination(self, network, edge):
"""Returns the agents next destination given their current
location on the network.
        ``GreedyAgents`` choose their next destination within the
network by picking the adjacent queue with the fewest number of
:class:`Agents<.Agent>` in the queue.
Parameters
----------
network : :class:`.QueueNetwork`
The :class:`.QueueNetwork` where the Agent resides.
edge : tuple
A 4-tuple indicating which edge this agent is located at.
The first two slots indicate the current edge's source and
            target vertices, while the third slot indicates this edge's
            ``edge_index``. The last slot indicates the edge's edge
type.
Returns
-------
out : int
            Returns the edge index corresponding to the agent's next
edge to visit in the network.
"""
adjacent_edges = network.out_edges[edge[1]]
        d = _argmin([network.edge2queue[e].number_queued() for e in adjacent_edges])
return adjacent_edges[d]
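    # Note (assumption): ``_argmin`` is a compiled helper behaving like
    # ``min(range(len(lst)), key=lst.__getitem__)`` -- it returns the
    # index of the smallest queue length, so ties go to the first queue
    # in ``out_edges`` order, as documented in the class Notes above.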
class InftyAgent(object):
"""An special agent that only operates within the
:class:`.QueueServer` class.
This agent never interacts with the :class:`.QueueNetwork`.
"""
def __init__(self):
self._time = infty
def __repr__(self):
return "InftyAgent"
def __lt__(self, b):
return self._time < b._time
def __gt__(self, b):
return self._time > b._time
def __eq__(self, b):
return self._time == b._time
| mit |
maohongyuan/kbengine | kbe/src/lib/python/Tools/scripts/highlight.py | 36 | 9117 | #!/usr/bin/env python3
'''Add syntax highlighting to Python source code'''
__author__ = 'Raymond Hettinger'
import keyword, tokenize, cgi, re, functools
try:
import builtins
except ImportError:
import __builtin__ as builtins
#### Analyze Python Source #################################
def is_builtin(s):
'Return True if s is the name of a builtin'
return hasattr(builtins, s)
def combine_range(lines, start, end):
'Join content from a range of lines between start and end'
(srow, scol), (erow, ecol) = start, end
if srow == erow:
return lines[srow-1][scol:ecol], end
rows = [lines[srow-1][scol:]] + lines[srow: erow-1] + [lines[erow-1][:ecol]]
return ''.join(rows), end
def analyze_python(source):
'''Generate and classify chunks of Python for syntax highlighting.
Yields tuples in the form: (category, categorized_text).
'''
lines = source.splitlines(True)
lines.append('')
readline = functools.partial(next, iter(lines), '')
kind = tok_str = ''
tok_type = tokenize.COMMENT
written = (1, 0)
for tok in tokenize.generate_tokens(readline):
prev_tok_type, prev_tok_str = tok_type, tok_str
tok_type, tok_str, (srow, scol), (erow, ecol), logical_lineno = tok
kind = ''
if tok_type == tokenize.COMMENT:
kind = 'comment'
elif tok_type == tokenize.OP and tok_str[:1] not in '{}[](),.:;@':
kind = 'operator'
elif tok_type == tokenize.STRING:
kind = 'string'
if prev_tok_type == tokenize.INDENT or scol==0:
kind = 'docstring'
elif tok_type == tokenize.NAME:
if tok_str in ('def', 'class', 'import', 'from'):
kind = 'definition'
elif prev_tok_str in ('def', 'class'):
kind = 'defname'
elif keyword.iskeyword(tok_str):
kind = 'keyword'
elif is_builtin(tok_str) and prev_tok_str != '.':
kind = 'builtin'
if kind:
text, written = combine_range(lines, written, (srow, scol))
yield '', text
text, written = tok_str, (erow, ecol)
yield kind, text
line_upto_token, written = combine_range(lines, written, (erow, ecol))
yield '', line_upto_token
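# Illustrative example (not from the original file): walking the
# classified chunks of a tiny program. Exact chunk boundaries follow
# the tokenizer, but 'def' comes out as 'definition', 'f' as 'defname'
# and 'return' as 'keyword':
#
#     for kind, text in analyze_python('def f():\n    return 1\n'):
#         print(kind or 'plain', repr(text))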
#### Raw Output ###########################################
def raw_highlight(classified_text):
'Straight text display of text classifications'
result = []
for kind, text in classified_text:
result.append('%15s: %r\n' % (kind or 'plain', text))
return ''.join(result)
#### ANSI Output ###########################################
default_ansi = {
'comment': ('\033[0;31m', '\033[0m'),
'string': ('\033[0;32m', '\033[0m'),
'docstring': ('\033[0;32m', '\033[0m'),
'keyword': ('\033[0;33m', '\033[0m'),
'builtin': ('\033[0;35m', '\033[0m'),
'definition': ('\033[0;33m', '\033[0m'),
'defname': ('\033[0;34m', '\033[0m'),
'operator': ('\033[0;33m', '\033[0m'),
}
def ansi_highlight(classified_text, colors=default_ansi):
'Add syntax highlighting to source code using ANSI escape sequences'
# http://en.wikipedia.org/wiki/ANSI_escape_code
result = []
for kind, text in classified_text:
opener, closer = colors.get(kind, ('', ''))
result += [opener, text, closer]
return ''.join(result)
#### HTML Output ###########################################
def html_highlight(classified_text,opener='<pre class="python">\n', closer='</pre>\n'):
'Convert classified text to an HTML fragment'
result = [opener]
for kind, text in classified_text:
if kind:
result.append('<span class="%s">' % kind)
result.append(cgi.escape(text))
if kind:
result.append('</span>')
result.append(closer)
return ''.join(result)
default_css = {
'.comment': '{color: crimson;}',
'.string': '{color: forestgreen;}',
'.docstring': '{color: forestgreen; font-style:italic;}',
'.keyword': '{color: darkorange;}',
'.builtin': '{color: purple;}',
'.definition': '{color: darkorange; font-weight:bold;}',
'.defname': '{color: blue;}',
'.operator': '{color: brown;}',
}
default_html = '''\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
<title> {title} </title>
<style type="text/css">
{css}
</style>
</head>
<body>
{body}
</body>
</html>
'''
def build_html_page(classified_text, title='python',
css=default_css, html=default_html):
'Create a complete HTML page with colorized source code'
css_str = '\n'.join(['%s %s' % item for item in css.items()])
result = html_highlight(classified_text)
title = cgi.escape(title)
return html.format(title=title, css=css_str, body=result)
#### LaTeX Output ##########################################
default_latex_commands = {
        'comment': r'{\color{red}#1}',
        'string': r'{\color{ForestGreen}#1}',
        'docstring': r'{\emph{\color{ForestGreen}#1}}',
        'keyword': r'{\color{orange}#1}',
        'builtin': r'{\color{purple}#1}',
        'definition': r'{\color{orange}#1}',
        'defname': r'{\color{blue}#1}',
        'operator': r'{\color{brown}#1}',
}
default_latex_document = r'''
\documentclass{article}
\usepackage{alltt}
\usepackage{upquote}
\usepackage{color}
\usepackage[usenames,dvipsnames]{xcolor}
\usepackage[cm]{fullpage}
%(macros)s
\begin{document}
\center{\LARGE{%(title)s}}
\begin{alltt}
%(body)s
\end{alltt}
\end{document}
'''
def alltt_escape(s):
'Replace backslash and braces with their escaped equivalents'
xlat = {'{': r'\{', '}': r'\}', '\\': r'\textbackslash{}'}
return re.sub(r'[\\{}]', lambda mo: xlat[mo.group()], s)
def latex_highlight(classified_text, title = 'python',
commands = default_latex_commands,
document = default_latex_document):
'Create a complete LaTeX document with colorized source code'
macros = '\n'.join(r'\newcommand{\py%s}[1]{%s}' % c for c in commands.items())
result = []
for kind, text in classified_text:
if kind:
result.append(r'\py%s{' % kind)
result.append(alltt_escape(text))
if kind:
result.append('}')
return default_latex_document % dict(title=title, macros=macros, body=''.join(result))
if __name__ == '__main__':
import sys, argparse, webbrowser, os, textwrap
parser = argparse.ArgumentParser(
description = 'Add syntax highlighting to Python source code',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog = textwrap.dedent('''
examples:
# Show syntax highlighted code in the terminal window
$ ./highlight.py myfile.py
# Colorize myfile.py and display in a browser
$ ./highlight.py -b myfile.py
# Create an HTML section to embed in an existing webpage
./highlight.py -s myfile.py
# Create a complete HTML file
$ ./highlight.py -c myfile.py > myfile.html
# Create a PDF using LaTeX
$ ./highlight.py -l myfile.py | pdflatex
'''))
parser.add_argument('sourcefile', metavar = 'SOURCEFILE',
help = 'file containing Python sourcecode')
parser.add_argument('-b', '--browser', action = 'store_true',
help = 'launch a browser to show results')
parser.add_argument('-c', '--complete', action = 'store_true',
help = 'build a complete html webpage')
parser.add_argument('-l', '--latex', action = 'store_true',
help = 'build a LaTeX document')
parser.add_argument('-r', '--raw', action = 'store_true',
help = 'raw parse of categorized text')
parser.add_argument('-s', '--section', action = 'store_true',
help = 'show an HTML section rather than a complete webpage')
args = parser.parse_args()
if args.section and (args.browser or args.complete):
parser.error('The -s/--section option is incompatible with '
'the -b/--browser or -c/--complete options')
sourcefile = args.sourcefile
with open(sourcefile) as f:
source = f.read()
classified_text = analyze_python(source)
if args.raw:
encoded = raw_highlight(classified_text)
elif args.complete or args.browser:
encoded = build_html_page(classified_text, title=sourcefile)
elif args.section:
encoded = html_highlight(classified_text)
elif args.latex:
encoded = latex_highlight(classified_text, title=sourcefile)
else:
encoded = ansi_highlight(classified_text)
if args.browser:
htmlfile = os.path.splitext(os.path.basename(sourcefile))[0] + '.html'
with open(htmlfile, 'w') as f:
f.write(encoded)
webbrowser.open('file://' + os.path.abspath(htmlfile))
else:
sys.stdout.write(encoded)
| lgpl-3.0 |
Jionglun/w17test_2 | static/Brython3.1.1-20150328-091302/Lib/site-packages/pygame/__init__.py | 603 | 6082 | ## pygame - Python Game Library
## Copyright (C) 2000-2001 Pete Shinners
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Library General Public
## License as published by the Free Software Foundation; either
## version 2 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Library General Public License for more details.
##
## You should have received a copy of the GNU Library General Public
## License along with this library; if not, write to the Free
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Pete Shinners
## [email protected]
'''Top-level Pygame module.
Pygame is a set of Python modules designed for writing games.
It is written on top of the excellent SDL library. This allows you
to create fully featured games and multimedia programs in the Python
language. The package is highly portable, with games running on
Windows, MacOS, OS X, BeOS, FreeBSD, IRIX, and Linux.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import os
import sys
class MissingModule:
def __init__(self, name, info='', urgent=0):
self.name = name
self.info = str(info)
self.urgent = urgent
if urgent:
self.warn()
def __getattr__(self, var):
if not self.urgent:
self.warn()
self.urgent = 1
MissingPygameModule = "%s module not available" % self.name
raise NotImplementedError(MissingPygameModule)
    def __bool__(self):
        return False
    __nonzero__ = __bool__  # Python 2 name for truth-value testing
def warn(self):
        if self.urgent:
            verb = 'import'
        else:
            verb = 'use'
        message = '%s %s: %s' % (verb, self.name, self.info)
try:
import warnings
if self.urgent: level = 4
else: level = 3
warnings.warn(message, RuntimeWarning, level)
except ImportError:
print(message)
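# Illustrative behaviour sketch (not part of the original file): with
# urgent=0 a MissingModule defers its warning until first use, e.g.
#
#     movie = MissingModule("movie", "not compiled in", 0)
#     movie.play  # warns once, then raises NotImplementedError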
#we need to import like this, each at a time. the cleanest way to import
#our modules is with the import command (not the __import__ function)
#first, the "required" modules
#from pygame.array import * #brython fix me
from pygame.base import *
from pygame.constants import *
from pygame.version import *
from pygame.rect import Rect
import pygame.color
Color = pygame.color.Color
__version__ = ver
#added by earney
from . import time
from . import display
from . import constants
from . import event
from . import font
from . import mixer
from . import sprite
from .surface import Surface
from . import image
from . import mouse
from . import transform
#next, the "standard" modules
#we still allow them to be missing for stripped down pygame distributions
'''
try: import pygame.cdrom
except (ImportError,IOError), msg:cdrom=MissingModule("cdrom", msg, 1)
try: import pygame.cursors
except (ImportError,IOError), msg:cursors=MissingModule("cursors", msg, 1)
try: import pygame.display
except (ImportError,IOError), msg:display=MissingModule("display", msg, 1)
try: import pygame.draw
except (ImportError,IOError), msg:draw=MissingModule("draw", msg, 1)
try: import pygame.event
except (ImportError,IOError), msg:event=MissingModule("event", msg, 1)
try: import pygame.image
except (ImportError,IOError), msg:image=MissingModule("image", msg, 1)
try: import pygame.joystick
except (ImportError,IOError), msg:joystick=MissingModule("joystick", msg, 1)
try: import pygame.key
except (ImportError,IOError), msg:key=MissingModule("key", msg, 1)
try: import pygame.mouse
except (ImportError,IOError), msg:mouse=MissingModule("mouse", msg, 1)
try: import pygame.sprite
except (ImportError,IOError), msg:sprite=MissingModule("sprite", msg, 1)
try: from pygame.surface import Surface
except (ImportError,IOError):Surface = lambda:Missing_Function
try: from pygame.overlay import Overlay
except (ImportError,IOError):Overlay = lambda:Missing_Function
try: import pygame.time
except (ImportError,IOError), msg:time=MissingModule("time", msg, 1)
try: import pygame.transform
except (ImportError,IOError), msg:transform=MissingModule("transform", msg, 1)
#lastly, the "optional" pygame modules
try:
import pygame.font
import pygame.sysfont
pygame.font.SysFont = pygame.sysfont.SysFont
pygame.font.get_fonts = pygame.sysfont.get_fonts
pygame.font.match_font = pygame.sysfont.match_font
except (ImportError,IOError), msg:font=MissingModule("font", msg, 0)
try: import pygame.mixer
except (ImportError,IOError), msg:mixer=MissingModule("mixer", msg, 0)
#try: import pygame.movie
#except (ImportError,IOError), msg:movie=MissingModule("movie", msg, 0)
#try: import pygame.movieext
#except (ImportError,IOError), msg:movieext=MissingModule("movieext", msg, 0)
try: import pygame.surfarray
except (ImportError,IOError), msg:surfarray=MissingModule("surfarray", msg, 0)
try: import pygame.sndarray
except (ImportError,IOError), msg:sndarray=MissingModule("sndarray", msg, 0)
#try: import pygame.fastevent
#except (ImportError,IOError), msg:fastevent=MissingModule("fastevent", msg, 0)
#there's also a couple "internal" modules not needed
#by users, but putting them here helps "dependency finder"
#programs get everything they need (like py2exe)
try: import pygame.imageext; del pygame.imageext
except (ImportError,IOError):pass
try: import pygame.mixer_music; del pygame.mixer_music
except (ImportError,IOError):pass
def packager_imports():
"""
Some additional things that py2app/py2exe will want to see
"""
import OpenGL.GL
'''
#make Rects pickleable
import copyreg
def __rect_constructor(x,y,w,h):
return Rect(x,y,w,h)
def __rect_reduce(r):
assert type(r) == Rect
return __rect_constructor, (r.x, r.y, r.w, r.h)
copyreg.pickle(Rect, __rect_reduce, __rect_constructor)
#cleanup namespace
del pygame, os, sys, #TODO rwobject, surflock, MissingModule, copy_reg
| gpl-3.0 |
datastreaming/mflow_nodes | mflow_nodes/processors/base.py | 1 | 1732 | from logging import getLogger
class BaseProcessor(object):
"""
Just a stub, for actual stream processors to extend.
"""
_logger = getLogger(__name__)
def start(self):
"""
Start the stream mflow_processor.
:return: None.
"""
self._logger.debug("Starting mflow_processor.")
def process_message(self, message):
"""
Process the message received over ZMQ.
:param message: Message received from the ZMQ stream.
:return: None
"""
self._logger.debug("Received message.")
def set_parameter(self, parameter):
"""
Set the parameter received from the REST API.
:param parameter: Tuple of format (parameter_name, parameter_value).
:return: None
"""
# Parameters can only be received in tuple format: (parameter_name, parameter_value)
if not isinstance(parameter, tuple):
error = "Received parameter '%s' value is not in tuple format. " \
"All parameters must be in format: (name, value)."
self._logger.error(error)
raise ValueError(error)
name = parameter[0]
value = parameter[1]
self._logger.debug("Update parameter '%s'='%s'" % (name, value))
# Overwrite current attribute value.
setattr(self, name, value)
def stop(self):
"""
Stop the mflow_processor. Called after the processing node stop command has been invoked.
:return: None.
"""
self._logger.debug("Stopping mflow_processor.")
def is_running(self):
"""
Check if the processor is still running.
        :return: True if the processor is still running, False otherwise.
"""
return True
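# Illustrative sketch (not part of the original module): a minimal
# subclass wiring up the documented extension points. The processor
# name is hypothetical.
class ExamplePrintProcessor(BaseProcessor):
    def process_message(self, message):
        # Log every message instead of silently ignoring it.
        self._logger.info("Received: %s", message)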
| gpl-3.0 |
cryptobanana/ansible | lib/ansible/modules/cloud/ovirt/ovirt_vmpools.py | 75 | 7594 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_vmpools
short_description: Module to manage VM pools in oVirt/RHV
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage VM pools in oVirt/RHV."
options:
name:
description:
- "Name of the VM pool to manage."
required: true
state:
description:
- "Should the VM pool be present/absent."
- "Note that when C(state) is I(absent) all VMs in VM pool are stopped and removed."
choices: ['present', 'absent']
default: present
template:
description:
- "Name of the template, which will be used to create VM pool."
description:
description:
- "Description of the VM pool."
cluster:
description:
- "Name of the cluster, where VM pool should be created."
type:
description:
- "Type of the VM pool. Either manual or automatic."
- "C(manual) - The administrator is responsible for explicitly returning the virtual machine to the pool.
The virtual machine reverts to the original base image after the administrator returns it to the pool."
- "C(Automatic) - When the virtual machine is shut down, it automatically reverts to its base image and
is returned to the virtual machine pool."
- "Default value is set by engine."
choices: ['manual', 'automatic']
vm_per_user:
description:
- "Maximum number of VMs a single user can attach to from this pool."
- "Default value is set by engine."
prestarted:
description:
- "Number of pre-started VMs defines the number of VMs in run state, that are waiting
to be attached to Users."
- "Default value is set by engine."
vm_count:
description:
- "Number of VMs in the pool."
- "Default value is set by engine."
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Create VM pool from template
- ovirt_vmpools:
cluster: mycluster
name: myvmpool
template: rhel7
vm_count: 2
prestarted: 2
vm_per_user: 1
# Remove vmpool, note that all VMs in pool will be stopped and removed:
- ovirt_vmpools:
state: absent
name: myvmpool
'''
RETURN = '''
id:
description: ID of the VM pool which is managed
returned: On success if VM pool is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
vm_pool:
description: "Dictionary of all the VM pool attributes. VM pool attributes can be found on your oVirt/RHV instance
at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/vm_pool."
returned: On success if VM pool is found.
type: dict
'''
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_params,
check_sdk,
create_connection,
equal,
get_link_name,
ovirt_full_argument_spec,
wait,
)
class VmPoolsModule(BaseModule):
def build_entity(self):
return otypes.VmPool(
name=self._module.params['name'],
description=self._module.params['description'],
comment=self._module.params['comment'],
cluster=otypes.Cluster(
name=self._module.params['cluster']
) if self._module.params['cluster'] else None,
template=otypes.Template(
name=self._module.params['template']
) if self._module.params['template'] else None,
max_user_vms=self._module.params['vm_per_user'],
prestarted_vms=self._module.params['prestarted'],
size=self._module.params['vm_count'],
type=otypes.VmPoolType(
self._module.params['type']
) if self._module.params['type'] else None,
)
def update_check(self, entity):
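        # Returning True means the existing pool already matches the
        # requested parameters, so no update call is issued.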
return (
equal(self._module.params.get('cluster'), get_link_name(self._connection, entity.cluster)) and
equal(self._module.params.get('description'), entity.description) and
equal(self._module.params.get('comment'), entity.comment) and
equal(self._module.params.get('vm_per_user'), entity.max_user_vms) and
equal(self._module.params.get('prestarted'), entity.prestarted_vms) and
equal(self._module.params.get('vm_count'), entity.size)
)
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=['present', 'absent'],
default='present',
),
name=dict(default=None, required=True),
template=dict(default=None),
cluster=dict(default=None),
description=dict(default=None),
comment=dict(default=None),
vm_per_user=dict(default=None, type='int'),
prestarted=dict(default=None, type='int'),
vm_count=dict(default=None, type='int'),
type=dict(default=None, choices=['automatic', 'manual']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
check_sdk(module)
check_params(module)
    connection = None
    try:
auth = module.params.pop('auth')
connection = create_connection(auth)
vm_pools_service = connection.system_service().vm_pools_service()
vm_pools_module = VmPoolsModule(
connection=connection,
module=module,
service=vm_pools_service,
)
state = module.params['state']
if state == 'present':
ret = vm_pools_module.create()
# Wait for all VM pool VMs to be created:
if module.params['wait']:
vms_service = connection.system_service().vms_service()
for vm in vms_service.list(search='pool=%s' % module.params['name']):
wait(
service=vms_service.service(vm.id),
condition=lambda vm: vm.status in [otypes.VmStatus.DOWN, otypes.VmStatus.UP],
timeout=module.params['timeout'],
)
elif state == 'absent':
ret = vm_pools_module.remove()
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
        if connection is not None:
            connection.close(logout=auth.get('token') is None)
if __name__ == "__main__":
main()
| gpl-3.0 |
gunnarku/mysql-8.0 | storage/ndb/mcc/ndb_setup.py | 39 | 1802 | #! /usr/bin/env python
"""Launch script for the configurator backend. Parses command line options and starts the web server."""
import sys
import platform
import os.path
import mcc_config
def num_pyver(vn):
if isinstance(vn, str):
return int(''.join(filter(str.isdigit, vn)))
return vn
def num_py_major_minor_tuple():
return map(num_pyver, platform.python_version_tuple()[0:2])
if __name__ == '__main__':
if os.path.isabs(mcc_config.MCC_INSTALL_BINDIR):
print "Running out of source dir..."
# abs_install_bindir = mcc_config.MCC_INSTALL_BINDIR
mcc_config.abs_install_subdir = mcc_config.MCC_INSTALL_BINDIR
mcc_config.abs_install_frontenddir = os.path.normpath(os.path.join(mcc_config.MCC_INSTALL_BINDIR, mcc_config.MCC_INSTALL_FRONTENDDIR))
else:
print "Running out of install dir: "+os.path.dirname(os.path.abspath(sys.argv[0]))
abs_install_bindir = os.path.dirname(os.path.abspath(sys.argv[0]))
mcc_config.install_prefix = abs_install_bindir[0:abs_install_bindir.rindex(mcc_config.MCC_INSTALL_BINDIR)]
mcc_config.abs_install_subdir = os.path.normpath(os.path.join(mcc_config.install_prefix,
mcc_config.MCC_INSTALL_SUBDIR))
mcc_config.install_frontenddir = os.path.join(mcc_config.install_prefix, mcc_config.MCC_INSTALL_FRONTENDDIR)
sys.path.append(mcc_config.abs_install_subdir)
(pymajor, pyminor) = num_py_major_minor_tuple()
assert (pymajor == 2 and pyminor >= 6), 'Unsupported Python version: '+str(platform.python_version())
sys.path.append('/opt/csw/lib/python/site-packages')
import request_handler
request_handler.main(mcc_config.install_prefix, mcc_config.abs_install_subdir)
| gpl-2.0 |
nickzuber/ClusterRunner | app/util/autoversioning.py | 1 | 6946 | import os
import subprocess
from app.util import fs
_MAJOR_MINOR_VERSION = '0.5'
_calculated_version = None # We will cache the calculated version so that it can't change during execution.
_VERSION_FILE_PATH = os.path.join(os.path.dirname(__file__), 'package_version.py')
_VERSION_FILE_BACKUP_PATH = os.path.join(os.path.dirname(__file__), 'package_version.py.bak')
def get_version():
"""
Get the version of the application. This method should return the correct version in both the frozen and unfrozen
(running from cloned source) cases.
:return: The version of the application
:rtype: str
"""
return _get_frozen_package_version() or _calculate_source_version() or '0.0.0'
def _try_rename(src, dst):
try:
os.rename(src, dst)
except (FileExistsError, FileNotFoundError):
# Skip backing up the original package_version.py if a FileExistsError or FileNotFoundError happened.
# FileExistsError might happen on Windows as NTFS doesn't support writing to a file while the file
# is opened in python.
pass
def _try_remove(src):
try:
os.remove(src)
except OSError:
pass
def write_package_version_file(package_version_string):
"""
Write the specfied version string to package_version.py. This method is intended to be called during the process of
freezing a package for release. This in-effect hard codes the version into the frozen package.
This also backs up the original file, which can be restored with another method in this module.
:param package_version_string: The version to write to the file -- presumably the output of get_version()
:type package_version_string: str
"""
package_version_file_contents = 'version = "{}" # DO NOT COMMIT\n'.format(package_version_string)
_try_rename(_VERSION_FILE_PATH, _VERSION_FILE_BACKUP_PATH) # Backup the original file.
fs.write_file(package_version_file_contents, _VERSION_FILE_PATH)
def restore_original_package_version_file():
"""
Restore the backed up version of package_version.py. This is just a convenience method to help us remember not to
commit changes to the package version file.
"""
_try_rename(_VERSION_FILE_BACKUP_PATH, _VERSION_FILE_PATH)
def _get_frozen_package_version():
"""
Return the hard coded version from package_version.py. The package_version module is only populated with the actual
version during the freeze process, so this method only returns the correct version if run from a frozen package.
:return: The version of the (frozen) application
:rtype: str
"""
# only import package_version when needed as on Windows once imported, the actual package_version.py can't be
# edited anymore
try:
from app.util import package_version
return package_version.version
except ImportError:
return None
def _calculate_source_version():
"""
Calculate the version using a scheme based off of git repo info. Note that since this depends on the git history,
this will *not* work from a frozen package (which does not include the git repo data). This will only work in the
context of running the application from the cloned git repo.
If this is running outside of a git repo, it will handle the CalledProcessError exception and return None.
:return: The version of the (source) application
:rtype: str
"""
global _calculated_version
if _calculated_version is None:
try:
head_commit_hash = _get_commit_hash_from_revision_param('HEAD')
head_commit_is_on_trunk = _is_commit_hash_in_masters_first_parent_chain(head_commit_hash)
commit_count = _get_repo_commit_count()
hash_extension = '' if head_commit_is_on_trunk else '-{}'.format(head_commit_hash[:7])
mod_extension = '' if not _repo_has_uncommited_changes() else '-mod'
_calculated_version = '{}.{}{}{}'.format(_MAJOR_MINOR_VERSION, commit_count, hash_extension, mod_extension)
except subprocess.CalledProcessError:
_calculated_version = None
return _calculated_version
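# Illustrative results of the scheme above (hypothetical numbers):
# '0.5.1432' on origin/master's first-parent chain, '0.5.1432-a1b2c3d'
# for an off-trunk commit, plus a '-mod' suffix when tracked files have
# uncommitted changes.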
def _get_repo_commit_count():
"""
:return: The number of commits in the repo
:rtype: int
"""
commit_list = _execute_local_git_command('rev-list', 'HEAD').split()
return len(commit_list)
def _repo_has_uncommited_changes():
"""
Check if the git repo has any changes to tracked files that haven't been committed.
:return: Whether or not the repo has uncommited changes to tracked files
:rtype: bool
"""
has_uncommited_changes = False
try:
_execute_local_git_command('diff-index', '--quiet', 'HEAD')
except subprocess.CalledProcessError: # CalledProcessError is raised if command exits with non-zero exit code
has_uncommited_changes = True
return has_uncommited_changes
def _is_commit_hash_in_masters_first_parent_chain(commit_hash):
"""
Check if the current HEAD is in the first-parent chain of origin/master. The first-parent chain of origin/master
consists of all the "trunk" commits. All other commits are either on merged branches or haven't been merged at all.
:type commit_hash: str
:rtype: bool
"""
master_commit_hash = _get_commit_hash_from_revision_param('origin/master')
first_parent_chain = _execute_local_git_command(
'rev-list',
'--first-parent',
'{}^..{}'.format(commit_hash, master_commit_hash)).split()
return commit_hash in first_parent_chain
def _get_commit_hash_from_revision_param(revision_param):
"""
Get the full git commit hash from a given revision parameter (branch name, short hash, etc.)
:type revision_param: str
:rtype: str
"""
return _execute_local_git_command('rev-parse', '--verify', revision_param).strip()
def _execute_local_git_command(*args):
"""
Execute a git command in the ClusterRunner git repo that we are currently executing from. subprocess.check_output()
raises a CalledProcessError exception if the command exits with a nonzero exit code.
:param args: The command arguments to provide to git
:type args: tuple
:return: The output of the git command
:rtype: str
"""
command_output = subprocess.check_output(
['git'] + list(args),
cwd=os.path.dirname(__file__),
stderr=subprocess.DEVNULL,
)
return command_output.decode()
def _print_and_hardcode_version():
"""
Print the version number and write the package version file. This is useful when packaging
ClusterRunner (e.g., building an RPM).
"""
# Remove the "package_version.py" file so that autoversioning always calculates version.
_try_remove(_VERSION_FILE_PATH)
version = get_version()
write_package_version_file(version)
print(version)
if __name__ == '__main__':
_print_and_hardcode_version()
| apache-2.0 |
gsi-upm/soba | soba/space/continuousElements.py | 2 | 4137 | """
In the file continuousElements.py, four classes are defined to implement the elements of
the physical space in a continuous model:
-GeneralItem: Class that implements generic elements positioned on the map with the effect of being impenetrable.
-Door: Class that implements building plane doors.
-Wall: Class that implements building walls.
-Poi: Class that implements points of interest where Occupancy objects perform certain actions.
"""
class GeneralItem():
"""
Class that implements generic elements positioned on the map with the effect of being impenetrable.
Attributes:
pos: Position where the object is located.
color: Color with which the object will be represented in the visualization.
"""
def __init__(self, model, pos, color = None):
"""
		Create a new GeneralItem object.
Args:
model: Associated Model object
pos: Position where the object is located.
color: Color with which the object will be represented in the visualization.
Return: GeneralItem object
"""
self.pos = pos
model.grid.place_agent(self, pos)
		self.color = 'grey' if color is None else color
class Door():
"""
	Class that implements building plane doors.
Attributes:
state: Door status, open (True) or closed (False).
pos1: First position to access to the door.
pos2: Second position to access to the door.
rot: Door orientation in the grid ('x' or 'y').
Methods:
open: Change the status of the door to open.
close: Change the status of the door to close.
"""
def __init__(self, model, pos1, pos2, rot, state = True):
"""
Create a new Door object.
Args:
model: Associated Model object
pos1: Position where the object is located.
pos2: Position where the object is located.
rot: Orientation of the door in the grid ('x' or 'y').
state: Door status, open (True) or closed (False).
Return: Door object
"""
self.state = state
self.pos1 = pos1
self.pos2 = pos2
self.rot = rot
def open(self):
""" Change the status of the door to open (True) """
self.state = True
def close(self):
"""Change the status of the door to close (False)"""
self.state = False
class Wall():
"""
Class that implements building walls.
Attributes:
block1, block2, block3: lists of positions that contain positions between which an
occupant can move obeying with the impenetrability of the wall.
color: Color with which the object will be represented in the visualization.
"""
def __init__(self, block1, block2, block3, color = None):
"""
Create a new Wall object.
Args:
block1, block2, block3: lists of positions that contain positions between which an
occupant can move obeying with the impenetrability of the wall.
color: Color with which the object will be represented in the visualization.
Return: Wall object
"""
self.block1 = block1
self.block2 = block2
self.block3 = block3
		self.color = 'brown' if color is None else color
class Poi():
"""
Class that implements relevant elements in the simulations: points of interest where Occupancy objects perform certain actions by associating these points with certain states.
Attributes:
pos: Position where the object is located.
ide: Unique identifier associated with the point of interest.
share: Define if the poi can be shared by more than one occupant.
color: Color with which the object will be represented in the visualization.
"""
def __init__(self, model, pos, ide, share = True, color = None):
"""
		Create a new Poi object.
Args:
model: Associated Model object
pos: Position where the object is located.
ide: Unique identifier associated with the point of interest.
share: Define if the poi can be shared by more than one occupant.
color: Color with which the object will be represented in the visualization.
Return: Door object
"""
self.pos = pos
self.id = ide
model.grid.place_agent(self, pos)
self.used = False
self.share = share
		self.color = 'green' if color is None else color | mit |
jimi-c/ansible | lib/ansible/modules/network/illumos/ipadm_addrprop.py | 61 | 7148 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipadm_addrprop
short_description: Manage IP address properties on Solaris/illumos systems.
description:
- Modify IP address properties on Solaris/illumos systems.
version_added: "2.3"
author: Adam Števko (@xen0l)
options:
addrobj:
description:
- Specifies the address object we want to manage.
required: true
aliases: [nic, interface]
property:
description:
- Specifies the name of the address property we want to manage.
required: true
aliases: [name]
value:
description:
- Specifies the value we want to set for the address property.
required: false
temporary:
description:
- Specifies that the address property value is temporary.
Temporary values do not persist across reboots.
required: false
default: false
state:
description:
- Set or reset the property value.
required: false
default: present
choices: [ "present", "absent", "reset" ]
'''
EXAMPLES = '''
- name: Mark address on addrobj as deprecated
ipadm_addrprop: property=deprecated value=on addrobj=e1000g0/v6
- name: Set network prefix length for addrobj
ipadm_addrprop: addrobj=bge0/v4 name=prefixlen value=26
'''
RETURN = '''
property:
description: property name
returned: always
type: string
sample: deprecated
addrobj:
description: address object name
returned: always
type: string
sample: bge0/v4
state:
description: state of the target
returned: always
type: string
sample: present
temporary:
description: specifies if operation will persist across reboots
returned: always
type: boolean
sample: True
value:
description: property value
returned: when value is provided
type: string
sample: 26
'''
from ansible.module_utils.basic import AnsibleModule
class AddrProp(object):
def __init__(self, module):
self.module = module
self.addrobj = module.params['addrobj']
self.property = module.params['property']
self.value = module.params['value']
self.temporary = module.params['temporary']
self.state = module.params['state']
def property_exists(self):
cmd = [self.module.get_bin_path('ipadm')]
cmd.append('show-addrprop')
cmd.append('-p')
cmd.append(self.property)
cmd.append(self.addrobj)
(rc, _, _) = self.module.run_command(cmd)
if rc == 0:
return True
else:
self.module.fail_json(msg='Unknown property "%s" on addrobj %s' %
(self.property, self.addrobj),
property=self.property,
addrobj=self.addrobj)
def property_is_modified(self):
cmd = [self.module.get_bin_path('ipadm')]
cmd.append('show-addrprop')
cmd.append('-c')
cmd.append('-o')
cmd.append('current,default')
cmd.append('-p')
cmd.append(self.property)
cmd.append(self.addrobj)
        (rc, out, _) = self.module.run_command(cmd)
        out = out.rstrip()
        if rc != 0:
            return False
        # Output is 'current:default', so True means the property still
        # holds its default value, i.e. it has NOT been modified.
        (value, default) = out.split(':')
        return value == default
def property_is_set(self):
cmd = [self.module.get_bin_path('ipadm')]
cmd.append('show-addrprop')
cmd.append('-c')
cmd.append('-o')
cmd.append('current')
cmd.append('-p')
cmd.append(self.property)
cmd.append(self.addrobj)
(rc, out, _) = self.module.run_command(cmd)
out = out.rstrip()
if rc == 0 and self.value == out:
return True
else:
return False
def set_property(self):
cmd = [self.module.get_bin_path('ipadm')]
cmd.append('set-addrprop')
if self.temporary:
cmd.append('-t')
cmd.append('-p')
cmd.append(self.property + '=' + self.value)
cmd.append(self.addrobj)
return self.module.run_command(cmd)
def reset_property(self):
cmd = [self.module.get_bin_path('ipadm')]
cmd.append('reset-addrprop')
if self.temporary:
cmd.append('-t')
cmd.append('-p')
cmd.append(self.property)
cmd.append(self.addrobj)
return self.module.run_command(cmd)
def main():
module = AnsibleModule(
argument_spec=dict(
            # 'default' removed: it is meaningless (and warned about) with required=True
            addrobj=dict(required=True, aliases=['nic', 'interface']),
property=dict(required=True, aliases=['name']),
value=dict(required=False),
temporary=dict(default=False, type='bool'),
state=dict(
default='present', choices=['absent', 'present', 'reset']),
),
supports_check_mode=True
)
addrprop = AddrProp(module)
rc = None
out = ''
err = ''
result = {}
result['property'] = addrprop.property
result['addrobj'] = addrprop.addrobj
result['state'] = addrprop.state
result['temporary'] = addrprop.temporary
if addrprop.value:
result['value'] = addrprop.value
if addrprop.state == 'absent' or addrprop.state == 'reset':
if addrprop.property_exists():
if not addrprop.property_is_modified():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = addrprop.reset_property()
if rc != 0:
module.fail_json(property=addrprop.property,
addrobj=addrprop.addrobj,
msg=err,
rc=rc)
elif addrprop.state == 'present':
if addrprop.value is None:
module.fail_json(msg='Value is mandatory with state "present"')
if addrprop.property_exists():
if not addrprop.property_is_set():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = addrprop.set_property()
if rc != 0:
module.fail_json(property=addrprop.property,
addrobj=addrprop.addrobj,
msg=err,
rc=rc)
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
blois/AndroidSDKCloneMin | ndk/prebuilt/linux-x86_64/lib/python2.7/ihooks.py | 259 | 18986 | """Import hook support.
Consistent use of this module will make it possible to change the
different mechanisms involved in loading modules independently.
While the built-in module imp exports interfaces to the built-in
module searching and loading algorithm, and it is possible to replace
the built-in function __import__ in order to change the semantics of
the import statement, until now it has been difficult to combine the
effect of different __import__ hacks, like loading modules from URLs
by rimport.py, or restricted execution by rexec.py.
This module defines three new concepts:
1) A "file system hooks" class provides an interface to a filesystem.
One hooks class is defined (Hooks), which uses the interface provided
by standard modules os and os.path. It should be used as the base
class for other hooks classes.
2) A "module loader" class provides an interface to search for a
module in a search path and to load it. It defines a method which
searches for a module in a single directory; by overriding this method
one can redefine the details of the search. If the directory is None,
built-in and frozen modules are searched instead.
Two module loader classes are defined, both implementing the search
strategy used by the built-in __import__ function: ModuleLoader uses
the imp module's find_module interface, while HookableModuleLoader
uses a file system hooks class to interact with the file system. Both
use the imp module's load_* interfaces to actually load the module.
3) A "module importer" class provides an interface to import a
module, as well as interfaces to reload and unload a module. It also
provides interfaces to install and uninstall itself instead of the
default __import__ and reload (and unload) functions.
One module importer class is defined (ModuleImporter), which uses a
module loader instance passed in (by default HookableModuleLoader is
instantiated).
The classes defined here should be used as base classes for extended
functionality along those lines.
If a module importer class supports dotted names, its import_module()
must return a different value depending on whether it is called on
behalf of a "from ... import ..." statement or not. (This is caused
by the way the __import__ hook is used by the Python interpreter.) It
would also be wise to install a different version of reload().
"""
from warnings import warnpy3k, warn
warnpy3k("the ihooks module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
import __builtin__
import imp
import os
import sys
__all__ = ["BasicModuleLoader","Hooks","ModuleLoader","FancyModuleLoader",
"BasicModuleImporter","ModuleImporter","install","uninstall"]
VERBOSE = 0
from imp import C_EXTENSION, PY_SOURCE, PY_COMPILED
from imp import C_BUILTIN, PY_FROZEN, PKG_DIRECTORY
BUILTIN_MODULE = C_BUILTIN
FROZEN_MODULE = PY_FROZEN
class _Verbose:
def __init__(self, verbose = VERBOSE):
self.verbose = verbose
def get_verbose(self):
return self.verbose
def set_verbose(self, verbose):
self.verbose = verbose
# XXX The following is an experimental interface
def note(self, *args):
if self.verbose:
self.message(*args)
def message(self, format, *args):
if args:
print format%args
else:
print format
class BasicModuleLoader(_Verbose):
"""Basic module loader.
This provides the same functionality as built-in import. It
doesn't deal with checking sys.modules -- all it provides is
find_module() and a load_module(), as well as find_module_in_dir()
which searches just one directory, and can be overridden by a
derived class to change the module search algorithm when the basic
dependency on sys.path is unchanged.
The interface is a little more convenient than imp's:
find_module(name, [path]) returns None or 'stuff', and
load_module(name, stuff) loads the module.
"""
def find_module(self, name, path = None):
if path is None:
path = [None] + self.default_path()
for dir in path:
stuff = self.find_module_in_dir(name, dir)
if stuff: return stuff
return None
def default_path(self):
return sys.path
def find_module_in_dir(self, name, dir):
if dir is None:
return self.find_builtin_module(name)
else:
try:
return imp.find_module(name, [dir])
except ImportError:
return None
def find_builtin_module(self, name):
# XXX frozen packages?
if imp.is_builtin(name):
return None, '', ('', '', BUILTIN_MODULE)
if imp.is_frozen(name):
return None, '', ('', '', FROZEN_MODULE)
return None
def load_module(self, name, stuff):
file, filename, info = stuff
try:
return imp.load_module(name, file, filename, info)
finally:
if file: file.close()
class Hooks(_Verbose):
"""Hooks into the filesystem and interpreter.
By deriving a subclass you can redefine your filesystem interface,
e.g. to merge it with the URL space.
This base class behaves just like the native filesystem.
"""
# imp interface
def get_suffixes(self): return imp.get_suffixes()
def new_module(self, name): return imp.new_module(name)
def is_builtin(self, name): return imp.is_builtin(name)
def init_builtin(self, name): return imp.init_builtin(name)
def is_frozen(self, name): return imp.is_frozen(name)
def init_frozen(self, name): return imp.init_frozen(name)
def get_frozen_object(self, name): return imp.get_frozen_object(name)
def load_source(self, name, filename, file=None):
return imp.load_source(name, filename, file)
def load_compiled(self, name, filename, file=None):
return imp.load_compiled(name, filename, file)
def load_dynamic(self, name, filename, file=None):
return imp.load_dynamic(name, filename, file)
def load_package(self, name, filename, file=None):
return imp.load_module(name, file, filename, ("", "", PKG_DIRECTORY))
def add_module(self, name):
d = self.modules_dict()
if name in d: return d[name]
d[name] = m = self.new_module(name)
return m
# sys interface
def modules_dict(self): return sys.modules
def default_path(self): return sys.path
def path_split(self, x): return os.path.split(x)
def path_join(self, x, y): return os.path.join(x, y)
def path_isabs(self, x): return os.path.isabs(x)
# etc.
def path_exists(self, x): return os.path.exists(x)
def path_isdir(self, x): return os.path.isdir(x)
def path_isfile(self, x): return os.path.isfile(x)
def path_islink(self, x): return os.path.islink(x)
# etc.
def openfile(self, *x): return open(*x)
openfile_error = IOError
def listdir(self, x): return os.listdir(x)
listdir_error = os.error
# etc.
class ModuleLoader(BasicModuleLoader):
"""Default module loader; uses file system hooks.
By defining suitable hooks, you might be able to load modules from
other sources than the file system, e.g. from compressed or
encrypted files, tar files or (if you're brave!) URLs.
"""
def __init__(self, hooks = None, verbose = VERBOSE):
BasicModuleLoader.__init__(self, verbose)
self.hooks = hooks or Hooks(verbose)
def default_path(self):
return self.hooks.default_path()
def modules_dict(self):
return self.hooks.modules_dict()
def get_hooks(self):
return self.hooks
def set_hooks(self, hooks):
self.hooks = hooks
def find_builtin_module(self, name):
# XXX frozen packages?
if self.hooks.is_builtin(name):
return None, '', ('', '', BUILTIN_MODULE)
if self.hooks.is_frozen(name):
return None, '', ('', '', FROZEN_MODULE)
return None
def find_module_in_dir(self, name, dir, allow_packages=1):
if dir is None:
return self.find_builtin_module(name)
if allow_packages:
fullname = self.hooks.path_join(dir, name)
if self.hooks.path_isdir(fullname):
stuff = self.find_module_in_dir("__init__", fullname, 0)
if stuff:
file = stuff[0]
if file: file.close()
return None, fullname, ('', '', PKG_DIRECTORY)
for info in self.hooks.get_suffixes():
suff, mode, type = info
fullname = self.hooks.path_join(dir, name+suff)
try:
fp = self.hooks.openfile(fullname, mode)
return fp, fullname, info
except self.hooks.openfile_error:
pass
return None
def load_module(self, name, stuff):
file, filename, info = stuff
(suff, mode, type) = info
try:
if type == BUILTIN_MODULE:
return self.hooks.init_builtin(name)
if type == FROZEN_MODULE:
return self.hooks.init_frozen(name)
if type == C_EXTENSION:
m = self.hooks.load_dynamic(name, filename, file)
elif type == PY_SOURCE:
m = self.hooks.load_source(name, filename, file)
elif type == PY_COMPILED:
m = self.hooks.load_compiled(name, filename, file)
elif type == PKG_DIRECTORY:
m = self.hooks.load_package(name, filename, file)
else:
raise ImportError, "Unrecognized module type (%r) for %s" % \
(type, name)
finally:
if file: file.close()
m.__file__ = filename
return m
class FancyModuleLoader(ModuleLoader):
"""Fancy module loader -- parses and execs the code itself."""
def load_module(self, name, stuff):
file, filename, (suff, mode, type) = stuff
realfilename = filename
path = None
if type == PKG_DIRECTORY:
initstuff = self.find_module_in_dir("__init__", filename, 0)
if not initstuff:
raise ImportError, "No __init__ module in package %s" % name
initfile, initfilename, initinfo = initstuff
initsuff, initmode, inittype = initinfo
if inittype not in (PY_COMPILED, PY_SOURCE):
if initfile: initfile.close()
raise ImportError, \
"Bad type (%r) for __init__ module in package %s" % (
inittype, name)
path = [filename]
file = initfile
realfilename = initfilename
type = inittype
if type == FROZEN_MODULE:
code = self.hooks.get_frozen_object(name)
elif type == PY_COMPILED:
import marshal
file.seek(8)
code = marshal.load(file)
elif type == PY_SOURCE:
data = file.read()
code = compile(data, realfilename, 'exec')
else:
return ModuleLoader.load_module(self, name, stuff)
m = self.hooks.add_module(name)
if path:
m.__path__ = path
m.__file__ = filename
try:
exec code in m.__dict__
except:
d = self.hooks.modules_dict()
if name in d:
del d[name]
raise
return m
class BasicModuleImporter(_Verbose):
"""Basic module importer; uses module loader.
This provides basic import facilities but no package imports.
"""
def __init__(self, loader = None, verbose = VERBOSE):
_Verbose.__init__(self, verbose)
self.loader = loader or ModuleLoader(None, verbose)
self.modules = self.loader.modules_dict()
def get_loader(self):
return self.loader
def set_loader(self, loader):
self.loader = loader
def get_hooks(self):
return self.loader.get_hooks()
def set_hooks(self, hooks):
return self.loader.set_hooks(hooks)
def import_module(self, name, globals={}, locals={}, fromlist=[]):
name = str(name)
if name in self.modules:
return self.modules[name] # Fast path
stuff = self.loader.find_module(name)
if not stuff:
raise ImportError, "No module named %s" % name
return self.loader.load_module(name, stuff)
def reload(self, module, path = None):
name = str(module.__name__)
stuff = self.loader.find_module(name, path)
if not stuff:
raise ImportError, "Module %s not found for reload" % name
return self.loader.load_module(name, stuff)
def unload(self, module):
del self.modules[str(module.__name__)]
# XXX Should this try to clear the module's namespace?
def install(self):
self.save_import_module = __builtin__.__import__
self.save_reload = __builtin__.reload
if not hasattr(__builtin__, 'unload'):
__builtin__.unload = None
self.save_unload = __builtin__.unload
__builtin__.__import__ = self.import_module
__builtin__.reload = self.reload
__builtin__.unload = self.unload
def uninstall(self):
__builtin__.__import__ = self.save_import_module
__builtin__.reload = self.save_reload
__builtin__.unload = self.save_unload
if not __builtin__.unload:
del __builtin__.unload
class ModuleImporter(BasicModuleImporter):
"""A module importer that supports packages."""
def import_module(self, name, globals=None, locals=None, fromlist=None,
level=-1):
parent = self.determine_parent(globals, level)
q, tail = self.find_head_package(parent, str(name))
m = self.load_tail(q, tail)
if not fromlist:
return q
if hasattr(m, "__path__"):
self.ensure_fromlist(m, fromlist)
return m
def determine_parent(self, globals, level=-1):
if not globals or not level:
return None
pkgname = globals.get('__package__')
if pkgname is not None:
if not pkgname and level > 0:
raise ValueError, 'Attempted relative import in non-package'
else:
# __package__ not set, figure it out and set it
modname = globals.get('__name__')
if modname is None:
return None
if "__path__" in globals:
# __path__ is set so modname is already the package name
pkgname = modname
else:
# normal module, work out package name if any
if '.' not in modname:
if level > 0:
raise ValueError, ('Attempted relative import in '
'non-package')
globals['__package__'] = None
return None
pkgname = modname.rpartition('.')[0]
globals['__package__'] = pkgname
if level > 0:
dot = len(pkgname)
for x in range(level, 1, -1):
try:
dot = pkgname.rindex('.', 0, dot)
except ValueError:
raise ValueError('attempted relative import beyond '
'top-level package')
pkgname = pkgname[:dot]
try:
return sys.modules[pkgname]
except KeyError:
if level < 1:
warn("Parent module '%s' not found while handling "
"absolute import" % pkgname, RuntimeWarning, 1)
return None
else:
raise SystemError, ("Parent module '%s' not loaded, cannot "
"perform relative import" % pkgname)
def find_head_package(self, parent, name):
if '.' in name:
i = name.find('.')
head = name[:i]
tail = name[i+1:]
else:
head = name
tail = ""
if parent:
qname = "%s.%s" % (parent.__name__, head)
else:
qname = head
q = self.import_it(head, qname, parent)
if q: return q, tail
if parent:
qname = head
parent = None
q = self.import_it(head, qname, parent)
if q: return q, tail
raise ImportError, "No module named '%s'" % qname
def load_tail(self, q, tail):
m = q
while tail:
i = tail.find('.')
if i < 0: i = len(tail)
head, tail = tail[:i], tail[i+1:]
mname = "%s.%s" % (m.__name__, head)
m = self.import_it(head, mname, m)
if not m:
raise ImportError, "No module named '%s'" % mname
return m
def ensure_fromlist(self, m, fromlist, recursive=0):
for sub in fromlist:
if sub == "*":
if not recursive:
try:
all = m.__all__
except AttributeError:
pass
else:
self.ensure_fromlist(m, all, 1)
continue
if sub != "*" and not hasattr(m, sub):
subname = "%s.%s" % (m.__name__, sub)
submod = self.import_it(sub, subname, m)
if not submod:
raise ImportError, "No module named '%s'" % subname
def import_it(self, partname, fqname, parent, force_load=0):
if not partname:
# completely empty module name should only happen in
# 'from . import' or __import__("")
return parent
if not force_load:
try:
return self.modules[fqname]
except KeyError:
pass
try:
path = parent and parent.__path__
except AttributeError:
return None
partname = str(partname)
stuff = self.loader.find_module(partname, path)
if not stuff:
return None
fqname = str(fqname)
m = self.loader.load_module(fqname, stuff)
if parent:
setattr(parent, partname, m)
return m
def reload(self, module):
name = str(module.__name__)
if '.' not in name:
return self.import_it(name, name, None, force_load=1)
i = name.rfind('.')
pname = name[:i]
parent = self.modules[pname]
return self.import_it(name[i+1:], name, parent, force_load=1)
default_importer = None
current_importer = None
def install(importer = None):
global current_importer
current_importer = importer or default_importer or ModuleImporter()
current_importer.install()
def uninstall():
global current_importer
current_importer.uninstall()
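# A short usage sketch (illustrative only, not part of the original module):
# subclass Hooks to observe every file the loader opens, then route the
# global import machinery through a ModuleImporter built on that hooks
# object -- exactly how the three concepts in the module docstring compose.
class _LoggingHooks(Hooks):
    def openfile(self, *x):
        print 'ihooks: opening %r' % (x[0],)
        return Hooks.openfile(self, *x)

def _demo():
    importer = ModuleImporter(FancyModuleLoader(_LoggingHooks()))
    importer.install()            # replaces __builtin__.__import__ etc.
    try:
        import sgmllib            # resolved through _LoggingHooks
    finally:
        importer.uninstall()      # restores the saved import functions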
| apache-2.0 |
bobthekingofegypt/servo | tests/wpt/web-platform-tests/tools/html5lib/utils/spider.py | 436 | 4157 | #!/usr/bin/env python
"""Spider to try and find bugs in the parser. Requires httplib2 and elementtree
usage:
import spider
s = spider.Spider()
s.spider("http://www.google.com", maxURLs=100)
"""
import urllib.request, urllib.error, urllib.parse
import urllib.robotparser
import hashlib
import httplib2
import html5lib
class Spider(object):
def __init__(self):
self.unvisitedURLs = set()
self.visitedURLs = set()
self.buggyURLs=set()
self.robotParser = urllib.robotparser.RobotFileParser()
self.contentDigest = {}
self.http = httplib2.Http(".cache")
def run(self, initialURL, maxURLs=1000):
urlNumber = 0
self.visitedURLs.add(initialURL)
content = self.loadURL(initialURL)
while maxURLs is None or urlNumber < maxURLs:
if content is not None:
self.parse(content)
urlNumber += 1
if not self.unvisitedURLs:
break
content = self.loadURL(self.unvisitedURLs.pop())
def parse(self, content):
failed = False
        # use html5lib's documented treebuilder factory API
        p = html5lib.HTMLParser(tree=html5lib.getTreeBuilder("etree"))
try:
tree = p.parse(content)
except:
self.buggyURLs.add(self.currentURL)
failed = True
print("BUGGY:", self.currentURL)
self.visitedURLs.add(self.currentURL)
if not failed:
self.updateURLs(tree)
def loadURL(self, url):
resp, content = self.http.request(url, "GET")
self.currentURL = url
        digest = hashlib.md5(content).hexdigest()
if digest in self.contentDigest:
content = None
self.visitedURLs.add(url)
else:
self.contentDigest[digest] = url
if resp['status'] != "200":
content = None
return content
def updateURLs(self, tree):
"""Take all the links in the current document, extract the URLs and
update the list of visited and unvisited URLs according to whether we
have seen them before or not"""
urls = set()
#Remove all links we have already visited
for link in tree.findall(".//a"):
try:
url = urllib.parse.urldefrag(link.attrib['href'])[0]
if (url and url not in self.unvisitedURLs and url
not in self.visitedURLs):
urls.add(url)
except KeyError:
pass
        #Remove all non-http URLs and add a suitable base URL where that is
        #missing
newUrls = set()
for url in urls:
splitURL = list(urllib.parse.urlsplit(url))
if splitURL[0] != "http":
continue
if splitURL[1] == "":
splitURL[1] = urllib.parse.urlsplit(self.currentURL)[1]
newUrls.add(urllib.parse.urlunsplit(splitURL))
urls = newUrls
responseHeaders = {}
#Now we want to find the content types of the links we haven't visited
for url in urls:
try:
resp, content = self.http.request(url, "HEAD")
responseHeaders[url] = resp
            except (AttributeError, KeyError):
#Don't know why this happens
pass
#Remove links not of content-type html or pages not found
#XXX - need to deal with other status codes?
toVisit = set([url for url in urls if url in responseHeaders and
"html" in responseHeaders[url]['content-type'] and
responseHeaders[url]['status'] == "200"])
#Now check we are allowed to spider the page
        for url in list(toVisit):  # iterate over a copy; we remove entries below
robotURL = list(urllib.parse.urlsplit(url)[:2])
robotURL.extend(["robots.txt", "", ""])
robotURL = urllib.parse.urlunsplit(robotURL)
self.robotParser.set_url(robotURL)
if not self.robotParser.can_fetch("*", url):
toVisit.remove(url)
self.visitedURLs.update(urls)
self.unvisitedURLs.update(toVisit)
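# A minimal driver sketch (hypothetical seed URL; assumes httplib2 and
# html5lib are importable and the network is reachable):
if __name__ == "__main__":
    s = Spider()
    s.run("http://example.com/", maxURLs=10)
    print("Buggy URLs:", sorted(s.buggyURLs))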
| mpl-2.0 |
francoisluus/tensorboard-supervise | tensorboard/plugin_util.py | 3 | 2526 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides utilities that may be especially useful to plugins."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import bleach
# pylint: disable=g-bad-import-order
# Google-only: import markdown_freewisdom
import markdown
import six
_ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'img': ['src', 'title', 'alt'],
}
_ALLOWED_TAGS = [
'ul',
'ol',
'li',
'p',
'pre',
'code',
'blockquote',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'hr',
'br',
'strong',
'em',
'a',
'img',
'table',
'thead',
'tbody',
'td',
'tr',
'th',
]
def markdown_to_safe_html(markdown_string):
"""Convert Markdown to HTML that's safe to splice into the DOM.
Arguments:
markdown_string: A Unicode string or UTF-8--encoded bytestring
containing Markdown source. Markdown tables are supported.
Returns:
A string containing safe HTML.
"""
warning = ''
# Convert to utf-8 whenever we have a binary input.
if isinstance(markdown_string, six.binary_type):
markdown_string_decoded = markdown_string.decode('utf-8')
# Remove null bytes and warn if there were any, since it probably means
# we were given a bad encoding.
markdown_string = markdown_string_decoded.replace(u'\x00', u'')
num_null_bytes = len(markdown_string_decoded) - len(markdown_string)
if num_null_bytes:
warning = ('<!-- WARNING: discarded %d null bytes in markdown string '
'after UTF-8 decoding -->\n') % num_null_bytes
string_html = markdown.markdown(
markdown_string, extensions=['markdown.extensions.tables'])
string_sanitized = bleach.clean(
string_html, tags=_ALLOWED_TAGS, attributes=_ALLOWED_ATTRIBUTES)
return warning + string_sanitized
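# A minimal usage sketch (illustrative input, not taken from TensorBoard):
# allowed tags such as <strong> survive, while a disallowed <script> tag is
# escaped by bleach (whose default is to escape, not strip, unknown tags),
# so the result would look roughly like:
#
#   markdown_to_safe_html(u'**hi** <script>alert(1)</script>')
#   # -> u'<p><strong>hi</strong> &lt;script&gt;alert(1)&lt;/script&gt;</p>'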
| apache-2.0 |
Aaron1992/tornado | tornado/test/httpclient_test.py | 8 | 23119 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
from contextlib import closing
import functools
import sys
import threading
import datetime
from io import BytesIO
from tornado.escape import utf8
from tornado.httpclient import HTTPRequest, HTTPResponse, _RequestProxy, HTTPError, HTTPClient
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream
from tornado.log import gen_log
from tornado import netutil
from tornado.stack_context import ExceptionStackContext, NullContext
from tornado.testing import AsyncHTTPTestCase, bind_unused_port, gen_test, ExpectLog
from tornado.test.util import unittest, skipOnTravis
from tornado.util import u
from tornado.web import Application, RequestHandler, url
from tornado.httputil import format_timestamp, HTTPHeaders
class HelloWorldHandler(RequestHandler):
def get(self):
name = self.get_argument("name", "world")
self.set_header("Content-Type", "text/plain")
self.finish("Hello %s!" % name)
class PostHandler(RequestHandler):
def post(self):
self.finish("Post arg1: %s, arg2: %s" % (
self.get_argument("arg1"), self.get_argument("arg2")))
class PutHandler(RequestHandler):
def put(self):
self.write("Put body: ")
self.write(self.request.body)
class RedirectHandler(RequestHandler):
def prepare(self):
self.redirect(self.get_argument("url"),
status=int(self.get_argument("status", "302")))
class ChunkHandler(RequestHandler):
def get(self):
self.write("asdf")
self.flush()
self.write("qwer")
class AuthHandler(RequestHandler):
def get(self):
self.finish(self.request.headers["Authorization"])
class CountdownHandler(RequestHandler):
def get(self, count):
count = int(count)
if count > 0:
self.redirect(self.reverse_url("countdown", count - 1))
else:
self.write("Zero")
class EchoPostHandler(RequestHandler):
def post(self):
self.write(self.request.body)
class UserAgentHandler(RequestHandler):
def get(self):
self.write(self.request.headers.get('User-Agent', 'User agent not set'))
class ContentLength304Handler(RequestHandler):
def get(self):
self.set_status(304)
self.set_header('Content-Length', 42)
def _clear_headers_for_304(self):
# Tornado strips content-length from 304 responses, but here we
# want to simulate servers that include the headers anyway.
pass
class PatchHandler(RequestHandler):
def patch(self):
"Return the request payload - so we can check it is being kept"
self.write(self.request.body)
class AllMethodsHandler(RequestHandler):
SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',)
def method(self):
self.write(self.request.method)
get = post = put = delete = options = patch = other = method
# These tests end up getting run redundantly: once here with the default
# HTTPClient implementation, and then again in each implementation's own
# test suite.
class HTTPClientCommonTestCase(AsyncHTTPTestCase):
def get_app(self):
return Application([
url("/hello", HelloWorldHandler),
url("/post", PostHandler),
url("/put", PutHandler),
url("/redirect", RedirectHandler),
url("/chunk", ChunkHandler),
url("/auth", AuthHandler),
url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
url("/echopost", EchoPostHandler),
url("/user_agent", UserAgentHandler),
url("/304_with_content_length", ContentLength304Handler),
url("/all_methods", AllMethodsHandler),
url('/patch', PatchHandler),
], gzip=True)
def test_patch_receives_payload(self):
body = b"some patch data"
response = self.fetch("/patch", method='PATCH', body=body)
self.assertEqual(response.code, 200)
self.assertEqual(response.body, body)
@skipOnTravis
def test_hello_world(self):
response = self.fetch("/hello")
self.assertEqual(response.code, 200)
self.assertEqual(response.headers["Content-Type"], "text/plain")
self.assertEqual(response.body, b"Hello world!")
self.assertEqual(int(response.request_time), 0)
response = self.fetch("/hello?name=Ben")
self.assertEqual(response.body, b"Hello Ben!")
def test_streaming_callback(self):
# streaming_callback is also tested in test_chunked
chunks = []
response = self.fetch("/hello",
streaming_callback=chunks.append)
# with streaming_callback, data goes to the callback and not response.body
self.assertEqual(chunks, [b"Hello world!"])
self.assertFalse(response.body)
def test_post(self):
response = self.fetch("/post", method="POST",
body="arg1=foo&arg2=bar")
self.assertEqual(response.code, 200)
self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
def test_chunked(self):
response = self.fetch("/chunk")
self.assertEqual(response.body, b"asdfqwer")
chunks = []
response = self.fetch("/chunk",
streaming_callback=chunks.append)
self.assertEqual(chunks, [b"asdf", b"qwer"])
self.assertFalse(response.body)
def test_chunked_close(self):
# test case in which chunks spread read-callback processing
# over several ioloop iterations, but the connection is already closed.
sock, port = bind_unused_port()
with closing(sock):
def write_response(stream, request_data):
stream.write(b"""\
HTTP/1.1 200 OK
Transfer-Encoding: chunked
1
1
1
2
0
""".replace(b"\n", b"\r\n"), callback=stream.close)
def accept_callback(conn, address):
# fake an HTTP server using chunked encoding where the final chunks
# and connection close all happen at once
stream = IOStream(conn, io_loop=self.io_loop)
stream.read_until(b"\r\n\r\n",
functools.partial(write_response, stream))
netutil.add_accept_handler(sock, accept_callback, self.io_loop)
self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
resp = self.wait()
resp.rethrow()
self.assertEqual(resp.body, b"12")
self.io_loop.remove_handler(sock.fileno())
def test_streaming_stack_context(self):
chunks = []
exc_info = []
def error_handler(typ, value, tb):
exc_info.append((typ, value, tb))
return True
def streaming_cb(chunk):
chunks.append(chunk)
if chunk == b'qwer':
1 / 0
with ExceptionStackContext(error_handler):
self.fetch('/chunk', streaming_callback=streaming_cb)
self.assertEqual(chunks, [b'asdf', b'qwer'])
self.assertEqual(1, len(exc_info))
self.assertIs(exc_info[0][0], ZeroDivisionError)
def test_basic_auth(self):
self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
auth_password="open sesame").body,
b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
def test_basic_auth_explicit_mode(self):
self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
auth_password="open sesame",
auth_mode="basic").body,
b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
def test_unsupported_auth_mode(self):
# curl and simple clients handle errors a bit differently; the
# important thing is that they don't fall back to basic auth
# on an unknown mode.
with ExpectLog(gen_log, "uncaught exception", required=False):
with self.assertRaises((ValueError, HTTPError)):
response = self.fetch("/auth", auth_username="Aladdin",
auth_password="open sesame",
auth_mode="asdf")
response.rethrow()
def test_follow_redirect(self):
response = self.fetch("/countdown/2", follow_redirects=False)
self.assertEqual(302, response.code)
self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
response = self.fetch("/countdown/2")
self.assertEqual(200, response.code)
self.assertTrue(response.effective_url.endswith("/countdown/0"))
self.assertEqual(b"Zero", response.body)
def test_credentials_in_url(self):
url = self.get_url("/auth").replace("http://", "http://me:secret@")
self.http_client.fetch(url, self.stop)
response = self.wait()
self.assertEqual(b"Basic " + base64.b64encode(b"me:secret"),
response.body)
def test_body_encoding(self):
unicode_body = u("\xe9")
byte_body = binascii.a2b_hex(b"e9")
# unicode string in body gets converted to utf8
response = self.fetch("/echopost", method="POST", body=unicode_body,
headers={"Content-Type": "application/blah"})
self.assertEqual(response.headers["Content-Length"], "2")
self.assertEqual(response.body, utf8(unicode_body))
# byte strings pass through directly
response = self.fetch("/echopost", method="POST",
body=byte_body,
headers={"Content-Type": "application/blah"})
self.assertEqual(response.headers["Content-Length"], "1")
self.assertEqual(response.body, byte_body)
# Mixing unicode in headers and byte string bodies shouldn't
# break anything
response = self.fetch("/echopost", method="POST", body=byte_body,
headers={"Content-Type": "application/blah"},
user_agent=u("foo"))
self.assertEqual(response.headers["Content-Length"], "1")
self.assertEqual(response.body, byte_body)
def test_types(self):
response = self.fetch("/hello")
self.assertEqual(type(response.body), bytes)
self.assertEqual(type(response.headers["Content-Type"]), str)
self.assertEqual(type(response.code), int)
self.assertEqual(type(response.effective_url), str)
def test_header_callback(self):
first_line = []
headers = {}
chunks = []
def header_callback(header_line):
if header_line.startswith('HTTP/'):
first_line.append(header_line)
elif header_line != '\r\n':
k, v = header_line.split(':', 1)
headers[k] = v.strip()
def streaming_callback(chunk):
# All header callbacks are run before any streaming callbacks,
# so the header data is available to process the data as it
# comes in.
self.assertEqual(headers['Content-Type'], 'text/html; charset=UTF-8')
chunks.append(chunk)
self.fetch('/chunk', header_callback=header_callback,
streaming_callback=streaming_callback)
self.assertEqual(len(first_line), 1)
self.assertRegexpMatches(first_line[0], 'HTTP/1.[01] 200 OK\r\n')
self.assertEqual(chunks, [b'asdf', b'qwer'])
def test_header_callback_stack_context(self):
exc_info = []
def error_handler(typ, value, tb):
exc_info.append((typ, value, tb))
return True
def header_callback(header_line):
if header_line.startswith('Content-Type:'):
1 / 0
with ExceptionStackContext(error_handler):
self.fetch('/chunk', header_callback=header_callback)
self.assertEqual(len(exc_info), 1)
self.assertIs(exc_info[0][0], ZeroDivisionError)
def test_configure_defaults(self):
defaults = dict(user_agent='TestDefaultUserAgent', allow_ipv6=False)
# Construct a new instance of the configured client class
client = self.http_client.__class__(self.io_loop, force_instance=True,
defaults=defaults)
try:
client.fetch(self.get_url('/user_agent'), callback=self.stop)
response = self.wait()
self.assertEqual(response.body, b'TestDefaultUserAgent')
finally:
client.close()
def test_header_types(self):
# Header values may be passed as character or utf8 byte strings,
# in a plain dictionary or an HTTPHeaders object.
# Keys must always be the native str type.
# All combinations should have the same results on the wire.
for value in [u("MyUserAgent"), b"MyUserAgent"]:
for container in [dict, HTTPHeaders]:
headers = container()
headers['User-Agent'] = value
resp = self.fetch('/user_agent', headers=headers)
self.assertEqual(
resp.body, b"MyUserAgent",
"response=%r, value=%r, container=%r" %
(resp.body, value, container))
def test_304_with_content_length(self):
# According to the spec 304 responses SHOULD NOT include
# Content-Length or other entity headers, but some servers do it
# anyway.
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
response = self.fetch('/304_with_content_length')
self.assertEqual(response.code, 304)
self.assertEqual(response.headers['Content-Length'], '42')
def test_final_callback_stack_context(self):
# The final callback should be run outside of the httpclient's
# stack_context. We want to ensure that there is not stack_context
# between the user's callback and the IOLoop, so monkey-patch
# IOLoop.handle_callback_exception and disable the test harness's
# context with a NullContext.
# Note that this does not apply to secondary callbacks (header
# and streaming_callback), as errors there must be seen as errors
# by the http client so it can clean up the connection.
exc_info = []
def handle_callback_exception(callback):
exc_info.append(sys.exc_info())
self.stop()
self.io_loop.handle_callback_exception = handle_callback_exception
with NullContext():
self.http_client.fetch(self.get_url('/hello'),
lambda response: 1 / 0)
self.wait()
self.assertEqual(exc_info[0][0], ZeroDivisionError)
@gen_test
def test_future_interface(self):
response = yield self.http_client.fetch(self.get_url('/hello'))
self.assertEqual(response.body, b'Hello world!')
@gen_test
def test_future_http_error(self):
with self.assertRaises(HTTPError) as context:
yield self.http_client.fetch(self.get_url('/notfound'))
self.assertEqual(context.exception.code, 404)
self.assertEqual(context.exception.response.code, 404)
@gen_test
def test_reuse_request_from_response(self):
# The response.request attribute should be an HTTPRequest, not
# a _RequestProxy.
# This test uses self.http_client.fetch because self.fetch calls
# self.get_url on the input unconditionally.
url = self.get_url('/hello')
response = yield self.http_client.fetch(url)
self.assertEqual(response.request.url, url)
self.assertTrue(isinstance(response.request, HTTPRequest))
response2 = yield self.http_client.fetch(response.request)
self.assertEqual(response2.body, b'Hello world!')
def test_all_methods(self):
for method in ['GET', 'DELETE', 'OPTIONS']:
response = self.fetch('/all_methods', method=method)
self.assertEqual(response.body, utf8(method))
for method in ['POST', 'PUT', 'PATCH']:
response = self.fetch('/all_methods', method=method, body=b'')
self.assertEqual(response.body, utf8(method))
response = self.fetch('/all_methods', method='HEAD')
self.assertEqual(response.body, b'')
response = self.fetch('/all_methods', method='OTHER',
allow_nonstandard_methods=True)
self.assertEqual(response.body, b'OTHER')
@gen_test
def test_body_sanity_checks(self):
hello_url = self.get_url('/hello')
with self.assertRaises(ValueError) as context:
yield self.http_client.fetch(hello_url, body='data')
self.assertTrue('must be None' in str(context.exception))
with self.assertRaises(ValueError) as context:
yield self.http_client.fetch(hello_url, method='POST')
self.assertTrue('must not be None' in str(context.exception))
# This test causes odd failures with the combination of
# curl_httpclient (at least with the version of libcurl available
# on ubuntu 12.04), TwistedIOLoop, and epoll. For POST (but not PUT),
# curl decides the response came back too soon and closes the connection
# to start again. It does this *before* telling the socket callback to
# unregister the FD. Some IOLoop implementations have special kernel
# integration to discover this immediately. Tornado's IOLoops
# ignore errors on remove_handler to accommodate this behavior, but
# Twisted's reactor does not. The removeReader call fails and so
# do all future removeAll calls (which our tests do at cleanup).
#
#def test_post_307(self):
# response = self.fetch("/redirect?status=307&url=/post",
# method="POST", body=b"arg1=foo&arg2=bar")
# self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
def test_put_307(self):
response = self.fetch("/redirect?status=307&url=/put",
method="PUT", body=b"hello")
response.rethrow()
self.assertEqual(response.body, b"Put body: hello")
class RequestProxyTest(unittest.TestCase):
def test_request_set(self):
proxy = _RequestProxy(HTTPRequest('http://example.com/',
user_agent='foo'),
dict())
self.assertEqual(proxy.user_agent, 'foo')
def test_default_set(self):
proxy = _RequestProxy(HTTPRequest('http://example.com/'),
dict(network_interface='foo'))
self.assertEqual(proxy.network_interface, 'foo')
def test_both_set(self):
proxy = _RequestProxy(HTTPRequest('http://example.com/',
proxy_host='foo'),
dict(proxy_host='bar'))
self.assertEqual(proxy.proxy_host, 'foo')
def test_neither_set(self):
proxy = _RequestProxy(HTTPRequest('http://example.com/'),
dict())
self.assertIs(proxy.auth_username, None)
def test_bad_attribute(self):
proxy = _RequestProxy(HTTPRequest('http://example.com/'),
dict())
with self.assertRaises(AttributeError):
proxy.foo
def test_defaults_none(self):
proxy = _RequestProxy(HTTPRequest('http://example.com/'), None)
self.assertIs(proxy.auth_username, None)
class HTTPResponseTestCase(unittest.TestCase):
def test_str(self):
response = HTTPResponse(HTTPRequest('http://example.com'),
200, headers={}, buffer=BytesIO())
s = str(response)
self.assertTrue(s.startswith('HTTPResponse('))
self.assertIn('code=200', s)
class SyncHTTPClientTest(unittest.TestCase):
def setUp(self):
if IOLoop.configured_class().__name__ in ('TwistedIOLoop',
'AsyncIOMainLoop'):
# TwistedIOLoop only supports the global reactor, so we can't have
# separate IOLoops for client and server threads.
# AsyncIOMainLoop doesn't work with the default policy
# (although it could with some tweaks to this test and a
# policy that created loops for non-main threads).
raise unittest.SkipTest(
'Sync HTTPClient not compatible with TwistedIOLoop or '
'AsyncIOMainLoop')
self.server_ioloop = IOLoop()
sock, self.port = bind_unused_port()
app = Application([('/', HelloWorldHandler)])
self.server = HTTPServer(app, io_loop=self.server_ioloop)
self.server.add_socket(sock)
self.server_thread = threading.Thread(target=self.server_ioloop.start)
self.server_thread.start()
self.http_client = HTTPClient()
def tearDown(self):
def stop_server():
self.server.stop()
self.server_ioloop.stop()
self.server_ioloop.add_callback(stop_server)
self.server_thread.join()
self.http_client.close()
self.server_ioloop.close(all_fds=True)
def get_url(self, path):
return 'http://localhost:%d%s' % (self.port, path)
def test_sync_client(self):
response = self.http_client.fetch(self.get_url('/'))
self.assertEqual(b'Hello world!', response.body)
def test_sync_client_error(self):
# Synchronous HTTPClient raises errors directly; no need for
# response.rethrow()
with self.assertRaises(HTTPError) as assertion:
self.http_client.fetch(self.get_url('/notfound'))
self.assertEqual(assertion.exception.code, 404)
class HTTPRequestTestCase(unittest.TestCase):
def test_headers(self):
request = HTTPRequest('http://example.com', headers={'foo': 'bar'})
self.assertEqual(request.headers, {'foo': 'bar'})
def test_headers_setter(self):
request = HTTPRequest('http://example.com')
request.headers = {'bar': 'baz'}
self.assertEqual(request.headers, {'bar': 'baz'})
def test_null_headers_setter(self):
request = HTTPRequest('http://example.com')
request.headers = None
self.assertEqual(request.headers, {})
def test_body(self):
request = HTTPRequest('http://example.com', body='foo')
self.assertEqual(request.body, utf8('foo'))
def test_body_setter(self):
request = HTTPRequest('http://example.com')
request.body = 'foo'
self.assertEqual(request.body, utf8('foo'))
def test_if_modified_since(self):
http_date = datetime.datetime.utcnow()
request = HTTPRequest('http://example.com', if_modified_since=http_date)
self.assertEqual(request.headers,
{'If-Modified-Since': format_timestamp(http_date)})
| apache-2.0 |
pixelrebel/st2 | st2api/st2api/controllers/v1/aliasexecution.py | 5 | 7343 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jsonschema
from jinja2.exceptions import UndefinedError
from pecan import (abort, rest, request)
import six
from st2common import log as logging
from st2common.models.api.base import jsexpose
from st2common.exceptions.db import StackStormDBObjectNotFoundError
from st2common.models.api.action import AliasExecutionAPI
from st2common.models.api.action import ActionAliasAPI
from st2common.models.api.auth import get_system_username
from st2common.models.api.execution import ActionExecutionAPI
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.notification import NotificationSchema, NotificationSubSchema
from st2common.models.utils import action_param_utils
from st2common.models.utils.action_alias_utils import extract_parameters_for_action_alias_db
from st2common.persistence.actionalias import ActionAlias
from st2common.services import action as action_service
from st2common.util import action_db as action_utils
from st2common.util import reference
from st2common.util.api import get_requester
from st2common.util.jinja import render_values as render
from st2common.rbac.types import PermissionType
from st2common.rbac.utils import assert_request_user_has_resource_db_permission
http_client = six.moves.http_client
LOG = logging.getLogger(__name__)
CAST_OVERRIDES = {
'array': (lambda cs_x: [v.strip() for v in cs_x.split(',')])
}
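# e.g. CAST_OVERRIDES['array']('a, b,c') -> ['a', 'b', 'c']
# (illustrative input: splits a comma-separated alias parameter into a list)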
class ActionAliasExecutionController(rest.RestController):
@jsexpose(body_cls=AliasExecutionAPI, status_code=http_client.CREATED)
def post(self, payload):
action_alias_name = payload.name if payload else None
if not action_alias_name:
abort(http_client.BAD_REQUEST, 'Alias execution "name" is required')
format_str = payload.format or ''
command = payload.command or ''
try:
action_alias_db = ActionAlias.get_by_name(action_alias_name)
except ValueError:
action_alias_db = None
if not action_alias_db:
msg = 'Unable to identify action alias with name "%s".' % (action_alias_name)
abort(http_client.NOT_FOUND, msg)
return
if not action_alias_db.enabled:
msg = 'Action alias with name "%s" is disabled.' % (action_alias_name)
abort(http_client.BAD_REQUEST, msg)
return
execution_parameters = extract_parameters_for_action_alias_db(
action_alias_db=action_alias_db,
format_str=format_str,
param_stream=command)
notify = self._get_notify_field(payload)
context = {
'action_alias_ref': reference.get_ref_from_model(action_alias_db),
'api_user': payload.user,
'user': get_requester(),
'source_channel': payload.source_channel
}
execution = self._schedule_execution(action_alias_db=action_alias_db,
params=execution_parameters,
notify=notify,
context=context)
result = {
'execution': execution,
'actionalias': ActionAliasAPI.from_model(action_alias_db)
}
if action_alias_db.ack:
try:
if 'format' in action_alias_db.ack:
result.update({
'message': render({'alias': action_alias_db.ack['format']}, result)['alias']
})
except UndefinedError as e:
result.update({
'message': 'Cannot render "format" in field "ack" for alias. ' + e.message
})
try:
if 'extra' in action_alias_db.ack:
result.update({
'extra': render(action_alias_db.ack['extra'], result)
})
except UndefinedError as e:
result.update({
'extra': 'Cannot render "extra" in field "ack" for alias. ' + e.message
})
return result
def _tokenize_alias_execution(self, alias_execution):
tokens = alias_execution.strip().split(' ', 1)
return (tokens[0], tokens[1] if len(tokens) > 1 else None)
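    # e.g. _tokenize_alias_execution('run date on host1')
    #      -> ('run', 'date on host1')   (illustrative input)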
def _get_notify_field(self, payload):
on_complete = NotificationSubSchema()
route = (getattr(payload, 'notification_route', None) or
getattr(payload, 'notification_channel', None))
on_complete.routes = [route]
on_complete.data = {
'user': payload.user,
'source_channel': payload.source_channel
}
notify = NotificationSchema()
notify.on_complete = on_complete
return notify
def _schedule_execution(self, action_alias_db, params, notify, context):
action_ref = action_alias_db.action_ref
action_db = action_utils.get_action_by_ref(action_ref)
if not action_db:
raise StackStormDBObjectNotFoundError('Action with ref "%s" not found ' % (action_ref))
assert_request_user_has_resource_db_permission(request=request, resource_db=action_db,
permission_type=PermissionType.ACTION_EXECUTE)
try:
# prior to shipping off the params cast them to the right type.
params = action_param_utils.cast_params(action_ref=action_alias_db.action_ref,
params=params,
cast_overrides=CAST_OVERRIDES)
if not context:
context = {
'action_alias_ref': reference.get_ref_from_model(action_alias_db),
'user': get_system_username()
}
liveaction = LiveActionDB(action=action_alias_db.action_ref, context=context,
parameters=params, notify=notify)
_, action_execution_db = action_service.request(liveaction)
return ActionExecutionAPI.from_model(action_execution_db)
except ValueError as e:
LOG.exception('Unable to execute action.')
abort(http_client.BAD_REQUEST, str(e))
except jsonschema.ValidationError as e:
LOG.exception('Unable to execute action. Parameter validation failed.')
abort(http_client.BAD_REQUEST, str(e))
except Exception as e:
LOG.exception('Unable to execute action. Unexpected error encountered.')
abort(http_client.INTERNAL_SERVER_ERROR, str(e))
| apache-2.0 |
hentaiPanda/Levenshtein | levenshtein.py | 2 | 4422 | #!/usr/bin/env python
# coding: utf-8
'''
Created: 2014-9-17
Author: niR@github (https://github.com/hentaiPanda)
Version: 0.0.1
License: MIT
This is a straightforward implementation of Levenshtein
Distance and Damerau-Levenshtein Distance.
The code is based on the examples on the Wikipedia
You can get more infomation from
http://en.wikipedia.org/wiki/Levenshtein_distance
http://en.wikipedia.org/wiki/Damerau-Levenshtein_distance
'''
# Levenshtein Distance
def lev(str1, str2):
"""Make a Levenshtein Distances Matrix"""
n1, n2 = len(str1), len(str2)
lev_matrix = [ [ 0 for i1 in range(n1 + 1) ] for i2 in range(n2 + 1) ]
for i1 in range(1, n1 + 1):
lev_matrix[0][i1] = i1
for i2 in range(1, n2 + 1):
lev_matrix[i2][0] = i2
for i2 in range(1, n2 + 1):
for i1 in range(1, n1 + 1):
cost = 0 if str1[i1-1] == str2[i2-1] else 1
elem = min( lev_matrix[i2-1][i1] + 1,
lev_matrix[i2][i1-1] + 1,
lev_matrix[i2-1][i1-1] + cost )
lev_matrix[i2][i1] = elem
return lev_matrix[-1][-1]
# Optimal string alignment distance
def osa(str1, str2):
n1, n2 = len(str1), len(str2)
matrix = [ [ 0 for i1 in range(n1 + 1) ] for i2 in range(n2 + 1) ]
for i1 in range(1, n1 + 1):
matrix[0][i1] = i1
for i2 in range(1, n2 + 1):
matrix[i2][0] = i2
for i2 in range(1, n2 + 1):
for i1 in range(1, n1 + 1):
cost = 0 if str1[i1-1] == str2[i2-1] else 1
elem = min( matrix[i2-1][i1] + 1,
matrix[i2][i1-1] + 1,
matrix[i2-1][i1-1] + cost )
if (i1 > 1 and i2 > 1 and
str1[i1-2] == str2[i2-1] and
str1[i1-1] == str2[i2-2]):
elem = min( elem,
                            matrix[i2-2][i1-2] + 1 )  # adjacent transposition
matrix[i2][i1] = elem
return matrix[-1][-1]
# Damerau-Levenshtein Distance
def dalev(str1, str2):
# Suppose we have two strings 'ac' and 'cba'
# This is the initialized matrix:
#
# str1
# a c
# * * * * * *
# * 5 5 5 5 *
# * 5 0 1 2 *
# c * 5 1 5 5 *
# str2 b * 5 2 5 5 *
# a * 5 3 5 5 *
# * * * * * *
#
# If we wanna transpose 'ac' to 'ca', the starting point is (2, 2),
# And we need to compute the distance, so we must start at the point
# (1, 1), that's where the number 0 is, and it's the initial position
# of (p2-1, p1-1).
# The reason we use p2=ORIGIN, p1=ORIGIN (here ORIGIN is 1) as the
# initial value is that we must ensure if we don't have the proper
# pair of strings ('abc' and 'ga' etc.), the distance with adjacent
# transposition is always bigger than the number of other edit operations
# ( because no matter which variable (p2 or p1) is 0, matrix[p2-1][p1-1]
# is always the biggest number, here it is 5).
# (i1-p1-1) / (i2-p2-1) is the edit distance of the substrings.
n1, n2 = len(str1), len(str2)
max_dis = n1 + n2
letters = {} # pointer of the last row where a[i] == b[j]
INIT_POS = 2 # initial position of two str ('some'[0] etc.) in the matrix
ORIGIN = INIT_POS - 1 # the position of '0' in the matrix
matrix = [ [ max_dis for i1 in range(n1 + INIT_POS) ]
for i2 in range(n2 + INIT_POS) ]
for i1 in range(ORIGIN, n1 + INIT_POS):
        matrix[1][i1] = i1 - ORIGIN
for i2 in range(ORIGIN, n2 + INIT_POS):
        matrix[i2][1] = i2 - ORIGIN
for i2 in range(INIT_POS, n2 + INIT_POS):
temp = ORIGIN # pointer of the last column where b[j] == a[i]
for i1 in range(INIT_POS, n1 + INIT_POS):
p2 = letters.get(str1[i1-INIT_POS], ORIGIN)
p1 = temp
cost = 0 if str1[i1-INIT_POS] == str2[i2-INIT_POS] else 1
if not cost:
temp = i1
elem = min( matrix[i2-1][i1] + 1,
matrix[i2][i1-1] + 1,
matrix[i2-1][i1-1] + cost,
matrix[p2-1][p1-1] + 1 + (i1-p1-1) + (i2-p2-1) )
matrix[i2][i1] = elem
letters[str2[i2-INIT_POS]] = i2
return matrix[-1][-1]
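# Quick sanity checks (classic textbook values; 'ca' -> 'abc' is the standard
# pair on which optimal string alignment and full Damerau-Levenshtein differ):
if __name__ == '__main__':
    assert lev('kitten', 'sitting') == 3   # 2 substitutions + 1 insertion
    assert osa('ab', 'ba') == 1            # one adjacent transposition
    assert osa('ca', 'abc') == 3           # OSA may not edit a substring twice
    assert dalev('ca', 'abc') == 2         # transpose to 'ac', insert 'b'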
| mit |
redhat-cip/horizon | openstack_dashboard/dashboards/project/network_topology/tests.py | 2 | 9198 | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
import django.test
from mox3.mox import IsA # noqa
from oslo_serialization import jsonutils
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
JSON_URL = reverse('horizon:project:network_topology:json')
INDEX_URL = reverse('horizon:project:network_topology:index')
class NetworkTopologyTests(test.TestCase):
@test.create_stubs({api.nova: ('server_list',),
api.neutron: ('network_list_for_tenant',
'network_list',
'router_list',
'port_list')})
def test_json_view(self):
self._test_json_view()
@django.test.utils.override_settings(
OPENSTACK_NEUTRON_NETWORK={'enable_router': False})
@test.create_stubs({api.nova: ('server_list',),
api.neutron: ('network_list_for_tenant',
'port_list')})
def test_json_view_router_disabled(self):
self._test_json_view(router_enable=False)
def _test_json_view(self, router_enable=True):
api.nova.server_list(
IsA(http.HttpRequest)).AndReturn([self.servers.list(), False])
tenant_networks = [net for net in self.networks.list()
if not net['router:external']]
external_networks = [net for net in self.networks.list()
if net['router:external']]
api.neutron.network_list_for_tenant(
IsA(http.HttpRequest),
self.tenant.id).AndReturn(tenant_networks)
if router_enable:
api.neutron.network_list(
IsA(http.HttpRequest),
**{'router:external': True}).AndReturn(external_networks)
# router1 : gateway port not in the port list
# router2 : no gateway port
# router3 : gateway port included in port list
routers = self.routers.list() + self.routers_with_rules.list()
if router_enable:
api.neutron.router_list(
IsA(http.HttpRequest),
tenant_id=self.tenant.id).AndReturn(routers)
api.neutron.port_list(
IsA(http.HttpRequest)).AndReturn(self.ports.list())
self.mox.ReplayAll()
res = self.client.get(JSON_URL)
self.assertEqual('text/json', res['Content-Type'])
data = jsonutils.loads(res.content)
# servers
# result_server_urls = [(server['id'], server['url'])
# for server in data['servers']]
expect_server_urls = [
{'id': server.id,
'name': server.name,
'status': server.status,
'task': None,
'url': '/project/instances/%s/' % server.id}
for server in self.servers.list()]
self.assertEqual(expect_server_urls, data['servers'])
# routers
# result_router_urls = [(router['id'], router['url'])
# for router in data['routers']]
if router_enable:
expect_router_urls = [
{'id': router.id,
'external_gateway_info':
router.external_gateway_info,
'name': router.name,
'status': router.status,
'url': '/project/routers/%s/' % router.id}
for router in routers]
self.assertEqual(expect_router_urls, data['routers'])
else:
self.assertFalse(data['routers'])
# networks
expect_net_urls = []
if router_enable:
expect_net_urls += [{'id': net.id,
'url': '/project/networks/%s/detail' % net.id,
'name': net.name,
'router:external': net.router__external,
'status': net.status,
'subnets': []}
for net in external_networks]
expect_net_urls += [{'id': net.id,
'url': '/project/networks/%s/detail' % net.id,
'name': net.name,
'router:external': net.router__external,
'status': net.status,
'subnets': [{'cidr': subnet.cidr,
'id': subnet.id,
'url':
'/project/networks/subnets/%s/detail'
% subnet.id}
for subnet in net.subnets]}
for net in tenant_networks]
for exp_net in expect_net_urls:
if exp_net['url'] is None:
del exp_net['url']
self.assertEqual(expect_net_urls, data['networks'])
# ports
expect_port_urls = [
{'id': port.id,
'device_id': port.device_id,
'device_owner': port.device_owner,
'fixed_ips': port.fixed_ips,
'network_id': port.network_id,
'status': port.status,
'url': '/project/networks/ports/%s/detail' % port.id}
for port in self.ports.list()]
if router_enable:
# fake port for router1 gateway (router1 on ext_net)
router1 = routers[0]
ext_net = external_networks[0]
expect_port_urls.append(
{'id': 'gateway%s' % ext_net.id,
'device_id': router1.id,
'network_id': ext_net.id,
'fixed_ips': []})
self.assertEqual(expect_port_urls, data['ports'])
class NetworkTopologyCreateTests(test.TestCase):
def _test_new_button_disabled_when_quota_exceeded(
self, expected_string, networks_quota=10,
routers_quota=10, instances_quota=10):
quota_data = self.quota_usages.first()
quota_data['networks']['available'] = networks_quota
quota_data['routers']['available'] = routers_quota
quota_data['instances']['available'] = instances_quota
quotas.tenant_quota_usages(
IsA(http.HttpRequest)) \
.MultipleTimes().AndReturn(quota_data)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'project/network_topology/index.html')
self.assertContains(res, expected_string, html=True,
msg_prefix="The create button is not disabled")
@test.create_stubs({quotas: ('tenant_quota_usages',)})
def test_create_network_button_disabled_when_quota_exceeded(self):
url = reverse('horizon:project:network_topology:createnetwork')
classes = 'btn btn-default btn-sm ajax-modal'
link_name = "Create Network (Quota exceeded)"
expected_string = "<a href='%s' class='%s disabled' "\
"id='networks__action_create'>" \
"<span class='fa fa-plus'></span>%s</a>" \
% (url, classes, link_name)
self._test_new_button_disabled_when_quota_exceeded(
expected_string, networks_quota=0)
@test.create_stubs({quotas: ('tenant_quota_usages',)})
def test_create_router_button_disabled_when_quota_exceeded(self):
url = reverse('horizon:project:network_topology:createrouter')
classes = 'btn btn-default btn-sm ajax-modal'
link_name = "Create Router (Quota exceeded)"
expected_string = "<a href='%s' class='%s disabled' "\
"id='Routers__action_create'>" \
"<span class='fa fa-plus'></span>%s</a>" \
% (url, classes, link_name)
self._test_new_button_disabled_when_quota_exceeded(
expected_string, routers_quota=0)
@test.create_stubs({quotas: ('tenant_quota_usages',)})
def test_launch_instance_button_disabled_when_quota_exceeded(self):
url = reverse('horizon:project:network_topology:launchinstance')
classes = 'btn btn-default btn-sm btn-launch ajax-modal'
link_name = "Launch Instance (Quota exceeded)"
expected_string = "<a href='%s' class='%s disabled' "\
"id='instances__action_launch'>" \
"<span class='fa fa-cloud-upload'></span>%s</a>" \
% (url, classes, link_name)
self._test_new_button_disabled_when_quota_exceeded(
expected_string, instances_quota=0)
| apache-2.0 |
PythonScientists/Shape | env/lib/python3.5/site-packages/pycparser/c_lexer.py | 21 | 14463 | #------------------------------------------------------------------------------
# pycparser: c_lexer.py
#
# CLexer class: lexer for the C language
#
# Eli Bendersky [http://eli.thegreenplace.net]
# License: BSD
#------------------------------------------------------------------------------
import re
import sys
from .ply import lex
from .ply.lex import TOKEN
class CLexer(object):
""" A lexer for the C language. After building it, set the
input text with input(), and call token() to get new
tokens.
The public attribute filename can be set to an initial
        filename, but the lexer will update it upon #line
directives.
"""
def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
type_lookup_func):
""" Create a new Lexer.
error_func:
An error function. Will be called with an error
message, line and column as arguments, in case of
an error during lexing.
on_lbrace_func, on_rbrace_func:
Called when an LBRACE or RBRACE is encountered
(likely to push/pop type_lookup_func's scope)
type_lookup_func:
A type lookup function. Given a string, it must
return True IFF this string is a name of a type
that was defined with a typedef earlier.
"""
self.error_func = error_func
self.on_lbrace_func = on_lbrace_func
self.on_rbrace_func = on_rbrace_func
self.type_lookup_func = type_lookup_func
self.filename = ''
# Keeps track of the last token returned from self.token()
self.last_token = None
# Allow either "# line" or "# <num>" to support GCC's
# cpp output
#
self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
def build(self, **kwargs):
""" Builds the lexer from the specification. Must be
called after the lexer object is created.
This method exists separately, because the PLY
manual warns against calling lex.lex inside
__init__
"""
self.lexer = lex.lex(object=self, **kwargs)
def reset_lineno(self):
""" Resets the internal line number counter of the lexer.
"""
self.lexer.lineno = 1
def input(self, text):
self.lexer.input(text)
def token(self):
self.last_token = self.lexer.token()
return self.last_token
def find_tok_column(self, token):
""" Find the column of the token in its line.
"""
last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos)
return token.lexpos - last_cr
######################-- PRIVATE --######################
##
## Internal auxiliary methods
##
def _error(self, msg, token):
location = self._make_tok_location(token)
self.error_func(msg, location[0], location[1])
self.lexer.skip(1)
def _make_tok_location(self, token):
return (token.lineno, self.find_tok_column(token))
##
## Reserved keywords
##
keywords = (
'_BOOL', '_COMPLEX', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST',
'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN',
'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG',
'REGISTER', 'OFFSETOF',
'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
'VOLATILE', 'WHILE', '__INT128',
)
keyword_map = {}
for keyword in keywords:
if keyword == '_BOOL':
keyword_map['_Bool'] = keyword
elif keyword == '_COMPLEX':
keyword_map['_Complex'] = keyword
else:
keyword_map[keyword.lower()] = keyword
##
## All the tokens recognized by the lexer
##
tokens = keywords + (
# Identifiers
'ID',
# Type identifiers (identifiers previously defined as
# types with typedef)
'TYPEID',
# constants
'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN',
'FLOAT_CONST', 'HEX_FLOAT_CONST',
'CHAR_CONST',
'WCHAR_CONST',
# String literals
'STRING_LITERAL',
'WSTRING_LITERAL',
# Operators
'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
'LOR', 'LAND', 'LNOT',
'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
# Assignment
'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL',
'PLUSEQUAL', 'MINUSEQUAL',
'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL',
'OREQUAL',
# Increment/decrement
'PLUSPLUS', 'MINUSMINUS',
# Structure dereference (->)
'ARROW',
# Conditional operator (?)
'CONDOP',
        # Delimiters
'LPAREN', 'RPAREN', # ( )
'LBRACKET', 'RBRACKET', # [ ]
'LBRACE', 'RBRACE', # { }
        'COMMA', 'PERIOD',          # , .
'SEMI', 'COLON', # ; :
# Ellipsis (...)
'ELLIPSIS',
# pre-processor
'PPHASH', # '#'
'PPPRAGMA', # 'pragma'
'PPPRAGMASTR',
)
##
## Regexes for use in tokens
##
##
# valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers)
identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*'
hex_prefix = '0[xX]'
hex_digits = '[0-9a-fA-F]+'
bin_prefix = '0[bB]'
bin_digits = '[01]+'
# integer constants (K&R2: A.2.5.1)
integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?'
decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')'
octal_constant = '0[0-7]*'+integer_suffix_opt
hex_constant = hex_prefix+hex_digits+integer_suffix_opt
bin_constant = bin_prefix+bin_digits+integer_suffix_opt
bad_octal_constant = '0[0-7]*[89]'
# character constants (K&R2: A.2.5.2)
# Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
# directives with Windows paths as filenames (..\..\dir\file)
# For the same reason, decimal_escape allows all digit sequences. We want to
# parse all correct code, even if it means to sometimes parse incorrect
# code.
#
simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
decimal_escape = r"""(\d+)"""
hex_escape = r"""(x[0-9a-fA-F]+)"""
bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
char_const = "'"+cconst_char+"'"
wchar_const = 'L'+char_const
unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)"
bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
# string literals (K&R2: A.2.6)
string_char = r"""([^"\\\n]|"""+escape_sequence+')'
string_literal = '"'+string_char+'*"'
wstring_literal = 'L'+string_literal
bad_string_literal = '"'+string_char+'*?'+bad_escape+string_char+'*"'
# floating constants (K&R2: A.2.5.3)
exponent_part = r"""([eE][-+]?[0-9]+)"""
fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)'
binary_exponent_part = r'''([pP][+-]?[0-9]+)'''
hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))"""
hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)'
##
## Lexer states: used for preprocessor \n-terminated directives
##
states = (
# ppline: preprocessor line directives
#
('ppline', 'exclusive'),
# pppragma: pragma
#
('pppragma', 'exclusive'),
)
def t_PPHASH(self, t):
r'[ \t]*\#'
if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
t.lexer.begin('ppline')
self.pp_line = self.pp_filename = None
elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
t.lexer.begin('pppragma')
else:
t.type = 'PPHASH'
return t
##
## Rules for the ppline state
##
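    # e.g. a GCC cpp marker line '# 42 "foo.c"' enters this state; the line
    # number token sets pp_line to '42', the string literal sets pp_filename
    # to 'foo.c', and the newline rule applies both and returns to INITIAL.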
@TOKEN(string_literal)
def t_ppline_FILENAME(self, t):
if self.pp_line is None:
self._error('filename before line number in #line', t)
else:
self.pp_filename = t.value.lstrip('"').rstrip('"')
@TOKEN(decimal_constant)
def t_ppline_LINE_NUMBER(self, t):
if self.pp_line is None:
self.pp_line = t.value
else:
# Ignore: GCC's cpp sometimes inserts a numeric flag
# after the file name
pass
def t_ppline_NEWLINE(self, t):
r'\n'
if self.pp_line is None:
self._error('line number missing in #line', t)
else:
self.lexer.lineno = int(self.pp_line)
if self.pp_filename is not None:
self.filename = self.pp_filename
t.lexer.begin('INITIAL')
def t_ppline_PPLINE(self, t):
r'line'
pass
t_ppline_ignore = ' \t'
def t_ppline_error(self, t):
self._error('invalid #line directive', t)
##
## Rules for the pppragma state
##
def t_pppragma_NEWLINE(self, t):
r'\n'
t.lexer.lineno += 1
t.lexer.begin('INITIAL')
def t_pppragma_PPPRAGMA(self, t):
r'pragma'
return t
t_pppragma_ignore = ' \t'
def t_pppragma_STR(self, t):
'.+'
t.type = 'PPPRAGMASTR'
return t
def t_pppragma_error(self, t):
self._error('invalid #pragma directive', t)
##
## Rules for the normal state
##
t_ignore = ' \t'
# Newlines
def t_NEWLINE(self, t):
r'\n+'
t.lexer.lineno += t.value.count("\n")
# Operators
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MOD = r'%'
t_OR = r'\|'
t_AND = r'&'
t_NOT = r'~'
t_XOR = r'\^'
t_LSHIFT = r'<<'
t_RSHIFT = r'>>'
t_LOR = r'\|\|'
t_LAND = r'&&'
t_LNOT = r'!'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_NE = r'!='
# Assignment operators
t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_MODEQUAL = r'%='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='
t_LSHIFTEQUAL = r'<<='
t_RSHIFTEQUAL = r'>>='
t_ANDEQUAL = r'&='
t_OREQUAL = r'\|='
t_XOREQUAL = r'\^='
# Increment/decrement
t_PLUSPLUS = r'\+\+'
t_MINUSMINUS = r'--'
# ->
t_ARROW = r'->'
# ?
t_CONDOP = r'\?'
    # Delimiters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_COMMA = r','
t_PERIOD = r'\.'
t_SEMI = r';'
t_COLON = r':'
t_ELLIPSIS = r'\.\.\.'
# Scope delimiters
# To see why on_lbrace_func is needed, consider:
# typedef char TT;
# void foo(int TT) { TT = 10; }
# TT x = 5;
# Outside the function, TT is a typedef, but inside (starting and ending
# with the braces) it's a parameter. The trouble begins with yacc's
# lookahead token. If we open a new scope in brace_open, then TT has
# already been read and incorrectly interpreted as TYPEID. So, we need
# to open and close scopes from within the lexer.
# Similar for the TT immediately outside the end of the function.
#
@TOKEN(r'\{')
def t_LBRACE(self, t):
self.on_lbrace_func()
return t
@TOKEN(r'\}')
def t_RBRACE(self, t):
self.on_rbrace_func()
return t
t_STRING_LITERAL = string_literal
# The following floating and integer constants are defined as
# functions to impose a strict order (otherwise, decimal
# is placed before the others because its regex is longer,
# and this is bad)
#
@TOKEN(floating_constant)
def t_FLOAT_CONST(self, t):
return t
@TOKEN(hex_floating_constant)
def t_HEX_FLOAT_CONST(self, t):
return t
@TOKEN(hex_constant)
def t_INT_CONST_HEX(self, t):
return t
@TOKEN(bin_constant)
def t_INT_CONST_BIN(self, t):
return t
@TOKEN(bad_octal_constant)
def t_BAD_CONST_OCT(self, t):
msg = "Invalid octal constant"
self._error(msg, t)
@TOKEN(octal_constant)
def t_INT_CONST_OCT(self, t):
return t
@TOKEN(decimal_constant)
def t_INT_CONST_DEC(self, t):
return t
# Must come before bad_char_const, to prevent it from
# catching valid char constants as invalid
#
@TOKEN(char_const)
def t_CHAR_CONST(self, t):
return t
@TOKEN(wchar_const)
def t_WCHAR_CONST(self, t):
return t
@TOKEN(unmatched_quote)
def t_UNMATCHED_QUOTE(self, t):
msg = "Unmatched '"
self._error(msg, t)
@TOKEN(bad_char_const)
def t_BAD_CHAR_CONST(self, t):
msg = "Invalid char constant %s" % t.value
self._error(msg, t)
@TOKEN(wstring_literal)
def t_WSTRING_LITERAL(self, t):
return t
# unmatched string literals are caught by the preprocessor
@TOKEN(bad_string_literal)
def t_BAD_STRING_LITERAL(self, t):
msg = "String contains invalid escape code"
self._error(msg, t)
@TOKEN(identifier)
def t_ID(self, t):
t.type = self.keyword_map.get(t.value, "ID")
if t.type == 'ID' and self.type_lookup_func(t.value):
t.type = "TYPEID"
return t
def t_error(self, t):
msg = 'Illegal character %s' % repr(t.value[0])
self._error(msg, t)
| apache-2.0 |
atosatto/ansible | lib/ansible/modules/files/acl.py | 44 | 11662 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: acl
version_added: "1.4"
short_description: Sets and retrieves file ACL information.
description:
- Sets and retrieves file ACL information.
options:
path:
required: true
default: null
description:
- The full path of the file or object.
aliases: ['name']
state:
required: false
default: query
choices: [ 'query', 'present', 'absent' ]
description:
- defines whether the ACL should be present or not. The C(query) state gets the current acl without changing it, for use in 'register' operations.
follow:
required: false
default: yes
choices: [ 'yes', 'no' ]
description:
- whether to follow symlinks on the path if a symlink is encountered.
default:
version_added: "1.5"
required: false
default: no
choices: [ 'yes', 'no' ]
description:
- if the target is a directory, setting this to yes will make it the default acl for entities created inside the directory. It causes an error if
path is a file.
entity:
version_added: "1.5"
required: false
description:
- actual user or group that the ACL applies to when matching entity types user or group are selected.
etype:
version_added: "1.5"
required: false
default: null
choices: [ 'user', 'group', 'mask', 'other' ]
description:
- the entity type of the ACL to apply, see setfacl documentation for more info.
permissions:
version_added: "1.5"
required: false
default: null
description:
- Permissions to apply/remove can be any combination of r, w and x (read, write and execute respectively)
entry:
required: false
default: null
description:
- DEPRECATED. The acl to set or remove. This must always be quoted in the form of '<etype>:<qualifier>:<perms>'. The qualifier may be empty for
some types, but the type and perms are always required. '-' can be used as placeholder when you do not care about permissions. This is now
superseded by entity, type and permissions fields.
recursive:
version_added: "2.0"
required: false
default: no
choices: [ 'yes', 'no' ]
description:
- Recursively sets the specified ACL (added in Ansible 2.0). Incompatible with C(state=query).
author:
- "Brian Coca (@bcoca)"
- "Jérémie Astori (@astorije)"
notes:
- The "acl" module requires that acls are enabled on the target filesystem and that the setfacl and getfacl binaries are installed.
- As of Ansible 2.0, this module only supports Linux distributions.
- As of Ansible 2.3, the I(name) option has been changed to I(path) as default, but I(name) still works as well.
'''
EXAMPLES = '''
# Grant user Joe read access to a file
- acl:
path: /etc/foo.conf
entity: joe
etype: user
permissions: r
state: present
# Removes the acl for Joe on a specific file
- acl:
path: /etc/foo.conf
entity: joe
etype: user
state: absent
# Sets default acl for joe on foo.d
- acl:
path: /etc/foo.d
entity: joe
etype: user
permissions: rw
default: yes
state: present
# Same as previous but using entry shorthand
- acl:
path: /etc/foo.d
entry: "default:user:joe:rw-"
state: present
# Obtain the acl for a specific file
- acl:
path: /etc/foo.conf
register: acl_info
'''
RETURN = '''
acl:
description: Current acl on provided path (after changes, if any)
returned: success
type: list
sample: [ "user::rwx", "group::rwx", "other::rwx" ]
'''
import os
# import module snippets
from ansible.module_utils.basic import AnsibleModule, get_platform
def split_entry(entry):
''' splits entry and ensures normalized return'''
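    # e.g. split_entry('default:user:joe:rw-') -> [True, 'user', 'joe', 'rw-']
    #      split_entry('u:joe:rw')             -> [None, 'user', 'joe', 'rw']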
a = entry.split(':')
d = None
if entry.lower().startswith("d"):
d = True
a.pop(0)
if len(a) == 2:
a.append(None)
t, e, p = a
t = t.lower()
if t.startswith("u"):
t = "user"
elif t.startswith("g"):
t = "group"
elif t.startswith("m"):
t = "mask"
elif t.startswith("o"):
t = "other"
else:
t = None
return [d, t, e, p]
def build_entry(etype, entity, permissions=None, use_nfsv4_acls=False):
'''Builds and returns an entry string. Does not include the permissions bit if they are not provided.'''
if use_nfsv4_acls:
return ':'.join([etype, entity, permissions, 'allow'])
if permissions:
return etype + ':' + entity + ':' + permissions
else:
return etype + ':' + entity
def build_command(module, mode, path, follow, default, recursive, entry=''):
'''Builds and returns a getfacl/setfacl command.'''
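    # e.g. on Linux, build_command(module, 'set', '/etc/foo.conf', True,
    # False, False, 'user:joe:r') yields roughly:
    #   [<setfacl path>, '-m "user:joe:r"', '/etc/foo.conf']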
if mode == 'set':
cmd = [module.get_bin_path('setfacl', True)]
cmd.append('-m "%s"' % entry)
elif mode == 'rm':
cmd = [module.get_bin_path('setfacl', True)]
cmd.append('-x "%s"' % entry)
else: # mode == 'get'
cmd = [module.get_bin_path('getfacl', True)]
# prevents absolute path warnings and removes headers
if get_platform().lower() == 'linux':
cmd.append('--omit-header')
cmd.append('--absolute-names')
if recursive:
cmd.append('--recursive')
if not follow:
if get_platform().lower() == 'linux':
cmd.append('--physical')
elif get_platform().lower() == 'freebsd':
cmd.append('-h')
if default:
if(mode == 'rm'):
cmd.insert(1, '-k')
else: # mode == 'set' or mode == 'get'
cmd.insert(1, '-d')
cmd.append(path)
return cmd
def acl_changed(module, cmd):
'''Returns true if the provided command affects the existing ACLs, false otherwise.'''
# FreeBSD do not have a --test flag, so by default, it is safer to always say "true"
if get_platform().lower() == 'freebsd':
return True
cmd = cmd[:] # lists are mutables so cmd would be overwritten without this
cmd.insert(1, '--test')
lines = run_acl(module, cmd)
for line in lines:
if not line.endswith('*,*'):
return True
return False
def run_acl(module, cmd, check_rc=True):
try:
(rc, out, err) = module.run_command(' '.join(cmd), check_rc=check_rc)
    except Exception as e:
        module.fail_json(msg=str(e))
lines = []
for l in out.splitlines():
if not l.startswith('#'):
lines.append(l.strip())
if lines and not lines[-1].split():
# trim last line only when it is empty
return lines[:-1]
else:
return lines
def main():
module = AnsibleModule(
argument_spec=dict(
path=dict(required=True, aliases=['name'], type='path'),
entry=dict(required=False, type='str'),
entity=dict(required=False, type='str', default=''),
etype=dict(
required=False,
choices=['other', 'user', 'group', 'mask'],
type='str'
),
permissions=dict(required=False, type='str'),
state=dict(
required=False,
default='query',
choices=['query', 'present', 'absent'],
type='str'
),
follow=dict(required=False, type='bool', default=True),
default=dict(required=False, type='bool', default=False),
recursive=dict(required=False, type='bool', default=False),
use_nfsv4_acls=dict(required=False, type='bool', default=False)
),
supports_check_mode=True,
)
if get_platform().lower() not in ['linux', 'freebsd']:
module.fail_json(msg="The acl module is not available on this system.")
path = module.params.get('path')
entry = module.params.get('entry')
entity = module.params.get('entity')
etype = module.params.get('etype')
permissions = module.params.get('permissions')
state = module.params.get('state')
follow = module.params.get('follow')
default = module.params.get('default')
recursive = module.params.get('recursive')
use_nfsv4_acls = module.params.get('use_nfsv4_acls')
if not os.path.exists(path):
module.fail_json(msg="Path not found or not accessible.")
if state == 'query' and recursive:
module.fail_json(msg="'recursive' MUST NOT be set when 'state=query'.")
if not entry:
if state == 'absent' and permissions:
module.fail_json(msg="'permissions' MUST NOT be set when 'state=absent'.")
if state == 'absent' and not entity:
module.fail_json(msg="'entity' MUST be set when 'state=absent'.")
if state in ['present', 'absent'] and not etype:
module.fail_json(msg="'etype' MUST be set when 'state=%s'." % state)
if entry:
if etype or entity or permissions:
module.fail_json(msg="'entry' MUST NOT be set when 'entity', 'etype' or 'permissions' are set.")
if state == 'present' and not entry.count(":") in [2, 3]:
module.fail_json(msg="'entry' MUST have 3 or 4 sections divided by ':' when 'state=present'.")
if state == 'absent' and not entry.count(":") in [1, 2]:
module.fail_json(msg="'entry' MUST have 2 or 3 sections divided by ':' when 'state=absent'.")
if state == 'query':
module.fail_json(msg="'entry' MUST NOT be set when 'state=query'.")
default_flag, etype, entity, permissions = split_entry(entry)
if default_flag is not None:
default = default_flag
if get_platform().lower() == 'freebsd':
if recursive:
module.fail_json(msg="recursive is not supported on that platform.")
changed = False
msg = ""
if state == 'present':
entry = build_entry(etype, entity, permissions, use_nfsv4_acls)
command = build_command(
module, 'set', path, follow,
default, recursive, entry
)
changed = acl_changed(module, command)
if changed and not module.check_mode:
run_acl(module, command)
msg = "%s is present" % entry
elif state == 'absent':
entry = build_entry(etype, entity, use_nfsv4_acls)
command = build_command(
module, 'rm', path, follow,
default, recursive, entry
)
changed = acl_changed(module, command)
if changed and not module.check_mode:
run_acl(module, command, False)
msg = "%s is absent" % entry
elif state == 'query':
msg = "current acl"
acl = run_acl(
module,
build_command(module, 'get', path, follow, default, recursive)
)
module.exit_json(changed=changed, msg=msg, acl=acl)
if __name__ == '__main__':
main()
| gpl-3.0 |
bruecksen/isimip | config/settings/production.py | 1 | 2683 | # -*- coding: utf-8 -*-
'''
Production Configurations
- Use sentry for error logging
'''
import logging
from .common import * # noqa
SECRET_KEY = env("DJANGO_SECRET_KEY")
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
},
"file_resubmit": {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
"LOCATION": '/tmp/file_resubmit/'
},
}
# INTERNAL_IPS = ('127.0.0.1', '10.0.2.2',)
INSTALLED_APPS += ('raven.contrib.django.raven_compat', )
RAVEN_MIDDLEWARE = (
#'raven.contrib.django.raven_compat.middleware.Sentry404CatchMiddleware',
'raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware',
)
MIDDLEWARE_CLASSES += RAVEN_MIDDLEWARE
# Sentry Configuration
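# e.g. set in the environment (illustrative placeholder, not a real DSN):
#   DJANGO_SENTRY_DSN=https://<public-key>@sentry.example.com/<project-id>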
SENTRY_DSN = env('DJANGO_SENTRY_DSN')
SENTRY_CLIENT = env('DJANGO_SENTRY_CLIENT', default='raven.contrib.django.raven_compat.DjangoClient')
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['sentry'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
'sentry': {
'level': 'ERROR',
'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
},
'loggers': {
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
'raven': {
'level': 'DEBUG',
'handlers': ['console'],
'propagate': False,
},
'sentry.errors': {
'level': 'DEBUG',
'handlers': ['console'],
'propagate': False,
},
'django.security.DisallowedHost': {
'level': 'ERROR',
'handlers': ['console', 'sentry'],
'propagate': False,
},
},
}
SENTRY_CELERY_LOGLEVEL = env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO)
RAVEN_CONFIG = {
'CELERY_LOGLEVEL': env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO),
'DSN': SENTRY_DSN
}
INVITATION_VALID_DAYS = 365
INSTALLED_APPS += ("gunicorn", "email_obfuscator")
DATABASES['default'] = env.db("DATABASE_URL")
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['isimip.org'])
| mit |
Anonymouslemming/ansible | lib/ansible/modules/cloud/cloudstack/cs_account.py | 18 | 10806 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_account
short_description: Manages accounts on Apache CloudStack based clouds.
description:
- Create, disable, lock, enable and remove accounts.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
name:
description:
- Name of account.
required: true
username:
description:
- Username of the user to be created if account did not exist.
- Required on C(state=present).
required: false
default: null
password:
description:
- Password of the user to be created if account did not exist.
- Required on C(state=present).
required: false
default: null
first_name:
description:
- First name of the user to be created if account did not exist.
- Required on C(state=present).
required: false
default: null
last_name:
description:
- Last name of the user to be created if account did not exist.
- Required on C(state=present).
required: false
default: null
email:
description:
- Email of the user to be created if account did not exist.
- Required on C(state=present).
required: false
default: null
timezone:
description:
- Timezone of the user to be created if account did not exist.
required: false
default: null
network_domain:
description:
- Network domain of the account.
required: false
default: null
account_type:
description:
- Type of the account.
required: false
default: 'user'
choices: [ 'user', 'root_admin', 'domain_admin' ]
domain:
description:
- Domain the account is related to.
required: false
default: 'ROOT'
state:
description:
- State of the account.
- C(unlocked) is an alias for C(enabled).
required: false
default: 'present'
choices: [ 'present', 'absent', 'enabled', 'disabled', 'locked', 'unlocked' ]
poll_async:
description:
- Poll async jobs until job has finished.
required: false
default: true
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# create an account in domain 'CUSTOMERS'
local_action:
module: cs_account
name: customer_xy
username: customer_xy
password: S3Cur3
last_name: Doe
first_name: John
email: [email protected]
domain: CUSTOMERS
# Lock an existing account in domain 'CUSTOMERS'
local_action:
module: cs_account
name: customer_xy
domain: CUSTOMERS
state: locked
# Disable an existing account in domain 'CUSTOMERS'
local_action:
module: cs_account
name: customer_xy
domain: CUSTOMERS
state: disabled
# Enable an existing account in domain 'CUSTOMERS'
local_action:
module: cs_account
name: customer_xy
domain: CUSTOMERS
state: enabled
# Remove an account in domain 'CUSTOMERS'
local_action:
module: cs_account
name: customer_xy
domain: CUSTOMERS
state: absent
'''
RETURN = '''
---
id:
description: UUID of the account.
returned: success
type: string
sample: 87b1e0ce-4e01-11e4-bb66-0050569e64b8
name:
description: Name of the account.
returned: success
type: string
sample: [email protected]
account_type:
description: Type of the account.
returned: success
type: string
sample: user
state:
description: State of the account.
returned: success
type: string
sample: enabled
network_domain:
description: Network domain of the account.
returned: success
type: string
sample: example.local
domain:
  description: Domain the account is related to.
returned: success
type: string
sample: ROOT
'''
# import cloudstack common
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackAccount(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackAccount, self).__init__(module)
self.returns = {
'networkdomain': 'network_domain',
}
self.account = None
self.account_types = {
'user': 0,
'root_admin': 1,
'domain_admin': 2,
}
def get_account_type(self):
account_type = self.module.params.get('account_type')
return self.account_types[account_type]
def get_account(self):
if not self.account:
args = {
'listall': True,
'domainid': self.get_domain(key='id'),
}
accounts = self.query_api('listAccounts', **args)
if accounts:
account_name = self.module.params.get('name')
for a in accounts['account']:
if account_name == a['name']:
self.account = a
break
return self.account
def enable_account(self):
account = self.get_account()
if not account:
account = self.present_account()
if account['state'].lower() != 'enabled':
self.result['changed'] = True
args = {
'id': account['id'],
'account': self.module.params.get('name'),
'domainid': self.get_domain(key='id')
}
if not self.module.check_mode:
res = self.query_api('enableAccount', **args)
account = res['account']
return account
def lock_account(self):
return self.lock_or_disable_account(lock=True)
def disable_account(self):
return self.lock_or_disable_account()
def lock_or_disable_account(self, lock=False):
account = self.get_account()
if not account:
account = self.present_account()
# we need to enable the account to lock it.
if lock and account['state'].lower() == 'disabled':
account = self.enable_account()
if (lock and account['state'].lower() != 'locked' or
not lock and account['state'].lower() != 'disabled'):
self.result['changed'] = True
args = {
'id': account['id'],
'account': self.module.params.get('name'),
'domainid': self.get_domain(key='id'),
'lock': lock,
}
if not self.module.check_mode:
account = self.query_api('disableAccount', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
account = self.poll_job(account, 'account')
return account
def present_account(self):
required_params = [
'email',
'username',
'password',
'first_name',
'last_name',
]
self.module.fail_on_missing_params(required_params=required_params)
account = self.get_account()
if not account:
self.result['changed'] = True
args = {
'account': self.module.params.get('name'),
'domainid': self.get_domain(key='id'),
'accounttype': self.get_account_type(),
'networkdomain': self.module.params.get('network_domain'),
'username': self.module.params.get('username'),
'password': self.module.params.get('password'),
'firstname': self.module.params.get('first_name'),
'lastname': self.module.params.get('last_name'),
'email': self.module.params.get('email'),
'timezone': self.module.params.get('timezone')
}
if not self.module.check_mode:
res = self.query_api('createAccount', **args)
account = res['account']
return account
def absent_account(self):
account = self.get_account()
if account:
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('deleteAccount', id=account['id'])
poll_async = self.module.params.get('poll_async')
if poll_async:
self.poll_job(res, 'account')
return account
def get_result(self, account):
super(AnsibleCloudStackAccount, self).get_result(account)
if account:
if 'accounttype' in account:
for key, value in self.account_types.items():
if value == account['accounttype']:
self.result['account_type'] = key
break
return self.result
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name=dict(required=True),
state=dict(choices=['present', 'absent', 'enabled', 'disabled', 'locked', 'unlocked'], default='present'),
account_type=dict(choices=['user', 'root_admin', 'domain_admin'], default='user'),
network_domain=dict(),
domain=dict(default='ROOT'),
email=dict(),
first_name=dict(),
last_name=dict(),
username=dict(),
password=dict(no_log=True),
timezone=dict(),
poll_async=dict(type='bool', default=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
acs_acc = AnsibleCloudStackAccount(module)
state = module.params.get('state')
if state in ['absent']:
account = acs_acc.absent_account()
elif state in ['enabled', 'unlocked']:
account = acs_acc.enable_account()
elif state in ['disabled']:
account = acs_acc.disable_account()
elif state in ['locked']:
account = acs_acc.lock_account()
else:
account = acs_acc.present_account()
result = acs_acc.get_result(account)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
ahuarte47/QGIS | tests/src/python/test_qgslayoutmap.py | 14 | 15917 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutItemMap.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2017 Nyall Dawson'
__date__ = '20/10/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import QFileInfo, QRectF, QDir
from qgis.PyQt.QtXml import QDomDocument
from qgis.PyQt.QtGui import QPainter, QColor
from qgis.core import (QgsLayoutItemMap,
QgsRectangle,
QgsRasterLayer,
QgsVectorLayer,
QgsLayout,
QgsMapSettings,
QgsProject,
QgsMultiBandColorRenderer,
QgsCoordinateReferenceSystem,
QgsTextFormat,
QgsFontUtils,
QgsPalLayerSettings,
QgsNullSymbolRenderer,
QgsPoint,
QgsFeature,
QgsVectorLayerSimpleLabeling,
QgsLabelingEngineSettings,
QgsLayoutMeasurement,
QgsUnitTypes,
QgsLayoutObject,
QgsProperty,
QgsReadWriteContext)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
from qgslayoutchecker import QgsLayoutChecker
from test_qgslayoutitem import LayoutItemTestCase
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsLayoutMap(unittest.TestCase, LayoutItemTestCase):
@classmethod
def setUpClass(cls):
cls.item_class = QgsLayoutItemMap
def setUp(self):
self.report = "<h1>Python QgsLayoutItemMap Tests</h1>\n"
def tearDown(self):
report_file_path = "%s/qgistest.html" % QDir.tempPath()
with open(report_file_path, 'a') as report_file:
report_file.write(self.report)
def __init__(self, methodName):
"""Run once on class initialization."""
unittest.TestCase.__init__(self, methodName)
myPath = os.path.join(TEST_DATA_DIR, 'rgb256x256.png')
rasterFileInfo = QFileInfo(myPath)
self.raster_layer = QgsRasterLayer(rasterFileInfo.filePath(),
rasterFileInfo.completeBaseName())
rasterRenderer = QgsMultiBandColorRenderer(
self.raster_layer.dataProvider(), 1, 2, 3)
self.raster_layer.setRenderer(rasterRenderer)
myPath = os.path.join(TEST_DATA_DIR, 'points.shp')
vector_file_info = QFileInfo(myPath)
self.vector_layer = QgsVectorLayer(vector_file_info.filePath(),
vector_file_info.completeBaseName(), 'ogr')
assert self.vector_layer.isValid()
# pipe = mRasterLayer.pipe()
# assert pipe.set(rasterRenderer), 'Cannot set pipe renderer'
QgsProject.instance().addMapLayers([self.raster_layer, self.vector_layer])
# create layout with layout map
self.layout = QgsLayout(QgsProject.instance())
self.layout.initializeDefaults()
self.map = QgsLayoutItemMap(self.layout)
self.map.attemptSetSceneRect(QRectF(20, 20, 200, 100))
self.map.setFrameEnabled(True)
self.map.setLayers([self.raster_layer])
self.layout.addLayoutItem(self.map)
def testMapCrs(self):
# create layout with layout map
map_settings = QgsMapSettings()
map_settings.setLayers([self.vector_layer])
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
# check that new maps inherit project CRS
QgsProject.instance().setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(20, 20, 200, 100))
map.setFrameEnabled(True)
rectangle = QgsRectangle(-13838977, 2369660, -8672298, 6250909)
map.setExtent(rectangle)
map.setLayers([self.vector_layer])
layout.addLayoutItem(map)
self.assertEqual(map.crs().authid(), 'EPSG:4326')
self.assertFalse(map.presetCrs().isValid())
# overwrite CRS
map.setCrs(QgsCoordinateReferenceSystem('EPSG:3857'))
self.assertEqual(map.crs().authid(), 'EPSG:3857')
self.assertEqual(map.presetCrs().authid(), 'EPSG:3857')
checker = QgsLayoutChecker('composermap_crs3857', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# overwrite CRS
map.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
self.assertEqual(map.presetCrs().authid(), 'EPSG:4326')
self.assertEqual(map.crs().authid(), 'EPSG:4326')
rectangle = QgsRectangle(-124, 17, -78, 52)
map.zoomToExtent(rectangle)
checker = QgsLayoutChecker('composermap_crs4326', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# change back to project CRS
map.setCrs(QgsCoordinateReferenceSystem())
self.assertEqual(map.crs().authid(), 'EPSG:4326')
self.assertFalse(map.presetCrs().isValid())
def testContainsAdvancedEffects(self):
map_settings = QgsMapSettings()
map_settings.setLayers([self.vector_layer])
layout = QgsLayout(QgsProject.instance())
map = QgsLayoutItemMap(layout)
self.assertFalse(map.containsAdvancedEffects())
self.vector_layer.setBlendMode(QPainter.CompositionMode_Darken)
result = map.containsAdvancedEffects()
self.vector_layer.setBlendMode(QPainter.CompositionMode_SourceOver)
self.assertTrue(result)
def testRasterization(self):
map_settings = QgsMapSettings()
map_settings.setLayers([self.vector_layer])
layout = QgsLayout(QgsProject.instance())
map = QgsLayoutItemMap(layout)
self.assertFalse(map.requiresRasterization())
self.vector_layer.setBlendMode(QPainter.CompositionMode_Darken)
self.assertFalse(map.requiresRasterization())
self.assertTrue(map.containsAdvancedEffects())
map.setBackgroundEnabled(False)
self.assertTrue(map.requiresRasterization())
map.setBackgroundEnabled(True)
map.setBackgroundColor(QColor(1, 1, 1, 1))
self.assertTrue(map.requiresRasterization())
self.vector_layer.setBlendMode(QPainter.CompositionMode_SourceOver)
def testLabelMargin(self):
"""
Test rendering map item with a label margin set
"""
format = QgsTextFormat()
format.setFont(QgsFontUtils.getStandardTestFont("Bold"))
format.setSize(20)
format.setNamedStyle("Bold")
format.setColor(QColor(0, 0, 0))
settings = QgsPalLayerSettings()
settings.setFormat(format)
settings.fieldName = "'X'"
settings.isExpression = True
settings.placement = QgsPalLayerSettings.OverPoint
vl = QgsVectorLayer("Point?crs=epsg:4326&field=id:integer", "vl", "memory")
vl.setRenderer(QgsNullSymbolRenderer())
f = QgsFeature(vl.fields(), 1)
for x in range(15):
for y in range(15):
f.setGeometry(QgsPoint(x, y))
vl.dataProvider().addFeature(f)
vl.setLabeling(QgsVectorLayerSimpleLabeling(settings))
vl.setLabelsEnabled(True)
p = QgsProject()
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.UsePartialCandidates, False)
engine_settings.setFlag(QgsLabelingEngineSettings.DrawLabelRectOnly, True)
p.setLabelingEngineSettings(engine_settings)
p.addMapLayer(vl)
layout = QgsLayout(p)
layout.initializeDefaults()
p.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(10, 10, 180, 180))
map.setFrameEnabled(True)
map.zoomToExtent(vl.extent())
map.setLayers([vl])
layout.addLayoutItem(map)
checker = QgsLayoutChecker('composermap_label_nomargin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
map.setLabelMargin(QgsLayoutMeasurement(15, QgsUnitTypes.LayoutMillimeters))
checker = QgsLayoutChecker('composermap_label_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
map.setLabelMargin(QgsLayoutMeasurement(3, QgsUnitTypes.LayoutCentimeters))
checker = QgsLayoutChecker('composermap_label_cm_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
map.setMapRotation(45)
map.zoomToExtent(vl.extent())
map.setScale(map.scale() * 1.2)
checker = QgsLayoutChecker('composermap_rotated_label_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# data defined
map.setMapRotation(0)
map.zoomToExtent(vl.extent())
map.dataDefinedProperties().setProperty(QgsLayoutObject.MapLabelMargin, QgsProperty.fromExpression('1+3'))
map.refresh()
checker = QgsLayoutChecker('composermap_dd_label_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
def testPartialLabels(self):
"""
Test rendering map item with a show partial labels flag
"""
format = QgsTextFormat()
format.setFont(QgsFontUtils.getStandardTestFont("Bold"))
format.setSize(20)
format.setNamedStyle("Bold")
format.setColor(QColor(0, 0, 0))
settings = QgsPalLayerSettings()
settings.setFormat(format)
settings.fieldName = "'X'"
settings.isExpression = True
settings.placement = QgsPalLayerSettings.OverPoint
vl = QgsVectorLayer("Point?crs=epsg:4326&field=id:integer", "vl", "memory")
vl.setRenderer(QgsNullSymbolRenderer())
f = QgsFeature(vl.fields(), 1)
for x in range(15):
for y in range(15):
f.setGeometry(QgsPoint(x, y))
vl.dataProvider().addFeature(f)
vl.setLabeling(QgsVectorLayerSimpleLabeling(settings))
vl.setLabelsEnabled(True)
p = QgsProject()
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.UsePartialCandidates, False)
engine_settings.setFlag(QgsLabelingEngineSettings.DrawLabelRectOnly, True)
p.setLabelingEngineSettings(engine_settings)
p.addMapLayer(vl)
layout = QgsLayout(p)
layout.initializeDefaults()
p.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(10, 10, 180, 180))
map.setFrameEnabled(True)
map.zoomToExtent(vl.extent())
map.setLayers([vl])
layout.addLayoutItem(map)
# default should always be to hide partial labels
self.assertFalse(map.mapFlags() & QgsLayoutItemMap.ShowPartialLabels)
# hiding partial labels (the default)
map.setMapFlags(QgsLayoutItemMap.MapItemFlags())
checker = QgsLayoutChecker('composermap_label_nomargin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# showing partial labels
map.setMapFlags(QgsLayoutItemMap.ShowPartialLabels)
checker = QgsLayoutChecker('composermap_show_partial_labels', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
def testBlockingItems(self):
"""
Test rendering map item with blocking items
"""
format = QgsTextFormat()
format.setFont(QgsFontUtils.getStandardTestFont("Bold"))
format.setSize(20)
format.setNamedStyle("Bold")
format.setColor(QColor(0, 0, 0))
settings = QgsPalLayerSettings()
settings.setFormat(format)
settings.fieldName = "'X'"
settings.isExpression = True
settings.placement = QgsPalLayerSettings.OverPoint
vl = QgsVectorLayer("Point?crs=epsg:4326&field=id:integer", "vl", "memory")
vl.setRenderer(QgsNullSymbolRenderer())
f = QgsFeature(vl.fields(), 1)
for x in range(15):
for y in range(15):
f.setGeometry(QgsPoint(x, y))
vl.dataProvider().addFeature(f)
vl.setLabeling(QgsVectorLayerSimpleLabeling(settings))
vl.setLabelsEnabled(True)
p = QgsProject()
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.DrawLabelRectOnly, True)
p.setLabelingEngineSettings(engine_settings)
p.addMapLayer(vl)
layout = QgsLayout(p)
layout.initializeDefaults()
p.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(10, 10, 180, 180))
map.setFrameEnabled(True)
map.zoomToExtent(vl.extent())
map.setLayers([vl])
map.setId('map')
layout.addLayoutItem(map)
map2 = QgsLayoutItemMap(layout)
map2.attemptSetSceneRect(QRectF(0, 5, 50, 80))
map2.setFrameEnabled(True)
map2.setBackgroundEnabled(False)
map2.setId('map2')
layout.addLayoutItem(map2)
map3 = QgsLayoutItemMap(layout)
map3.attemptSetSceneRect(QRectF(150, 160, 50, 50))
map3.setFrameEnabled(True)
map3.setBackgroundEnabled(False)
map3.setId('map3')
layout.addLayoutItem(map3)
map.addLabelBlockingItem(map2)
map.addLabelBlockingItem(map3)
map.setMapFlags(QgsLayoutItemMap.MapItemFlags())
checker = QgsLayoutChecker('composermap_label_blockers', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
doc = QDomDocument("testdoc")
elem = layout.writeXml(doc, QgsReadWriteContext())
l2 = QgsLayout(p)
self.assertTrue(l2.readXml(elem, doc, QgsReadWriteContext()))
map_restore = [i for i in l2.items() if isinstance(i, QgsLayoutItemMap) and i.id() == 'map'][0]
map2_restore = [i for i in l2.items() if isinstance(i, QgsLayoutItemMap) and i.id() == 'map2'][0]
map3_restore = [i for i in l2.items() if isinstance(i, QgsLayoutItemMap) and i.id() == 'map3'][0]
self.assertTrue(map_restore.isLabelBlockingItem(map2_restore))
self.assertTrue(map_restore.isLabelBlockingItem(map3_restore))
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
alexbhanneman/CS430-Project | src/js/three.js-master/utils/exporters/blender/addons/io_three/exporter/__init__.py | 119 | 2708 | import os
import sys
import traceback
from .. import constants, logger, exceptions, dialogs
from . import scene, geometry, api, base_classes
def _error_handler(func):
def inner(filepath, options, *args, **kwargs):
level = options.get(constants.LOGGING, constants.DISABLED)
version = options.get('addon_version')
if level != constants.DISABLED:
logger.init('io_three.export.log', level=level)
if version is not None:
logger.debug("Addon Version %s", version)
api.init()
try:
func(filepath, options, *args, **kwargs)
except:
info = sys.exc_info()
trace = traceback.format_exception(
info[0], info[1], info[2].tb_next)
trace = ''.join(trace)
logger.error(trace)
print('Error recorded to %s' % logger.LOG_FILE)
raise
else:
print('Log: %s' % logger.LOG_FILE)
return inner
@_error_handler
def export_scene(filepath, options):
selected = []
# during scene exports unselect everything. this is needed for
# applying modifiers, if it is necessary
# record the selected nodes so that selection is restored later
for obj in api.selected_objects():
api.object.unselect(obj)
selected.append(obj)
active = api.active_object()
try:
scene_ = scene.Scene(filepath, options=options)
scene_.parse()
scene_.write()
except:
_restore_selection(selected, active)
raise
_restore_selection(selected, active)
@_error_handler
def export_geometry(filepath, options, node=None):
msg = ""
exception = None
    if node is None:
        node = api.active_object()
    if node is None:
        msg = "Nothing selected"
        logger.error(msg)
        exception = exceptions.SelectionError
    elif node.type != 'MESH':
        msg = "%s is not a valid mesh object" % node.name
        logger.error(msg)
        exception = exceptions.GeometryError
if exception is not None:
if api.batch_mode():
raise exception(msg)
else:
dialogs.error(msg)
return
mesh = api.object.mesh(node, options)
parent = base_classes.BaseScene(filepath, options)
geo = geometry.Geometry(mesh, parent)
geo.parse()
geo.write()
if not options.get(constants.EMBED_ANIMATION, True):
geo.write_animation(os.path.dirname(filepath))
def _restore_selection(objects, active):
for obj in objects:
api.object.select(obj)
api.set_active_object(active)
| mit |
miguelgrinberg/python-engineio | src/engineio/asyncio_server.py | 1 | 22271 | import asyncio
import urllib
from . import exceptions
from . import packet
from . import server
from . import asyncio_socket
class AsyncServer(server.Server):
"""An Engine.IO server for asyncio.
This class implements a fully compliant Engine.IO web server with support
for websocket and long-polling transports, compatible with the asyncio
framework on Python 3.5 or newer.
:param async_mode: The asynchronous model to use. See the Deployment
section in the documentation for a description of the
available options. Valid async modes are "aiohttp",
"sanic", "tornado" and "asgi". If this argument is not
given, "aiohttp" is tried first, followed by "sanic",
"tornado", and finally "asgi". The first async mode that
has all its dependencies installed is the one that is
chosen.
:param ping_interval: The interval in seconds at which the server pings
the client. The default is 25 seconds. For advanced
control, a two element tuple can be given, where
the first number is the ping interval and the second
is a grace period added by the server.
:param ping_timeout: The time in seconds that the client waits for the
server to respond before disconnecting. The default
is 20 seconds.
:param max_http_buffer_size: The maximum size of a message when using the
polling transport. The default is 1,000,000
bytes.
:param allow_upgrades: Whether to allow transport upgrades or not.
:param http_compression: Whether to compress packages when using the
polling transport.
:param compression_threshold: Only compress messages when their byte size
is greater than this value.
:param cookie: If set to a string, it is the name of the HTTP cookie the
                   server sends back to the client containing the client
session id. If set to a dictionary, the ``'name'`` key
contains the cookie name and other keys define cookie
attributes, where the value of each attribute can be a
string, a callable with no arguments, or a boolean. If set
to ``None`` (the default), a cookie is not sent to the
client.
:param cors_allowed_origins: Origin or list of origins that are allowed to
connect to this server. Only the same origin
is allowed by default. Set this argument to
``'*'`` to allow all origins, or to ``[]`` to
disable CORS handling.
:param cors_credentials: Whether credentials (cookies, authentication) are
allowed in requests to this server.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. Note that fatal
errors are logged even when ``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions.
:param async_handlers: If set to ``True``, run message event handlers in
non-blocking threads. To run handlers synchronously,
set to ``False``. The default is ``True``.
:param kwargs: Reserved for future extensions, any additional parameters
given as keyword arguments will be silently ignored.
"""
def is_asyncio_based(self):
return True
def async_modes(self):
return ['aiohttp', 'sanic', 'tornado', 'asgi']
def attach(self, app, engineio_path='engine.io'):
"""Attach the Engine.IO server to an application."""
engineio_path = engineio_path.strip('/')
self._async['create_route'](app, self, '/{}/'.format(engineio_path))
async def send(self, sid, data):
"""Send a message to a client.
:param sid: The session id of the recipient client.
:param data: The data to send to the client. Data can be of type
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
or ``dict``, the data will be serialized as JSON.
Note: this method is a coroutine.
"""
try:
socket = self._get_socket(sid)
except KeyError:
# the socket is not available
self.logger.warning('Cannot send to sid %s', sid)
return
await socket.send(packet.Packet(packet.MESSAGE, data=data))
async def get_session(self, sid):
"""Return the user session for a client.
:param sid: The session id of the client.
The return value is a dictionary. Modifications made to this
dictionary are not guaranteed to be preserved. If you want to modify
the user session, use the ``session`` context manager instead.
"""
socket = self._get_socket(sid)
return socket.session
async def save_session(self, sid, session):
"""Store the user session for a client.
:param sid: The session id of the client.
:param session: The session dictionary.
"""
socket = self._get_socket(sid)
socket.session = session
def session(self, sid):
"""Return the user session for a client with context manager syntax.
:param sid: The session id of the client.
This is a context manager that returns the user session dictionary for
the client. Any changes that are made to this dictionary inside the
context manager block are saved back to the session. Example usage::
@eio.on('connect')
def on_connect(sid, environ):
username = authenticate_user(environ)
if not username:
return False
with eio.session(sid) as session:
session['username'] = username
@eio.on('message')
def on_message(sid, msg):
async with eio.session(sid) as session:
print('received message from ', session['username'])
"""
class _session_context_manager(object):
def __init__(self, server, sid):
self.server = server
self.sid = sid
self.session = None
async def __aenter__(self):
                self.session = await self.server.get_session(self.sid)
return self.session
async def __aexit__(self, *args):
                await self.server.save_session(self.sid, self.session)
return _session_context_manager(self, sid)
async def disconnect(self, sid=None):
"""Disconnect a client.
:param sid: The session id of the client to close. If this parameter
is not given, then all clients are closed.
Note: this method is a coroutine.
"""
if sid is not None:
try:
socket = self._get_socket(sid)
except KeyError: # pragma: no cover
# the socket was already closed or gone
pass
else:
await socket.close()
if sid in self.sockets: # pragma: no cover
del self.sockets[sid]
else:
            if self.sockets:
                await asyncio.wait([client.close()
                                    for client in self.sockets.values()])
            self.sockets = {}
async def handle_request(self, *args, **kwargs):
"""Handle an HTTP request from the client.
This is the entry point of the Engine.IO application. This function
returns the HTTP response to deliver to the client.
Note: this method is a coroutine.
"""
translate_request = self._async['translate_request']
if asyncio.iscoroutinefunction(translate_request):
environ = await translate_request(*args, **kwargs)
else:
environ = translate_request(*args, **kwargs)
if self.cors_allowed_origins != []:
# Validate the origin header if present
# This is important for WebSocket more than for HTTP, since
# browsers only apply CORS controls to HTTP.
origin = environ.get('HTTP_ORIGIN')
if origin:
allowed_origins = self._cors_allowed_origins(environ)
if allowed_origins is not None and origin not in \
allowed_origins:
self._log_error_once(
origin + ' is not an accepted origin.', 'bad-origin')
return await self._make_response(
self._bad_request(
origin + ' is not an accepted origin.'),
environ)
method = environ['REQUEST_METHOD']
query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
sid = query['sid'][0] if 'sid' in query else None
jsonp = False
jsonp_index = None
        # make sure the client speaks a compatible Engine.IO version
if sid is None and query.get('EIO') != ['4']:
self._log_error_once(
'The client is using an unsupported version of the Socket.IO '
'or Engine.IO protocols', 'bad-version'
)
return await self._make_response(self._bad_request(
'The client is using an unsupported version of the Socket.IO '
'or Engine.IO protocols'
), environ)
if 'j' in query:
jsonp = True
try:
jsonp_index = int(query['j'][0])
except (ValueError, KeyError, IndexError):
# Invalid JSONP index number
pass
if jsonp and jsonp_index is None:
self._log_error_once('Invalid JSONP index number',
'bad-jsonp-index')
r = self._bad_request('Invalid JSONP index number')
elif method == 'GET':
if sid is None:
transport = query.get('transport', ['polling'])[0]
# transport must be one of 'polling' or 'websocket'.
# if 'websocket', the HTTP_UPGRADE header must match.
upgrade_header = environ.get('HTTP_UPGRADE').lower() \
if 'HTTP_UPGRADE' in environ else None
if transport == 'polling' \
or transport == upgrade_header == 'websocket':
r = await self._handle_connect(environ, transport,
jsonp_index)
else:
self._log_error_once('Invalid transport ' + transport,
'bad-transport')
r = self._bad_request('Invalid transport ' + transport)
else:
if sid not in self.sockets:
self._log_error_once('Invalid session ' + sid, 'bad-sid')
r = self._bad_request('Invalid session ' + sid)
else:
socket = self._get_socket(sid)
try:
packets = await socket.handle_get_request(environ)
if isinstance(packets, list):
r = self._ok(packets, jsonp_index=jsonp_index)
else:
r = packets
except exceptions.EngineIOError:
if sid in self.sockets: # pragma: no cover
await self.disconnect(sid)
r = self._bad_request()
if sid in self.sockets and self.sockets[sid].closed:
del self.sockets[sid]
elif method == 'POST':
if sid is None or sid not in self.sockets:
self._log_error_once('Invalid session ' + sid, 'bad-sid')
r = self._bad_request('Invalid session ' + sid)
else:
socket = self._get_socket(sid)
try:
await socket.handle_post_request(environ)
r = self._ok(jsonp_index=jsonp_index)
except exceptions.EngineIOError:
if sid in self.sockets: # pragma: no cover
await self.disconnect(sid)
r = self._bad_request()
except: # pragma: no cover
# for any other unexpected errors, we log the error
# and keep going
self.logger.exception('post request handler error')
r = self._ok(jsonp_index=jsonp_index)
elif method == 'OPTIONS':
r = self._ok()
else:
self.logger.warning('Method %s not supported', method)
r = self._method_not_found()
if not isinstance(r, dict):
return r
if self.http_compression and \
len(r['response']) >= self.compression_threshold:
encodings = [e.split(';')[0].strip() for e in
environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
for encoding in encodings:
if encoding in self.compression_methods:
r['response'] = \
getattr(self, '_' + encoding)(r['response'])
r['headers'] += [('Content-Encoding', encoding)]
break
return await self._make_response(r, environ)
def start_background_task(self, target, *args, **kwargs):
"""Start a background task using the appropriate async model.
This is a utility function that applications can use to start a
background task using the method that is compatible with the
selected async mode.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
        The return value is an ``asyncio.Task`` object.
"""
return asyncio.ensure_future(target(*args, **kwargs))
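    # Illustrative sketch: for a hypothetical coroutine function ``my_task``,
    # ``task = eio.start_background_task(my_task, arg)`` schedules it and
    # returns the asyncio.Task, which can later be awaited or cancelled.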
async def sleep(self, seconds=0):
"""Sleep for the requested amount of time using the appropriate async
model.
This is a utility function that applications can use to put a task to
sleep without having to worry about using the correct call for the
selected async mode.
Note: this method is a coroutine.
"""
return await asyncio.sleep(seconds)
def create_queue(self, *args, **kwargs):
"""Create a queue object using the appropriate async model.
This is a utility function that applications can use to create a queue
without having to worry about using the correct call for the selected
async mode. For asyncio based async modes, this returns an instance of
``asyncio.Queue``.
"""
return asyncio.Queue(*args, **kwargs)
def get_queue_empty_exception(self):
"""Return the queue empty exception for the appropriate async model.
This is a utility function that applications can use to work with a
queue without having to worry about using the correct call for the
selected async mode. For asyncio based async modes, this returns an
instance of ``asyncio.QueueEmpty``.
"""
return asyncio.QueueEmpty
def create_event(self, *args, **kwargs):
"""Create an event object using the appropriate async model.
This is a utility function that applications can use to create an
event without having to worry about using the correct call for the
selected async mode. For asyncio based async modes, this returns
an instance of ``asyncio.Event``.
"""
return asyncio.Event(*args, **kwargs)
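    # Illustrative sketch of the queue/event helpers inside a coroutine
    # (``eio`` is an AsyncServer instance; the names are hypothetical):
    #
    #     queue = eio.create_queue()
    #     event = eio.create_event()
    #     await queue.put('item')
    #     try:
    #         item = queue.get_nowait()
    #     except eio.get_queue_empty_exception():
    #         item = None
    #     event.set()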
async def _make_response(self, response_dict, environ):
cors_headers = self._cors_headers(environ)
make_response = self._async['make_response']
if asyncio.iscoroutinefunction(make_response):
response = await make_response(
response_dict['status'],
response_dict['headers'] + cors_headers,
response_dict['response'], environ)
else:
response = make_response(
response_dict['status'],
response_dict['headers'] + cors_headers,
response_dict['response'], environ)
return response
async def _handle_connect(self, environ, transport, jsonp_index=None):
"""Handle a client connection request."""
if self.start_service_task:
# start the service task to monitor connected clients
self.start_service_task = False
self.start_background_task(self._service_task)
sid = self.generate_id()
s = asyncio_socket.AsyncSocket(self, sid)
self.sockets[sid] = s
pkt = packet.Packet(
packet.OPEN, {'sid': sid,
'upgrades': self._upgrades(sid, transport),
'pingTimeout': int(self.ping_timeout * 1000),
'pingInterval': int(self.ping_interval * 1000)})
await s.send(pkt)
s.schedule_ping()
ret = await self._trigger_event('connect', sid, environ,
run_async=False)
if ret is not None and ret is not True:
del self.sockets[sid]
self.logger.warning('Application rejected connection')
return self._unauthorized(ret or None)
if transport == 'websocket':
ret = await s.handle_get_request(environ)
if s.closed and sid in self.sockets:
# websocket connection ended, so we are done
del self.sockets[sid]
return ret
else:
s.connected = True
headers = None
if self.cookie:
if isinstance(self.cookie, dict):
headers = [(
'Set-Cookie',
self._generate_sid_cookie(sid, self.cookie)
)]
else:
headers = [(
'Set-Cookie',
self._generate_sid_cookie(sid, {
'name': self.cookie, 'path': '/', 'SameSite': 'Lax'
})
)]
try:
return self._ok(await s.poll(), headers=headers,
jsonp_index=jsonp_index)
except exceptions.QueueEmpty:
return self._bad_request()
async def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
ret = None
if event in self.handlers:
            if asyncio.iscoroutinefunction(self.handlers[event]):
if run_async:
return self.start_background_task(self.handlers[event],
*args)
else:
try:
ret = await self.handlers[event](*args)
except asyncio.CancelledError: # pragma: no cover
pass
except:
self.logger.exception(event + ' async handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
else:
if run_async:
async def async_handler():
return self.handlers[event](*args)
return self.start_background_task(async_handler)
else:
try:
ret = self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
return ret
async def _service_task(self): # pragma: no cover
"""Monitor connected clients and clean up those that time out."""
while True:
if len(self.sockets) == 0:
# nothing to do
await self.sleep(self.ping_timeout)
continue
# go through the entire client list in a ping interval cycle
sleep_interval = self.ping_timeout / len(self.sockets)
try:
# iterate over the current clients
for socket in self.sockets.copy().values():
if not socket.closing and not socket.closed:
await socket.check_ping_timeout()
await self.sleep(sleep_interval)
except (
SystemExit,
KeyboardInterrupt,
asyncio.CancelledError,
GeneratorExit,
):
self.logger.info('service task canceled')
break
except:
if asyncio.get_event_loop().is_closed():
self.logger.info('event loop is closed, exiting service '
'task')
break
# an unexpected exception has occurred, log it and continue
self.logger.exception('service task exception')
| mit |
gilisagreen/Project4 | lib/itsdangerous.py | 296 | 30509 | # -*- coding: utf-8 -*-
"""
itsdangerous
~~~~~~~~~~~~
A module that implements various functions to deal with untrusted
sources. Mainly useful for web applications.
:copyright: (c) 2011 by Armin Ronacher and the Django Software Foundation.
:license: BSD, see LICENSE for more details.
"""
import sys
import hmac
import zlib
import time
import base64
import hashlib
import operator
from datetime import datetime
PY2 = sys.version_info[0] == 2
if PY2:
from itertools import izip
text_type = unicode
int_to_byte = chr
number_types = (int, long, float)
else:
from functools import reduce
izip = zip
text_type = str
int_to_byte = operator.methodcaller('to_bytes', 1, 'big')
number_types = (int, float)
try:
import simplejson as json
except ImportError:
import json
class _CompactJSON(object):
"""Wrapper around simplejson that strips whitespace.
"""
def loads(self, payload):
return json.loads(payload)
def dumps(self, obj):
return json.dumps(obj, separators=(',', ':'))
compact_json = _CompactJSON()
# 2011/01/01 in UTC
EPOCH = 1293840000
def want_bytes(s, encoding='utf-8', errors='strict'):
if isinstance(s, text_type):
s = s.encode(encoding, errors)
return s
def is_text_serializer(serializer):
"""Checks wheather a serializer generates text or binary."""
return isinstance(serializer.dumps({}), text_type)
# Starting with 3.3 the standard library has a c-implementation for
# constant time string compares.
_builtin_constant_time_compare = getattr(hmac, 'compare_digest', None)
def constant_time_compare(val1, val2):
"""Returns True if the two strings are equal, False otherwise.
    The time taken is independent of the number of characters that match. Do
    not use this function for anything else than comparison with known
    length targets.
    This should be implemented in C in order to get it completely right.
"""
if _builtin_constant_time_compare is not None:
return _builtin_constant_time_compare(val1, val2)
len_eq = len(val1) == len(val2)
if len_eq:
result = 0
left = val1
else:
result = 1
left = val2
for x, y in izip(bytearray(left), bytearray(val2)):
result |= x ^ y
return result == 0
class BadData(Exception):
"""Raised if bad data of any sort was encountered. This is the
base for all exceptions that itsdangerous is currently using.
.. versionadded:: 0.15
"""
message = None
def __init__(self, message):
Exception.__init__(self, message)
self.message = message
def __str__(self):
return text_type(self.message)
if PY2:
__unicode__ = __str__
def __str__(self):
return self.__unicode__().encode('utf-8')
class BadPayload(BadData):
"""This error is raised in situations when payload is loaded without
checking the signature first and an exception happend as a result of
that. The original exception that caused that will be stored on the
exception as :attr:`original_error`.
.. versionadded:: 0.15
"""
def __init__(self, message, original_error=None):
BadData.__init__(self, message)
#: If available, the error that indicates why the payload
#: was not valid. This might be `None`.
self.original_error = original_error
class BadSignature(BadData):
"""This error is raised if a signature does not match. As of
itsdangerous 0.14 there are helpful attributes on the exception
    instances. You can also catch the baseclass :exc:`BadData`.
"""
def __init__(self, message, payload=None):
BadData.__init__(self, message)
#: The payload that failed the signature test. In some
#: situations you might still want to inspect this, even if
#: you know it was tampered with.
#:
#: .. versionadded:: 0.14
self.payload = payload
class BadTimeSignature(BadSignature):
"""Raised for time based signatures that fail. This is a subclass
    of :class:`BadSignature`, so you can catch it as well.
"""
def __init__(self, message, payload=None, date_signed=None):
BadSignature.__init__(self, message, payload)
#: If the signature expired this exposes the date of when the
#: signature was created. This can be helpful in order to
        #: tell the user how long ago the link went stale.
#:
#: .. versionadded:: 0.14
self.date_signed = date_signed
class SignatureExpired(BadTimeSignature):
"""Signature timestamp is older than required max_age. This is a
subclass of :exc:`BadTimeSignature` so you can use the baseclass for
catching the error.
"""
def base64_encode(string):
"""base64 encodes a single bytestring (and is tolerant to getting
called with a unicode string).
The resulting bytestring is safe for putting into URLs.
"""
string = want_bytes(string)
return base64.urlsafe_b64encode(string).strip(b'=')
def base64_decode(string):
"""base64 decodes a single bytestring (and is tolerant to getting
called with a unicode string).
The result is also a bytestring.
"""
string = want_bytes(string, encoding='ascii', errors='ignore')
return base64.urlsafe_b64decode(string + b'=' * (-len(string) % 4))
def int_to_bytes(num):
assert num >= 0
rv = []
while num:
rv.append(int_to_byte(num & 0xff))
num >>= 8
return b''.join(reversed(rv))
def bytes_to_int(bytestr):
return reduce(lambda a, b: a << 8 | b, bytearray(bytestr), 0)
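# A quick round-trip sketch of the two helpers above (illustrative only):
#
#     int_to_bytes(66051)            # b'\x01\x02\x03' (66051 == 0x010203)
#     bytes_to_int(b'\x01\x02\x03')  # 66051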
class SigningAlgorithm(object):
"""Subclasses of `SigningAlgorithm` have to implement `get_signature` to
provide signature generation functionality.
"""
def get_signature(self, key, value):
"""Returns the signature for the given key and value"""
raise NotImplementedError()
def verify_signature(self, key, value, sig):
"""Verifies the given signature matches the expected signature"""
return constant_time_compare(sig, self.get_signature(key, value))
class NoneAlgorithm(SigningAlgorithm):
"""This class provides a algorithm that does not perform any signing and
returns an empty signature.
"""
def get_signature(self, key, value):
return b''
class HMACAlgorithm(SigningAlgorithm):
"""This class provides signature generation using HMACs."""
#: The digest method to use with the MAC algorithm. This defaults to sha1
#: but can be changed for any other function in the hashlib module.
default_digest_method = staticmethod(hashlib.sha1)
def __init__(self, digest_method=None):
if digest_method is None:
digest_method = self.default_digest_method
self.digest_method = digest_method
def get_signature(self, key, value):
mac = hmac.new(key, msg=value, digestmod=self.digest_method)
return mac.digest()
class Signer(object):
"""This class can sign bytes and unsign it and validate the signature
provided.
Salt can be used to namespace the hash, so that a signed string is only
    valid for a given namespace. Leaving this at the default value, or
    re-using a salt value across different parts of your application where
    the same signed value in one part can mean something different in
    another part, is a security risk.
See :ref:`the-salt` for an example of what the salt is doing and how you
can utilize it.
.. versionadded:: 0.14
`key_derivation` and `digest_method` were added as arguments to the
class constructor.
.. versionadded:: 0.18
`algorithm` was added as an argument to the class constructor.
"""
#: The digest method to use for the signer. This defaults to sha1 but can
#: be changed for any other function in the hashlib module.
#:
#: .. versionchanged:: 0.14
default_digest_method = staticmethod(hashlib.sha1)
#: Controls how the key is derived. The default is Django style
#: concatenation. Possible values are ``concat``, ``django-concat``
#: and ``hmac``. This is used for deriving a key from the secret key
#: with an added salt.
#:
#: .. versionadded:: 0.14
default_key_derivation = 'django-concat'
def __init__(self, secret_key, salt=None, sep='.', key_derivation=None,
digest_method=None, algorithm=None):
self.secret_key = want_bytes(secret_key)
self.sep = sep
self.salt = 'itsdangerous.Signer' if salt is None else salt
if key_derivation is None:
key_derivation = self.default_key_derivation
self.key_derivation = key_derivation
if digest_method is None:
digest_method = self.default_digest_method
self.digest_method = digest_method
if algorithm is None:
algorithm = HMACAlgorithm(self.digest_method)
self.algorithm = algorithm
def derive_key(self):
"""This method is called to derive the key. If you're unhappy with
the default key derivation choices you can override them here.
Keep in mind that the key derivation in itsdangerous is not intended
to be used as a security method to make a complex key out of a short
password. Instead you should use large random secret keys.
"""
salt = want_bytes(self.salt)
if self.key_derivation == 'concat':
return self.digest_method(salt + self.secret_key).digest()
elif self.key_derivation == 'django-concat':
return self.digest_method(salt + b'signer' +
self.secret_key).digest()
elif self.key_derivation == 'hmac':
mac = hmac.new(self.secret_key, digestmod=self.digest_method)
mac.update(salt)
return mac.digest()
elif self.key_derivation == 'none':
return self.secret_key
else:
raise TypeError('Unknown key derivation method')
def get_signature(self, value):
"""Returns the signature for the given value"""
value = want_bytes(value)
key = self.derive_key()
sig = self.algorithm.get_signature(key, value)
return base64_encode(sig)
def sign(self, value):
"""Signs the given string."""
return value + want_bytes(self.sep) + self.get_signature(value)
def verify_signature(self, value, sig):
"""Verifies the signature for the given value."""
key = self.derive_key()
sig = base64_decode(sig)
return self.algorithm.verify_signature(key, value, sig)
def unsign(self, signed_value):
"""Unsigns the given string."""
signed_value = want_bytes(signed_value)
sep = want_bytes(self.sep)
if sep not in signed_value:
raise BadSignature('No %r found in value' % self.sep)
value, sig = signed_value.rsplit(sep, 1)
if self.verify_signature(value, sig):
return value
raise BadSignature('Signature %r does not match' % sig,
payload=value)
def validate(self, signed_value):
"""Just validates the given signed value. Returns `True` if the
signature exists and is valid, `False` otherwise."""
try:
self.unsign(signed_value)
return True
except BadSignature:
return False
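# A minimal Signer round-trip sketch (the key and payload below are
# hypothetical; the signature shown is elided, not a real value):
#
#     s = Signer('secret-key')
#     signed = s.sign(b'my string')     # b'my string.<signature>'
#     s.unsign(signed)                  # b'my string'
#     s.validate(b'my string.garbage')  # False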
class TimestampSigner(Signer):
"""Works like the regular :class:`Signer` but also records the time
of the signing and can be used to expire signatures. The unsign
    method can raise a :exc:`SignatureExpired` exception if the unsigning
    failed because the signature has expired. This exception is a subclass
of :exc:`BadSignature`.
"""
def get_timestamp(self):
"""Returns the current timestamp. This implementation returns the
seconds since 1/1/2011. The function must return an integer.
"""
return int(time.time() - EPOCH)
def timestamp_to_datetime(self, ts):
"""Used to convert the timestamp from `get_timestamp` into a
datetime object.
"""
return datetime.utcfromtimestamp(ts + EPOCH)
def sign(self, value):
"""Signs the given string and also attaches a time information."""
value = want_bytes(value)
timestamp = base64_encode(int_to_bytes(self.get_timestamp()))
sep = want_bytes(self.sep)
value = value + sep + timestamp
return value + sep + self.get_signature(value)
def unsign(self, value, max_age=None, return_timestamp=False):
"""Works like the regular :meth:`~Signer.unsign` but can also
validate the time. See the base docstring of the class for
the general behavior. If `return_timestamp` is set to `True`
the timestamp of the signature will be returned as naive
:class:`datetime.datetime` object in UTC.
"""
try:
result = Signer.unsign(self, value)
sig_error = None
except BadSignature as e:
sig_error = e
result = e.payload or b''
sep = want_bytes(self.sep)
# If there is no timestamp in the result there is something
# seriously wrong. In case there was a signature error, we raise
# that one directly, otherwise we have a weird situation in which
# we shouldn't have come except someone uses a time-based serializer
# on non-timestamp data, so catch that.
        if sep not in result:
if sig_error:
raise sig_error
raise BadTimeSignature('timestamp missing', payload=result)
value, timestamp = result.rsplit(sep, 1)
try:
timestamp = bytes_to_int(base64_decode(timestamp))
except Exception:
timestamp = None
# Signature is *not* okay. Raise a proper error now that we have
# split the value and the timestamp.
if sig_error is not None:
raise BadTimeSignature(text_type(sig_error), payload=value,
date_signed=timestamp)
# Signature was okay but the timestamp is actually not there or
# malformed. Should not happen, but well. We handle it nonetheless
if timestamp is None:
raise BadTimeSignature('Malformed timestamp', payload=value)
# Check timestamp is not older than max_age
if max_age is not None:
age = self.get_timestamp() - timestamp
if age > max_age:
raise SignatureExpired(
'Signature age %s > %s seconds' % (age, max_age),
payload=value,
date_signed=self.timestamp_to_datetime(timestamp))
if return_timestamp:
return value, self.timestamp_to_datetime(timestamp)
return value
def validate(self, signed_value, max_age=None):
"""Just validates the given signed value. Returns `True` if the
signature exists and is valid, `False` otherwise."""
try:
self.unsign(signed_value, max_age=max_age)
return True
except BadSignature:
return False
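# A TimestampSigner sketch with expiry (hypothetical key; the outcome
# depends on when unsign() is called relative to sign()):
#
#     ts = TimestampSigner('secret-key')
#     token = ts.sign(b'hello')
#     ts.unsign(token, max_age=5)  # b'hello' while fresh
#     # ...after more than 5 seconds, unsign() raises SignatureExpired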
class Serializer(object):
"""This class provides a serialization interface on top of the
signer. It provides a similar API to json/pickle and other modules but is
slightly differently structured internally. If you want to change the
underlying implementation for parsing and loading you have to override the
:meth:`load_payload` and :meth:`dump_payload` functions.
This implementation uses simplejson if available for dumping and loading
and will fall back to the standard library's json module if it's not
available.
    Starting with 0.14 you do not need to subclass this class in order to
    switch out or customize the :class:`Signer`. You can instead pass a
    different class to the constructor, as well as a dictionary of keyword
    arguments that should be forwarded::
        s = Serializer(signer_kwargs={'key_derivation': 'hmac'})
    .. versionchanged:: 0.14
The `signer` and `signer_kwargs` parameters were added to the
constructor.
"""
#: If a serializer module or class is not passed to the constructor
#: this one is picked up. This currently defaults to :mod:`json`.
default_serializer = json
#: The default :class:`Signer` class that is being used by this
#: serializer.
#:
#: .. versionadded:: 0.14
default_signer = Signer
def __init__(self, secret_key, salt=b'itsdangerous', serializer=None,
signer=None, signer_kwargs=None):
self.secret_key = want_bytes(secret_key)
self.salt = want_bytes(salt)
if serializer is None:
serializer = self.default_serializer
self.serializer = serializer
self.is_text_serializer = is_text_serializer(serializer)
if signer is None:
signer = self.default_signer
self.signer = signer
self.signer_kwargs = signer_kwargs or {}
def load_payload(self, payload, serializer=None):
"""Loads the encoded object. This function raises :class:`BadPayload`
if the payload is not valid. The `serializer` parameter can be used to
override the serializer stored on the class. The encoded payload is
always byte based.
"""
if serializer is None:
serializer = self.serializer
is_text = self.is_text_serializer
else:
is_text = is_text_serializer(serializer)
try:
if is_text:
payload = payload.decode('utf-8')
return serializer.loads(payload)
except Exception as e:
raise BadPayload('Could not load the payload because an '
                             'exception occurred while deserializing the data',
original_error=e)
def dump_payload(self, obj):
"""Dumps the encoded object. The return value is always a
bytestring. If the internal serializer is text based the value
will automatically be encoded to utf-8.
"""
return want_bytes(self.serializer.dumps(obj))
def make_signer(self, salt=None):
"""A method that creates a new instance of the signer to be used.
The default implementation uses the :class:`Signer` baseclass.
"""
if salt is None:
salt = self.salt
return self.signer(self.secret_key, salt=salt, **self.signer_kwargs)
def dumps(self, obj, salt=None):
"""Returns a signed string serialized with the internal serializer.
The return value can be either a byte or unicode string depending
on the format of the internal serializer.
"""
payload = want_bytes(self.dump_payload(obj))
rv = self.make_signer(salt).sign(payload)
if self.is_text_serializer:
rv = rv.decode('utf-8')
return rv
def dump(self, obj, f, salt=None):
"""Like :meth:`dumps` but dumps into a file. The file handle has
to be compatible with what the internal serializer expects.
"""
f.write(self.dumps(obj, salt))
def loads(self, s, salt=None):
"""Reverse of :meth:`dumps`, raises :exc:`BadSignature` if the
signature validation fails.
"""
s = want_bytes(s)
return self.load_payload(self.make_signer(salt).unsign(s))
def load(self, f, salt=None):
"""Like :meth:`loads` but loads from a file."""
return self.loads(f.read(), salt)
def loads_unsafe(self, s, salt=None):
"""Like :meth:`loads` but without verifying the signature. This is
potentially very dangerous to use depending on how your serializer
works. The return value is ``(signature_okay, payload)`` instead of
just the payload. The first item will be a boolean that indicates
if the signature is okay (``True``) or if it failed. This function
never fails.
Use it for debugging only and if you know that your serializer module
is not exploitable (eg: do not use it with a pickle serializer).
.. versionadded:: 0.15
"""
return self._loads_unsafe_impl(s, salt)
def _loads_unsafe_impl(self, s, salt, load_kwargs=None,
load_payload_kwargs=None):
"""Lowlevel helper function to implement :meth:`loads_unsafe` in
serializer subclasses.
"""
try:
return True, self.loads(s, salt=salt, **(load_kwargs or {}))
except BadSignature as e:
if e.payload is None:
return False, None
try:
return False, self.load_payload(e.payload,
**(load_payload_kwargs or {}))
except BadPayload:
return False, None
def load_unsafe(self, f, *args, **kwargs):
"""Like :meth:`loads_unsafe` but loads from a file.
.. versionadded:: 0.15
"""
return self.loads_unsafe(f.read(), *args, **kwargs)
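# A Serializer round-trip sketch (hypothetical key; the exact signature in
# the output is elided):
#
#     s = Serializer('secret-key')
#     payload = s.dumps([1, 2, 3])   # '[1, 2, 3].<signature>'
#     s.loads(payload)               # [1, 2, 3]
#     s.loads_unsafe(payload + 'x')  # (False, [1, 2, 3])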
class TimedSerializer(Serializer):
"""Uses the :class:`TimestampSigner` instead of the default
    :class:`Signer`.
"""
default_signer = TimestampSigner
def loads(self, s, max_age=None, return_timestamp=False, salt=None):
"""Reverse of :meth:`dumps`, raises :exc:`BadSignature` if the
signature validation fails. If a `max_age` is provided it will
ensure the signature is not older than that time in seconds. In
case the signature is outdated, :exc:`SignatureExpired` is raised
which is a subclass of :exc:`BadSignature`. All arguments are
forwarded to the signer's :meth:`~TimestampSigner.unsign` method.
"""
base64d, timestamp = self.make_signer(salt) \
.unsign(s, max_age, return_timestamp=True)
payload = self.load_payload(base64d)
if return_timestamp:
return payload, timestamp
return payload
def loads_unsafe(self, s, max_age=None, salt=None):
load_kwargs = {'max_age': max_age}
load_payload_kwargs = {}
return self._loads_unsafe_impl(s, salt, load_kwargs, load_payload_kwargs)
class JSONWebSignatureSerializer(Serializer):
"""This serializer implements JSON Web Signature (JWS) support. Only
supports the JWS Compact Serialization.
"""
jws_algorithms = {
'HS256': HMACAlgorithm(hashlib.sha256),
'HS384': HMACAlgorithm(hashlib.sha384),
'HS512': HMACAlgorithm(hashlib.sha512),
'none': NoneAlgorithm(),
}
#: The default algorithm to use for signature generation
default_algorithm = 'HS256'
default_serializer = compact_json
def __init__(self, secret_key, salt=None, serializer=None,
signer=None, signer_kwargs=None, algorithm_name=None):
Serializer.__init__(self, secret_key, salt, serializer,
signer, signer_kwargs)
if algorithm_name is None:
algorithm_name = self.default_algorithm
self.algorithm_name = algorithm_name
self.algorithm = self.make_algorithm(algorithm_name)
def load_payload(self, payload, return_header=False):
payload = want_bytes(payload)
if b'.' not in payload:
raise BadPayload('No "." found in value')
base64d_header, base64d_payload = payload.split(b'.', 1)
try:
json_header = base64_decode(base64d_header)
json_payload = base64_decode(base64d_payload)
except Exception as e:
raise BadPayload('Could not base64 decode the payload because of '
'an exception', original_error=e)
header = Serializer.load_payload(self, json_header,
serializer=json)
if not isinstance(header, dict):
raise BadPayload('Header payload is not a JSON object')
payload = Serializer.load_payload(self, json_payload)
if return_header:
return payload, header
return payload
def dump_payload(self, header, obj):
base64d_header = base64_encode(self.serializer.dumps(header))
base64d_payload = base64_encode(self.serializer.dumps(obj))
return base64d_header + b'.' + base64d_payload
def make_algorithm(self, algorithm_name):
try:
return self.jws_algorithms[algorithm_name]
except KeyError:
raise NotImplementedError('Algorithm not supported')
def make_signer(self, salt=None, algorithm=None):
if salt is None:
salt = self.salt
key_derivation = 'none' if salt is None else None
if algorithm is None:
algorithm = self.algorithm
return self.signer(self.secret_key, salt=salt, sep='.',
key_derivation=key_derivation, algorithm=algorithm)
def make_header(self, header_fields):
header = header_fields.copy() if header_fields else {}
header['alg'] = self.algorithm_name
return header
def dumps(self, obj, salt=None, header_fields=None):
"""Like :meth:`~Serializer.dumps` but creates a JSON Web Signature. It
also allows for specifying additional fields to be included in the JWS
Header.
"""
header = self.make_header(header_fields)
signer = self.make_signer(salt, self.algorithm)
return signer.sign(self.dump_payload(header, obj))
def loads(self, s, salt=None, return_header=False):
"""Reverse of :meth:`dumps`. If requested via `return_header` it will
return a tuple of payload and header.
"""
payload, header = self.load_payload(
self.make_signer(salt, self.algorithm).unsign(want_bytes(s)),
return_header=True)
if header.get('alg') != self.algorithm_name:
raise BadSignature('Algorithm mismatch')
if return_header:
return payload, header
return payload
def loads_unsafe(self, s, salt=None, return_header=False):
kwargs = {'return_header': return_header}
return self._loads_unsafe_impl(s, salt, kwargs, kwargs)
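# A JSONWebSignatureSerializer sketch (hypothetical key; the token is the
# usual three dot-separated base64 segments):
#
#     s = JSONWebSignatureSerializer('secret-key')
#     token = s.dumps({'id': 42})         # '<header>.<payload>.<signature>'
#     s.loads(token)                      # {'id': 42}
#     s.loads(token, return_header=True)  # ({'id': 42}, {'alg': 'HS256'})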
class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
"""Works like the regular :class:`JSONWebSignatureSerializer` but also
records the time of the signing and can be used to expire signatures.
    JWS currently does not specify this behavior but it mentions a possible
    extension like this in the spec. The expiry date is encoded into the
    header similarly to what is specified in `draft-ietf-oauth-json-web-token
    <http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html#expDef>`_.
    The unsign method can raise a :exc:`SignatureExpired` exception if the
    unsigning failed because the signature has expired. This exception is a
subclass of :exc:`BadSignature`.
"""
DEFAULT_EXPIRES_IN = 3600
def __init__(self, secret_key, expires_in=None, **kwargs):
JSONWebSignatureSerializer.__init__(self, secret_key, **kwargs)
if expires_in is None:
expires_in = self.DEFAULT_EXPIRES_IN
self.expires_in = expires_in
def make_header(self, header_fields):
header = JSONWebSignatureSerializer.make_header(self, header_fields)
iat = self.now()
exp = iat + self.expires_in
header['iat'] = iat
header['exp'] = exp
return header
def loads(self, s, salt=None, return_header=False):
payload, header = JSONWebSignatureSerializer.loads(
self, s, salt, return_header=True)
if 'exp' not in header:
raise BadSignature('Missing expiry date', payload=payload)
if not (isinstance(header['exp'], number_types)
and header['exp'] > 0):
raise BadSignature('expiry date is not an IntDate',
payload=payload)
if header['exp'] < self.now():
raise SignatureExpired('Signature expired', payload=payload,
date_signed=self.get_issue_date(header))
if return_header:
return payload, header
return payload
def get_issue_date(self, header):
rv = header.get('iat')
if isinstance(rv, number_types):
return datetime.utcfromtimestamp(int(rv))
def now(self):
return int(time.time())
class URLSafeSerializerMixin(object):
"""Mixed in with a regular serializer it will attempt to zlib compress
the string to make it shorter if necessary. It will also base64 encode
the string so that it can safely be placed in a URL.
"""
def load_payload(self, payload):
decompress = False
if payload.startswith(b'.'):
payload = payload[1:]
decompress = True
try:
json = base64_decode(payload)
except Exception as e:
raise BadPayload('Could not base64 decode the payload because of '
'an exception', original_error=e)
if decompress:
try:
json = zlib.decompress(json)
except Exception as e:
raise BadPayload('Could not zlib decompress the payload before '
'decoding the payload', original_error=e)
return super(URLSafeSerializerMixin, self).load_payload(json)
def dump_payload(self, obj):
json = super(URLSafeSerializerMixin, self).dump_payload(obj)
is_compressed = False
compressed = zlib.compress(json)
if len(compressed) < (len(json) - 1):
json = compressed
is_compressed = True
base64d = base64_encode(json)
if is_compressed:
base64d = b'.' + base64d
return base64d
class URLSafeSerializer(URLSafeSerializerMixin, Serializer):
"""Works like :class:`Serializer` but dumps and loads into a URL
safe string consisting of the upper and lowercase character of the
alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
"""
default_serializer = compact_json
class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer):
"""Works like :class:`TimedSerializer` but dumps and loads into a URL
safe string consisting of the upper and lowercase character of the
alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
"""
default_serializer = compact_json
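# A URL-safe timed token sketch, e.g. for links sent in emails (the key and
# max_age below are hypothetical):
#
#     s = URLSafeTimedSerializer('secret-key')
#     token = s.dumps({'user_id': 5})
#     s.loads(token, max_age=3600)  # {'user_id': 5} within an hour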
| apache-2.0 |
amanuel/bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Tool/ifort.py | 61 | 3350 | """SCons.Tool.ifort
Tool-specific initialization for newer versions of the Intel Fortran Compiler
for Linux/Windows (and possibly Mac OS X).
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/ifort.py 5134 2010/08/16 23:02:40 bdeegan"
import SCons.Defaults
from SCons.Scanner.Fortran import FortranScan
from FortranCommon import add_all_to_env
def generate(env):
"""Add Builders and construction variables for ifort to an Environment."""
# ifort supports Fortran 90 and Fortran 95
# Additionally, ifort recognizes more file extensions.
fscan = FortranScan("FORTRANPATH")
SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
if 'FORTRANFILESUFFIXES' not in env:
env['FORTRANFILESUFFIXES'] = ['.i']
else:
env['FORTRANFILESUFFIXES'].append('.i')
if 'F90FILESUFFIXES' not in env:
env['F90FILESUFFIXES'] = ['.i90']
else:
env['F90FILESUFFIXES'].append('.i90')
add_all_to_env(env)
fc = 'ifort'
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
env['%s' % dialect] = fc
env['SH%s' % dialect] = '$%s' % dialect
if env['PLATFORM'] == 'posix':
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
if env['PLATFORM'] == 'win32':
# On Windows, the ifort compiler specifies the object on the
# command line with -object:, not -o. Massage the necessary
# command-line construction variables.
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
for var in ['%sCOM' % dialect, '%sPPCOM' % dialect,
'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]:
env[var] = env[var].replace('-o $TARGET', '-object:$TARGET')
env['FORTRANMODDIRPREFIX'] = "/module:"
else:
env['FORTRANMODDIRPREFIX'] = "-module "
def exists(env):
return env.Detect('ifort')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
piniGitHUB/volatility | volatility/plugins/addrspaces/crash.py | 44 | 3315 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
# Copyright (C) 2005,2006,2007 4tphi Research
#
# Authors:
# {npetroni,awalters}@4tphi.net (Nick Petroni and AAron Walters)
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
""" An AS for processing crash dumps """
import struct
import volatility.obj as obj
import volatility.addrspace as addrspace
#pylint: disable-msg=C0111
page_shift = 12
class WindowsCrashDumpSpace32(addrspace.AbstractRunBasedMemory):
""" This AS supports windows Crash Dump format """
order = 30
dumpsig = 'PAGEDUMP'
headertype = "_DMP_HEADER"
headerpages = 1
def __init__(self, base, config, **kwargs):
## We must have an AS below us
self.as_assert(base, "No base Address Space")
addrspace.AbstractRunBasedMemory.__init__(self, base, config, **kwargs)
## Must start with the magic PAGEDUMP
self.as_assert((base.read(0, 8) == self.dumpsig), "Header signature invalid")
self.as_assert(self.profile.has_type(self.headertype), self.headertype + " not available in profile")
self.header = obj.Object(self.headertype, 0, base)
offset = self.headerpages
for x in self.header.PhysicalMemoryBlockBuffer.Run:
self.runs.append((x.BasePage.v() * 0x1000,
offset * 0x1000,
x.PageCount.v() * 0x1000))
offset += x.PageCount.v()
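        # Each run is (physical address, file offset, length) in bytes; e.g.
        # a hypothetical run of 16 pages at physical page 0x100, stored right
        # after the one-page header, becomes (0x100000, 0x1000, 0x10000).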
self.dtb = self.header.DirectoryTableBase.v()
def get_header(self):
return self.header
def get_base(self):
return self.base
def write(self, phys_addr, buf):
"""This is mostly for support of raw2dmp so that
it can modify the kernel CONTEXT after the crash
dump has been written to disk"""
if not self._config.WRITE:
return False
file_addr = self.translate(phys_addr)
if file_addr is None:
return False
return self.base.write(file_addr, buf)
def read_long(self, addr):
_baseaddr = self.translate(addr)
string = self.read(addr, 4)
if not string:
return obj.NoneObject("Could not read data at " + str(addr))
(longval,) = struct.unpack('=I', string)
return longval
def get_available_addresses(self):
""" This returns the ranges of valid addresses """
for run in self.runs:
yield (run[0], run[2])
def close(self):
self.base.close()
class WindowsCrashDumpSpace64(WindowsCrashDumpSpace32):
""" This AS supports windows Crash Dump format """
order = 30
dumpsig = 'PAGEDU64'
headertype = "_DMP_HEADER64"
headerpages = 2
| gpl-2.0 |
maurizi/nyc-trees | src/nyc_trees/apps/survey/views.py | 1 | 24867 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import os
import json
import shortuuid
from pytz import timezone
from celery import chain
from django.conf import settings
from django.contrib.gis.geos import Point
from django.core.exceptions import ValidationError, PermissionDenied
from django.core.urlresolvers import reverse
from django.db import transaction, connection
from django.db.models import Q
from django.http import (HttpResponse, HttpResponseForbidden,
HttpResponseBadRequest)
from django.shortcuts import get_object_or_404, redirect
from django.utils.timezone import now
from django.utils.html import escape
from apps.core.models import User, Group
from apps.core.helpers import (user_is_group_admin, user_is_individual_mapper,
user_is_census_admin)
from apps.event.models import Event
from apps.event.helpers import (user_is_checked_in_to_event,
user_is_rsvped_for_event)
from apps.mail.tasks import notify_reservation_confirmed
from libs.pdf_maps import create_reservations_map_pdf
from apps.users import can_show_full_name
from apps.users.models import TrustedMapper
from apps.survey.models import (BlockfaceReservation, Blockface, Territory,
Survey, Tree, Species, CURB_CHOICES,
STATUS_CHOICES, CERTAINTY_CHOICES,
HEALTH_CHOICES, STEWARDSHIP_CHOICES,
GUARD_CHOICES, SIDEWALK_CHOICES,
PROBLEMS_CHOICES)
from apps.survey.layer_context import (
get_context_for_reservations_layer, get_context_for_reservable_layer,
get_context_for_progress_layer, get_context_for_territory_survey_layer,
get_context_for_printable_reservations_layer,
get_context_for_group_progress_layer, get_context_for_user_progress_layer,
get_context_for_borough_progress_layer, get_context_for_nta_progress_layer
)
from apps.survey.helpers import group_percent_completed
from libs.pdf_maps import create_and_save_pdf
_SURVEY_DETAIL_QUERY_FILE = os.path.join(os.path.dirname(__file__),
'survey_detail.sql')
with open(_SURVEY_DETAIL_QUERY_FILE, 'r') as f:
_SURVEY_DETAIL_QUERY = f.read()
def progress_page(request):
context = {
'legend_entries': [
{'mode': 'all', 'css_class': 'mapped', 'label': 'Mapped'},
{'mode': 'all', 'css_class': 'not-mapped', 'label': 'Not mapped'},
{'mode': 'all', 'css_class': 'unmappable',
'label': 'Could not be mapped'},
{'mode': 'my', 'css_class': 'mapped', 'label': 'Mapped by you'},
{'mode': 'my', 'css_class': 'not-mapped',
'label': 'Not mapped by you'},
{'mode': 'my', 'css_class': 'unmappable',
'label': 'Could not be mapped'},
{'mode': 'group', 'css_class': 'mapped',
'label': 'Mapped by this group'},
{'mode': 'group', 'css_class': 'not-mapped',
'label': 'Not mapped'},
{'mode': 'group', 'css_class': 'unmappable',
'label': 'Could not be mapped'},
],
'percentage_ramps': range(0, 100, 10),
'legend_mode': 'all-percent',
'layer_all': get_context_for_progress_layer(),
'layer_all_nta': get_context_for_nta_progress_layer(),
'layer_all_borough': get_context_for_borough_progress_layer(),
'help_shown': _was_help_shown(request, 'progress_page_help_shown')
}
user = request.user
if user.is_authenticated():
context['layer_my'] = get_context_for_user_progress_layer(request)
blocks = (user.surveys.distinct('blockface')
.values_list('blockface_id', flat=True))
if len(blocks) > 0:
blockfaces = Blockface.objects.filter(id__in=blocks).collect()
context['my_bounds'] = list(blockfaces.extent)
return context
def _was_help_shown(request, help_shown_attr):
"""
help_shown_attr is a user attribute specifying whether help has been
shown on a particular page. We also use it as a session attribute
for non-logged-in users.
Calling this function returns the current attribute value, and also
sets it to True (so it will only be shown once).
"""
help_shown = request.session.get(help_shown_attr, False)
request.session[help_shown_attr] = True
user = request.user
if user.is_authenticated():
user_help_shown = getattr(user, help_shown_attr)
help_shown = help_shown or user_help_shown
if not user_help_shown:
setattr(user, help_shown_attr, True)
user.save()
return help_shown
def progress_page_blockface_popup(request, blockface_id):
blockface = get_object_or_404(Blockface, id=blockface_id)
turf = Territory.objects.filter(blockface_id=blockface_id)
groups = Group.objects.filter(pk=turf.values_list('group_id', flat=True))
group = groups[0] if len(groups) else None
is_active = (group is None or group.is_active or
user_is_group_admin(request.user, group))
survey_type = _get_survey_type(blockface, request.user, group)
return {
'survey_type': survey_type,
'group': group,
'is_active': is_active
}
def _get_survey_type(blockface, user, group):
if user.is_authenticated():
reserved_by_user = BlockfaceReservation.objects \
.filter(blockface=blockface, user=user).current().exists()
if reserved_by_user:
return 'reserved'
try:
latest_survey = Survey.objects \
.filter(blockface=blockface) \
.latest('created_at')
if latest_survey.quit_reason:
return 'unmappable'
if user.is_authenticated() and user.pk in {
latest_survey.user_id, latest_survey.teammate_id}:
return 'surveyed-by-me'
else:
return 'surveyed-by-others'
except Survey.DoesNotExist:
pass
if group is None and blockface.is_available:
return 'available'
return 'unavailable'
def _query_reservation(user, blockface_id):
return BlockfaceReservation.objects \
.filter(blockface_id=blockface_id, user=user) \
.current()
def blockface_cart_page(request):
ids_str = request.POST.get('ids', None)
ids = ids_str.split(',') if ids_str else []
cancelled_reservations = _get_reservations_to_cancel(ids, request.user)
already_reserved_ids = _already_reserved_blockface_ids(ids)
return {
'blockface_ids': request.POST['ids'],
'num_reserved': len(ids) - already_reserved_ids.count(),
'num_cancelled': cancelled_reservations.count()
}
def user_reserved_blockfaces_geojson(request):
reservations = BlockfaceReservation.objects \
.select_related('blockface') \
.filter(user=request.user) \
.current()
est_tz = timezone('US/Eastern')
def get_formatted_expiration_date(reservation):
dt = reservation.expires_at.astimezone(est_tz)
return dt.strftime('%b %-d, %Y')
return [
{
'type': 'Feature',
'geometry': {
'type': 'MultiLineString',
'coordinates': reservation.blockface.geom.coords
},
'properties': {
'id': reservation.blockface.id,
'expires_at': get_formatted_expiration_date(reservation)
}
}
for reservation in reservations
]
def group_borders_geojson(request):
groups = Group.objects.filter(is_active=True)
base_group_layer_context = get_context_for_group_progress_layer()
base_group_tile_url = base_group_layer_context['tile_url']
base_group_grid_url = base_group_layer_context['grid_url']
return [
{
'type': 'Feature',
'geometry': {
'type': 'MultiPolygon',
'coordinates': list(group.border.coords)
},
'properties': {
'tileUrl': '%s?group=%s' % (base_group_tile_url, group.id),
'gridUrl': '%s?group=%s' % (base_group_grid_url, group.id),
'popupUrl': reverse('group_popup',
kwargs={'group_slug': group.slug}),
'bounds': group.border.extent
}
}
for group in groups
if group.border
]
def group_popup(request):
return {
'group': request.group,
'completed': group_percent_completed(request.group)
}
def reservations_map_pdf_poll(request):
# Update reservations map PDF if reservations have changed
user = request.user
reservation_ids = _reservation_ids(user)
if reservation_ids != user.reservation_ids_in_map_pdf:
create_reservations_map_pdf(request, reservation_ids)
url = request.user.reservations_map_pdf_url
if url:
return {'map_pdf_url': url}
else:
return {}
def _reservation_ids(user):
reservation_ids = BlockfaceReservation.objects \
.filter(user=user) \
.current() \
.order_by('id') \
.values_list('id', flat=True)
reservation_ids = ','.join(str(x) for x in reservation_ids)
return reservation_ids
def printable_reservations_page(request):
blockfaces = _user_reservations(request.user)
endpoints = set()
for blockface in blockfaces:
coords = blockface.geom.coords[0]
endpoints.add(coords[0])
endpoints.add(coords[-1])
# make list of [lat,lng] coordinates for leaflet convenience
endpoints = [[e[1], e[0]] for e in endpoints]
return {
'layer': get_context_for_printable_reservations_layer(request),
'bounds': list(blockfaces.collect().extent) if blockfaces else None,
'endpoints': endpoints
}
def _user_reservation_bounds(user):
blockfaces = _user_reservations(user)
return list(blockfaces.collect().extent) if blockfaces else None
def _user_reservations(user):
reservations = BlockfaceReservation.objects \
.filter(user=user) \
.current() \
.values_list('blockface_id', flat=True)
blockfaces = Blockface.objects.filter(id__in=reservations)
return blockfaces
def reserve_blockfaces_page(request):
if not user_is_individual_mapper(request.user):
return redirect('reservations_instructions')
current_reservations_amount = BlockfaceReservation.objects \
.filter(user=request.user) \
.current() \
.count()
return {
'reservations': {
'current': current_reservations_amount,
'total': settings.RESERVATIONS_LIMIT
},
'layer': get_context_for_reservable_layer(request),
'bounds': _user_reservation_bounds(request.user),
'legend_entries': [
{'css_class': 'available', 'label': 'Available'},
{'css_class': 'unavailable', 'label': 'Unavailable'},
{'css_class': 'reserved', 'label': 'Reserved by you'},
{'css_class': 'in-cart', 'label': 'In your cart'},
],
'help_shown': _was_help_shown(request, 'reservations_page_help_shown')
}
@transaction.atomic
def confirm_blockface_reservations(request):
id_string = request.POST['ids']
ids = id_string.split(',')
# Filter empty strings
ids = filter(None, ids)
is_mapping_with_paper = \
request.POST.get('is_mapping_with_paper', 'False') == 'True'
blockfaces = Blockface.objects \
.filter(id__in=ids) \
.select_related('territory')
user_trusted_group_ids = TrustedMapper.objects \
.filter(user=request.user, is_approved=True) \
.values_list('group_id', flat=True)
user_admin_group_ids = Group.objects \
.filter(admin=request.user) \
.values_list('id', flat=True)
already_reserved_blockface_ids = _already_reserved_blockface_ids(ids)
right_now = now()
expiration_date = right_now + settings.RESERVATION_TIME_PERIOD
new_reservations = []
for blockface in blockfaces:
territory = _get_territory(blockface)
if ((blockface.is_available and
blockface.id not in already_reserved_blockface_ids and
(territory is None or user_is_census_admin(request.user) or
territory.group_id in user_trusted_group_ids or
territory.group_id in user_admin_group_ids))):
new_reservations.append(BlockfaceReservation(
blockface=blockface,
user=request.user,
is_mapping_with_paper=is_mapping_with_paper,
expires_at=expiration_date
))
cancelled_reservations = _get_reservations_to_cancel(ids, request.user)
num_cancelled = cancelled_reservations.count()
cancelled_reservations.update(canceled_at=right_now, updated_at=right_now)
# Workaround for Django limitation which prevents us from obtaining
# primary keys for objects created in bulk.
for reservation in new_reservations:
reservation.save()
reservation_ids = [r.id for r in new_reservations]
filename = "reservations_map/%s_%s.pdf" % (
request.user.username, shortuuid.uuid())
request.user.reservations_map_pdf_filename = filename
request.user.clean_and_save()
url = reverse('printable_reservations_map')
host = request.get_host()
if hasattr(request, 'session'): # prevent test failure
session_id = request.session.session_key
chain(create_and_save_pdf.s(session_id, host, url, filename),
notify_reservation_confirmed.s(request.user.id,
reservation_ids)) \
.apply_async()
num_reserved = len(new_reservations)
return {
'n_requested': len(ids) - len(already_reserved_blockface_ids),
'n_reserved': num_reserved,
'n_cancelled': num_cancelled,
'expiration_date': expiration_date
}
def _get_territory(blockface):
try:
return blockface.territory
except Territory.DoesNotExist:
return None
def _already_reserved_blockface_ids(ids):
return BlockfaceReservation.objects \
.filter(blockface__id__in=ids) \
.current() \
.values_list('blockface_id', flat=True)
def _get_reservations_to_cancel(ids, user):
# Whatever blockface IDs were not submitted, should be cancelled
return BlockfaceReservation.objects \
.filter(user=user) \
.exclude(blockface__id__in=ids) \
.current()
def _blockface_context(blockface):
return {
'id': blockface.id,
'extent': blockface.geom.extent,
'geojson': blockface.geom.geojson
}
def blockface(request, blockface_id):
blockface = get_object_or_404(Blockface, id=blockface_id)
return _blockface_context(blockface)
def blockface_near_point(request):
p = Point(float(request.GET.get('lng', 0)),
float(request.GET.get('lat', 0)),
srid=4326)
# The size of the distance filter was chosen through trial and
# error by testing tap precision on a mobile device
qs = Blockface.objects.filter(geom__dwithin=(p, 0.0002))\
.distance(p)\
.order_by('distance')
blockfaces = qs[:1] # We only want the closest blockface
if blockfaces:
return _blockface_context(blockfaces[0])
else:
return {
'error': 'Block edge not found near lat:%f lon:%f' % (p.y, p.x)
}
def _validate_event_and_group(request, event_slug):
event = get_object_or_404(Event, group=request.group, slug=event_slug)
if not user_is_checked_in_to_event(request.user, event):
raise PermissionDenied('User not checked-in to this event')
return event
def start_survey(request):
reservations_for_user = (
BlockfaceReservation.objects.remaining_for(request.user))
return {
'layer': get_context_for_reservations_layer(request),
'bounds': _user_reservation_bounds(request.user),
'choices': _get_survey_choices(),
'no_more_reservations': reservations_for_user <= 1,
'geolocate_help_shown': _was_help_shown(request,
'survey_geolocate_help_shown'),
}
def start_survey_from_event(request, event_slug):
group = request.group
event = get_object_or_404(Event, group=request.group, slug=event_slug)
if not user_is_rsvped_for_event(request.user, event):
raise PermissionDenied('User not checked-in to this event')
if not user_is_checked_in_to_event(request.user, event):
return redirect('event_user_check_in_page',
group_slug=event.group.slug, event_slug=event.slug)
if not event.is_mapping_allowed():
return HttpResponseForbidden('Event not currently in-progress')
return {
'layer': get_context_for_territory_survey_layer(group.id),
'location': [event.location.y, event.location.x],
'choices': _get_survey_choices(),
'geolocate_help_shown': _was_help_shown(request,
'survey_geolocate_help_shown'),
}
def teammates_for_mapping(request):
query = request.GET.get('q', None)
users = User.objects.exclude(id=request.user.id) \
.filter(is_active=True) \
.order_by('username')
if query:
users = users.filter(
Q(username__icontains=query) |
(Q(real_name_is_public=True) & (Q(first_name__icontains=query) |
Q(last_name__icontains=query))))
return [_teammate_user_context(u) for u in users]
def _teammate_user_context(user):
if can_show_full_name(user):
text = "{} — {} {}".format(
escape(user.username), escape(user.first_name),
escape(user.last_name))
else:
text = user.username
return {
"id": user.id,
"text": text.strip()
}
def _get_survey_choices():
# NOTE: "No Problems" is handled in the template
grouped_problem_choices = [choice for choice in PROBLEMS_CHOICES
if isinstance(choice[1], tuple)]
guard_installation_choices = (('No', 'Not installed'),
('Yes', 'Installed'))
guard_helpfulness_choices = [choice for choice in GUARD_CHOICES
if choice[0] != 'None']
species_choices = Species.objects.all()
return {
'curb_location': CURB_CHOICES,
'status': STATUS_CHOICES,
'species': species_choices,
'species_certainty': CERTAINTY_CHOICES,
'health': HEALTH_CHOICES,
'stewardship': STEWARDSHIP_CHOICES,
'guard_installation': guard_installation_choices,
'guards': guard_helpfulness_choices,
'sidewalk_damage': SIDEWALK_CHOICES,
'problem_groups': grouped_problem_choices,
}
def submit_survey(request):
ctx = {}
# _create_survey_and_trees returns either a dict or an HttpResponse
create_result = _create_survey_and_trees(request)
if isinstance(create_result, HttpResponse):
return create_result
else:
ctx.update(create_result)
return ctx
def submit_survey_from_event(request, event_slug):
event = _validate_event_and_group(request, event_slug)
return _create_survey_and_trees(request, event)
def _mark_survey_blockface_availability(survey, availability):
if not isinstance(availability, bool):
raise ValidationError('availability arg must be a boolean value')
survey.blockface.is_available = availability
survey.blockface.full_clean()
survey.blockface.save()
def restart_blockface(request, survey_id):
survey = get_object_or_404(Survey, id=survey_id, user=request.user)
survey.submit_comment = request.POST.get('comment', '')
survey.full_clean()
survey.save()
_mark_survey_blockface_availability(survey, True)
expiration_date = now() + settings.RESERVATION_TIME_PERIOD
BlockfaceReservation.objects.create(blockface=survey.blockface,
user=request.user,
expires_at=expiration_date)
return {'success': True}
@transaction.atomic
def _create_survey_and_trees(request, event=None):
"""
Creates survey and trees from JSON body, where k1... are model attrs: {
survey: { k1:v1, ... },
trees: [
{ k1:v1, ...},
...
]
}
trees.problems should be a list of problem codes -- ["Stones", "Sneakers"]
"""
data = json.loads(request.body)
survey_data = data['survey']
tree_list = data.get('trees', [])
survey = Survey(user=request.user, **survey_data)
if survey.has_trees and len(tree_list) == 0:
return HttpResponseBadRequest('Trees expected but absent')
if not survey.has_trees and len(tree_list) > 0:
return HttpResponseBadRequest('Trees not expected but present')
blockface = survey.blockface
if event:
territory = _get_territory(blockface)
if territory is None or territory.group_id != event.group_id:
return HttpResponseForbidden(
"Blockface is not in group's territory.")
else:
if not _query_reservation(request.user, blockface.id).exists():
return HttpResponseForbidden(
'You have not reserved this block edge.')
survey.full_clean()
survey.save()
_mark_survey_blockface_availability(survey, False)
for tree_data in tree_list:
if 'problems' in tree_data:
tree_data['problems'] = ','.join(tree_data['problems'])
# Convert any floats into integers for all the integer fields in Tree
for field in ('circumference', 'stump_diameter'):
if field in tree_data:
tree_data[field] = int(round(float(tree_data[field])))
tree = Tree(survey=survey, **tree_data)
tree.clean_and_save()
return {'survey_id': survey.id}
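# Illustrative request body for _create_survey_and_trees (a hypothetical
# sketch; values are examples, and only field names that appear in the
# surrounding code are used):
#
#     {
#         "survey": {"blockface_id": 42, "has_trees": true},
#         "trees": [
#             {"circumference": 12.4, "problems": ["Stones", "Sneakers"]}
#         ]
#     }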
def flag_survey(request, survey_id):
survey = get_object_or_404(Survey, id=survey_id, user=request.user)
comment = request.POST.get('comment', None)
if comment:
survey.submit_comment = comment
survey.is_flagged = True
survey.full_clean()
survey.save()
return {'success': True}
else:
return HttpResponseBadRequest("The 'comment' field is required when "
"flagging surveys for review")
def _survey_detail(request, survey_id):
survey = Survey.objects.get(id=survey_id)
with connection.cursor() as cursor:
cursor.execute(_SURVEY_DETAIL_QUERY, [survey_id])
trees = [tree[0] for tree in cursor]
return {
'survey_id': survey_id,
'blockface_id': survey.blockface_id,
'trees': json.dumps(trees),
'bounds': list(survey.blockface.geom.extent),
}
def survey_detail_from_event(request, event_slug, survey_id):
_validate_event_and_group(request, event_slug)
return _survey_detail(request, survey_id)
def survey_detail(request, survey_id):
ctx = _survey_detail(request, survey_id)
reservations_for_user = (
BlockfaceReservation.objects.remaining_for(request.user))
ctx.update({'no_more_reservations': reservations_for_user == 0})
return ctx
def admin_territory_page(request):
groups = Group.objects.all().order_by('name')
context = {
'groups': groups,
'legend_entries': [
{'css_class': 'available', 'label': 'Available'},
{'css_class': 'reserved',
'label': "This group's unmapped territory"},
{'css_class': 'unavailable',
'label': "Others' unmapped territory/reservations"},
{'css_class': 'surveyed-by-me', 'label': 'Mapped by this group'},
{'css_class': 'surveyed-by-others', 'label': 'Mapped by others'},
{'css_class': 'selected',
'label': 'Currently-selected block edges'},
]
}
return context
def reservations_instructions(request):
user = request.user
step1_complete = user.online_training_complete
step2_complete = step1_complete and user.field_training_complete
step3_complete = step2_complete and user.attended_at_least_two_events
step4_complete = step3_complete and user.individual_mapper is not None
return {
'step1_complete': step1_complete,
'step2_complete': step2_complete,
'step3_complete': step3_complete,
'step4_complete': step4_complete,
}
| agpl-3.0 |
dnstap/knot | tests-extra/tools/dnstest/response.py | 1 | 9292 | #!/usr/bin/env python3
import binascii
import dns.name
from dnstest.utils import *
class Response(object):
'''Dig output context.'''
def __init__(self, server, response, args):
self.resp = response
self.args = args
self.srv = server
self.rname = dns.name.from_text(self.args["rname"])
if type(self.args["rtype"]) is str:
self.rtype = dns.rdatatype.from_text(self.args["rtype"])
else:
self.rtype = self.args["rtype"]
if type(self.args["rclass"]) is str:
self.rclass = dns.rdataclass.from_text(self.args["rclass"])
else:
self.rclass = self.args["rclass"]
def _check_question(self):
question = self.resp.question[0]
compare(question.name, self.rname, "QNAME")
compare(question.rdclass, self.rclass, "QCLASS")
compare(question.rdtype, self.rtype, "QTYPE")
def _check_flags(self, flags, noflags):
flag_names = flags.split()
for flag in flag_names:
flag_val = dns.flags.from_text(flag)
isset(self.resp.flags & flag_val, "%s FLAG" % flag)
flag_names = noflags.split()
for flag in flag_names:
flag_val = dns.flags.from_text(flag)
isset(not(self.resp.flags & flag_val), "NO %s FLAG" % flag)
def _check_eflags(self, eflags, noeflags):
eflag_names = eflags.split()
for flag in eflag_names:
flag_val = dns.flags.edns_from_text(flag)
isset(self.resp.ednsflags & flag_val, "%s FLAG" % flag)
eflag_names = noeflags.split()
for flag in eflag_names:
flag_val = dns.flags.edns_from_text(flag)
isset(not(self.resp.ednsflags & flag_val), "NO %s FLAG" % flag)
def check_record(self, section="answer", rtype=None, ttl=None, rdata=None,
nordata=None):
'''Checks given section for particular record/rdata'''
if not rtype:
rtype = self.rtype
elif type(rtype) is str:
rtype = dns.rdatatype.from_text(rtype)
if section == "answer":
sect = self.resp.answer
elif section == "additional":
sect = self.resp.additional
elif section == "authority":
sect = self.resp.authority
# Check rdata presence.
if rdata:
# We work with just one rdata with TTL=0 (this TTL is not used).
rrset = dns.rdataset.from_text(self.rclass, rtype, 0, rdata)
ref = str(list(rrset)[0])
# Check whether the section contains the reference rdata.
for data in sect:
for rd in data.to_rdataset():
# Compare Rdataset instances.
if str(rd) == ref:
# Check CLASS.
compare(data.rdclass, self.rclass, "CLASS")
# Check TYPE.
compare(data.rdtype, rtype, "TYPE")
# Check TTL if specified.
if ttl != None:
compare(data.ttl, int(ttl), "TTL")
return
else:
set_err("CHECK RDATA")
check_log("ERROR: CHECK RDATA")
detail_log("!Missing data in %s section:" % section)
detail_log(" %s" % ref)
detail_log(SEP)
# Check rdata absence.
if nordata:
# We work with just one rdata with TTL=0 (this TTL is not used).
rrset = dns.rdataset.from_text(self.rclass, rtype, 0, nordata)
ref = str(list(rrset)[0])
# Check whether the section contains the reference rdata.
for data in sect:
for rd in data.to_rdataset():
# Compare Rdataset instances.
if str(rd) == ref and data.rdtype == rtype:
set_err("CHECK RDATA")
check_log("ERROR: CHECK RDATA")
detail_log("!Unwanted data in %s section:" % section)
detail_log(" %s" % ref)
detail_log(SEP)
return
def check(self, rdata=None, ttl=None, rcode="NOERROR", nordata=None,
flags="", noflags="", eflags="", noeflags=""):
'''Flags are text strings separated by whitespace character'''
self._check_flags(flags, noflags)
self._check_eflags(eflags, noeflags)
self._check_question()
# Check rcode.
if type(rcode) is not str:
rc = dns.rcode.to_text(rcode)
else:
rc = rcode
compare(dns.rcode.to_text(self.resp.rcode()), rc, "RCODE")
# Check rdata only if NOERROR.
if rc == "NOERROR":
self.check_record(section="answer", rtype=self.rtype, ttl=ttl,
rdata=rdata, nordata=nordata)
def check_edns(self, nsid=None, buff_size=None):
compare(self.resp.edns, 0, "EDNS VERSION")
options = 1 if nsid != None else 0
compare(len(self.resp.options), options, "NUMBER OF EDNS0 OPTIONS")
if options > 0:
option = list(self.resp.options)[0]
compare(option.otype, dns.edns.NSID, "OPTION TYPE")
if nsid[:2] == "0x":
compare(binascii.hexlify(option.data).decode('ascii'),
nsid[2:], "HEX NSID")
else:
compare(option.data.decode('ascii'), nsid, "TXT NSID")
def diff(self, resp, flags=True, answer=True, authority=True,
additional=True):
'''Compares specified response sections against another response'''
if flags:
compare(dns.flags.to_text(self.resp.flags),
dns.flags.to_text(resp.resp.flags), "FLAGS")
compare(dns.flags.edns_to_text(self.resp.ednsflags),
dns.flags.edns_to_text(resp.resp.ednsflags), "EDNS FLAGS")
if answer:
compare_sections(self.resp.answer, self.srv.name,
resp.resp.answer, resp.srv.name,
"ANSWER")
if authority:
compare_sections(self.resp.authority, self.srv.name,
resp.resp.authority, resp.srv.name,
"AUTHORITY")
if additional:
compare_sections(self.resp.additional, self.srv.name,
resp.resp.additional, resp.srv.name,
"ADDITIONAL")
def cmp(self, server, flags=True, answer=True, authority=True,
additional=True):
'''Asks server for the same question and compares specified sections'''
resp = server.dig(**self.args)
self.diff(resp, flags, answer, authority, additional)
def count(self, rtype=None, section="answer"):
'''Returns number of records of given type in specified section'''
if not rtype:
rtype = self.rtype
elif type(rtype) is str:
rtype = dns.rdatatype.from_text(rtype)
if not section or section == "answer":
sect = self.resp.answer
elif section == "additional":
sect = self.resp.additional
elif section == "authority":
sect = self.resp.authority
cnt = 0
for rrset in sect:
if rrset.rdtype == rtype or rtype == dns.rdatatype.ANY:
cnt += len(rrset)
return cnt
def check_nsec(self, nsec3=False, nonsec=False):
'''Checks if the response contains NSEC(3) records.'''
nsec_rrs = list()
nsec3_rrs = list()
for data in self.resp.authority:
rrset = data.to_rdataset()
records = data.to_text().split("\n")
if rrset.rdtype == dns.rdatatype.NSEC:
nsec_rrs.extend(records)
elif rrset.rdtype == dns.rdatatype.NSEC3:
nsec3_rrs.extend(records)
if nonsec:
if nsec_rrs or nsec3_rrs:
set_err("CHECK NSEC(3) ABSENCE")
check_log("ERROR: CHECK NSEC(3) ABSENCE")
detail_log("!Unexpected records:")
for rr in nsec_rrs + nsec3_rrs:
detail_log(" %s" % rr)
detail_log(SEP)
return
if nsec3:
if not nsec3_rrs:
set_err("CHECK NSEC3 PRESENCE")
check_log("ERROR: CHECK NSEC3 PRESENCE")
detail_log(SEP)
if nsec_rrs:
set_err("CHECK NSEC3")
check_log("ERROR: CHECK NSEC3")
detail_log("!Unexpected records:")
for rr in nsec_rrs:
detail_log(" %s" % rr)
detail_log(SEP)
else:
if not nsec_rrs:
set_err("CHECK NSEC PRESENCE")
check_log("ERROR: CHECK NSEC PRESENCE")
detail_log(SEP)
if nsec3_rrs:
set_err("CHECK NSEC")
check_log("ERROR: CHECK NSEC")
detail_log("!Unexpected records:")
for rr in nsec3_rrs:
detail_log(" %s" % rr)
detail_log(SEP)
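# Hypothetical usage sketch (the server object and its dig() helper come
# from the surrounding dnstest framework; names below are illustrative):
#
#     resp = server.dig("example.com.", "SOA")
#     resp.check(rcode="NOERROR", flags="QR AA", noflags="TC")
#     resp.check_record(section="authority", rtype="NS",
#                       rdata="ns1.example.com.")
#     assert resp.count("NS", section="authority") >= 1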
| gpl-3.0 |
40223123/finaltest2 | static/Brython3.1.1-20150328-091302/Lib/abc.py | 765 | 8057 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) according to PEP 3119."""
from _weakrefset import WeakSet
def abstractmethod(funcobj):
"""A decorator indicating abstract methods.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract methods are overridden.
The abstract methods can be called using any of the normal
'super' call mechanisms.
Usage:
class C(metaclass=ABCMeta):
@abstractmethod
def my_abstract_method(self, ...):
...
"""
funcobj.__isabstractmethod__ = True
return funcobj
class abstractclassmethod(classmethod):
"""
A decorator indicating abstract classmethods.
Similar to abstractmethod.
Usage:
class C(metaclass=ABCMeta):
@abstractclassmethod
def my_abstract_classmethod(cls, ...):
...
'abstractclassmethod' is deprecated. Use 'classmethod' with
'abstractmethod' instead.
"""
__isabstractmethod__ = True
def __init__(self, callable):
callable.__isabstractmethod__ = True
super().__init__(callable)
class abstractstaticmethod(staticmethod):
"""
A decorator indicating abstract staticmethods.
Similar to abstractmethod.
Usage:
class C(metaclass=ABCMeta):
@abstractstaticmethod
def my_abstract_staticmethod(...):
...
'abstractstaticmethod' is deprecated. Use 'staticmethod' with
'abstractmethod' instead.
"""
__isabstractmethod__ = True
def __init__(self, callable):
callable.__isabstractmethod__ = True
super().__init__(callable)
class abstractproperty(property):
"""
A decorator indicating abstract properties.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract properties are overridden.
The abstract properties can be called using any of the normal
'super' call mechanisms.
Usage:
class C(metaclass=ABCMeta):
@abstractproperty
def my_abstract_property(self):
...
This defines a read-only property; you can also define a read-write
abstract property using the 'long' form of property declaration:
class C(metaclass=ABCMeta):
def getx(self): ...
def setx(self, value): ...
x = abstractproperty(getx, setx)
'abstractproperty' is deprecated. Use 'property' with 'abstractmethod'
instead.
"""
__isabstractmethod__ = True
class ABCMeta(type):
"""Metaclass for defining Abstract Base Classes (ABCs).
Use this metaclass to create an ABC. An ABC can be subclassed
directly, and then acts as a mix-in class. You can also register
unrelated concrete classes (even built-in classes) and unrelated
ABCs as 'virtual subclasses' -- these and their descendants will
be considered subclasses of the registering ABC by the built-in
issubclass() function, but the registering ABC won't show up in
their MRO (Method Resolution Order) nor will method
implementations defined by the registering ABC be callable (not
even via super()).
"""
# A global counter that is incremented each time a class is
# registered as a virtual subclass of anything. It forces the
# negative cache to be cleared before its next use.
_abc_invalidation_counter = 0
def __new__(mcls, name, bases, namespace):
cls = super().__new__(mcls, name, bases, namespace)
# Compute set of abstract method names
abstracts = {name
for name, value in namespace.items()
if getattr(value, "__isabstractmethod__", False)}
for base in bases:
for name in getattr(base, "__abstractmethods__", set()):
value = getattr(cls, name, None)
if getattr(value, "__isabstractmethod__", False):
abstracts.add(name)
cls.__abstractmethods__ = frozenset(abstracts)
# Set up inheritance registry
cls._abc_registry = WeakSet()
cls._abc_cache = WeakSet()
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
return cls
def register(cls, subclass):
"""Register a virtual subclass of an ABC.
Returns the subclass, to allow usage as a class decorator.
"""
if not isinstance(subclass, type):
raise TypeError("Can only register classes")
if issubclass(subclass, cls):
return subclass # Already a subclass
# Subtle: test for cycles *after* testing for "already a subclass";
# this means we allow X.register(X) and interpret it as a no-op.
if issubclass(cls, subclass):
# This would create a cycle, which is bad for the algorithm below
raise RuntimeError("Refusing to create an inheritance cycle")
cls._abc_registry.add(subclass)
ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache
return subclass
def _dump_registry(cls, file=None):
"""Debug helper to print the ABC registry."""
print("Class: %s.%s" % (cls.__module__, cls.__name__), file=file)
print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file)
for name in sorted(cls.__dict__.keys()):
if name.startswith("_abc_"):
value = getattr(cls, name)
print("%s: %r" % (name, value), file=file)
def __instancecheck__(cls, instance):
"""Override for isinstance(instance, cls)."""
# Inline the cache checking
subclass = instance.__class__
if subclass in cls._abc_cache:
return True
subtype = type(instance)
if subtype is subclass:
if (cls._abc_negative_cache_version ==
ABCMeta._abc_invalidation_counter and
subclass in cls._abc_negative_cache):
return False
# Fall back to the subclass check.
return cls.__subclasscheck__(subclass)
return any(cls.__subclasscheck__(c) for c in {subclass, subtype})
def __subclasscheck__(cls, subclass):
"""Override for issubclass(subclass, cls)."""
# Check cache
if subclass in cls._abc_cache:
return True
# Check negative cache; may have to invalidate
if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
# Invalidate the negative cache
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
elif subclass in cls._abc_negative_cache:
return False
# Check the subclass hook
ok = cls.__subclasshook__(subclass)
if ok is not NotImplemented:
assert isinstance(ok, bool)
if ok:
cls._abc_cache.add(subclass)
else:
cls._abc_negative_cache.add(subclass)
return ok
# Check if it's a direct subclass
if cls in getattr(subclass, '__mro__', ()):
cls._abc_cache.add(subclass)
return True
# Check if it's a subclass of a registered class (recursive)
for rcls in cls._abc_registry:
if issubclass(subclass, rcls):
cls._abc_cache.add(subclass)
return True
# Check if it's a subclass of a subclass (recursive)
for scls in cls.__subclasses__():
if issubclass(subclass, scls):
cls._abc_cache.add(subclass)
return True
# No dice; update negative cache
cls._abc_negative_cache.add(subclass)
return False
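# Minimal usage sketch (illustrative only, not part of the stdlib module):
#
#     class Sized(metaclass=ABCMeta):
#         @abstractmethod
#         def __len__(self): ...
#
#     Sized.register(tuple)           # tuple becomes a virtual subclass
#     assert issubclass(tuple, Sized)
#     assert isinstance((), Sized)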
| gpl-3.0 |
rochacbruno/dynaconf | dynaconf/vendor_src/click/parser.py | 1 | 15782 | """
This module started out as largely a copy paste from the stdlib's
optparse module with the features removed that we do not need from
optparse because we implement them in Click on a higher level (for
instance type handling, help formatting and a lot more).
The plan is to remove more and more from here over time.
The reason this is a different module and not optparse from the stdlib
is that there are differences in 2.x and 3.x about the error messages
generated and optparse in the stdlib uses gettext for no good reason
and might cause us issues.
Click uses parts of optparse written by Gregory P. Ward and maintained
by the Python Software Foundation. This is limited to code in parser.py.
Copyright 2001-2006 Gregory P. Ward. All rights reserved.
Copyright 2002-2006 Python Software Foundation. All rights reserved.
"""
# This code uses parts of optparse written by Gregory P. Ward and
# maintained by the Python Software Foundation.
# Copyright 2001-2006 Gregory P. Ward
# Copyright 2002-2006 Python Software Foundation
import re
from collections import deque
from .exceptions import BadArgumentUsage
from .exceptions import BadOptionUsage
from .exceptions import NoSuchOption
from .exceptions import UsageError
def _unpack_args(args, nargs_spec):
"""Given an iterable of arguments and an iterable of nargs specifications,
it returns a tuple with all the unpacked arguments at the first index
and all remaining arguments as the second.
The nargs specification is the number of arguments that should be consumed
or `-1` to indicate that this position should eat up all the remainders.
Missing items are filled with `None`.
"""
args = deque(args)
nargs_spec = deque(nargs_spec)
rv = []
spos = None
def _fetch(c):
try:
if spos is None:
return c.popleft()
else:
return c.pop()
except IndexError:
return None
while nargs_spec:
nargs = _fetch(nargs_spec)
if nargs == 1:
rv.append(_fetch(args))
elif nargs > 1:
x = [_fetch(args) for _ in range(nargs)]
# If we're reversed, we're pulling in the arguments in reverse,
# so we need to turn them around.
if spos is not None:
x.reverse()
rv.append(tuple(x))
elif nargs < 0:
if spos is not None:
raise TypeError("Cannot have two nargs < 0")
spos = len(rv)
rv.append(None)
# spos is the position of the wildcard (star). If it's not `None`,
# we fill it with the remainder.
if spos is not None:
rv[spos] = tuple(args)
args = []
rv[spos + 1 :] = reversed(rv[spos + 1 :])
return tuple(rv), list(args)
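# Worked example (hypothetical values): with args=['a', 'b', 'c', 'd'] and
# nargs_spec=[1, -1, 1], the nargs=-1 slot absorbs the middle remainder,
# so _unpack_args returns (('a', ('b', 'c'), 'd'), []).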
def _error_opt_args(nargs, opt):
if nargs == 1:
raise BadOptionUsage(opt, f"{opt} option requires an argument")
raise BadOptionUsage(opt, f"{opt} option requires {nargs} arguments")
def split_opt(opt):
first = opt[:1]
if first.isalnum():
return "", opt
if opt[1:2] == first:
return opt[:2], opt[2:]
return first, opt[1:]
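# Illustrative behaviour (hypothetical doctest-style examples):
#     split_opt('--foo')  ->  ('--', 'foo')
#     split_opt('-f')     ->  ('-', 'f')
#     split_opt('foo')    ->  ('', 'foo')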
def normalize_opt(opt, ctx):
if ctx is None or ctx.token_normalize_func is None:
return opt
prefix, opt = split_opt(opt)
return f"{prefix}{ctx.token_normalize_func(opt)}"
def split_arg_string(string):
"""Given an argument string this attempts to split it into small parts."""
rv = []
for match in re.finditer(
r"('([^'\\]*(?:\\.[^'\\]*)*)'|\"([^\"\\]*(?:\\.[^\"\\]*)*)\"|\S+)\s*",
string,
re.S,
):
arg = match.group().strip()
if arg[:1] == arg[-1:] and arg[:1] in "\"'":
arg = arg[1:-1].encode("ascii", "backslashreplace").decode("unicode-escape")
try:
arg = type(string)(arg)
except UnicodeError:
pass
rv.append(arg)
return rv
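# Example (hypothetical input): split_arg_string('pip install "my pkg" -q')
# returns ['pip', 'install', 'my pkg', '-q']; surrounding quotes are
# stripped and backslash escapes inside them are honoured.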
class Option:
def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
self._short_opts = []
self._long_opts = []
self.prefixes = set()
for opt in opts:
prefix, value = split_opt(opt)
if not prefix:
raise ValueError(f"Invalid start character for option ({opt})")
self.prefixes.add(prefix[0])
if len(prefix) == 1 and len(value) == 1:
self._short_opts.append(opt)
else:
self._long_opts.append(opt)
self.prefixes.add(prefix)
if action is None:
action = "store"
self.dest = dest
self.action = action
self.nargs = nargs
self.const = const
self.obj = obj
@property
def takes_value(self):
return self.action in ("store", "append")
def process(self, value, state):
if self.action == "store":
state.opts[self.dest] = value
elif self.action == "store_const":
state.opts[self.dest] = self.const
elif self.action == "append":
state.opts.setdefault(self.dest, []).append(value)
elif self.action == "append_const":
state.opts.setdefault(self.dest, []).append(self.const)
elif self.action == "count":
state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
else:
raise ValueError(f"unknown action '{self.action}'")
state.order.append(self.obj)
class Argument:
def __init__(self, dest, nargs=1, obj=None):
self.dest = dest
self.nargs = nargs
self.obj = obj
def process(self, value, state):
if self.nargs > 1:
holes = sum(1 for x in value if x is None)
if holes == len(value):
value = None
elif holes != 0:
raise BadArgumentUsage(
f"argument {self.dest} takes {self.nargs} values"
)
state.opts[self.dest] = value
state.order.append(self.obj)
class ParsingState:
def __init__(self, rargs):
self.opts = {}
self.largs = []
self.rargs = rargs
self.order = []
class OptionParser:
"""The option parser is an internal class that is ultimately used to
parse options and arguments. It's modelled after optparse and brings
a similar but vastly simplified API. It should generally not be used
directly as the high level Click classes wrap it for you.
It's not nearly as extensible as optparse or argparse as it does not
implement features that are implemented on a higher level (such as
types or defaults).
:param ctx: optionally the :class:`~click.Context` where this parser
should go with.
"""
def __init__(self, ctx=None):
#: The :class:`~click.Context` for this parser. This might be
#: `None` for some advanced use cases.
self.ctx = ctx
#: This controls how the parser deals with interspersed arguments.
#: If this is set to `False`, the parser will stop on the first
#: non-option. Click uses this to implement nested subcommands
#: safely.
self.allow_interspersed_args = True
#: This tells the parser how to deal with unknown options. By
#: default it will error out (which is sensible), but there is a
#: second mode where it will ignore it and continue processing
#: after shifting all the unknown options into the resulting args.
self.ignore_unknown_options = False
if ctx is not None:
self.allow_interspersed_args = ctx.allow_interspersed_args
self.ignore_unknown_options = ctx.ignore_unknown_options
self._short_opt = {}
self._long_opt = {}
self._opt_prefixes = {"-", "--"}
self._args = []
def add_option(self, opts, dest, action=None, nargs=1, const=None, obj=None):
"""Adds a new option named `dest` to the parser. The destination
is not inferred (unlike with optparse) and needs to be explicitly
provided. Action can be any of ``store``, ``store_const``,
``append``, ``append_const`` or ``count``.
The `obj` can be used to identify the option in the order list
that is returned from the parser.
"""
if obj is None:
obj = dest
opts = [normalize_opt(opt, self.ctx) for opt in opts]
option = Option(opts, dest, action=action, nargs=nargs, const=const, obj=obj)
self._opt_prefixes.update(option.prefixes)
for opt in option._short_opts:
self._short_opt[opt] = option
for opt in option._long_opts:
self._long_opt[opt] = option
def add_argument(self, dest, nargs=1, obj=None):
"""Adds a positional argument named `dest` to the parser.
The `obj` can be used to identify the option in the order list
that is returned from the parser.
"""
if obj is None:
obj = dest
self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))
def parse_args(self, args):
"""Parses positional arguments and returns ``(values, args, order)``
for the parsed options and arguments as well as the leftover
arguments if there are any. The order is a list of objects as they
appear on the command line. If arguments appear multiple times they
will be memorized multiple times as well.
"""
state = ParsingState(args)
try:
self._process_args_for_options(state)
self._process_args_for_args(state)
except UsageError:
if self.ctx is None or not self.ctx.resilient_parsing:
raise
return state.opts, state.largs, state.order
def _process_args_for_args(self, state):
pargs, args = _unpack_args(
state.largs + state.rargs, [x.nargs for x in self._args]
)
for idx, arg in enumerate(self._args):
arg.process(pargs[idx], state)
state.largs = args
state.rargs = []
def _process_args_for_options(self, state):
while state.rargs:
arg = state.rargs.pop(0)
arglen = len(arg)
# Double dashes always handled explicitly regardless of what
# prefixes are valid.
if arg == "--":
return
elif arg[:1] in self._opt_prefixes and arglen > 1:
self._process_opts(arg, state)
elif self.allow_interspersed_args:
state.largs.append(arg)
else:
state.rargs.insert(0, arg)
return
# Say this is the original argument list:
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
# ^
# (we are about to process arg(i)).
#
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
# [arg0, ..., arg(i-1)] (any options and their arguments will have
# been removed from largs).
#
# The while loop will usually consume 1 or more arguments per pass.
# If it consumes 1 (eg. arg is an option that takes no arguments),
# then after _process_arg() is done the situation is:
#
# largs = subset of [arg0, ..., arg(i)]
# rargs = [arg(i+1), ..., arg(N-1)]
#
# If allow_interspersed_args is false, largs will always be
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
# not a very interesting subset!
def _match_long_opt(self, opt, explicit_value, state):
if opt not in self._long_opt:
possibilities = [word for word in self._long_opt if word.startswith(opt)]
raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
option = self._long_opt[opt]
if option.takes_value:
# At this point it's safe to modify rargs by injecting the
# explicit value, because no exception is raised in this
# branch. This means that the inserted value will be fully
# consumed.
if explicit_value is not None:
state.rargs.insert(0, explicit_value)
nargs = option.nargs
if len(state.rargs) < nargs:
_error_opt_args(nargs, opt)
elif nargs == 1:
value = state.rargs.pop(0)
else:
value = tuple(state.rargs[:nargs])
del state.rargs[:nargs]
elif explicit_value is not None:
raise BadOptionUsage(opt, f"{opt} option does not take a value")
else:
value = None
option.process(value, state)
def _match_short_opt(self, arg, state):
stop = False
i = 1
prefix = arg[0]
unknown_options = []
for ch in arg[1:]:
opt = normalize_opt(f"{prefix}{ch}", self.ctx)
option = self._short_opt.get(opt)
i += 1
if not option:
if self.ignore_unknown_options:
unknown_options.append(ch)
continue
raise NoSuchOption(opt, ctx=self.ctx)
if option.takes_value:
# Any characters left in arg? Pretend they're the
# next arg, and stop consuming characters of arg.
if i < len(arg):
state.rargs.insert(0, arg[i:])
stop = True
nargs = option.nargs
if len(state.rargs) < nargs:
_error_opt_args(nargs, opt)
elif nargs == 1:
value = state.rargs.pop(0)
else:
value = tuple(state.rargs[:nargs])
del state.rargs[:nargs]
else:
value = None
option.process(value, state)
if stop:
break
# If we got any unknown options, we recombine the string of the
# remaining options and re-attach the prefix, then report that
# to the state as new larg. This way there is basic combinatorics
# that can be achieved while still ignoring unknown arguments.
if self.ignore_unknown_options and unknown_options:
state.largs.append(f"{prefix}{''.join(unknown_options)}")
def _process_opts(self, arg, state):
explicit_value = None
# Long option handling happens in two parts. The first part is
# supporting explicitly attached values. In any case, we will try
# to long match the option first.
if "=" in arg:
long_opt, explicit_value = arg.split("=", 1)
else:
long_opt = arg
norm_long_opt = normalize_opt(long_opt, self.ctx)
# At this point we will match the (assumed) long option through
# the long option matching code. Note that this allows options
# like "-foo" to be matched as long options.
try:
self._match_long_opt(norm_long_opt, explicit_value, state)
except NoSuchOption:
# At this point the long option matching failed, and we need
# to try with short options. However there is a special rule
# which says that if we have a two-character option prefix
# (applies to "--foo" for instance), we do not dispatch to the
# short option code and will instead raise the no option
# error.
if arg[:2] not in self._opt_prefixes:
return self._match_short_opt(arg, state)
if not self.ignore_unknown_options:
raise
state.largs.append(arg)
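# Hypothetical usage sketch (OptionParser is internal to Click, so this is
# illustrative only):
#
#     parser = OptionParser()
#     parser.add_option(["-v", "--verbose"], dest="verbose", action="count")
#     parser.add_argument("src", nargs=1)
#     opts, largs, order = parser.parse_args(["-vv", "input.txt"])
#     # opts == {"verbose": 2, "src": "input.txt"}, largs == []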
| mit |
sileht/deb-openstack-nova | nova/virt/vmwareapi/vmops.py | 4 | 37865 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Class for VM tasks like spawn, snapshot, suspend, resume etc.
"""
import base64
import os
import time
import urllib
import urllib2
import uuid
from nova.compute import power_state
from nova import exception
from nova import flags
from nova import log as logging
from nova.openstack.common import cfg
from nova import utils
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmware_images
from nova.virt.vmwareapi import network_utils
vmware_vif_driver_opt = cfg.StrOpt('vmware_vif_driver',
default='nova.virt.vmwareapi.vif.VMWareVlanBridgeDriver',
help='The VMWare VIF driver to configure the VIFs.')
FLAGS = flags.FLAGS
FLAGS.register_opt(vmware_vif_driver_opt)
LOG = logging.getLogger(__name__)
VMWARE_POWER_STATES = {
'poweredOff': power_state.SHUTDOWN,
'poweredOn': power_state.RUNNING,
'suspended': power_state.PAUSED}
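# For example, VMWARE_POWER_STATES['poweredOn'] maps to power_state.RUNNING,
# which is what get_info() below reports for a running instance.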
class VMWareVMOps(object):
"""Management class for VM-related tasks."""
def __init__(self, session):
"""Initializer."""
self._session = session
self._vif_driver = utils.import_object(FLAGS.vmware_vif_driver)
def list_instances(self):
"""Lists the VM instances that are registered with the ESX host."""
LOG.debug(_("Getting list of instances"))
vms = self._session._call_method(vim_util, "get_objects",
"VirtualMachine",
["name", "runtime.connectionState"])
lst_vm_names = []
for vm in vms:
vm_name = None
conn_state = None
for prop in vm.propSet:
if prop.name == "name":
vm_name = prop.val
elif prop.name == "runtime.connectionState":
conn_state = prop.val
# Ignore orphaned or inaccessible VMs
if conn_state not in ["orphaned", "inaccessible"]:
lst_vm_names.append(vm_name)
LOG.debug(_("Got total of %s instances") % str(len(lst_vm_names)))
return lst_vm_names
def spawn(self, context, instance, image_meta, network_info):
"""
Creates a VM instance.
Steps followed are:
1. Create a VM with no disk and the specifics in the instance object
like RAM size.
2. Create a dummy vmdk of the size of the disk file that is to be
uploaded. This is required just to create the metadata file.
3. Delete the -flat.vmdk file created in the above step and retain
the metadata .vmdk file.
4. Upload the disk file.
5. Attach the disk to the VM by reconfiguring the same.
6. Power on the VM.
"""
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref:
raise exception.InstanceExists(name=instance.name)
client_factory = self._session._get_vim().client.factory
service_content = self._session._get_vim().get_service_content()
def _get_datastore_ref():
"""Get the datastore list and choose the first local storage."""
data_stores = self._session._call_method(vim_util, "get_objects",
"Datastore", ["summary.type", "summary.name"])
data_store_name = None
for elem in data_stores:
ds_name = None
ds_type = None
for prop in elem.propSet:
if prop.name == "summary.type":
ds_type = prop.val
elif prop.name == "summary.name":
ds_name = prop.val
# Local storage identifier
if ds_type == "VMFS":
data_store_name = ds_name
return data_store_name
if data_store_name is None:
msg = _("Couldn't get a local Datastore reference")
LOG.error(msg)
raise exception.Error(msg)
data_store_name = _get_datastore_ref()
def _get_image_properties():
"""
Get the Size of the flat vmdk file that is there on the storage
repository.
"""
_image_info = vmware_images.get_vmdk_size_and_properties(context,
instance.image_ref,
instance)
image_size, image_properties = _image_info
vmdk_file_size_in_kb = int(image_size) / 1024
os_type = image_properties.get("vmware_ostype", "otherGuest")
adapter_type = image_properties.get("vmware_adaptertype",
"lsiLogic")
return vmdk_file_size_in_kb, os_type, adapter_type
vmdk_file_size_in_kb, os_type, adapter_type = _get_image_properties()
def _get_vmfolder_and_res_pool_mors():
"""Get the Vm folder ref from the datacenter."""
dc_objs = self._session._call_method(vim_util, "get_objects",
"Datacenter", ["vmFolder"])
# There is only one default datacenter in a standalone ESX host
vm_folder_mor = dc_objs[0].propSet[0].val
# Get the resource pool. Taking the first resource pool coming our
# way. Assuming that is the default resource pool.
res_pool_mor = self._session._call_method(vim_util, "get_objects",
"ResourcePool")[0].obj
return vm_folder_mor, res_pool_mor
vm_folder_mor, res_pool_mor = _get_vmfolder_and_res_pool_mors()
def _check_if_network_bridge_exists(network_name):
network_ref = network_utils.get_network_with_the_name(
self._session, network_name)
if network_ref is None:
raise exception.NetworkNotFoundForBridge(bridge=network_name)
return network_ref
def _get_vif_infos():
vif_infos = []
for (network, mapping) in network_info:
mac_address = mapping['mac']
network_name = network['bridge']
if mapping.get('should_create_vlan'):
network_ref = self._vif_driver.ensure_vlan_bridge(
self._session, network)
else:
network_ref = _check_if_network_bridge_exists(network_name)
vif_infos.append({'network_name': network_name,
'mac_address': mac_address,
'network_ref': network_ref,
})
return vif_infos
vif_infos = _get_vif_infos()
# Get the create vm config spec
config_spec = vm_util.get_vm_create_spec(
client_factory, instance,
data_store_name, vif_infos, os_type)
def _execute_create_vm():
"""Create VM on ESX host."""
LOG.debug(_("Creating VM with the name %s on the ESX host") %
instance.name)
# Create the VM on the ESX host
vm_create_task = self._session._call_method(
self._session._get_vim(),
"CreateVM_Task", vm_folder_mor,
config=config_spec, pool=res_pool_mor)
self._session._wait_for_task(instance['uuid'], vm_create_task)
LOG.debug(_("Created VM with the name %s on the ESX host") %
instance.name)
_execute_create_vm()
# Set the machine.id parameter of the instance to inject
# the NIC configuration inside the VM
if FLAGS.flat_injected:
self._set_machine_id(client_factory, instance, network_info)
# Naming the VM files in correspondence with the VM instance name
# The flat vmdk file name
flat_uploaded_vmdk_name = "%s/%s-flat.vmdk" % (instance.name,
instance.name)
# The vmdk meta-data file
uploaded_vmdk_name = "%s/%s.vmdk" % (instance.name, instance.name)
flat_uploaded_vmdk_path = vm_util.build_datastore_path(data_store_name,
flat_uploaded_vmdk_name)
uploaded_vmdk_path = vm_util.build_datastore_path(data_store_name,
uploaded_vmdk_name)
def _create_virtual_disk():
"""Create a virtual disk of the size of flat vmdk file."""
# Create a Virtual Disk of the size of the flat vmdk file. This is
# done just to generate the meta-data file whose specifics
# depend on the size of the disk, thin/thick provisioning and the
# storage adapter type.
# Here we assume thick provisioning and lsiLogic for the adapter
# type
LOG.debug(_("Creating Virtual Disk of size "
"%(vmdk_file_size_in_kb)s KB and adapter type "
"%(adapter_type)s on the ESX host local store"
" %(data_store_name)s") %
{"vmdk_file_size_in_kb": vmdk_file_size_in_kb,
"adapter_type": adapter_type,
"data_store_name": data_store_name})
vmdk_create_spec = vm_util.get_vmdk_create_spec(client_factory,
vmdk_file_size_in_kb, adapter_type)
vmdk_create_task = self._session._call_method(
self._session._get_vim(),
"CreateVirtualDisk_Task",
service_content.virtualDiskManager,
name=uploaded_vmdk_path,
datacenter=self._get_datacenter_name_and_ref()[0],
spec=vmdk_create_spec)
self._session._wait_for_task(instance['uuid'], vmdk_create_task)
LOG.debug(_("Created Virtual Disk of size %(vmdk_file_size_in_kb)s"
" KB on the ESX host local store "
"%(data_store_name)s") %
{"vmdk_file_size_in_kb": vmdk_file_size_in_kb,
"data_store_name": data_store_name})
_create_virtual_disk()
def _delete_disk_file():
LOG.debug(_("Deleting the file %(flat_uploaded_vmdk_path)s "
"on the ESX host local"
"store %(data_store_name)s") %
{"flat_uploaded_vmdk_path": flat_uploaded_vmdk_path,
"data_store_name": data_store_name})
# Delete the -flat.vmdk file created. .vmdk file is retained.
vmdk_delete_task = self._session._call_method(
self._session._get_vim(),
"DeleteDatastoreFile_Task",
service_content.fileManager,
name=flat_uploaded_vmdk_path)
self._session._wait_for_task(instance['uuid'], vmdk_delete_task)
LOG.debug(_("Deleted the file %(flat_uploaded_vmdk_path)s on the "
"ESX host local store %(data_store_name)s") %
{"flat_uploaded_vmdk_path": flat_uploaded_vmdk_path,
"data_store_name": data_store_name})
_delete_disk_file()
cookies = self._session._get_vim().client.options.transport.cookiejar
def _fetch_image_on_esx_datastore():
"""Fetch image from Glance to ESX datastore."""
LOG.debug(_("Downloading image file data %(image_ref)s to the ESX "
"data store %(data_store_name)s") %
({'image_ref': instance.image_ref,
'data_store_name': data_store_name}))
# Upload the -flat.vmdk file whose meta-data file we just created
# above
vmware_images.fetch_image(
context,
instance.image_ref,
instance,
host=self._session._host_ip,
data_center_name=self._get_datacenter_name_and_ref()[1],
datastore_name=data_store_name,
cookies=cookies,
file_path=flat_uploaded_vmdk_name)
LOG.debug(_("Downloaded image file data %(image_ref)s to the ESX "
"data store %(data_store_name)s") %
({'image_ref': instance.image_ref,
'data_store_name': data_store_name}))
_fetch_image_on_esx_datastore()
vm_ref = self._get_vm_ref_from_the_name(instance.name)
def _attach_vmdk_to_the_vm():
"""
Attach the vmdk uploaded to the VM. VM reconfigure is done
to do so.
"""
vmdk_attach_config_spec = vm_util.get_vmdk_attach_config_spec(
client_factory,
vmdk_file_size_in_kb, uploaded_vmdk_path,
adapter_type)
LOG.debug(_("Reconfiguring VM instance %s to attach the image "
"disk") % instance.name)
reconfig_task = self._session._call_method(
self._session._get_vim(),
"ReconfigVM_Task", vm_ref,
spec=vmdk_attach_config_spec)
self._session._wait_for_task(instance['uuid'], reconfig_task)
LOG.debug(_("Reconfigured VM instance %s to attach the image "
"disk") % instance.name)
_attach_vmdk_to_the_vm()
def _power_on_vm():
"""Power on the VM."""
LOG.debug(_("Powering on the VM instance %s") % instance.name)
# Power On the VM
power_on_task = self._session._call_method(
self._session._get_vim(),
"PowerOnVM_Task", vm_ref)
self._session._wait_for_task(instance['uuid'], power_on_task)
LOG.debug(_("Powered on the VM instance %s") % instance.name)
_power_on_vm()
def snapshot(self, context, instance, snapshot_name):
"""Create snapshot from a running VM instance.
Steps followed are:
1. Get the name of the vmdk file which the VM points to right now.
Can be a chain of snapshots, so we need to know the last in the
chain.
2. Create the snapshot. A new vmdk is created which the VM points to
now. The earlier vmdk becomes read-only.
3. Call CopyVirtualDisk, which coalesces the disk chain to form a single
vmdk, or rather a .vmdk metadata file and a -flat.vmdk disk data file.
4. Now upload the -flat.vmdk file to the image store.
5. Delete the coalesced .vmdk and -flat.vmdk created.
"""
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance.id)
client_factory = self._session._get_vim().client.factory
service_content = self._session._get_vim().get_service_content()
def _get_vm_and_vmdk_attribs():
# Get the vmdk file name that the VM is pointing to
hardware_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
_vmdk_info = vm_util.get_vmdk_file_path_and_adapter_type(
client_factory, hardware_devices)
vmdk_file_path_before_snapshot, adapter_type = _vmdk_info
datastore_name = vm_util.split_datastore_path(
vmdk_file_path_before_snapshot)[0]
os_type = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "summary.config.guestId")
return (vmdk_file_path_before_snapshot, adapter_type,
datastore_name, os_type)
(vmdk_file_path_before_snapshot, adapter_type, datastore_name,
os_type) = _get_vm_and_vmdk_attribs()
def _create_vm_snapshot():
# Create a snapshot of the VM
LOG.debug(_("Creating Snapshot of the VM instance %s ") %
instance.name)
snapshot_task = self._session._call_method(
self._session._get_vim(),
"CreateSnapshot_Task", vm_ref,
name="%s-snapshot" % instance.name,
description="Taking Snapshot of the VM",
memory=True,
quiesce=True)
self._session._wait_for_task(instance['uuid'], snapshot_task)
LOG.debug(_("Created Snapshot of the VM instance %s ") %
instance.name)
_create_vm_snapshot()
def _check_if_tmp_folder_exists():
# Copy the contents of the VM that were there just before the
# snapshot was taken
ds_ref_ret = vim_util.get_dynamic_property(
self._session._get_vim(),
vm_ref,
"VirtualMachine",
"datastore")
if not ds_ref_ret:
raise exception.DatastoreNotFound()
ds_ref = ds_ref_ret.ManagedObjectReference[0]
ds_browser = vim_util.get_dynamic_property(
self._session._get_vim(),
ds_ref,
"Datastore",
"browser")
# Check if the vmware-tmp folder exists or not. If not, create one
tmp_folder_path = vm_util.build_datastore_path(datastore_name,
"vmware-tmp")
if not self._path_exists(ds_browser, tmp_folder_path):
self._mkdir(vm_util.build_datastore_path(datastore_name,
"vmware-tmp"))
_check_if_tmp_folder_exists()
# Generate a random vmdk file name to which the coalesced vmdk content
# will be copied to. A random name is chosen so that we don't have
# name clashes.
random_name = str(uuid.uuid4())
dest_vmdk_file_location = vm_util.build_datastore_path(datastore_name,
"vmware-tmp/%s.vmdk" % random_name)
dc_ref = self._get_datacenter_name_and_ref()[0]
def _copy_vmdk_content():
# Copy the contents of the disk (or disks, if there were snapshots
# done earlier) to a temporary vmdk file.
copy_spec = vm_util.get_copy_virtual_disk_spec(client_factory,
adapter_type)
LOG.debug(_("Copying disk data before snapshot of the VM "
" instance %s") % instance.name)
copy_disk_task = self._session._call_method(
self._session._get_vim(),
"CopyVirtualDisk_Task",
service_content.virtualDiskManager,
sourceName=vmdk_file_path_before_snapshot,
sourceDatacenter=dc_ref,
destName=dest_vmdk_file_location,
destDatacenter=dc_ref,
destSpec=copy_spec,
force=False)
self._session._wait_for_task(instance['uuid'], copy_disk_task)
LOG.debug(_("Copied disk data before snapshot of the VM "
"instance %s") % instance.name)
_copy_vmdk_content()
cookies = self._session._get_vim().client.options.transport.cookiejar
def _upload_vmdk_to_image_repository():
# Upload the contents of -flat.vmdk file which has the disk data.
LOG.debug(_("Uploading image %s") % snapshot_name)
vmware_images.upload_image(
context,
snapshot_name,
instance,
os_type=os_type,
adapter_type=adapter_type,
image_version=1,
host=self._session._host_ip,
data_center_name=self._get_datacenter_name_and_ref()[1],
datastore_name=datastore_name,
cookies=cookies,
file_path="vmware-tmp/%s-flat.vmdk" % random_name)
LOG.debug(_("Uploaded image %s") % snapshot_name)
_upload_vmdk_to_image_repository()
def _clean_temp_data():
"""
Delete temporary vmdk files generated in image handling
operations.
"""
# Delete the temporary vmdk created above.
LOG.debug(_("Deleting temporary vmdk file %s")
% dest_vmdk_file_location)
remove_disk_task = self._session._call_method(
self._session._get_vim(),
"DeleteVirtualDisk_Task",
service_content.virtualDiskManager,
name=dest_vmdk_file_location,
datacenter=dc_ref)
self._session._wait_for_task(instance['uuid'], remove_disk_task)
LOG.debug(_("Deleted temporary vmdk file %s")
% dest_vmdk_file_location)
_clean_temp_data()
def reboot(self, instance, network_info):
"""Reboot a VM instance."""
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance.id)
self.plug_vifs(instance, network_info)
lst_properties = ["summary.guest.toolsStatus", "runtime.powerState",
"summary.guest.toolsRunningStatus"]
props = self._session._call_method(vim_util, "get_object_properties",
None, vm_ref, "VirtualMachine",
lst_properties)
pwr_state = None
tools_status = None
tools_running_status = False
for elem in props:
for prop in elem.propSet:
if prop.name == "runtime.powerState":
pwr_state = prop.val
elif prop.name == "summary.guest.toolsStatus":
tools_status = prop.val
elif prop.name == "summary.guest.toolsRunningStatus":
tools_running_status = prop.val
# Raise an exception if the VM is not powered On.
if pwr_state not in ["poweredOn"]:
reason = _("instance is not powered on")
raise exception.InstanceRebootFailure(reason=reason)
# If the latest VMware tools are installed in the VM and the tools are
# running, do a guest reboot. Otherwise do a hard reset.
if (tools_status == "toolsOk" and
tools_running_status == "guestToolsRunning"):
LOG.debug(_("Rebooting guest OS of VM %s") % instance.name)
self._session._call_method(self._session._get_vim(), "RebootGuest",
vm_ref)
LOG.debug(_("Rebooted guest OS of VM %s") % instance.name)
else:
LOG.debug(_("Doing hard reboot of VM %s") % instance.name)
reset_task = self._session._call_method(self._session._get_vim(),
"ResetVM_Task", vm_ref)
self._session._wait_for_task(instance['uuid'], reset_task)
LOG.debug(_("Did hard reboot of VM %s") % instance.name)
def destroy(self, instance, network_info):
"""
Destroy a VM instance. Steps followed are:
1. Power off the VM, if it is in poweredOn state.
2. Un-register a VM.
3. Delete the contents of the folder holding the VM related data.
"""
try:
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref is None:
LOG.debug(_("instance - %s not present") % instance.name)
return
lst_properties = ["config.files.vmPathName", "runtime.powerState"]
props = self._session._call_method(vim_util,
"get_object_properties",
None, vm_ref, "VirtualMachine", lst_properties)
pwr_state = None
for elem in props:
vm_config_pathname = None
for prop in elem.propSet:
if prop.name == "runtime.powerState":
pwr_state = prop.val
elif prop.name == "config.files.vmPathName":
vm_config_pathname = prop.val
if vm_config_pathname:
_ds_path = vm_util.split_datastore_path(vm_config_pathname)
datastore_name, vmx_file_path = _ds_path
# Power off the VM if it is in PoweredOn state.
if pwr_state == "poweredOn":
LOG.debug(_("Powering off the VM %s") % instance.name)
poweroff_task = self._session._call_method(
self._session._get_vim(),
"PowerOffVM_Task", vm_ref)
self._session._wait_for_task(instance['uuid'], poweroff_task)
LOG.debug(_("Powered off the VM %s") % instance.name)
# Un-register the VM
try:
LOG.debug(_("Unregistering the VM %s") % instance.name)
self._session._call_method(self._session._get_vim(),
"UnregisterVM", vm_ref)
LOG.debug(_("Unregistered the VM %s") % instance.name)
except Exception, excep:
LOG.warn(_("In vmwareapi:vmops:destroy, got this exception"
" while un-registering the VM: %s") % str(excep))
self._unplug_vifs(instance, network_info)
# Delete the folder holding the VM related content on
# the datastore.
try:
dir_ds_compliant_path = vm_util.build_datastore_path(
datastore_name,
os.path.dirname(vmx_file_path))
LOG.debug(_("Deleting contents of the VM %(name)s from "
"datastore %(datastore_name)s") %
({'name': instance.name,
'datastore_name': datastore_name}))
delete_task = self._session._call_method(
self._session._get_vim(),
"DeleteDatastoreFile_Task",
self._session._get_vim().get_service_content().fileManager,
name=dir_ds_compliant_path)
self._session._wait_for_task(instance['uuid'], delete_task)
LOG.debug(_("Deleted contents of the VM %(name)s from "
"datastore %(datastore_name)s") %
({'name': instance.name,
'datastore_name': datastore_name}))
except Exception, excep:
LOG.warn(_("In vmwareapi:vmops:destroy, "
"got this exception while deleting"
" the VM contents from the disk: %s")
% str(excep))
except Exception, exc:
LOG.exception(exc)
def pause(self, instance):
msg = _("pause not supported for vmwareapi")
raise NotImplementedError(msg)
def unpause(self, instance):
msg = _("unpause not supported for vmwareapi")
raise NotImplementedError(msg)
def suspend(self, instance):
"""Suspend the specified instance."""
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance.id)
pwr_state = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "runtime.powerState")
# Only PoweredOn VMs can be suspended.
if pwr_state == "poweredOn":
LOG.debug(_("Suspending the VM %s ") % instance.name)
suspend_task = self._session._call_method(self._session._get_vim(),
"SuspendVM_Task", vm_ref)
self._session._wait_for_task(instance['uuid'], suspend_task)
LOG.debug(_("Suspended the VM %s ") % instance.name)
# Raise an exception if the VM is powered off
elif pwr_state == "poweredOff":
reason = _("instance is powered off and cannot be suspended.")
raise exception.InstanceSuspendFailure(reason=reason)
else:
LOG.debug(_("VM %s was already in suspended state. So returning "
"without doing anything") % instance.name)
def resume(self, instance):
"""Resume the specified instance."""
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance.id)
pwr_state = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "runtime.powerState")
if pwr_state.lower() == "suspended":
LOG.debug(_("Resuming the VM %s") % instance.name)
suspend_task = self._session._call_method(
self._session._get_vim(),
"PowerOnVM_Task", vm_ref)
self._session._wait_for_task(instance['uuid'], suspend_task)
LOG.debug(_("Resumed the VM %s ") % instance.name)
else:
reason = _("instance is not in a suspended state")
raise exception.InstanceResumeFailure(reason=reason)
def get_info(self, instance):
"""Return data about the VM instance."""
vm_ref = self._get_vm_ref_from_the_name(instance['name'])
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance['name'])
lst_properties = ["summary.config.numCpu",
"summary.config.memorySizeMB",
"runtime.powerState"]
vm_props = self._session._call_method(vim_util,
"get_object_properties", None, vm_ref, "VirtualMachine",
lst_properties)
max_mem = None
pwr_state = None
num_cpu = None
for elem in vm_props:
for prop in elem.propSet:
if prop.name == "summary.config.numCpu":
num_cpu = int(prop.val)
elif prop.name == "summary.config.memorySizeMB":
# In MB, but we want in KB
max_mem = int(prop.val) * 1024
elif prop.name == "runtime.powerState":
pwr_state = VMWARE_POWER_STATES[prop.val]
return {'state': pwr_state,
'max_mem': max_mem,
'mem': max_mem,
'num_cpu': num_cpu,
'cpu_time': 0}
def get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
msg = _("get_diagnostics not implemented for vmwareapi")
raise NotImplementedError(msg)
def get_console_output(self, instance):
"""Return snapshot of console."""
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance.id)
param_list = {"id": str(vm_ref)}
base_url = "%s://%s/screen?%s" % (self._session._scheme,
self._session._host_ip,
urllib.urlencode(param_list))
request = urllib2.Request(base_url)
base64string = base64.encodestring(
'%s:%s' % (
self._session._host_username,
self._session._host_password)).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
result = urllib2.urlopen(request)
if result.code == 200:
return result.read()
else:
return ""
def _set_machine_id(self, client_factory, instance, network_info):
"""
Set the machine id of the VM for guest tools to pick up and reconfigure
the network interfaces.
"""
vm_ref = self._get_vm_ref_from_the_name(instance.name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance.id)
machine_id_str = ''
for (network, info) in network_info:
# TODO(vish): add support for dns2
# TODO(sateesh): add support for injection of ipv6 configuration
ip_v4 = ip_v6 = None
if 'ips' in info and len(info['ips']) > 0:
ip_v4 = info['ips'][0]
if 'ip6s' in info and len(info['ip6s']) > 0:
ip_v6 = info['ip6s'][0]
if len(info['dns']) > 0:
dns = info['dns'][0]
else:
dns = ''
interface_str = ";".join([info['mac'],
ip_v4 and ip_v4['ip'] or '',
ip_v4 and ip_v4['netmask'] or '',
info['gateway'],
info['broadcast'],
dns])
machine_id_str = machine_id_str + interface_str + '#'
machine_id_change_spec = vm_util.get_machine_id_change_spec(
client_factory, machine_id_str)
LOG.debug(_("Reconfiguring VM instance %(name)s to set the machine id "
"with ip - %(ip_addr)s") %
({'name': instance.name,
'ip_addr': ip_v4['ip']}))
reconfig_task = self._session._call_method(self._session._get_vim(),
"ReconfigVM_Task", vm_ref,
spec=machine_id_change_spec)
self._session._wait_for_task(instance['uuid'], reconfig_task)
LOG.debug(_("Reconfigured VM instance %(name)s to set the machine id "
"with ip - %(ip_addr)s") %
({'name': instance.name,
'ip_addr': ip_v4['ip']}))
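# The machine.id string assembled above has one '#'-terminated entry per
# interface, e.g. (illustrative values):
#     "00:50:56:aa:bb:cc;10.0.0.5;255.255.255.0;10.0.0.1;10.0.0.255;8.8.8.8#"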
def _get_datacenter_name_and_ref(self):
"""Get the datacenter name and the reference."""
dc_obj = self._session._call_method(vim_util, "get_objects",
"Datacenter", ["name"])
return dc_obj[0].obj, dc_obj[0].propSet[0].val
def _path_exists(self, ds_browser, ds_path):
"""Check if the path exists on the datastore."""
search_task = self._session._call_method(self._session._get_vim(),
"SearchDatastore_Task",
ds_browser,
datastorePath=ds_path)
# Wait till the state changes from queued or running.
# If an error state is returned, it means that the path doesn't exist.
while True:
task_info = self._session._call_method(vim_util,
"get_dynamic_property",
search_task, "Task", "info")
if task_info.state in ['queued', 'running']:
time.sleep(2)
continue
break
if task_info.state == "error":
return False
return True
def _mkdir(self, ds_path):
"""
Creates a directory at the path specified. If it is just "NAME",
then a directory with this name is created at the topmost level of the
DataStore.
"""
LOG.debug(_("Creating directory with path %s") % ds_path)
self._session._call_method(self._session._get_vim(), "MakeDirectory",
self._session._get_vim().get_service_content().fileManager,
name=ds_path, createParentDirectories=False)
LOG.debug(_("Created directory with path %s") % ds_path)
def _get_vm_ref_from_the_name(self, vm_name):
"""Get reference to the VM with the name specified."""
vms = self._session._call_method(vim_util, "get_objects",
"VirtualMachine", ["name"])
for vm in vms:
if vm.propSet[0].val == vm_name:
return vm.obj
return None
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks."""
for (network, mapping) in network_info:
self._vif_driver.plug(instance, network, mapping)
def _unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks."""
for (network, mapping) in network_info:
self._vif_driver.unplug(instance, network, mapping)
| apache-2.0 |
benschmaus/catapult | third_party/google-endpoints/apitools/base/protorpclite/util.py | 17 | 9243 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Common utility library."""
from __future__ import with_statement
import datetime
import functools
import inspect
import os
import re
import sys
import six
__all__ = [
'Error',
'decode_datetime',
'get_package_for_module',
'positional',
'TimeZoneOffset',
'total_seconds',
]
class Error(Exception):
"""Base class for protorpc exceptions."""
_TIME_ZONE_RE_STRING = r"""
# Examples:
# +01:00
# -05:30
    #   Z            (a bare "Z" denotes UTC; "Z12:00" would not match)
((?P<z>Z) | (?P<sign>[-+])
(?P<hours>\d\d) :
(?P<minutes>\d\d))$
"""
_TIME_ZONE_RE = re.compile(_TIME_ZONE_RE_STRING, re.IGNORECASE | re.VERBOSE)
def positional(max_positional_args):
"""A decorator to declare that only the first N arguments may be positional.
This decorator makes it easy to support Python 3 style keyword-only
parameters. For example, in Python 3 it is possible to write:
      def fn(pos1, *, kwonly1=None, kwonly2=None):
...
All named parameters after * must be a keyword:
fn(10, 'kw1', 'kw2') # Raises exception.
fn(10, kwonly1='kw1') # Ok.
Example:
To define a function like above, do:
@positional(1)
def fn(pos1, kwonly1=None, kwonly2=None):
...
If no default value is provided to a keyword argument, it
becomes a required keyword argument:
@positional(0)
def fn(required_kw):
...
This must be called with the keyword parameter:
fn() # Raises exception.
fn(10) # Raises exception.
fn(required_kw=10) # Ok.
When defining instance or class methods always remember to account for
'self' and 'cls':
class MyClass(object):
@positional(2)
def my_method(self, pos1, kwonly1=None):
...
@classmethod
@positional(2)
def my_method(cls, pos1, kwonly1=None):
...
One can omit the argument to 'positional' altogether, and then no
arguments with default values may be passed positionally. This
would be equivalent to placing a '*' before the first argument
with a default value in Python 3. If there are no arguments with
default values, and no argument is given to 'positional', an error
is raised.
@positional
def fn(arg1, arg2, required_kw1=None, required_kw2=0):
...
fn(1, 3, 5) # Raises exception.
fn(1, 3) # Ok.
fn(1, 3, required_kw1=5) # Ok.
Args:
      max_positional_args: Maximum number of positional arguments. All
          parameters after this index must be keyword only.
Returns:
      A decorator that prevents arguments after max_positional_args from
      being used as positional parameters.
Raises:
TypeError if a keyword-only argument is provided as a positional
parameter.
ValueError if no maximum number of arguments is provided and the function
has no arguments with default values.
"""
def positional_decorator(wrapped):
@functools.wraps(wrapped)
def positional_wrapper(*args, **kwargs):
if len(args) > max_positional_args:
plural_s = ''
if max_positional_args != 1:
plural_s = 's'
raise TypeError('%s() takes at most %d positional argument%s '
'(%d given)' % (wrapped.__name__,
max_positional_args,
plural_s, len(args)))
return wrapped(*args, **kwargs)
return positional_wrapper
if isinstance(max_positional_args, six.integer_types):
return positional_decorator
else:
args, _, _, defaults = inspect.getargspec(max_positional_args)
if defaults is None:
raise ValueError(
'Functions with no keyword arguments must specify '
'max_positional_args')
return positional(len(args) - len(defaults))(max_positional_args)
@positional(1)
def get_package_for_module(module):
"""Get package name for a module.
Helper calculates the package name of a module.
Args:
module: Module to get name for. If module is a string, try to find
module in sys.modules.
Returns:
      If module contains a 'package' attribute, that value is used as the
      package name. Otherwise, if module is not '__main__', its __name__ is
      used. Otherwise, the base name of the module's file is used. None is
      returned only when a module name string cannot be found in sys.modules.
"""
if isinstance(module, six.string_types):
try:
module = sys.modules[module]
except KeyError:
return None
try:
return six.text_type(module.package)
except AttributeError:
if module.__name__ == '__main__':
try:
file_name = module.__file__
except AttributeError:
pass
else:
base_name = os.path.basename(file_name)
split_name = os.path.splitext(base_name)
if len(split_name) == 1:
return six.text_type(base_name)
else:
return u'.'.join(split_name[:-1])
return six.text_type(module.__name__)
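# A hedged usage sketch for get_package_for_module; 'six' is used only as an
# example of a module already present in sys.modules:
#
#   >>> import sys
#   >>> get_package_for_module(sys.modules['six'])
#   u'six'
#   >>> get_package_for_module('no.such.module') is None
#   True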
def total_seconds(offset):
"""Backport of offset.total_seconds() from python 2.7+."""
seconds = offset.days * 24 * 60 * 60 + offset.seconds
microseconds = seconds * 10**6 + offset.microseconds
return microseconds / (10**6 * 1.0)
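# Worked example of the arithmetic above (illustrative):
#
#   >>> total_seconds(datetime.timedelta(days=1, seconds=30, microseconds=500000))
#   86430.5
#
# 1 day is 86400 seconds; 86400 + 30 plus the 0.5 s of microseconds is 86430.5.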
class TimeZoneOffset(datetime.tzinfo):
"""Time zone information as encoded/decoded for DateTimeFields."""
def __init__(self, offset):
"""Initialize a time zone offset.
Args:
offset: Integer or timedelta time zone offset, in minutes from UTC.
This can be negative.
"""
super(TimeZoneOffset, self).__init__()
if isinstance(offset, datetime.timedelta):
offset = total_seconds(offset) / 60
self.__offset = offset
def utcoffset(self, _):
"""Get the a timedelta with the time zone's offset from UTC.
Returns:
The time zone offset from UTC, as a timedelta.
"""
return datetime.timedelta(minutes=self.__offset)
def dst(self, _):
"""Get the daylight savings time offset.
The formats that ProtoRPC uses to encode/decode time zone
information don't contain any information about daylight
savings time. So this always returns a timedelta of 0.
Returns:
A timedelta of 0.
"""
return datetime.timedelta(0)
def decode_datetime(encoded_datetime):
"""Decode a DateTimeField parameter from a string to a python datetime.
Args:
encoded_datetime: A string in RFC 3339 format.
Returns:
A datetime object with the date and time specified in encoded_datetime.
Raises:
ValueError: If the string is not in a recognized format.
"""
# Check if the string includes a time zone offset. Break out the
# part that doesn't include time zone info. Convert to uppercase
# because all our comparisons should be case-insensitive.
time_zone_match = _TIME_ZONE_RE.search(encoded_datetime)
if time_zone_match:
time_string = encoded_datetime[:time_zone_match.start(1)].upper()
else:
time_string = encoded_datetime.upper()
if '.' in time_string:
format_string = '%Y-%m-%dT%H:%M:%S.%f'
else:
format_string = '%Y-%m-%dT%H:%M:%S'
decoded_datetime = datetime.datetime.strptime(time_string, format_string)
if not time_zone_match:
return decoded_datetime
# Time zone info was included in the parameter. Add a tzinfo
# object to the datetime. Datetimes can't be changed after they're
# created, so we'll need to create a new one.
if time_zone_match.group('z'):
offset_minutes = 0
else:
sign = time_zone_match.group('sign')
hours, minutes = [int(value) for value in
time_zone_match.group('hours', 'minutes')]
offset_minutes = hours * 60 + minutes
if sign == '-':
offset_minutes *= -1
return datetime.datetime(decoded_datetime.year,
decoded_datetime.month,
decoded_datetime.day,
decoded_datetime.hour,
decoded_datetime.minute,
decoded_datetime.second,
decoded_datetime.microsecond,
TimeZoneOffset(offset_minutes))
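# A hedged usage sketch tying decode_datetime and TimeZoneOffset together
# (timestamps are illustrative):
#
#   >>> decode_datetime('2012-09-30T15:31:50.262-05:00').utcoffset()
#   datetime.timedelta(-1, 68400)
#   >>> decode_datetime('2012-09-30T15:31:50Z').utcoffset()
#   datetime.timedelta(0)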
| bsd-3-clause |
jjmleiro/hue | desktop/core/ext-py/boto-2.38.0/boto/rds/parametergroup.py | 170 | 7037 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class ParameterGroup(dict):
def __init__(self, connection=None):
dict.__init__(self)
self.connection = connection
self.name = None
self.description = None
self.engine = None
self._current_param = None
def __repr__(self):
return 'ParameterGroup:%s' % self.name
def startElement(self, name, attrs, connection):
if name == 'Parameter':
if self._current_param:
self[self._current_param.name] = self._current_param
self._current_param = Parameter(self)
return self._current_param
def endElement(self, name, value, connection):
if name == 'DBParameterGroupName':
self.name = value
elif name == 'Description':
self.description = value
elif name == 'Engine':
self.engine = value
else:
setattr(self, name, value)
def modifiable(self):
mod = []
for key in self:
p = self[key]
if p.is_modifiable:
mod.append(p)
return mod
def get_params(self):
pg = self.connection.get_all_dbparameters(self.name)
self.update(pg)
    def add_param(self, name, value, apply_method):
        param = Parameter()
        param.name = name
        param.value = value
        param.apply_method = apply_method
        # ParameterGroup is a dict keyed by parameter name; there is no
        # self.params list, so store the new Parameter directly.
        self[name] = param
class Parameter(object):
"""
Represents a RDS Parameter
"""
ValidTypes = {'integer' : int,
'string' : str,
'boolean' : bool}
ValidSources = ['user', 'system', 'engine-default']
ValidApplyTypes = ['static', 'dynamic']
ValidApplyMethods = ['immediate', 'pending-reboot']
def __init__(self, group=None, name=None):
self.group = group
self.name = name
self._value = None
self.type = 'string'
self.source = None
self.is_modifiable = True
self.description = None
        self.apply_method = None
        self.apply_type = None
self.allowed_values = None
def __repr__(self):
return 'Parameter:%s' % self.name
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'ParameterName':
self.name = value
elif name == 'ParameterValue':
self._value = value
elif name == 'DataType':
if value in self.ValidTypes:
self.type = value
elif name == 'Source':
if value in self.ValidSources:
self.source = value
elif name == 'IsModifiable':
if value.lower() == 'true':
self.is_modifiable = True
else:
self.is_modifiable = False
elif name == 'Description':
self.description = value
elif name == 'ApplyType':
if value in self.ValidApplyTypes:
self.apply_type = value
elif name == 'AllowedValues':
self.allowed_values = value
else:
setattr(self, name, value)
def merge(self, d, i):
prefix = 'Parameters.member.%d.' % i
if self.name:
d[prefix+'ParameterName'] = self.name
if self._value is not None:
d[prefix+'ParameterValue'] = self._value
        # apply_method (not apply_type) is the attribute written below, and it
        # is the one guaranteed to exist on every Parameter instance.
        if self.apply_method:
            d[prefix+'ApplyMethod'] = self.apply_method
def _set_string_value(self, value):
if not isinstance(value, basestring):
raise ValueError('value must be of type str')
if self.allowed_values:
choices = self.allowed_values.split(',')
if value not in choices:
raise ValueError('value must be in %s' % self.allowed_values)
self._value = value
def _set_integer_value(self, value):
if isinstance(value, basestring):
value = int(value)
if isinstance(value, int) or isinstance(value, long):
if self.allowed_values:
min, max = self.allowed_values.split('-')
if value < int(min) or value > int(max):
raise ValueError('range is %s' % self.allowed_values)
self._value = value
else:
raise ValueError('value must be integer')
def _set_boolean_value(self, value):
if isinstance(value, bool):
self._value = value
elif isinstance(value, basestring):
if value.lower() == 'true':
self._value = True
else:
self._value = False
else:
raise ValueError('value must be boolean')
def set_value(self, value):
if self.type == 'string':
self._set_string_value(value)
elif self.type == 'integer':
self._set_integer_value(value)
elif self.type == 'boolean':
self._set_boolean_value(value)
else:
raise TypeError('unknown type (%s)' % self.type)
def get_value(self):
if self._value is None:
return self._value
if self.type == 'string':
return self._value
elif self.type == 'integer':
if not isinstance(self._value, int) and not isinstance(self._value, long):
self._set_integer_value(self._value)
return self._value
elif self.type == 'boolean':
if not isinstance(self._value, bool):
self._set_boolean_value(self._value)
return self._value
else:
raise TypeError('unknown type (%s)' % self.type)
value = property(get_value, set_value, 'The value of the parameter')
def apply(self, immediate=False):
if immediate:
self.apply_method = 'immediate'
else:
self.apply_method = 'pending-reboot'
self.group.connection.modify_parameter_group(self.group.name, [self])
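# A hedged usage sketch (no RDS connection needed) of the typed-value
# plumbing above; the parameter name and range are hypothetical:
#
#   >>> p = Parameter(name='max_connections')
#   >>> p.type = 'integer'
#   >>> p.allowed_values = '1-16000'
#   >>> p.value = '150'        # coerced by _set_integer_value
#   >>> p.value
#   150
#   >>> d = {}
#   >>> p.merge(d, 1)
#   >>> d['Parameters.member.1.ParameterValue']
#   150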
| apache-2.0 |
Bysmyyr/chromium-crosswalk | build/android/gyp/write_build_config.py | 1 | 18096 | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Writes a build_config file.
The build_config file for a target is a json file containing information about
how to build that target based on the target's dependencies. This includes
things like: the javac classpath, the list of android resources dependencies,
etc. It also includes the information needed to create the build_config for
other targets that depend on that one.
Android build scripts should not refer to the build_config directly, and the
build specification should instead pass information in using the special
file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
of values in a json dict in a file and looks like this:
--python-arg=@FileArg(build_config_path:javac:classpath)
Note: If paths to input files are passed in this way, it is important that:
1. inputs/deps of the action ensure that the files are available the first
time the action runs.
2. Either (a) or (b)
a. inputs/deps ensure that the action runs whenever one of the files changes
b. the files are added to the action's depfile
"""
import itertools
import optparse
import os
import sys
import xml.dom.minidom
from util import build_utils
from util import md5_check
import write_ordered_libraries
class AndroidManifest(object):
def __init__(self, path):
self.path = path
dom = xml.dom.minidom.parse(path)
manifests = dom.getElementsByTagName('manifest')
assert len(manifests) == 1
self.manifest = manifests[0]
def GetInstrumentation(self):
instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
if len(instrumentation_els) == 0:
return None
if len(instrumentation_els) != 1:
raise Exception(
'More than one <instrumentation> element found in %s' % self.path)
return instrumentation_els[0]
def CheckInstrumentation(self, expected_package):
instr = self.GetInstrumentation()
if not instr:
raise Exception('No <instrumentation> elements found in %s' % self.path)
instrumented_package = instr.getAttributeNS(
'http://schemas.android.com/apk/res/android', 'targetPackage')
if instrumented_package != expected_package:
raise Exception(
'Wrong instrumented package. Expected %s, got %s'
% (expected_package, instrumented_package))
def GetPackageName(self):
return self.manifest.getAttribute('package')
dep_config_cache = {}
def GetDepConfig(path):
if not path in dep_config_cache:
dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
return dep_config_cache[path]
def DepsOfType(wanted_type, configs):
return [c for c in configs if c['type'] == wanted_type]
def GetAllDepsConfigsInOrder(deps_config_paths):
def GetDeps(path):
return set(GetDepConfig(path)['deps_configs'])
return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
class Deps(object):
def __init__(self, direct_deps_config_paths):
self.all_deps_config_paths = GetAllDepsConfigsInOrder(
direct_deps_config_paths)
self.direct_deps_configs = [
GetDepConfig(p) for p in direct_deps_config_paths]
self.all_deps_configs = [
GetDepConfig(p) for p in self.all_deps_config_paths]
def All(self, wanted_type=None):
    if wanted_type is None:
return self.all_deps_configs
return DepsOfType(wanted_type, self.all_deps_configs)
def Direct(self, wanted_type=None):
if wanted_type is None:
return self.direct_deps_configs
return DepsOfType(wanted_type, self.direct_deps_configs)
def AllConfigPaths(self):
return self.all_deps_config_paths
def _MergeAssets(all_assets):
"""Merges all assets from the given deps.
Returns:
A tuple of lists: (compressed, uncompressed)
Each tuple entry is a list of "srcPath:zipPath". srcPath is the path of the
asset to add, and zipPath is the location within the zip (excluding assets/
prefix)
"""
compressed = {}
uncompressed = {}
for asset_dep in all_assets:
entry = asset_dep['assets']
disable_compression = entry.get('disable_compression', False)
dest_map = uncompressed if disable_compression else compressed
other_map = compressed if disable_compression else uncompressed
outputs = entry.get('outputs', [])
for src, dest in itertools.izip_longest(entry['sources'], outputs):
if not dest:
dest = os.path.basename(src)
# Merge so that each path shows up in only one of the lists, and that
# deps of the same target override previous ones.
other_map.pop(dest, 0)
dest_map[dest] = src
def create_list(asset_map):
ret = ['%s:%s' % (src, dest) for dest, src in asset_map.iteritems()]
# Sort to ensure deterministic ordering.
ret.sort()
return ret
return create_list(compressed), create_list(uncompressed)
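# A worked sketch of the merge semantics above (asset paths hypothetical):
# given an earlier dep with {'sources': ['art/logo.png']} (compressed) and a
# later dep with the same source but disable_compression=True, the later dep
# wins, 'logo.png' is popped from the compressed map, and the result is
#   ([], ['art/logo.png:logo.png'])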
def main(argv):
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
parser.add_option('--build-config', help='Path to build_config output.')
parser.add_option(
'--type',
help='Type of this target (e.g. android_library).')
parser.add_option(
'--possible-deps-configs',
help='List of paths for dependency\'s build_config files. Some '
'dependencies may not write build_config files. Missing build_config '
'files are handled differently based on the type of this target.')
# android_resources options
parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
parser.add_option('--r-text', help='Path to target\'s R.txt file.')
parser.add_option('--package-name',
help='Java package name for these resources.')
parser.add_option('--android-manifest', help='Path to android manifest.')
# android_assets options
parser.add_option('--asset-sources', help='List of asset sources.')
parser.add_option('--asset-renaming-sources',
help='List of asset sources with custom destinations.')
parser.add_option('--asset-renaming-destinations',
help='List of asset custom destinations.')
parser.add_option('--disable-asset-compression', action='store_true',
help='Whether to disable asset compression.')
# java library options
parser.add_option('--jar-path', help='Path to target\'s jar output.')
parser.add_option('--supports-android', action='store_true',
help='Whether this library supports running on the Android platform.')
parser.add_option('--requires-android', action='store_true',
help='Whether this library requires running on the Android platform.')
parser.add_option('--bypass-platform-checks', action='store_true',
help='Bypass checks for support/require Android platform.')
# android library options
parser.add_option('--dex-path', help='Path to target\'s dex output.')
# native library options
parser.add_option('--native-libs', help='List of top-level native libs.')
parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
parser.add_option('--tested-apk-config',
help='Path to the build config of the tested apk (for an instrumentation '
'test apk).')
parser.add_option('--proguard-enabled', action='store_true',
help='Whether proguard is enabled for this apk.')
parser.add_option('--proguard-info',
help='Path to the proguard .info output for this apk.')
options, args = parser.parse_args(argv)
if args:
parser.error('No positional arguments should be given.')
required_options_map = {
'java_library': ['build_config', 'jar_path'],
'android_assets': ['build_config'],
'android_resources': ['build_config', 'resources_zip'],
'android_apk': ['build_config', 'jar_path', 'dex_path', 'resources_zip'],
'deps_dex': ['build_config', 'dex_path'],
'resource_rewriter': ['build_config']
}
required_options = required_options_map.get(options.type)
if not required_options:
raise Exception('Unknown type: <%s>' % options.type)
if options.native_libs:
required_options.append('readelf_path')
build_utils.CheckOptions(options, parser, required_options)
if options.type == 'java_library':
if options.supports_android and not options.dex_path:
raise Exception('java_library that supports Android requires a dex path.')
if options.requires_android and not options.supports_android:
raise Exception(
'--supports-android is required when using --requires-android')
possible_deps_config_paths = build_utils.ParseGypList(
options.possible_deps_configs)
allow_unknown_deps = (options.type in
('android_apk', 'android_assets', 'android_resources'))
unknown_deps = [
c for c in possible_deps_config_paths if not os.path.exists(c)]
if unknown_deps and not allow_unknown_deps:
raise Exception('Unknown deps: ' + str(unknown_deps))
direct_deps_config_paths = [
c for c in possible_deps_config_paths if not c in unknown_deps]
deps = Deps(direct_deps_config_paths)
direct_library_deps = deps.Direct('java_library')
all_library_deps = deps.All('java_library')
direct_resources_deps = deps.Direct('android_resources')
all_resources_deps = deps.All('android_resources')
# Resources should be ordered with the highest-level dependency first so that
# overrides are done correctly.
all_resources_deps.reverse()
if options.type == 'android_apk' and options.tested_apk_config:
tested_apk_deps = Deps([options.tested_apk_config])
tested_apk_resources_deps = tested_apk_deps.All('android_resources')
all_resources_deps = [
d for d in all_resources_deps if not d in tested_apk_resources_deps]
# Initialize some common config.
config = {
'deps_info': {
'name': os.path.basename(options.build_config),
'path': options.build_config,
'type': options.type,
'deps_configs': direct_deps_config_paths,
}
}
deps_info = config['deps_info']
if options.type == 'java_library' and not options.bypass_platform_checks:
deps_info['requires_android'] = options.requires_android
deps_info['supports_android'] = options.supports_android
deps_require_android = (all_resources_deps +
[d['name'] for d in all_library_deps if d['requires_android']])
deps_not_support_android = (
[d['name'] for d in all_library_deps if not d['supports_android']])
if deps_require_android and not options.requires_android:
raise Exception('Some deps require building for the Android platform: ' +
str(deps_require_android))
if deps_not_support_android and options.supports_android:
raise Exception('Not all deps support the Android platform: ' +
str(deps_not_support_android))
if options.type in ['java_library', 'android_apk']:
javac_classpath = [c['jar_path'] for c in direct_library_deps]
java_full_classpath = [c['jar_path'] for c in all_library_deps]
deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
deps_info['jar_path'] = options.jar_path
if options.type == 'android_apk' or options.supports_android:
deps_info['dex_path'] = options.dex_path
config['javac'] = {
'classpath': javac_classpath,
}
config['java'] = {
'full_classpath': java_full_classpath
}
if options.type == 'java_library':
# Only resources might have srcjars (normal srcjar targets are listed in
# srcjar_deps). A resource's srcjar contains the R.java file for those
# resources, and (like Android's default build system) we allow a library to
# refer to the resources in any of its dependents.
config['javac']['srcjars'] = [
c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
if options.type == 'android_apk':
# Apks will get their resources srcjar explicitly passed to the java step.
config['javac']['srcjars'] = []
if options.type == 'android_assets':
all_asset_sources = []
if options.asset_renaming_sources:
all_asset_sources.extend(
build_utils.ParseGypList(options.asset_renaming_sources))
if options.asset_sources:
all_asset_sources.extend(build_utils.ParseGypList(options.asset_sources))
deps_info['assets'] = {
'sources': all_asset_sources
}
if options.asset_renaming_destinations:
deps_info['assets']['outputs'] = (
build_utils.ParseGypList(options.asset_renaming_destinations))
if options.disable_asset_compression:
deps_info['assets']['disable_compression'] = True
if options.type == 'android_resources':
deps_info['resources_zip'] = options.resources_zip
if options.srcjar:
deps_info['srcjar'] = options.srcjar
if options.android_manifest:
manifest = AndroidManifest(options.android_manifest)
deps_info['package_name'] = manifest.GetPackageName()
if options.package_name:
deps_info['package_name'] = options.package_name
if options.r_text:
deps_info['r_text'] = options.r_text
  if options.type in ('android_resources', 'android_apk', 'resource_rewriter'):
config['resources'] = {}
config['resources']['dependency_zips'] = [
c['resources_zip'] for c in all_resources_deps]
config['resources']['extra_package_names'] = []
config['resources']['extra_r_text_files'] = []
if options.type == 'android_apk' or options.type == 'resource_rewriter':
config['resources']['extra_package_names'] = [
c['package_name'] for c in all_resources_deps if 'package_name' in c]
config['resources']['extra_r_text_files'] = [
c['r_text'] for c in all_resources_deps if 'r_text' in c]
if options.type in ['android_apk', 'deps_dex']:
deps_dex_files = [c['dex_path'] for c in all_library_deps]
proguard_enabled = options.proguard_enabled
if options.type == 'android_apk':
deps_info['proguard_enabled'] = proguard_enabled
if proguard_enabled:
deps_info['proguard_info'] = options.proguard_info
config['proguard'] = {}
proguard_config = config['proguard']
proguard_config['input_paths'] = [options.jar_path] + java_full_classpath
proguard_config['tested_apk_info'] = ''
# An instrumentation test apk should exclude the dex files that are in the apk
# under test.
if options.type == 'android_apk' and options.tested_apk_config:
tested_apk_deps = Deps([options.tested_apk_config])
tested_apk_library_deps = tested_apk_deps.All('java_library')
tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
deps_dex_files = [
p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
tested_apk_config = GetDepConfig(options.tested_apk_config)
expected_tested_package = tested_apk_config['package_name']
AndroidManifest(options.android_manifest).CheckInstrumentation(
expected_tested_package)
if tested_apk_config['proguard_enabled']:
assert proguard_enabled, ('proguard must be enabled for instrumentation'
' apks if it\'s enabled for the tested apk')
proguard_config['tested_apk_info'] = tested_apk_config['proguard_info']
# Dependencies for the final dex file of an apk or a 'deps_dex'.
if options.type in ['android_apk', 'deps_dex']:
config['final_dex'] = {}
dex_config = config['final_dex']
if proguard_enabled:
# When proguard is enabled, the proguarded jar contains the code for all
# of the dependencies.
deps_dex_files = []
dex_config['dependency_dex_files'] = deps_dex_files
if options.type == 'android_apk':
config['dist_jar'] = {
'dependency_jars': [
c['jar_path'] for c in all_library_deps
]
}
manifest = AndroidManifest(options.android_manifest)
deps_info['package_name'] = manifest.GetPackageName()
if not options.tested_apk_config and manifest.GetInstrumentation():
# This must then have instrumentation only for itself.
manifest.CheckInstrumentation(manifest.GetPackageName())
library_paths = []
java_libraries_list_holder = [None]
libraries = build_utils.ParseGypList(options.native_libs or '[]')
if libraries:
def recompute_ordered_libraries():
libraries_dir = os.path.dirname(libraries[0])
write_ordered_libraries.SetReadelfPath(options.readelf_path)
write_ordered_libraries.SetLibraryDirs([libraries_dir])
all_deps = (
write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
libraries))
# Create a java literal array with the "base" library names:
# e.g. libfoo.so -> foo
java_libraries_list_holder[0] = ('{%s}' % ','.join(
['"%s"' % s[3:-3] for s in all_deps]))
library_paths.extend(
write_ordered_libraries.FullLibraryPath(x) for x in all_deps)
# This step takes about 600ms on a z620 for chrome_apk, so it's worth
# caching.
md5_check.CallAndRecordIfStale(
recompute_ordered_libraries,
record_path=options.build_config + '.nativelibs.md5.stamp',
input_paths=libraries,
output_paths=[options.build_config])
if not library_paths:
prev_config = build_utils.ReadJson(options.build_config)
java_libraries_list_holder[0] = (
prev_config['native']['java_libraries_list'])
library_paths.extend(prev_config['native']['libraries'])
config['native'] = {
'libraries': library_paths,
'java_libraries_list': java_libraries_list_holder[0],
}
config['assets'], config['uncompressed_assets'] = (
_MergeAssets(deps.All('android_assets')))
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
if options.depfile:
build_utils.WriteDepfile(
options.depfile,
deps.AllConfigPaths() + build_utils.GetPythonDependencies())
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |
AudioGod/DTS-Eagle-Integration_CAF-Android-kernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
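# A minimal illustrative use of the flag helpers above; the event and field
# names are hypothetical:
#
#   define_flag_field('irq:softirq_entry', 'vec', '|')
#   define_flag_value('irq:softirq_entry', 'vec', 1, 'HI')
#   define_flag_value('irq:softirq_entry', 'vec', 2, 'TIMER')
#   flag_str('irq:softirq_entry', 'vec', 3)    # -> "HI | TIMER"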
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
    keys = trace_flags.keys()
    # Sort for deterministic output order, matching flag_str() above.
    keys.sort()
    for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
def taskState(state):
states = {
0 : "R",
1 : "S",
2 : "D",
64: "DEAD"
}
if state not in states:
return "Unknown"
return states[state]
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
vrv/tensorflow | tensorflow/contrib/slim/python/slim/learning_test.py | 28 | 37001 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.learning."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
from numpy import testing as np_testing
from tensorflow.contrib.framework.python.ops import variables as variables_lib2
from tensorflow.contrib.layers.python.layers import layers
from tensorflow.contrib.losses.python.losses import loss_ops
from tensorflow.contrib.slim.python.slim import learning
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
from tensorflow.python.summary import summary
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import input as input_lib
from tensorflow.python.training import saver as saver_lib
class ClipGradientNormsTest(test.TestCase):
def clip_values(self, arr):
norm = np.sqrt(np.sum(arr**2))
if norm > self._max_norm:
return self._max_norm * arr / np.sqrt(np.sum(arr**2))
return arr
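  # Worked example of the clipping above: for _grad_vec [1., 2., 3.] the norm
  # is sqrt(14) ~= 3.742, which exceeds _max_norm 1.0, so clip_values returns
  # approximately [0.267, 0.535, 0.802] (the vector scaled by 1.0 / 3.742).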
def setUp(self):
np.random.seed(0)
self._max_norm = 1.0
self._grad_vec = np.array([1., 2., 3.])
self._clipped_grad_vec = self.clip_values(self._grad_vec)
self._zero_vec = np.zeros(self._grad_vec.size)
def testOrdinaryGradIsClippedCorrectly(self):
gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
variable = variables_lib.Variable(self._zero_vec, dtype=dtypes.float32)
gradients_to_variables = (gradient, variable)
[gradients_to_variables] = learning.clip_gradient_norms(
[gradients_to_variables], self._max_norm)
# Ensure the variable passed through.
self.assertEqual(gradients_to_variables[1], variable)
with self.test_session() as sess:
actual_gradient = sess.run(gradients_to_variables[0])
np_testing.assert_almost_equal(actual_gradient, self._clipped_grad_vec)
def testNoneGradPassesThroughCorrectly(self):
gradient = None
variable = variables_lib.Variable(self._zero_vec, dtype=dtypes.float32)
gradients_to_variables = (gradient, variable)
[gradients_to_variables] = learning.clip_gradient_norms(
[gradients_to_variables], self._max_norm)
self.assertEqual(gradients_to_variables[0], None)
self.assertEqual(gradients_to_variables[1], variable)
def testIndexedSlicesGradIsClippedCorrectly(self):
sparse_grad_indices = np.array([0, 1, 4])
sparse_grad_dense_shape = [self._grad_vec.size]
values = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
indices = constant_op.constant(sparse_grad_indices, dtype=dtypes.int32)
dense_shape = constant_op.constant(
sparse_grad_dense_shape, dtype=dtypes.int32)
gradient = ops.IndexedSlices(values, indices, dense_shape)
variable = variables_lib.Variable(self._zero_vec, dtype=dtypes.float32)
gradients_to_variables = (gradient, variable)
gradients_to_variables = learning.clip_gradient_norms(
[gradients_to_variables], self._max_norm)[0]
# Ensure the built IndexedSlice has the right form.
self.assertEqual(gradients_to_variables[1], variable)
self.assertEqual(gradients_to_variables[0].indices, indices)
self.assertEqual(gradients_to_variables[0].dense_shape, dense_shape)
with session.Session() as sess:
actual_gradient = sess.run(gradients_to_variables[0].values)
np_testing.assert_almost_equal(actual_gradient, self._clipped_grad_vec)
class MultiplyGradientsTest(test.TestCase):
def setUp(self):
np.random.seed(0)
self._multiplier = 3.7
self._grad_vec = np.array([1., 2., 3.])
self._multiplied_grad_vec = np.multiply(self._grad_vec, self._multiplier)
def testNonListGradsRaisesError(self):
gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
variable = variables_lib.Variable(array_ops.zeros_like(gradient))
grad_to_var = (gradient, variable)
gradient_multipliers = {variable: self._multiplier}
with self.assertRaises(ValueError):
learning.multiply_gradients(grad_to_var, gradient_multipliers)
def testEmptyMultiplesRaisesError(self):
gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
variable = variables_lib.Variable(array_ops.zeros_like(gradient))
grad_to_var = (gradient, variable)
with self.assertRaises(ValueError):
learning.multiply_gradients([grad_to_var], {})
def testNonDictMultiplierRaisesError(self):
gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
variable = variables_lib.Variable(array_ops.zeros_like(gradient))
grad_to_var = (gradient, variable)
with self.assertRaises(ValueError):
learning.multiply_gradients([grad_to_var], 3)
def testMultipleOfNoneGradRaisesError(self):
gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
variable = variables_lib.Variable(array_ops.zeros_like(gradient))
grad_to_var = (None, variable)
gradient_multipliers = {variable: self._multiplier}
with self.assertRaises(ValueError):
learning.multiply_gradients(grad_to_var, gradient_multipliers)
def testMultipleGradientsWithVariables(self):
gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
variable = variables_lib.Variable(array_ops.zeros_like(gradient))
grad_to_var = (gradient, variable)
gradient_multipliers = {variable: self._multiplier}
[grad_to_var] = learning.multiply_gradients([grad_to_var],
gradient_multipliers)
# Ensure the variable passed through.
self.assertEqual(grad_to_var[1], variable)
with self.test_session() as sess:
actual_gradient = sess.run(grad_to_var[0])
np_testing.assert_almost_equal(actual_gradient, self._multiplied_grad_vec,
5)
def testIndexedSlicesGradIsMultiplied(self):
values = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
indices = constant_op.constant([0, 1, 2], dtype=dtypes.int32)
dense_shape = constant_op.constant(
[self._grad_vec.size], dtype=dtypes.int32)
gradient = ops.IndexedSlices(values, indices, dense_shape)
variable = variables_lib.Variable(array_ops.zeros((1, 3)))
grad_to_var = (gradient, variable)
gradient_multipliers = {variable: self._multiplier}
[grad_to_var] = learning.multiply_gradients([grad_to_var],
gradient_multipliers)
# Ensure the built IndexedSlice has the right form.
self.assertEqual(grad_to_var[1], variable)
self.assertEqual(grad_to_var[0].indices, indices)
self.assertEqual(grad_to_var[0].dense_shape, dense_shape)
with self.test_session() as sess:
actual_gradient = sess.run(grad_to_var[0].values)
np_testing.assert_almost_equal(actual_gradient, self._multiplied_grad_vec,
5)
def testTensorMultiplierOfGradient(self):
gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
variable = variables_lib.Variable(array_ops.zeros_like(gradient))
multiplier_flag = variables_lib.Variable(True)
tensor_multiplier = array_ops.where(multiplier_flag,
self._multiplier,
1.0)
grad_to_var = (gradient, variable)
gradient_multipliers = {variable: tensor_multiplier}
[grad_to_var] = learning.multiply_gradients([grad_to_var],
gradient_multipliers)
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
gradient_true_flag = sess.run(grad_to_var[0])
sess.run(multiplier_flag.assign(False))
gradient_false_flag = sess.run(grad_to_var[0])
np_testing.assert_almost_equal(gradient_true_flag,
self._multiplied_grad_vec,
5)
np_testing.assert_almost_equal(gradient_false_flag,
self._grad_vec,
5)
def LogisticClassifier(inputs):
return layers.fully_connected(inputs, 1, activation_fn=math_ops.sigmoid)
def BatchNormClassifier(inputs):
inputs = layers.batch_norm(inputs, decay=0.1)
return layers.fully_connected(inputs, 1, activation_fn=math_ops.sigmoid)
class TrainBNClassifierTest(test.TestCase):
def setUp(self):
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def testTrainWithNoInitAssignCanAchieveZeroLoss(self):
logdir = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs')
g = ops.Graph()
with g.as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = BatchNormClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op, logdir, number_of_steps=300, log_every_n_steps=10)
self.assertLess(loss, .1)
class CreateTrainOpTest(test.TestCase):
def setUp(self):
# Create an easy training set:
np.random.seed(0)
self._inputs = np.random.rand(16, 4).astype(np.float32)
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
def testUseUpdateOps(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
expected_mean = np.mean(self._inputs, axis=(0))
expected_var = np.var(self._inputs, axis=(0))
tf_predictions = BatchNormClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
moving_mean = variables_lib2.get_variables_by_name('moving_mean')[0]
moving_variance = variables_lib2.get_variables_by_name('moving_variance')[
0]
with session.Session() as sess:
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * 4)
self.assertAllClose(variance, [1] * 4)
for _ in range(10):
sess.run([train_op])
mean = moving_mean.eval()
variance = moving_variance.eval()
# After 10 updates with decay 0.1 moving_mean == expected_mean and
# moving_variance == expected_var.
self.assertAllClose(mean, expected_mean)
self.assertAllClose(variance, expected_var)
def testEmptyUpdateOps(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = BatchNormClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer, update_ops=[])
moving_mean = variables_lib2.get_variables_by_name('moving_mean')[0]
moving_variance = variables_lib2.get_variables_by_name('moving_variance')[
0]
with session.Session() as sess:
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * 4)
self.assertAllClose(variance, [1] * 4)
for _ in range(10):
sess.run([train_op])
mean = moving_mean.eval()
variance = moving_variance.eval()
# Since we skip update_ops the moving_vars are not updated.
self.assertAllClose(mean, [0] * 4)
self.assertAllClose(variance, [1] * 4)
def testUseGlobalStep(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = BatchNormClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
global_step = variables_lib2.get_or_create_global_step()
with session.Session() as sess:
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
for _ in range(10):
sess.run([train_op])
global_step = global_step.eval()
# After 10 updates global_step should be 10.
self.assertAllClose(global_step, 10)
def testNoneGlobalStep(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = BatchNormClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(
total_loss, optimizer, global_step=None)
global_step = variables_lib2.get_or_create_global_step()
with session.Session() as sess:
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
for _ in range(10):
sess.run([train_op])
global_step = global_step.eval()
        # Since train_op doesn't use global_step, it shouldn't change.
self.assertAllClose(global_step, 0)
def testRecordTrainOpInCollection(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
# Make sure the training op was recorded in the proper collection
self.assertTrue(train_op in ops.get_collection(ops.GraphKeys.TRAIN_OP))
class TrainTest(test.TestCase):
def setUp(self):
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def testTrainWithNonDefaultGraph(self):
logdir = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs')
g = ops.Graph()
with g.as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op, logdir, number_of_steps=300, log_every_n_steps=10, graph=g)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def testTrainWithNoneAsLogdir(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op, None, number_of_steps=300, log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def testTrainWithSessionConfig(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
session_config = config_pb2.ConfigProto(allow_soft_placement=True)
loss = learning.train(
train_op,
None,
number_of_steps=300,
log_every_n_steps=10,
session_config=session_config)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def testTrainWithTrace(self):
logdir = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs')
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
summary.scalar('total_loss', total_loss)
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op,
logdir,
number_of_steps=300,
log_every_n_steps=10,
trace_every_n_steps=100)
self.assertIsNotNone(loss)
for trace_step in [1, 101, 201]:
trace_filename = 'tf_trace-%d.json' % trace_step
self.assertTrue(os.path.isfile(os.path.join(logdir, trace_filename)))
def testTrainWithNoneAsLogdirWhenUsingSummariesRaisesError(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
summary.scalar('total_loss', total_loss)
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
summary_op = summary.merge_all()
with self.assertRaises(ValueError):
learning.train(
train_op, None, number_of_steps=300, summary_op=summary_op)
def testTrainWithNoneAsLogdirWhenUsingTraceRaisesError(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
with self.assertRaises(ValueError):
learning.train(
train_op, None, number_of_steps=300, trace_every_n_steps=10)
def testTrainWithNoneAsLogdirWhenUsingSaverRaisesError(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
saver = saver_lib.Saver()
with self.assertRaises(ValueError):
learning.train(
train_op, None, init_op=None, number_of_steps=300, saver=saver)
def testTrainWithNoneAsInitWhenUsingVarsRaisesError(self):
logdir = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs')
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
with self.assertRaises(RuntimeError):
learning.train(train_op, logdir, init_op=None, number_of_steps=300)
def testTrainWithNoInitAssignCanAchieveZeroLoss(self):
logdir = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs')
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op, logdir, number_of_steps=300, log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def testTrainWithLocalVariable(self):
logdir = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs')
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
local_multiplier = variables_lib2.local_variable(1.0)
tf_predictions = LogisticClassifier(tf_inputs) * local_multiplier
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op, logdir, number_of_steps=300, log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def testResumeTrainAchievesRoughlyTheSameLoss(self):
logdir = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs')
number_of_steps = [300, 301, 305]
for i in range(len(number_of_steps)):
with ops.Graph().as_default():
random_seed.set_random_seed(i)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op,
logdir,
number_of_steps=number_of_steps[i],
log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def create_train_op(self, learning_rate=1.0, gradient_multiplier=1.0):
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(
learning_rate=learning_rate)
if gradient_multiplier != 1.0:
variables = variables_lib.trainable_variables()
gradient_multipliers = {var: gradient_multiplier for var in variables}
else:
gradient_multipliers = None
return learning.create_train_op(
total_loss, optimizer, gradient_multipliers=gradient_multipliers)
def testTrainWithInitFromCheckpoint(self):
logdir1 = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs1')
logdir2 = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs2')
# First, train the model one step (make sure the error is high).
with ops.Graph().as_default():
random_seed.set_random_seed(0)
train_op = self.create_train_op()
loss = learning.train(train_op, logdir1, number_of_steps=1)
self.assertGreater(loss, .5)
# Next, train the model to convergence.
with ops.Graph().as_default():
random_seed.set_random_seed(1)
train_op = self.create_train_op()
loss = learning.train(
train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .02)
# Finally, advance the model a single step and validate that the loss is
# still low.
with ops.Graph().as_default():
random_seed.set_random_seed(2)
train_op = self.create_train_op()
model_variables = variables_lib.global_variables()
model_path = os.path.join(logdir1, 'model.ckpt-300')
init_op = variables_lib.global_variables_initializer()
op, init_feed_dict = variables_lib2.assign_from_checkpoint(
model_path, model_variables)
def InitAssignFn(sess):
sess.run(op, init_feed_dict)
loss = learning.train(
train_op,
logdir2,
number_of_steps=1,
init_op=init_op,
init_fn=InitAssignFn)
self.assertIsNotNone(loss)
self.assertLess(loss, .02)
def testTrainWithInitFromFn(self):
logdir1 = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs1')
logdir2 = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs2')
# First, train the model one step (make sure the error is high).
with ops.Graph().as_default():
random_seed.set_random_seed(0)
train_op = self.create_train_op()
loss = learning.train(train_op, logdir1, number_of_steps=1)
self.assertGreater(loss, .5)
# Next, train the model to convergence.
with ops.Graph().as_default():
random_seed.set_random_seed(1)
train_op = self.create_train_op()
loss = learning.train(
train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
# Finally, advance the model a single step and validate that the loss is
# still low.
with ops.Graph().as_default():
random_seed.set_random_seed(2)
train_op = self.create_train_op()
model_variables = variables_lib.global_variables()
model_path = os.path.join(logdir1, 'model.ckpt-300')
saver = saver_lib.Saver(model_variables)
def RestoreFn(sess):
saver.restore(sess, model_path)
loss = learning.train(
train_op, logdir2, number_of_steps=1, init_fn=RestoreFn)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def ModelLoss(self):
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
return loss_ops.get_total_loss()
def testTrainAllVarsHasLowerLossThanTrainSubsetOfVars(self):
logdir1 = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs1')
# First, train only the weights of the model.
with ops.Graph().as_default():
random_seed.set_random_seed(0)
total_loss = self.ModelLoss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
weights = variables_lib2.get_variables_by_name('weights')
train_op = learning.create_train_op(
total_loss, optimizer, variables_to_train=weights)
loss = learning.train(
train_op, logdir1, number_of_steps=200, log_every_n_steps=10)
self.assertGreater(loss, .015)
self.assertLess(loss, .05)
# Next, train the biases of the model.
with ops.Graph().as_default():
random_seed.set_random_seed(1)
total_loss = self.ModelLoss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
biases = variables_lib2.get_variables_by_name('biases')
train_op = learning.create_train_op(
total_loss, optimizer, variables_to_train=biases)
loss = learning.train(
train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
self.assertGreater(loss, .015)
self.assertLess(loss, .05)
# Finally, train both weights and bias to get lower loss.
with ops.Graph().as_default():
random_seed.set_random_seed(2)
total_loss = self.ModelLoss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(
train_op, logdir1, number_of_steps=400, log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
def testTrainingSubsetsOfVariablesOnlyUpdatesThoseVariables(self):
# First, train only the weights of the model.
with ops.Graph().as_default():
random_seed.set_random_seed(0)
total_loss = self.ModelLoss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
weights, biases = variables_lib2.get_variables()
train_op = learning.create_train_op(total_loss, optimizer)
train_weights = learning.create_train_op(
total_loss, optimizer, variables_to_train=[weights])
train_biases = learning.create_train_op(
total_loss, optimizer, variables_to_train=[biases])
with session.Session() as sess:
# Initialize the variables.
sess.run(variables_lib.global_variables_initializer())
# Get the initial weights and biases values.
weights_values, biases_values = sess.run([weights, biases])
self.assertGreater(np.linalg.norm(weights_values), 0)
self.assertAlmostEqual(np.linalg.norm(biases_values), 0)
# Update weights and biases.
loss = sess.run(train_op)
self.assertGreater(loss, .5)
new_weights, new_biases = sess.run([weights, biases])
# Check that the weights and biases have been updated.
self.assertGreater(np.linalg.norm(weights_values - new_weights), 0)
self.assertGreater(np.linalg.norm(biases_values - new_biases), 0)
weights_values, biases_values = new_weights, new_biases
# Update only weights.
loss = sess.run(train_weights)
self.assertGreater(loss, .5)
new_weights, new_biases = sess.run([weights, biases])
# Check that the weights have been updated, but biases have not.
self.assertGreater(np.linalg.norm(weights_values - new_weights), 0)
self.assertAlmostEqual(np.linalg.norm(biases_values - new_biases), 0)
weights_values = new_weights
# Update only biases.
loss = sess.run(train_biases)
self.assertGreater(loss, .5)
new_weights, new_biases = sess.run([weights, biases])
# Check that the biases have been updated, but weights have not.
self.assertAlmostEqual(np.linalg.norm(weights_values - new_weights), 0)
self.assertGreater(np.linalg.norm(biases_values - new_biases), 0)
def testTrainWithAlteredGradients(self):
# Use the same learning rate but different gradient multipliers
# to train two models. Model with equivalently larger learning
# rate (i.e., learning_rate * gradient_multiplier) has smaller
# training loss.
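# With gradient multiplier m each update is w <- w - lr * (m * grad),
# i.e. an effective learning rate of lr * m, so multipliers [1., 1000.]
# at lr=0.001 behave like learning rates of 0.001 and 1.0.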
logdir1 = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs1')
logdir2 = os.path.join(
tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs2')
multipliers = [1., 1000.]
number_of_steps = 10
losses = []
learning_rate = 0.001
# First, train the model with equivalently smaller learning rate.
with ops.Graph().as_default():
random_seed.set_random_seed(0)
train_op = self.create_train_op(
learning_rate=learning_rate, gradient_multiplier=multipliers[0])
loss = learning.train(train_op, logdir1, number_of_steps=number_of_steps)
losses.append(loss)
self.assertGreater(loss, .5)
# Second, train the model with equivalently larger learning rate.
with ops.Graph().as_default():
random_seed.set_random_seed(0)
train_op = self.create_train_op(
learning_rate=learning_rate, gradient_multiplier=multipliers[1])
loss = learning.train(train_op, logdir2, number_of_steps=number_of_steps)
losses.append(loss)
self.assertIsNotNone(loss)
self.assertLess(loss, .5)
# The loss of the model trained with larger learning rate should
# be smaller.
self.assertGreater(losses[0], losses[1])
def testTrainWithEpochLimit(self):
logdir = os.path.join(tempfile.mkdtemp(prefix=self.get_temp_dir()),
'tmp_logs')
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_inputs_limited = input_lib.limit_epochs(tf_inputs, num_epochs=300)
tf_labels_limited = input_lib.limit_epochs(tf_labels, num_epochs=300)
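# The constants hold the full batch, so one "epoch" is one step:
# limit_epochs(..., num_epochs=300) makes the input queue raise
# OutOfRangeError after 300 steps, which is how train() terminates
# here without an explicit number_of_steps.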
tf_predictions = LogisticClassifier(tf_inputs_limited)
loss_ops.log_loss(tf_predictions, tf_labels_limited)
total_loss = loss_ops.get_total_loss()
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
loss = learning.train(train_op, logdir, log_every_n_steps=10)
self.assertIsNotNone(loss)
self.assertLess(loss, .015)
self.assertTrue(os.path.isfile('{}/model.ckpt-300.index'.format(logdir)))
self.assertTrue(os.path.isfile('{}/model.ckpt-300.data-00000-of-00001'.format(logdir)))
if __name__ == '__main__':
test.main()
| apache-2.0 |
scyclops/Readable-Feeds | web/db.py | 16 | 36406 | """
Database API
(part of web.py)
"""
__all__ = [
"UnknownParamstyle", "UnknownDB", "TransactionError",
"sqllist", "sqlors", "reparam", "sqlquote",
"SQLQuery", "SQLParam", "sqlparam",
"SQLLiteral", "sqlliteral",
"database", 'DB',
]
import time
try:
import datetime
except ImportError:
datetime = None
from utils import threadeddict, storage, iters, iterbetter
try:
# db module can work independent of web.py
from webapi import debug, config
except:
import sys
debug = sys.stderr
config = storage()
class UnknownDB(Exception):
"""raised for unsupported dbms"""
pass
class _ItplError(ValueError):
def __init__(self, text, pos):
ValueError.__init__(self)
self.text = text
self.pos = pos
def __str__(self):
return "unfinished expression in %s at char %d" % (
repr(self.text), self.pos)
class TransactionError(Exception): pass
class UnknownParamstyle(Exception):
"""
raised for unsupported db paramstyles
(currently supported: qmark, numeric, format, pyformat)
"""
pass
class SQLParam:
"""
Parameter in SQLQuery.
>>> q = SQLQuery(["SELECT * FROM test WHERE name=", SQLParam("joe")])
>>> q
<sql: "SELECT * FROM test WHERE name='joe'">
>>> q.query()
'SELECT * FROM test WHERE name=%s'
>>> q.values()
['joe']
"""
def __init__(self, value):
self.value = value
def get_marker(self, paramstyle='pyformat'):
if paramstyle == 'qmark':
return '?'
elif paramstyle == 'numeric':
return ':1'
elif paramstyle is None or paramstyle in ['format', 'pyformat']:
return '%s'
raise UnknownParamstyle, paramstyle
def sqlquery(self):
return SQLQuery([self])
def __add__(self, other):
return self.sqlquery() + other
def __radd__(self, other):
return other + self.sqlquery()
def __str__(self):
return str(self.value)
def __repr__(self):
return '<param: %s>' % repr(self.value)
sqlparam = SQLParam
class SQLQuery:
"""
You can pass this sort of thing as a clause in any db function.
Otherwise, you can pass a dictionary to the keyword argument `vars`
and the function will call reparam for you.
Internally, consists of `items`, which is a list of strings and
SQLParams, which get concatenated to produce the actual query.
"""
# tested in sqlquote's docstring
def __init__(self, items=[]):
"""Creates a new SQLQuery.
>>> SQLQuery("x")
<sql: 'x'>
>>> q = SQLQuery(['SELECT * FROM ', 'test', ' WHERE x=', SQLParam(1)])
>>> q
<sql: 'SELECT * FROM test WHERE x=1'>
>>> q.query(), q.values()
('SELECT * FROM test WHERE x=%s', [1])
>>> SQLQuery(SQLParam(1))
<sql: '1'>
"""
if isinstance(items, list):
self.items = items
elif isinstance(items, SQLParam):
self.items = [items]
elif isinstance(items, SQLQuery):
self.items = list(items.items)
else:
self.items = [str(items)]
# Take care of SQLLiterals
for i, item in enumerate(self.items):
if isinstance(item, SQLParam) and isinstance(item.value, SQLLiteral):
self.items[i] = item.value.v
def __add__(self, other):
if isinstance(other, basestring):
items = [other]
elif isinstance(other, SQLQuery):
items = other.items
else:
return NotImplemented
return SQLQuery(self.items + items)
def __radd__(self, other):
if isinstance(other, basestring):
items = [other]
else:
return NotImplemented
return SQLQuery(items + self.items)
def __iadd__(self, other):
if isinstance(other, basestring):
items = [other]
elif isinstance(other, SQLQuery):
items = other.items
else:
return NotImplemented
self.items.extend(items)
return self
def __len__(self):
return len(self.query())
def query(self, paramstyle=None):
"""
Returns the query part of the sql query.
>>> q = SQLQuery(["SELECT * FROM test WHERE name=", SQLParam('joe')])
>>> q.query()
'SELECT * FROM test WHERE name=%s'
>>> q.query(paramstyle='qmark')
'SELECT * FROM test WHERE name=?'
"""
s = ''
for x in self.items:
if isinstance(x, SQLParam):
x = x.get_marker(paramstyle)
s += x
return s
def values(self):
"""
Returns the values of the parameters used in the sql query.
>>> q = SQLQuery(["SELECT * FROM test WHERE name=", SQLParam('joe')])
>>> q.values()
['joe']
"""
return [i.value for i in self.items if isinstance(i, SQLParam)]
def join(items, sep=' '):
"""
Joins multiple queries.
>>> SQLQuery.join(['a', 'b'], ', ')
<sql: 'a, b'>
"""
if len(items) == 0:
return SQLQuery("")
q = SQLQuery(items[0])
for item in items[1:]:
q += sep
q += item
return q
join = staticmethod(join)
def __str__(self):
try:
return self.query() % tuple([sqlify(x) for x in self.values()])
except (ValueError, TypeError):
return self.query()
def __repr__(self):
return '<sql: %s>' % repr(str(self))
class SQLLiteral:
"""
Protects a string from `sqlquote`.
>>> sqlquote('NOW()')
<sql: "'NOW()'">
>>> sqlquote(SQLLiteral('NOW()'))
<sql: 'NOW()'>
"""
def __init__(self, v):
self.v = v
def __repr__(self):
return self.v
sqlliteral = SQLLiteral
def reparam(string_, dictionary):
"""
Takes a string and a dictionary and interpolates the string
using values from the dictionary. Returns an `SQLQuery` for the result.
>>> reparam("s = $s", dict(s=True))
<sql: "s = 't'">
"""
dictionary = dictionary.copy() # eval mucks with it
vals = []
result = []
for live, chunk in _interpolate(string_):
if live:
v = eval(chunk, dictionary)
result.append(sqlparam(v))
else:
result.append(chunk)
return SQLQuery.join(result, '')
def sqlify(obj):
"""
converts `obj` to its proper SQL version
>>> sqlify(None)
'NULL'
>>> sqlify(True)
"'t'"
>>> sqlify(3)
'3'
"""
# because `1 == True and hash(1) == hash(True)`
# we have to do this the hard way...
if obj is None:
return 'NULL'
elif obj is True:
return "'t'"
elif obj is False:
return "'f'"
elif datetime and isinstance(obj, datetime.datetime):
return repr(obj.isoformat())
else:
return repr(obj)
def sqllist(lst):
"""
Converts the arguments for use in something like a WHERE clause.
>>> sqllist(['a', 'b'])
'a, b'
>>> sqllist('a')
'a'
>>> sqllist(u'abc')
u'abc'
"""
if isinstance(lst, basestring):
return lst
else:
return ', '.join(lst)
def sqlors(left, lst):
"""
`left` is a SQL clause like `tablename.arg = `
and `lst` is a list of values. Returns an `SQLQuery`
that ORs together the clause applied to each item
in `lst`.
>>> sqlors('foo = ', [])
<sql: '1=2'>
>>> sqlors('foo = ', [1])
<sql: 'foo = 1'>
>>> sqlors('foo = ', 1)
<sql: 'foo = 1'>
>>> sqlors('foo = ', [1,2,3])
<sql: '(foo = 1 OR foo = 2 OR foo = 3 OR 1=2)'>
"""
if isinstance(lst, iters):
lst = list(lst)
ln = len(lst)
if ln == 0:
return SQLQuery("1=2")
if ln == 1:
lst = lst[0]
if isinstance(lst, iters):
return SQLQuery(['('] +
sum([[left, sqlparam(x), ' OR '] for x in lst], []) +
['1=2)']
)
else:
return left + sqlparam(lst)
def sqlwhere(dictionary, grouping=' AND '):
"""
Converts a `dictionary` to an SQL WHERE clause `SQLQuery`.
>>> sqlwhere({'cust_id': 2, 'order_id':3})
<sql: 'order_id = 3 AND cust_id = 2'>
>>> sqlwhere({'cust_id': 2, 'order_id':3}, grouping=', ')
<sql: 'order_id = 3, cust_id = 2'>
>>> sqlwhere({'a': 'a', 'b': 'b'}).query()
'a = %s AND b = %s'
"""
return SQLQuery.join([k + ' = ' + sqlparam(v) for k, v in dictionary.items()], grouping)
def sqlquote(a):
"""
Ensures `a` is quoted properly for use in a SQL query.
>>> 'WHERE x = ' + sqlquote(True) + ' AND y = ' + sqlquote(3)
<sql: "WHERE x = 't' AND y = 3">
"""
return sqlparam(a).sqlquery()
class Transaction:
"""Database transaction."""
def __init__(self, ctx):
self.ctx = ctx
self.transaction_count = transaction_count = len(ctx.transactions)
class transaction_engine:
"""Transaction Engine used in top level transactions."""
def do_transact(self):
ctx.commit(unload=False)
def do_commit(self):
ctx.commit()
def do_rollback(self):
ctx.rollback()
class subtransaction_engine:
"""Transaction Engine used in sub transactions."""
def query(self, q):
db_cursor = ctx.db.cursor()
ctx.db_execute(db_cursor, SQLQuery(q % transaction_count))
def do_transact(self):
self.query('SAVEPOINT webpy_sp_%s')
def do_commit(self):
self.query('RELEASE SAVEPOINT webpy_sp_%s')
def do_rollback(self):
self.query('ROLLBACK TO SAVEPOINT webpy_sp_%s')
class dummy_engine:
"""Transaction Engine used instead of subtransaction_engine
when sub transactions are not supported."""
do_transact = do_commit = do_rollback = lambda self: None
if self.transaction_count:
# nested transactions are not supported in some databases
if self.ctx.get('ignore_nested_transactions'):
self.engine = dummy_engine()
else:
self.engine = subtransaction_engine()
else:
self.engine = transaction_engine()
self.engine.do_transact()
self.ctx.transactions.append(self)
def __enter__(self):
return self
def __exit__(self, exctype, excvalue, traceback):
if exctype is not None:
self.rollback()
else:
self.commit()
def commit(self):
if len(self.ctx.transactions) > self.transaction_count:
self.engine.do_commit()
self.ctx.transactions = self.ctx.transactions[:self.transaction_count]
def rollback(self):
if len(self.ctx.transactions) > self.transaction_count:
self.engine.do_rollback()
self.ctx.transactions = self.ctx.transactions[:self.transaction_count]
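# Usage sketch (assumes a configured `db`, e.g. from database(dbn=...)):
#   with db.transaction():
#       db.insert('person', name='foo')
#       with db.transaction():      # nested: emits SAVEPOINT webpy_sp_1
#           db.insert('person', name='bar')
#           # an exception here rolls back to the savepoint only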
class DB:
"""Database"""
def __init__(self, db_module, keywords):
"""Creates a database.
"""
self.db_module = db_module
self.keywords = keywords
self._ctx = threadeddict()
# flag to enable/disable printing queries
self.printing = config.get('debug', False)
self.supports_multiple_insert = False
try:
import DBUtils
# enable pooling if DBUtils module is available.
self.has_pooling = True
except ImportError:
self.has_pooling = False
# Pooling can be disabled by passing pooling=False in the keywords.
self.has_pooling = self.keywords.pop('pooling', True) and self.has_pooling
def _getctx(self):
if not self._ctx.get('db'):
self._load_context(self._ctx)
return self._ctx
ctx = property(_getctx)
def _load_context(self, ctx):
ctx.dbq_count = 0
ctx.transactions = [] # stack of transactions
if self.has_pooling:
ctx.db = self._connect_with_pooling(self.keywords)
else:
ctx.db = self._connect(self.keywords)
ctx.db_execute = self._db_execute
if not hasattr(ctx.db, 'commit'):
ctx.db.commit = lambda: None
if not hasattr(ctx.db, 'rollback'):
ctx.db.rollback = lambda: None
def commit(unload=True):
# do db commit and release the connection if pooling is enabled.
ctx.db.commit()
if unload and self.has_pooling:
self._unload_context(self._ctx)
def rollback():
# do db rollback and release the connection if pooling is enabled.
ctx.db.rollback()
if self.has_pooling:
self._unload_context(self._ctx)
ctx.commit = commit
ctx.rollback = rollback
def _unload_context(self, ctx):
del ctx.db
def _connect(self, keywords):
return self.db_module.connect(**keywords)
def _connect_with_pooling(self, keywords):
def get_pooled_db():
from DBUtils import PooledDB
# In DBUtils 0.9.3, `dbapi` argument is renamed as `creator`
# see Bug#122112
# compare version parts numerically; comparing the raw string lists
# would sort e.g. '0.10' before '0.9'
if tuple(int(x) for x in PooledDB.__version__.split('.')[:3] if x.isdigit()) < (0, 9, 3):
return PooledDB.PooledDB(dbapi=self.db_module, **keywords)
else:
return PooledDB.PooledDB(creator=self.db_module, **keywords)
if getattr(self, '_pooleddb', None) is None:
self._pooleddb = get_pooled_db()
return self._pooleddb.connection()
def _db_cursor(self):
return self.ctx.db.cursor()
def _param_marker(self):
"""Returns parameter marker based on paramstyle attribute if this database."""
style = getattr(self, 'paramstyle', 'pyformat')
if style == 'qmark':
return '?'
elif style == 'numeric':
return ':1'
elif style in ['format', 'pyformat']:
return '%s'
raise UnknownParamstyle, style
def _py2sql(self, val):
"""
Transforms a Python value into a value to pass to cursor.execute.
This exists specifically for a workaround in SqliteDB.
"""
if isinstance(val, unicode):
val = val.encode('UTF-8')
return val
def _db_execute(self, cur, sql_query):
"""executes an sql query"""
self.ctx.dbq_count += 1
try:
a = time.time()
paramstyle = getattr(self, 'paramstyle', 'pyformat')
out = cur.execute(sql_query.query(paramstyle),
[self._py2sql(x)
for x in sql_query.values()])
b = time.time()
except:
if self.printing:
print >> debug, 'ERR:', str(sql_query)
if self.ctx.transactions:
self.ctx.transactions[-1].rollback()
else:
self.ctx.rollback()
raise
if self.printing:
print >> debug, '%s (%s): %s' % (round(b-a, 2), self.ctx.dbq_count, str(sql_query))
return out
def _where(self, where, vars):
if isinstance(where, (int, long)):
where = "id = " + sqlparam(where)
#@@@ for backward-compatibility
elif isinstance(where, (list, tuple)) and len(where) == 2:
where = SQLQuery(where[0], where[1])
elif isinstance(where, SQLQuery):
pass
else:
where = reparam(where, vars)
return where
def query(self, sql_query, vars=None, processed=False, _test=False):
"""
Execute SQL query `sql_query` using dictionary `vars` to interpolate it.
If `processed=True`, `vars` is a `reparam`-style list to use
instead of interpolating.
>>> db = DB(None, {})
>>> db.query("SELECT * FROM foo", _test=True)
<sql: 'SELECT * FROM foo'>
>>> db.query("SELECT * FROM foo WHERE x = $x", vars=dict(x='f'), _test=True)
<sql: "SELECT * FROM foo WHERE x = 'f'">
>>> db.query("SELECT * FROM foo WHERE x = " + sqlquote('f'), _test=True)
<sql: "SELECT * FROM foo WHERE x = 'f'">
"""
if vars is None: vars = {}
if not processed and not isinstance(sql_query, SQLQuery):
sql_query = reparam(sql_query, vars)
if _test: return sql_query
db_cursor = self._db_cursor()
self._db_execute(db_cursor, sql_query)
if db_cursor.description:
names = [x[0] for x in db_cursor.description]
def iterwrapper():
row = db_cursor.fetchone()
while row:
yield storage(dict(zip(names, row)))
row = db_cursor.fetchone()
out = iterbetter(iterwrapper())
out.__len__ = lambda: int(db_cursor.rowcount)
out.list = lambda: [storage(dict(zip(names, x))) \
for x in db_cursor.fetchall()]
else:
out = db_cursor.rowcount
if not self.ctx.transactions:
self.ctx.commit()
return out
def select(self, tables, vars=None, what='*', where=None, order=None, group=None,
limit=None, offset=None, _test=False):
"""
Selects `what` from `tables` with clauses `where`, `order`,
`group`, `limit`, and `offset`. Uses vars to interpolate.
Otherwise, each clause can be a SQLQuery.
>>> db = DB(None, {})
>>> db.select('foo', _test=True)
<sql: 'SELECT * FROM foo'>
>>> db.select(['foo', 'bar'], where="foo.bar_id = bar.id", limit=5, _test=True)
<sql: 'SELECT * FROM foo, bar WHERE foo.bar_id = bar.id LIMIT 5'>
"""
if vars is None: vars = {}
sql_clauses = self.sql_clauses(what, tables, where, group, order, limit, offset)
clauses = [self.gen_clause(sql, val, vars) for sql, val in sql_clauses if val is not None]
qout = SQLQuery.join(clauses)
if _test: return qout
return self.query(qout, processed=True)
def where(self, table, what='*', order=None, group=None, limit=None,
offset=None, _test=False, **kwargs):
"""
Selects from `table` where keys are equal to values in `kwargs`.
>>> db = DB(None, {})
>>> db.where('foo', bar_id=3, _test=True)
<sql: 'SELECT * FROM foo WHERE bar_id = 3'>
>>> db.where('foo', source=2, crust='dewey', _test=True)
<sql: "SELECT * FROM foo WHERE source = 2 AND crust = 'dewey'">
"""
where = []
for k, v in kwargs.iteritems():
where.append(k + ' = ' + sqlquote(v))
return self.select(table, what=what, order=order,
group=group, limit=limit, offset=offset, _test=_test,
where=SQLQuery.join(where, ' AND '))
def sql_clauses(self, what, tables, where, group, order, limit, offset):
return (
('SELECT', what),
('FROM', sqllist(tables)),
('WHERE', where),
('GROUP BY', group),
('ORDER BY', order),
('LIMIT', limit),
('OFFSET', offset))
def gen_clause(self, sql, val, vars):
if isinstance(val, (int, long)):
if sql == 'WHERE':
nout = 'id = ' + sqlquote(val)
else:
nout = SQLQuery(val)
#@@@
elif isinstance(val, (list, tuple)) and len(val) == 2:
nout = SQLQuery(val[0], val[1]) # backwards-compatibility
elif isinstance(val, SQLQuery):
nout = val
else:
nout = reparam(val, vars)
def xjoin(a, b):
if a and b: return a + ' ' + b
else: return a or b
return xjoin(sql, nout)
def insert(self, tablename, seqname=None, _test=False, **values):
"""
Inserts `values` into `tablename`. Returns current sequence ID.
Set `seqname` to the ID if it's not the default, or to `False`
if there isn't one.
>>> db = DB(None, {})
>>> q = db.insert('foo', name='bob', age=2, created=SQLLiteral('NOW()'), _test=True)
>>> q
<sql: "INSERT INTO foo (age, name, created) VALUES (2, 'bob', NOW())">
>>> q.query()
'INSERT INTO foo (age, name, created) VALUES (%s, %s, NOW())'
>>> q.values()
[2, 'bob']
"""
def q(x): return "(" + x + ")"
if values:
_keys = SQLQuery.join(values.keys(), ', ')
_values = SQLQuery.join([sqlparam(v) for v in values.values()], ', ')
sql_query = "INSERT INTO %s " % tablename + q(_keys) + ' VALUES ' + q(_values)
else:
sql_query = SQLQuery("INSERT INTO %s DEFAULT VALUES" % tablename)
if _test: return sql_query
db_cursor = self._db_cursor()
if seqname is not False:
sql_query = self._process_insert_query(sql_query, tablename, seqname)
if isinstance(sql_query, tuple):
# for some databases, a separate query has to be made to find
# the id of the inserted row.
q1, q2 = sql_query
self._db_execute(db_cursor, q1)
self._db_execute(db_cursor, q2)
else:
self._db_execute(db_cursor, sql_query)
try:
out = db_cursor.fetchone()[0]
except Exception:
out = None
if not self.ctx.transactions:
self.ctx.commit()
return out
def multiple_insert(self, tablename, values, seqname=None, _test=False):
"""
Inserts multiple rows into `tablename`. The `values` must be a list of dictionaries,
one for each row to be inserted, each with the same set of keys.
Returns the list of ids of the inserted rows.
Set `seqname` to the ID if it's not the default, or to `False`
if there isn't one.
>>> db = DB(None, {})
>>> db.supports_multiple_insert = True
>>> values = [{"name": "foo", "email": "[email protected]"}, {"name": "bar", "email": "[email protected]"}]
>>> db.multiple_insert('person', values=values, _test=True)
<sql: "INSERT INTO person (name, email) VALUES ('foo', '[email protected]'), ('bar', '[email protected]')">
"""
if not values:
return []
if not self.supports_multiple_insert:
out = [self.insert(tablename, seqname=seqname, _test=_test, **v) for v in values]
if seqname is False:
return None
else:
return out
keys = values[0].keys()
#@@ make sure all keys are valid
# make sure all rows have same keys.
for v in values:
if v.keys() != keys:
raise ValueError, 'Bad data'
sql_query = SQLQuery('INSERT INTO %s (%s) VALUES ' % (tablename, ', '.join(keys)))
data = []
for row in values:
d = SQLQuery.join([SQLParam(row[k]) for k in keys], ', ')
data.append('(' + d + ')')
sql_query += SQLQuery.join(data, ', ')
if _test: return sql_query
db_cursor = self._db_cursor()
if seqname is not False:
sql_query = self._process_insert_query(sql_query, tablename, seqname)
if isinstance(sql_query, tuple):
# for some databases, a separate query has to be made to find
# the id of the inserted row.
q1, q2 = sql_query
self._db_execute(db_cursor, q1)
self._db_execute(db_cursor, q2)
else:
self._db_execute(db_cursor, sql_query)
try:
out = db_cursor.fetchone()[0]
out = range(out-len(values)+1, out+1)
except Exception:
out = None
if not self.ctx.transactions:
self.ctx.commit()
return out
def update(self, tables, where, vars=None, _test=False, **values):
"""
Update `tables` with clause `where` (interpolated using `vars`)
and setting `values`.
>>> db = DB(None, {})
>>> name = 'Joseph'
>>> q = db.update('foo', where='name = $name', name='bob', age=2,
... created=SQLLiteral('NOW()'), vars=locals(), _test=True)
>>> q
<sql: "UPDATE foo SET age = 2, name = 'bob', created = NOW() WHERE name = 'Joseph'">
>>> q.query()
'UPDATE foo SET age = %s, name = %s, created = NOW() WHERE name = %s'
>>> q.values()
[2, 'bob', 'Joseph']
"""
if vars is None: vars = {}
where = self._where(where, vars)
query = (
"UPDATE " + sqllist(tables) +
" SET " + sqlwhere(values, ', ') +
" WHERE " + where)
if _test: return query
db_cursor = self._db_cursor()
self._db_execute(db_cursor, query)
if not self.ctx.transactions:
self.ctx.commit()
return db_cursor.rowcount
def delete(self, table, where, using=None, vars=None, _test=False):
"""
Deletes from `table` with clauses `where` and `using`.
>>> db = DB(None, {})
>>> name = 'Joe'
>>> db.delete('foo', where='name = $name', vars=locals(), _test=True)
<sql: "DELETE FROM foo WHERE name = 'Joe'">
"""
if vars is None: vars = {}
where = self._where(where, vars)
q = 'DELETE FROM ' + table
if where: q += ' WHERE ' + where
if using: q += ' USING ' + sqllist(using)
if _test: return q
db_cursor = self._db_cursor()
self._db_execute(db_cursor, q)
if not self.ctx.transactions:
self.ctx.commit()
return db_cursor.rowcount
def _process_insert_query(self, query, tablename, seqname):
return query
def transaction(self):
"""Start a transaction."""
return Transaction(self.ctx)
class PostgresDB(DB):
"""Postgres driver."""
def __init__(self, **keywords):
if 'pw' in keywords:
keywords['password'] = keywords['pw']
del keywords['pw']
db_module = self.get_db_module()
keywords['database'] = keywords.pop('db')
self.dbname = "postgres"
self.paramstyle = db_module.paramstyle
DB.__init__(self, db_module, keywords)
self.supports_multiple_insert = True
def get_db_module(self):
try:
import psycopg2 as db
import psycopg2.extensions
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
except ImportError:
try:
import psycopg as db
except ImportError:
import pgdb as db
return db
def _process_insert_query(self, query, tablename, seqname):
if seqname is None:
seqname = tablename + "_id_seq"
return query + "; SELECT currval('%s')" % seqname
def _connect(self, keywords):
conn = DB._connect(self, keywords)
conn.set_client_encoding('UTF8')
return conn
def _connect_with_pooling(self, keywords):
conn = DB._connect_with_pooling(self, keywords)
conn._con._con.set_client_encoding('UTF8')
return conn
class MySQLDB(DB):
def __init__(self, **keywords):
import MySQLdb as db
if 'pw' in keywords:
keywords['passwd'] = keywords['pw']
del keywords['pw']
if 'charset' not in keywords:
keywords['charset'] = 'utf8'
elif keywords['charset'] is None:
del keywords['charset']
self.paramstyle = db.paramstyle = 'pyformat' # it's both, like psycopg
self.dbname = "mysql"
DB.__init__(self, db, keywords)
self.supports_multiple_insert = True
def _process_insert_query(self, query, tablename, seqname):
return query, SQLQuery('SELECT last_insert_id();')
class SqliteDB(DB):
def __init__(self, **keywords):
try:
import sqlite3 as db
db.paramstyle = 'qmark'
except ImportError:
try:
from pysqlite2 import dbapi2 as db
db.paramstyle = 'qmark'
except ImportError:
import sqlite as db
self.paramstyle = db.paramstyle
keywords['database'] = keywords.pop('db')
self.dbname = "sqlite"
DB.__init__(self, db, keywords)
def _process_insert_query(self, query, tablename, seqname):
return query, SQLQuery('SELECT last_insert_rowid();')
def query(self, *a, **kw):
out = DB.query(self, *a, **kw)
if isinstance(out, iterbetter):
# rowcount is not provided by sqlite
del out.__len__
return out
# as with PostgresDB, the database is assumed to be in UTF-8.
# This doesn't mean we turn byte-strings coming out of it into
# Unicode objects, but we avoid trying to put Unicode objects into
# it.
encoding = 'UTF-8'
def _py2sql(self, val):
r"""
Work around a couple of problems in SQLite that maybe pysqlite
should take care of: give it True and False and it thinks
they're column names; give it Unicode and it tries to insert
it in, possibly, ASCII.
>>> meth = SqliteDB(db='nonexistent')._py2sql
>>> [meth(x) for x in [True, False, 1, 2, 'foo', u'souffl\xe9']]
[1, 0, 1, 2, 'foo', 'souffl\xc3\xa9']
"""
if val is True: return 1
elif val is False: return 0
elif isinstance(val, unicode): return val.encode(self.encoding)
else: return val
class FirebirdDB(DB):
"""Firebird Database.
"""
def __init__(self, **keywords):
try:
import kinterbasdb as db
except Exception:
db = None
pass
if 'pw' in keywords:
keywords['passwd'] = keywords['pw']
del keywords['pw']
keywords['database'] = keywords['db']
del keywords['db']
DB.__init__(self, db, keywords)
def delete(self, table, where=None, using=None, vars=None, _test=False):
# firebird doesn't support using clause
using=None
return DB.delete(self, table, where, using, vars, _test)
def sql_clauses(self, what, tables, where, group, order, limit, offset):
return (
('SELECT', ''),
('FIRST', limit),
('SKIP', offset),
('', what),
('FROM', sqllist(tables)),
('WHERE', where),
('GROUP BY', group),
('ORDER BY', order)
)
class MSSQLDB(DB):
def __init__(self, **keywords):
import pymssql as db
if 'pw' in keywords:
keywords['password'] = keywords.pop('pw')
keywords['database'] = keywords.pop('db')
self.dbname = "mssql"
DB.__init__(self, db, keywords)
class OracleDB(DB):
def __init__(self, **keywords):
import cx_Oracle as db
if 'pw' in keywords:
keywords['password'] = keywords.pop('pw')
#@@ TODO: use db.makedsn if host, port is specified
keywords['dsn'] = keywords.pop('db')
self.dbname = 'oracle'
db.paramstyle = 'numeric'
self.paramstyle = db.paramstyle
# oracle doesn't support pooling
keywords.pop('pooling', None)
DB.__init__(self, db, keywords)
def _process_insert_query(self, query, tablename, seqname):
if seqname is None:
# It is not possible to get seq name from table name in Oracle
return query
else:
return query + "; SELECT %s.currval FROM dual" % seqname
_databases = {}
def database(dburl=None, **params):
"""Creates appropriate database using params.
Pooling will be enabled if DBUtils module is available.
Pooling can be disabled by passing pooling=False in params.
"""
dbn = params.pop('dbn')
if dbn in _databases:
return _databases[dbn](**params)
else:
raise UnknownDB, dbn
def register_database(name, clazz):
"""
Register a database.
>>> class LegacyDB(DB):
... def __init__(self, **params):
... pass
...
>>> register_database('legacy', LegacyDB)
>>> db = database(dbn='legacy', db='test', user='joe', passwd='secret')
"""
_databases[name] = clazz
register_database('mysql', MySQLDB)
register_database('postgres', PostgresDB)
register_database('sqlite', SqliteDB)
register_database('firebird', FirebirdDB)
register_database('mssql', MSSQLDB)
register_database('oracle', OracleDB)
def _interpolate(format):
"""
Takes a format string and returns a list of 2-tuples of the form
(boolean, string) where boolean says whether string should be evaled
or not.
from <http://lfw.org/python/Itpl.py> (public domain, Ka-Ping Yee)
"""
from tokenize import tokenprog
def matchorfail(text, pos):
match = tokenprog.match(text, pos)
if match is None:
raise _ItplError(text, pos)
return match, match.end()
namechars = "abcdefghijklmnopqrstuvwxyz" \
"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_";
chunks = []
pos = 0
while 1:
dollar = format.find("$", pos)
if dollar < 0:
break
nextchar = format[dollar + 1]
if nextchar == "{":
chunks.append((0, format[pos:dollar]))
pos, level = dollar + 2, 1
while level:
match, pos = matchorfail(format, pos)
tstart, tend = match.regs[3]
token = format[tstart:tend]
if token == "{":
level = level + 1
elif token == "}":
level = level - 1
chunks.append((1, format[dollar + 2:pos - 1]))
elif nextchar in namechars:
chunks.append((0, format[pos:dollar]))
match, pos = matchorfail(format, dollar + 1)
while pos < len(format):
if format[pos] == "." and \
pos + 1 < len(format) and format[pos + 1] in namechars:
match, pos = matchorfail(format, pos + 1)
elif format[pos] in "([":
pos, level = pos + 1, 1
while level:
match, pos = matchorfail(format, pos)
tstart, tend = match.regs[3]
token = format[tstart:tend]
if token[0] in "([":
level = level + 1
elif token[0] in ")]":
level = level - 1
else:
break
chunks.append((1, format[dollar + 1:pos]))
else:
chunks.append((0, format[pos:dollar + 1]))
pos = dollar + 1 + (nextchar == "$")
if pos < len(format):
chunks.append((0, format[pos:]))
return chunks
if __name__ == "__main__":
import doctest
doctest.testmod()
| gpl-3.0 |
rmst/chi | chi/rl/dqn_m.py | 1 | 11810 | #issue in tensortools
from time import sleep, time
import chi
import tensortools as tt
import chi.rl.wrappers
import gym
import numpy as np
import tensorflow as tf
from tensortools import Function
from chi.rl.memory import ReplayMemory
from chi.rl.core import Agent
from chi.rl.memory import ShardedMemory
from chi.rl.wrappers import get_wrapper
from gym import wrappers
from gym.wrappers import Monitor
from tensorflow.contrib import layers
class DQN:
"""
An implementation of
Human Level Control through Deep Reinforcement Learning
http://www.nature.com/nature/journal/v518/n7540/full/nature14236.html
and
Deep Reinforcement Learning with Double Q-learning
https://arxiv.org/abs/1509.06461
"""
def __init__(self, n_actions, observation_shape, q_network: tt.Model, double_dqn=True,
replay_start=50000, clip_td=False, logdir="", clip_gradients=10):
self.logdir = logdir
self.replay_start = replay_start
self.n_actions = n_actions
self.observation_shape = observation_shape
self.memory = ShardedMemory()
self.discount = .99
self.step = 0
@tt.model(tracker=tf.train.ExponentialMovingAverage(1 - .0005), # TODO: replace with original weight freeze
optimizer=tf.train.RMSPropOptimizer(6.25e-5, .95, .95, .01))
def q_network(x):
x /= 255
x = layers.conv2d(x, 32, 8, 4)
x = layers.conv2d(x, 64, 4, 2)
x = layers.conv2d(x, 64, 3, 1)
x = layers.flatten(x)
xv = layers.fully_connected(x, 512)
val = layers.fully_connected(xv, 1, activation_fn=None)
# val = tf.squeeze(val, 1)
xa = layers.fully_connected(x, 512)
adv = layers.fully_connected(xa, n_actions, activation_fn=None)
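# Dueling aggregation: Q(s, a) = V(s) + A(s, a) - mean_a A(s, a);
# subtracting the mean advantage keeps V and A identifiable.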
q = val + adv - tf.reduce_mean(adv, axis=1, keep_dims=True)
q = tf.identity(q, name='Q')
return q
def act(x: [observation_shape]):
qs = q_network(x)
a = tf.argmax(qs, axis=1)
# qm = tf.reduce_max(qs, axis=1)
return a, qs
self.act = Function(act)
def train_step(o: [observation_shape], a: (tf.int32, [[]]), r, t: tf.bool, o2: [observation_shape]):
q = q_network(o)
# ac = tf.argmax(q, axis=1)
# compute targets
q2 = q_network.tracked(o2)
if double_dqn:
a2 = tf.argmax(q_network(o2), axis=1) # yep, that's really the only difference
else:
a2 = tf.argmax(q2, axis=1)
mask2 = tf.one_hot(a2, n_actions, 1.0, 0.0, axis=1)
q_target = tf.where(t, r, r + self.discount * tf.reduce_sum(q2 * mask2, axis=1))
q_target = tf.stop_gradient(q_target)
# compute loss
mask = tf.one_hot(a, n_actions, 1.0, 0.0, axis=1)
qs = tf.reduce_sum(q * mask, axis=1, name='q_max')
td = tf.subtract(q_target, qs, name='td')
if clip_td:
td = tf.clip_by_value(td, -.5, .5, name='clipped_td')
# loss = tf.reduce_mean(tf.abs(td), axis=0, name='mae')
# loss = tf.where(tf.abs(td) < 1.0, 0.5 * tf.square(td), tf.abs(td) - 0.5, name='mse_huber')
loss = tf.reduce_mean(tf.square(td), axis=0, name='mse')
gav = q_network.compute_gradients(loss)
if clip_gradients:
gav = [(tf.clip_by_norm(g, clip_gradients), v) for g, v in gav]
loss_update = q_network.apply_gradients(gav)
# logging
layers.summarize_tensors([td, loss, r, o, a,
tf.subtract(o2, o, name='state_dif'),
tf.reduce_mean(tf.cast(t, tf.float32), name='frac_terminal'),
tf.subtract(tf.reduce_max(q, 1, True), q, name='av_advantage')])
# layers.summarize_tensors(chi.activations())
# layers.summarize_tensors(chi.gradients())
return loss_update
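# Wrap the graph-building train_step as a callable op; background
# prefetch threads sample replay batches so training rarely blocks.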
self.train_step = Function(train_step,
prefetch_fctn=lambda: self.memory.sample_batch()[:-1],
prefetch_capacity=10,
prefetch_threads=3)
def log_weights():
v = q_network.trainable_variables()
# print(f'log weights {v}')
f = q_network.tracker_variables
# print(f'log weights EMA {f}')
difs = []
for g in v:
a = q_network.tracker.average(g)
difs.append(tf.subtract(g, a, name=f'ema/dif{g.name[:-2]}'))
layers.summarize_tensors(v + f + difs)
self.log_weights = Function(log_weights, async=True)
def train(self, timesteps=10000000, tter=.25):
saver = tf.train.Saver(keep_checkpoint_every_n_hours=5)
# saver.restore()
debugged = False
wt = 0.
while self.step < timesteps:
if self.step % 50000 == 0:
saver.save(tt.get_session(), self.logdir + '/dqn_checkpoint', global_step=self.step)
train_debug = not debugged and self.memory.t > 512 # it is assumed the batch size is smaller than that
debugged = debugged or train_debug
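# Throttle training so the ratio of gradient steps to collected
# experience steps stays at or below `tter`.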
curb = self.step > self.memory.t * tter
if (self.memory.t > self.replay_start and not curb) or train_debug:
if self.step % 500 == 0:
print(f"{self.step} steps of training after {self.memory.t} steps of experience (idle for {wt} s)")
wt = 0.
self.train_step()
if self.step % 50000 == 0:
self.log_weights()
self.step += 1
else:
sleep(.1)
wt += .1
def make_agent(self, test=False, memory_size=50000, name=None, logdir=None):
return Agent(self.agent(test, memory_size), name, logdir)
def agent(self, test=False, memory_size=50000):
if test:
def log_returns(rret: [], ret: [], qs, q_minus_ret, duration: []):
layers.summarize_tensors([rret, ret, qs, q_minus_ret, duration])
log_returns = Function(log_returns, async=True)
memory = None
else:
memory = ReplayMemory(memory_size, batch_size=None)
self.memory.children.append(memory)
t = 0
for ep in range(10000000000000):
done = False
annealing_time = 1000000
qs = []
unwrapped_rewards = []
rewards = []
ob = yield # get initial observation
annealing_factor = max(0, 1 - self.memory.t / annealing_time)
ep_steps = 0  # per-episode step counter
while not done:
# select actions according to epsilon-greedy policy
action, q = self.act(ob)
if not test and (self.step == 0 or np.random.rand() < 1 * annealing_factor + .1):
action = np.random.randint(0, self.n_actions)
qs.append(q[action])
meta = {'action_values': q}
if len(qs) > 1:
td = qs[-2] - (rewards[-1] - self.discount * qs[-1])
meta.update(td=td)
ob2, r, done, info = yield action, meta # return action and meta information and receive environment outputs
if not test:
memory.enqueue(ob, action, r, done, info)
ob = ob2
rewards.append(r)
unwrapped_rewards.append(info.get('unwrapped_reward', r))
t += 1
ep_steps += 1
if test:
wrapped_return = sum(rewards)
unwrapped_return = sum(unwrapped_rewards)
discounted_returns = [sum(rewards[i:] * self.discount ** np.arange(len(rewards)-i)) for i, _ in enumerate(rewards)]
q_minus_ret = np.subtract(qs, discounted_returns)
log_returns(unwrapped_return, wrapped_return, qs, q_minus_ret, ep_steps)
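# Rough wiring sketch (hypothetical env name; assumes the usual Atari
# preprocessing wrappers so observations match `observation_shape`):
#
#   env = gym.make('Breakout-v0')
#   dqn = DQN(env.action_space.n, env.observation_space.shape,
#             deep_q_network(env.action_space.n), logdir='/tmp/dqn')
#   agent = dqn.make_agent()
#   # run agent.run_episode(env) in one thread while dqn.train()
#   # consumes the shared replay memory in another.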
def deep_q_network(n_actions):
""" Architecture according to:
http://www.nature.com/nature/journal/v518/n7540/full/nature14236.html
"""
@tt.model(tracker=tf.train.ExponentialMovingAverage(1 - .0005), # TODO: replace with original weight freeze
optimizer=tf.train.RMSPropOptimizer(.00025, .95, .95, .01))
def q_network(x):
x /= 255
x = layers.conv2d(x, 32, 8, 4)
x = layers.conv2d(x, 64, 4, 2)
x = layers.conv2d(x, 64, 3, 1)
x = layers.flatten(x)
x = layers.fully_connected(x, 512)
x = layers.fully_connected(x, n_actions, activation_fn=None)
x = tf.identity(x, name='Q')
return x
return q_network
def duelling_network(n_actions):
""" Architecture according to Duelling DQN:
https://arxiv.org/abs/1511.06581
"""
@tt.model(tracker=tf.train.ExponentialMovingAverage(1 - .0005), # TODO: replace with original weight freeze
optimizer=tf.train.RMSPropOptimizer(6.25e-5, .95, .95, .01))
def q_network(x):
x /= 255
x = layers.conv2d(x, 32, 8, 4)
x = layers.conv2d(x, 64, 4, 2)
x = layers.conv2d(x, 64, 3, 1)
x = layers.flatten(x)
xv = layers.fully_connected(x, 512)
val = layers.fully_connected(xv, 1, activation_fn=None)
# val = tf.squeeze(val, 1)
xa = layers.fully_connected(x, 512)
adv = layers.fully_connected(xa, n_actions, activation_fn=None)
q = val + adv - tf.reduce_mean(adv, axis=1, keep_dims=True)
q = tf.identity(q, name='Q')
return q
# Tests
def dqn_test(env='OneRoundDeterministicReward-v0'):
def make_env(env=env):
e = gym.make(env)
e = ObservationShapeWrapper(e)
return e
env = make_env()
env_test = make_env()
@tt.model(tracker=tf.train.ExponentialMovingAverage(1-.01),
optimizer=tf.train.AdamOptimizer(.001))
def q_network(x):
x = layers.fully_connected(x, 32)
x = layers.fully_connected(x, env.action_space.n, activation_fn=None,
weights_initializer=tf.random_normal_initializer(0, 1e-4))
return x
dqn = DQN(env.action_space.n, env.observation_space.shape, q_network)
agent = dqn.make_agent()
agent_test = dqn.make_agent(test=True)
for ep in range(4000):
r = agent.run_episode(env)
if ep > 64:
dqn.train_step()
if ep % 100 == 0:
rs = [agent_test.run_episode(env) for _ in range(100)]
print(f'Return after episode {ep} is {sum(rs)/len(rs)}')
def test_dqn():
with tf.Graph().as_default(), tf.Session().as_default():
dqn_test() # optimal return = 1
with tf.Graph().as_default(), tf.Session().as_default():
dqn_test('OneRoundNondeterministicReward-v0') # optimal return = 1
with tf.Graph().as_default(), tf.Session().as_default():
dqn_test('TwoRoundDeterministicReward-v0') # optimal return = 3
# Test Utils
class ObservationShapeWrapper(gym.ObservationWrapper):
def __init__(self, env):
from gym.spaces import Box
super().__init__(env)
self.observation_space = Box(1, 1, [1])
def _observation(self, observation):
return [observation]
if __name__ == '__main__':
# chi.chi.tf_debug = True
test_dqn()
| mit |
nschloe/seacas | cmake/tribits/python_utils/GenerateDocUtilsOutput.py | 2 | 4168 | import sys
import os
import stat
import subprocess
#
# A) Set up basic paths and import modules
#
from GeneralScriptSupport import *
#
# B) Define some helper functions
#
def openWriteFilePermissions(filePath):
if os.path.exists(filePath):
os.chmod(filePath, stat.S_IREAD | stat.S_IWRITE \
| stat.S_IRGRP | stat.S_IWGRP)
def setGeneratedFilePermissions(filePath):
os.chmod(filePath, stat.S_IREAD | stat.S_IRGRP | stat.S_IROTH)
def generateFile(filePath, generateCmnd, outFile=None, workingDir="", runTwice=False):
openWriteFilePermissions(filePath)
runSysCmnd(generateCmnd, outFile=outFile, workingDir=workingDir)
if runTwice:
runSysCmnd(generateCmnd, outFile=outFile, workingDir=workingDir)
setGeneratedFilePermissions(filePath)
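# Example (hypothetical file names): regenerate outputs with the helpers
# above; runTwice=True matters for pdflatex so cross-references resolve.
#
#   generateFile('guide.html', 'rst2html guide.rst guide.html')
#   generateFile('guide.pdf', 'pdflatex guide.tex',
#                outFile='guide.tex.log', runTwice=True)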
def addCmndLineOptions(clp):
# Find the right default for the current system
rst2html = "rst2html"
rst2latex = "rst2latex"
rst2htmlWhich = getCmndOutput("which rst2html", True, False)
if rst2htmlWhich == "" or re.match(".+no rst2html.+", rst2htmlWhich):
rst2html = rst2html+".py"
rst2latex = rst2latex+".py"
clp.add_option(
"--file-base", dest="fileBase", type="string",
default="",
help="Base name for the reStructuredText *.rst file. This may include the" \
" relative or absolute path up to but not including the '.rst' extension." \
" [Required]"
)
clp.add_option(
"--generate-html", dest="generateHtml", type="string",
help="Generate the HTML output file using provided script (i.e. rst2html)" \
" [Default '"+rst2html+"']",
default=rst2html )
clp.add_option(
"--generate-latex", dest="generateLatex", type="string",
help="Generate the Latex (*.tex) output file using provided script" \
" (i.e. rst2latex) [Default '"+rst2latex+"']",
default=rst2latex )
clp.add_option(
"--generate-latex-options", dest="generateLatexOptions", type="string",
help="Options to pass to the generate latex command",
default="" )
clp.add_option(
"--generate-pdf", dest="generatePDF", type="string",
help="Generate the PDF output file from the latex file using provided" \
" script (i.e. pdflatex) [Default 'pdflatex']",
default="pdflatex" )
clp.add_option(
"--clean-temp-files", dest="cleanTempFiles", action="store_true",
help="Clean temporary files used in generation. [default]" )
clp.add_option(
"--no-clean-temp-files", dest="cleanTempFiles", action="store_false",
help="Do not delete temporary files.",
default=True )
def generateDocutilsOuputFiles(options):
filesToClean = []
# Base name including path (must just be relative)
outputFileBase = options.fileBase
# The path of the rst file:
rstFile = outputFileBase+".rst"
# Just the base name
outputFileBaseName = os.path.basename(outputFileBase)
if options.generateHtml:
print("Generating " + outputFileBaseName + ".html ...")
outputHtmlFile = outputFileBase+".html"
generateFile(outputHtmlFile,
options.generateHtml+" "+rstFile+" "+outputHtmlFile)
if options.generateLatex:
print("Generating " + outputFileBaseName + ".tex ...")
outputLatexFile = outputFileBase+".tex"
runSysCmnd(options.generateLatex+" "+options.generateLatexOptions+ \
" "+rstFile+" "+outputLatexFile)
if options.generatePDF:
print("Generating " + outputFileBaseName + ".pdf ...")
outputPdfFile = outputFileBase+".pdf"
outputPdfFileLog = outputLatexFile+".log"
generateFile(outputPdfFile,
options.generatePDF+" "+outputLatexFile,
outFile=outputPdfFileLog,
runTwice=True)
filesToClean.append(outputPdfFileLog)
#
# Clean the intermediate files
#
if options.cleanTempFiles:
print("Cleaning intermediate files ...")
filesToClean.extend(
[
outputFileBase+".aux",
outputFileBase+".log",
outputFileBase+".out",
outputFileBase+".tex",
outputFileBase+".toc",
]
)
for tempFile in filesToClean:
if os.path.exists(tempFile):
runSysCmnd("rm "+tempFile)
else:
print("Keeping temp files ...")
| bsd-3-clause |
mujiansu/arangodb | 3rdParty/V8-4.3.61/build/gyp/test/library/gyptest-shared.py | 430 | 2230 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple build of a "Hello, world!" program with shared libraries,
including verifying that libraries are rebuilt correctly when functions
move between libraries.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('library.gyp',
'-Dlibrary=shared_library',
'-Dmoveable_function=lib1',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib1_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.run_gyp('library.gyp',
'-Dlibrary=shared_library',
'-Dmoveable_function=lib2',
chdir='relocate/src')
# Update program.c to force a rebuild.
test.sleep()
contents = test.read('relocate/src/program.c')
contents = contents.replace('Hello', 'Hello again')
test.write('relocate/src/program.c', contents)
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello again from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib2_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.run_gyp('library.gyp',
'-Dlibrary=shared_library',
'-Dmoveable_function=lib1',
chdir='relocate/src')
# Update program.c to force a rebuild.
test.sleep()
contents = test.read('relocate/src/program.c')
contents = contents.replace('again', 'again again')
test.write('relocate/src/program.c', contents)
# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
# the "moved" module. This should be done in gyp by adding a dependency
# on the generated .vcproj file itself.
test.touch('relocate/src/lib2.c')
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello again again from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib1_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.pass_test()
| apache-2.0 |
andyaguiar/aiohttp | tests/test_web_response.py | 1 | 23820 | import asyncio
import datetime
import unittest
from unittest import mock
from aiohttp import hdrs
from aiohttp.multidict import CIMultiDict
from aiohttp.web import ContentCoding, Request, StreamResponse, Response
from aiohttp.protocol import HttpVersion, HttpVersion11, HttpVersion10
from aiohttp.protocol import RawRequestMessage
class TestStreamResponse(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def make_request(self, method, path, headers=CIMultiDict(),
version=HttpVersion11):
message = RawRequestMessage(method, path, version, headers,
False, False)
return self.request_from_message(message)
def request_from_message(self, message):
self.app = mock.Mock()
self.payload = mock.Mock()
self.transport = mock.Mock()
self.reader = mock.Mock()
self.writer = mock.Mock()
req = Request(self.app, message, self.payload,
self.transport, self.reader, self.writer)
return req
def test_ctor(self):
resp = StreamResponse()
self.assertEqual(200, resp.status)
self.assertIsNone(resp.keep_alive)
def test_content_length(self):
resp = StreamResponse()
self.assertIsNone(resp.content_length)
def test_content_length_setter(self):
resp = StreamResponse()
resp.content_length = 234
self.assertEqual(234, resp.content_length)
def test_drop_content_length_header_on_setting_len_to_None(self):
resp = StreamResponse()
resp.content_length = 1
self.assertEqual("1", resp.headers['Content-Length'])
resp.content_length = None
self.assertNotIn('Content-Length', resp.headers)
def test_set_content_length_to_None_on_non_set(self):
resp = StreamResponse()
resp.content_length = None
self.assertNotIn('Content-Length', resp.headers)
resp.content_length = None
self.assertNotIn('Content-Length', resp.headers)
def test_setting_content_type(self):
resp = StreamResponse()
resp.content_type = 'text/html'
self.assertEqual('text/html', resp.headers['content-type'])
def test_setting_charset(self):
resp = StreamResponse()
resp.content_type = 'text/html'
resp.charset = 'koi8-r'
self.assertEqual('text/html; charset=koi8-r',
resp.headers['content-type'])
def test_default_charset(self):
resp = StreamResponse()
self.assertIsNone(resp.charset)
def test_reset_charset(self):
resp = StreamResponse()
resp.content_type = 'text/html'
resp.charset = None
self.assertIsNone(resp.charset)
def test_reset_charset_after_setting(self):
resp = StreamResponse()
resp.content_type = 'text/html'
resp.charset = 'koi8-r'
resp.charset = None
self.assertIsNone(resp.charset)
def test_charset_without_content_type(self):
resp = StreamResponse()
with self.assertRaises(RuntimeError):
resp.charset = 'koi8-r'
def test_last_modified_initial(self):
resp = StreamResponse()
self.assertIsNone(resp.last_modified)
def test_last_modified_string(self):
resp = StreamResponse()
dt = datetime.datetime(1990, 1, 2, 3, 4, 5, 0, datetime.timezone.utc)
resp.last_modified = 'Mon, 2 Jan 1990 03:04:05 GMT'
self.assertEqual(resp.last_modified, dt)
def test_last_modified_timestamp(self):
resp = StreamResponse()
dt = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, datetime.timezone.utc)
resp.last_modified = 0
self.assertEqual(resp.last_modified, dt)
resp.last_modified = 0.0
self.assertEqual(resp.last_modified, dt)
def test_last_modified_datetime(self):
resp = StreamResponse()
dt = datetime.datetime(2001, 2, 3, 4, 5, 6, 0, datetime.timezone.utc)
resp.last_modified = dt
self.assertEqual(resp.last_modified, dt)
def test_last_modified_reset(self):
resp = StreamResponse()
resp.last_modified = 0
resp.last_modified = None
self.assertEqual(resp.last_modified, None)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_start(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
self.assertIsNone(resp.keep_alive)
msg = resp.start(req)
self.assertTrue(msg.send_headers.called)
self.assertIs(msg, resp.start(req))
self.assertTrue(resp.keep_alive)
req2 = self.make_request('GET', '/')
with self.assertRaises(RuntimeError):
resp.start(req2)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_chunked_encoding(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
self.assertFalse(resp.chunked)
resp.enable_chunked_encoding()
self.assertTrue(resp.chunked)
msg = resp.start(req)
self.assertTrue(msg.chunked)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_chunk_size(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
self.assertFalse(resp.chunked)
resp.enable_chunked_encoding(chunk_size=8192)
self.assertTrue(resp.chunked)
msg = resp.start(req)
self.assertTrue(msg.chunked)
msg.add_chunking_filter.assert_called_with(8192)
self.assertIsNotNone(msg.filter)
def test_chunked_encoding_forbidden_for_http_10(self):
req = self.make_request('GET', '/', version=HttpVersion10)
resp = StreamResponse()
resp.enable_chunked_encoding()
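# chunked transfer coding is an HTTP/1.1 feature, so starting the
# response on an HTTP/1.0 request must refuse it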
with self.assertRaisesRegex(
RuntimeError,
"Using chunked encoding is forbidden for HTTP/1.0"):
resp.start(req)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_compression_no_accept(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
self.assertFalse(resp.chunked)
self.assertFalse(resp.compression)
resp.enable_compression()
self.assertTrue(resp.compression)
msg = resp.start(req)
self.assertFalse(msg.add_compression_filter.called)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_force_compression_no_accept_backwards_compat(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
self.assertFalse(resp.chunked)
self.assertFalse(resp.compression)
resp.enable_compression(force=True)
self.assertTrue(resp.compression)
msg = resp.start(req)
self.assertTrue(msg.add_compression_filter.called)
self.assertIsNotNone(msg.filter)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_force_compression_false_backwards_compat(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
self.assertFalse(resp.compression)
resp.enable_compression(force=False)
self.assertTrue(resp.compression)
msg = resp.start(req)
self.assertFalse(msg.add_compression_filter.called)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_compression_default_coding(self, ResponseImpl):
req = self.make_request(
'GET', '/',
headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'}))
resp = StreamResponse()
self.assertFalse(resp.chunked)
self.assertFalse(resp.compression)
resp.enable_compression()
self.assertTrue(resp.compression)
msg = resp.start(req)
msg.add_compression_filter.assert_called_with('deflate')
self.assertEqual('deflate', resp.headers.get(hdrs.CONTENT_ENCODING))
self.assertIsNotNone(msg.filter)
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_force_compression_deflate(self, ResponseImpl):
req = self.make_request(
'GET', '/',
headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'}))
resp = StreamResponse()
resp.enable_compression(ContentCoding.deflate)
self.assertTrue(resp.compression)
msg = resp.start(req)
msg.add_compression_filter.assert_called_with('deflate')
self.assertEqual('deflate', resp.headers.get(hdrs.CONTENT_ENCODING))
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_force_compression_no_accept_deflate(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
resp.enable_compression(ContentCoding.deflate)
self.assertTrue(resp.compression)
msg = resp.start(req)
msg.add_compression_filter.assert_called_with('deflate')
self.assertEqual('deflate', resp.headers.get(hdrs.CONTENT_ENCODING))
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_force_compression_gzip(self, ResponseImpl):
req = self.make_request(
'GET', '/',
headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'}))
resp = StreamResponse()
resp.enable_compression(ContentCoding.gzip)
self.assertTrue(resp.compression)
msg = resp.start(req)
msg.add_compression_filter.assert_called_with('gzip')
self.assertEqual('gzip', resp.headers.get(hdrs.CONTENT_ENCODING))
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_force_compression_no_accept_gzip(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = StreamResponse()
resp.enable_compression(ContentCoding.gzip)
self.assertTrue(resp.compression)
msg = resp.start(req)
msg.add_compression_filter.assert_called_with('gzip')
self.assertEqual('gzip', resp.headers.get(hdrs.CONTENT_ENCODING))
@mock.patch('aiohttp.web_reqrep.ResponseImpl')
def test_delete_content_length_if_compression_enabled(self, ResponseImpl):
req = self.make_request('GET', '/')
resp = Response(body=b'answer')
self.assertEqual(6, resp.content_length)
resp.enable_compression(ContentCoding.gzip)
resp.start(req)
self.assertIsNone(resp.content_length)
def test_write_non_byteish(self):
resp = StreamResponse()
resp.start(self.make_request('GET', '/'))
with self.assertRaises(AssertionError):
resp.write(123)
def test_write_before_start(self):
resp = StreamResponse()
with self.assertRaises(RuntimeError):
resp.write(b'data')
def test_cannot_write_after_eof(self):
resp = StreamResponse()
resp.start(self.make_request('GET', '/'))
resp.write(b'data')
self.writer.drain.return_value = ()
self.loop.run_until_complete(resp.write_eof())
self.writer.write.reset_mock()
with self.assertRaises(RuntimeError):
resp.write(b'next data')
self.assertFalse(self.writer.write.called)
def test_cannot_write_eof_before_headers(self):
resp = StreamResponse()
with self.assertRaises(RuntimeError):
self.loop.run_until_complete(resp.write_eof())
def test_cannot_write_eof_twice(self):
resp = StreamResponse()
resp.start(self.make_request('GET', '/'))
resp.write(b'data')
self.writer.drain.return_value = ()
self.loop.run_until_complete(resp.write_eof())
self.assertTrue(self.writer.write.called)
self.writer.write.reset_mock()
self.loop.run_until_complete(resp.write_eof())
self.assertFalse(self.writer.write.called)
def test_write_returns_drain(self):
resp = StreamResponse()
resp.start(self.make_request('GET', '/'))
self.assertEqual((), resp.write(b'data'))
def test_write_returns_empty_tuple_on_empty_data(self):
resp = StreamResponse()
resp.start(self.make_request('GET', '/'))
self.assertEqual((), resp.write(b''))
def test_force_close(self):
resp = StreamResponse()
self.assertIsNone(resp.keep_alive)
resp.force_close()
self.assertFalse(resp.keep_alive)
def test_response_cookies(self):
resp = StreamResponse()
self.assertEqual(resp.cookies, {})
self.assertEqual(str(resp.cookies), '')
resp.set_cookie('name', 'value')
self.assertEqual(str(resp.cookies), 'Set-Cookie: name=value; Path=/')
resp.set_cookie('name', 'other_value')
self.assertEqual(str(resp.cookies),
'Set-Cookie: name=other_value; Path=/')
resp.cookies['name'] = 'another_other_value'
resp.cookies['name']['max-age'] = 10
self.assertEqual(
str(resp.cookies),
'Set-Cookie: name=another_other_value; Max-Age=10; Path=/')
resp.del_cookie('name')
self.assertEqual(
str(resp.cookies),
'Set-Cookie: name=; Max-Age=0; Path=/')
resp.set_cookie('name', 'value', domain='local.host', path=None)
self.assertEqual(str(resp.cookies),
'Set-Cookie: name=value; Domain=local.host')
def test_response_cookie_path(self):
resp = StreamResponse()
self.assertEqual(resp.cookies, {})
resp.set_cookie('name', 'value', path='/some/path')
self.assertEqual(str(resp.cookies),
'Set-Cookie: name=value; Path=/some/path')
resp.set_cookie('name', 'value', expires='123')
self.assertEqual(str(resp.cookies),
'Set-Cookie: name=value; expires=123;'
' Path=/')
resp.set_cookie('name', 'value', domain='example.com',
path='/home', expires='123', max_age='10',
secure=True, httponly=True, version='2.0')
self.assertEqual(str(resp.cookies).lower(),
'set-cookie: name=value; '
'domain=example.com; '
'expires=123; '
'httponly; '
'max-age=10; '
'path=/home; '
'secure; '
'version=2.0')
def test_response_cookie__issue_del_cookie(self):
resp = StreamResponse()
self.assertEqual(resp.cookies, {})
self.assertEqual(str(resp.cookies), '')
resp.del_cookie('name')
self.assertEqual(str(resp.cookies),
'Set-Cookie: name=; Max-Age=0; Path=/')
def test_cookie_set_after_del(self):
resp = StreamResponse()
resp.del_cookie('name')
resp.set_cookie('name', 'val')
# check that the Max-Age attribute was dropped
self.assertEqual(str(resp.cookies),
'Set-Cookie: name=val; Path=/')
def test_set_status_with_reason(self):
resp = StreamResponse()
resp.set_status(200, "Everithing is fine!")
self.assertEqual(200, resp.status)
self.assertEqual("Everithing is fine!", resp.reason)
def test_start_force_close(self):
req = self.make_request('GET', '/')
resp = StreamResponse()
resp.force_close()
self.assertFalse(resp.keep_alive)
msg = resp.start(req)
self.assertFalse(resp.keep_alive)
self.assertTrue(msg.closing)
def test___repr__(self):
req = self.make_request('GET', '/path/to')
resp = StreamResponse(reason=301)
resp.start(req)
self.assertEqual("<StreamResponse 301 GET /path/to >", repr(resp))
def test___repr__not_started(self):
resp = StreamResponse(reason=301)
self.assertEqual("<StreamResponse 301 not started>", repr(resp))
def test_keep_alive_http10(self):
message = RawRequestMessage('GET', '/', HttpVersion10, CIMultiDict(),
True, False)
req = self.request_from_message(message)
resp = StreamResponse()
resp.start(req)
self.assertFalse(resp.keep_alive)
headers = CIMultiDict(Connection='keep-alive')
message = RawRequestMessage('GET', '/', HttpVersion10, headers,
False, False)
req = self.request_from_message(message)
resp = StreamResponse()
resp.start(req)
self.assertEqual(resp.keep_alive, True)
def test_keep_alive_http09(self):
headers = CIMultiDict(Connection='keep-alive')
message = RawRequestMessage('GET', '/', HttpVersion(0, 9), headers,
False, False)
req = self.request_from_message(message)
resp = StreamResponse()
resp.start(req)
self.assertFalse(resp.keep_alive)
class TestResponse(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def make_request(self, method, path, headers=CIMultiDict()):
self.app = mock.Mock()
message = RawRequestMessage(method, path, HttpVersion11, headers,
False, False)
self.payload = mock.Mock()
self.transport = mock.Mock()
self.reader = mock.Mock()
self.writer = mock.Mock()
req = Request(self.app, message, self.payload,
self.transport, self.reader, self.writer)
return req
def test_ctor(self):
resp = Response()
self.assertEqual(200, resp.status)
self.assertEqual('OK', resp.reason)
self.assertIsNone(resp.body)
self.assertEqual(0, resp.content_length)
self.assertEqual(CIMultiDict([('CONTENT-LENGTH', '0')]),
resp.headers)
def test_ctor_with_headers_and_status(self):
resp = Response(body=b'body', status=201, headers={'Age': '12'})
self.assertEqual(201, resp.status)
self.assertEqual(b'body', resp.body)
self.assertEqual(4, resp.content_length)
self.assertEqual(CIMultiDict(
[('AGE', '12'),
('CONTENT-LENGTH', '4')]), resp.headers)
def test_ctor_content_type(self):
resp = Response(content_type='application/json')
self.assertEqual(200, resp.status)
self.assertEqual('OK', resp.reason)
self.assertEqual(
CIMultiDict(
[('CONTENT-TYPE', 'application/json'),
('CONTENT-LENGTH', '0')]),
resp.headers)
def test_ctor_text_body_combined(self):
with self.assertRaises(ValueError):
Response(body=b'123', text='test text')
def test_ctor_text(self):
resp = Response(text='test text')
self.assertEqual(200, resp.status)
self.assertEqual('OK', resp.reason)
self.assertEqual(
CIMultiDict(
[('CONTENT-TYPE', 'text/plain; charset=utf-8'),
('CONTENT-LENGTH', '9')]),
resp.headers)
self.assertEqual(resp.body, b'test text')
self.assertEqual(resp.text, 'test text')
def test_assign_nonbyteish_body(self):
resp = Response(body=b'data')
with self.assertRaises(TypeError):
resp.body = 123
self.assertEqual(b'data', resp.body)
self.assertEqual(4, resp.content_length)
def test_assign_nonstr_text(self):
resp = Response(text='test')
with self.assertRaises(TypeError):
resp.text = b'123'
self.assertEqual(b'test', resp.body)
self.assertEqual(4, resp.content_length)
def test_send_headers_for_empty_body(self):
req = self.make_request('GET', '/')
resp = Response()
self.writer.drain.return_value = ()
buf = b''
def append(data):
nonlocal buf
buf += data
self.writer.write.side_effect = append
resp.start(req)
self.loop.run_until_complete(resp.write_eof())
txt = buf.decode('utf8')
self.assertRegex(txt, 'HTTP/1.1 200 OK\r\nCONTENT-LENGTH: 0\r\n'
'CONNECTION: keep-alive\r\n'
'DATE: .+\r\nSERVER: .+\r\n\r\n')
def test_render_with_body(self):
req = self.make_request('GET', '/')
resp = Response(body=b'data')
self.writer.drain.return_value = ()
buf = b''
def append(data):
nonlocal buf
buf += data
self.writer.write.side_effect = append
resp.start(req)
self.loop.run_until_complete(resp.write_eof())
txt = buf.decode('utf8')
self.assertRegex(txt, 'HTTP/1.1 200 OK\r\nCONTENT-LENGTH: 4\r\n'
'CONNECTION: keep-alive\r\n'
'DATE: .+\r\nSERVER: .+\r\n\r\ndata')
def test_send_set_cookie_header(self):
resp = Response()
resp.cookies['name'] = 'value'
req = self.make_request('GET', '/')
self.writer.drain.return_value = ()
buf = b''
def append(data):
nonlocal buf
buf += data
self.writer.write.side_effect = append
resp.start(req)
self.loop.run_until_complete(resp.write_eof())
txt = buf.decode('utf8')
self.assertRegex(txt, 'HTTP/1.1 200 OK\r\nCONTENT-LENGTH: 0\r\n'
'SET-COOKIE: name=value\r\n'
'CONNECTION: keep-alive\r\n'
'DATE: .+\r\nSERVER: .+\r\n\r\n')
def test_set_text_with_content_type(self):
resp = Response()
resp.content_type = "text/html"
resp.text = "text"
self.assertEqual("text", resp.text)
self.assertEqual(b"text", resp.body)
self.assertEqual("text/html", resp.content_type)
def test_set_text_with_charset(self):
resp = Response()
resp.content_type = 'text/plain'
resp.charset = "KOI8-R"
resp.text = "текст"
self.assertEqual("текст", resp.text)
self.assertEqual("текст".encode('koi8-r'), resp.body)
self.assertEqual("koi8-r", resp.charset)
def test_started_when_not_started(self):
resp = StreamResponse()
self.assertFalse(resp.started)
def test_started_when_started(self):
resp = StreamResponse()
resp.start(self.make_request('GET', '/'))
self.assertTrue(resp.started)
def test_drain_before_start(self):
@asyncio.coroutine
def go():
resp = StreamResponse()
with self.assertRaises(RuntimeError):
yield from resp.drain()
self.loop.run_until_complete(go())
def test_nonstr_text_in_ctor(self):
with self.assertRaises(TypeError):
Response(text=b'data')
def test_text_in_ctor_with_content_type(self):
resp = Response(text='data', content_type='text/html')
self.assertEqual('data', resp.text)
self.assertEqual('text/html', resp.content_type)
def test_text_in_ctor_with_content_type_header(self):
resp = Response(text='текст',
headers={'Content-Type': 'text/html; charset=koi8-r'})
self.assertEqual('текст'.encode('koi8-r'), resp.body)
self.assertEqual('text/html', resp.content_type)
self.assertEqual('koi8-r', resp.charset)
def test_text_with_empty_payload(self):
resp = Response(status=200)
self.assertEqual(resp.body, None)
self.assertEqual(resp.text, None)
| apache-2.0 |
mega-force/osmc | package/mediacenter-skin-osmc/files/usr/share/kodi/addons/script.module.unidecode/lib/unidecode/x0c2.py | 253 | 4710 | data = (
'syon', # 0x00
'syonj', # 0x01
'syonh', # 0x02
'syod', # 0x03
'syol', # 0x04
'syolg', # 0x05
'syolm', # 0x06
'syolb', # 0x07
'syols', # 0x08
'syolt', # 0x09
'syolp', # 0x0a
'syolh', # 0x0b
'syom', # 0x0c
'syob', # 0x0d
'syobs', # 0x0e
'syos', # 0x0f
'syoss', # 0x10
'syong', # 0x11
'syoj', # 0x12
'syoc', # 0x13
'syok', # 0x14
'syot', # 0x15
'syop', # 0x16
'syoh', # 0x17
'su', # 0x18
'sug', # 0x19
'sugg', # 0x1a
'sugs', # 0x1b
'sun', # 0x1c
'sunj', # 0x1d
'sunh', # 0x1e
'sud', # 0x1f
'sul', # 0x20
'sulg', # 0x21
'sulm', # 0x22
'sulb', # 0x23
'suls', # 0x24
'sult', # 0x25
'sulp', # 0x26
'sulh', # 0x27
'sum', # 0x28
'sub', # 0x29
'subs', # 0x2a
'sus', # 0x2b
'suss', # 0x2c
'sung', # 0x2d
'suj', # 0x2e
'suc', # 0x2f
'suk', # 0x30
'sut', # 0x31
'sup', # 0x32
'suh', # 0x33
'sweo', # 0x34
'sweog', # 0x35
'sweogg', # 0x36
'sweogs', # 0x37
'sweon', # 0x38
'sweonj', # 0x39
'sweonh', # 0x3a
'sweod', # 0x3b
'sweol', # 0x3c
'sweolg', # 0x3d
'sweolm', # 0x3e
'sweolb', # 0x3f
'sweols', # 0x40
'sweolt', # 0x41
'sweolp', # 0x42
'sweolh', # 0x43
'sweom', # 0x44
'sweob', # 0x45
'sweobs', # 0x46
'sweos', # 0x47
'sweoss', # 0x48
'sweong', # 0x49
'sweoj', # 0x4a
'sweoc', # 0x4b
'sweok', # 0x4c
'sweot', # 0x4d
'sweop', # 0x4e
'sweoh', # 0x4f
'swe', # 0x50
'sweg', # 0x51
'swegg', # 0x52
'swegs', # 0x53
'swen', # 0x54
'swenj', # 0x55
'swenh', # 0x56
'swed', # 0x57
'swel', # 0x58
'swelg', # 0x59
'swelm', # 0x5a
'swelb', # 0x5b
'swels', # 0x5c
'swelt', # 0x5d
'swelp', # 0x5e
'swelh', # 0x5f
'swem', # 0x60
'sweb', # 0x61
'swebs', # 0x62
'swes', # 0x63
'swess', # 0x64
'sweng', # 0x65
'swej', # 0x66
'swec', # 0x67
'swek', # 0x68
'swet', # 0x69
'swep', # 0x6a
'sweh', # 0x6b
'swi', # 0x6c
'swig', # 0x6d
'swigg', # 0x6e
'swigs', # 0x6f
'swin', # 0x70
'swinj', # 0x71
'swinh', # 0x72
'swid', # 0x73
'swil', # 0x74
'swilg', # 0x75
'swilm', # 0x76
'swilb', # 0x77
'swils', # 0x78
'swilt', # 0x79
'swilp', # 0x7a
'swilh', # 0x7b
'swim', # 0x7c
'swib', # 0x7d
'swibs', # 0x7e
'swis', # 0x7f
'swiss', # 0x80
'swing', # 0x81
'swij', # 0x82
'swic', # 0x83
'swik', # 0x84
'swit', # 0x85
'swip', # 0x86
'swih', # 0x87
'syu', # 0x88
'syug', # 0x89
'syugg', # 0x8a
'syugs', # 0x8b
'syun', # 0x8c
'syunj', # 0x8d
'syunh', # 0x8e
'syud', # 0x8f
'syul', # 0x90
'syulg', # 0x91
'syulm', # 0x92
'syulb', # 0x93
'syuls', # 0x94
'syult', # 0x95
'syulp', # 0x96
'syulh', # 0x97
'syum', # 0x98
'syub', # 0x99
'syubs', # 0x9a
'syus', # 0x9b
'syuss', # 0x9c
'syung', # 0x9d
'syuj', # 0x9e
'syuc', # 0x9f
'syuk', # 0xa0
'syut', # 0xa1
'syup', # 0xa2
'syuh', # 0xa3
'seu', # 0xa4
'seug', # 0xa5
'seugg', # 0xa6
'seugs', # 0xa7
'seun', # 0xa8
'seunj', # 0xa9
'seunh', # 0xaa
'seud', # 0xab
'seul', # 0xac
'seulg', # 0xad
'seulm', # 0xae
'seulb', # 0xaf
'seuls', # 0xb0
'seult', # 0xb1
'seulp', # 0xb2
'seulh', # 0xb3
'seum', # 0xb4
'seub', # 0xb5
'seubs', # 0xb6
'seus', # 0xb7
'seuss', # 0xb8
'seung', # 0xb9
'seuj', # 0xba
'seuc', # 0xbb
'seuk', # 0xbc
'seut', # 0xbd
'seup', # 0xbe
'seuh', # 0xbf
'syi', # 0xc0
'syig', # 0xc1
'syigg', # 0xc2
'syigs', # 0xc3
'syin', # 0xc4
'syinj', # 0xc5
'syinh', # 0xc6
'syid', # 0xc7
'syil', # 0xc8
'syilg', # 0xc9
'syilm', # 0xca
'syilb', # 0xcb
'syils', # 0xcc
'syilt', # 0xcd
'syilp', # 0xce
'syilh', # 0xcf
'syim', # 0xd0
'syib', # 0xd1
'syibs', # 0xd2
'syis', # 0xd3
'syiss', # 0xd4
'sying', # 0xd5
'syij', # 0xd6
'syic', # 0xd7
'syik', # 0xd8
'syit', # 0xd9
'syip', # 0xda
'syih', # 0xdb
'si', # 0xdc
'sig', # 0xdd
'sigg', # 0xde
'sigs', # 0xdf
'sin', # 0xe0
'sinj', # 0xe1
'sinh', # 0xe2
'sid', # 0xe3
'sil', # 0xe4
'silg', # 0xe5
'silm', # 0xe6
'silb', # 0xe7
'sils', # 0xe8
'silt', # 0xe9
'silp', # 0xea
'silh', # 0xeb
'sim', # 0xec
'sib', # 0xed
'sibs', # 0xee
'sis', # 0xef
'siss', # 0xf0
'sing', # 0xf1
'sij', # 0xf2
'sic', # 0xf3
'sik', # 0xf4
'sit', # 0xf5
'sip', # 0xf6
'sih', # 0xf7
'ssa', # 0xf8
'ssag', # 0xf9
'ssagg', # 0xfa
'ssags', # 0xfb
'ssan', # 0xfc
'ssanj', # 0xfd
'ssanh', # 0xfe
'ssad', # 0xff
)
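# A hedged usage sketch: each index in the tuple above is the low byte of a
# code point in the U+C2xx block, so data[0x18] transliterates U+C218
# (Hangul 'su') via the package's public entry point, e.g.:
# from unidecode import unidecode
# unidecode(u'\uc218')  # -> 'su'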
| gpl-2.0 |
hirofumi0810/tensorflow_end2end_speech_recognition | models/encoders/core/vgg_blstm.py | 1 | 9163 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""VGG + bidirectional LSTM encoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from models.encoders.core.cnn_util import conv_layer, max_pool, batch_normalization
from models.encoders.core.blstm import basiclstmcell, lstmcell, lstmblockcell, lstmblockfusedcell, cudnnlstm
class VGGBLSTMEncoder(object):
"""VGG + bidirectional LSTM encoder.
Args:
input_size (int): the dimensions of input vectors.
This is expected to be num_channels * 3 (static + Δ + ΔΔ)
splice (int): frames to splice
num_stack (int): the number of frames to stack
num_units (int): the number of units in each layer
num_proj (int): the number of nodes in the projection layer
num_layers (int): the number of layers
lstm_impl (string, optional): a base implementation of LSTM.
- BasicLSTMCell: tf.contrib.rnn.BasicLSTMCell (no peephole)
- LSTMCell: tf.contrib.rnn.LSTMCell
- LSTMBlockCell: tf.contrib.rnn.LSTMBlockCell
- LSTMBlockFusedCell: under implementation
- CudnnLSTM: under implementation
Choose the backend implementation of tensorflow.
use_peephole (bool): if True, use peephole
parameter_init (float): the range of uniform distribution to
initialize weight parameters (>= 0)
clip_activation (float): the range of activation clipping (> 0)
time_major (bool, optional): if True, time-major computation will be
performed
name (string, optional): the name of encoder
"""
def __init__(self,
input_size,
splice,
num_stack,
num_units,
num_proj,
num_layers,
lstm_impl,
use_peephole,
parameter_init,
clip_activation,
time_major=False,
name='vgg_blstm_encoder'):
assert num_proj != 0
assert input_size % 3 == 0
self.num_channels = input_size // 3
self.splice = splice
self.num_stack = num_stack
self.num_units = num_units
if lstm_impl != 'LSTMCell':
self.num_proj = None
else:
self.num_proj = num_proj
# TODO: fix this
self.num_layers = num_layers
self.lstm_impl = lstm_impl
self.use_peephole = use_peephole
self.parameter_init = parameter_init
self.clip_activation = clip_activation
self.time_major = time_major
self.name = name
def __call__(self, inputs, inputs_seq_len, keep_prob, is_training):
"""Construct model graph.
Args:
inputs (placeholder): A tensor of size
`[B, T, input_size (num_channels * (splice * num_stack) * 3)]`
inputs_seq_len (placeholder): A tensor of size` [B]`
keep_prob (placeholder, float): A probability to keep nodes
in the hidden-hidden connection
is_training (bool):
Returns:
outputs: Encoder states.
if time_major is True, a tensor of size
`[T, B, num_units (num_proj)]`
otherwise, `[B, T, num_units (num_proj)]`
final_state: A final hidden state of the encoder
"""
# inputs: 3D tensor `[B, T, input_dim]`
batch_size = tf.shape(inputs)[0]
max_time = tf.shape(inputs)[1]
input_dim = inputs.shape.as_list()[-1]
# NOTE: input_dim: num_channels * splice * num_stack * 3
# For debug
# print(input_dim)
# print(self.num_channels)
# print(self.splice)
# print(self.num_stack)
assert input_dim == self.num_channels * self.splice * self.num_stack * 3
# Reshape to 4D tensor `[B * T, num_channels, splice * num_stack, 3]`
inputs = tf.reshape(
inputs,
shape=[batch_size * max_time, self.num_channels, self.splice * self.num_stack, 3])
# NOTE: filter_size: `[H, W, C_in, C_out]`
with tf.variable_scope('VGG1'):
inputs = conv_layer(inputs,
filter_size=[3, 3, 3, 64],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu',
name='conv1')
# inputs = batch_normalization(inputs, is_training=is_training)
inputs = tf.nn.dropout(inputs, keep_prob)
inputs = conv_layer(inputs,
filter_size=[3, 3, 64, 64],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu',
name='conv2')
# inputs = batch_normalization(inputs, is_training=is_training)
inputs = max_pool(inputs,
pooling_size=[2, 2],
stride=[2, 2],
name='max_pool')
inputs = tf.nn.dropout(inputs, keep_prob)
with tf.variable_scope('VGG2'):
inputs = conv_layer(inputs,
filter_size=[3, 3, 64, 128],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu',
name='conv1')
# inputs = batch_normalization(inputs, is_training=is_training)
inputs = tf.nn.dropout(inputs, keep_prob)
inputs = conv_layer(inputs,
filter_size=[3, 3, 128, 128],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu',
name='conv2')
# inputs = batch_normalization(inputs, is_training=is_training)
inputs = max_pool(inputs,
pooling_size=[2, 2],
stride=[2, 2],
name='max_pool')
inputs = tf.nn.dropout(inputs, keep_prob)
# Reshape to 2D tensor `[B * T, new_h * new_w * C_out]`
inputs = tf.reshape(
inputs, shape=[batch_size * max_time, np.prod(inputs.shape.as_list()[-3:])])
# Insert a linear layer to reduce the CNN's output dimension
# from (new_h * new_w * C_out) to 256
with tf.variable_scope('bridge') as scope:
inputs = tf.contrib.layers.fully_connected(
inputs=inputs,
num_outputs=256,
activation_fn=tf.nn.relu,
weights_initializer=tf.truncated_normal_initializer(
stddev=self.parameter_init),
biases_initializer=tf.zeros_initializer(),
scope=scope)
inputs = tf.nn.dropout(inputs, keep_prob)
# Reshape back to 3D tensor `[B, T, 256]`
inputs = tf.reshape(inputs, shape=[batch_size, max_time, 256])
initializer = tf.random_uniform_initializer(
minval=-self.parameter_init, maxval=self.parameter_init)
if self.lstm_impl == 'BasicLSTMCell':
outputs, final_state = basiclstmcell(
self.num_units, self.num_layers,
inputs, inputs_seq_len, keep_prob, initializer,
self.time_major)
elif self.lstm_impl == 'LSTMCell':
outputs, final_state = lstmcell(
self.num_units, self.num_proj, self.num_layers,
self.use_peephole, self.clip_activation,
inputs, inputs_seq_len, keep_prob, initializer,
self.time_major)
elif self.lstm_impl == 'LSTMBlockCell':
outputs, final_state = lstmblockcell(
self.num_units, self.num_layers,
self.use_peephole, self.clip_activation,
inputs, inputs_seq_len, keep_prob, initializer,
self.time_major)
elif self.lstm_impl == 'LSTMBlockFusedCell':
outputs, final_state = lstmblockfusedcell(
self.num_units, self.num_layers,
self.use_peephole, self.clip_activation,
inputs, inputs_seq_len, keep_prob, initializer,
self.time_major)
elif self.lstm_impl == 'CudnnLSTM':
outputs, final_state = cudnnlstm(
self.num_units, self.num_layers, self.parameter_init,
inputs, inputs_seq_len, keep_prob, initializer,
self.time_major)
else:
raise ValueError(
'lstm_impl must be one of "BasicLSTMCell", "LSTMCell", ' +
'"LSTMBlockCell", "LSTMBlockFusedCell" or ' +
'"CudnnLSTM".')
return outputs, final_state
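# A hedged usage sketch (shapes and hyperparameters below are illustrative
# assumptions, not values taken from this repository):
#
# encoder = VGGBLSTMEncoder(input_size=120, splice=11, num_stack=3,
#                           num_units=256, num_proj=256, num_layers=2,
#                           lstm_impl='LSTMCell', use_peephole=True,
#                           parameter_init=0.1, clip_activation=5.0)
# outputs, final_state = encoder(inputs, inputs_seq_len, keep_prob,
#                                is_training=True)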
| mit |
hbrunn/OpenUpgrade | addons/stock_account/wizard/stock_invoice_onshipping.py | 33 | 4939 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class stock_invoice_onshipping(osv.osv_memory):
def _get_journal(self, cr, uid, context=None):
res = self._get_journal_id(cr, uid, context=context)
if res:
return res[0][0]
return False
def _get_journal_id(self, cr, uid, context=None):
if context is None:
context = {}
journal_obj = self.pool.get('account.journal')
value = journal_obj.search(cr, uid, [('type', 'in', ('sale', 'sale_refund'))])
vals = []
for jr_type in journal_obj.browse(cr, uid, value, context=context):
t1 = jr_type.id,jr_type.name
if t1 not in vals:
vals.append(t1)
return vals
_name = "stock.invoice.onshipping"
_description = "Stock Invoice Onshipping"
_columns = {
'journal_id': fields.selection(_get_journal_id, 'Destination Journal',required=True),
'group': fields.boolean("Group by partner"),
'inv_type': fields.selection([('out_invoice','Create Invoice'),('out_refund','Refund Invoice')], "Invoice Type"),
'invoice_date': fields.date('Invoice Date'),
}
_defaults = {
'journal_id' : _get_journal,
'inv_type': lambda self,cr,uid,ctx: ctx.get('inv_type', 'out_invoice')
}
def view_init(self, cr, uid, fields_list, context=None):
if context is None:
context = {}
res = super(stock_invoice_onshipping, self).view_init(cr, uid, fields_list, context=context)
pick_obj = self.pool.get('stock.picking')
count = 0
active_ids = context.get('active_ids',[])
for pick in pick_obj.browse(cr, uid, active_ids, context=context):
if pick.invoice_state != '2binvoiced':
count += 1
if len(active_ids) == count:
raise osv.except_osv(_('Warning!'), _('None of these picking lists require invoicing.'))
return res
def open_invoice(self, cr, uid, ids, context=None):
if context is None:
context = {}
invoice_ids = self.create_invoice(cr, uid, ids, context=context)
if not invoice_ids:
raise osv.except_osv(_('Error!'), _('No invoice created!'))
onshipdata_obj = self.read(cr, uid, ids, ['journal_id', 'group', 'invoice_date', 'inv_type'])
inv_type = onshipdata_obj[0]['inv_type']
action_model = False
action = {}
data_pool = self.pool.get('ir.model.data')
if inv_type == "out_refund":
action_model,action_id = data_pool.get_object_reference(cr, uid, 'account', "action_invoice_tree3")
elif inv_type == "out_invoice":
action_model,action_id = data_pool.get_object_reference(cr, uid, 'account', "action_invoice_tree1")
if action_model:
action_pool = self.pool[action_model]
action = action_pool.read(cr, uid, action_id, context=context)
action['domain'] = "[('id','in', ["+','.join(map(str,invoice_ids))+"])]"
return action
return True
def create_invoice(self, cr, uid, ids, context=None):
context = context or {}
picking_pool = self.pool.get('stock.picking')
onshipdata_obj = self.read(cr, uid, ids, ['journal_id', 'group', 'invoice_date', 'inv_type'])
context['date_inv'] = onshipdata_obj[0]['invoice_date']
inv_type = onshipdata_obj[0]['inv_type']
context['inv_type'] = inv_type
active_ids = context.get('active_ids', [])
if isinstance(onshipdata_obj[0]['journal_id'], tuple):
onshipdata_obj[0]['journal_id'] = onshipdata_obj[0]['journal_id'][0]
res = picking_pool.action_invoice_create(cr, uid, active_ids,
journal_id = onshipdata_obj[0]['journal_id'],
group = onshipdata_obj[0]['group'],
type = inv_type,
context=context)
return res
| agpl-3.0 |
indevgr/django | tests/aggregation/models.py | 282 | 1444 | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=100)
age = models.IntegerField()
friends = models.ManyToManyField('self', blank=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Publisher(models.Model):
name = models.CharField(max_length=255)
num_awards = models.IntegerField()
duration = models.DurationField(blank=True, null=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=9)
name = models.CharField(max_length=255)
pages = models.IntegerField()
rating = models.FloatField()
price = models.DecimalField(decimal_places=2, max_digits=6)
authors = models.ManyToManyField(Author)
contact = models.ForeignKey(Author, models.CASCADE, related_name='book_contact_set')
publisher = models.ForeignKey(Publisher, models.CASCADE)
pubdate = models.DateField()
def __str__(self):
return self.name
@python_2_unicode_compatible
class Store(models.Model):
name = models.CharField(max_length=255)
books = models.ManyToManyField(Book)
original_opening = models.DateTimeField()
friday_night_closing = models.TimeField()
def __str__(self):
return self.name
| bsd-3-clause |
maresja1/qemu-helenos | scripts/tracetool/backend/ftrace.py | 102 | 1351 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Ftrace built-in backend.
"""
__author__ = "Eiichi Tsukata <[email protected]>"
__copyright__ = "Copyright (C) 2013 Hitachi, Ltd."
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
from tracetool import out
PUBLIC = True
def generate_h_begin(events):
out('#include "trace/ftrace.h"',
'#include "trace/control.h"',
'')
def generate_h(event):
argnames = ", ".join(event.args.names())
if len(event.args) > 0:
argnames = ", " + argnames
out(' {',
' char ftrace_buf[MAX_TRACE_STRLEN];',
' int unused __attribute__ ((unused));',
' int trlen;',
' if (trace_event_get_state(%(event_id)s)) {',
' trlen = snprintf(ftrace_buf, MAX_TRACE_STRLEN,',
' "%(name)s " %(fmt)s "\\n" %(argnames)s);',
' trlen = MIN(trlen, MAX_TRACE_STRLEN - 1);',
' unused = write(trace_marker_fd, ftrace_buf, trlen);',
' }',
' }',
name=event.name,
args=event.args,
event_id="TRACE_" + event.name.upper(),
fmt=event.fmt.rstrip("\n"),
argnames=argnames)
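# For illustration (a hedged sketch; the trace-events declaration syntax
# below is an assumption, not taken from this repository): an event declared
# as
#   qemu_foo(int x) "x=%d"
# would expand to a guard on trace_event_get_state(TRACE_QEMU_FOO) that
# snprintf-formats "qemu_foo x=%d\n" into ftrace_buf and writes it to
# trace_marker_fd.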
| gpl-2.0 |
CalebSLane/openelisglobal-core | liquibase/HaitiLNSPMassive/scripts/sampleTypePanel.py | 6 | 1060 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
type = []
panel = []
done_combos = []
type_file = open('sampleType.txt','r')
panel_file = open('panels.txt','r')
type_panel_results = open("typePanelResults.txt", 'w')
for line in type_file:
type.append(line.strip())
type_file.close()
for line in panel_file:
panel.append(line.strip())
panel_file.close()
for row in range(0, len(type)):
if len(panel[row]) > 1:
if (type[row]+panel[row]) not in done_combos:
type_panel_results.write("INSERT INTO clinlims.sampletype_panel (id, sample_type_id, panel_id ) VALUES \n\t(nextval( 'sample_type_panel_seq') , ")
type_panel_results.write("(select id from clinlims.type_of_sample where description = '" + type[row] + "' ) , ")
type_panel_results.write("(select id from clinlims.panel where name = '" + panel[row] + "' ) );\n")
done_combos.append(type[row]+panel[row])
type_panel_results.close()
print "Done look for results in typePanelResults.txt" | mpl-2.0 |
HoracioAlvarado/fwd | venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py | 499 | 5766 | """A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.
To create a tree walker for a new type of tree, you need to
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
"""
from __future__ import absolute_import, division, unicode_literals
__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshistream", "lxmletree",
"pulldom"]
import sys
from .. import constants
from ..utils import default_etree
treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
"""Get a TreeWalker class for various types of tree with built-in support
treeType - the name of the tree type required (case-insensitive). Supported
values are:
"dom" - The xml.dom.minidom DOM implementation
"pulldom" - The xml.dom.pulldom event stream
"etree" - A generic walker for tree implementations exposing an
elementtree-like interface (known to work with
ElementTree, cElementTree and lxml.etree).
"lxml" - Optimized walker for lxml.etree
"genshi" - a Genshi stream
implementation - (Currently applies to the "etree" tree type only). A module
implementing the tree type e.g. xml.etree.ElementTree or
cElementTree."""
treeType = treeType.lower()
if treeType not in treeWalkerCache:
if treeType in ("dom", "pulldom"):
name = "%s.%s" % (__name__, treeType)
__import__(name)
mod = sys.modules[name]
treeWalkerCache[treeType] = mod.TreeWalker
elif treeType == "genshi":
from . import genshistream
treeWalkerCache[treeType] = genshistream.TreeWalker
elif treeType == "lxml":
from . import lxmletree
treeWalkerCache[treeType] = lxmletree.TreeWalker
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# XXX: NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeWalker
return treeWalkerCache.get(treeType)
def concatenateCharacterTokens(tokens):
pendingCharacters = []
for token in tokens:
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
pendingCharacters.append(token["data"])
else:
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
pendingCharacters = []
yield token
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
def pprint(walker):
"""Pretty printer for tree walkers"""
output = []
indent = 0
for token in concatenateCharacterTokens(walker):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
# tag name
if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
if token["namespace"] in constants.prefixes:
ns = constants.prefixes[token["namespace"]]
else:
ns = token["namespace"]
name = "%s %s" % (ns, token["name"])
else:
name = token["name"]
output.append("%s<%s>" % (" " * indent, name))
indent += 2
# attributes (sorted for consistent ordering)
attrs = token["data"]
for (namespace, localname), value in sorted(attrs.items()):
if namespace:
if namespace in constants.prefixes:
ns = constants.prefixes[namespace]
else:
ns = namespace
name = "%s %s" % (ns, localname)
else:
name = localname
output.append("%s%s=\"%s\"" % (" " * indent, name, value))
# self-closing
if type == "EmptyTag":
indent -= 2
elif type == "EndTag":
indent -= 2
elif type == "Comment":
output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
elif type == "Doctype":
if token["name"]:
if token["publicId"]:
output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
(" " * indent,
token["name"],
token["publicId"],
token["systemId"] if token["systemId"] else ""))
elif token["systemId"]:
output.append("""%s<!DOCTYPE %s "" "%s">""" %
(" " * indent,
token["name"],
token["systemId"]))
else:
output.append("%s<!DOCTYPE %s>" % (" " * indent,
token["name"]))
else:
output.append("%s<!DOCTYPE >" % (" " * indent,))
elif type == "Characters":
output.append("%s\"%s\"" % (" " * indent, token["data"]))
elif type == "SpaceCharacters":
assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
else:
raise ValueError("Unknown token type, %s" % type)
return "\n".join(output)
| mit |
kbase/data_api | bin/stress_service.py | 2 | 3471 | #!/usr/bin/env python
import argparse
import os
import multiprocessing
from six import print_
import sys
import time
import doekbase.data_api
#from doekbase.data_api.taxonomy.taxon.api import TaxonClientAPI
from doekbase.data_api.annotation.genome_annotation.api import GenomeAnnotationClientAPI
from doekbase.data_api.sequence.assembly.api import AssemblyClientAPI
from doekbase.workspace.client import Workspace
PORTS = {
'taxon': 9101,
'assembly': 9102,
'genome_annotation': 9103
}
token = os.environ["KB_AUTH_TOKEN"]
workspace_names = ["ReferenceEnsemblPlantGenomeAnnotations", "ReferenceGenomeAnnotations"]
def get_genome_annotation_url(host):
return "http://{shost}:{port}".format(shost=host, port=PORTS['genome_annotation'])
def get_assembly_url(host):
return "http://{shost}:{port}".format(shost=host, port=PORTS['assembly'])
def get_workspace_url(host):
return "https://{khost}.kbase.us/services/ws/".format(khost=host)
def run(service_host, kbase_host):
pid = os.getpid()
ws = Workspace(url=get_workspace_url(kbase_host), token=token)
for name in workspace_names:
while 1:
print('[{:d}] List objects'.format(pid))
try:
annotations = ws.list_objects({"workspaces": [name], "type": "KBaseGenomeAnnotations.GenomeAnnotation"})
break
except Exception as err:
print('Retry on timeout: {}'.format(str(err)))
print('[{:d}] Got {:d} objects'.format(pid, len(annotations)))
for obj_num, obj in enumerate(annotations):
ref = obj[7] + "/" + obj[1]
print('[{:d}] Fetch {:d}/{:d}: {}'.format(pid, obj_num + 1, len(annotations), ref))
ga = GenomeAnnotationClientAPI(get_genome_annotation_url(service_host),token,ref)
#taxon = TaxonClientAPI(services["taxon_service_url"],token,ga.get_taxon())
assembly = AssemblyClientAPI(get_assembly_url(service_host), token, ga.get_assembly())
while 1:
try:
fids = ga.get_feature_ids()
fdata = ga.get_features()
cids = assembly.get_contig_ids()
contigs = assembly.get_contigs()
break  # success; without this the loop would retry the same object forever
except doekbase.data_api.exceptions.ServiceError as err:
print('[{:d}] Error: {}'.format(pid, err))
time.sleep(0.5)
print('[{:d}] Retrying'.format(pid))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-s', dest='shost', help='service host (localhost)', default='localhost',
metavar='HOST')
parser.add_argument('-k', dest='khost', help='kbase host (ci)', default='ci', metavar='HOST')
parser.add_argument('-p', dest='par', help='run in parallel N times', default=1, metavar='N',
type=int)
args = parser.parse_args()
if args.par < 2:
run(args.shost, args.khost)
else:
processes = []
print_('start {} processes'.format(args.par))
for i in range(args.par):
print_('.', end='\r')
process = multiprocessing.Process(target=run, args=(args.shost, args.khost))
process.start()
processes.append(process)
print_('\njoin processes')
for process in processes:
print_('.', end='\r')
process.join()
return 0
if __name__ == '__main__':
sys.exit(main())
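# Example invocation (hedged; hosts and token are placeholders):
# KB_AUTH_TOKEN=... python stress_service.py -s localhost -k ci -p 4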
| mit |
eamuntz/Django-Tut | env/lib/python2.7/site-packages/django/db/models/fields/files.py | 105 | 15978 | import datetime
import os
from django import forms
from django.db.models.fields import Field
from django.core.files.base import File
from django.core.files.storage import default_storage
from django.core.files.images import ImageFile
from django.db.models import signals
from django.utils.encoding import force_str, force_text
from django.utils import six
from django.utils.translation import ugettext_lazy as _
class FieldFile(File):
def __init__(self, instance, field, name):
super(FieldFile, self).__init__(None, name)
self.instance = instance
self.field = field
self.storage = field.storage
self._committed = True
def __eq__(self, other):
# Older code may be expecting FileField values to be simple strings.
# By overriding the == operator, it can remain backwards compatible.
if hasattr(other, 'name'):
return self.name == other.name
return self.name == other
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.name)
# The standard File contains most of the necessary properties, but
# FieldFiles can be instantiated without a name, so that needs to
# be checked for here.
def _require_file(self):
if not self:
raise ValueError("The '%s' attribute has no file associated with it." % self.field.name)
def _get_file(self):
self._require_file()
if not hasattr(self, '_file') or self._file is None:
self._file = self.storage.open(self.name, 'rb')
return self._file
def _set_file(self, file):
self._file = file
def _del_file(self):
del self._file
file = property(_get_file, _set_file, _del_file)
def _get_path(self):
self._require_file()
return self.storage.path(self.name)
path = property(_get_path)
def _get_url(self):
self._require_file()
return self.storage.url(self.name)
url = property(_get_url)
def _get_size(self):
self._require_file()
if not self._committed:
return self.file.size
return self.storage.size(self.name)
size = property(_get_size)
def open(self, mode='rb'):
self._require_file()
self.file.open(mode)
# open() doesn't alter the file's contents, but it does reset the pointer
open.alters_data = True
# In addition to the standard File API, FieldFiles have extra methods
# to further manipulate the underlying file, as well as update the
# associated model instance.
def save(self, name, content, save=True):
name = self.field.generate_filename(self.instance, name)
self.name = self.storage.save(name, content)
setattr(self.instance, self.field.name, self.name)
# Update the filesize cache
self._size = content.size
self._committed = True
# Save the object because it has changed, unless save is False
if save:
self.instance.save()
save.alters_data = True
def delete(self, save=True):
if not self:
return
# Only close the file if it's already open, which we know by the
# presence of self._file
if hasattr(self, '_file'):
self.close()
del self.file
self.storage.delete(self.name)
self.name = None
setattr(self.instance, self.field.name, self.name)
# Delete the filesize cache
if hasattr(self, '_size'):
del self._size
self._committed = False
if save:
self.instance.save()
delete.alters_data = True
def _get_closed(self):
file = getattr(self, '_file', None)
return file is None or file.closed
closed = property(_get_closed)
def close(self):
file = getattr(self, '_file', None)
if file is not None:
file.close()
def __getstate__(self):
# FieldFile needs access to its associated model field and an instance
# it's attached to in order to work properly, but the only necessary
# data to be pickled is the file's name itself. Everything else will
# be restored later, by FileDescriptor below.
return {'name': self.name, 'closed': False, '_committed': True, '_file': None}
class FileDescriptor(object):
"""
The descriptor for the file attribute on the model instance. Returns a
FieldFile when accessed so you can do stuff like::
>>> instance.file.size
Assigns a file object on assignment so you can do::
>>> instance.file = File(...)
"""
def __init__(self, field):
self.field = field
def __get__(self, instance=None, owner=None):
if instance is None:
raise AttributeError(
"The '%s' attribute can only be accessed from %s instances."
% (self.field.name, owner.__name__))
# This is slightly complicated, so worth an explanation.
# instance.file needs to ultimately return some instance of `File`,
# probably a subclass. Additionally, this returned object needs to have
# the FieldFile API so that users can easily do things like
# instance.file.path and have that delegated to the file storage engine.
# Easy enough if we're strict about assignment in __set__, but if you
# peek below you can see that we're not. So depending on the current
# value of the field we have to dynamically construct some sort of
# "thing" to return.
# The instance dict contains whatever was originally assigned
# in __set__.
file = instance.__dict__[self.field.name]
# If this value is a string (instance.file = "path/to/file") or None
# then we simply wrap it with the appropriate attribute class according
# to the file field. [This is FieldFile for FileFields and
# ImageFieldFile for ImageFields; it's also conceivable that user
# subclasses might also want to subclass the attribute class]. This
# object understands how to convert a path to a file, and also how to
# handle None.
if isinstance(file, six.string_types) or file is None:
attr = self.field.attr_class(instance, self.field, file)
instance.__dict__[self.field.name] = attr
# Other types of files may be assigned as well, but they need to have
# the FieldFile interface added to them. Thus, we wrap any other type of
# File inside a FieldFile (well, the field's attr_class, which is
# usually FieldFile).
elif isinstance(file, File) and not isinstance(file, FieldFile):
file_copy = self.field.attr_class(instance, self.field, file.name)
file_copy.file = file
file_copy._committed = False
instance.__dict__[self.field.name] = file_copy
# Finally, because of the (some would say boneheaded) way pickle works,
# the underlying FieldFile might not actually itself have an associated
# file. So we need to reset the details of the FieldFile in those cases.
elif isinstance(file, FieldFile) and not hasattr(file, 'field'):
file.instance = instance
file.field = self.field
file.storage = self.field.storage
# That was fun, wasn't it?
return instance.__dict__[self.field.name]
def __set__(self, instance, value):
instance.__dict__[self.field.name] = value
class FileField(Field):
# The class to wrap instance attributes in. Accessing the file object off
# the instance will always return an instance of attr_class.
attr_class = FieldFile
# The descriptor to use for accessing the attribute off of the class.
descriptor_class = FileDescriptor
description = _("File")
def __init__(self, verbose_name=None, name=None, upload_to='', storage=None, **kwargs):
for arg in ('primary_key', 'unique'):
if arg in kwargs:
raise TypeError("'%s' is not a valid argument for %s." % (arg, self.__class__))
self.storage = storage or default_storage
self.upload_to = upload_to
if callable(upload_to):
self.generate_filename = upload_to
kwargs['max_length'] = kwargs.get('max_length', 100)
super(FileField, self).__init__(verbose_name, name, **kwargs)
def get_internal_type(self):
return "FileField"
def get_prep_lookup(self, lookup_type, value):
if hasattr(value, 'name'):
value = value.name
return super(FileField, self).get_prep_lookup(lookup_type, value)
def get_prep_value(self, value):
"Returns field's value prepared for saving into a database."
# Need to convert File objects provided via a form to unicode for database insertion
if value is None:
return None
return six.text_type(value)
def pre_save(self, model_instance, add):
"Returns field's value just before saving."
file = super(FileField, self).pre_save(model_instance, add)
if file and not file._committed:
# Commit the file to storage prior to saving the model
file.save(file.name, file, save=False)
return file
def contribute_to_class(self, cls, name):
super(FileField, self).contribute_to_class(cls, name)
setattr(cls, self.name, self.descriptor_class(self))
def get_directory_name(self):
return os.path.normpath(force_text(datetime.datetime.now().strftime(force_str(self.upload_to))))
def get_filename(self, filename):
return os.path.normpath(self.storage.get_valid_name(os.path.basename(filename)))
def generate_filename(self, instance, filename):
return os.path.join(self.get_directory_name(), self.get_filename(filename))
def save_form_data(self, instance, data):
# Important: None means "no change", other false value means "clear"
# This subtle distinction (rather than a more explicit marker) is
# needed because we need to consume values that are also sane for a
# regular (non Model-) Form to find in its cleaned_data dictionary.
if data is not None:
# This value will be converted to unicode and stored in the
# database, so leaving False as-is is not acceptable.
if not data:
data = ''
setattr(instance, self.name, data)
def formfield(self, **kwargs):
defaults = {'form_class': forms.FileField, 'max_length': self.max_length}
# If a file has been provided previously, then the form doesn't require
# that a new file is provided this time.
# The code to mark the form field as not required is used by
# form_for_instance, but can probably be removed once form_for_instance
# is gone. ModelForm uses a different method to check for an existing file.
if 'initial' in kwargs:
defaults['required'] = False
defaults.update(kwargs)
return super(FileField, self).formfield(**defaults)
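# A hedged usage sketch (model and field names are illustrative): upload_to
# may be a strftime pattern expanded by get_directory_name(), or a callable
# taking (instance, filename) that replaces generate_filename entirely:
#
# class Document(models.Model):
#     attachment = models.FileField(upload_to='documents/%Y/%m/%d/')
#     avatar = models.FileField(
#         upload_to=lambda instance, filename: 'avatars/%s' % filename)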
class ImageFileDescriptor(FileDescriptor):
"""
Just like the FileDescriptor, but for ImageFields. The only difference is
assigning the width/height to the width_field/height_field, if appropriate.
"""
def __set__(self, instance, value):
previous_file = instance.__dict__.get(self.field.name)
super(ImageFileDescriptor, self).__set__(instance, value)
# To prevent recalculating image dimensions when we are instantiating
# an object from the database (bug #11084), only update dimensions if
# the field had a value before this assignment. Since the default
# value for FileField subclasses is an instance of field.attr_class,
# previous_file will only be None when we are called from
# Model.__init__(). The ImageField.update_dimension_fields method
# hooked up to the post_init signal handles the Model.__init__() cases.
# Assignment happening outside of Model.__init__() will trigger the
# update right here.
if previous_file is not None:
self.field.update_dimension_fields(instance, force=True)
class ImageFieldFile(ImageFile, FieldFile):
def delete(self, save=True):
# Clear the image dimensions cache
if hasattr(self, '_dimensions_cache'):
del self._dimensions_cache
super(ImageFieldFile, self).delete(save)
class ImageField(FileField):
attr_class = ImageFieldFile
descriptor_class = ImageFileDescriptor
description = _("Image")
def __init__(self, verbose_name=None, name=None, width_field=None,
height_field=None, **kwargs):
self.width_field, self.height_field = width_field, height_field
super(ImageField, self).__init__(verbose_name, name, **kwargs)
def contribute_to_class(self, cls, name):
super(ImageField, self).contribute_to_class(cls, name)
# Attach update_dimension_fields so that dimension fields declared
# after their corresponding image field don't stay cleared by
# Model.__init__, see bug #11196.
signals.post_init.connect(self.update_dimension_fields, sender=cls)
def update_dimension_fields(self, instance, force=False, *args, **kwargs):
"""
Updates field's width and height fields, if defined.
This method is hooked up to model's post_init signal to update
dimensions after instantiating a model instance. However, dimensions
won't be updated if the dimensions fields are already populated. This
avoids unnecessary recalculation when loading an object from the
database.
Dimensions can be forced to update with force=True, which is how
ImageFileDescriptor.__set__ calls this method.
"""
# Nothing to update if the field doesn't have dimension fields.
has_dimension_fields = self.width_field or self.height_field
if not has_dimension_fields:
return
# getattr will call the ImageFileDescriptor's __get__ method, which
# coerces the assigned value into an instance of self.attr_class
# (ImageFieldFile in this case).
file = getattr(instance, self.attname)
# Nothing to update if we have no file and not being forced to update.
if not file and not force:
return
dimension_fields_filled = not (
(self.width_field and not getattr(instance, self.width_field))
or (self.height_field and not getattr(instance, self.height_field))
)
# When both dimension fields have values, we are most likely loading
# data from the database or updating an image field that already had
# an image stored. In the first case, we don't want to update the
# dimension fields because we are already getting their values from the
# database. In the second case, we do want to update the dimensions
# fields and will skip this return because force will be True since we
# were called from ImageFileDescriptor.__set__.
if dimension_fields_filled and not force:
return
# file should be an instance of ImageFieldFile or should be None.
if file:
width = file.width
height = file.height
else:
# No file, so clear dimensions fields.
width = None
height = None
# Update the width and height fields.
if self.width_field:
setattr(instance, self.width_field, width)
if self.height_field:
setattr(instance, self.height_field, height)
def formfield(self, **kwargs):
defaults = {'form_class': forms.ImageField}
defaults.update(kwargs)
return super(ImageField, self).formfield(**defaults)
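# A hedged usage sketch (names are illustrative): width_field/height_field
# name integer fields on the same model that update_dimension_fields keeps
# in sync with the stored image (null=True since they are cleared when the
# image is removed):
#
# class Photo(models.Model):
#     image = models.ImageField(upload_to='photos',
#                               width_field='image_width',
#                               height_field='image_height')
#     image_width = models.IntegerField(null=True)
#     image_height = models.IntegerField(null=True)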
| mit |
zhjunlang/kbengine | assets/scripts/base/Account.py | 27 | 1034 | # -*- coding: utf-8 -*-
import KBEngine
from KBEDebug import *
class Account(KBEngine.Proxy):
def __init__(self):
KBEngine.Proxy.__init__(self)
def onTimer(self, id, userArg):
"""
KBEngine method.
After addTimer is called, this callback is invoked when the timer fires.
@param id : the timer ID returned by addTimer
@param userArg : the data passed as the last argument to addTimer
"""
DEBUG_MSG(id, userArg)
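# A hedged usage sketch (treat the exact addTimer signature as an
# assumption drawn from common KBEngine demos):
# self.addTimer(5.0, 1.0, 0)  # first fire after 5s, then every 1s, userArg=0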
def onEntitiesEnabled(self):
"""
KBEngine method.
This entity is now officially activated and usable. At this point the
entity has established its corresponding client entity; its cell part
can be created here.
"""
INFO_MSG("account[%i] entities enable. mailbox:%s" % (self.id, self.client))
def onLogOnAttempt(self, ip, port, password):
"""
KBEngine method.
Called back here when a client login attempt fails.
"""
INFO_MSG(ip, port, password)
return KBEngine.LOG_ON_ACCEPT
def onClientDeath(self):
"""
KBEngine method.
The entity corresponding to the client has been destroyed.
"""
DEBUG_MSG("Account[%i].onClientDeath:" % self.id)
self.destroy()
| lgpl-3.0 |
emanueldima/b2share | b2share/modules/communities/errors.py | 1 | 1826 | # -*- coding: utf-8 -*-
#
# This file is part of EUDAT B2Share.
# Copyright (C) 2016 CERN.
#
# B2Share is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# B2Share is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B2Share; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""B2Share Communities exceptions."""
from __future__ import absolute_import
from invenio_rest.errors import RESTException
class InvalidCommunityError(Exception):
"""Exception raised when a community is invalid."""
pass
class CommunityDoesNotExistError(Exception):
"""Exception raised when a requested community does not exist."""
pass
class CommunityDeletedError(Exception):
"""Exception raised when a requested community is marked as deleted."""
pass
class InvalidPublicationStateError(RESTException):
"""Exception raised when a deposit is an invalid publication state."""
code = 400
"""HTTP Status code."""
class NotACommunityRoleError(RESTException):
"""Exception raised a role does not belong to a community."""
code = 400
description = 'This role doesn\'t belong to any community.'
| gpl-2.0 |
drpngx/tensorflow | tensorflow/python/ops/collective_ops_test.py | 8 | 3394 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Collective Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import collective_ops
from tensorflow.python.platform import test
# TODO(tucker): Make these ops work in eager mode. b/79776476
class CollectiveOpTest(test.TestCase):
def _testCollectiveReduce(self, t0, t1, expected):
group_key = 1
instance_key = 1
with self.test_session(
config=config_pb2.ConfigProto(device_count={'CPU': 2})) as sess:
with ops.device('/CPU:0'):
in0 = constant_op.constant(t0)
colred0 = collective_ops.all_reduce(in0, 2, group_key, instance_key,
'Add', 'Div')
with ops.device('/CPU:1'):
in1 = constant_op.constant(t1)
colred1 = collective_ops.all_reduce(in1, 2, group_key, instance_key,
'Add', 'Div')
run_options = config_pb2.RunOptions()
run_options.experimental.collective_graph_key = 1
results = sess.run([colred0, colred1], options=run_options)
self.assertAllClose(results[0], expected, rtol=1e-5, atol=1e-5)
self.assertAllClose(results[1], expected, rtol=1e-5, atol=1e-5)
def testCollectiveReduce(self):
self._testCollectiveReduce([0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1],
[0.3, 1.3, 2.3, 3.3, 4.3, 5.3, 6.3, 7.3],
[0.2, 1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2])
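    # The expected output above is the element-wise mean of the two inputs:
    # the collective reduces with 'Add' and then applies 'Div' by the group
    # size (2), e.g. (0.1 + 0.3) / 2 == 0.2.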
def _testCollectiveBroadcast(self, t0):
group_key = 1
instance_key = 1
with self.test_session(
config=config_pb2.ConfigProto(device_count={'CPU': 2})) as sess:
with ops.device('/CPU:0'):
in0 = constant_op.constant(t0)
out0 = collective_ops.broadcast_send(in0, in0.shape, in0.dtype,
2, group_key, instance_key)
with ops.device('/CPU:1'):
c1 = constant_op.constant(t0)
out1 = collective_ops.broadcast_recv(c1.shape, c1.dtype,
2, group_key, instance_key)
run_options = config_pb2.RunOptions()
run_options.experimental.collective_graph_key = 1
results = sess.run([out0, out1], options=run_options)
self.assertAllClose(results[0], t0, rtol=1e-5, atol=1e-5)
self.assertAllClose(results[1], t0, rtol=1e-5, atol=1e-5)
def testCollectiveBroadcast(self):
self._testCollectiveBroadcast([0.1, 1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1])
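    # Both devices should end up with the tensor sent from CPU:0:
    # broadcast_send publishes t0 and broadcast_recv returns an identical copy.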
if __name__ == '__main__':
test.main()
| apache-2.0 |
ar7z1/ansible | lib/ansible/modules/cloud/memset/memset_zone_domain.py | 35 | 9082 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018, Simon Weald <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: memset_zone_domain
author: "Simon Weald (@analbeard)"
version_added: "2.6"
short_description: Create and delete domains in Memset DNS zones.
notes:
- Zone domains can be thought of as a collection of domains, all of which share the
same DNS records (i.e. they point to the same IP). An API key generated via the
Memset customer control panel is needed with the following minimum scope -
I(dns.zone_domain_create), I(dns.zone_domain_delete), I(dns.zone_domain_list).
- Currently this module can only create one domain at a time. Multiple domains should
be created using C(with_items).
description:
- Manage DNS zone domains in a Memset account.
options:
state:
default: present
description:
- Indicates desired state of resource.
choices: [ absent, present ]
api_key:
required: true
description:
- The API key obtained from the Memset control panel.
domain:
required: true
description:
- The zone domain name. Ensure this value has at most 250 characters.
aliases: ['name']
zone:
required: true
description:
- The zone to add the domain to (this must already exist).
'''
EXAMPLES = '''
# Create the zone domain 'test.com'
- name: create zone domain
memset_zone_domain:
domain: test.com
zone: testzone
state: present
api_key: 5eb86c9196ab03919abcf03857163741
delegate_to: localhost
'''
RETURN = '''
memset_api:
description: Domain info from the Memset API
returned: when changed or state == present
type: complex
contains:
domain:
description: Domain name
returned: always
type: string
sample: "example.com"
id:
description: Domain ID
returned: always
type: string
sample: "b0bb1ce851aeea6feeb2dc32fe83bf9c"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.memset import get_zone_id
from ansible.module_utils.memset import check_zone_domain
from ansible.module_utils.memset import memset_api_call
def api_validation(args=None):
'''
Perform some validation which will be enforced by Memset's API (see:
https://www.memset.com/apidocs/methods_dns.html#dns.zone_domain_create)
'''
    # the zone domain length must not exceed 250 chars
    if len(args['domain']) > 250:
        stderr = 'Zone domain must be 250 characters or fewer in length.'
module.fail_json(failed=True, msg=stderr)
def check(args=None):
'''
Support for running with check mode.
'''
retvals = dict()
has_changed = False
api_method = 'dns.zone_domain_list'
has_failed, msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method)
domain_exists = check_zone_domain(data=response, domain=args['domain'])
# set changed to true if the operation would cause a change.
has_changed = ((domain_exists and args['state'] == 'absent') or (not domain_exists and args['state'] == 'present'))
retvals['changed'] = has_changed
retvals['failed'] = has_failed
return(retvals)
def create_zone_domain(args=None, zone_exists=None, zone_id=None, payload=None):
'''
At this point we already know whether the containing zone exists,
so we just need to create the domain (or exit if it already exists).
'''
has_changed, has_failed = False, False
msg = None
api_method = 'dns.zone_domain_list'
_has_failed, _msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method)
for zone_domain in response.json():
if zone_domain['domain'] == args['domain']:
# zone domain already exists, nothing to change.
has_changed = False
break
else:
# we need to create the domain
api_method = 'dns.zone_domain_create'
payload['domain'] = args['domain']
payload['zone_id'] = zone_id
has_failed, msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method, payload=payload)
if not has_failed:
has_changed = True
return(has_failed, has_changed, msg)
def delete_zone_domain(args=None, payload=None):
'''
    Deletion is pretty simple: domains are always unique, so we
    don't need to do any sanity checking to avoid deleting the
wrong thing.
'''
has_changed, has_failed = False, False
msg, memset_api = None, None
api_method = 'dns.zone_domain_list'
_has_failed, _msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method)
domain_exists = check_zone_domain(data=response, domain=args['domain'])
if domain_exists:
api_method = 'dns.zone_domain_delete'
payload['domain'] = args['domain']
has_failed, msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method, payload=payload)
if not has_failed:
has_changed = True
memset_api = response.json()
            # unset msg as we don't want to return unnecessary info to the user.
msg = None
return(has_failed, has_changed, memset_api, msg)
def create_or_delete_domain(args=None):
'''
We need to perform some initial sanity checking and also look
up required info before handing it off to create or delete.
'''
retvals, payload = dict(), dict()
has_changed, has_failed = False, False
msg, stderr, memset_api = None, None, None
# get the zones and check if the relevant zone exists.
api_method = 'dns.zone_list'
has_failed, msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method)
if has_failed:
# this is the first time the API is called; incorrect credentials will
# manifest themselves at this point so we need to ensure the user is
# informed of the reason.
retvals['failed'] = has_failed
retvals['msg'] = msg
retvals['stderr'] = "API returned an error: {0}" . format(response.status_code)
return(retvals)
zone_exists, msg, counter, zone_id = get_zone_id(zone_name=args['zone'], current_zones=response.json())
if not zone_exists:
# the zone needs to be unique - this isn't a requirement of Memset's API but it
# makes sense in the context of this module.
has_failed = True
if counter == 0:
stderr = "DNS zone '{0}' does not exist, cannot create domain." . format(args['zone'])
elif counter > 1:
stderr = "{0} matches multiple zones, cannot create domain." . format(args['zone'])
retvals['failed'] = has_failed
retvals['msg'] = stderr
return(retvals)
if args['state'] == 'present':
has_failed, has_changed, msg = create_zone_domain(args=args, zone_exists=zone_exists, zone_id=zone_id, payload=payload)
if args['state'] == 'absent':
has_failed, has_changed, memset_api, msg = delete_zone_domain(args=args, payload=payload)
retvals['changed'] = has_changed
retvals['failed'] = has_failed
    for name, value in (('msg', msg), ('stderr', stderr), ('memset_api', memset_api)):
        if value is not None:
            retvals[name] = value
return(retvals)
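# Illustrative shape of the dict returned above on a successful create
# (values hypothetical): {'changed': True, 'failed': False}, with
# 'msg'/'stderr'/'memset_api' keys added only when they carry a value.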
def main():
global module
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
api_key=dict(required=True, type='str', no_log=True),
domain=dict(required=True, aliases=['name'], type='str'),
zone=dict(required=True, type='str')
),
supports_check_mode=True
)
# populate the dict with the user-provided vars.
args = dict()
for key, arg in module.params.items():
args[key] = arg
args['check_mode'] = module.check_mode
# validate some API-specific limitations.
api_validation(args=args)
if module.check_mode:
retvals = check(args)
else:
retvals = create_or_delete_domain(args)
# we would need to populate the return values with the API's response
# in several places so it's easier to do it at the end instead.
if not retvals['failed']:
if args['state'] == 'present' and not module.check_mode:
payload = dict()
payload['domain'] = args['domain']
api_method = 'dns.zone_domain_info'
_has_failed, _msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method, payload=payload)
retvals['memset_api'] = response.json()
if retvals['failed']:
module.fail_json(**retvals)
else:
module.exit_json(**retvals)
if __name__ == '__main__':
main()
| gpl-3.0 |
swiftix/swift | utils/swift_build_support/tests/test_migration.py | 39 | 2818 | # test_migration.py - Tests for swift_build_support.migration -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import argparse
import os
import unittest
from swift_build_support import migration
class MigrateImplArgsTestCase(unittest.TestCase):
def test_report_unknown_args(self):
parser = argparse.ArgumentParser()
parser.add_argument('-R', '--release', action='store_true')
parser.add_argument('-T', '--validation-test', action='store_true')
parser.add_argument('--darwin-xcrun-toolchain')
args = migration.parse_args(
parser,
['-RT', '--unknown', 'true', '--darwin-xcrun-toolchain=foo', '--',
'--darwin-xcrun-toolchain=bar', '--other'])
self.assertEqual(
args,
argparse.Namespace(
release=True,
validation_test=True,
darwin_xcrun_toolchain='bar',
build_script_impl_args=['--unknown', 'true', '--other']))
def test_no_unknown_args(self):
parser = argparse.ArgumentParser()
parser.add_argument('-R', '--release', action='store_true')
parser.add_argument('-T', '--validation-test', action='store_true')
parser.add_argument('--darwin-xcrun-toolchain')
args = migration.parse_args(
parser,
['-RT', '--darwin-xcrun-toolchain=bar'])
self.assertEqual(
args,
argparse.Namespace(
release=True,
validation_test=True,
darwin_xcrun_toolchain='bar',
build_script_impl_args=[]))
def test_check_impl_args(self):
# Assuming file locations:
# utils/swift_build_support/tests/test_migration.py
# utils/build-script-impl
build_script_impl = os.path.join(
os.path.dirname(__file__), '..', '..', 'build-script-impl')
self.assertIsNone(migration.check_impl_args(build_script_impl,
['--reconfigure']))
with self.assertRaises(ValueError) as cm:
migration.check_impl_args(build_script_impl, ['foo'])
self.assertIn('foo', str(cm.exception))
with self.assertRaises(ValueError) as cm:
migration.check_impl_args(build_script_impl, ['--reconfigure',
'--foo=true'])
self.assertIn('foo', str(cm.exception))
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
turbokongen/home-assistant | homeassistant/components/alarmdecoder/__init__.py | 9 | 5742 | """Support for AlarmDecoder devices."""
import asyncio
from datetime import timedelta
import logging
from adext import AdExt
from alarmdecoder.devices import SerialDevice, SocketDevice
from alarmdecoder.util import NoDeviceError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
CONF_PROTOCOL,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import dt as dt_util
from .const import (
CONF_DEVICE_BAUD,
CONF_DEVICE_PATH,
DATA_AD,
DATA_REMOVE_STOP_LISTENER,
DATA_REMOVE_UPDATE_LISTENER,
DATA_RESTART,
DOMAIN,
PROTOCOL_SERIAL,
PROTOCOL_SOCKET,
SIGNAL_PANEL_MESSAGE,
SIGNAL_REL_MESSAGE,
SIGNAL_RFX_MESSAGE,
SIGNAL_ZONE_FAULT,
SIGNAL_ZONE_RESTORE,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["alarm_control_panel", "sensor", "binary_sensor"]
async def async_setup(hass, config):
"""Set up for the AlarmDecoder devices."""
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up AlarmDecoder config flow."""
undo_listener = entry.add_update_listener(_update_listener)
ad_connection = entry.data
protocol = ad_connection[CONF_PROTOCOL]
def stop_alarmdecoder(event):
"""Handle the shutdown of AlarmDecoder."""
if not hass.data.get(DOMAIN):
return
_LOGGER.debug("Shutting down alarmdecoder")
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False
controller.close()
async def open_connection(now=None):
"""Open a connection to AlarmDecoder."""
try:
await hass.async_add_executor_job(controller.open, baud)
except NoDeviceError:
_LOGGER.debug("Failed to connect. Retrying in 5 seconds")
hass.helpers.event.async_track_point_in_time(
open_connection, dt_util.utcnow() + timedelta(seconds=5)
)
return
_LOGGER.debug("Established a connection with the alarmdecoder")
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = True
def handle_closed_connection(event):
"""Restart after unexpected loss of connection."""
if not hass.data[DOMAIN][entry.entry_id][DATA_RESTART]:
return
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False
_LOGGER.warning("AlarmDecoder unexpectedly lost connection")
hass.add_job(open_connection)
def handle_message(sender, message):
"""Handle message from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_PANEL_MESSAGE, message)
def handle_rfx_message(sender, message):
"""Handle RFX message from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_RFX_MESSAGE, message)
def zone_fault_callback(sender, zone):
"""Handle zone fault from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_FAULT, zone)
def zone_restore_callback(sender, zone):
"""Handle zone restore from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_RESTORE, zone)
def handle_rel_message(sender, message):
"""Handle relay or zone expander message from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_REL_MESSAGE, message)
baud = ad_connection.get(CONF_DEVICE_BAUD)
if protocol == PROTOCOL_SOCKET:
host = ad_connection[CONF_HOST]
port = ad_connection[CONF_PORT]
controller = AdExt(SocketDevice(interface=(host, port)))
if protocol == PROTOCOL_SERIAL:
path = ad_connection[CONF_DEVICE_PATH]
controller = AdExt(SerialDevice(interface=path))
controller.on_message += handle_message
controller.on_rfx_message += handle_rfx_message
controller.on_zone_fault += zone_fault_callback
controller.on_zone_restore += zone_restore_callback
controller.on_close += handle_closed_connection
controller.on_expander_message += handle_rel_message
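    # At this point every AlarmDecoder event is wired to a Home Assistant
    # dispatcher signal, so the platform entities set up below only need to
    # subscribe to the SIGNAL_* constants.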
remove_stop_listener = hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, stop_alarmdecoder
)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = {
DATA_AD: controller,
DATA_REMOVE_UPDATE_LISTENER: undo_listener,
DATA_REMOVE_STOP_LISTENER: remove_stop_listener,
DATA_RESTART: False,
}
await open_connection()
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Unload a AlarmDecoder entry."""
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
hass.data[DOMAIN][entry.entry_id][DATA_REMOVE_UPDATE_LISTENER]()
hass.data[DOMAIN][entry.entry_id][DATA_REMOVE_STOP_LISTENER]()
await hass.async_add_executor_job(hass.data[DOMAIN][entry.entry_id][DATA_AD].close)
if hass.data[DOMAIN][entry.entry_id]:
hass.data[DOMAIN].pop(entry.entry_id)
if not hass.data[DOMAIN]:
hass.data.pop(DOMAIN)
return True
async def _update_listener(hass: HomeAssistantType, entry: ConfigEntry):
"""Handle options update."""
_LOGGER.debug("AlarmDecoder options updated: %s", entry.as_dict()["options"])
await hass.config_entries.async_reload(entry.entry_id)
| apache-2.0 |
akalipetis/tornado | tornado/test/gen_test.py | 60 | 43781 | from __future__ import absolute_import, division, print_function, with_statement
import contextlib
import datetime
import functools
import sys
import textwrap
import time
import weakref
from tornado.concurrent import return_future, Future
from tornado.escape import url_escape
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop
from tornado.log import app_log
from tornado import stack_context
from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
from tornado.test.util import unittest, skipOnTravis, skipBefore33, skipBefore35, skipNotCPython, exec_test
from tornado.web import Application, RequestHandler, asynchronous, HTTPError
from tornado import gen
try:
from concurrent import futures
except ImportError:
futures = None
class GenEngineTest(AsyncTestCase):
def setUp(self):
super(GenEngineTest, self).setUp()
self.named_contexts = []
def named_context(self, name):
@contextlib.contextmanager
def context():
self.named_contexts.append(name)
try:
yield
finally:
self.assertEqual(self.named_contexts.pop(), name)
return context
def run_gen(self, f):
f()
return self.wait()
def delay_callback(self, iterations, callback, arg):
"""Runs callback(arg) after a number of IOLoop iterations."""
if iterations == 0:
callback(arg)
else:
self.io_loop.add_callback(functools.partial(
self.delay_callback, iterations - 1, callback, arg))
@return_future
def async_future(self, result, callback):
self.io_loop.add_callback(callback, result)
@gen.coroutine
def async_exception(self, e):
yield gen.moment
raise e
def test_no_yield(self):
@gen.engine
def f():
self.stop()
self.run_gen(f)
def test_inline_cb(self):
@gen.engine
def f():
(yield gen.Callback("k1"))()
res = yield gen.Wait("k1")
self.assertTrue(res is None)
self.stop()
self.run_gen(f)
def test_ioloop_cb(self):
@gen.engine
def f():
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.stop()
self.run_gen(f)
def test_exception_phase1(self):
@gen.engine
def f():
1 / 0
self.assertRaises(ZeroDivisionError, self.run_gen, f)
def test_exception_phase2(self):
@gen.engine
def f():
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
1 / 0
self.assertRaises(ZeroDivisionError, self.run_gen, f)
def test_exception_in_task_phase1(self):
def fail_task(callback):
1 / 0
@gen.engine
def f():
try:
yield gen.Task(fail_task)
raise Exception("did not get expected exception")
except ZeroDivisionError:
self.stop()
self.run_gen(f)
def test_exception_in_task_phase2(self):
# This is the case that requires the use of stack_context in gen.engine
def fail_task(callback):
self.io_loop.add_callback(lambda: 1 / 0)
@gen.engine
def f():
try:
yield gen.Task(fail_task)
raise Exception("did not get expected exception")
except ZeroDivisionError:
self.stop()
self.run_gen(f)
def test_with_arg(self):
@gen.engine
def f():
(yield gen.Callback("k1"))(42)
res = yield gen.Wait("k1")
self.assertEqual(42, res)
self.stop()
self.run_gen(f)
def test_with_arg_tuple(self):
@gen.engine
def f():
(yield gen.Callback((1, 2)))((3, 4))
res = yield gen.Wait((1, 2))
self.assertEqual((3, 4), res)
self.stop()
self.run_gen(f)
def test_key_reuse(self):
@gen.engine
def f():
yield gen.Callback("k1")
yield gen.Callback("k1")
self.stop()
self.assertRaises(gen.KeyReuseError, self.run_gen, f)
def test_key_reuse_tuple(self):
@gen.engine
def f():
yield gen.Callback((1, 2))
yield gen.Callback((1, 2))
self.stop()
self.assertRaises(gen.KeyReuseError, self.run_gen, f)
def test_key_mismatch(self):
@gen.engine
def f():
yield gen.Callback("k1")
yield gen.Wait("k2")
self.stop()
self.assertRaises(gen.UnknownKeyError, self.run_gen, f)
def test_key_mismatch_tuple(self):
@gen.engine
def f():
yield gen.Callback((1, 2))
yield gen.Wait((2, 3))
self.stop()
self.assertRaises(gen.UnknownKeyError, self.run_gen, f)
def test_leaked_callback(self):
@gen.engine
def f():
yield gen.Callback("k1")
self.stop()
self.assertRaises(gen.LeakedCallbackError, self.run_gen, f)
def test_leaked_callback_tuple(self):
@gen.engine
def f():
yield gen.Callback((1, 2))
self.stop()
self.assertRaises(gen.LeakedCallbackError, self.run_gen, f)
def test_parallel_callback(self):
@gen.engine
def f():
for k in range(3):
self.io_loop.add_callback((yield gen.Callback(k)))
yield gen.Wait(1)
self.io_loop.add_callback((yield gen.Callback(3)))
yield gen.Wait(0)
yield gen.Wait(3)
yield gen.Wait(2)
self.stop()
self.run_gen(f)
def test_bogus_yield(self):
@gen.engine
def f():
yield 42
self.assertRaises(gen.BadYieldError, self.run_gen, f)
def test_bogus_yield_tuple(self):
@gen.engine
def f():
yield (1, 2)
self.assertRaises(gen.BadYieldError, self.run_gen, f)
def test_reuse(self):
@gen.engine
def f():
self.io_loop.add_callback((yield gen.Callback(0)))
yield gen.Wait(0)
self.stop()
self.run_gen(f)
self.run_gen(f)
def test_task(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
self.stop()
self.run_gen(f)
def test_wait_all(self):
@gen.engine
def f():
(yield gen.Callback("k1"))("v1")
(yield gen.Callback("k2"))("v2")
results = yield gen.WaitAll(["k1", "k2"])
self.assertEqual(results, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_exception_in_yield(self):
@gen.engine
def f():
try:
yield gen.Wait("k1")
raise Exception("did not get expected exception")
except gen.UnknownKeyError:
pass
self.stop()
self.run_gen(f)
def test_resume_after_exception_in_yield(self):
@gen.engine
def f():
try:
yield gen.Wait("k1")
raise Exception("did not get expected exception")
except gen.UnknownKeyError:
pass
(yield gen.Callback("k2"))("v2")
self.assertEqual((yield gen.Wait("k2")), "v2")
self.stop()
self.run_gen(f)
def test_orphaned_callback(self):
@gen.engine
def f():
self.orphaned_callback = yield gen.Callback(1)
try:
self.run_gen(f)
raise Exception("did not get expected exception")
except gen.LeakedCallbackError:
pass
self.orphaned_callback()
def test_multi(self):
@gen.engine
def f():
(yield gen.Callback("k1"))("v1")
(yield gen.Callback("k2"))("v2")
results = yield [gen.Wait("k1"), gen.Wait("k2")]
self.assertEqual(results, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_multi_dict(self):
@gen.engine
def f():
(yield gen.Callback("k1"))("v1")
(yield gen.Callback("k2"))("v2")
results = yield dict(foo=gen.Wait("k1"), bar=gen.Wait("k2"))
self.assertEqual(results, dict(foo="v1", bar="v2"))
self.stop()
self.run_gen(f)
# The following tests explicitly run with both gen.Multi
# and gen.multi_future (Task returns a Future, so it can be used
# with either).
def test_multi_yieldpoint_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.Multi([
gen.Task(self.delay_callback, 3, arg="v1"),
gen.Task(self.delay_callback, 1, arg="v2"),
])
self.assertEqual(responses, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_multi_yieldpoint_dict_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.Multi(dict(
foo=gen.Task(self.delay_callback, 3, arg="v1"),
bar=gen.Task(self.delay_callback, 1, arg="v2"),
))
self.assertEqual(responses, dict(foo="v1", bar="v2"))
self.stop()
self.run_gen(f)
def test_multi_future_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.multi_future([
gen.Task(self.delay_callback, 3, arg="v1"),
gen.Task(self.delay_callback, 1, arg="v2"),
])
self.assertEqual(responses, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_multi_future_dict_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.multi_future(dict(
foo=gen.Task(self.delay_callback, 3, arg="v1"),
bar=gen.Task(self.delay_callback, 1, arg="v2"),
))
self.assertEqual(responses, dict(foo="v1", bar="v2"))
self.stop()
self.run_gen(f)
@skipOnTravis
@gen_test
def test_multi_performance(self):
# Yielding a list used to have quadratic performance; make
# sure a large list stays reasonable. On my laptop a list of
# 2000 used to take 1.8s, now it takes 0.12.
start = time.time()
yield [gen.Task(self.io_loop.add_callback) for i in range(2000)]
end = time.time()
self.assertLess(end - start, 1.0)
@gen_test
def test_multi_empty(self):
# Empty lists or dicts should return the same type.
x = yield []
self.assertTrue(isinstance(x, list))
y = yield {}
self.assertTrue(isinstance(y, dict))
@gen_test
def test_multi_mixed_types(self):
# A YieldPoint (Wait) and Future (Task) can be combined
# (and use the YieldPoint codepath)
(yield gen.Callback("k1"))("v1")
responses = yield [gen.Wait("k1"),
gen.Task(self.delay_callback, 3, arg="v2")]
self.assertEqual(responses, ["v1", "v2"])
@gen_test
def test_future(self):
result = yield self.async_future(1)
self.assertEqual(result, 1)
@gen_test
def test_multi_future(self):
results = yield [self.async_future(1), self.async_future(2)]
self.assertEqual(results, [1, 2])
@gen_test
def test_multi_future_duplicate(self):
f = self.async_future(2)
results = yield [self.async_future(1), f, self.async_future(3), f]
self.assertEqual(results, [1, 2, 3, 2])
@gen_test
def test_multi_dict_future(self):
results = yield dict(foo=self.async_future(1), bar=self.async_future(2))
self.assertEqual(results, dict(foo=1, bar=2))
@gen_test
def test_multi_exceptions(self):
with ExpectLog(app_log, "Multiple exceptions in yield list"):
with self.assertRaises(RuntimeError) as cm:
yield gen.Multi([self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))])
self.assertEqual(str(cm.exception), "error 1")
# With only one exception, no error is logged.
with self.assertRaises(RuntimeError):
yield gen.Multi([self.async_exception(RuntimeError("error 1")),
self.async_future(2)])
# Exception logging may be explicitly quieted.
with self.assertRaises(RuntimeError):
yield gen.Multi([self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))],
quiet_exceptions=RuntimeError)
@gen_test
def test_multi_future_exceptions(self):
with ExpectLog(app_log, "Multiple exceptions in yield list"):
with self.assertRaises(RuntimeError) as cm:
yield [self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))]
self.assertEqual(str(cm.exception), "error 1")
# With only one exception, no error is logged.
with self.assertRaises(RuntimeError):
yield [self.async_exception(RuntimeError("error 1")),
self.async_future(2)]
# Exception logging may be explicitly quieted.
with self.assertRaises(RuntimeError):
yield gen.multi_future(
[self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))],
quiet_exceptions=RuntimeError)
def test_arguments(self):
@gen.engine
def f():
(yield gen.Callback("noargs"))()
self.assertEqual((yield gen.Wait("noargs")), None)
(yield gen.Callback("1arg"))(42)
self.assertEqual((yield gen.Wait("1arg")), 42)
(yield gen.Callback("kwargs"))(value=42)
result = yield gen.Wait("kwargs")
self.assertTrue(isinstance(result, gen.Arguments))
self.assertEqual(((), dict(value=42)), result)
self.assertEqual(dict(value=42), result.kwargs)
(yield gen.Callback("2args"))(42, 43)
result = yield gen.Wait("2args")
self.assertTrue(isinstance(result, gen.Arguments))
self.assertEqual(((42, 43), {}), result)
self.assertEqual((42, 43), result.args)
def task_func(callback):
callback(None, error="foo")
result = yield gen.Task(task_func)
self.assertTrue(isinstance(result, gen.Arguments))
self.assertEqual(((None,), dict(error="foo")), result)
self.stop()
self.run_gen(f)
def test_stack_context_leak(self):
# regression test: repeated invocations of a gen-based
# function should not result in accumulated stack_contexts
def _stack_depth():
head = stack_context._state.contexts[1]
length = 0
while head is not None:
length += 1
head = head.old_contexts[1]
return length
@gen.engine
def inner(callback):
yield gen.Task(self.io_loop.add_callback)
callback()
@gen.engine
def outer():
for i in range(10):
yield gen.Task(inner)
stack_increase = _stack_depth() - initial_stack_depth
self.assertTrue(stack_increase <= 2)
self.stop()
initial_stack_depth = _stack_depth()
self.run_gen(outer)
def test_stack_context_leak_exception(self):
# same as previous, but with a function that exits with an exception
@gen.engine
def inner(callback):
yield gen.Task(self.io_loop.add_callback)
1 / 0
@gen.engine
def outer():
for i in range(10):
try:
yield gen.Task(inner)
except ZeroDivisionError:
pass
stack_increase = len(stack_context._state.contexts) - initial_stack_depth
self.assertTrue(stack_increase <= 2)
self.stop()
initial_stack_depth = len(stack_context._state.contexts)
self.run_gen(outer)
def function_with_stack_context(self, callback):
# Technically this function should stack_context.wrap its callback
# upon entry. However, it is very common for this step to be
# omitted.
def step2():
self.assertEqual(self.named_contexts, ['a'])
self.io_loop.add_callback(callback)
with stack_context.StackContext(self.named_context('a')):
self.io_loop.add_callback(step2)
@gen_test
def test_wait_transfer_stack_context(self):
# Wait should not pick up contexts from where callback was invoked,
# even if that function improperly fails to wrap its callback.
cb = yield gen.Callback('k1')
self.function_with_stack_context(cb)
self.assertEqual(self.named_contexts, [])
yield gen.Wait('k1')
self.assertEqual(self.named_contexts, [])
@gen_test
def test_task_transfer_stack_context(self):
yield gen.Task(self.function_with_stack_context)
self.assertEqual(self.named_contexts, [])
def test_raise_after_stop(self):
# This pattern will be used in the following tests so make sure
# the exception propagates as expected.
@gen.engine
def f():
self.stop()
1 / 0
with self.assertRaises(ZeroDivisionError):
self.run_gen(f)
def test_sync_raise_return(self):
# gen.Return is allowed in @gen.engine, but it may not be used
# to return a value.
@gen.engine
def f():
self.stop(42)
raise gen.Return()
result = self.run_gen(f)
self.assertEqual(result, 42)
def test_async_raise_return(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
self.stop(42)
raise gen.Return()
result = self.run_gen(f)
self.assertEqual(result, 42)
def test_sync_raise_return_value(self):
@gen.engine
def f():
raise gen.Return(42)
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_sync_raise_return_value_tuple(self):
@gen.engine
def f():
raise gen.Return((1, 2))
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_async_raise_return_value(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return(42)
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_async_raise_return_value_tuple(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return((1, 2))
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_return_value(self):
# It is an error to apply @gen.engine to a function that returns
# a value.
@gen.engine
def f():
return 42
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_return_value_tuple(self):
# It is an error to apply @gen.engine to a function that returns
# a value.
@gen.engine
def f():
return (1, 2)
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
@skipNotCPython
def test_task_refcounting(self):
# On CPython, tasks and their arguments should be released immediately
# without waiting for garbage collection.
@gen.engine
def f():
class Foo(object):
pass
arg = Foo()
self.arg_ref = weakref.ref(arg)
task = gen.Task(self.io_loop.add_callback, arg=arg)
self.task_ref = weakref.ref(task)
yield task
self.stop()
self.run_gen(f)
self.assertIs(self.arg_ref(), None)
self.assertIs(self.task_ref(), None)
class GenCoroutineTest(AsyncTestCase):
def setUp(self):
# Stray StopIteration exceptions can lead to tests exiting prematurely,
# so we need explicit checks here to make sure the tests run all
# the way through.
self.finished = False
super(GenCoroutineTest, self).setUp()
def tearDown(self):
super(GenCoroutineTest, self).tearDown()
assert self.finished
@gen_test
def test_sync_gen_return(self):
@gen.coroutine
def f():
raise gen.Return(42)
result = yield f()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_async_gen_return(self):
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return(42)
result = yield f()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_sync_return(self):
@gen.coroutine
def f():
return 42
result = yield f()
self.assertEqual(result, 42)
self.finished = True
@skipBefore33
@gen_test
def test_async_return(self):
namespace = exec_test(globals(), locals(), """
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
return 42
""")
result = yield namespace['f']()
self.assertEqual(result, 42)
self.finished = True
@skipBefore33
@gen_test
def test_async_early_return(self):
# A yield statement exists but is not executed, which means
# this function "returns" via an exception. This exception
# doesn't happen before the exception handling is set up.
namespace = exec_test(globals(), locals(), """
@gen.coroutine
def f():
if True:
return 42
yield gen.Task(self.io_loop.add_callback)
""")
result = yield namespace['f']()
self.assertEqual(result, 42)
self.finished = True
@skipBefore35
@gen_test
def test_async_await(self):
# This test verifies that an async function can await a
# yield-based gen.coroutine, and that a gen.coroutine
# (the test method itself) can yield an async function.
namespace = exec_test(globals(), locals(), """
async def f():
await gen.Task(self.io_loop.add_callback)
return 42
""")
result = yield namespace['f']()
self.assertEqual(result, 42)
self.finished = True
@skipBefore35
@gen_test
def test_async_await_mixed_multi(self):
namespace = exec_test(globals(), locals(), """
async def f1():
await gen.Task(self.io_loop.add_callback)
return 42
""")
@gen.coroutine
def f2():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return(43)
results = yield [namespace['f1'](), f2()]
self.assertEqual(results, [42, 43])
self.finished = True
@gen_test
def test_sync_return_no_value(self):
@gen.coroutine
def f():
return
result = yield f()
self.assertEqual(result, None)
self.finished = True
@gen_test
def test_async_return_no_value(self):
# Without a return value we don't need python 3.3.
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
return
result = yield f()
self.assertEqual(result, None)
self.finished = True
@gen_test
def test_sync_raise(self):
@gen.coroutine
def f():
1 / 0
# The exception is raised when the future is yielded
# (or equivalently when its result method is called),
        # not when the function itself is called.
future = f()
with self.assertRaises(ZeroDivisionError):
yield future
self.finished = True
@gen_test
def test_async_raise(self):
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
1 / 0
future = f()
with self.assertRaises(ZeroDivisionError):
yield future
self.finished = True
@gen_test
def test_pass_callback(self):
@gen.coroutine
def f():
raise gen.Return(42)
result = yield gen.Task(f)
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_replace_yieldpoint_exception(self):
# Test exception handling: a coroutine can catch one exception
# raised by a yield point and raise a different one.
@gen.coroutine
def f1():
1 / 0
@gen.coroutine
def f2():
try:
yield f1()
except ZeroDivisionError:
raise KeyError()
future = f2()
with self.assertRaises(KeyError):
yield future
self.finished = True
@gen_test
def test_swallow_yieldpoint_exception(self):
# Test exception handling: a coroutine can catch an exception
# raised by a yield point and not raise a different one.
@gen.coroutine
def f1():
1 / 0
@gen.coroutine
def f2():
try:
yield f1()
except ZeroDivisionError:
raise gen.Return(42)
result = yield f2()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_replace_context_exception(self):
# Test exception handling: exceptions thrown into the stack context
# can be caught and replaced.
# Note that this test and the following are for behavior that is
# not really supported any more: coroutines no longer create a
# stack context automatically; but one is created after the first
# YieldPoint (i.e. not a Future).
@gen.coroutine
def f2():
(yield gen.Callback(1))()
yield gen.Wait(1)
self.io_loop.add_callback(lambda: 1 / 0)
try:
yield gen.Task(self.io_loop.add_timeout,
self.io_loop.time() + 10)
except ZeroDivisionError:
raise KeyError()
future = f2()
with self.assertRaises(KeyError):
yield future
self.finished = True
@gen_test
def test_swallow_context_exception(self):
# Test exception handling: exceptions thrown into the stack context
# can be caught and ignored.
@gen.coroutine
def f2():
(yield gen.Callback(1))()
yield gen.Wait(1)
self.io_loop.add_callback(lambda: 1 / 0)
try:
yield gen.Task(self.io_loop.add_timeout,
self.io_loop.time() + 10)
except ZeroDivisionError:
raise gen.Return(42)
result = yield f2()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_moment(self):
calls = []
@gen.coroutine
def f(name, yieldable):
for i in range(5):
calls.append(name)
yield yieldable
# First, confirm the behavior without moment: each coroutine
# monopolizes the event loop until it finishes.
immediate = Future()
immediate.set_result(None)
yield [f('a', immediate), f('b', immediate)]
self.assertEqual(''.join(calls), 'aaaaabbbbb')
# With moment, they take turns.
calls = []
yield [f('a', gen.moment), f('b', gen.moment)]
self.assertEqual(''.join(calls), 'ababababab')
self.finished = True
calls = []
yield [f('a', gen.moment), f('b', immediate)]
self.assertEqual(''.join(calls), 'abbbbbaaaa')
@gen_test
def test_sleep(self):
yield gen.sleep(0.01)
self.finished = True
@skipBefore33
@gen_test
def test_py3_leak_exception_context(self):
class LeakedException(Exception):
pass
@gen.coroutine
def inner(iteration):
raise LeakedException(iteration)
try:
yield inner(1)
except LeakedException as e:
self.assertEqual(str(e), "1")
self.assertIsNone(e.__context__)
try:
yield inner(2)
except LeakedException as e:
self.assertEqual(str(e), "2")
self.assertIsNone(e.__context__)
self.finished = True
class GenSequenceHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
self.io_loop = self.request.connection.stream.io_loop
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.write("1")
self.io_loop.add_callback((yield gen.Callback("k2")))
yield gen.Wait("k2")
self.write("2")
# reuse an old key
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.finish("3")
class GenCoroutineSequenceHandler(RequestHandler):
@gen.coroutine
def get(self):
self.io_loop = self.request.connection.stream.io_loop
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.write("1")
self.io_loop.add_callback((yield gen.Callback("k2")))
yield gen.Wait("k2")
self.write("2")
# reuse an old key
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.finish("3")
class GenCoroutineUnfinishedSequenceHandler(RequestHandler):
@asynchronous
@gen.coroutine
def get(self):
self.io_loop = self.request.connection.stream.io_loop
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.write("1")
self.io_loop.add_callback((yield gen.Callback("k2")))
yield gen.Wait("k2")
self.write("2")
# reuse an old key
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
# just write, don't finish
self.write("3")
class GenTaskHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
io_loop = self.request.connection.stream.io_loop
client = AsyncHTTPClient(io_loop=io_loop)
response = yield gen.Task(client.fetch, self.get_argument('url'))
response.rethrow()
self.finish(b"got response: " + response.body)
class GenExceptionHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
# This test depends on the order of the two decorators.
io_loop = self.request.connection.stream.io_loop
yield gen.Task(io_loop.add_callback)
raise Exception("oops")
class GenCoroutineExceptionHandler(RequestHandler):
@gen.coroutine
def get(self):
# This test depends on the order of the two decorators.
io_loop = self.request.connection.stream.io_loop
yield gen.Task(io_loop.add_callback)
raise Exception("oops")
class GenYieldExceptionHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
io_loop = self.request.connection.stream.io_loop
# Test the interaction of the two stack_contexts.
def fail_task(callback):
io_loop.add_callback(lambda: 1 / 0)
try:
yield gen.Task(fail_task)
raise Exception("did not get expected exception")
except ZeroDivisionError:
self.finish('ok')
class UndecoratedCoroutinesHandler(RequestHandler):
@gen.coroutine
def prepare(self):
self.chunks = []
yield gen.Task(IOLoop.current().add_callback)
self.chunks.append('1')
@gen.coroutine
def get(self):
self.chunks.append('2')
yield gen.Task(IOLoop.current().add_callback)
self.chunks.append('3')
yield gen.Task(IOLoop.current().add_callback)
self.write(''.join(self.chunks))
class AsyncPrepareErrorHandler(RequestHandler):
@gen.coroutine
def prepare(self):
yield gen.Task(IOLoop.current().add_callback)
raise HTTPError(403)
def get(self):
self.finish('ok')
class NativeCoroutineHandler(RequestHandler):
if sys.version_info > (3, 5):
exec(textwrap.dedent("""
async def get(self):
await gen.Task(IOLoop.current().add_callback)
self.write("ok")
"""))
class GenWebTest(AsyncHTTPTestCase):
def get_app(self):
return Application([
('/sequence', GenSequenceHandler),
('/coroutine_sequence', GenCoroutineSequenceHandler),
('/coroutine_unfinished_sequence',
GenCoroutineUnfinishedSequenceHandler),
('/task', GenTaskHandler),
('/exception', GenExceptionHandler),
('/coroutine_exception', GenCoroutineExceptionHandler),
('/yield_exception', GenYieldExceptionHandler),
('/undecorated_coroutine', UndecoratedCoroutinesHandler),
('/async_prepare_error', AsyncPrepareErrorHandler),
('/native_coroutine', NativeCoroutineHandler),
])
def test_sequence_handler(self):
response = self.fetch('/sequence')
self.assertEqual(response.body, b"123")
def test_coroutine_sequence_handler(self):
response = self.fetch('/coroutine_sequence')
self.assertEqual(response.body, b"123")
def test_coroutine_unfinished_sequence_handler(self):
response = self.fetch('/coroutine_unfinished_sequence')
self.assertEqual(response.body, b"123")
def test_task_handler(self):
response = self.fetch('/task?url=%s' % url_escape(self.get_url('/sequence')))
self.assertEqual(response.body, b"got response: 123")
def test_exception_handler(self):
# Make sure we get an error and not a timeout
with ExpectLog(app_log, "Uncaught exception GET /exception"):
response = self.fetch('/exception')
self.assertEqual(500, response.code)
def test_coroutine_exception_handler(self):
# Make sure we get an error and not a timeout
with ExpectLog(app_log, "Uncaught exception GET /coroutine_exception"):
response = self.fetch('/coroutine_exception')
self.assertEqual(500, response.code)
def test_yield_exception_handler(self):
response = self.fetch('/yield_exception')
self.assertEqual(response.body, b'ok')
def test_undecorated_coroutines(self):
response = self.fetch('/undecorated_coroutine')
self.assertEqual(response.body, b'123')
def test_async_prepare_error_handler(self):
response = self.fetch('/async_prepare_error')
self.assertEqual(response.code, 403)
@skipBefore35
def test_native_coroutine_handler(self):
response = self.fetch('/native_coroutine')
self.assertEqual(response.code, 200)
self.assertEqual(response.body, b'ok')
class WithTimeoutTest(AsyncTestCase):
@gen_test
def test_timeout(self):
with self.assertRaises(gen.TimeoutError):
yield gen.with_timeout(datetime.timedelta(seconds=0.1),
Future())
@gen_test
def test_completes_before_timeout(self):
future = Future()
self.io_loop.add_timeout(datetime.timedelta(seconds=0.1),
lambda: future.set_result('asdf'))
result = yield gen.with_timeout(datetime.timedelta(seconds=3600),
future, io_loop=self.io_loop)
self.assertEqual(result, 'asdf')
@gen_test
def test_fails_before_timeout(self):
future = Future()
self.io_loop.add_timeout(
datetime.timedelta(seconds=0.1),
lambda: future.set_exception(ZeroDivisionError()))
with self.assertRaises(ZeroDivisionError):
yield gen.with_timeout(datetime.timedelta(seconds=3600),
future, io_loop=self.io_loop)
@gen_test
def test_already_resolved(self):
future = Future()
future.set_result('asdf')
result = yield gen.with_timeout(datetime.timedelta(seconds=3600),
future, io_loop=self.io_loop)
self.assertEqual(result, 'asdf')
@unittest.skipIf(futures is None, 'futures module not present')
@gen_test
def test_timeout_concurrent_future(self):
with futures.ThreadPoolExecutor(1) as executor:
with self.assertRaises(gen.TimeoutError):
yield gen.with_timeout(self.io_loop.time(),
executor.submit(time.sleep, 0.1))
@unittest.skipIf(futures is None, 'futures module not present')
@gen_test
def test_completed_concurrent_future(self):
with futures.ThreadPoolExecutor(1) as executor:
yield gen.with_timeout(datetime.timedelta(seconds=3600),
executor.submit(lambda: None))
class WaitIteratorTest(AsyncTestCase):
@gen_test
def test_empty_iterator(self):
g = gen.WaitIterator()
self.assertTrue(g.done(), 'empty generator iterated')
with self.assertRaises(ValueError):
g = gen.WaitIterator(False, bar=False)
self.assertEqual(g.current_index, None, "bad nil current index")
self.assertEqual(g.current_future, None, "bad nil current future")
@gen_test
def test_already_done(self):
f1 = Future()
f2 = Future()
f3 = Future()
f1.set_result(24)
f2.set_result(42)
f3.set_result(84)
g = gen.WaitIterator(f1, f2, f3)
i = 0
while not g.done():
r = yield g.next()
# Order is not guaranteed, but the current implementation
# preserves ordering of already-done Futures.
if i == 0:
self.assertEqual(g.current_index, 0)
self.assertIs(g.current_future, f1)
self.assertEqual(r, 24)
elif i == 1:
self.assertEqual(g.current_index, 1)
self.assertIs(g.current_future, f2)
self.assertEqual(r, 42)
elif i == 2:
self.assertEqual(g.current_index, 2)
self.assertIs(g.current_future, f3)
self.assertEqual(r, 84)
i += 1
self.assertEqual(g.current_index, None, "bad nil current index")
self.assertEqual(g.current_future, None, "bad nil current future")
dg = gen.WaitIterator(f1=f1, f2=f2)
while not dg.done():
dr = yield dg.next()
if dg.current_index == "f1":
self.assertTrue(dg.current_future == f1 and dr == 24,
"WaitIterator dict status incorrect")
elif dg.current_index == "f2":
self.assertTrue(dg.current_future == f2 and dr == 42,
"WaitIterator dict status incorrect")
else:
self.fail("got bad WaitIterator index {}".format(
dg.current_index))
i += 1
self.assertEqual(dg.current_index, None, "bad nil current index")
self.assertEqual(dg.current_future, None, "bad nil current future")
def finish_coroutines(self, iteration, futures):
if iteration == 3:
futures[2].set_result(24)
elif iteration == 5:
futures[0].set_exception(ZeroDivisionError())
elif iteration == 8:
futures[1].set_result(42)
futures[3].set_result(84)
if iteration < 8:
self.io_loop.add_callback(self.finish_coroutines, iteration + 1, futures)
@gen_test
def test_iterator(self):
futures = [Future(), Future(), Future(), Future()]
self.finish_coroutines(0, futures)
g = gen.WaitIterator(*futures)
i = 0
while not g.done():
try:
r = yield g.next()
except ZeroDivisionError:
self.assertIs(g.current_future, futures[0],
'exception future invalid')
else:
if i == 0:
self.assertEqual(r, 24, 'iterator value incorrect')
self.assertEqual(g.current_index, 2, 'wrong index')
elif i == 2:
self.assertEqual(r, 42, 'iterator value incorrect')
self.assertEqual(g.current_index, 1, 'wrong index')
elif i == 3:
self.assertEqual(r, 84, 'iterator value incorrect')
self.assertEqual(g.current_index, 3, 'wrong index')
i += 1
@skipBefore35
@gen_test
def test_iterator_async_await(self):
# Recreate the previous test with py35 syntax. It's a little clunky
# because of the way the previous test handles an exception on
# a single iteration.
futures = [Future(), Future(), Future(), Future()]
self.finish_coroutines(0, futures)
self.finished = False
namespace = exec_test(globals(), locals(), """
async def f():
i = 0
g = gen.WaitIterator(*futures)
try:
async for r in g:
if i == 0:
self.assertEqual(r, 24, 'iterator value incorrect')
self.assertEqual(g.current_index, 2, 'wrong index')
else:
raise Exception("expected exception on iteration 1")
i += 1
except ZeroDivisionError:
i += 1
async for r in g:
if i == 2:
self.assertEqual(r, 42, 'iterator value incorrect')
self.assertEqual(g.current_index, 1, 'wrong index')
elif i == 3:
self.assertEqual(r, 84, 'iterator value incorrect')
self.assertEqual(g.current_index, 3, 'wrong index')
else:
raise Exception("didn't expect iteration %d" % i)
i += 1
self.finished = True
""")
yield namespace['f']()
self.assertTrue(self.finished)
@gen_test
def test_no_ref(self):
# In this usage, there is no direct hard reference to the
# WaitIterator itself, only the Future it returns. Since
# WaitIterator uses weak references internally to improve GC
# performance, this used to cause problems.
yield gen.with_timeout(datetime.timedelta(seconds=0.1),
gen.WaitIterator(gen.sleep(0)).next())
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
shivam1111/odoo | openerp-wsgi.py | 363 | 1811 | # WSGI Handler sample configuration file.
#
# Change the appropriate settings below, in order to provide the parameters
# that would normally be passed in the command-line.
# (at least conf['addons_path'])
#
# For generic wsgi handlers a global application is defined.
# For uwsgi this should work:
# $ uwsgi_python --http :9090 --pythonpath . --wsgi-file openerp-wsgi.py
#
# For gunicorn additional globals need to be defined in the Gunicorn section.
# Then the following command should run:
# $ gunicorn openerp:service.wsgi_server.application -c openerp-wsgi.py
import openerp
#----------------------------------------------------------
# Common
#----------------------------------------------------------
openerp.multi_process = True # Nah!
# Equivalent of --load command-line option
openerp.conf.server_wide_modules = ['web']
conf = openerp.tools.config
# Path to the OpenERP Addons repository (comma-separated for
# multiple locations)
conf['addons_path'] = '../../addons/trunk,../../web/trunk/addons'
# Optional database config if not using local socket
#conf['db_name'] = 'mycompany'
#conf['db_host'] = 'localhost'
#conf['db_user'] = 'foo'
#conf['db_port'] = 5432
#conf['db_password'] = 'secret'
#----------------------------------------------------------
# Generic WSGI handlers application
#----------------------------------------------------------
application = openerp.service.wsgi_server.application
openerp.service.server.load_server_wide_modules()
#----------------------------------------------------------
# Gunicorn
#----------------------------------------------------------
# Standard OpenERP XML-RPC port is 8069
bind = '127.0.0.1:8069'
pidfile = '.gunicorn.pid'
workers = 4
timeout = 240
max_requests = 2000
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mikemow/youtube-dl | youtube_dl/extractor/camdemy.py | 124 | 5425 | # coding: utf-8
from __future__ import unicode_literals
import datetime
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urlparse,
)
from ..utils import (
parse_iso8601,
str_to_int,
)
class CamdemyIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?camdemy\.com/media/(?P<id>\d+)'
_TESTS = [{
# single file
'url': 'http://www.camdemy.com/media/5181/',
'md5': '5a5562b6a98b37873119102e052e311b',
'info_dict': {
'id': '5181',
'ext': 'mp4',
'title': 'Ch1-1 Introduction, Signals (02-23-2012)',
'thumbnail': 're:^https?://.*\.jpg$',
'description': '',
'creator': 'ss11spring',
'upload_date': '20130114',
'timestamp': 1358154556,
'view_count': int,
}
}, {
# With non-empty description
'url': 'http://www.camdemy.com/media/13885',
'md5': '4576a3bb2581f86c61044822adbd1249',
'info_dict': {
'id': '13885',
'ext': 'mp4',
'title': 'EverCam + Camdemy QuickStart',
'thumbnail': 're:^https?://.*\.jpg$',
'description': 'md5:050b62f71ed62928f8a35f1a41e186c9',
'creator': 'evercam',
'upload_date': '20140620',
'timestamp': 1403271569,
}
}, {
# External source
'url': 'http://www.camdemy.com/media/14842',
'md5': '50e1c3c3aa233d3d7b7daa2fa10b1cf7',
'info_dict': {
'id': '2vsYQzNIsJo',
'ext': 'mp4',
'upload_date': '20130211',
'uploader': 'Hun Kim',
'description': 'Excel 2013 Tutorial for Beginners - How to add Password Protection',
'uploader_id': 'hunkimtutorials',
'title': 'Excel 2013 Tutorial - How to add Password Protection',
}
}]
def _real_extract(self, url):
video_id = self._match_id(url)
page = self._download_webpage(url, video_id)
src_from = self._html_search_regex(
r"<div class='srcFrom'>Source: <a title='([^']+)'", page,
'external source', default=None)
if src_from:
return self.url_result(src_from)
oembed_obj = self._download_json(
'http://www.camdemy.com/oembed/?format=json&url=' + url, video_id)
thumb_url = oembed_obj['thumbnail_url']
video_folder = compat_urlparse.urljoin(thumb_url, 'video/')
file_list_doc = self._download_xml(
compat_urlparse.urljoin(video_folder, 'fileList.xml'),
video_id, 'Filelist XML')
file_name = file_list_doc.find('./video/item/fileName').text
video_url = compat_urlparse.urljoin(video_folder, file_name)
timestamp = parse_iso8601(self._html_search_regex(
r"<div class='title'>Posted\s*:</div>\s*<div class='value'>([^<>]+)<",
page, 'creation time', fatal=False),
delimiter=' ', timezone=datetime.timedelta(hours=8))
view_count = str_to_int(self._html_search_regex(
r"<div class='title'>Views\s*:</div>\s*<div class='value'>([^<>]+)<",
page, 'view count', fatal=False))
return {
'id': video_id,
'url': video_url,
'title': oembed_obj['title'],
'thumbnail': thumb_url,
'description': self._html_search_meta('description', page),
'creator': oembed_obj['author_name'],
'duration': oembed_obj['duration'],
'timestamp': timestamp,
'view_count': view_count,
}
class CamdemyFolderIE(InfoExtractor):
_VALID_URL = r'http://www.camdemy.com/folder/(?P<id>\d+)'
_TESTS = [{
# links with trailing slash
'url': 'http://www.camdemy.com/folder/450',
'info_dict': {
'id': '450',
'title': '信號與系統 2012 & 2011 (Signals and Systems)',
},
'playlist_mincount': 145
}, {
# links without trailing slash
# and multi-page
'url': 'http://www.camdemy.com/folder/853',
'info_dict': {
'id': '853',
'title': '科學計算 - 使用 Matlab'
},
'playlist_mincount': 20
}, {
        # with a displayMode parameter, to exercise the code that adds query parameters
'url': 'http://www.camdemy.com/folder/853/?displayMode=defaultOrderByOrg',
'info_dict': {
'id': '853',
'title': '科學計算 - 使用 Matlab'
},
'playlist_mincount': 20
}]
def _real_extract(self, url):
folder_id = self._match_id(url)
# Add displayMode=list so that all links are displayed in a single page
parsed_url = list(compat_urlparse.urlparse(url))
query = dict(compat_urlparse.parse_qsl(parsed_url[4]))
query.update({'displayMode': 'list'})
parsed_url[4] = compat_urllib_parse.urlencode(query)
final_url = compat_urlparse.urlunparse(parsed_url)
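        # For example (illustrative), '/folder/853/?displayMode=defaultOrderByOrg'
        # becomes '/folder/853/?displayMode=list', so every media link is
        # rendered on one page and the regex below can collect them all.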
page = self._download_webpage(final_url, folder_id)
matches = re.findall(r"href='(/media/\d+/?)'", page)
entries = [self.url_result('http://www.camdemy.com' + media_path)
for media_path in matches]
folder_title = self._html_search_meta('keywords', page)
return self.playlist_result(entries, folder_id, folder_title)
| unlicense |
zturchan/CMPUT410-Lab6 | v1/lib/python2.7/site-packages/pip-1.1-py2.7.egg/pip/util.py | 51 | 16632 | import sys
import shutil
import os
import stat
import re
import posixpath
import pkg_resources
import zipfile
import tarfile
from pip.exceptions import InstallationError, BadCommand
from pip.backwardcompat import WindowsError, string_types, raw_input
from pip.locations import site_packages, running_under_virtualenv
from pip.log import logger
__all__ = ['rmtree', 'display_path', 'backup_dir',
'find_command', 'ask', 'Inf',
'normalize_name', 'splitext',
'format_size', 'is_installable_dir',
'is_svn_page', 'file_contents',
'split_leading_dir', 'has_leading_dir',
'make_path_relative', 'normalize_path',
'renames', 'get_terminal_size',
'unzip_file', 'untar_file', 'create_download_cache_folder',
'cache_download', 'unpack_file']
def rmtree(dir, ignore_errors=False):
shutil.rmtree(dir, ignore_errors=ignore_errors,
onerror=rmtree_errorhandler)
def rmtree_errorhandler(func, path, exc_info):
"""On Windows, the files in .svn are read-only, so when rmtree() tries to
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
exctype, value = exc_info[:2]
# On Python 2.4, it will be OSError number 13
# On all more recent Pythons, it'll be WindowsError number 5
if not ((exctype is WindowsError and value.args[0] == 5) or
(exctype is OSError and value.args[0] == 13)):
raise
# file type should currently be read only
if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
raise
# convert to read/write
os.chmod(path, stat.S_IWRITE)
# use the original function to repeat the operation
func(path)
def display_path(path):
"""Gives the display value for a given path, making it relative to cwd
if possible."""
path = os.path.normcase(os.path.abspath(path))
if path.startswith(os.getcwd() + os.path.sep):
path = '.' + path[len(os.getcwd()):]
return path
def backup_dir(dir, ext='.bak'):
"""Figure out the name of a directory to back up the given dir to
(adding .bak, .bak2, etc)"""
n = 1
extension = ext
while os.path.exists(dir + extension):
n += 1
extension = ext + str(n)
return dir + extension
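# Illustrative example (the paths are assumptions about filesystem state): if
# '/tmp/build' exists but '/tmp/build.bak' does not, backup_dir('/tmp/build')
# returns '/tmp/build.bak'; if '.bak' is taken too, it counts up: '.bak2', '.bak3', ...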
def find_command(cmd, paths=None, pathext=None):
"""Searches the PATH for the given command and returns its path"""
if paths is None:
paths = os.environ.get('PATH', '').split(os.pathsep)
if isinstance(paths, string_types):
paths = [paths]
# check if there are funny path extensions for executables, e.g. Windows
if pathext is None:
pathext = get_pathext()
pathext = [ext for ext in pathext.lower().split(os.pathsep)]
# don't use extensions if the command ends with one of them
if os.path.splitext(cmd)[1].lower() in pathext:
pathext = ['']
# check if we find the command on PATH
for path in paths:
# try without extension first
cmd_path = os.path.join(path, cmd)
for ext in pathext:
# then including the extension
cmd_path_ext = cmd_path + ext
if os.path.isfile(cmd_path_ext):
return cmd_path_ext
if os.path.isfile(cmd_path):
return cmd_path
raise BadCommand('Cannot find command %r' % cmd)
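# Illustrative usage (the resolved path is an assumption about the host system):
#   find_command('git')      -> e.g. '/usr/bin/git', after scanning PATH entries
#   find_command('no-such')  -> raises BadCommand
# On Windows the PATHEXT extensions ('.exe', '.bat', ...) are also tried.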
def get_pathext(default_pathext=None):
"""Returns the path extensions from environment or a default"""
if default_pathext is None:
default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD'])
pathext = os.environ.get('PATHEXT', default_pathext)
return pathext
def ask_path_exists(message, options):
for action in os.environ.get('PIP_EXISTS_ACTION', ''):
if action in options:
return action
return ask(message, options)
def ask(message, options):
"""Ask the message interactively, with the given possible responses"""
while 1:
if os.environ.get('PIP_NO_INPUT'):
raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
response = raw_input(message)
response = response.strip().lower()
if response not in options:
print('Your response (%r) was not one of the expected responses: %s' % (
response, ', '.join(options)))
else:
return response
class _Inf(object):
"""I am bigger than everything!"""
def __cmp__(self, a):
if self is a:
return 0
return 1
def __repr__(self):
return 'Inf'
Inf = _Inf()
del _Inf
_normalize_re = re.compile(r'[^a-z]', re.I)
def normalize_name(name):
return _normalize_re.sub('-', name.lower())
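# Illustrative: anything that is not a letter collapses to a dash, so
#   normalize_name('zope.interface') -> 'zope-interface'
#   normalize_name('My_Package2')    -> 'my-package-'   (digits count as non-letters)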
def format_size(bytes):
if bytes > 1000*1000:
return '%.1fMb' % (bytes/1000.0/1000)
elif bytes > 10*1000:
return '%iKb' % (bytes/1000)
elif bytes > 1000:
return '%.1fKb' % (bytes/1000.0)
else:
return '%ibytes' % bytes
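# Illustrative thresholds (note the decimal, not binary, units):
#   format_size(2500000) -> '2.5Mb'
#   format_size(50000)   -> '50Kb'
#   format_size(2048)    -> '2.0Kb'
#   format_size(999)     -> '999bytes'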
def is_installable_dir(path):
"""Return True if `path` is a directory containing a setup.py file."""
if not os.path.isdir(path):
return False
setup_py = os.path.join(path, 'setup.py')
if os.path.isfile(setup_py):
return True
return False
def is_svn_page(html):
"""Returns true if the page appears to be the index page of an svn repository"""
return (re.search(r'<title>[^<]*Revision \d+:', html)
and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
def file_contents(filename):
fp = open(filename, 'rb')
try:
return fp.read().decode('utf-8')
finally:
fp.close()
def split_leading_dir(path):
path = str(path)
path = path.lstrip('/').lstrip('\\')
if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
or '\\' not in path):
return path.split('/', 1)
elif '\\' in path:
return path.split('\\', 1)
else:
return path, ''
def has_leading_dir(paths):
"""Returns true if all the paths have the same leading path name
(i.e., everything is in one subdirectory in an archive)"""
common_prefix = None
for path in paths:
prefix, rest = split_leading_dir(path)
if not prefix:
return False
elif common_prefix is None:
common_prefix = prefix
elif prefix != common_prefix:
return False
return True
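# Illustrative (member lists as they come out of zip/tar archives): an archive
# whose members all share one top-level directory can be flattened on extraction:
#   has_leading_dir(['pkg-1.0/setup.py', 'pkg-1.0/pkg/__init__.py']) -> True
#   has_leading_dir(['setup.py', 'pkg/__init__.py'])                 -> False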
def make_path_relative(path, rel_to):
"""
Make a filename relative, where the filename path, and it is
relative to rel_to
>>> make_relative_path('/usr/share/something/a-file.pth',
... '/usr/share/another-place/src/Directory')
'../../../something/a-file.pth'
>>> make_relative_path('/usr/share/something/a-file.pth',
... '/home/user/src/Directory')
'../../../usr/share/something/a-file.pth'
>>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
'a-file.pth'
"""
path_filename = os.path.basename(path)
path = os.path.dirname(path)
path = os.path.normpath(os.path.abspath(path))
rel_to = os.path.normpath(os.path.abspath(rel_to))
path_parts = path.strip(os.path.sep).split(os.path.sep)
rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
path_parts.pop(0)
rel_to_parts.pop(0)
full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename]
if full_parts == ['']:
return '.' + os.path.sep
return os.path.sep.join(full_parts)
def normalize_path(path):
"""
Convert a path to its canonical, case-normalized, absolute version.
"""
return os.path.normcase(os.path.realpath(path))
def splitext(path):
"""Like os.path.splitext, but take off .tar too"""
base, ext = posixpath.splitext(path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
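# Illustrative: the '.tar' part is kept together with the compression suffix:
#   splitext('dist/pkg-1.0.tar.gz') -> ('dist/pkg-1.0', '.tar.gz')
#   splitext('dist/pkg-1.0.zip')    -> ('dist/pkg-1.0', '.zip')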
def renames(old, new):
"""Like os.renames(), but handles renaming across devices."""
# Implementation borrowed from os.renames().
head, tail = os.path.split(new)
if head and tail and not os.path.exists(head):
os.makedirs(head)
shutil.move(old, new)
head, tail = os.path.split(old)
if head and tail:
try:
os.removedirs(head)
except OSError:
pass
def is_local(path):
"""
Return True if path is within sys.prefix, if we're running in a virtualenv.
If we're not in a virtualenv, all paths are considered "local."
"""
if not running_under_virtualenv():
return True
return normalize_path(path).startswith(normalize_path(sys.prefix))
def dist_is_local(dist):
"""
Return True if given Distribution object is installed locally
(i.e. within current virtualenv).
Always True if we're not in a virtualenv.
"""
return is_local(dist_location(dist))
def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'python')):
"""
Return a list of installed Distribution objects.
If ``local_only`` is True (default), only return installations
local to the current virtualenv, if in a virtualenv.
``skip`` argument is an iterable of lower-case project names to
ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also
skip virtualenv?]
"""
if local_only:
local_test = dist_is_local
else:
local_test = lambda d: True
return [d for d in pkg_resources.working_set if local_test(d) and d.key not in skip]
def egg_link_path(dist):
"""
Return the path where we'd expect to find a .egg-link file for
this distribution. (There doesn't seem to be any metadata in the
Distribution object for a develop egg that points back to its
.egg-link and easy-install.pth files).
This won't find a globally-installed develop egg if we're in a
virtualenv.
"""
return os.path.join(site_packages, dist.project_name) + '.egg-link'
def dist_location(dist):
"""
Get the site-packages location of this distribution. Generally
this is dist.location, except in the case of develop-installed
packages, where dist.location is the source code location, and we
want to know where the egg-link file is.
"""
egg_link = egg_link_path(dist)
if os.path.exists(egg_link):
return egg_link
return dist.location
def get_terminal_size():
"""Returns a tuple (x, y) representing the width(x) and the height(x)
in characters of the terminal window."""
def ioctl_GWINSZ(fd):
try:
import fcntl
import termios
import struct
cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
'1234'))
except:
return None
        if cr == (0, 0):
            return None
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(cr[1]), int(cr[0])
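# Illustrative usage (the concrete numbers are assumptions): with no usable tty
# and neither LINES nor COLUMNS set, the environment fallback yields the classic
# 80x25:
#   width, height = get_terminal_size()  # -> (80, 25)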
def unzip_file(filename, location, flatten=True):
"""Unzip the file (zip file located at filename) to the destination
location"""
if not os.path.exists(location):
os.makedirs(location)
zipfp = open(filename, 'rb')
try:
zip = zipfile.ZipFile(zipfp)
leading = has_leading_dir(zip.namelist()) and flatten
for name in zip.namelist():
data = zip.read(name)
fn = name
if leading:
fn = split_leading_dir(name)[1]
fn = os.path.join(location, fn)
dir = os.path.dirname(fn)
if not os.path.exists(dir):
os.makedirs(dir)
if fn.endswith('/') or fn.endswith('\\'):
# A directory
if not os.path.exists(fn):
os.makedirs(fn)
else:
fp = open(fn, 'wb')
try:
fp.write(data)
finally:
fp.close()
finally:
zipfp.close()
def untar_file(filename, location):
"""Untar the file (tar file located at filename) to the destination location"""
if not os.path.exists(location):
os.makedirs(location)
if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
mode = 'r:gz'
elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
mode = 'r:bz2'
elif filename.lower().endswith('.tar'):
mode = 'r'
else:
logger.warn('Cannot determine compression type for file %s' % filename)
mode = 'r:*'
tar = tarfile.open(filename, mode)
try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
leading = has_leading_dir([
member.name for member in tar.getmembers()
if member.name != 'pax_global_header'
])
for member in tar.getmembers():
fn = member.name
if fn == 'pax_global_header':
continue
if leading:
fn = split_leading_dir(fn)[1]
path = os.path.join(location, fn)
if member.isdir():
if not os.path.exists(path):
os.makedirs(path)
elif member.issym():
try:
tar._extract_member(member, path)
except:
e = sys.exc_info()[1]
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warn(
'In the tar file %s the member %s is invalid: %s'
% (filename, member.name, e))
continue
else:
try:
fp = tar.extractfile(member)
except (KeyError, AttributeError):
e = sys.exc_info()[1]
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warn(
'In the tar file %s the member %s is invalid: %s'
% (filename, member.name, e))
continue
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
destfp = open(path, 'wb')
try:
shutil.copyfileobj(fp, destfp)
finally:
destfp.close()
fp.close()
finally:
tar.close()
def create_download_cache_folder(folder):
logger.indent -= 2
logger.notify('Creating supposed download cache at %s' % folder)
logger.indent += 2
os.makedirs(folder)
def cache_download(target_file, temp_location, content_type):
logger.notify('Storing download in cache at %s' % display_path(target_file))
shutil.copyfile(temp_location, target_file)
fp = open(target_file+'.content-type', 'w')
fp.write(content_type)
fp.close()
os.unlink(temp_location)
def unpack_file(filename, location, content_type, link):
if (content_type == 'application/zip'
or filename.endswith('.zip')
or filename.endswith('.pybundle')
or zipfile.is_zipfile(filename)):
unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
elif (content_type == 'application/x-gzip'
or tarfile.is_tarfile(filename)
or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
untar_file(filename, location)
elif (content_type and content_type.startswith('text/html')
and is_svn_page(file_contents(filename))):
# We don't really care about this
from pip.vcs.subversion import Subversion
Subversion('svn+' + link.url).unpack(location)
else:
## FIXME: handle?
## FIXME: magic signatures?
logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
% (filename, location, content_type))
raise InstallationError('Cannot determine archive format of %s' % location)
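# Illustrative dispatch summary (filenames are assumptions; 'link' may be None
# everywhere except the svn branch, which reads link.url):
#   unpack_file('pkg.zip', dest, 'application/zip', None)       -> unzip_file()
#   unpack_file('pkg.tar.gz', dest, 'application/x-gzip', None) -> untar_file()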
| apache-2.0 |
inercia/candelabra | candelabra/topology/box.py | 1 | 7659 | #
# Candelabra
#
# Copyright Alvaro Saurin 2013 - All right Reserved
#
import os
from logging import getLogger
import tempfile
import tarfile
import json
from candelabra.tasks import TaskGenerator
from candelabra.errors import UnsupportedBoxException, ImportException
from candelabra.plugins import PLUGINS_REGISTRIES
from candelabra.topology.node import TopologyNode, TopologyAttribute
logger = getLogger(__name__)
#: a mbyte
MBYTE = 1024 * 1024
#: download chunk size
CHUNK_SIZE = 4 * MBYTE
class BoxNode(TopologyNode, TaskGenerator):
""" A box is one or more virtual machine templates that will be used for creating
multiple virtual machines following a topology.
Boxes contain subdirectories for providers, where appliances are stored.
Example: box1 has two appliances: a virtualbox appliance and a vmware appliance
.. code-block:: yaml
- box1
- virtualbox
- box.ovf
- disk.vmdk
- vmware
- ...
"""
__known_attributes = [
TopologyAttribute('name', str, default='', inherited=True),
TopologyAttribute('path', str, default='', inherited=True),
TopologyAttribute('url', str, default='', inherited=True),
TopologyAttribute('username', str, default='vagrant', inherited=True),
TopologyAttribute('password', str, default='password', inherited=True),
TopologyAttribute('sudo_command', str, default='/usr/bin/sudo', inherited=True),
TopologyAttribute('ifconfig_command', str, default='/sbin/ifconfig', inherited=True),
]
def __init__(self, _parent=None, **kwargs):
""" Initialize a topology node
"""
super(BoxNode, self).__init__(_parent=_parent, **kwargs)
TopologyAttribute.setall(self, kwargs, self.__known_attributes)
self.appliances = {}
self.path = getattr(self, 'cfg_path', None)
if not self.path:
from candelabra.boxes import BoxesStorage
self.path = os.path.join(BoxesStorage.get_storage_root(), self.cfg_name)
if not os.path.exists(self.path):
logger.debug('creating box directory "%s"', self.path)
os.makedirs(self.path)
self.missing = True
else:
self.missing = False
def load(self):
""" Load a box from a path
"""
supported_providers_names = PLUGINS_REGISTRIES['candelabra.provider'].names
logger.debug('... searching for providers in box %s', self.cfg_name)
self.appliances.clear()
for provider in os.listdir(self.path):
if provider in set(supported_providers_names):
logger.debug('...... found a %s template', provider)
providers_registry = PLUGINS_REGISTRIES['candelabra.provider']
provider_plugin = providers_registry.plugins[provider]
appliance_class = provider_plugin.APPLIANCE
full_provider_path = os.path.abspath(os.path.join(self.path, provider))
# try to load the appliance provider for this box class (ie, 'VirtualboxAppliance')
try:
appliance_instance = appliance_class.from_dir(full_provider_path)
except UnsupportedBoxException, e:
logger.warning('...... not a valid %s template in %s', provider, full_provider_path)
else:
self.appliances[provider] = appliance_instance
return bool(len(self.appliances) > 0)
def get_appliance(self, provider):
""" Get a instance of one of the appliances in this box, or None if not found
"""
if len(self.appliances) == 0:
self.load()
try:
# returns the provider (ie, a 'VirtualboxAppliance' instance)
return self.appliances[provider]
except KeyError:
return None
#####################
# tasks
#####################
def do_download(self):
""" Download a box
"""
if self.missing:
if not self.cfg_url:
raise ImportException('input URL not specified (url="%s")' % str(self.cfg_url))
logger.info('downloading image from "%s"', self.cfg_url)
try:
temp_box_fp, temp_box_name = tempfile.mkstemp()
temp_box_file = os.fdopen(temp_box_fp, 'w+')
except IOError, e:
raise ImportException('could not create temporal file for download: %s' % str(e))
try:
logger.info('... downloading to temporal file "%s"', temp_box_name)
import requests
downloaded_bytes = 0
last_downloaded_mbytes = 0
r = requests.get(self.cfg_url, stream=True)
for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                        if chunk:  # filter out keep-alive new chunks
                            # file.write() returns None on Python 2, so track the byte count via len(chunk)
                            temp_box_file.write(chunk)
                            downloaded_bytes += len(chunk)
                            downloaded_mbytes = int(downloaded_bytes / MBYTE)
                            if downloaded_mbytes > last_downloaded_mbytes:
                                logger.debug('downloaded=%d Mb', downloaded_mbytes)
                                last_downloaded_mbytes = downloaded_mbytes
                            temp_box_file.flush()
except KeyboardInterrupt:
logger.debug('... removing %s', temp_box_name)
os.remove(temp_box_name)
raise ImportException('could not perform download')
except Exception, e:
logger.critical('while downloading: %s', str(e))
logger.debug('... removing %s', temp_box_name)
os.remove(temp_box_name)
raise ImportException('could not perform download')
finally:
temp_box_file.close()
            logger.info('downloaded %d bytes!', downloaded_bytes)
self.missing = False
appliance_path = os.path.join(self.path, 'unknown')
if not os.path.exists(appliance_path):
logger.debug('creating unknown box directory "%s"', appliance_path)
os.makedirs(appliance_path)
try:
logger.info('extracting box...')
tar = tarfile.open(temp_box_name)
tar.extractall(path=appliance_path)
tar.close()
logger.debug('... done')
except IOError, e:
logger.info('error extracting box: %s', str(e))
finally:
logger.debug('removing temporal file...')
os.remove(temp_box_name)
metadata_file_path = os.path.join(appliance_path, 'metadata.json')
if os.path.isfile(metadata_file_path):
with open(metadata_file_path, 'r') as metadata_file:
metadata = json.load(metadata_file)
provider = metadata["provider"]
logger.debug('renaming box to "%s"', provider)
fixed_appliance_path = os.path.join(self.path, provider)
os.rename(appliance_path, fixed_appliance_path)
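    # Illustrative lifecycle sketch (a rough assumption about the caller's side,
    # using the 'cfg_' attribute mapping above): a BoxNode created with a 'url'
    # starts out missing; do_download() fetches and unpacks it under
    # <storage>/<name>/<provider>/, after which load() fills self.appliances and
    # get_appliance('virtualbox') can return a provider template.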
#####################
# auxiliary
#####################
def __repr__(self):
""" The representation
"""
return "<Box %s appliances=%s %s at 0x%x>" % (self.cfg_name,
self.appliances, '[missing]' if self.missing else '[present]',
id(self))
| bsd-2-clause |
kkintaro/termite-data-server | web2py/applications-original/admin/controllers/debug.py | 12 | 7978 | import os
import sys
import cStringIO
import gluon.contrib.shell
import gluon.dal
import gluon.html
import gluon.validators
import code
import thread
from gluon.debug import communicate, web_debugger, qdb_debugger
import pydoc
if DEMO_MODE or MULTI_USER_MODE:
session.flash = T('disabled in demo mode')
redirect(URL('default', 'site'))
FE = 10 ** 9
def index():
app = request.args(0) or 'admin'
reset()
# read buffer
data = communicate()
return dict(app=app, data=data)
def callback():
app = request.args[0]
command = request.vars.statement
session['debug_commands:' + app].append(command)
output = communicate(command)
k = len(session['debug_commands:' + app]) - 1
return '[%i] %s%s\n' % (k + 1, command, output)
def reset():
app = request.args(0) or 'admin'
session['debug_commands:' + app] = []
return 'done'
# new implementation using qdb
def interact():
app = request.args(0) or 'admin'
reset()
# process all pending messages in the frontend
web_debugger.run()
# if debugging, filename and lineno should have valid values
filename = web_debugger.filename
lineno = web_debugger.lineno
if filename:
        # prevent IOError 2 on some circumstances (EAFP instead of os.access)
try:
lines = open(filename).readlines()
except:
lines = ""
lines = dict([(i + 1, l) for (i, l) in enumerate(
[l.strip("\n").strip("\r") for l in lines])])
filename = os.path.basename(filename)
else:
lines = {}
if filename:
web_debugger.set_burst(2)
env = web_debugger.do_environment()
f_locals = env['locals']
f_globals = {}
for name, value in env['globals'].items():
if name not in gluon.html.__all__ and \
name not in gluon.validators.__all__ and \
name not in gluon.dal.__all__:
f_globals[name] = pydoc.text.repr(value)
else:
f_locals = {}
f_globals = {}
response.headers['refresh'] = "3"
if web_debugger.exception_info:
response.flash = T('"User Exception" debug mode. '
'An error ticket could be issued!')
return dict(app=app, data="",
filename=web_debugger.filename, lines=lines, lineno=lineno,
f_globals=f_globals, f_locals=f_locals,
exception=web_debugger.exception_info)
def step():
web_debugger.do_step()
redirect(URL("interact"))
def next():
web_debugger.do_next()
redirect(URL("interact"))
def cont():
web_debugger.do_continue()
redirect(URL("interact"))
def ret():
web_debugger.do_return()
redirect(URL("interact"))
def stop():
web_debugger.do_quit()
redirect(URL("interact"))
def execute():
app = request.args[0]
command = request.vars.statement
session['debug_commands:' + app].append(command)
try:
output = web_debugger.do_exec(command)
if output is None:
output = ""
except Exception, e:
output = T("Exception %s") % str(e)
k = len(session['debug_commands:' + app]) - 1
return '[%i] %s%s\n' % (k + 1, command, output)
def breakpoints():
"Add or remove breakpoints"
# Get all .py files
files = listdir(apath('', r=request), '.*\.py$')
files = [filename for filename in files
if filename and 'languages' not in filename
and not filename.startswith("admin")
and not filename.startswith("examples")]
form = SQLFORM.factory(
Field('filename', requires=IS_IN_SET(files), label=T("Filename")),
Field('lineno', 'integer', label=T("Line number"),
requires=IS_NOT_EMPTY()),
Field('temporary', 'boolean', label=T("Temporary"),
comment=T("deleted after first hit")),
Field('condition', 'string', label=T("Condition"),
comment=T("honored only if the expression evaluates to true")),
)
if form.accepts(request.vars, session):
filename = os.path.join(request.env['applications_parent'],
'applications', form.vars.filename)
err = qdb_debugger.do_set_breakpoint(filename,
form.vars.lineno,
form.vars.temporary,
form.vars.condition)
response.flash = T("Set Breakpoint on %s at line %s: %s") % (
filename, form.vars.lineno, err or T('successful'))
for item in request.vars:
if item[:7] == 'delete_':
qdb_debugger.do_clear(item[7:])
breakpoints = [{'number': bp[0], 'filename': os.path.basename(bp[1]),
'path': bp[1], 'lineno': bp[2],
'temporary': bp[3], 'enabled': bp[4], 'hits': bp[5],
'condition': bp[6]}
for bp in qdb_debugger.do_list_breakpoint()]
return dict(breakpoints=breakpoints, form=form)
def toggle_breakpoint():
"Set or clear a breakpoint"
lineno = None
ok = None
try:
filename = os.path.join(request.env['applications_parent'],
'applications', request.vars.filename)
# normalize path name: replace slashes, references, etc...
filename = os.path.normpath(os.path.normcase(filename))
if not request.vars.data:
# ace send us the line number!
lineno = int(request.vars.sel_start) + 1
else:
# editarea send us the offset, manually check the cursor pos
start = 0
sel_start = int(request.vars.sel_start)
for lineno, line in enumerate(request.vars.data.split("\n")):
if sel_start <= start:
break
start += len(line) + 1
else:
lineno = None
if lineno is not None:
for bp in qdb_debugger.do_list_breakpoint():
no, bp_filename, bp_lineno, temporary, enabled, hits, cond = bp
# normalize path name: replace slashes, references, etc...
bp_filename = os.path.normpath(os.path.normcase(bp_filename))
if filename == bp_filename and lineno == bp_lineno:
err = qdb_debugger.do_clear_breakpoint(filename, lineno)
response.flash = T("Removed Breakpoint on %s at line %s", (
filename, lineno))
ok = False
break
else:
err = qdb_debugger.do_set_breakpoint(filename, lineno)
response.flash = T("Set Breakpoint on %s at line %s: %s") % (
filename, lineno, err or T('successful'))
ok = True
else:
response.flash = T("Unable to determine the line number!")
except Exception, e:
session.flash = str(e)
return response.json({'ok': ok, 'lineno': lineno})
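# Illustrative offset-to-line mapping for the editarea branch above (values are
# assumptions): with data 'a = 1\nb = 2\n' and sel_start '8' (a cursor inside
# the second line), the loop advances start by len(line) + 1 per line (6, then
# 12) and breaks with lineno 2, the 1-based line the breakpoint toggles on.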
def list_breakpoints():
"Return a list of linenumbers for current breakpoints"
breakpoints = []
ok = None
try:
filename = os.path.join(request.env['applications_parent'],
'applications', request.vars.filename)
# normalize path name: replace slashes, references, etc...
filename = os.path.normpath(os.path.normcase(filename))
for bp in qdb_debugger.do_list_breakpoint():
no, bp_filename, bp_lineno, temporary, enabled, hits, cond = bp
# normalize path name: replace slashes, references, etc...
bp_filename = os.path.normpath(os.path.normcase(bp_filename))
if filename == bp_filename:
breakpoints.append(bp_lineno)
ok = True
except Exception, e:
session.flash = str(e)
ok = False
return response.json({'ok': ok, 'breakpoints': breakpoints})
| bsd-3-clause |
openstack-hyper-v-python/numpy | numpy/distutils/fcompiler/lahey.py | 229 | 1438 | from __future__ import division, absolute_import, print_function
import os
from numpy.distutils.fcompiler import FCompiler
compilers = ['LaheyFCompiler']
class LaheyFCompiler(FCompiler):
compiler_type = 'lahey'
description = 'Lahey/Fujitsu Fortran 95 Compiler'
version_pattern = r'Lahey/Fujitsu Fortran 95 Compiler Release (?P<version>[^\s*]*)'
executables = {
'version_cmd' : ["<F90>", "--version"],
'compiler_f77' : ["lf95", "--fix"],
'compiler_fix' : ["lf95", "--fix"],
'compiler_f90' : ["lf95"],
'linker_so' : ["lf95", "-shared"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"]
}
module_dir_switch = None #XXX Fix me
module_include_switch = None #XXX Fix me
def get_flags_opt(self):
return ['-O']
def get_flags_debug(self):
return ['-g', '--chk', '--chkglobal']
def get_library_dirs(self):
opt = []
d = os.environ.get('LAHEY')
if d:
opt.append(os.path.join(d, 'lib'))
return opt
def get_libraries(self):
opt = []
opt.extend(['fj9f6', 'fj9i6', 'fj9ipp', 'fj9e6'])
return opt
if __name__ == '__main__':
from distutils import log
log.set_verbosity(2)
from numpy.distutils.fcompiler import new_fcompiler
compiler = new_fcompiler(compiler='lahey')
compiler.customize()
print(compiler.get_version())
| bsd-3-clause |
db0/Doomtown-Reloaded-OCTGN | o8g/Scripts/customscripts.py | 1 | 127061 | # Python Scripts for the Star Wards LCG definition for OCTGN
# Copyright (C) 2013 Konstantine Thoukydides
# This python script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
###==================================================File Contents==================================================###
# This file contains the autoscripting for cards with specialized effects. So called 'CustomScripts'
# * UseCustomAbility() is used among other scripts, and it just one custom ability among other normal core commands
# * CustomScipt() is a completely specialized effect, that is usually so unique, that it's not worth updating my core commands to facilitate it for just one card.
# Remote Functions are custom functions coming from specific cards which usually affect other players and are called via remoteCall()
###=================================================================================================================###
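# An illustrative sketch of the remote-function pattern mentioned above (the
# scenario is hypothetical): a script that boots a dude controlled by another
# player must run on that player's machine, which is what remoteCall() does:
#   remoteCall(targetDude.controller, 'boot', [targetDude])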
def UseCustomAbility(Autoscript, announceText, card, targetCards = None, notification = None, n = 0):
mute()
announceString = ''
debugNotify(">>> UseCustomAbility() with Autoscript: {}".format(Autoscript)) #Debug
if card.name == "Plasma Drill":
targetDeed = findTarget('Targeted-atDeed',card = card)
if len(targetDeed) == 0: return 'ABORT'
production = compileCardStat(targetDeed[0], stat = 'Production')
        if not production: notify(":> {} uses the plasma drill on it, but it has no production, so that was fairly useless, wasn't it?".format(me))
else: remoteCall(targetDeed[0].owner,'PlasmaDrill',[targetDeed[0]])
elif card.name == "Allie Hensman":
remoteCall(targetCards[0].controller,'AllieHensmanXP',[targetCards[0],card])
### F&F ###
elif card.name == "Desolation Row":
leader = targetCards[0]
if leader.group == table and (leader.highlight == AttackColor or leader.highlight == InitiateColor):
count = compileCardStat(leader, stat = 'Bullets')
if count > 4: count = 4
me.GhostRock += count
leader.markers[mdict['Bounty']] += 2
notify("{} completes the {} job and gains {} ghost rock, while {}'s bounty increases by 2".format(me,card,count,leader))
        else: notify("{} completes the {} job but gains nothing since {} is not in their posse anymore".format(me,card,leader))
elif card.name == "Mirror, Mirror":
#targetCards = findTarget('DemiAutoTargeted-atDude-isParticipating-choose1',card = card, choiceTitle = "Choose which dude to mirror.")
#if not len(targetCards): return 'ABORT'
huckster = fetchHost(card)
target = targetCards[0]
hucksterBullets = compileCardStat(huckster, stat = 'Bullets')
targetBullets = compileCardStat(target, stat = 'Bullets')
if re.search(r'-isFirstCustom',Autoscript):
if hucksterBullets < targetBullets: plusBulletShootout(huckster, count = targetBullets - hucksterBullets, silent = True)
elif hucksterBullets > targetBullets: minusBulletShootout(huckster, count = hucksterBullets - targetBullets, silent = True)
if fetchDrawType(target) == 'Draw' and fetchDrawType(huckster) == 'Stud':
TokensX('Remove999Shootout:Stud', '', huckster)
if huckster.properties['Draw Type'] == 'Stud': TokensX('Put1Shootout:Draw', '', huckster)
elif fetchDrawType(target) == 'Stud' and fetchDrawType(huckster) == 'Draw':
TokensX('Remove999Shootout:Draw', '', huckster)
if huckster.properties['Draw Type'] == 'Draw': TokensX('Put1Shootout:Stud', '', huckster)
notify("{} sets {}'s bullets to {} {}".format(me,huckster,targetBullets, fetchDrawType(target)))
else:
if targetBullets < hucksterBullets: plusBulletShootout(target, count = hucksterBullets - targetBullets, silent = True)
elif targetBullets > hucksterBullets: minusBulletShootout(target, count = targetBullets - hucksterBullets, silent = True)
if fetchDrawType(huckster) == 'Draw' and fetchDrawType(target) == 'Stud':
TokensX('Remove999Shootout:Stud', '', target)
if target.properties['Draw Type'] == 'Stud': TokensX('Put1Shootout:Draw', '', target)
elif fetchDrawType(huckster) == 'Stud' and fetchDrawType(target) == 'Draw':
TokensX('Remove999Shootout:Draw', '', target)
if target.properties['Draw Type'] == 'Draw': TokensX('Put1Shootout:Stud', '', target)
notify("{} sets {}'s bullets to {} {}".format(me,target,hucksterBullets, fetchDrawType(huckster)))
elif card.name == 'Felix Amador':
me.piles['Deck'].addViewer(me)
whisper("The top card of your deck is {} ({} of {})".format(me.piles['Deck'].top(),fullrank(me.piles['Deck'].top().Rank),fullsuit(me.piles['Deck'].top().Suit)))
me.piles['Deck'].removeViewer(me)
### Frontier Justice ###
elif card.name == "Confession":
if not len(targetCards): return 'ABORT'
targetDude = targetCards[0]
dude = fetchHost(card)
skillCheck = fetchSkills(dude)
targetDude.markers[mdict['Bounty']] += skillCheck[0][1]
        notify("{} increased {}'s bounty by {}'s Blessed skill rating".format(card.name, targetDude, dude))
### IOUF ###
elif card.name == 'Marcia Ridge':
notify(":> {} use {}".format(announceText,targetCards[0]))
remoteCall(targetCards[0].controller,'MarciaRidgeStart',[card,targetCards[0]])
#useAbility(targetCards[0])
elif card.name == 'Eagle Wardens':
bootingDude = targetCards[0]
boot(bootingDude, silent = True)
if compileCardStat(bootingDude, 'Influence') >= 2: cardDraw = 3
else: cardDraw = 2
drawMany(me.deck, cardDraw, silent = True)
if len(me.hand):
choicehand = None
while choicehand == None:
choicehand = askCardFromList([c for c in me.hand],'Choose which card to discard or ace from your hand',card.Name)
destination = SingleChoice("Discard or Ace {}?".format(choicehand.Name), ['Discard','Ace'])
if not destination: destination = 0
if destination:
choicehand.moveTo(me.piles['Boot Hill'])
verb = 'ace'
else:
choicehand.moveTo(me.piles['Discard Pile'])
verb = 'discard'
notify("{} booted {} to draw {} cards and {} {} from their hand".format(me,bootingDude,cardDraw,verb,choicehand))
elif card.name == 'Den of Thieves':
if not len(targetCards):
            whisper(":::ERROR::: You did not select a valid target to use the Den of Thieves ability")
return 'ABORT'
targetDude = targetCards[0]
drawHandCards = [c for c in table if c.highlight == DrawHandColor and c.controller == me]
if len(drawHandCards) != 5:
# we want to let user use this ability even though there is no draw hand on the table (outside lowball and shootouts),
# in case he fogot to do that or his opponent pressed ctrl+W too quickly
if confirm('There is no draw hand on the table.\nDo you want to use the ability anyway?'):
targetDude.markers[mdict['Bounty']] += 1
notify('{} boots {} to place 1 bounty on {} even though he/she did not have a draw hand revealed on the table'.format(me, card, targetDude))
return
whisper(":::ERROR::: You can only use the den of thieves if you have a draw hand revealed on the table")
return 'ABORT'
targetDude.markers[mdict['Bounty']] += 1
notify('{} boots {} to place 1 bounty on {}'.format(me, card, targetDude))
cxp, cyp = drawHandCards[2].position
if not len([c for c in table if c.model == 'cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1' and c.controller == me]):
if type == 'shootout':
if playeraxis == Xaxis:
cheatinNotice = table.create("cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1",cxp, cyp - 30, 1, False)
elif playeraxis == Yaxis:
cheatinNotice = table.create("cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1",cxp, cyp - 30, 1, False)
else:
cheatinNotice = table.create("cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1",cxp, 0, 1, False)
else:
if playeraxis == Xaxis:
cheatinNotice = table.create("cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1",cxp, cyp + 30, 1, False)
elif playeraxis == Yaxis:
cheatinNotice = table.create("cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1",cxp, cyp + 30, 1, False)
else:
cheatinNotice = table.create("cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1",cxp, 30, 1, False)
notify("{} make their hand illegal and increase its rank by 1".format(announceText))
### TLS ###
elif card.name == 'Phantom Fingers' and re.search(r'(Gadget|Mystical)',targetCards[0].Keywords): DrawX('Draw1Card', announceText, card, notification = 'Quick')
### SB7-9 ###
elif card.name == "Morgan Stables":
drawMany(me.deck, 1, silent = True)
choicehand = None
while choicehand == None:
choicehand = askCardFromList([c for c in me.hand],'Choose which card to discard from your hand',card.Name)
choicehand.moveTo(me.piles['Discard Pile'])
notify("{} boot {} to draw 1 card and discard {} from their hand".format(announceText,card,choicehand))
elif card.name == "Xemo's Turban":
if card.orientation == Rot90: return 'ABORT'
if pull()[1] == 'Clubs':
notify(":> {}'s {} has malfunctioned and provides no more insights into the future".format(fetchHost(card),card))
elif payCost(1) == 'ABORT':
notify(":> {} remembered he didn't have the money to pay for Xemo's Turban, so they just pulled a card for nothing.".format(me))
else:
drawMany(me.deck, 1, silent = True)
choicehand = None
while choicehand == None:
choicehand = askCardFromList([c for c in me.hand],'Choose which card to discard from your hand',card.Name)
choicehand.moveTo(me.piles['Discard Pile'])
remoteCall(me,'boot',[card])
notify("{} boot {} to draw 1 card and discard {} from their hand".format(announceText,card,choicehand))
elif card.name == "Arnold Stewart":
topCards = list(me.piles['Deck'].top(5))
for c in topCards: c.moveTo(me.piles['Discard Pile'])
notify(":> {} discards {}".format(card,[c.Name for c in topCards]))
availDeeds = [c for c in topCards if re.search(r'Out of Town',c.Keywords)]
if availDeeds and card.orientation == Rot0:
choiceDeed = askCardFromList(availDeeds,'These were the available Out of Town deeds that were at the top of your deck. You may boot Arnold to take one in your hand',card.Name)
notify("{} boot {} to take {} to their hand".format(announceText,card,choiceDeed))
if choiceDeed:
choiceDeed.moveTo(me.hand)
boot(card,True)
elif not availDeeds and card.orientation == Rot0:
notify(":> {} didn't discover any good spots out of town.".format(card))
elif card.name == "Fool's Gold":
potCard = getPotCard(True)
if potCard:
if potCard.markers[mdict['Ghost Rock']]:
me.GhostRock += 1
potCard.markers[mdict['Ghost Rock']] -= 1
else: me.GhostRock += 1 # If the potcard is not there for some reason (bug) we just give the player 1 GR
notify("{} take one Ghost Rock from the pot".format(announceText))
### Foul Play ###
elif card.name == "Old Marge's Manor":
if not len(targetCards): return 'ABORT'
targetCard = targetCards[0]
count = targetCard.markers[mdict['Ghost Rock']]
card.markers[mdict['Ghost Rock']] += count
targetCard.markers[mdict['Ghost Rock']] = 0;
notify('{} moved {} Ghost Rock from {} to itself.'.format(card.name, count, targetCard))
### Ghost Town ###
elif card.name == "Sight Beyond Sight":
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else:
choice = SingleChoice("Choose which player to hex", [pl.name for pl in opponents])
if choice == None: return 'ABORT'
else: player = opponents[choice]
remoteCall(player,'SightBeyondSightStart',[card])
elif card.name == "Technological Exhibition":
jobResults = eval(getGlobalVariable('Job Active'))
leader = Card(num(jobResults[3]))
if leader.group != table:
whisper("Your leader is gone and cannot invent anything anymore!")
return
handG = []
discG = []
for c in me.hand:
if re.search(r'Gadget',c.Keywords):
handG.append(c)
for c in me.piles['Discard Pile']:
if re.search(r'Gadget',c.Keywords):
discG.append(c)
if len(handG) and len(discG):
choice = SingleChoice("Choose Gadget Seek Option",['Hand ({} Gadgets Available)'.format(len(handG)),'Discard Pile ({} Gadgets Available)'.format(len(discG))])
if choice == None: return 'ABORT'
elif len(handG): choice = 0
elif len(discG): choice = 1
else:
notify("{} didn't have any gadgets in their hand or discard pile to exhibit!".format(me))
return
if choice == 0:
if len(handG) == 1: gadget = handG[0]
else: gadget = askCardFromList([c for c in handG],"Which of your Gadgets in your hand will you exhibit?",card.Name)
else:
if len(discG) == 1: gadget = discG[0]
else: gadget = askCardFromList([c for c in discG],"Which of your Gadgets in your discard pile will you exhibit?",card.Name)
if not gadget: return 'ABORT'
playcard(gadget,costReduction = 5, preHost = leader) # We make sure it's the leader who tries to make the gadget.
mark = Card(eval(getGlobalVariable('Mark')))
if mark.Name == 'Town Square':
gadget.markers[mdict['ControlPlus']] += 1
notify("{}'s {} succeeds at a technological exhibition in the Town Square. They invented a {} which gained a permanent Control Point!".format(me,leader,gadget))
else:
notify("{}'s {} succeeds at a technological exhibition. They invented a {}.".format(me,leader,gadget))
elif card.model == '28b4125d-61a9-4714-870c-2f27e4872e9f': # Turtle's Guard
if len([c for c in table if c.model == 'cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1' and c.controller != me]): # If the opponent is cheatin' we want to create a harrowed spirit token.
token = spawnNature()
participateDude(token)
token.markers[mdict['Harrowed']] += 1
notify("{} marks all their dudes as harrowed for this round and spawns a harrowed nature spirit".format(me))
else:
notify("{} marks all their dudes as harrowed for this round".format(me))
elif card.name == 'Dr. Dayl Burnett':
gadgets = findTarget('DemiAutoTargeted-atGadget-fromHand-choose1',card = card, choiceTitle = "Choose which gadget to invent.")
if len(gadgets):
gadget = gadgets[0]
# playcard(gadget,preHost = card)
if payCost(num(gadget.Cost), loud) == 'ABORT' : return # Check if the player can pay the cost. If not, abort.
gadgetPull = pull(silent = True) # pull returns a tuple with the results of the pull
notify("{} attempted to manufacture a {} and pulled a {} {}".format(card,gadget,fullrank(gadgetPull[0]), fullsuit(gadgetPull[1])))
attachCard(gadget,card)
elif card.name == 'Sun-Touched Raven':
me.deck.visibility = 'me'
topCards = list(me.deck.top(5))
        for i in range(3): # 'iter' shadowed the builtin; progress is shown 1-based
            notify(":> {}'s {} is helping them choose a card to discard ({}/3)".format(me,card,i + 1))
            choiceCard = askCardFromList(topCards,"Double click on any card to discard it, or close this window to finish ({}/3)".format(i + 1))
            if not choiceCard: break
            else:
                topCards.remove(choiceCard)
                choiceCard.moveTo(me.piles['Discard Pile'])
me.deck.visibility = 'None'
shamanHost = fetchHost(card)
if shamanHost.orientation == Rot0 and confirm("Do you want to boot this shaman to draw a card?"):
shamanHost.orientation = Rot90
drawMany(me.deck,count = 1, silent = True)
notify(":> {} booted {} to draw 1 card".format(me,shamanHost))
elif card.name == 'Inner Struggle':
remoteCall(card.controller,'randomDiscard',[card.controller.hand])
    # There Comes a Reckoning
elif card.name == "Intercession":
if re.search(r'-isFirstCustom',Autoscript):
if getGlobalVariable('Shootout') == 'True':
dudes = findTarget('DemiAutoTargeted-atDude-targetMine-isParticipating-choose1')
TokensX('Put1Shootout:Stud', '', dudes[0])
else:
dudes = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
TokensX('Put1Noon:Stud', '', dudes[0])
else:
mDude = findTarget("DemiAutoTargeted-atDude-targetMine-isParticipating-choose1")
oDude = findTarget("DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1")
TokensX('Put2BulletNoonPlus', '', mDude[0])
boot(mDude[0], forced = 'unboot')
remoteCall(oDude[0].controller, 'Intercession', oDude)
elif card.name == "Raven's Ruin":
dudes = findTarget('DemiAutoTargeted-atDude-targetMine-choose1', card = card, choiceTitle = "Choose which dude to move")
ModifyStatus('MoveTarget-moveToHere', '', card, dudes)
deed = fetchHost(card)
if deed.owner != me and deed.orientation == Rot0 and compileCardStat(deed, 'Control') > 0:
if confirm("Do you want to boot this deed to get 1 GR?"):
me.GhostRock += 1
notify("{} gains 1 GR".format(me))
boot(deed, forced = 'boot')
elif card.name == "Censure":
out = findTarget('DemiAutoTargeted-atOutfit-choose1', card = card, choiceTitle = "Choose outfit of the player whose hand rank will be increased")
notify("{} hand rank is increased by 2".format(out[0].controller))
if getGlobalVariable('Shootout') == 'True':
            notify("{}'s casualties are reduced by 3".format(me))
if confirm("Do you want to send one of opposing dudes home"):
dudes = findTarget("DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1")
remoteCall(dudes[0].controller, "Censure", [card, dudes])
elif card.name == "Sentinel":
host = fetchHost(card)
TokensX('Put1High Noon:Stud', '',host)
TokensX('Put1BulletNoonPlus', '',host)
TokensX('Put1Sentinel', '', host)
TokensX('Put1High Noon:Skill Bonus:1', '', host)
        notify("You may move {} to an adjacent location or the Town Square".format(host))
elif card.name == "Ezekiah Grimme":
if re.search(r'-isFirstCustom',Autoscript):
spell = me.piles['Discard Pile'].top()
if not (re.search(r'Hex',spell.Keywords) or re.search(r'Miracle',spell.Keywords) or re.search(r'Spirit',spell.Keywords)):
notify(" You need a spell to use this ability")
return
playcard(spell)
boot(spell, forced = 'boot')
else:
dude = findTarget("DemiAutoTargeted-atHuckster_or_Shaman_or_Blessed-targetMine-isUnbooted-choose1")
if not len(dude):
notify("Your dude has to be unbooted to use this ability")
return
boot(dude[0], forced = 'boot')
topCards = list(me.piles['Deck'].top(5))
for c in topCards: c.moveTo(me.piles['Discard Pile'])
notify(":> {} discards {}".format(card,[c.Name for c in topCards]))
availSpells = [c for c in topCards if re.search(r'Spell',c.Type)]
playSpells = []
for c in table:
if c.type == "Spell" and c.controller == me:
playSpells.append(c)
            # Iterate over a copy; removing items from a list while iterating it skips elements
            for spell in list(availSpells):
                a = 0
                for playS in playSpells:
                    if spell.name == playS.name:
                        a += 1
                if a > 1:
                    availSpells.remove(spell)
if availSpells:
choiceSpell = askCardFromList(availSpells,'These were the available Spells that were at the top of your deck. Do you want to play one on your dude',card.Name)
notify("{} decided to play {} on {} ".format(me,choiceSpell, dude[0]))
playcard(choiceSpell, preHost=dude[0])
else:
notify(":> {} didn't discover any spells available to play.".format(card))
#2t2d
elif card.name == "Property Is Theft":
        if getGlobalVariable('Shootout') == 'True':
            drawMany(me.Deck, 1, silent = True)
            if confirm("Do you want to discard card(s)?"):
                disloop = 2
                while disloop:
                    chosenCard = askCardFromList([c for c in me.hand],"Choose which card to discard from your hand")
                    if not chosenCard: break # Player closed the dialog without choosing
                    chosenCard.moveTo(me.piles['Discard Pile'])
                    disloop -= 1
        else:
            if confirm("Do you want to discard card(s)?"):
                chosenCard = askCardFromList([c for c in me.hand],"Choose which card to discard from your hand")
                if chosenCard: chosenCard.moveTo(me.piles['Discard Pile'])
elif card.model == "ae22bba2-cf1e-4038-b7bb-1d3429c10004": #Ying-Ssi Chieh T'ang
drawhandMany(me.Deck, 1, True)
discardCards = findTarget('DemiAutoTargeted-isDrawHand-targetMine-choose1', choiceTitle = "Choose a card to discard from your hand")
discardCards[0].moveTo(discardCards[0].owner.piles['Discard Pile'])
for c in table:
if c.highlight == DrawHandColor and c.controller == me: c.moveTo(me.piles['Draw Hand'])
revealHand(me.piles['Draw Hand'], type = 'shootout') # We move the cards back ot the draw hand and reveal again, letting the game announce the new rank.
elif card.name == 'Murdered in Tombstone':
disCR = [c for c in me.piles['Discard Pile'] if ("Action" in c.Type and re.search(r'Cheatin',c.Text))]
deckCR = [c for c in me.piles['Deck'] if ("Action" in c.Type and re.search(r'Cheatin',c.Text))]
string = len(disCR)
cards = disCR + deckCR
if len(cards) == 0:
notify("You have no cards you can fetch with this ability")
return
        card = askCardFromList(cards, "Choose a Cheatin' Resolution to fetch. If you do, you will have to discard a card from your hand. The first {} are in your discard pile.".format(string))
        if card: # askCardFromList() returns None if the player closes the dialog
            card.moveTo(me.hand)
            choicehand = askCardFromList([c for c in me.hand],'Choose which card to discard from your hand',card.Name)
            choicehand.moveTo(me.piles['Discard Pile'])
            notify("{} fetched {} using Murdered in Tombstone ability and discarded {}".format(me, card.name, choicehand))
opponents = [pl for pl in getActivePlayers() if (pl != me or len(getActivePlayers()) == 1)]
for opponent in opponents:
remoteCall(opponent,'Murdered',card)
return
elif card.name == "Burn 'Em Out":
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else:
choice = SingleChoice("Choose player ", [pl.name for pl in opponents])
if choice == None: return 'ABORT'
else: player = opponents[choice]
remoteCall(player,'BurnOut',card)
elif card.name == 'Heist':
mark = Card(eval(getGlobalVariable('Mark')))
production = compileCardStat(mark, stat = 'Production')
me.GhostRock += production
if mark.controller != me:
jobResults = eval(getGlobalVariable('Job Active'))
leader = Card(num(jobResults[3]))
if leader:
                leader.markers[mdict['Bounty']] += 1 # 'dude' was undefined here; the bounty goes on the leader
boot(leader , forced = 'unboot')
notify("{} gained {} GR because of successful {}, {} unboots as heist happened at opponents location.".format(me, production, card.name, leader.name))
notify("{} gained {} GR because of successful {}.".format(me, production, card.name))
elif card.name == 'Ricochet':
for card in table:
if card.controller == me:
if card.model == 'cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1': # The cheatin' icon
return
drawMany(me.deck, 1, silent = True)
notify('{} does not take damage this turn and drew a card.'.format(me))
elif card.name == 'Kevin Wainwright (Exp.1)':
if confirm('Is there opponents dude with grit 11 or higher at your location?'):
drawMany(me.deck, 2, silent = True)
notify('{} drew to card after {} moved to location with high grit dude.'.format(me, card.name))
elif card.name == 'Guiding Wind':
dudes = findTarget('DemiAutoTargeted-atDude-isParticipating-choose1')
        if not len(dudes): return 'ABORT'
        dude = dudes[0]
        influence = compileCardStat(dude, stat = 'Influence') # was compilecardstat(card, ...): wrong case (NameError) and wrong card
        if influence > 3:
            modifier = 3
        else: modifier = influence
        bullets = compileCardStat(dude, stat = 'Bullets')
        if bullets < modifier:
            while bullets != modifier:
                TokensX('Put1BulletShootoutPlus', '', dude)
                bullets = compileCardStat(dude, stat = 'Bullets')
        else:
            while bullets != modifier:
                TokensX('Put1BulletShootoutMinus', '', dude)
                bullets = compileCardStat(dude, stat = 'Bullets')
elif card.name =='High Stakes Haven':
randomC = me.hand.random()
if me.GhostRock <= 0:
if randomC == None:
notify('{} is broke and without cards to pay for {}.'.format(me, card))
else:
randomC.moveTo(me.piles['Discard Pile'])
notify('{} is broke, thus discarding random card for {}.'.format(me, card))
elif randomC == None:
me.GhostRock -= 1
notify('{} is without cards, thus paying 1 Ghost Rock for {}.'.format(me, card))
elif confirm('High Stakes Haven trait: Do you want to pay 1 Ghost Rock?\nIf No is selected, random card will be discarded instead.'):
me.GhostRock -= 1
notify('{} chose to pay 1 Ghost Rock for {}.'.format(me, card))
else:
randomC.moveTo(me.piles['Discard Pile'])
notify('{} chose to discard random card for {}.'.format(me, card))
#ausc
debugNotify("<<< UseCustomAbility() with announceString: {}".format(announceString)) #Debug
return announceString
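# A minimal sketch of how multi-effect cards are routed (the autoscript string
# is hypothetical): a card's XML autoscript can carry a '-isFirstCustom' marker,
# which the branches above probe with re.search(), so 'UseCustomAbility-isFirstCustom'
# selects a card's first custom effect and the unmarked string selects the other.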
def CustomScript(card, action = 'PLAY'): # Scripts that are complex and fairly unique to specific cards, not worth making a whole generic function for them.
debugNotify(">>> CustomScript() with action: {}".format(action)) #Debug
mute()
discardPile = me.piles['Discard Pile']
deck = me.piles['Deck']
bootHill = me.piles['Boot Hill']
if card.name == "Bottom Dealin'" and action == 'PLAY':
debugNotify("Bottom Dealin' Script")
drawHandPlayers = []
for c in table:
if c.controller != me and c.controller not in drawHandPlayers and c.highlight == DrawHandColor: drawHandPlayers.append(c.controller)
if len(drawHandPlayers) == 1: targetPL = drawHandPlayers[0]
elif len(drawHandPlayers) == 0:
whisper(":::ERROR::: No valid player found to bottom deal. Aborting!")
return 'ABORT'
else:
choice = SingleChoice("Please choose which of your opponents you're bottom dealin'.", [pl.name for pl in drawHandPlayers])
if choice == None: return 'ABORT'
targetPL = drawHandPlayers[choice]
passPileControl(deck,targetPL)
passPileControl(discardPile,targetPL)
remoteCall(targetPL,'BottomDealing',[me,card])
elif card.name == "Coachwhip!" and action == 'PLAY':
debugNotify("Coachwhip Script")
targetDude = [c for c in table if c.targetedBy and c.targetedBy == me and c.controller != me and c.Type == 'Dude']
if not len(targetDude): notify(":> No target selected. Cheating player has to select one of their dudes to boot or ace")
else:
if getGlobalVariable('Shootout') == 'True':
                aceTarget(targetCards = targetDude[:1], silent = True) # ace the chosen dude, not everyone else in the selection
notify(":> {} is caught cheating in a shootout while {} has a legal hand and the Coachwhip forces them to ace {}".format(targetDude[0].controller,me,targetDude[0]))
else:
remoteCall(targetDude[0].controller,'boot',[targetDude[0]])
notify(":> {} is caught cheating in lowball while {} has a legal hand and the Coachwhip forces them to boot {}".format(targetDude[0].controller,me,targetDude[0]))
elif card.model == "8e50a03b-b42c-4207-9d0d-7a144ad31e3b" and action == 'USE': # Elander Boldman nonxp
if getGlobalVariable('Shootout') != 'True':
whisper(":::ERROR::: {} can only use his shootout ability during shootouts".format(card))
return 'ABORT'
foundDude = False
hostCards = eval(getGlobalVariable('Host Cards'))
for c in table:
attachedWG = [Card(att_id) for att_id in hostCards if hostCards[att_id] == c._id and re.search(r'Weapon',Card(att_id).Keywords) and re.search(r'Gadget',Card(att_id).Keywords)]
if c.targetedBy and c.targetedBy == me and c.Type == 'Dude' and c.controller == me and len(attachedWG): # If we've targeted a dude with a weapon gadget...
rank,suit = pull(silent = True)
notify("{} attempts to optimize {} and pulls a {} {}.".format(card,attachedWG[0],rank, suit))
if suit == 'Clubs':
notify(":> Oops! The {} explodes. {} is discarded".format(attachedWG[0],c))
if confirm("You pulled a club! Go ahead and discard {}?".format(c.name)): discard(c,silent = True)
else:
attachedWG[0].markers[mdict['BulletShootoutPlus']] += 3
notify(":> The tweaks done on {} give it +3 bullet bonus for this shootout".format(attachedWG[0],c))
foundDude = True
break
if not foundDude:
whisper(":::ERROR::: No dude targeted. Aborting!")
return 'ABORT'
elif card.name == "Jarrett Blake" and action == 'USE':
if getGlobalVariable('Shootout') != 'True':
whisper(":::ERROR::: {} can only use his shootout ability during shootouts".format(card))
return 'ABORT'
hostCards = eval(getGlobalVariable('Host Cards'))
if not len([Card(att_id) for att_id in hostCards if hostCards[att_id] == card._id and re.search(r'Horse',Card(att_id).Keywords)]):
whisper(":::ERROR::: {} can only use his shootout ability while they have a horse attached".format(card))
return 'ABORT'
foundDude = False
for c in table:
if c.targetedBy and c.targetedBy == me and c.Type == 'Dude' and c.controller == me and (c.highlight == AttackColor or c.highlight == DefendColor): # If we've targeted a dude in a shootout
x,y = c.position
Jx,Jy = card.position
c.moveToTable(Jx,Jy)
card.moveToTable(x,y)
orgAttachments(card)
orgAttachments(c)
participateDude(card)
leavePosse(c)
foundDude = True
notify("{} switches places with {}".format(card,c))
break
if not foundDude:
whisper(":::ERROR::: No dude targeted. Aborting!")
return 'ABORT'
elif card.name == "Morgan Cattle Co." and action == 'USE':
targetDeed = findTarget('DemiAutoTargeted-atDeed-fromHand-choose1')
if not len(targetDeed):
whisper(":::ERROR::: You have no deeds in your hand to attempt to build")
return 'ABORT'
targetDude = findTarget('DemiAutoTargeted-atDude-isUnbooted-choose1-targetMine')
if not len(targetDude):
whisper(":::ERROR::: You have no available dudes in play to build that deed")
return 'ABORT'
reduction = compileCardStat(targetDude[0], 'Influence')
playcard(targetDeed[0],costReduction = reduction)
x,y = targetDeed[0].position
boot(targetDude[0],forced = 'boot')
targetDude[0].moveToTable(x + cardDistance(), y)
orgAttachments(targetDude[0])
notify("{} uses {} and boots {} to build {}, reducing its cost by {}.".format(me,card,targetDude[0],targetDeed[0],reduction))
elif card.name == "The Union Casino" and action == 'USE':
targetDude = findTarget('Targeted-atDude')
if not len(targetDude):
            whisper(":::ERROR::: You need to target an unbooted dude at this deed to use this ability")
return 'ABORT'
#boot(targetDude[0],silent = True) # It doesn't actually boot the dude. Huh.
myBet = askInteger("How much ghost rock do you want to bet on the Union Casino?",4)
if payCost(myBet, loud) == 'ABORT': return 'ABORT'
if myBet <= 3: notify(":> {} felt the need to burn some money by wasting {} Ghost Rock on Union Casino. Nothing else happens".format(me,myBet))
else:
notify(":> {} boots {} and uses {}'s ability to bet {}".format(me,targetDude[0],card,myBet))
for player in getActivePlayers():
if player != me or len(getActivePlayers()) == 1: remoteCall(player,'UnionCasino',[card,myBet,targetDude[0],'others bet'])
elif card.name == "This is a Holdup!" and action == 'PLAY':
targetDeed = findTarget('Targeted-atDeed')
if not len(targetDeed):
whisper(":::ERROR::: You need to target a deed with production to steal from first. Aborting.")
return 'ABORT'
deed = targetDeed[0]
if deed.owner.GhostRock == 0:
            whisper(":::ERROR::: {} has no money in their bank to steal. Aborting".format(deed.owner))
return 'ABORT'
deedProd = compileCardStat(deed, stat = 'Production')
if deedProd == 0:
            whisper(":::ERROR::: {} has no production to steal. Aborting".format(deed))
return 'ABORT'
targetDude = findTarget('Targeted-atDude-isUnbooted')
if not len(targetDude):
            whisper(":::ERROR::: You need to target an unbooted dude at this deed to use this ability. Aborting.")
return 'ABORT'
boot(targetDude[0],silent = True, forced = 'boot')
if deedProd > deed.owner.GhostRock:
notify(":> {} doesn't have the full {} ghost rock to steal, so {} is taking the {} possible.".format(deed.owner,deedProd,card,deed.controller.GhostRock))
me.GhostRock += deed.owner.GhostRock # We cannot steal more money than the target player has.
targetDude[0].markers[mdict['Bounty']] += deed.owner.GhostRock
deed.owner.GhostRock = 0
else:
notify(":> {} is holding up {} and taking {} ghost rock from {}.".format(targetDude[0],deed,deedProd,deed.owner))
me.GhostRock += deedProd # We cannot steal more money than the target player has.
deed.owner.GhostRock -= deedProd
targetDude[0].markers[mdict['Bounty']] += deedProd
elif card.name == "Unprepared" and action == 'PLAY':
targetDude = findTarget('DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1')
if not len(targetDude):
whisper(":::ERROR::: You need to target an dude in this shootout. Aborting.")
return 'ABORT'
boot(targetDude[0],silent = True, forced = 'boot')
TokensX('Put1Unprepared', '', targetDude[0])
dudeGoods = findTarget('AutoTargeted-atGoods_or_Spell-onAttachment', card = targetDude[0])
for attachment in dudeGoods:
boot(attachment,silent = True, forced = 'boot')
TokensX('Put1Unprepared', '', attachment)
targetDude[0].markers[mdict['BulletShootoutMinus']] += 1
notify("{} has been caught with their pants down.".format(targetDude[0]))
elif card.name =="Cheatin' Varmint" and action == 'PLAY':
if me.GhostRock - 5 < 0:
if not confirm("You do not have enough Ghost Rock to use this ability. Proceed?"):
notify("{} didn't have enough Ghost Rock, {} Should return Cheatin' Varmint to their hand".format(me,me))
return 0
me.GhostRock -= 5
notify("{} pays 5 Ghost Rock to reduce a player's draw rank by 2 hand ranks".format(me))
### SB 1-3 ###
if card.name == "Make 'em Sweat" and action == 'PLAY':
myDude = findTarget('DemiAutoTargeted-atDude-targetMine-isUnbooted-isParticipating-choose1', choiceTitle = "Choose which of your dudes to boot for {}".format(card.name))
opDude = findTarget('DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1', choiceTitle = "Choose which dude to affect with {}".format(card.name))
if len(myDude) == 0 or len(opDude) == 0: return 'ABORT'
boot(myDude[0], silent = True)
bulletReduction = compileCardStat(myDude[0], stat = 'Bullets')
if bulletReduction: minusBulletShootout(opDude[0],count = bulletReduction)
if compileCardStat(opDude[0], stat = 'Bullets') == 0 and opDude[0].orientation == Rot0:
boot(opDude[0], silent = True)
sweatedTXT = ' {} is booted.'.format(opDude[0])
else: sweatedTXT = ''
notify(":> {} boots to reduce {}'s bullets by {}.{}".format(myDude[0],opDude[0],bulletReduction,sweatedTXT))
elif card.name == "The R&D Ranch" and action == 'USE':
rank,suit = pull(silent = True)
me.GhostRock += 2 # You always get the GR first anyway.
if suit == 'Clubs':
notify(":> {} tries to use {} and pulled a {} of {}. It all goes horribly wrong! They have to discard the deed and all cards at that location".format(me,card,rank,suit))
if confirm("You pulled a club! Proceed to discard {}?".format(card.name)): discard(card)
else: notify(":::INFO::: {} did not discard {} even though they pulled a {}".format(me,card,suit))
else:
notify(":> {} succesfully used {} by pulling a {} of {} and produced some very tasty meat indeed. They gain 2 ghost rock.".format(me,card,rank,suit))
elif card.name == "Gang Yi" and action == 'USE':
if getGlobalVariable('Shootout') != 'True':
whisper(":::ERROR::: {} can only use his shootout ability during shootouts".format(card))
return 'ABORT'
foundDude = False
for c in table:
if c.targetedBy and c.targetedBy == me and c.Type == 'Dude' and c.controller == me and (c.highlight == AttackColor or c.highlight == DefendColor): # If we've targeted a dude in a shootout
x,y = c.position
Jx,Jy = card.position
c.moveToTable(Jx,Jy)
card.moveToTable(x,y)
orgAttachments(card)
orgAttachments(c)
participateDude(card)
leavePosse(c)
foundDude = True
notify("{} switches places with {}".format(card,c))
break
if not foundDude:
whisper(":::ERROR::: No dude targeted. Aborting!")
return 'ABORT'
elif card.name == "Telepathy Helmet" and action == 'USE':
opponents = [pl for pl in getActivePlayers() if (pl != me or len(getActivePlayers()) == 1)]
if len(opponents) == 1: choice = 0
else: choice = SingleChoice("Choose which player's hand to look at",[pl.name for pl in opponents])
if choice == None: return
remoteCall(opponents[choice],'TelepathyHelmet',[me,card])
### F&F ###
elif card.name == "This'll Hurt in the Mornin" and action == 'PLAY':
targetCards = findTarget('DemiAutoTargeted-isDrawHand-targetOpponents-choose2',card = card, choiceTitle = "Choose which of your opponent's cards to discard")
if not len(targetCards): return 'ABORT'
if confirm("If your own draw hand illegal?"): remoteCall(targetCards[0].controller,'TWHITM',[targetCards,True])
else: remoteCall(targetCards[0].controller,'TWHITM',[targetCards,False])
elif card.name == "California Tax Office" and action == 'USE':
targetCards = findTarget('Targeted-atDude',card = card, choiceTitle = "Choose which of your opponent's dudes has to pay their taxes")
if not len(targetCards): return 'ABORT'
else: remoteCall(targetCards[0].controller,'TaxOffice',[targetCards[0]])
elif card.name == "The Fixer" and action == 'USE':
for c in me.Deck.top(5): c.moveTo(me.piles['Discard Pile'])
update()
discardCards = [c for c in me.piles['Discard Pile']]
      choice = SingleChoice('Choose one of your discarded cards to take to your hand', makeChoiceListfromCardList(discardCards))
      if choice == None: return 'ABORT'
      notify("{} uses {} to take {} into their hand".format(me,card,discardCards[choice]))
rnd(1,10)
discardCards[choice].moveTo(me.hand)
update()
if re.search(r'Noon Job',discardCards[choice].Text) and discardCards[choice].Type == 'Action': remoteCall(me,'boot',[card]) # Doing remote call, so as to have a chance to finish the animation
### SB 4-6 ###
elif card.name == "Howard Aswell" and action == 'USE':
handCards = [c for c in me.hand]
revealCards(handCards)
while not confirm("You are now revealing your hand to all players. Press Yes to continue, Press No to ping the other players to see if they had enough time to see the cards"):
notify("{} wants to know if it's OK to hide their hand once more".format(me))
for c in handCards: c.moveTo(me.hand)
notify("{} hides their play hand once more".format(me))
for c in me.Deck.top(10): c.moveTo(me.piles['Discard Pile'])
update()
discardCards = [c for c in me.piles['Discard Pile'] if re.search(r'(Ranch|Improvement)',c.Keywords)]
if not len(discardCards):
notify("{} tried to design some Ranches or Improvements but was unsuccesful.".format(card))
else:
choice = askCardFromList(discardCards,"Choose card to retrieve")
if choice == None:
notify("{} chooses not to take a Ranch of Improvement into their hand".format(me))
else:
choicehand = askCardFromList([c for c in me.hand],"Choose card to discard from hand.")
choicehand.moveTo(me.piles['Discard Pile'])
notify("{} uses {} and discards {} to take {} into their hand".format(me,card,choicehand,choice))
choice.moveTo(me.hand)
update()
elif card.name == "Funtime Freddy" and action == 'USE':
notify(":> {} is choosing the two hexes to retrieve with {}".format(me,card))
whisper(":::CHOICE::: Choose first hex to retrieve")
spell1 = askCardFromList([c for c in deck if c.Type == 'Spell' and re.search(r'Hex',c.Keywords)],"Choose first spell to retrieve")
if not spell1:
deck.shuffle()
return 'ABORT'
spell1.moveToTable(cwidth(),0)
spell1.highlight = DrawHandColor
      whisper(":::CHOICE::: Choose second hex to retrieve")
      spell2 = askCardFromList([c for c in deck if c.Type == 'Spell' and re.search(r'Hex',c.Keywords)],"Choose second spell to retrieve")
while not spell2 or spell2.model == spell1.model:
if confirm(":::ERROR::: You need to choose two different Hexes. Abort?"):
deck.shuffle()
return 'ABORT'
else: spell2 = askCardFromList([c for c in deck if c.Type == 'Spell' and re.search(r'Hex',c.Keywords)],"Choose second spell to retrieve")
spell2.moveToTable(-1 * cwidth(),0)
spell2.highlight = DrawHandColor
opponents = [pl for pl in getPlayers() if pl != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else:
choice = SingleChoice("Choose the player who is going to select which spell you get to keep",[pl.name for pl in opponents])
if choice == None: return 'ABORT'
else: player = opponents[choice]
remoteCall(player,'FuntimeFreddyChoose',[card,spell1,spell2])
deck.shuffle()
elif card.model == "294a7ce9-af00-46e1-b33c-aab21ebf3b09" and action == 'USE': # Elander Boldman Xp
if getGlobalVariable('Shootout') != 'True':
whisper(":::ERROR::: {} can only use his shootout ability during shootouts".format(card))
return 'ABORT'
foundGadget = False
hostCards = eval(getGlobalVariable('Host Cards'))
for c in table:
if c.targetedBy and c.targetedBy == me and c.Type == 'Goods' and (Card(hostCards[c._id]).highlight == AttackColor or Card(hostCards[c._id]).highlight == DefendColor): # If we've targeted a gadget with a participating dude...
foundGadget = c
break
if not foundGadget:
whisper(":::ERROR::: No Gadget on a participating dude targeted. Aborting!")
return 'ABORT'
else:
foundGadget.orientation = Rot0
if re.search(r'Experimental',foundGadget.Keywords):
elUnboot = ", then finally unboots {}".format(card)
update()
rnd(1,10)
remoteCall(me,'boot',[card,0,0,True]) # Doing remote call, so as to have a chance to finish the animation
else: elUnboot = "".format(card)
if re.search(r'Weapon',foundGadget.Keywords):
weaponBonus = ", make it provide +1 bullet and make {} a stud".format(Card(hostCards[foundGadget._id]))
foundGadget.markers[mdict['BulletShootoutPlus']] += 1
TokensX('Remove999Shootout:Draw', '', Card(hostCards[foundGadget._id]))
TokensX('Put1Shootout:Stud', '', Card(hostCards[foundGadget._id]))
else: weaponBonus = ""
notify("{} uses {} to unboot {}{}{}".format(me,card,foundGadget,weaponBonus,elUnboot))
elif card.name == "Cookin' Up Trouble" and action == 'PLAY':
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else:
choice = SingleChoice("Choose which player to sabotage", [pl.name for pl in opponents])
if choice == None: return 'ABORT'
else: player = opponents[choice]
remoteCall(player,'CookinTroubleStart',[card])
elif card.name == "Nathan Shane" and action == 'USE':
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else:
choice = SingleChoice("Choose which player to snipe", [pl.name for pl in opponents])
if choice == None: return 'ABORT'
else: player = opponents[choice]
remoteCall(player,'NathanShaneStart',[card])
### IOUF ###
elif card.name == "Butch Deuces" and action == 'USE':
topC = list(deck.top(5))
chosenC = askCardFromList(topC,'Choose one Spirit or Attire to reveal or close this window to leave these cards where they are',card.Name)
if not chosenC: notify("{} boots {} to look at the top 5 cards of their deck but opts to keep their current hand".format(me,card))
else:
for c in me.hand: c.moveToBottom(deck)
for c in deck.top(5): c.moveTo(me.hand)
notify("{} boots {} to reveal {} from the top 5 cards of their deck, take those cards to their hand and shuffle their previous hand to their deck".format(me,card,chosenC.Name))
deck.shuffle()
elif card.name == "Laughing Crow" and action == 'USE':
topC = list(deck.top(2))
spirits = []
for c in topC:
c.moveTo(discardPile)
if re.search(r'Spirit',c.Keywords): spirits.append(c)
playedSpirit = 'select'
while len(spirits) and playedSpirit != None:
playedSpirit = askCardFromList(spirits,'Select a spirit to play (paying all costs)',card.Name)
if playedSpirit:
playcard(playedSpirit)
spirits.remove(playedSpirit)
topC.remove(playedSpirit)
if len(topC): notify("{} discarded {} from the top of their deck".format(card,[c.Name for c in topC]))
elif card.name == "Benjamin Washington" and action == 'USE':
handCards = list(me.hand)
discardedC = askCardFromList(handCards,'Select one card to discard or close this window to finish',card.Name)
if not discardedC: return
while discardedC != None and len(me.hand):
discardedC.moveTo(discardPile)
notify(":> {} uses {} to discard {}".format(me,card,discardedC))
handCards.remove(discardedC)
discardedC = askCardFromList(handCards,'Select another card to discard or close this window to finish',card.Name)
discardedNR = 5 - len(handCards)
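      # NOTE: this assumes the hand started at 5 cards, so discardedNR equals the number of cards discarded above (one upkeep reduction is offered per discard).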
for iter in range(discardedNR):
upkeepDudes = [c for c in table if compileCardStat(c, 'Upkeep') >= 1 and c.controller == me]
upkeepFixed = askCardFromList(upkeepDudes,'Select one of your dudes to reduce their upkeep by 2 ({}/{})'.format(iter + 1,discardedNR),card.Name)
if not upkeepFixed: break
if compileCardStat(upkeepFixed, 'Upkeep') >= 2:
upkeepFixed.markers[mdict['ProdPlus']] += 2
TokensX('Put2UpkeepPrePaid', '', upkeepFixed)
else:
upkeepFixed.markers[mdict['ProdPlus']] += 1 # We cannot exceed their production, as they would get prod instead then.
TokensX('Put1UpkeepPrePaid', '', upkeepFixed)
notify(":> {} reduces the upkeep of {} to {} until High Noon".format(card,upkeepFixed,compileCardStat(upkeepFixed, 'Upkeep')))
draw()
elif card.name == "Smiling Frog" and action == 'USE':
discardC = findTarget('DemiAutoTargeted-choose1-fromHand')
if not len(discardC): return 'ABORT'
else:
if re.search(r'Spirit',discardC[0].Keywords):
TokensX('Put2BulletNoonPlus', '', card)
notify("{} discards {} to empower himself with 2 extra bullets".format(card,discardC[0]))
else:
TokensX('Put1BulletNoonPlus', '', card)
notify("{} discards {} and gains 1 extra bullet".format(card,discardC[0]))
discardTarget(targetCards = discardC, silent = True)
### TLS ###
elif card.name == "The Extra Bet" and action == 'USE':
if getGlobalVariable('Phase') != '1':
#if not confirm(":::WARNING::: It is not yet the Gamblin' phase. Do you want to jump to lowball now?"): return
goToGamblin()
drawhandMany(me.Deck, 5, True)
betLowball()
if me.GhostRock < 1 and confirm("You do not seem to have enough ghost rock in your stash to use {}. Proceed to reveal your lowball hand as normal instead?".format(card.Name)):
revealLowballHand()
notify("{} did not have enough ghost rock in their stash to use {}".format(me,card))
else:
betLowball()
me.piles['Draw Hand'].lookAt(-1)
notify("{} uses {} to ante an extra ghost rock and is looking at their draw hand for a card to redraw".format(me,card))
### SB7-9 ###
elif card.name == "Rico Rodegain" and action == 'USE':
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else:
choice = SingleChoice("Choose which player to investigate", [pl.name for pl in opponents])
if choice == None: return 'ABORT'
else: player = opponents[choice]
remoteCall(player,'RicoStart',[card])
elif card.name == "Jael's Guile" and action == 'USE':
for c in table:
if c.model == "cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1" and c.controller == me:
dude = fetchHost(card)
if dude.orientation == Rot90 and not confirm("You need to boot your dude to use {} when your hand is illegal. Bypass?".format(card.Name)): return 'ABORT'
boot(dude)
break
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else:
choice = SingleChoice("Which player are you shooting it out with?", [pl.name for pl in opponents])
if choice == None: return 'ABORT'
else: player = opponents[choice]
remoteCall(player,'JaelGuile',[card])
elif card.name == "Rick Henderson" and action == 'USE':
targetCards = findTarget('Targeted-atDude',card = card, choiceTitle = "Choose the dude you're robbing")
if not len(targetCards): return 'ABORT'
else: remoteCall(targetCards[0].controller,'RickHenderson',[targetCards[0],card])
### Ghost Town ###
elif card.name == "Silent Sigil":
if getGlobalVariable('Phase') == '4':
drawMany(me.deck,count = 1, silent = True)
notify("{} used {} ability.".format(me, card.name))
else:
notify("{} can be used only during Sundown.".format(card.name))
elif card.name == "Ol' Howard" and action == 'USE':
retrieveTuple = RetrieveX('Retrieve1Card-grabDeed-toTable-payCost', '', card)
if retrieveTuple == 'ABORT':return 'ABORT'
elif len(retrieveTuple[1]):
deed = retrieveTuple[1][0]
deed.markers[mdict['ProdMinus']] += num(deed.Production)
deed.markers[mdict['PermControlMinus']] += num(deed.Control)
attachCard(card,deed)
notify("{} startings haunting {}".format(card,deed))
elif card.name == "Notary Public" and action == 'USE':
deeds = findTarget('Targeted-atDeed_and_Government_or_Deed_and_Public-isUnbooted')
if not len(deeds): return 'ABORT'
else:
pub = False
gov = False
deed = deeds[0]
boot(deed,silent = True)
if re.search(r'Government',getKeywords(deed)):
dudesGov = findTarget('DemiAutoTargeted-atDude-choose1-choiceTitle{Choose which dude should receive a bounty}')
if len(dudesGov):
dudesGov[0].markers[mdict['Bounty']] += 1
gov = True
if re.search(r'Public',getKeywords(deed)):
dudesPub = findTarget('DemiAutoTargeted-atDude-targetMine-choose1-choiceTitle{Choose which dude should be moved}')
if len(dudesPub):
ModifyStatus('MoveTarget-moveToDeed_or_Town Square_or_Outfit', '', card, dudesPub)
pub = True
if gov and pub: notify("{} boots {} to increase the bounty of {} and move {} to another location".format(me,card,dudesGov[0], dudesPub[0]))
elif gov: notify("{} boots {} to increase the bounty of {}".format(me,card,dudesGov[0]))
elif pub: notify("{} boots {} move {} to another location".format(me,card,dudesPub[0]))
elif card.name == "Framed" and action == 'PLAY':
dudes = findTarget('DemiAutoTargeted-atDude-targetOpponents-choose1')
if len(dudes):
dude = dudes[0]
if dude.markers[mdict['Bounty']]:
dude.markers[mdict['Bounty']] += 3
notify("{} easily frames the already wanted {}, increasing their bounty by 3.".format(me,dude))
else:
if payCost(compileCardStat(dude,'Influence'), MSG = "You do not have enough ghost rock to frame {}! Bypass?".format(dude)) == 'ABORT': return 'ABORT'
remoteCall(dude.controller,'Framed',[card,dude])
elif card.name == "Plague of Grasshoppers" and action == 'PLAY':
dudes = findTarget('Targeted-atDude_and_Kung Fu-isUnbooted')
location = findTarget('DemiAutoTargeted-atDeed_or_Town Square_or_Outfit-choose1')
successDudes = []
for dude in dudes:
rank,suit = pull(silent = True)
dudeKF = compileCardStat(dude,'Value')
if num(rank) < dudeKF:
notify(":> {} pulled {} for {}, succeeding their Kung Fu pull by {}".format(me,rank,dude,dudeKF - num(rank) - 1))
successDudes.append(dude)
else:
notify(":> {} failed their Kung Fu pull by {} by pulling a {}".format(dude,num(rank) - dudeKF + 1,rank))
iter = 0
for dude in successDudes:
iter += 1
x,y = location[0].position
dude.moveToTable(x + cardDistance() * iter, y)
orgAttachments(dude)
if len(successDudes):
notify("{} moves {} dudes to {} ({})".format(me,len(successDudes),location[0],[c.Name for c in successDudes]))
else:
notify ("{} tried to move {} dudes to {} but failed miserably to move even a single one".format(me,len(dudes),location[0]))
elif card.name == "Walters Creek Distillery" and action == 'USE':
deeds = findTarget('Targeted-atDeed_and_Saloon_or_Deed_and_Casino-isUnbooted')
if not len(deeds): return 'ABORT'
else:
saloon = False
casino = False
deed = deeds[0]
boot(deed,silent = True)
if re.search(r'Saloon',getKeywords(deed)):
handC = findTarget('DemiAutoTargeted-fromHand-choose1')
if len(handC):
discardTarget(targetCards = handC, silent = True)
drawMany(deck, 1, silent = True)
saloon = True
if re.search(r'Casino',getKeywords(deed)):
me.GhostRock += 2
casino = True
if saloon and casino: notify("{} boots {} to gain 2 ghost rock and discard a card to draw a card".format(me,card))
elif saloon: notify("{} boots {} to discard a card to draw a card".format(me,card))
elif casino: notify("{} boots {} to gain 2 ghost rock".format(me,card))
elif card.name == "A Piece Of The Action" and action == 'PLAY':
handDudes = findTarget('DemiAutoTargeted-atDude-fromHand-choose1')
if not len(handDudes): return 'ABORT'
dude = handDudes[0]
cost = num(dude.Cost)
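      # Discount the hiring cost by up to 4 ghost rock, but never let the discounted cost drop below 4; dudes cheaper than 4 are paid in full.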
if cost >= 4:
reducedCost = cost - 4
if reducedCost < 4: reducedCost = 4
reduction = cost - reducedCost
else:
reducedCost = cost
reduction = 0
if chkGadgetCraft(dude):
if payCost(reducedCost) == 'ABORT' : return 'ABORT'
placeCard(dude,'HireDude')
notify(":> {} is giving {} a piece of the action".format(me,dude))
availGoods = [c for c in discardPile if c.Type == 'Goods' and not re.search(r'Gadget',c.Keywords)]
if len(availGoods):
goods = askCardFromList(availGoods,'You can equip a goods from your discard pile to {} with a {} Ghost Rock reduction to its cost. \nChoose one or close this window to equip nothing'.format(dude.Name,reduction),card.Name)
if goods: playcard(goods,costReduction = reduction, preHost = dude)
elif card.name == "Foreboding Glance" and action == 'PLAY':
myDudes = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
opDudes = findTarget('DemiAutoTargeted-atDude-targetOpponents-choose1')
if not len(myDudes) or not len(opDudes):
whisper("You need to target one of your dudes and one opposing dude to use this action")
return 'ABORT'
myDude = myDudes[0]
opDude = opDudes[0]
if compileCardStat(myDude, 'Control') > compileCardStat(opDude, 'Control'):
hostCards = eval(getGlobalVariable('Host Cards'))
attachmentsList = [Card(cID) for cID in hostCards if hostCards[cID] == opDude._id]
         for attachment in attachmentsList: boot(attachment, silent = True)
         notify(":> {}'s attachments are booted".format(opDude))
if compileCardStat(myDude, 'Influence') > compileCardStat(opDude, 'Influence'):
         boot(opDude, silent = True)
notify(":> {} is booted".format(opDude))
if myDude.markers[mdict['Bounty']] > opDude.markers[mdict['Bounty']]:
         callout(myDude, silent = True, targetDudes = opDudes)
notify(":> {} is calling out {}".format(myDude,opDude))
elif card.name == "Ambrose Douglas" and action == 'USE':
discardC = findTarget('DemiAutoTargeted-choose1-fromHand')
if not len(discardC): return 'ABORT'
else:
TokensX('Put1InfluencePlus', '', card)
if discardC[0].type == 'Spell' or (discardC[0].type == 'Goods' and re.search(r'Mystical',discardC[0].Keywords)):
OutfitCard.orientation = Rot0
OutfitCard.markers[mdict['UsedAbility']] = 0
notify("{} discards {} to gain 1 influence and are able to re-use their outfit card ability".format(card,discardC[0]))
else:
notify("{} discards {} to gain 1 influence ".format(card,discardC[0]))
discardTarget(targetCards = discardC, silent = True)
elif card.name == "Fool Me Once..." and action == 'USE':
for player in getActivePlayers():
if player != me: remoteCall(player,'drawMany',[player.Deck, 1, None, True])
notify("{} revealed an illegal draw hand and everyone else gets to draw a card".format(me))
elif card.name == "Theo Whateley-Boyer" and action == 'USE':
if getGlobalVariable('Phase') == '1':
if payCost(1, silent) == 'ABORT': return 'ABORT'
jokers = findTarget('DemiAutoTargeted-atJoker-isDrawHand-choose1-targetMine')
if not len(jokers):
whisper(":::ERROR::: You need to have revealed a joker to use this ability")
return 'ABORT'
attachCard(jokers[0],card)
jokers[0].highlight = None
notify("{} paid 1 Ghost Rock to attach {} to {}".format(me,jokers[0],card))
elif getGlobalVariable('Shootout') == 'True':
         if not card.markers[mdict['UsedAbility']] or (card.markers[mdict['UsedAbility']] and confirm("You've already used {}'s ability this turn. Bypass the restriction?".format(card.name))):
if not card.markers[mdict['UsedAbility']]: card.markers[mdict['UsedAbility']] += 1
else: notify(":::WARN::: {} bypassed once-per turn restriction on {}'s ability".format(me,card))
jokers = []
hostCards = eval(getGlobalVariable('Host Cards'))
attachmentsList = [Card(cID) for cID in hostCards if hostCards[cID] == card._id]
for attachment in attachmentsList:
if attachment.Type == 'Joker': jokers.append(attachment)
if not len(jokers):
whisper(":::ERROR::: You need to have an attached joker to use this ability")
return 'ABORT'
elif len(jokers) == 1: joker = jokers[0]
else:
joker = jokers[SingleChoice('Choose one of your attached jokers to put in your draw hand',[c.Name for c in jokers])]
hex = findTarget('DemiAutoTargeted-atHex-onAttachment-isUnbooted-choose1', card = card, choiceTitle = "Choose which hex to boot to use this ability")
if not len(hex): return 'ABORT'
boot(hex[0], silent = True)
discardCards = findTarget('DemiAutoTargeted-isDrawHand-targetMine-choose1')
if not len(discardCards): return 'ABORT'
discardCards[0].moveTo(discardCards[0].owner.piles['Discard Pile'])
for c in table:
if c.highlight == DrawHandColor and c.controller == me: c.moveTo(me.piles['Draw Hand'])
joker.moveTo(me.piles['Draw Hand'])
clearAttachLinks(joker)
notify("{} boots their {} to replace {} with {}".format(card,hex[0],discardCards[0],joker))
            revealHand(me.piles['Draw Hand'], type = 'shootout') # We move the cards back to the draw hand and reveal again, letting the game announce the new rank.
else:
whisper(":::ERROR::: You can only use Theo's ability during lowball or shootouts")
elif card.name == "Antoine Peterson" and action == 'PLAY':
discardedJobs = [c for c in me.piles['Discard Pile'] if re.search(r'Noon Job',c.Text) and c.Type == 'Action']
choice = SingleChoice('Choose one of your jobs to take to your hand', makeChoiceListfromCardList(discardedJobs))
      if choice == None: return
discardedJobs[choice].moveTo(me.hand)
handDiscard = None
while not handDiscard:
handDiscard = askCardFromList([c for c in me.hand],"Choose which card to discard from your hand")
handDiscard.moveTo(me.piles['Discard Pile'])
notify("{} uses {} to take {} into their hand and discard {}".format(me,card,discardedJobs[choice],handDiscard))
Sloanies = findTarget("Targeted-atDude_and_The Sloane Gang-targetMine-isBooted-noTargetingError")
if len(Sloanies):
boot(Sloanies[0], forced = 'unboot')
Sloanies[0].markers[mdict['Bounty']] += 2
notify("{} unboots {} and gives them 2 bounty".format(me,Sloanies[0]))
elif card.name == "Denise Brancini" and action == 'DISCARD':
if getGlobalVariable('Phase') == '2':
notify(":> {} couldn't pay the upkeep of {} and so all other players refill their aced jokers".format(me,card))
for player in getActivePlayers(): remoteCall(player,'DeniseBrancini',[card])
elif card.name == "Serendipitous Arrival" and action == 'PLAY':
handDudes = findTarget('DemiAutoTargeted-atDude-fromHand-choose1')
if not len(handDudes): return 'ABORT'
dude = handDudes[0]
cost = num(dude.Cost)
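      # 5 ghost rock discount on the dude's cost, to a minimum of 0; extraTXT reports the amount actually paid.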
if cost - 5 > 0:
cost -= 5
extraTXT = " was paid {} ghost rock and".format(cost)
else:
cost = 0
extraTXT = ''
if payCost(cost) == 'ABORT' : return 'ABORT'
placeCard(dude,'HireDude')
notify(":> {}{} has serendipitously arrived in battle!".format(dude,extraTXT))
participateDude(dude)
TokensX('Put1Serendipitous', '', dude)
   ### There Comes a Reckoning ###
elif card.name == "Agent Provocateur" and action == 'USE':
if getGlobalVariable('Phase') != '4':
notify(":> {} ability can be used only during Sundown".format(card.name))
return
handDudes = findTarget('DemiAutoTargeted-atDude-fromHand-choose1')
if not len(handDudes): return 'ABORT'
dude = handDudes[0]
cost = num(dude.Cost)
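      # 4 ghost rock discount on the dude's cost, to a minimum of 0.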
if cost > 4:
reducedCost = cost - 4
else:
reducedCost = 0
if payCost(reducedCost) == 'ABORT' : return 'ABORT'
placeCard(dude,'HireDude')
notify(":> {} is bringing {} into play".format(card.name,dude))
ace(card)
elif card.name == "Stewart Davidson" and action == 'USE':
handDiscard = askCardFromList([c for c in me.hand],"Choose which card to discard from your hand")
if not handDiscard: return
handDiscard.moveTo(me.piles['Discard Pile'])
dudes = findTarget('DemiAutoTargeted-atDude-targetOpponents-choose1')
if len(dudes):
dude = dudes[0]
if payCost(compileCardStat(dude,'Influence'), MSG = "You do not have enough ghost rock to frame {}! Bypass?".format(dude)) == 'ABORT': return 'ABORT'
notify(":> {} succesfully framed {} for 1 bounty".format(card.controller,dude))
dude.markers[mdict['Bounty']] += 1
elif card.name == "The Winning Agenda":
      dudes = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
      if not len(dudes): return 'ABORT'
      toDiscard = compileCardStat(dudes[0],'Influence')
if toDiscard > 3: toDiscard = 3
if toDiscard > len([c for c in me.hand]):
notify("You don't have enough cards in your hand to use this ability")
return
disloop = toDiscard
while disloop:
chosenCard = askCardFromList([c for c in me.hand],"Choose which card to discard from your hand")
chosenCard.moveTo(me.piles['Discard Pile'])
disloop -= 1
drawMany(me.Deck, toDiscard, silent =True)
notify(":> {} used {} to discard and draw {} cards".format(me,card.name, toDiscard))
elif card.name == "Rosenbaum's Golem" and action == 'USE':
if card.orientation == Rot90:
notify(":> {} has to be unbooted to use his ability".format(card))
return
else:
boot(card, forced = 'boot')
      dudes = findTarget('DemiAutoTargeted-atDude-isParticipating-targetOpponents-isUnbooted-choose1')
      if not len(dudes): return 'ABORT'
      boot(dudes[0], forced = 'boot')
inf = compileCardStat(dudes[0],'Influence')
val = compileCardStat(dudes[0], 'Value')
bul = compileCardStat(dudes[0], 'Bullets')
grit = inf + val + bul
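      # Grit = influence + value + bullets; if it reaches 11 or more (after the optional +5 value below), one of your other booted posse dudes may unboot.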
choice = confirm("Do you want give that dude 5 shootout value? {} has currently {} grit.".format(dudes[0].name, grit))
if choice:
dudes[0].markers[mdict['ValueShootoutPlus']] += 5
grit +=5
if grit >= 11:
dudes = findTarget("AutoTargeted-atDude-isParticipating-isBooted-targetMine")
dudes.remove(card)
if not len(dudes): return
if len(dudes) == 1:
chosenCard = dudes[0]
else:
chosenCard = askCardFromList(dudes,"Choose which dude to unboot")
boot(chosenCard, forced = 'unboot')
elif card.name == 'Zeb Whateley-Dupont':
if OutfitCard.Outfit == 'The Fourth Ring':
TokensX('Put1High Noon:Stud', '', card)
elif card.name == "Rabid Rance Hitchcock":
if card.markers[mdict['Bounty']] == 0:
notify(":> {} has to be WANTED DUDE to use his ability".format(card.name))
return
deed = findTarget("Targeted-atDeed-choose1")
notify("{} uses {}'s ability".format(me,card))
if not len(deed):
TokensX('Remove999Noon:ControlBullets', '', card)
return
if deed[0].controller != me:
notify(":> You have to control {} to use this ability".format(deed[0].name))
return
TokensX('Remove999Noon:ControlBullets', '', card)
prodBullets = int(compileCardStat(deed[0], stat = 'Production'))
if prodBullets > 3: prodBullets = 3
TokensX('Put{}Noon:ControlBullets'.format(prodBullets), '', card)
elif card.name == "Sheriff Eli Waters":
wanted = findTarget("AutoTargeted-atDude-hasMarker{Bounty}-targetOpponents-isParticipating")
maxBounty = 0
for dude in wanted:
bounty = dude.markers[mdict['Bounty']]
if bounty > maxBounty: maxBounty = bounty
if maxBounty > 4: maxBounty = 4
      TokensX('Remove999Shootout:BountyBullets', '', card)
TokensX('Put{}Shootout:BountyBullets'.format(maxBounty), '', card)
elif card.name == "Epitaph Branch Office":
dudes = findTarget("DemiAutoTargeted-atDude-targetMine-choose1")
if compileCardStat(dudes[0],'Influence') < 1:
notify(":> {} has no influence!".format(dudes[0]))
return
if dudes[0].markers[mdict['PermControlPlus']] != 0:
notify(":> {} has already a control point!".format(dudes[0]))
return
dudes[0].markers[mdict['PermControlPlus']] += 1
notify(":> {} gains one control point!".format(dudes[0]))
elif card.name == "Buffalo Emporium":
dudes = findTarget("DemiAutoTargeted-atDude-targetMine-choose1")
TokensX('Put1High Noon:Cannot be Moved or Booted', '', dudes[0])
if re.search(r'Abomination',dudes[0].Keywords):
boot(dudes[0], forced = 'unboot')
TokensX('Put1High Noon:1 Stud posse bonus', '', dudes[0])
elif card.name == "Explorer's Lodge":
dudes = findTarget("DemiAutoTargeted-atDude-choose1")
      ModifyStatus('MoveTarget-moveToTown Square', '', card, dudes)
boot(dudes[0], forced = 'boot')
if dudes[0].controller == me and me.GhostRock > 1:
choice = confirm("Do you want to pay 2GR to unboot {}?.".format(dudes[0].name))
if choice:
boot(dudes[0], forced = 'unboot')
me.GhostRock -= 2
notify(":> {} pays 2GR to unboot{}".format(me, dudes[0]))
else:
remoteCall(dudes[0].controller,'ExpLodge', dudes[0])
elif card.name == "The Oriental":
dudes = findTarget("DemiAutoTargeted-atDude-targetMine-choose1")
if not dudes[0].orientation == Rot90:
notify(":> You can only use this ability on unbooted dude!!!!")
return
boot(dudes[0], forced = 'unboot')
dudes[0].markers[mdict['Bounty']] += 2
notify(":> {} unbooted {} and raised their bounty bu two using connections at {}!".format(me, dudes[0], card))
elif card.name == "Disarm":
if getGlobalVariable('Shootout') == 'True':
dudes = findTarget("DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1")
dudeGoods = findTarget('DemiAutoTargeted-atGoods_or_Spell-onAttachment-isUnbooted-choose1', card = dudes[0])
if len(dudeGoods) and re.search(r'Unique',dudeGoods[0].Keywords):
dudeGoods[0].moveTo(dudeGoods[0].controller.piles['Discard Pile'])
notify(":> {} disarmed {} and discarded {} !".format(me, dudes[0], dudeGoods[0]))
elif len(dudeGoods):
dudeGoods[0].moveTo(dudeGoods[0].controller.piles['Boot Hill'])
notify(":> {} disarmed {} and aced {} !".format(me, dudes[0], dudeGoods[0]))
if confirm("Is your revealed hand legal?"):
dudes = findTarget("DemiAutoTargeted-atDude-targetMine-isParticipating-choose1")
TokensX('Put1High Noon:Stud', '', dudes[0])
TokensX('Put2BulletNoonPlus', '', dudes[0])
notify("{} gets 2 noon bullets and becomes a stud till Sundown".format(dudes[0]))
notify(":> {} ignores all casualties this round of shootout !".format(me))
return
else:
dudes = findTarget("DemiAutoTargeted-atDude-targetOpponents-choose1")
dudeGoods = findTarget('DemiAutoTargeted-atGoods_or_Spell-onAttachment-isUnbooted-choose1', card = dudes[0])
if len(dudeGoods):
boot(dudeGoods[0], forced = 'boot')
notify(":> {} disarmed {} by booting {} !".format(me, dudes[0], dudeGoods[0]))
if confirm("Is your revealed hand legal?"):
dudes = findTarget("DemiAutoTargeted-atDude-targetMine-choose1")
TokensX('Put1High Noon:Stud', '', dudes[0])
TokensX('Put2BulletNoonPlus', '', dudes[0])
notify("{} gets 2 noon bullets and becomes a stud till Sundown".format(dudes[0]))
elif card.name == "Friends in High Places":
if confirm("Are you sure your dude has higher influence?"):
clearShootout()
notify("{} uses his dude's influence to avoid shootout".format(me))
elif card.name == "You Had ONE Job!":
dudes = findTarget('DemiAutoTargeted-atDude-hasMarker{Bounty}-isParticipating-choose1')
if not len(dudes):
notify("There are no valid targets for this ability")
return
if dudes[0].markers[mdict['Bounty']] < 3:
notify("You need at least 3 bounty on a dude to use this ability")
return
if fetchDrawType(dudes[0]) == 'Draw':
TokensX('Remove999Shootout:Draw', '', dudes[0])
TokensX('Put1Shootout:Stud', '', dudes[0])
else:
TokensX('Remove999Shootout:Stud', '', dudes[0])
TokensX('Put1Shootout:Draw', '', dudes[0])
elif card.name == "Grim Servant O' Death":
dudes = findTarget("DemiAutoTargeted-atDude-isNotParticipating-choose1")
if dudes[0].controller == me:
choice = confirm("Do you want {} to participate in the shootout".format(dudes[0].name))
if choice:
participateDude(dudes[0])
return
else:
remoteCall(dudes[0].controller,"GSOD2",[card])
else:
if remoteCall(dudes[0].controller,"GSOD",[card, dudes[0]]) == "True":
return
return
elif card.name == "Decimator Array" and action == 'PLAY':
if confirm("Do you want to unboot one of your mad scientists?"):
dude = findTarget("DemiAutoTargeted-atMad Scientist-targetMine-isBooted-choose1")
update()
boot(dude[0], forced = 'unboot')
elif card.name == "Devil's Six Gun":
j1 = [j for j in me.piles['Boot Hill'] if j.type == 'Joker']
j2 = [j for j in me.piles['Discard Pile'] if j.type == 'Joker']
j3 = [j for j in me.piles['Deck'] if j.type == 'Joker']
jokers = j1 + j2 + j3
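      # Build a hint telling the player where each joker sits; the list order matches the Boot Hill + Discard Pile + Deck concatenation above.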
if not len(jokers):
notify("You have no jokers that are available to retrieve")
return
if len(j1) == 2:
string = "Both jokers are in your Boot Hill."
elif len(j2) == 2:
string = "Both jokers are in your Discard Pile."
elif len(j3) == 2:
string = "Both jokers are in your Deck."
elif j1 and j2:
string = "Left joker is in your Boot Hill, right joker is in your Discard Pile."
elif j1 and j3:
string = "Left joker is in your Boot Hill, right joker is in your Deck."
elif j2 and j3:
string = "Left joker is in your Discard Pile, right joker is in your deck."
elif j1:
string = "Joker is in your Boot Hill"
elif j2:
string = "Joker is in your Discard Pile"
else:
string = "Joker is in your Deck"
joker = askCardFromList(jokers, "Choose Joker to use.{}".format(string))
discardCards = findTarget('DemiAutoTargeted-isDrawHand-targetMine-choose1')
if not len(discardCards): return 'ABORT'
discardCards[0].moveTo(discardCards[0].owner.piles['Discard Pile'])
for c in table:
if c.highlight == DrawHandColor and c.controller == me: c.moveTo(me.piles['Draw Hand'])
joker.moveTo(me.piles['Draw Hand'])
notify("{} uses their {} to replace {} with {}".format(me,card,discardCards[0],joker))
if getGlobalVariable("Shootout") == "True":
revealHand(me.piles['Draw Hand'], type = 'shootout')
else:
revealHand(me.piles['Draw Hand'], type = 'lowball')
TokensX('Put1DoNotAce', '', joker)
elif card.name == "Darius Hellstromme":
weapon = findTarget("DemiAutoTargeted-atWeapon_and_Gadget-isParticipating-isUnbooted-targetMine-choose1")
if not len(weapon):
notify("You need unbooted weapon-gadget to use this ability")
return
dude = fetchHost(weapon[0])
boot(weapon[0], forced = 'boot')
dude.markers[mdict['PermControlPlus']] += 1
elif card.name == "Father Tolarios":
if len(me.hand):
cardChoice = askCardFromList([c for c in me.hand], "Choose a card you want to discard")
else:
notify("You need to discard a card to use this ability")
return
cardChoice.moveTo(me.piles['Discard Pile'])
wanted = findTarget("DemiAutoTargeted-atDude-hasMarker{Bounty}-targetOpponents-choose1")
if not len(wanted):
notify("You need a wanted dude to fetch a card")
return
wanted[0].markers[mdict['Bounty']] -= 1
mirOrMysDiscard = [c for c in me.piles['Discard Pile'] if ("Miracle" in c.keywords and "Unique" not in c.keywords )] + [c for c in me.piles['Discard Pile'] if ("Mystic" in c.keywords and "Unique" not in c.keywords) ]
mirOrMysDeck = [c for c in me.piles['Deck'] if ("Miracle" in c.keywords and "Unique" not in c.keywords )] + [c for c in me.piles['Deck'] if ("Mystic" in c.keywords and "Unique" not in c.keywords) ]
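      # Substring checks, so 'Mystic' also matches the 'Mystical' keyword; discard pile cards are listed before deck cards, hence the count hint below.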
string = len(mirOrMysDiscard)
cards = mirOrMysDiscard + mirOrMysDeck
if len(cards) == 0:
notify("You have no cards you can fetch with this ability")
return
card = askCardFromList(cards, "Choose a card to fetch.First {} are in your discard".format(string))
card.moveTo(me.hand)
notify("{} fetched {} using Father Tolarios ability".format(me, card.name))
   ### 2T2D ###
elif card.name == "The Spiritualy Society":
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
else: return
if me.Influence > player.Influence:
if confirm('Do you have more influence than {} in Town Square? '.format(player.Name)):
drawMany(me.deck, 1, silent = True)
else: return
notify("{} drew one card because they control Town Square ".format(me))
elif card.name == "Buskers":
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
      if len(opponents) == 1: player = opponents[0]
      else: return 'ABORT' # Only the two-player case is handled here.
targetDude = findTarget("DemiAutoTargeted-atDude-targetOpponents-choose1")
if player.GhostRock >= 2: remoteCall(player,'Buskers',targetDude)
boot(targetDude[0], silent = True)
notify("{} booted {} using {} ability.".format(me, targetDude[0].name,card.name))
elif card.name == "Taiyari":
targetDude = findTarget('DemiAutoTargeted-atDude-targetMine-isParticipating-choose1')
bullets = compileCardStat(targetDude[0], stat = 'Bullets')
if bullets == 0: bullets = 1
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
      if len(opponents) == 1: player = opponents[0]
      else: return 'ABORT' # Only the two-player case is handled here.
me.GhostRock -= bullets
player.GhostRock += bullets
TokensX('Put1Shootout:Stud', '', targetDude[0])
notify("{} paid {} GR to make {} a stud, {} has maximum 4 bullets till teh end of the shootout".format(me,bullets,targetDude[0], targetDude[0]))
elif card.name == "Feichi Suitcase Lee":
targetDude = findTarget('DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1')
boot(targetDude[0], silent = True)
dudeInfluence = compileCardStat(targetDude[0], stat = 'Influence')
sInfluence = compileCardStat(card, stat = 'Influence')
if dudeInfluence < sInfluence:
dude = findTarget('DemiAutoTargeted-atDude-isNotParticipating-isMine-choose1')
participateDude(dude[0])
notify("{} booted {} and brought {} into shootout".format(card.name, targetDude[0], dude[0]))
return
notify("{} booted {}".format(card.name, targetDude[0]))
elif card.name == "Vida Azul":
discardCards = findTarget('DemiAutoTargeted-isDrawHand-targetMine-choose1', choiceTitle = "Choose a card to discard from your hand")
discardCards[0].moveTo(discardCards[0].owner.piles['Discard Pile'])
targetDude = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
dudeHorse = findTarget('DemiAutoTargeted-atHorse-onAttachment-isBooted-choose1', card = targetDude[0])
TokensX("Remove1AbilityUsed",'', dudeHorse[0])
boot(dudeHorse[0], forced = 'unboot')
if(discardCards[0].Type == "Goods"):
boot(targetDude[0], forced = 'unboot')
notify("{} discarded {} to unboot {} and {}".format(me, discardCards[0], dudeHorse[0],targetDude[0]))
return
notify("{} discarded {} to unboot {}".format(me, discardCards[0], dudeHorse[0]))
elif card.model == 'ae22bba2-cf1e-4038-b7bb-1d3429c10026': # Silas Aims (Exp.1)
if not confirm("Is {} a mark?".format(card.name)):
boot(card, forced = 'boot')
targetDude = findTarget('DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1')
if card.markers[mdict['Bounty']] > targetDude[0].markers[mdict['Bounty']]:
remoteCall(dudes[0].controller, "Censure", [card, targetDude])
notify('{} send {} home booted'.format(card,targetDude[0]))
else:
            notify("Your dude doesn't have enough bounty")
            return 'ABORT'
elif card.name == 'Electrostatic Pump Gun':
topd = findTarget('DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1')
topDude = topd[0]
TokensX('Put1BulletShootoutMinus', '', topDude)
if not pull()[1] == 'Clubs':
         bullets = compileCardStat(topDude, stat = 'Bullets')
if bullets == 0:
if fetchDrawType(topDude) == 'Stud':
TokensX('Put1Shootout:Draw', '', topDude)
notify("{} is a draw till the end of the shootout".format(topDude.name))
else:
if confirm("Do you want to play react ability to change this pull to different suite?"):
bullets = compilecardstat(topDude, stat = 'Bullets')
if bullets == 0:
if fetchDrawType(topDude) == 'Stud':
TokenX('Put1Shootout:Draw', '', topDude)
notify("{} is a draw till the end of the shootout".format(topDude.name))
notify("Reminder: {} has to use react ability to change pull's suite.".format(card.controller))
else: notify("EPG malfunctioned as {} used water insted of ghost rock to fuel it.".format(card.controller))
elif card.name == 'Analytical Cognisizer':
      if confirm("Have you successfully invented this gadget? If you answer Yes, your Mad Scientist unboots and you draw a card."):
         ModifyStatus('UnbootHost')
         topCards = deck.top(1) # Do not reassign 'card' here: it still has to name the gadget in the notification below.
         topCards[0].moveTo(me.hand)
         notify('{} successfully invented {} so he unbooted his MS and drew a card'.format(me, card.name))
elif card.name == 'Exultant Translocation':
dude = fetchHost(card)
tmd = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
tmDude = tmd[0]
x,y = dude.position
Jx,Jy = tmDude.position
dude.moveToTable(Jx,Jy)
tmDude.moveToTable(x,y)
orgAttachments(dude)
orgAttachments(tmDude)
      notify("{} and {} swapped places thanks to {}.".format(dude, tmDude, card.name))
      if confirm('Did the pull succeed by 6 or more?'):
         aCard = askCardFromList([dude, tmDude], 'Choose a dude to unboot.')
         boot(aCard, forced = 'unboot')
         notify("{} got unbooted as the pull succeeded by 6 or more.".format(aCard))
elif card.name =='Hostile Takeover':
tmd = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
tmDude = tmd[0]
if fetchDrawType(tmDude) == 'Draw':
notify("You need a stud to use this ability.")
         return 'ABORT'
tdeed = findTarget('DemiAutoTargeted-atDeed-targetOpponents-choose1')
deed = tdeed[0]
      if tmDude.orientation != Rot90 and deed.orientation != Rot90:
boot(tmDude, forced = 'boot')
boot(deed, forced = 'boot')
production = compileCardStat(deed, stat = 'Production')
me.GhostRock += production
notify('{} gained {} Ghost Rock by plundering {}.'.format(me, production, deed.name ))
if confirm("Do you want to ace {} to give {} 1 control point and {} 1 influence?".format(card.name, deed.name, tmDude)):
ace(card)
deed.markers[mdict['PermControlPlus']] += 1
tmDude.markers[mdict['PermInfluencePlus']] += 1
notify('{} aced {} to give {} permanent influence point and {} permanent control point.'.format(me, card.name, tmDude.name, deed.name))
else:
notify('You need to boot a dude and a deed to use this ability.')
elif card.name == "I'm Your Huckleberry":
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
      if len(opponents) == 1: player = opponents[0]
      else: return 'ABORT' # Only the two-player case is handled here.
      for c in table: # Do not shadow 'card': it still has to name this action card below.
         if c.controller == me:
            if c.model == 'cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1': # The cheatin' icon
               remoteCall(player,'Huckleberry',c)
targetDude= findTarget('DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1', choiceTitle = "Choose a dude that will be controlled by your opponent till the end of the shootout.")
if targetDude[0].highlight == AttackColor:
targetDude[0].highlight = DefendColor
else: targetDude[0].highlight = AttackColor
TokensX('Put1Shootout:Huckleberry', '', targetDude[0])
notify('{} takes control over {} till the end of the shootout'.format(card.controller, targetDude[0].name))
return
elif card.name == 'House of Many Faiths':
tmd = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
tmDude = tmd[0]
hostCards = eval(getGlobalVariable('Host Cards'))
if not len([Card(att_id) for att_id in hostCards if hostCards[att_id] == tmDude._id and re.search(r'Miracle',Card(att_id).Keywords)]):
notify("You have to chose a dude with Miracle to use this ability")
return 'Abort'
if len(me.hand) and len(me.piles['Discard Pile']):
handDiscard = askCardFromList([c for c in me.hand],"Choose which card to discard from your hand")
handDiscard.moveTo(me.piles['Deck'])
         cardFromDiscard = askCardFromList([c for c in me.piles['Discard Pile']],"Choose a card from your discard pile to shuffle into your deck")
cardFromDiscard.moveTo(me.piles['Deck'])
me.Deck.shuffle()
notify('{} put {} and {} that was fetched from discard into their deck.'.format(me, handDiscard, cardFromDiscard))
      else:
         notify("You need cards in both your hand and your discard pile to use this ability!")
         return 'ABORT'
      ModifyStatus('MoveTarget-moveToTown Square', '', card, tmd)
boot(tmDude, forced = 'unboot')
tmDude.markers[mdict['PermInfluencePlus']] += 1
notify("{} moves {} to Town Squere unboots them and gives them noon influence.")
elif card.name == "Jonah's Alliance":
tmd = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
tmDude = tmd[0]
boot(tmDude, forced = 'boot')
TokensX('Remove1High Noon:Draw', '',tmDude)
TokensX('Put1High Noon:Stud', '',tmDude)
for iter in range(2):
if tmDude.markers[mdict['Bounty']] < 4:
tmDude.markers[mdict['Bounty']] += 1
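      # The loop above adds up to 2 bounty but never raises the dude above 4 bounty.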
      TokensX('Put1High Noon:Jonah Control', '', tmDude)
if confirm('Do you want to boot a hex at a location to remove all traits, abilities and bonuses at that location?'):
spell = findTarget('DemiAutoTargeted-atSpell-isMine-isUnbooted-choose1',choiceTitle='Chose spell to boot')
boot(spell[0], forced = 'boot')
target = findTarget('DemiAutoTargeted-atSpell_or_Goods-targetOpponents-choose1', choiceTitle='Choose opponents attachment to blank it till the end of a day.')
TokensX('Put1High Noon:Blank', '',target[0])
elif card.name == 'Ke Wang':
topd = findTarget('DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1')
topDude = topd[0]
upkeep = compileCardStat(topDude, stat = 'Upkeep')
if me.GhostRock >= upkeep:
me.GhostRock -= upkeep
topDude.controller.GhostRock += upkeep
boot(topDude, forced = 'boot')
TokensX('Put1BulletShootoutMinus', '', topDude)
         notify("{} booted {} and gave them -1 bullets using {}'s ability".format(me, topDude.name, card.name))
else:
notify('You do not have enough ghost rock to use this ability.')
return
elif card.name =='Doc Holliday':
tpd = findTarget('DemiAutoTargeted-atDude-isParticipating-choose1', choiceTitle = 'Choose a skilled dude that ranking will be used for bullet bonus.')
      tpDude = tpd[0]
skillCheck = fetchSkills(tpDude)
tmpd = findTarget('DemiAutoTargeted-atDude-targetMine-isParticipating-choose1', choiceTitle = 'Choose a dude that receives a bonus')
tmpDude = tmpd[0]
bullets = compileCardStat(card, stat = 'Bullets')
if bullets < 0:
bullets = 0
if (bullets + skillCheck[0][1] <= 4):
for iter in range(skillCheck[0][1]):
TokensX('Put1BulletNoonPlus', '',tmpDude)
notify('{} increased {} bullets by {} skill rating'.format(me, tmpDude.name, tpDude))
return
else:
bRange = 4 - bullets
if bRange > 0:
for iter in range(bRange):
TokensX('Put1BulletNoonPlus', '',tmpDude)
notify('{} increased {} bullets by {} skill rating'.format(me, tmpDude.name, tpDude))
return
else: notify("{} uses {}'s ability".format(me,card)) # Just a catch-all.
return 'OK'
def fetchCustomUpkeep(card):
extraUpkeep = 0
if card.name == "Denise Brancini":
for player in getActivePlayers():
extraUpkeep += len([c for c in player.piles['Boot Hill'] if (c.type == 'Joker')])
return extraUpkeep
def fetchCustomProduction(card):
extraProduction = 0
if card.name == "Long Strides Ranch":
if len([c for c in table if re.search(r'Horse',c.Keywords)]) >= 2: extraProduction = 2
return extraProduction
def markerEffects(Time = 'Start'):
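   # Sweeps every card carrying markers and expires the timed effects matching the given trigger (e.g. Sundown, High Noon, ShootoutEnd).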
debugNotify(">>> markerEffects() at time: {}".format(Time)) #Debug
cardList = [c for c in table if c.markers]
for card in cardList:
for marker in card.markers:
if (Time == 'Sundown'
and (re.search(r'Bad Company',marker[0])
or re.search(r'High Noon:',marker[0])
or re.search(r'Hiding in the Shadows',marker[0])
or re.search(r'Rumors',marker[0]))):
TokensX('Remove999'+marker[0], marker[0] + ':', card)
notify("--> {} removes the {} resident effect from {}".format(me,marker[0],card))
if Time == 'Sundown' and re.search(r'Come Git Some',marker[0]) and card.controller == me and card.owner == me: # This is the Sloane outfit ability
if card.markers[mdict['PermControlPlus']]:
choice = SingleChoice("Do you want to take one Ghost Rock per player?", ['No, {} is not in the Town Square anymore.'.format(card.name),'Yes! Take 1 GR per player.'])
else: choice = SingleChoice("Do you want to take one Ghost Rock per player, or put a permanent control point on this dude?'.", ['None of the above. {} is not in the Town Square anymore.'.format(card.name),'Take 1 GR per player.','Put 1 permanent CP on {}.'.format(card.name)])
if not choice: # If the choice is 0 or None (i.e. closed the selection window) the dude is assumed to be out of the Town Square
notify("{}'s {} didn't manage to hold the Town Square. They gain nothing this turn".format(me,card,len(getActivePlayers()) - 1))
            elif choice == 1: # Choice 1 means they shake down each opposing player for 1 Ghost Rock
me.GhostRock += len(getActivePlayers()) - 1
notify("{}'s {} and shakes down the citizens of Gomorra for {} Ghost Rock".format(me,card,len(getActivePlayers()) - 1))
else:
notify("{}'s {} puts the fear of the gun to the town, giving them one permanent control point".format(me,card))
card.markers[mdict['PermControlPlus']] += 1
card.markers[marker] = 0
if Time == 'Sundown' and re.search(r'Sentinel',marker[0]) and card.controller == me and card.owner == me:
dudeSpells = findTarget('AutoTargeted-atSpell-onAttachment-isBooted', card = card)
if not confirm("Is {} in the Town Square?".format(card.name)):
continue
for Spell in dudeSpells:
if Spell.name == "Sentinel":
if confirm("Do you want to discard {} to give {} 1 Control Point?".format(Spell.name, card.name)):
TokensX("Remove1Sentinel",'', card)
card.markers[mdict['PermControlPlus']] += 1
Spell.moveTo(me.piles['Discard Pile'])
else:
TokensX("Remove1Sentinel",'', card)
if Time == 'Sundown' and re.search(r'HandsomeCP',marker[0]) and card.owner == me and me.GhostRock >=4:
if confirm("Do you want to make pay 4 GR to make control point on {} permanent?".format(card.name)):
me.GhostRock -= 4
card.markers[mdict['PermControlPlus']] += 1
notify("{} decided to pay 4 GR to gain additional CP at {}".format(me, card.name))
TokensX("Remove1HandsomeCP",'', card)
TokensX("Remove1ControlPlus",'', card)
if Time == 'Sundown' and re.search(r'Blank',marker[0]) and card.owner == me :
TokensX("Remove1High Noon:Blank",'', card)
if Time == 'Sundown' and re.search(r'Jonah Control',marker[0]) and card.owner == me :
TokensX("Remove1High Noon:Jonah Control",'', card)
if (Time == 'ShootoutEnd'
and (re.search(r'Sun In Yer Eyes',marker[0])
or re.search(r'Unprepared',marker[0])
or re.search(r'Shootout:',marker[0]))):
TokensX('Remove999'+marker[0], marker[0] + ':', card)
notify("--> {} removes the {} resident effect from {}".format(me,marker[0],card))
if (Time == 'ShootoutEnd'
and (re.search(r'Huckleberry',marker[0]))):
TokensX('Remove999'+marker[0], marker[0] + ':', card)
ModifyStatus('SendHomeBootedTarget-DemiAutoTargeted-atDude', '',card, card)
if Time == 'High Noon' and re.search(r'UpkeepPrePaid',marker[0]) and card.controller == me: # Tax Office reduction effects removal
modProd(card, -card.markers[marker], True)
card.markers[marker] = 0
if Time == 'High Noon' and re.search(r'Rowdy Ike',marker[0]) and card.controller == me:
TokensX("Remove1Rowdy Ike",'', card)
if Time == 'High Noon' and re.search(r'Ike Place',marker[0]) and card.controller == me:
TokensX("Remove1Ike Place",'', card)
card.markers[mdict['ProdMinus']] = 0
if (Time == 'ShootoutEnd' and re.search(r'Serendipitous',marker[0])):
if confirm("Do you want to pay 3 Ghost Rock to keep this dude in play?") and payCost(3) != 'ABORT':
TokensX('Remove999'+marker[0], marker[0] + ':', card)
notify("--> {} paid 3 ghost rock to retain {} in play.".format(me,card))
else:
discard(card,silent = True)
notify("--> {} wasn't paid properly so they left play.".format(card))
#------------------------------------------------------------------------------
# Remote Functions
#------------------------------------------------------------------------------
def UnionCasino(card,mainBet,targetDude, function = 'others bet'):
mute()
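   # Two-phase resolution: 'others bet' runs on each opponent and records their bet as a marker on the card; 'resolve' runs on the controller once all active players have bet.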
if function == 'others bet' and len(getActivePlayers()) > 1:
minBet = mainBet - 3
if minBet < 0: minBet = 0
myBet = askInteger("{} has started the Union Casino pool with {}. You need {} to check. Bet how much?".format(card.controller,mainBet,minBet),0)
if payCost(myBet, loud) == 'ABORT': myBet = 0
if myBet == 0: TokensX('Put1Union Casino Zero Bet:{}'.format(me.name), '', card)
else: TokensX('Put{}Union Casino Bet:{}'.format(myBet,me.name), '', card)
remoteCall(card.controller,'UnionCasino',[card,mainBet,targetDude,'resolve'])
else:
betPlayers = 1
for player in getActivePlayers():
if player != me and findMarker(card, ':{}'.format(player.name)): betPlayers += 1
if betPlayers == len(getActivePlayers()): # We compare to see if the controller won only after all players have finished betting.
highBet = 0
highBetter = None
for player in getActivePlayers():
if player != me:
zeroMarker = findMarker(card, 'Union Casino Zero Bet:{}'.format(player.name))
if zeroMarker:
card.markers[zeroMarker] = 0
continue
else:
                  currBet = findMarker(card, ':{}'.format(player.name))
                  if highBet < card.markers[currBet]:
                     highBet = card.markers[currBet]
                     highBetter = player
                  card.markers[currBet] = 0 # Clear each player's bet marker once it has been compared.
if mainBet >= (highBet + 4) or not highBetter:
targetDude.markers[mdict['PermControlPlus']] += 1
notify(":> {} outbid all other players by {} and thus {} gains a permanent control point".format(me,mainBet - highBet,targetDude))
else: notify(":> {} checked the bet by raising {} to {}'s {}".format(me,highBet,card.controller,mainBet))
def PlasmaDrill(card):
mute()
production = compileCardStat(card, stat = 'Production')
if production > me.GhostRock: extraTXT = "\n\nAttention! You do not seem to have enough Ghost Rock to save this deed from the Plasma Drill. Pay anyway?\
\n(Saying yes will bring your ghost rock bank to the negative)"
else: extraTXT = ''
if confirm("Do you want to pay {} to save {} from the Plasma Drill?{}".format(production,card.name,extraTXT)) and payCost(production) != 'ABORT':
notify(":> {} pays {} to repair {} from the damage inflicted by the Plasma Drill".format(me,production,card))
else:
discard(card,silent = True)
notify(":> {} is damaged beyond repair by the plasma drill and is discarded".format(card))
def TelepathyHelmet(originator,card):
mute()
notify("{}'s {} is revealing {} hand...".format(originator,card,me))
if originator != me: me.hand.addViewer(originator)
update()
#remoteCall(originator,'WhisperCards',[me,[c for c in me.hand]])
while not confirm("You are now revealing your hand to {}. Press Yes to continue, Press No to ping the other player to see if they had enough time to see the cards".format(originator.name)):
notify("{} wants to know if it's OK to hide their hand once more".format(me))
if originator != me: me.hand.removeViewer(originator)
notify("{} hides their play hand once more".format(me))
def WhisperCards(player,cardList):
mute()
initText = "{} is revealing:\n".format(player)
for c in cardList:
initText += "- {}\n".format(c)
whisper(initText)
def BottomDealing(originPlayer,card):
drawhandMany(originPlayer.Deck, 5, True,scripted = True)
if getGlobalVariable('Shootout') == 'True': Drawtype = 'lowball'
else: Drawtype = 'shootout' # These two are reversed so that the jokers are always used for their worst value comparatively.
rnd(1,10)
resultTXT = revealHand(me.piles['Draw Hand'], type = Drawtype, event = None, silent = True)
notify("{}'s new hand rank is {}".format(me,resultTXT))
passPileControl(originPlayer.Deck,originPlayer)
passPileControl(originPlayer.piles['Discard Pile'],originPlayer)
def AllieHensmanXP(mark,allie):
mute()
markInfluence = compileCardStat(mark, stat = 'Influence')
if confirm("Do you want to pay {} to {} to avoid discarding {}?".format(markInfluence,allie.controller.name,mark.Name)):
        if me.GhostRock >= markInfluence or confirm("You do not seem to have enough Ghost Rock left. Bypass?"): # Bypassing pays anyway, taking the bank negative.
me.GhostRock -= markInfluence
allie.controller.GhostRock += markInfluence
notify("{} corners {} and extracts {} Ghost Rock from {} for their safety".format(allie,mark,markInfluence,mark.controller))
else:
discard(mark,silent = True)
notify("{} couldn't afford {}'s tax and has to discard {}".format(mark.controller,allie,mark))
else:
discard(mark,silent = True)
notify("{} couldn't afford {}'s tax and has to discard {}".format(mark.controller,allie,mark))
def TWHITM(targetCards, discardCard = True): # This Will Hurt in the Morning
mute()
for card in targetCards:
if discardCard: discard(card)
else:
if me.GhostRock >= 1 and confirm("Pay 1 Ghost Rock to prevent {} from being aced?".format(card.name)):
me.GhostRock -= 1
discard(card)
notify("{} pays 1 ghost rock to avoid acing {}".format(me,card))
else:
ace(card)
for c in table:
if c.highlight == DrawHandColor: c.moveTo(me.piles['Draw Hand']) # We move the remaining card back to the draw hand to be able to calculate value again
drawhandMany(me.Deck, 2, True,scripted = True)
if getGlobalVariable('Shootout') == 'True': Drawtype = 'shootout'
else: Drawtype = 'lowball'
resultTXT = revealHand(me.piles['Draw Hand'], type = Drawtype, event = None, silent = True)
notify("{}'s new hand rank is {}".format(me,resultTXT))
def TaxOffice(dude):
mute()
upkeep = compileCardStat(dude, stat = 'Upkeep')
if upkeep:
if not confirm("Pay {} Ghost Rock to retain this dude this turn?".format(upkeep)):
discard(dude)
else:
msg = payCost(upkeep, MSG = "You do you not seem to have enough ghost rock to pay your taxes varmint! Bypass?")
if msg == 'ABORT': discard(dude)
else:
modProd(dude, upkeep, True)
TokensX('Put{}UpkeepPrePaid'.format(upkeep), '', dude)
notify("{} pays the tax required to retain {}".format(me,dude))
else: notify(":> {} has 0 upkeep, so their accounts were already in order.".format(dude))
def FuntimeFreddyChoose(card,spell1,spell2):
mute()
notify("{} is choosing which hex to ace from {}'s ability".format(me,card))
acedSpell = None
whisper(":::CHOICE::: Choose which hex to ace")
while not acedSpell: acedSpell = askCardFromList([spell1,spell2],'Choose which spell to ace')
if acedSpell == spell1: savedSpell = spell2
else: savedSpell = spell1
remoteCall(card.controller,'FuntimeFreddyFinish',[card,acedSpell,savedSpell,me])
def FuntimeFreddyFinish(card,acedSpell,savedSpell,acingPlayer):
mute()
ace(acedSpell, silent = True)
ace(card, silent = True)
hostCard = findHost(savedSpell)
if hostCard:
attachCard(savedSpell,hostCard)
payCost(savedSpell.Cost)
savedSpell.highlight = None
handDiscard = None
while not handDiscard:
handDiscard = askCardFromList([c for c in me.hand],"Choose which card to discard from your hand")
handDiscard.moveTo(me.piles['Discard Pile'])
notify("{} discarded {} and aced {} to fetch and play {} (paying {}) on {} and {} chose to ace {}".format(me,handDiscard,card,savedSpell,savedSpell.Cost,hostCard,acingPlayer,acedSpell))
def CookinTroubleStart(card):
mute()
if not len(me.hand): notify(":::INFO::: {}'s play hand is empty. You have nothing to cook".format(me))
else:
me.hand.addViewer(card.controller)
notify(":> {} Reveals their hand to {}".format(me,card.controller))
remoteCall(card.controller,'CookinTroubleChoose',[card,[c for c in me.hand]])
def CookinTroubleChoose(card,handList):
mute()
update()
whisper(":::CHOICE::: If your opponent cheated this turn, choose an action, goods, or spell to discard.")
    cardChoice = askCardFromList([c for c in handList],'Choose an action, goods, or spell card to discard.')
    if cardChoice == None:
        notify("{} does not sabotage any card in {}'s hand".format(me,handList[0].controller))
    else: # Only validate the choice when one was actually made.
        while cardChoice.Type != 'Action' and cardChoice.Type != 'Goods' and cardChoice.Type != 'Spell': # If they chose a non-action, we force them to choose an action.
            if confirm("You cannot select cards which are not action, goods or spell to discard with Cookin' Up Trouble. Do you want to choose nothing?"):
                notify("{} does not sabotage any card in {}'s hand".format(me,handList[0].controller))
                cardChoice = None
                break
            else:
                actionsList = [c for c in handList if (c.Type == 'Action' or c.Type == 'Goods' or c.Type == 'Spell')]
                if not actionsList:
                    notify("{} does not find any appropriate cards in {}'s hand to sabotage".format(me,handList[0].controller))
                    cardChoice = None
                    break
                else:
                    cardChoice = askCardFromList(actionsList,'Choose an action, goods, or spell card to discard.')
                    if cardChoice == None:
                        notify("{} does not sabotage any card in {}'s hand".format(me,handList[0].controller))
                        break
    remoteCall(handList[0].controller,'CookinTroubleEnd',[card,cardChoice])
def CookinTroubleEnd(card,cardChoice):
mute()
if cardChoice:
cardChoice.moveTo(me.piles['Discard Pile'])
notify("{}'s {} sabotages {} out of {}'s play hand".format(card.controller,card,cardChoice,me))
me.hand.removeViewer(card.controller)
def NathanShaneStart(card):
mute()
bullets = compileCardStat(card, stat = 'Bullets')
if bullets > len(me.hand):
bullets = len(me.hand)
if not len(me.hand): notify(":::INFO::: {}'s play hand is empty. Nathan has nothing to snipe".format(me))
    elif not bullets: notify(":::INFO::: {} currently has 0 bullets and no capacity to snipe anything".format(card))
else:
randomCards = []
for iter in range(bullets):
randomC = me.hand.random()
randomCards.append(randomC)
randomC.moveTo(me.ScriptingPile)
notify(":> {} Reveals {} random cards to {}".format(me,bullets,card.controller))
remoteCall(card.controller,'NathanShaneChoose',[card,[c for c in me.ScriptingPile]])
def NathanShaneChoose(card,handList):
mute()
update()
cardChoice = askCardFromList([c for c in handList],"Choose action card to discard.")
if cardChoice == None:
notify("{} does not snipe any card in {}'s hand".format(me,handList[0].controller))
else:
while cardChoice.Type != 'Action': # If they chose a non-action, we force them to choose an action.
if confirm("You cannot select non-action cards to discard with Nathan's ability. Do you want to choose nothing?"):
notify("{} does not snipe any card in {}'s hand".format(me,handList[0].controller))
cardChoice = None
break
else:
actionsList = [c for c in handList if c.Type == 'Action']
if not actionsList:
notify("{} does not find any action in {}'s hand to snipe".format(me,handList[0].controller))
cardChoice = None
break
else:
cardChoice = askCardFromList(actionsList,"Choose action card to discard.")
if cardChoice == None:
notify("{} does not snipe any card in {}'s hand".format(me,handList[0].controller))
break
remoteCall(handList[0].controller,'NathanShaneEnd',[card,cardChoice])
def NathanShaneEnd(card,cardChoice):
mute()
if cardChoice:
cardChoice.moveTo(me.piles['Discard Pile'])
notify("{}'s {} snipes {} out of {}'s play hand".format(card.controller,card,cardChoice,me))
for c in me.ScriptingPile: c.moveTo(me.hand)
def MarciaRidgeStart(marcia,usedDeed):
usedDeed.controller = marcia.controller
remoteCall(marcia.controller,'MarciaRidgeDeedUse',[marcia,usedDeed,me])
def MarciaRidgeDeedUse(marcia,usedDeed,origController):
useAbility(usedDeed)
usedDeed.controller = origController
def RicoStart(card):
mute()
if not len(me.hand): notify(":::INFO::: {}'s play hand is empty for some reason!".format(me))
else:
me.hand.addViewer(card.controller)
notify(":> {} Reveals their hand to {}".format(me,card.controller))
remoteCall(card.controller,'RicoView',[card,[c for c in me.hand]])
def RicoView(card,handList):
mute()
update()
askCardFromList([c for c in handList],"This is your opponent's current hand. Double click a card or close this window to continue")
whisper("Reminder: The opponent's hand contained: {}".format([c.Name for c in handList]))
remoteCall(handList[0].controller,'RicoStopView',[card])
if confirm("Do you want to retain your current starting gang? (In order to save time)"):
notify(":> {} opts to retain their current starting gang without change")
else:
startingDudesNR = 0
for c in table:
if c.Type == 'Dude' and c.controller == me and not re.search(r'Grifter',c.Keywords):
clearAttachLinks(c)
me.GhostRock += num(c.Cost)
c.moveTo(me.Deck)
startingDudesNR += 1
selectedDudesNR = 0
if startingDudesNR:
me.Deck.addViewer(me)
while selectedDudesNR < startingDudesNR:
choiceDude = askCardFromList([c for c in me.Deck if c.Type == 'Dude' and not re.search(r'Grifter',c.Keywords) and not re.search(r'Gadget',c.Keywords)],"Select dude to add to your starting posse ({}/{})\n Close the window to finish".format(selectedDudesNR + 1,startingDudesNR))
if not choiceDude: break
placeCard(choiceDude,'SetupDude',selectedDudesNR)
payCost(choiceDude.Cost)
selectedDudesNR += 1
me.Deck.removeViewer(me)
me.Deck.shuffle()
announceText = "{}'s new starting gang is ".format(me)
for dude in [c for c in table if c.controller == me and c.Type != 'Outfit' and c.Type != "Token"]:
announceText += "{}, ".format(dude)
notify(announceText)
def RicoStopView(card):
mute()
if card.controller != me: me.hand.removeViewer(card.controller) # If clause, for debug purposes
def JaelGuile(card):
mute()
for iter in range(2):
choiceDudes = findTarget('AutoTargeted-atDude-hasMarker{Bounty}-targetMine-isParticipating')
if not len(choiceDudes): choiceDudes = findTarget('AutoTargeted-atDude-targetMine-isParticipating')
if len(choiceDudes):
choiceDude = askCardFromList(choiceDudes,"Select a dude to be hit by {} ({}/2). \nAn unbooted dude will boot. A Booted dude will be discarded".format(card.Name,iter + 1))
if choiceDude.orientation == Rot0: boot(choiceDude)
else: discard(choiceDude)
def RickHenderson(dude,rick):
mute()
if not confirm("Pay 1 Ghost Rock to {} to retain {}?".format(rick.Name,dude.Name)): discard(dude)
else:
msg = payCost(1, MSG = "You do you not seem to have enough ghost rock to pay off {}! Bypass?".format(rick.Name))
if msg == 'ABORT': discard(dude)
else:
rick.controller.GhostRock += 1
notify("{} pays off {} to retain {}".format(me,rick,dude))
def Framed(card,dude):
if dude.orientation != Rot90 and me.GhostRock > 0 and confirm('Do you want to boot {} and pay 1 Ghost Rock to {} to avoid giving them a bounty?'.format(dude.Name,card.controller)):
dude.orientation = Rot90
me.GhostRock -= 1
card.controller.GhostRock += 1
notify(":> {} boots {} and pays 1 Ghost Rock to {} to avoid becoming wanted".format(me,dude,card.controller))
return
dude.markers[mdict['Bounty']] += 1
notify(":> {} succesfull framed {} for 1 bounty".format(card.controller,dude))
def SightBeyondSightStart(card):
mute()
if not len(me.hand):
notify(":::INFO::: {}'s play hand is empty. Sight Beyond Sight has nothing to discard".format(me))
return 'ABORT'
else:
randomCards = []
if len(me.hand) >= 2:
for iter in range(2):
randomC = me.hand.random()
randomCards.append(randomC)
randomC.moveTo(me.ScriptingPile)
notify(":> {} Reveals 2 random cards to {}".format(me,card.controller))
remoteCall(card.controller,'SightBeyondSightChoose',[card,[c for c in me.ScriptingPile]])
else:
randomC = me.hand.random()
randomCards.append(randomC)
randomC.moveTo(me.ScriptingPile)
notify(":> {} Reveals 1 random cards to {}".format(me,card.controller))
remoteCall(card.controller,'SightBeyondSightChoose',[card,[c for c in me.ScriptingPile]])
def SightBeyondSightChoose(card,handList):
mute()
update()
cardChoice = askCardFromList([c for c in handList],"Choose one non-Unique card to ace or close this window to ace none.")
if cardChoice == None:
notify("{} does not hex any card in {}'s hand".format(me,handList[0].controller))
else:
while ((cardChoice.Type == 'Dude' or cardChoice.Type == 'Deed') and not re.search(r'Non-Unique',cardChoice.Keywords)) or ((cardChoice.Type == 'Goods' or cardChoice.Type == 'Action') and re.search(r'Unique',cardChoice.Keywords)): # If they chose a unique card, we force them to choose an non-unique.
if confirm("You cannot select unique cards to ace with Sight Beyond Sight's ability. Do you want to choose nothing?"):
notify("{} does not hex any card in {}'s hand".format(me,handList[0].controller))
cardChoice = None
break
else:
cardList = [c for c in handList if ((c.Type == 'Dude' or c.Type == 'Deed') and re.search(r'Non-Unique',c.Keywords)) or ((c.Type == 'Goods' or c.Type == 'Action') and not re.search(r'Unique',c.Keywords))]
if not cardList:
notify("{} does not find any non-unique in {}'s hand to hex".format(me,handList[0].controller))
cardChoice = None
break
else:
cardChoice = askCardFromList(cardList,"Choose non-unique card to hex.")
if cardChoice == None:
notify("{} does not hex any card in {}'s hand".format(me,handList[0].controller))
break
if cardChoice:
card.moveTo(me.piles['Boot Hill'])
notify("{}'s {} hex was aced to use its ability.".format(me, card))
remoteCall(handList[0].controller,'SightBeyondSightEnd',[card,cardChoice])
def SightBeyondSightEnd(card,cardChoice):
mute()
if cardChoice:
cardChoice.moveTo(me.piles['Boot Hill'])
notify("{}'s {} hexes {} out of {}'s play hand".format(card.controller,card,cardChoice,me))
for c in me.ScriptingPile: c.moveTo(me.hand)
def BurnOut(card):
mute()
if not len(me.hand):
notify(":::INFO::: {}'s play hand is empty. There is no card to ace.".format(me))
return 'ABORT'
else:
cards = me.hand
remoteCall(card.controller,'BurnOutChoice',[card, cards])
def BurnOutChoice(card, cards):
    update()
    cardChoice = askCardFromList([c for c in cards],"Choose one card to ace")
    if cardChoice == None: return # The chooser closed the window without picking a card to ace.
    remoteCall(cardChoice.controller, 'BurnOutEnd', [card, cardChoice])
def BurnOutEnd(card, cardChoice):
    cardChoice.moveTo(me.piles['Boot Hill'])
    notify("{}'s {} aces {} out of {}'s play hand".format(card.controller,card,cardChoice,me))
def chkHenryMoran(type):
if type == 'lowball':
for card in table:
if card.Name == 'Henry Moran' and card.controller == me and card.orientation == Rot0:
notify(":> {} is about to reveal a cheating hand in lowball, so {} is booting and forcing them to reveal the top 5 cards of their deck instead".format(me,card))
boot(card, silent = True)
for c in table:
if c.highlight == DrawHandColor and c.controller == me: c.moveTo(c.owner.piles['Discard Pile']) # Henry always discards, and won't ace Jokers.
drawhandMany(count = 5, silent = True, scripted = True)
revealHand(me.piles['Draw Hand'], 'lowball')
return True
return False
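# (Assumption from the True/False returns above: a True result tells the caller
# that Henry Moran already handled the lowball reveal, so the normal reveal is
# skipped for this player.)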
def DeniseBrancini():
for c in me.piles['Boot Hill']:
        if c.Type == 'Joker': c.moveTo(me.Deck)
shuffle(me.Deck)
def ExpLodge(dude):
if me.GhostRock > 1:
choice = confirm("Do you want to pay 2GR to unboot {}?.".format(dude.name))
if choice:
boot(dude, forced = 'unboot')
me.GhostRock -= 2
notify(":> {} pays 2GR to unboot{}".format(dude.controller, dude))
def GSOD(card,dude):
choice = confirm("Do you want {} to participate in the shootout".format(dude))
if choice:
participateDude(dude)
return choice
else:
remoteCall(card.controller,"GSOD2", card)
def GSOD2(card):
dude = findTarget("DemiAutoTargeted-atDude-targetMine-isParticipating-choose1")
TokensX('Put1Shootout:Stud', '', dude[0])
TokensX('Put2BulletShootoutPlus', '', dude[0])
notify("Your opponent take two casualties if he reveals cheating hand this round of shootout!!!!!")
def Censure(card,dudes):
ModifyStatus('SendHomeBootedTarget-DemiAutoTargeted-atDude', '',card, dudes)
def Intercession(oDude):
TokensX('Put2BulletShootoutPlus', '', oDude)
boot(oDude, forced = 'unboot')
def chkPropertyIsTheft(type):
if type == 'shootout':
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
for card in table:
if card.Name == 'Property Is Theft' and card.controller == player:
if me.GhostRock > 0:
payCost(1)
else: return
if not len([c for c in table if c.model == 'cd31eabe-e2d8-49f7-b4de-16ee4fedf3c1' and c.controller == player]):
player.GhostRock += 1
notify("{} got caught cheatin' during shootout and paid {} 1 Ghost Rock for it".format(me, player))
else:
notify("{} got caught cheatin' during shootout and paid bank 1 Ghost Rock for it.".format(me))
return True
return False
def Buskers(targetDude):
if confirm("Do you want pay {} 2GR to avoid booting {}?".format(opponent, targetDude[0])):
me.GhostRock -= 2
opponents = [player for player in getPlayers() if player != me or len(getPlayers()) == 1]
if len(opponents) == 1: player = opponents[0]
player.GhostRock +=2
notify("{} decided to pay 2 GR to avoid booting {}".format(me,targetDude[0]))
return
else:
boot(targetDude[0], silent = True)
return
def Murdered(card):
disCR = [c for c in me.piles['Discard Pile'] if ("Action" in c.Type and re.search(r'Cheatin',c.Text))]
deckCR = [c for c in me.piles['Deck'] if ("Action" in c.Type and re.search(r'Cheatin',c.Text))]
string = len(disCR)
cards = disCR + deckCR
if len(cards) == 0:
notify("You have no cards you can fetch with this ability")
return
card = askCardFromList(cards, "Choose a CR to fetch if you do you will have to discard a card from your hand.First {} are in your discard".format(string))
card.moveTo(me.hand)
if card:
choicehand = askCardFromList([c for c in me.hand],'Choose which card to discard from your hand',card.Name)
choicehand.moveTo(me.piles['Discard Pile'])
else: return
notify("{} fetched {} using Murdered in Tombstone ability and discarded {}".format(me, card.name, choicehand))
return
def Huckleberry(card):
    targetDude = findTarget('DemiAutoTargeted-atDude-targetMine-isParticipating-choose1', choiceTitle = "Choose a dude that will be controlled by your opponent till the end of the shootout.")
if targetDude[0].highlight == AttackColor:
targetDude[0].highlight = DefendColor
else:
targetDude[0].highlight = AttackColor
TokensX('Put1Shootout:Huckleberry', '', targetDude[0])
notify('{} takes control over {} till the end of the shootout'.format(card.controller, targetDude[0].name))
return
'''
#target opponents participating dude
topd = findTarget('DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1')
topDude = topd[0]
#target my participating dude
tmpd = findTarget('DemiAutoTargeted-atDude-targetMine-isParticipating-choose1')
tmpDude = tmpd[0]
#target opponents dude
tod = findTarget('DemiAutoTargeted-atDude-targetOpponents-choose1')
toDude = tod[0]
#target my dude
tmd = findTarget('DemiAutoTargeted-atDude-targetMine-choose1')
tmDude = tmd[0]
    #target a dude's attachment
    dudeGoods = findTarget('DemiAutoTargeted-atGoods_or_Spell-onAttachment-isUnbooted-choose1', card = DUDECARD)
#discarding card from hand
choicehand = askCardFromList([c for c in me.hand],'Choose which card to discard from your hand',card.Name)
choicehand.moveTo(me.piles['Discard Pile'])
#booting and unbooting cards
boot(CARD , forced = 'unboot')
boot(CARD, forced = 'boot')
#example of searching cards
    if re.search(r'(Ranch|Improvement)',c.Keywords):
#discard pile, deck, boot hill and moving cards to pile
discardPile = me.piles['Discard Pile']
deck = me.piles['Deck']
bootHill = me.piles['Boot Hill']
CARD.moveTo(me.piles['Discard Pile'])
#examples of modifying statuses
ModifyStatus('SendHomeBootedTarget-DemiAutoTargeted-atDude', '',card, dudes)
ModifyStatus('MoveTarget-moveToDeed_or_Town Square_or_Outfit', '', card, dudesPub)
#MARKERS AND SIMILAR
dude.markers[mdict['Bounty']] += 1
'''
| agpl-3.0 |
NikNitro/Python-iBeacon-Scan | sympy/core/tests/test_count_ops.py | 36 | 4575 | from sympy import symbols, sin, exp, cos, Derivative, Integral, Basic, \
count_ops, S, And, I, pi, Eq, Or, Not, Xor, Nand, Nor, Implies, \
Equivalent, MatrixSymbol, Symbol, ITE
from sympy.core.containers import Tuple
x, y, z = symbols('x,y,z')
a, b, c = symbols('a,b,c')
def test_count_ops_non_visual():
def count(val):
return count_ops(val, visual=False)
assert count(x) == 0
assert count(x) is not S.Zero
assert count(x + y) == 1
assert count(x + y) is not S.One
assert count(x + y*x + 2*y) == 4
assert count({x + y: x}) == 1
assert count({x + y: S(2) + x}) is not S.One
assert count(Or(x,y)) == 1
assert count(And(x,y)) == 1
assert count(Not(x)) == 1
assert count(Nor(x,y)) == 2
assert count(Nand(x,y)) == 2
assert count(Xor(x,y)) == 1
assert count(Implies(x,y)) == 1
assert count(Equivalent(x,y)) == 1
assert count(ITE(x,y,z)) == 1
assert count(ITE(True,x,y)) == 0
def test_count_ops_visual():
ADD, MUL, POW, SIN, COS, EXP, AND, D, G = symbols(
'Add Mul Pow sin cos exp And Derivative Integral'.upper())
DIV, SUB, NEG = symbols('DIV SUB NEG')
NOT, OR, AND, XOR, IMPLIES, EQUIVALENT, ITE, BASIC, TUPLE = symbols(
'Not Or And Xor Implies Equivalent ITE Basic Tuple'.upper())
def count(val):
return count_ops(val, visual=True)
assert count(7) is S.Zero
assert count(S(7)) is S.Zero
assert count(-1) == NEG
assert count(-2) == NEG
assert count(S(2)/3) == DIV
assert count(pi/3) == DIV
assert count(-pi/3) == DIV + NEG
assert count(I - 1) == SUB
assert count(1 - I) == SUB
assert count(1 - 2*I) == SUB + MUL
assert count(x) is S.Zero
assert count(-x) == NEG
assert count(-2*x/3) == NEG + DIV + MUL
assert count(1/x) == DIV
assert count(1/(x*y)) == DIV + MUL
assert count(-1/x) == NEG + DIV
assert count(-2/x) == NEG + DIV
assert count(x/y) == DIV
assert count(-x/y) == NEG + DIV
assert count(x**2) == POW
assert count(-x**2) == POW + NEG
assert count(-2*x**2) == POW + MUL + NEG
assert count(x + pi/3) == ADD + DIV
assert count(x + S(1)/3) == ADD + DIV
assert count(x + y) == ADD
assert count(x - y) == SUB
assert count(y - x) == SUB
assert count(-1/(x - y)) == DIV + NEG + SUB
assert count(-1/(y - x)) == DIV + NEG + SUB
assert count(1 + x**y) == ADD + POW
assert count(1 + x + y) == 2*ADD
assert count(1 + x + y + z) == 3*ADD
assert count(1 + x**y + 2*x*y + y**2) == 3*ADD + 2*POW + 2*MUL
assert count(2*z + y + x + 1) == 3*ADD + MUL
assert count(2*z + y**17 + x + 1) == 3*ADD + MUL + POW
assert count(2*z + y**17 + x + sin(x)) == 3*ADD + POW + MUL + SIN
assert count(2*z + y**17 + x + sin(x**2)) == 3*ADD + MUL + 2*POW + SIN
assert count(2*z + y**17 + x + sin(
x**2) + exp(cos(x))) == 4*ADD + MUL + 2*POW + EXP + COS + SIN
assert count(Derivative(x, x)) == D
assert count(Integral(x, x) + 2*x/(1 + x)) == G + DIV + MUL + 2*ADD
assert count(Basic()) is S.Zero
assert count({x + 1: sin(x)}) == ADD + SIN
assert count([x + 1, sin(x) + y, None]) == ADD + SIN + ADD
assert count({x + 1: sin(x), y: cos(x) + 1}) == SIN + COS + 2*ADD
assert count({}) is S.Zero
assert count([x + 1, sin(x)*y, None]) == SIN + ADD + MUL
assert count([]) is S.Zero
assert count(Basic()) == 0
assert count(Basic(Basic(),Basic(x,x+y))) == ADD + 2*BASIC
assert count(Basic(x, x + y)) == ADD + BASIC
assert count(Or(x,y)) == OR
assert count(And(x,y)) == AND
assert count(And(x**y,z)) == AND + POW
assert count(Or(x,Or(y,And(z,a)))) == AND + OR
assert count(Nor(x,y)) == NOT + OR
assert count(Nand(x,y)) == NOT + AND
assert count(Xor(x,y)) == XOR
assert count(Implies(x,y)) == IMPLIES
assert count(Equivalent(x,y)) == EQUIVALENT
assert count(ITE(x,y,z)) == ITE
assert count([Or(x,y), And(x,y), Basic(x+y)]) == ADD + AND + BASIC + OR
assert count(Basic(Tuple(x))) == BASIC + TUPLE
#It checks that TUPLE is counted as an operation.
assert count(Eq(x + y, S(2))) == ADD
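# A quick illustration of the two modes, consistent with the asserts above:
# count_ops(x + y*x, visual=False) == 2, while
# count_ops(x + y*x, visual=True) == ADD + MUL (a symbolic per-operation tally).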
def test_issue_9324():
def count(val):
return count_ops(val, visual=False)
M = MatrixSymbol('M', 10, 10)
assert count(M[0, 0]) == 0
assert count(2 * M[0, 0] + M[5, 7]) == 2
P = MatrixSymbol('P', 3, 3)
Q = MatrixSymbol('Q', 3, 3)
assert count(P + Q) == 3
m = Symbol('m', integer=True)
n = Symbol('n', integer=True)
M = MatrixSymbol('M', m + n, m * m)
assert count(M[0, 1]) == 2
| gpl-3.0 |
qbuat/hhntup | pbs.py | 4 | 4980 | #!/usr/bin/env python
import subprocess
from subprocess import call
import getpass
import time
import datetime
import os
import errno
def mkdir_p(path):
"""
mkdir -p functionality
http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python
"""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
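# e.g. mkdir_p("logs/2015/run1") creates any missing intermediate directories
# and is a silent no-op if the full path already exists as a directory.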
def print_table(table, sep=' '):
# Reorganize data by columns
cols = zip(*table)
# Compute column widths by taking maximum length of values per column
col_widths = [max(len(str(value)) for value in col) for col in cols]
# Create a suitable format string
format = sep.join(['%%-%ds' % width for width in col_widths])
# Print each row using the computed format
for row in table:
print format % tuple(row)
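# Usage sketch (hypothetical rows); each column is left-justified to the width
# of its longest value:
# print_table([("id", "state"), ("12345.host", "R")])
# prints:
# id         state
# 12345.host R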
class Job(object):
def __init__(self, id, info):
self.id = id
self.info = info
def __getattr__(self, attr):
return self.info[attr]
@property
def name(self):
return self.info['Job_Name']
@property
def hung(self):
# is the wall time higher than the CPU time by 50%?
return self.walltime > 1.5 * self.cputime and self.walltime > 60
@property
def healthy(self):
return not self.hung
@property
def health_status(self):
# is the wall time higher than the CPU time?
if self.healthy:
return 'GOOD'
return 'HUNG'
@property
def cputime(self):
if 'resources_used.cput' not in self.info:
return 0
x = map(int, self.info['resources_used.cput'].split(':'))
return datetime.timedelta(hours=x[0],minutes=x[1],seconds=x[2]).total_seconds()
@property
def walltime(self):
if 'resources_used.walltime' not in self.info:
return 0
x = map(int, self.info['resources_used.walltime'].split(':'))
return datetime.timedelta(hours=x[0],minutes=x[1],seconds=x[2]).total_seconds()
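    # e.g. a 'resources_used.cput' of "01:02:03" parses to 3723.0 seconds via
    # timedelta(hours=1, minutes=2, seconds=3).total_seconds().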
@property
def host(self):
if 'exec_host' in self.info:
return self.info['exec_host']
return '-'
@property
def status(self):
return (self.id,
self.info['job_state'],
self.host,
self.info['Job_Name'],
self.cputime,
self.walltime,
self.health_status)
class PBSMonitor(object):
def __init__(self):
self.user = getpass.getuser()
self.jobs = {}
self.job_names = {}
self.update()
def update(self):
qstat = subprocess.Popen(
['qstat', '-f', '-1'],
stdout=subprocess.PIPE).communicate()[0]
jobs = qstat.split('\n\n')
self.jobs = {}
for block in jobs:
if not block:
continue
block = block.split('\n')
user = block[2].split(' = ')[-1].split('@')[0]
if self.user != user:
continue
info = {}
jobid = block[0].split(': ')[-1]
for line in block[1:]:
param, value = line.split(' = ')
info[param.strip()] = value.strip()
job = Job(jobid, info)
self.job_names[job.name] = jobid
self.jobs[jobid] = job
def has_jobname(self, name):
return name in self.job_names
def print_jobs(self):
rows = []
for id, job in sorted(self.jobs.items(),
key=lambda item: int(item[0].split('.')[0])):
rows.append(job.status)
print_table(rows)
MONITOR = PBSMonitor()
def qsub(cmd,
queue='medium',
ppn=1,
mem=None,
vmem=None,
pmem=None,
stderr_path=None,
stdout_path=None,
name=None,
dry_run=False):
MONITOR.update()
kwargs = {}
if name is not None:
if MONITOR.has_jobname(name):
print "job {0} already exists".format(name)
return
kwargs['-N'] = name
if stderr_path is not None:
kwargs['-e'] = stderr_path
if stdout_path is not None:
kwargs['-o'] = stdout_path
args = ' '.join(['%s "%s"' % arg for arg in kwargs.items()])
resources = 'nodes=1:ppn={0:d}'.format(ppn)
if mem is not None:
resources += ',mem={0}'.format(mem)
if vmem is not None:
resources += ',vmem={0}'.format(vmem)
if pmem is not None:
resources += ',pmem={0}'.format(pmem)
cmd = "echo '{0}' | qsub -q {1} {2} -l {3}".format(
cmd, queue, args, resources)
print cmd
if not dry_run:
if stderr_path and not os.path.exists(stderr_path):
mkdir_p(stderr_path)
if stdout_path and not os.path.exists(stdout_path):
mkdir_p(stdout_path)
call(cmd, shell=True)
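# Example invocation (hypothetical script/queue/paths); with dry_run=True the
# generated qsub command line is only printed, nothing is submitted and no log
# directories are created:
# qsub("python train.py", queue="long", ppn=4, mem="8gb",
#      stdout_path="logs/out", stderr_path="logs/err",
#      name="train-job", dry_run=True)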
if __name__ == '__main__':
MONITOR.print_jobs()
| gpl-3.0 |
dgarnier/pyms | Peak/List/Utils.py | 7 | 3335 | """
Utilities for manipulation of peak lists
"""
#############################################################################
# #
# PyMS software for processing of metabolomic mass-spectrometry data #
# Copyright (C) 2005-2012 Vladimir Likic #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. #
# #
#############################################################################
import math
from pyms.Peak.Class import Peak
from pyms.Utils.Error import error
from pyms.Utils.Utils import is_list, is_str
from pyms.Utils.Time import time_str_secs
def is_peak_list(peaks):
"""
@summary: Returns True if 'peaks' is a valid peak list, False
otherwise
@param peaks: A list of peak objects
@type peaks: ListType
@return: A boolean indicator
@rtype: BooleanType
@author: Vladimir Likic
"""
flag = True
if not is_list(peaks):
flag = False
else:
for item in peaks:
if not isinstance(item, Peak):
flag = False
return flag
def sele_peaks_by_rt(peaks, rt_range):
"""
@summary: Selects peaks from a retention time range
@param peaks: A list of peak objects
@type peaks: ListType
@param rt_range: A list of two time strings, specifying lower and
upper retention times
@type rt_range: ListType
@return: A list of peak objects
@rtype: ListType
"""
if not is_peak_list(peaks):
error("'peaks' not a peak list")
if not is_list(rt_range):
error("'rt_range' not a list")
else:
if len(rt_range) != 2:
error("'rt_range' must have exactly two elements")
if not is_str(rt_range[0]) or not is_str(rt_range[1]):
error("lower/upper retention time limits must be strings")
rt_lo = time_str_secs(rt_range[0])
rt_hi = time_str_secs(rt_range[1])
if not rt_lo < rt_hi:
error("lower retention time limit must be less than upper")
peaks_sele = []
for peak in peaks:
rt = peak.get_rt()
if rt > rt_lo and rt < rt_hi:
peaks_sele.append(peak)
#print "%d peaks selected" % (len(peaks_sele))
return peaks_sele
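# Minimal usage sketch (hypothetical values; the time strings must be in
# whatever format pyms' time_str_secs() accepts, e.g. a minutes suffix):
# peaks_in_window = sele_peaks_by_rt(peaks, ["6.5m", "21m"])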
| gpl-2.0 |
Richard2ndQuadrant/ansible | lib/ansible/plugins/callback/timer.py | 168 | 1125 | # Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from datetime import datetime
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""
This callback module tells you how long your plays ran for.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate'
CALLBACK_NAME = 'timer'
CALLBACK_NEEDS_WHITELIST = True
def __init__(self):
super(CallbackModule, self).__init__()
self.start_time = datetime.now()
def days_hours_minutes_seconds(self, runtime):
minutes = (runtime.seconds // 60) % 60
r_seconds = runtime.seconds - (minutes * 60)
return runtime.days, runtime.seconds // 3600, minutes, r_seconds
def playbook_on_stats(self, stats):
self.v2_playbook_on_stats(stats)
def v2_playbook_on_stats(self, stats):
end_time = datetime.now()
runtime = end_time - self.start_time
self._display.display("Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(runtime)))
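# Because CALLBACK_NEEDS_WHITELIST is True, the plugin must be enabled before
# it runs; for Ansible versions of this era that typically means adding to
# ansible.cfg (verify the option name against your version's docs):
# [defaults]
# callback_whitelist = timer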
| gpl-3.0 |
ovnicraft/edx-platform | common/test/acceptance/pages/lms/discussion.py | 16 | 25860 | from contextlib import contextmanager
from bok_choy.javascript import wait_for_js
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise, Promise
from .course_page import CoursePage
class DiscussionPageMixin(object):
def is_ajax_finished(self):
return self.browser.execute_script("return jQuery.active") == 0
class DiscussionThreadPage(PageObject, DiscussionPageMixin):
url = None
def __init__(self, browser, thread_selector):
super(DiscussionThreadPage, self).__init__(browser)
self.thread_selector = thread_selector
def _find_within(self, selector):
"""
Returns a query corresponding to the given CSS selector within the scope
of this thread page
"""
return self.q(css=self.thread_selector + " " + selector)
def is_browser_on_page(self):
return self.q(css=self.thread_selector).visible
def _get_element_text(self, selector):
"""
Returns the text of the first element matching the given selector, or
None if no such element exists
"""
text_list = self._find_within(selector).text
return text_list[0] if text_list else None
def _is_element_visible(self, selector):
query = self._find_within(selector)
return query.present and query.visible
@contextmanager
def _secondary_action_menu_open(self, ancestor_selector):
"""
Given the selector for an ancestor of a secondary menu, return a context
manager that will open and close the menu
"""
self._find_within(ancestor_selector + " .action-more").click()
EmptyPromise(
lambda: self._is_element_visible(ancestor_selector + " .actions-dropdown"),
"Secondary action menu opened"
).fulfill()
yield
if self._is_element_visible(ancestor_selector + " .actions-dropdown"):
self._find_within(ancestor_selector + " .action-more").click()
EmptyPromise(
lambda: not self._is_element_visible(ancestor_selector + " .actions-dropdown"),
"Secondary action menu closed"
).fulfill()
def get_group_visibility_label(self):
"""
Returns the group visibility label shown for the thread.
"""
return self._get_element_text(".group-visibility-label")
def get_response_total_text(self):
"""Returns the response count text, or None if not present"""
return self._get_element_text(".response-count")
def get_num_displayed_responses(self):
"""Returns the number of responses actually rendered"""
return len(self._find_within(".discussion-response"))
def get_shown_responses_text(self):
"""Returns the shown response count text, or None if not present"""
return self._get_element_text(".response-display-count")
def get_load_responses_button_text(self):
"""Returns the load more responses button text, or None if not present"""
return self._get_element_text(".load-response-button")
def load_more_responses(self):
"""Clicks the load more responses button and waits for responses to load"""
self._find_within(".load-response-button").click()
EmptyPromise(
self.is_ajax_finished,
"Loading more Responses"
).fulfill()
def has_add_response_button(self):
"""Returns true if the add response button is visible, false otherwise"""
return self._is_element_visible(".add-response-btn")
def click_add_response_button(self):
"""
Clicks the add response button and ensures that the response text
field receives focus
"""
self._find_within(".add-response-btn").first.click()
EmptyPromise(
lambda: self._find_within(".discussion-reply-new textarea:focus").present,
"Response field received focus"
).fulfill()
@wait_for_js
def is_response_editor_visible(self, response_id):
"""Returns true if the response editor is present, false otherwise"""
return self._is_element_visible(".response_{} .edit-post-body".format(response_id))
@wait_for_js
def is_discussion_body_visible(self):
return self._is_element_visible(".post-body")
def verify_mathjax_preview_available(self):
""" Checks that MathJax Preview css class is present """
self.wait_for(
lambda: len(self.q(css=".MathJax_Preview").text) > 0 and self.q(css=".MathJax_Preview").text[0] == "",
description="MathJax Preview is rendered"
)
def verify_mathjax_rendered(self):
""" Checks that MathJax css class is present """
self.wait_for(
lambda: self._is_element_visible(".MathJax"),
description="MathJax Preview is rendered"
)
def is_response_visible(self, comment_id):
"""Returns true if the response is viewable onscreen"""
return self._is_element_visible(".response_{} .response-body".format(comment_id))
def is_response_editable(self, response_id):
"""Returns true if the edit response button is present, false otherwise"""
with self._secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
return self._is_element_visible(".response_{} .discussion-response .action-edit".format(response_id))
def get_response_body(self, response_id):
return self._get_element_text(".response_{} .response-body".format(response_id))
def start_response_edit(self, response_id):
"""Click the edit button for the response, loading the editing view"""
with self._secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
self._find_within(".response_{} .discussion-response .action-edit".format(response_id)).first.click()
EmptyPromise(
lambda: self.is_response_editor_visible(response_id),
"Response edit started"
).fulfill()
def get_link_href(self):
"""Extracts href attribute of the referenced link"""
link_href = self._find_within(".post-body p a").attrs('href')
return link_href[0] if link_href else None
def get_response_vote_count(self, response_id):
return self._get_element_text(".response_{} .discussion-response .action-vote .vote-count".format(response_id))
def vote_response(self, response_id):
current_count = self._get_element_text(".response_{} .discussion-response .action-vote .vote-count".format(response_id))
self._find_within(".response_{} .discussion-response .action-vote".format(response_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: current_count != self.get_response_vote_count(response_id),
"Response is voted"
).fulfill()
def is_response_reported(self, response_id):
return self._is_element_visible(".response_{} .discussion-response .post-label-reported".format(response_id))
def report_response(self, response_id):
with self._secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
self._find_within(".response_{} .discussion-response .action-report".format(response_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: self.is_response_reported(response_id),
"Response is reported"
).fulfill()
def is_response_endorsed(self, response_id):
return "endorsed" in self._get_element_text(".response_{} .discussion-response .posted-details".format(response_id))
def endorse_response(self, response_id):
self._find_within(".response_{} .discussion-response .action-endorse".format(response_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: self.is_response_endorsed(response_id),
"Response edit started"
).fulfill()
def set_response_editor_value(self, response_id, new_body):
"""Replace the contents of the response editor"""
self._find_within(".response_{} .discussion-response .wmd-input".format(response_id)).fill(new_body)
def submit_response_edit(self, response_id, new_response_body):
"""Click the submit button on the response editor"""
self._find_within(".response_{} .discussion-response .post-update".format(response_id)).first.click()
EmptyPromise(
lambda: (
not self.is_response_editor_visible(response_id) and
self.is_response_visible(response_id) and
self.get_response_body(response_id) == new_response_body
),
"Comment edit succeeded"
).fulfill()
def is_show_comments_visible(self, response_id):
"""Returns true if the "show comments" link is visible for a response"""
return self._is_element_visible(".response_{} .action-show-comments".format(response_id))
def show_comments(self, response_id):
"""Click the "show comments" link for a response"""
self._find_within(".response_{} .action-show-comments".format(response_id)).first.click()
EmptyPromise(
lambda: self._is_element_visible(".response_{} .comments".format(response_id)),
"Comments shown"
).fulfill()
def is_add_comment_visible(self, response_id):
"""Returns true if the "add comment" form is visible for a response"""
return self._is_element_visible("#wmd-input-comment-body-{}".format(response_id))
def is_comment_visible(self, comment_id):
"""Returns true if the comment is viewable onscreen"""
return self._is_element_visible("#comment_{} .response-body".format(comment_id))
def get_comment_body(self, comment_id):
return self._get_element_text("#comment_{} .response-body".format(comment_id))
def is_comment_deletable(self, comment_id):
"""Returns true if the delete comment button is present, false otherwise"""
with self._secondary_action_menu_open("#comment_{}".format(comment_id)):
return self._is_element_visible("#comment_{} .action-delete".format(comment_id))
def delete_comment(self, comment_id):
with self.handle_alert():
with self._secondary_action_menu_open("#comment_{}".format(comment_id)):
self._find_within("#comment_{} .action-delete".format(comment_id)).first.click()
EmptyPromise(
lambda: not self.is_comment_visible(comment_id),
"Deleted comment was removed"
).fulfill()
def is_comment_editable(self, comment_id):
"""Returns true if the edit comment button is present, false otherwise"""
with self._secondary_action_menu_open("#comment_{}".format(comment_id)):
return self._is_element_visible("#comment_{} .action-edit".format(comment_id))
def is_comment_editor_visible(self, comment_id):
"""Returns true if the comment editor is present, false otherwise"""
return self._is_element_visible(".edit-comment-body[data-id='{}']".format(comment_id))
def _get_comment_editor_value(self, comment_id):
return self._find_within("#wmd-input-edit-comment-body-{}".format(comment_id)).text[0]
def start_comment_edit(self, comment_id):
"""Click the edit button for the comment, loading the editing view"""
old_body = self.get_comment_body(comment_id)
with self._secondary_action_menu_open("#comment_{}".format(comment_id)):
self._find_within("#comment_{} .action-edit".format(comment_id)).first.click()
EmptyPromise(
lambda: (
self.is_comment_editor_visible(comment_id) and
not self.is_comment_visible(comment_id) and
self._get_comment_editor_value(comment_id) == old_body
),
"Comment edit started"
).fulfill()
def set_comment_editor_value(self, comment_id, new_body):
"""Replace the contents of the comment editor"""
self._find_within("#comment_{} .wmd-input".format(comment_id)).fill(new_body)
def submit_comment_edit(self, comment_id, new_comment_body):
"""Click the submit button on the comment editor"""
self._find_within("#comment_{} .post-update".format(comment_id)).first.click()
EmptyPromise(
lambda: (
not self.is_comment_editor_visible(comment_id) and
self.is_comment_visible(comment_id) and
self.get_comment_body(comment_id) == new_comment_body
),
"Comment edit succeeded"
).fulfill()
def cancel_comment_edit(self, comment_id, original_body):
"""Click the cancel button on the comment editor"""
self._find_within("#comment_{} .post-cancel".format(comment_id)).first.click()
EmptyPromise(
lambda: (
not self.is_comment_editor_visible(comment_id) and
self.is_comment_visible(comment_id) and
self.get_comment_body(comment_id) == original_body
),
"Comment edit was canceled"
).fulfill()
class DiscussionSortPreferencePage(CoursePage):
"""
Page that contain the discussion board with sorting options
"""
def __init__(self, browser, course_id):
super(DiscussionSortPreferencePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum"
def is_browser_on_page(self):
"""
Return true if the browser is on the right page else false.
"""
return self.q(css="body.discussion .forum-nav-sort-control").present
def get_selected_sort_preference(self):
"""
Return the text of option that is selected for sorting.
"""
options = self.q(css="body.discussion .forum-nav-sort-control option")
return options.filter(lambda el: el.is_selected())[0].get_attribute("value")
def change_sort_preference(self, sort_by):
"""
Change the option of sorting by clicking on new option.
"""
self.q(css="body.discussion .forum-nav-sort-control option[value='{0}']".format(sort_by)).click()
def refresh_page(self):
"""
Reload the page.
"""
self.browser.refresh()
class DiscussionTabSingleThreadPage(CoursePage):
def __init__(self, browser, course_id, discussion_id, thread_id):
super(DiscussionTabSingleThreadPage, self).__init__(browser, course_id)
self.thread_page = DiscussionThreadPage(
browser,
"body.discussion .discussion-article[data-id='{thread_id}']".format(thread_id=thread_id)
)
self.url_path = "discussion/forum/{discussion_id}/threads/{thread_id}".format(
discussion_id=discussion_id, thread_id=thread_id
)
def is_browser_on_page(self):
return self.thread_page.is_browser_on_page()
def __getattr__(self, name):
return getattr(self.thread_page, name)
def close_open_thread(self):
with self.thread_page._secondary_action_menu_open(".forum-thread-main-wrapper"):
self._find_within(".forum-thread-main-wrapper .action-close").first.click()
def is_focused_on_element(self, selector):
"""
Check if the focus is on element
"""
return self.browser.execute_script("return $('{}').is(':focus')".format(selector))
def _thread_is_rendered_successfully(self, thread_id):
return self.q(css=".discussion-article[data-id='{}']".format(thread_id)).visible
def click_and_open_thread(self, thread_id):
"""
Click specific thread on the list.
"""
thread_selector = "li[data-id='{}']".format(thread_id)
self.q(css=thread_selector).first.click()
EmptyPromise(
lambda: self._thread_is_rendered_successfully(thread_id),
"Thread has been rendered"
).fulfill()
def check_threads_rendered_successfully(self, thread_count):
"""
Count the number of threads available on page.
"""
return len(self.q(css=".forum-nav-thread").results) == thread_count
def check_focus_is_set(self, selector):
"""
Check focus is set
"""
EmptyPromise(
lambda: self.is_focused_on_element(selector),
"Focus is on other element"
).fulfill()
class InlineDiscussionPage(PageObject):
url = None
def __init__(self, browser, discussion_id):
super(InlineDiscussionPage, self).__init__(browser)
self._discussion_selector = (
".discussion-module[data-discussion-id='{discussion_id}'] ".format(
discussion_id=discussion_id
)
)
def _find_within(self, selector):
"""
Returns a query corresponding to the given CSS selector within the scope
of this discussion page
"""
return self.q(css=self._discussion_selector + " " + selector)
def is_browser_on_page(self):
self.wait_for_ajax()
return self.q(css=self._discussion_selector).present
def is_discussion_expanded(self):
return self._find_within(".discussion").present
def expand_discussion(self):
"""Click the link to expand the discussion"""
self._find_within(".discussion-show").first.click()
EmptyPromise(
self.is_discussion_expanded,
"Discussion expanded"
).fulfill()
def get_num_displayed_threads(self):
return len(self._find_within(".discussion-thread"))
def has_thread(self, thread_id):
"""Returns true if this page is showing the thread with the specified id."""
return self._find_within('.discussion-thread#thread_{}'.format(thread_id)).present
def element_exists(self, selector):
return self.q(css=self._discussion_selector + " " + selector).present
def is_new_post_opened(self):
return self._find_within(".new-post-article").visible
def click_element(self, selector):
self.wait_for_element_presence(
"{discussion} {selector}".format(discussion=self._discussion_selector, selector=selector),
"{selector} is visible".format(selector=selector)
)
self._find_within(selector).click()
def click_cancel_new_post(self):
self.click_element(".cancel")
EmptyPromise(
lambda: not self.is_new_post_opened(),
"New post closed"
).fulfill()
def click_new_post_button(self):
self.click_element(".new-post-btn")
EmptyPromise(
self.is_new_post_opened,
"New post opened"
).fulfill()
@wait_for_js
def _is_element_visible(self, selector):
query = self._find_within(selector)
return query.present and query.visible
class InlineDiscussionThreadPage(DiscussionThreadPage):
def __init__(self, browser, thread_id):
super(InlineDiscussionThreadPage, self).__init__(
browser,
"body.courseware .discussion-module #thread_{thread_id}".format(thread_id=thread_id)
)
def expand(self):
"""Clicks the link to expand the thread"""
self._find_within(".forum-thread-expand").first.click()
EmptyPromise(
lambda: bool(self.get_response_total_text()),
"Thread expanded"
).fulfill()
def is_thread_anonymous(self):
return not self.q(css=".posted-details > .username").present
@wait_for_js
def check_if_selector_is_focused(self, selector):
"""
Check if selector is focused
"""
return self.browser.execute_script("return $('{}').is(':focus')".format(selector))
class DiscussionUserProfilePage(CoursePage):
TEXT_NEXT = u'Next >'
TEXT_PREV = u'< Previous'
PAGING_SELECTOR = "a.discussion-pagination[data-page-number]"
def __init__(self, browser, course_id, user_id, username, page=1):
super(DiscussionUserProfilePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum/dummy/users/{}?page={}".format(user_id, page)
self.username = username
def is_browser_on_page(self):
return (
self.q(css='section.discussion-user-threads[data-course-id="{}"]'.format(self.course_id)).present
and
self.q(css='section.user-profile a.learner-profile-link').present
and
self.q(css='section.user-profile a.learner-profile-link').text[0] == self.username
)
@wait_for_js
def is_window_on_top(self):
return self.browser.execute_script("return $('html, body').offset().top") == 0
def get_shown_thread_ids(self):
elems = self.q(css="article.discussion-thread")
return [elem.get_attribute("id")[7:] for elem in elems]
def get_current_page(self):
def check_func():
try:
current_page = int(self.q(css="nav.discussion-paginator li.current-page").text[0])
except:
return False, None
return True, current_page
return Promise(
check_func, 'discussion-paginator current page has text', timeout=5,
).fulfill()
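    # Note: bok-choy's Promise polls check_func until it returns a
    # (True, result) tuple, so fulfill() above only returns once the
    # paginator's current-page text is actually present.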
def _check_pager(self, text, page_number=None):
"""
returns True if 'text' matches the text in any of the pagination elements. If
page_number is provided, only return True if the element points to that result
page.
"""
elems = self.q(css=self.PAGING_SELECTOR).filter(lambda elem: elem.text == text)
if page_number:
elems = elems.filter(lambda elem: int(elem.get_attribute('data-page-number')) == page_number)
return elems.present
def get_clickable_pages(self):
return sorted([
int(elem.get_attribute('data-page-number'))
for elem in self.q(css=self.PAGING_SELECTOR)
if str(elem.text).isdigit()
])
def is_prev_button_shown(self, page_number=None):
return self._check_pager(self.TEXT_PREV, page_number)
def is_next_button_shown(self, page_number=None):
return self._check_pager(self.TEXT_NEXT, page_number)
def _click_pager_with_text(self, text, page_number):
"""
click the first pagination element with whose text is `text` and ensure
the resulting page number matches `page_number`.
"""
targets = [elem for elem in self.q(css=self.PAGING_SELECTOR) if elem.text == text]
targets[0].click()
EmptyPromise(
lambda: self.get_current_page() == page_number,
"navigated to desired page"
).fulfill()
def click_prev_page(self):
self._click_pager_with_text(self.TEXT_PREV, self.get_current_page() - 1)
EmptyPromise(
self.is_window_on_top,
"Window is on top"
).fulfill()
def click_next_page(self):
self._click_pager_with_text(self.TEXT_NEXT, self.get_current_page() + 1)
EmptyPromise(
self.is_window_on_top,
"Window is on top"
).fulfill()
def click_on_page(self, page_number):
self._click_pager_with_text(unicode(page_number), page_number)
EmptyPromise(
self.is_window_on_top,
"Window is on top"
).fulfill()
def click_on_sidebar_username(self):
self.wait_for_page()
self.q(css='.learner-profile-link').first.click()
class DiscussionTabHomePage(CoursePage, DiscussionPageMixin):
ALERT_SELECTOR = ".discussion-body .forum-nav .search-alert"
def __init__(self, browser, course_id):
super(DiscussionTabHomePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum/"
def is_browser_on_page(self):
return self.q(css=".discussion-body section.home-header").present
def perform_search(self, text="dummy"):
self.q(css=".forum-nav-search-input").fill(text + chr(10))
EmptyPromise(
self.is_ajax_finished,
"waiting for server to return result"
).fulfill()
def get_search_alert_messages(self):
return self.q(css=self.ALERT_SELECTOR + " .message").text
def get_search_alert_links(self):
return self.q(css=self.ALERT_SELECTOR + " .link-jump")
def dismiss_alert_message(self, text):
"""
dismiss any search alert message containing the specified text.
"""
def _match_messages(text):
return self.q(css=".search-alert").filter(lambda elem: text in elem.text)
for alert_id in _match_messages(text).attrs("id"):
self.q(css="{}#{} a.dismiss".format(self.ALERT_SELECTOR, alert_id)).click()
EmptyPromise(
lambda: _match_messages(text).results == [],
"waiting for dismissed alerts to disappear"
).fulfill()
def click_new_post_button(self):
"""
Clicks the 'New Post' button.
"""
self.new_post_button.click()
EmptyPromise(
lambda: (
self.new_post_form
),
"New post action succeeded"
).fulfill()
@property
def new_post_button(self):
"""
Returns the new post button.
"""
elements = self.q(css="ol.course-tabs .new-post-btn")
return elements.first if elements.visible and len(elements) == 1 else None
@property
def new_post_form(self):
"""
Returns the new post form.
"""
elements = self.q(css=".forum-new-post-form")
return elements[0] if elements.visible and len(elements) == 1 else None
| agpl-3.0 |
google/iree | build_tools/bazel_to_cmake/bazel_to_cmake_converter.py | 1 | 28002 | # Lint as: python3
# Copyright 2020 The IREE Authors
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
"""Converter class for converting Bazel BUILD files to CMakeLists.txt files.
See bazel_to_cmake.py for usage.
"""
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
# pylint: disable=unused-argument
# pylint: disable=exec-used
import itertools
import textwrap
import bazel_to_cmake_targets
# ------------------------------------------------------------------------- #
# Conversion utilities, written to reduce boilerplate and allow for reuse #
# between similar rule conversions (e.g. cc_library and cc_binary). #
# ------------------------------------------------------------------------- #
def _expand_cmake_var(var):
return "${" + var + "}"
def _convert_string_arg_block(name, value, quote=True):
# NAME
# "value"
if value is None:
return ""
if quote:
return f' {name}\n "{value}"\n'
else:
return f" {name}\n {value}\n"
def _convert_string_list_block(name, values, quote=True, sort=False):
# Note this deliberately distinguishes between an empty list (argument
# explicitly specified) and None (argument left as default).
if values is None:
return ""
if sort:
values = sorted(values)
if quote:
values_list = "\n".join([f' "{v}"' for v in values])
else:
values_list = "\n".join([f" {v}" for v in values])
return f" {name}\n{values_list}\n"
def _convert_option_block(option, option_value):
if option_value:
# Note: this is a truthiness check as well as an existence check, e.g.
# Bazel `testonly = False` will be handled correctly by this condition.
return f" {option}\n"
else:
return ""
def _convert_translate_tool_block(translate_tool):
if translate_tool is None:
return ""
# Bazel target name to cmake binary name
# Bazel `//iree/custom:custom-translate` -> CMake `iree_custom_custom-translate`
translate_tool = translate_tool.replace(
"//iree", "iree") # iree/custom:custom-translate
translate_tool = translate_tool.replace(":",
"_") # iree/custom_custom-translate
translate_tool = translate_tool.replace("/",
"_") # iree_custom_custom-translate
return _convert_string_arg_block("TRANSLATE_TOOL",
translate_tool,
quote=False)
def _convert_srcs_block(srcs):
if srcs is None:
return ""
generated_srcs = [src for src in srcs if src.startswith(":")]
srcs = [src for src in srcs if src not in generated_srcs]
sets = []
if srcs:
sets.append(_convert_string_list_block("SRCS", srcs, sort=True))
if generated_srcs:
sets.append(
_convert_string_list_block("GENERATED_SRCS",
[src[1:] for src in generated_srcs],
sort=True))
return "\n".join(sets)
def _convert_td_file_block(td_file):
if td_file.startswith("//iree"):
# Bazel `//iree/dir/td_file.td`
# -> CMake `${IREE_ROOT_DIR}/iree/dir/td_file.td
# Bazel `//iree/dir/IR:td_file.td`
# -> CMake `${IREE_ROOT_DIR}/iree/dir/IR/td_file.td
td_file = td_file.replace("//iree", "${IREE_ROOT_DIR}/iree")
td_file = td_file.replace(":", "/")
return _convert_string_arg_block("TD_FILE", td_file)
def _convert_tbl_outs_block(tbl_outs):
outs_list = "\n".join(
[f" {' '.join(flags)} {value}" for flags, value in tbl_outs])
return f" OUTS\n{outs_list}\n"
def _convert_tblgen_block(tblgen):
if tblgen.endswith("iree-tblgen"):
return " TBLGEN\n IREE\n"
else:
return ""
def _convert_target(target):
"""Returns a list of targets that correspond to the specified Bazel target.
  Note that this must be a list because some targets have a one-to-many mapping.
"""
return bazel_to_cmake_targets.convert_target(target)
def _convert_single_target(target):
replacement_targets = _convert_target(target)
if len(replacement_targets) != 1:
raise RuntimeError(f"Expected single target replacement for {target},"
f" but got multiple: {replacement_targets}")
return replacement_targets[0]
def _convert_single_target_block(name, target):
mapped_target = _convert_single_target(target)
return _convert_string_arg_block(name, mapped_target, quote=False)
def _convert_target_list_block(list_name, targets):
if targets is None:
return ""
# DEPS
# package1::target1
# package1::target2
# package2::target
targets = [_convert_target(t) for t in targets]
# Flatten lists
targets = list(itertools.chain.from_iterable(targets))
# Remove duplicates
targets = set(targets)
# Remove Falsey (None and empty string) values
targets = filter(None, targets)
return _convert_string_list_block(list_name, targets, sort=True, quote=False)
# Copied from integrations/tensorflow/e2e/iree_e2e_cartesian_product_test_suite.bzl
def _normalize_dictionary(dictionary):
"""Wraps every value of dictionary in a list if it isn't one already."""
for key, value in dictionary.items():
if type(value) != type([]):
dictionary[key] = [value]
return dictionary
def _dictionary_product(dictionary):
"""Returns a named cartesian product of dictionary's values."""
# Converts {'a': [1, 2], 'b': [3, 4]} into
# [{'a': 1, 'b': 3}, {'a': 1, 'b': 4}, {'a': 2, 'b': 3}, {'a': 2, 'b': 4}]
product = [[]]
for values in dictionary.values():
# Iteratively grow the elements of the product.
product = [element + [value] for element in product for value in values]
dicts = [{k: v for k, v in zip(dictionary, element)} for element in product]
return dicts
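# Hedged illustration (added): matches the worked example in the comment of
# `_dictionary_product` above; relies on py3.7+ dict ordering.
def _selfcheck_dictionary_product():
  assert _dictionary_product({'a': [1, 2], 'b': [3, 4]}) == [
      {'a': 1, 'b': 3}, {'a': 1, 'b': 4}, {'a': 2, 'b': 3}, {'a': 2, 'b': 4}]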
class BuildFileFunctions(object):
"""Object passed to `exec` that has handlers for BUILD file functions."""
def __init__(self, converter):
self.converter = converter
def _convert_unimplemented_function(self, function, details=""):
message = f"Unimplemented {function}: {details}"
if not self.converter.first_error:
self.converter.first_error = NotImplementedError(message)
# Avoid submitting the raw results from non-strict runs. These are still
# useful but are generally not safe to submit as-is. An upstream check
# prevents changes with this phrase from being submitted.
# Written as separate literals to avoid the check triggering here.
submit_blocker = "DO" + " NOT" + " SUBMIT."
self.converter.body += f"# {submit_blocker} {message}\n"
# ------------------------------------------------------------------------- #
# Function handlers that convert BUILD definitions to CMake definitions. #
# #
# Names and signatures must match 1:1 with those expected in BUILD files #
# except that default values for optional arguments should generally be #
# `None` so we don't set them unnecessarily in the CMakeLists.txt files. #
# Each function that may be found in a BUILD file must be listed here. #
# ------------------------------------------------------------------------- #
# Functions with no mapping to CMake. Just ignore these.
def load(self, *args, **kwargs):
pass
def package(self, **kwargs):
pass
def iree_build_test(self, **kwargs):
pass
def test_suite(self, **kwargs):
pass
def config_setting(self, **kwargs):
pass
def exports_files(self, *args, **kwargs):
pass
# Technically we could do something with a CMake equivalent but we have no use
# case.
def py_binary(self, *args, **kwargs):
pass
def filegroup(self, name, **kwargs):
# Not implemented yet. Might be a no-op, or may want to evaluate the srcs
# attribute and pass them along to any targets that depend on the filegroup.
# Cross-package dependencies and complicated globs could be hard to handle.
# We have a bunch of filegroups that just contain TD files. CMake doesn't
# model this at all, so we'll just hardcode this special case.
# TODO(gcmn): Handle this robustly
if name == "td_files":
return
self._convert_unimplemented_function("filegroup", name)
def sh_binary(self, name, **kwargs):
self._convert_unimplemented_function("sh_binary", name)
def enforce_glob(self, files, **kwargs):
return files
def glob(self, include, exclude=None, exclude_directories=1):
if exclude_directories != 1:
self._convert_unimplemented_function("glob", "with exclude_directories")
if exclude is None:
exclude = []
glob_vars = []
for pattern in include:
if "**" in pattern:
# bazel's glob has some specific restrictions about crossing package
# boundaries. We have no uses of recursive globs. Rather than try to
# emulate them or silently give different behavior, just error out.
# See https://docs.bazel.build/versions/master/be/functions.html#glob
raise NotImplementedError("Recursive globs not supported")
# Bazel `*.mlir` glob -> CMake Variable `_GLOB_X_MLIR`
var = "_GLOB_" + pattern.replace("*", "X").replace(".", "_").upper()
glob_vars.append(var)
self.converter.body += (
f"file(GLOB {var} LIST_DIRECTORIES false"
f" RELATIVE {_expand_cmake_var('CMAKE_CURRENT_SOURCE_DIR')}"
f" CONFIGURE_DEPENDS {pattern})\n")
for pattern in exclude:
if "**" in pattern:
raise NotImplementedError("Recursive globs not supported")
exclude_var = ("_GLOB_" +
pattern.replace("*", "X").replace(".", "_").upper())
self.converter.body += (
f"file(GLOB {exclude_var} LIST_DIRECTORIES false"
f" RELATIVE {_expand_cmake_var('CMAKE_CURRENT_SOURCE_DIR')}"
f" CONFIGURE_DEPENDS {pattern})\n")
for glob_var in glob_vars:
self.converter.body += (
f"list(REMOVE_ITEM {glob_var} {_expand_cmake_var(exclude_var)})\n")
return [_expand_cmake_var(var) for var in glob_vars]
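  # Illustrative note (added): for include=["*.mlir"] the handler above emits
  # roughly the following into the CMakeLists body and returns
  # ["${_GLOB_X_MLIR}"]:
  #   file(GLOB _GLOB_X_MLIR LIST_DIRECTORIES false
  #        RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} CONFIGURE_DEPENDS *.mlir)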
# TODO(gcmn) implement these types of functions in a less hard-coded way
def platform_trampoline_deps(self, basename, path="base"):
return [f"//iree/{path}/internal:{basename}_internal"]
def select(self, d):
self._convert_unimplemented_function("select", str(d))
return d["//conditions:default"]
def cc_library(self,
name,
hdrs=None,
textual_hdrs=None,
srcs=None,
copts=None,
defines=None,
data=None,
deps=None,
testonly=None,
linkopts=None,
**kwargs):
if linkopts:
self._convert_unimplemented_function("linkopts")
name_block = _convert_string_arg_block("NAME", name, quote=False)
hdrs_block = _convert_string_list_block("HDRS", hdrs, sort=True)
textual_hdrs_block = _convert_string_list_block("TEXTUAL_HDRS",
textual_hdrs,
sort=True)
srcs_block = _convert_srcs_block(srcs)
copts_block = _convert_string_list_block("COPTS", copts, sort=False)
defines_block = _convert_string_list_block("DEFINES", defines)
data_block = _convert_target_list_block("DATA", data)
deps_block = _convert_target_list_block("DEPS", deps)
testonly_block = _convert_option_block("TESTONLY", testonly)
self.converter.body += (f"iree_cc_library(\n"
f"{name_block}"
f"{copts_block}"
f"{hdrs_block}"
f"{textual_hdrs_block}"
f"{srcs_block}"
f"{data_block}"
f"{deps_block}"
f"{defines_block}"
f"{testonly_block}"
f" PUBLIC\n)\n\n")
def cc_test(self,
name,
hdrs=None,
srcs=None,
copts=None,
defines=None,
data=None,
deps=None,
tags=None,
**kwargs):
name_block = _convert_string_arg_block("NAME", name, quote=False)
hdrs_block = _convert_string_list_block("HDRS", hdrs, sort=True)
srcs_block = _convert_srcs_block(srcs)
copts_block = _convert_string_list_block("COPTS", copts, sort=False)
defines_block = _convert_string_list_block("DEFINES", defines)
data_block = _convert_target_list_block("DATA", data)
deps_block = _convert_target_list_block("DEPS", deps)
labels_block = _convert_string_list_block("LABELS", tags)
self.converter.body += (f"iree_cc_test(\n"
f"{name_block}"
f"{hdrs_block}"
f"{srcs_block}"
f"{copts_block}"
f"{defines_block}"
f"{data_block}"
f"{deps_block}"
f"{labels_block}"
f")\n\n")
def cc_binary(self,
name,
srcs=None,
data=None,
deps=None,
copts=None,
defines=None,
linkopts=None,
testonly=None,
**kwargs):
if linkopts:
self._convert_unimplemented_function("linkopts")
name_block = _convert_string_arg_block("NAME", name, quote=False)
copts_block = _convert_string_list_block("COPTS", copts, sort=False)
defines_block = _convert_string_list_block("DEFINES", defines)
srcs_block = _convert_srcs_block(srcs)
data_block = _convert_target_list_block("DATA", data)
deps_block = _convert_target_list_block("DEPS", deps)
testonly_block = _convert_option_block("TESTONLY", testonly)
self.converter.body += (f"iree_cc_binary(\n"
f"{name_block}"
f"{srcs_block}"
f"{copts_block}"
f"{defines_block}"
f"{data_block}"
f"{deps_block}"
f"{testonly_block}"
f")\n\n")
# Effectively an alias in IREE code.
iree_cc_binary = cc_binary
def c_embed_data(self,
name,
srcs,
c_file_output,
h_file_output,
testonly=None,
strip_prefix=None,
flatten=None,
identifier=None,
**kwargs):
name_block = _convert_string_arg_block("NAME", name, quote=False)
srcs_block = _convert_srcs_block(srcs)
c_file_output_block = _convert_string_arg_block("C_FILE_OUTPUT",
c_file_output)
h_file_output_block = _convert_string_arg_block("H_FILE_OUTPUT",
h_file_output)
testonly_block = _convert_option_block("TESTONLY", testonly)
identifier_block = _convert_string_arg_block("IDENTIFIER", identifier)
flatten_block = _convert_option_block("FLATTEN", flatten)
self.converter.body += (f"iree_c_embed_data(\n"
f"{name_block}"
f"{srcs_block}"
f"{c_file_output_block}"
f"{h_file_output_block}"
f"{identifier_block}"
f"{testonly_block}"
f"{flatten_block}"
f" PUBLIC\n)\n\n")
def spirv_kernel_cc_library(self, name, srcs):
name_block = _convert_string_arg_block("NAME", name, quote=False)
srcs_block = _convert_srcs_block(srcs)
self.converter.body += (f"iree_spirv_kernel_cc_library(\n"
f"{name_block}"
f"{srcs_block}"
f")\n\n")
def iree_bytecode_module(self,
name,
src,
flags=None,
translate_tool=None,
c_identifier=None,
testonly=None):
name_block = _convert_string_arg_block("NAME", name, quote=False)
src_block = _convert_string_arg_block("SRC", src)
c_identifier_block = _convert_string_arg_block("C_IDENTIFIER", c_identifier)
translate_tool_block = _convert_translate_tool_block(translate_tool)
flags_block = _convert_string_list_block("FLAGS", flags)
testonly_block = _convert_option_block("TESTONLY", testonly)
self.converter.body += (f"iree_bytecode_module(\n"
f"{name_block}"
f"{src_block}"
f"{c_identifier_block}"
f"{translate_tool_block}"
f"{flags_block}"
f"{testonly_block}"
f" PUBLIC\n)\n\n")
def iree_flatbuffer_c_library(self, name, srcs, flatcc_args=None):
name_block = _convert_string_arg_block("NAME", name, quote=False)
srcs_block = _convert_srcs_block(srcs)
flatcc_args_block = _convert_string_list_block("FLATCC_ARGS", flatcc_args)
self.converter.body += (f"flatbuffer_c_library(\n"
f"{name_block}"
f"{srcs_block}"
f"{flatcc_args_block}"
f" PUBLIC\n)\n\n")
def gentbl_cc_library(self,
name,
tblgen,
td_file,
tbl_outs,
td_srcs=None,
td_includes=None,
strip_include_prefix=None,
test=None):
name_block = _convert_string_arg_block("NAME", name, quote=False)
tblgen_block = _convert_tblgen_block(tblgen)
td_file_block = _convert_td_file_block(td_file)
outs_block = _convert_tbl_outs_block(tbl_outs)
self.converter.body += (f"iree_tablegen_library(\n"
f"{name_block}"
f"{td_file_block}"
f"{outs_block}"
f"{tblgen_block}"
f")\n\n")
def iree_tablegen_doc(self,
name,
tblgen,
td_file,
tbl_outs,
td_srcs=None,
td_includes=None,
strip_include_prefix=None):
name_block = _convert_string_arg_block("NAME", name, quote=False)
tblgen_block = _convert_tblgen_block(tblgen)
td_file_block = _convert_td_file_block(td_file)
outs_block = _convert_tbl_outs_block(tbl_outs)
self.converter.body += (f"iree_tablegen_doc(\n"
f"{name_block}"
f"{td_file_block}"
f"{outs_block}"
f"{tblgen_block}"
f")\n\n")
def iree_lit_test_suite(self, name, srcs, data, tags=None, **kwargs):
name_block = _convert_string_arg_block("NAME", name, quote=False)
srcs_block = _convert_srcs_block(srcs)
data_block = _convert_target_list_block("DATA", data)
labels_block = _convert_string_list_block("LABELS", tags)
self.converter.body += (f"iree_lit_test_suite(\n"
f"{name_block}"
f"{srcs_block}"
f"{data_block}"
f"{labels_block}"
f")\n\n")
def iree_check_single_backend_test_suite(self,
name,
srcs,
target_backend,
driver,
compiler_flags=None,
target_backends_and_drivers=None,
runner_args=None,
tags=None,
**kwargs):
name_block = _convert_string_arg_block("NAME", name, quote=False)
srcs_block = _convert_srcs_block(srcs)
target_backend_block = _convert_string_arg_block("TARGET_BACKEND",
target_backend)
driver_block = _convert_string_arg_block("DRIVER", driver)
compiler_flags_block = _convert_string_list_block("COMPILER_FLAGS",
compiler_flags)
runner_args_block = _convert_string_list_block("RUNNER_ARGS", runner_args)
labels_block = _convert_string_list_block("LABELS", tags)
self.converter.body += (f"iree_check_single_backend_test_suite(\n"
f"{name_block}"
f"{srcs_block}"
f"{target_backend_block}"
f"{driver_block}"
f"{compiler_flags_block}"
f"{runner_args_block}"
f"{labels_block}"
f")\n\n")
def iree_check_test_suite(self,
name,
srcs,
target_backends_and_drivers=None,
compiler_flags=None,
runner_args=None,
tags=None,
**kwargs):
target_backends = None
drivers = None
if target_backends_and_drivers is not None:
target_backends = [it[0] for it in target_backends_and_drivers]
drivers = [it[1] for it in target_backends_and_drivers]
name_block = _convert_string_arg_block("NAME", name, quote=False)
srcs_block = _convert_srcs_block(srcs)
target_backends_block = _convert_string_list_block("TARGET_BACKENDS",
target_backends)
drivers_block = _convert_string_list_block("DRIVERS", drivers)
compiler_flags_block = _convert_string_list_block("COMPILER_FLAGS",
compiler_flags)
runner_args_block = _convert_string_list_block("RUNNER_ARGS", runner_args)
labels_block = _convert_string_list_block("LABELS", tags)
self.converter.body += (f"iree_check_test_suite(\n"
f"{name_block}"
f"{srcs_block}"
f"{target_backends_block}"
f"{drivers_block}"
f"{compiler_flags_block}"
f"{runner_args_block}"
f"{labels_block}"
f")\n\n")
def iree_e2e_cartesian_product_test_suite(self,
name,
matrix,
failing_configurations=None,
tags=None,
data=None,
**kwargs):
    # Note: the kwargs deps, size, and python_version are unused.
if data is not None:
self._convert_unimplemented_function(
"iree_e2e_cartesian_product_test_suite", name + " has data")
matrix_keys = matrix.keys()
name_block = _convert_string_arg_block("NAME", name, quote=False)
matrix_keys_block = _convert_string_list_block("MATRIX_KEYS", matrix_keys)
labels_block = _convert_string_list_block("LABELS", tags)
value_strings = []
for key in matrix_keys:
      # Iterate in matrix_keys order so each values entry lines up with its key.
values = matrix[key]
if not isinstance(values, list):
values = [values]
if not values:
self._convert_unimplemented_function(
"iree_e2e_cartesian_product_test_suite",
name + f" has empty list for matrix key {key}")
value_strings.append(";".join(str(value) for value in values))
matrix_values_block = _convert_string_list_block("MATRIX_VALUES",
value_strings)
# Copied from integrations/tensorflow/e2e/iree_e2e_cartesian_product_test_suite.bzl
failing_configurations_block = ""
if failing_configurations is not None:
failing_matrix_configurations = []
for failing_configuration in failing_configurations:
failing_configuration = _normalize_dictionary(failing_configuration)
failing_matrix_configurations.extend(
_dictionary_product(failing_configuration))
failing_configuration_strings = []
for failing_configuration in failing_matrix_configurations:
failing_config_string = ",".join(
str(failing_configuration.get(key, "")) for key in matrix_keys)
failing_configuration_strings.append(failing_config_string)
failing_configurations_block = _convert_string_list_block(
"FAILING_CONFIGURATIONS", failing_configuration_strings)
self.converter.body += (f"iree_e2e_cartesian_product_test_suite(\n"
f"{name_block}"
f"{matrix_keys_block}"
f"{matrix_values_block}"
f"{failing_configurations_block}"
f"{labels_block}"
f")\n\n")
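  # Illustrative note (added): for matrix = {"backend": ["vmla"], "size": [1, 2]}
  # the emitted block lists MATRIX_KEYS "backend" and "size", and MATRIX_VALUES
  # "vmla" and "1;2" (each key's values joined with ';').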
def run_binary_test(self, name, test_binary, args=None, data=None):
if data is not None:
self._convert_unimplemented_function("iree_run_binary_test",
name + " has data")
name_block = _convert_string_arg_block("NAME", name)
test_binary_block = _convert_single_target_block("TEST_BINARY", test_binary)
args_block = _convert_string_list_block("ARGS", args)
self.converter.body += (f"iree_run_binary_test(\n"
f"{name_block}"
f"{args_block}"
f"{test_binary_block}"
f")\n\n")
def iree_cmake_extra_content(self, content, inline=False):
if inline:
self.converter.body += (f"\n{content}\n")
else:
self.converter.header += (f"\n{content}\n")
class Converter(object):
"""Conversion state tracking and full file template substitution."""
def __init__(self):
# Header appears after the license block but before `iree_add_all_subdirs`.
self.header = ""
# Body appears after `iree_add_all_subdirs`.
self.body = ""
self.first_error = None
def convert(self):
converted_content = (f"{self.header}\n\n"
f"iree_add_all_subdirs()\n\n"
f"{self.body}")
    # Clean up extra newline characters. This is more convenient than ensuring
    # all conversions are careful with where they insert newlines.
converted_content = converted_content.replace("\n\n\n", "\n")
converted_content = converted_content.rstrip() + "\n"
return converted_content
def GetDict(obj):
ret = {}
for k in dir(obj):
if not k.startswith("_"):
ret[k] = getattr(obj, k)
return ret
def convert_build_file(build_file_code, allow_partial_conversion=False):
converter = Converter()
exec(build_file_code, GetDict(BuildFileFunctions(converter)))
converted_text = converter.convert()
if not allow_partial_conversion and converter.first_error:
raise converter.first_error # pylint: disable=raising-bad-type
return converted_text
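# Hedged usage sketch (added for illustration; the BUILD fragment below is
# hypothetical): BUILD text is exec'd against BuildFileFunctions, so each
# rule call appends a CMake block to the converter body.
def _example_convert_build_file():
  build_text = textwrap.dedent("""\
      cc_library(
          name = "foo",
          srcs = ["foo.cc"],
          hdrs = ["foo.h"],
      )
      """)
  return convert_build_file(build_text, allow_partial_conversion=True)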
| apache-2.0 |
Immortalin/python-for-android | python-modules/twisted/twisted/test/test_process.py | 49 | 78614 | # Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test running processes.
"""
import gzip
import os
import sys
import signal
import StringIO
import errno
import gc
import stat
try:
import fcntl
except ImportError:
fcntl = process = None
else:
from twisted.internet import process
from zope.interface.verify import verifyObject
from twisted.python.log import msg
from twisted.internet import reactor, protocol, error, interfaces, defer
from twisted.trial import unittest
from twisted.python import util, runtime, procutils
from twisted.python.compat import set
class StubProcessProtocol(protocol.ProcessProtocol):
"""
ProcessProtocol counter-implementation: all methods on this class raise an
exception, so instances of this may be used to verify that only certain
methods are called.
"""
def outReceived(self, data):
raise NotImplementedError()
def errReceived(self, data):
raise NotImplementedError()
def inConnectionLost(self):
raise NotImplementedError()
def outConnectionLost(self):
raise NotImplementedError()
def errConnectionLost(self):
raise NotImplementedError()
class ProcessProtocolTests(unittest.TestCase):
"""
Tests for behavior provided by the process protocol base class,
L{protocol.ProcessProtocol}.
"""
def test_interface(self):
"""
L{ProcessProtocol} implements L{IProcessProtocol}.
"""
verifyObject(interfaces.IProcessProtocol, protocol.ProcessProtocol())
def test_outReceived(self):
"""
Verify that when stdout is delivered to
L{ProcessProtocol.childDataReceived}, it is forwarded to
L{ProcessProtocol.outReceived}.
"""
received = []
class OutProtocol(StubProcessProtocol):
def outReceived(self, data):
received.append(data)
bytes = "bytes"
p = OutProtocol()
p.childDataReceived(1, bytes)
self.assertEqual(received, [bytes])
def test_errReceived(self):
"""
Similar to L{test_outReceived}, but for stderr.
"""
received = []
class ErrProtocol(StubProcessProtocol):
def errReceived(self, data):
received.append(data)
bytes = "bytes"
p = ErrProtocol()
p.childDataReceived(2, bytes)
self.assertEqual(received, [bytes])
def test_inConnectionLost(self):
"""
Verify that when stdin close notification is delivered to
L{ProcessProtocol.childConnectionLost}, it is forwarded to
L{ProcessProtocol.inConnectionLost}.
"""
lost = []
class InLostProtocol(StubProcessProtocol):
def inConnectionLost(self):
lost.append(None)
p = InLostProtocol()
p.childConnectionLost(0)
self.assertEqual(lost, [None])
def test_outConnectionLost(self):
"""
Similar to L{test_inConnectionLost}, but for stdout.
"""
lost = []
class OutLostProtocol(StubProcessProtocol):
def outConnectionLost(self):
lost.append(None)
p = OutLostProtocol()
p.childConnectionLost(1)
self.assertEqual(lost, [None])
def test_errConnectionLost(self):
"""
Similar to L{test_inConnectionLost}, but for stderr.
"""
lost = []
class ErrLostProtocol(StubProcessProtocol):
def errConnectionLost(self):
lost.append(None)
p = ErrLostProtocol()
p.childConnectionLost(2)
self.assertEqual(lost, [None])
class TrivialProcessProtocol(protocol.ProcessProtocol):
"""
Simple process protocol for tests purpose.
@ivar outData: data received from stdin
@ivar errData: data received from stderr
"""
def __init__(self, d):
"""
Create the deferred that will be fired at the end, and initialize
data structures.
"""
self.deferred = d
self.outData = []
self.errData = []
def processEnded(self, reason):
self.reason = reason
self.deferred.callback(None)
def outReceived(self, data):
self.outData.append(data)
def errReceived(self, data):
self.errData.append(data)
class TestProcessProtocol(protocol.ProcessProtocol):
def connectionMade(self):
self.stages = [1]
self.data = ''
self.err = ''
self.transport.write("abcd")
def childDataReceived(self, childFD, data):
"""
Override and disable the dispatch provided by the base class to ensure
that it is really this method which is being called, and the transport
is not going directly to L{outReceived} or L{errReceived}.
"""
if childFD == 1:
self.data += data
elif childFD == 2:
self.err += data
def childConnectionLost(self, childFD):
"""
Similarly to L{childDataReceived}, disable the automatic dispatch
provided by the base implementation to verify that the transport is
calling this method directly.
"""
if childFD == 1:
self.stages.append(2)
if self.data != "abcd":
raise RuntimeError
self.transport.write("1234")
elif childFD == 2:
self.stages.append(3)
if self.err != "1234":
print 'err != 1234: ' + repr(self.err)
raise RuntimeError()
self.transport.write("abcd")
self.stages.append(4)
elif childFD == 0:
self.stages.append(5)
def processEnded(self, reason):
self.reason = reason
self.deferred.callback(None)
class EchoProtocol(protocol.ProcessProtocol):
s = "1234567" * 1001
n = 10
finished = 0
failure = None
def __init__(self, onEnded):
self.onEnded = onEnded
self.count = 0
def connectionMade(self):
assert self.n > 2
for i in range(self.n - 2):
self.transport.write(self.s)
# test writeSequence
self.transport.writeSequence([self.s, self.s])
self.buffer = self.s * self.n
def outReceived(self, data):
if buffer(self.buffer, self.count, len(data)) != buffer(data):
self.failure = ("wrong bytes received", data, self.count)
self.transport.closeStdin()
else:
self.count += len(data)
if self.count == len(self.buffer):
self.transport.closeStdin()
def processEnded(self, reason):
self.finished = 1
if not reason.check(error.ProcessDone):
self.failure = "process didn't terminate normally: " + str(reason)
self.onEnded.callback(self)
class SignalProtocol(protocol.ProcessProtocol):
"""
A process protocol that sends a signal when data is first received.
@ivar deferred: deferred firing on C{processEnded}.
@type deferred: L{defer.Deferred}
@ivar signal: the signal to send to the process.
@type signal: C{str}
@ivar signaled: A flag tracking whether the signal has been sent to the
child or not yet. C{False} until it is sent, then C{True}.
@type signaled: C{bool}
"""
def __init__(self, deferred, sig):
self.deferred = deferred
self.signal = sig
self.signaled = False
def outReceived(self, data):
"""
Handle the first output from the child process (which indicates it
is set up and ready to receive the signal) by sending the signal to
it. Also log all output to help with debugging.
"""
msg("Received %r from child stdout" % (data,))
if not self.signaled:
self.signaled = True
self.transport.signalProcess(self.signal)
def errReceived(self, data):
"""
Log all data received from the child's stderr to help with
debugging.
"""
msg("Received %r from child stderr" % (data,))
def processEnded(self, reason):
"""
Callback C{self.deferred} with C{None} if C{reason} is a
L{error.ProcessTerminated} failure with C{exitCode} set to C{None},
C{signal} set to C{self.signal}, and C{status} holding the status code
of the exited process. Otherwise, errback with a C{ValueError}
describing the problem.
"""
msg("Child exited: %r" % (reason.getTraceback(),))
if not reason.check(error.ProcessTerminated):
return self.deferred.errback(
ValueError("wrong termination: %s" % (reason,)))
v = reason.value
if isinstance(self.signal, str):
signalValue = getattr(signal, 'SIG' + self.signal)
else:
signalValue = self.signal
if v.exitCode is not None:
return self.deferred.errback(
ValueError("SIG%s: exitCode is %s, not None" %
(self.signal, v.exitCode)))
if v.signal != signalValue:
return self.deferred.errback(
ValueError("SIG%s: .signal was %s, wanted %s" %
(self.signal, v.signal, signalValue)))
if os.WTERMSIG(v.status) != signalValue:
return self.deferred.errback(
ValueError('SIG%s: %s' % (self.signal, os.WTERMSIG(v.status))))
self.deferred.callback(None)
class TestManyProcessProtocol(TestProcessProtocol):
def __init__(self):
self.deferred = defer.Deferred()
def processEnded(self, reason):
self.reason = reason
if reason.check(error.ProcessDone):
self.deferred.callback(None)
else:
self.deferred.errback(reason)
class UtilityProcessProtocol(protocol.ProcessProtocol):
"""
Helper class for launching a Python process and getting a result from it.
@ivar program: A string giving a Python program for the child process to
run.
"""
program = None
def run(cls, reactor, argv, env):
"""
Run a Python process connected to a new instance of this protocol
class. Return the protocol instance.
The Python process is given C{self.program} on the command line to
execute, in addition to anything specified by C{argv}. C{env} is
the complete environment.
"""
exe = sys.executable
self = cls()
reactor.spawnProcess(
self, exe, [exe, "-c", self.program] + argv, env=env)
return self
run = classmethod(run)
def __init__(self):
self.bytes = []
self.requests = []
def parseChunks(self, bytes):
"""
Called with all bytes received on stdout when the process exits.
"""
raise NotImplementedError()
def getResult(self):
"""
Return a Deferred which will fire with the result of L{parseChunks}
when the child process exits.
"""
d = defer.Deferred()
self.requests.append(d)
return d
def _fireResultDeferreds(self, result):
"""
Callback all Deferreds returned up until now by L{getResult}
with the given result object.
"""
requests = self.requests
self.requests = None
for d in requests:
d.callback(result)
def outReceived(self, bytes):
"""
Accumulate output from the child process in a list.
"""
self.bytes.append(bytes)
def processEnded(self, reason):
"""
Handle process termination by parsing all received output and firing
any waiting Deferreds.
"""
self._fireResultDeferreds(self.parseChunks(self.bytes))
class GetArgumentVector(UtilityProcessProtocol):
"""
Protocol which will read a serialized argv from a process and
expose it to interested parties.
"""
program = (
"from sys import stdout, argv\n"
"stdout.write(chr(0).join(argv))\n"
"stdout.flush()\n")
def parseChunks(self, chunks):
"""
Parse the output from the process to which this protocol was
connected, which is a single unterminated line of \\0-separated
strings giving the argv of that process. Return this as a list of
str objects.
"""
return ''.join(chunks).split('\0')
class GetEnvironmentDictionary(UtilityProcessProtocol):
"""
Protocol which will read a serialized environment dict from a process
and expose it to interested parties.
"""
program = (
"from sys import stdout\n"
"from os import environ\n"
"items = environ.iteritems()\n"
"stdout.write(chr(0).join([k + chr(0) + v for k, v in items]))\n"
"stdout.flush()\n")
def parseChunks(self, chunks):
"""
Parse the output from the process to which this protocol was
connected, which is a single unterminated line of \\0-separated
strings giving key value pairs of the environment from that process.
Return this as a dictionary.
"""
environString = ''.join(chunks)
if not environString:
return {}
environ = iter(environString.split('\0'))
d = {}
while 1:
try:
k = environ.next()
except StopIteration:
break
else:
v = environ.next()
d[k] = v
return d
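# Hedged usage sketch (added for illustration; not part of the original test
# suite): the utility protocols above are driven by handing them a reactor,
# extra argv entries, and an environment, then waiting on getResult().
def _exampleGetChildArgv():
    """
    Return a Deferred firing with the child's argv list, assuming a running
    reactor.
    """
    proto = GetArgumentVector.run(reactor, [], {})
    return proto.getResult()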
class ProcessTestCase(unittest.TestCase):
"""Test running a process."""
usePTY = False
def testStdio(self):
"""twisted.internet.stdio test."""
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_twisted.py")
p = Accumulator()
d = p.endedDeferred = defer.Deferred()
env = {"PYTHONPATH": os.pathsep.join(sys.path)}
reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=env,
path=None, usePTY=self.usePTY)
p.transport.write("hello, world")
p.transport.write("abc")
p.transport.write("123")
p.transport.closeStdin()
def processEnded(ign):
self.assertEquals(p.outF.getvalue(), "hello, worldabc123",
"Output follows:\n"
"%s\n"
"Error message from process_twisted follows:\n"
"%s\n" % (p.outF.getvalue(), p.errF.getvalue()))
return d.addCallback(processEnded)
def test_unsetPid(self):
"""
Test if pid is None/non-None before/after process termination. This
reuses process_echoer.py to get a process that blocks on stdin.
"""
finished = defer.Deferred()
p = TrivialProcessProtocol(finished)
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_echoer.py")
procTrans = reactor.spawnProcess(p, exe,
[exe, scriptPath], env=None)
self.failUnless(procTrans.pid)
def afterProcessEnd(ignored):
self.assertEqual(procTrans.pid, None)
p.transport.closeStdin()
return finished.addCallback(afterProcessEnd)
def test_process(self):
"""
        Test running a process: check its output, its exitCode, and some
        properties of signalProcess.
"""
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_tester.py")
d = defer.Deferred()
p = TestProcessProtocol()
p.deferred = d
reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None)
def check(ignored):
self.assertEquals(p.stages, [1, 2, 3, 4, 5])
f = p.reason
f.trap(error.ProcessTerminated)
self.assertEquals(f.value.exitCode, 23)
# would .signal be available on non-posix?
# self.assertEquals(f.value.signal, None)
self.assertRaises(
error.ProcessExitedAlready, p.transport.signalProcess, 'INT')
try:
import process_tester, glob
for f in glob.glob(process_tester.test_file_match):
os.remove(f)
except:
pass
d.addCallback(check)
return d
def testManyProcesses(self):
def _check(results, protocols):
for p in protocols:
self.assertEquals(p.stages, [1, 2, 3, 4, 5], "[%d] stages = %s" % (id(p.transport), str(p.stages)))
# test status code
f = p.reason
f.trap(error.ProcessTerminated)
self.assertEquals(f.value.exitCode, 23)
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_tester.py")
args = [exe, "-u", scriptPath]
protocols = []
deferreds = []
for i in xrange(50):
p = TestManyProcessProtocol()
protocols.append(p)
reactor.spawnProcess(p, exe, args, env=None)
deferreds.append(p.deferred)
deferredList = defer.DeferredList(deferreds, consumeErrors=True)
deferredList.addCallback(_check, protocols)
return deferredList
def test_echo(self):
"""
        Spawning a subprocess which echoes its stdin to its stdout via
C{reactor.spawnProcess} will result in that echoed output being
delivered to outReceived.
"""
finished = defer.Deferred()
p = EchoProtocol(finished)
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_echoer.py")
reactor.spawnProcess(p, exe, [exe, scriptPath], env=None)
def asserts(ignored):
self.failIf(p.failure, p.failure)
self.failUnless(hasattr(p, 'buffer'))
self.assertEquals(len(''.join(p.buffer)), len(p.s * p.n))
def takedownProcess(err):
p.transport.closeStdin()
return err
return finished.addCallback(asserts).addErrback(takedownProcess)
def testCommandLine(self):
args = [r'a\"b ', r'a\b ', r' a\\"b', r' a\\b', r'"foo bar" "', '\tab', '"\\', 'a"b', "a'b"]
pyExe = sys.executable
scriptPath = util.sibpath(__file__, "process_cmdline.py")
p = Accumulator()
d = p.endedDeferred = defer.Deferred()
reactor.spawnProcess(p, pyExe, [pyExe, "-u", scriptPath]+args, env=None,
path=None)
def processEnded(ign):
self.assertEquals(p.errF.getvalue(), "")
recvdArgs = p.outF.getvalue().splitlines()
self.assertEquals(recvdArgs, args)
return d.addCallback(processEnded)
def test_wrongArguments(self):
"""
Test invalid arguments to spawnProcess: arguments and environment
        must only contain string or unicode values, and no null bytes.
"""
exe = sys.executable
p = protocol.ProcessProtocol()
badEnvs = [
{"foo": 2},
{"foo": "egg\0a"},
{3: "bar"},
{"bar\0foo": "bar"}]
badArgs = [
[exe, 2],
"spam",
[exe, "foo\0bar"]]
# Sanity check - this will fail for people who have mucked with
# their site configuration in a stupid way, but there's nothing we
# can do about that.
badUnicode = u'\N{SNOWMAN}'
try:
badUnicode.encode(sys.getdefaultencoding())
except UnicodeEncodeError:
# Okay, that unicode doesn't encode, put it in as a bad environment
# key.
badEnvs.append({badUnicode: 'value for bad unicode key'})
badEnvs.append({'key for bad unicode value': badUnicode})
badArgs.append([exe, badUnicode])
else:
# It _did_ encode. Most likely, Gtk2 is being used and the
# default system encoding is UTF-8, which can encode anything.
# In any case, if implicit unicode -> str conversion works for
# that string, we can't test that TypeError gets raised instead,
# so just leave it off.
pass
for env in badEnvs:
self.assertRaises(
TypeError,
reactor.spawnProcess, p, exe, [exe, "-c", ""], env=env)
for args in badArgs:
self.assertRaises(
TypeError,
reactor.spawnProcess, p, exe, args, env=None)
# Use upper-case so that the environment key test uses an upper case
# name: some versions of Windows only support upper case environment
# variable names, and I think Python (as of 2.5) doesn't use the right
# syscall for lowercase or mixed case names to work anyway.
okayUnicode = u"UNICODE"
encodedValue = "UNICODE"
def _deprecatedUnicodeSupportTest(self, processProtocolClass, argv=[], env={}):
"""
Check that a deprecation warning is emitted when passing unicode to
spawnProcess for an argv value or an environment key or value.
Check that the warning is of the right type, has the right message,
        and refers to the correct file. Unfortunately, the line number is not
        checked, because that is too hard to pin down reliably.
@param processProtocolClass: A L{UtilityProcessProtocol} subclass
which will be instantiated to communicate with the child process.
@param argv: The argv argument to spawnProcess.
@param env: The env argument to spawnProcess.
@return: A Deferred which fires when the test is complete.
"""
# Sanity to check to make sure we can actually encode this unicode
# with the default system encoding. This may be excessively
# paranoid. -exarkun
self.assertEqual(
self.okayUnicode.encode(sys.getdefaultencoding()),
self.encodedValue)
p = self.assertWarns(DeprecationWarning,
"Argument strings and environment keys/values passed to "
"reactor.spawnProcess should be str, not unicode.", __file__,
processProtocolClass.run, reactor, argv, env)
return p.getResult()
def test_deprecatedUnicodeArgvSupport(self):
"""
Test that a unicode string passed for an argument value is allowed
if it can be encoded with the default system encoding, but that a
deprecation warning is emitted.
"""
d = self._deprecatedUnicodeSupportTest(GetArgumentVector, argv=[self.okayUnicode])
def gotArgVector(argv):
self.assertEqual(argv, ['-c', self.encodedValue])
d.addCallback(gotArgVector)
return d
def test_deprecatedUnicodeEnvKeySupport(self):
"""
Test that a unicode string passed for the key of the environment
dictionary is allowed if it can be encoded with the default system
encoding, but that a deprecation warning is emitted.
"""
d = self._deprecatedUnicodeSupportTest(
GetEnvironmentDictionary, env={self.okayUnicode: self.encodedValue})
def gotEnvironment(environ):
self.assertEqual(environ[self.encodedValue], self.encodedValue)
d.addCallback(gotEnvironment)
return d
def test_deprecatedUnicodeEnvValueSupport(self):
"""
Test that a unicode string passed for the value of the environment
dictionary is allowed if it can be encoded with the default system
encoding, but that a deprecation warning is emitted.
"""
d = self._deprecatedUnicodeSupportTest(
GetEnvironmentDictionary, env={self.encodedValue: self.okayUnicode})
def gotEnvironment(environ):
# On Windows, the environment contains more things than we
# specified, so only make sure that at least the key we wanted
# is there, rather than testing the dictionary for exact
# equality.
self.assertEqual(environ[self.encodedValue], self.encodedValue)
d.addCallback(gotEnvironment)
return d
class TwoProcessProtocol(protocol.ProcessProtocol):
num = -1
finished = 0
def __init__(self):
self.deferred = defer.Deferred()
def outReceived(self, data):
pass
def processEnded(self, reason):
self.finished = 1
self.deferred.callback(None)
class TestTwoProcessesBase:
def setUp(self):
self.processes = [None, None]
self.pp = [None, None]
self.done = 0
self.verbose = 0
def createProcesses(self, usePTY=0):
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_reader.py")
for num in (0,1):
self.pp[num] = TwoProcessProtocol()
self.pp[num].num = num
p = reactor.spawnProcess(self.pp[num],
exe, [exe, "-u", scriptPath], env=None,
usePTY=usePTY)
self.processes[num] = p
def close(self, num):
if self.verbose: print "closing stdin [%d]" % num
p = self.processes[num]
pp = self.pp[num]
self.failIf(pp.finished, "Process finished too early")
p.loseConnection()
if self.verbose: print self.pp[0].finished, self.pp[1].finished
def _onClose(self):
return defer.gatherResults([ p.deferred for p in self.pp ])
def testClose(self):
if self.verbose: print "starting processes"
self.createProcesses()
reactor.callLater(1, self.close, 0)
reactor.callLater(2, self.close, 1)
return self._onClose()
class TestTwoProcessesNonPosix(TestTwoProcessesBase, unittest.TestCase):
pass
class TestTwoProcessesPosix(TestTwoProcessesBase, unittest.TestCase):
def tearDown(self):
for pp, pr in zip(self.pp, self.processes):
if not pp.finished:
try:
os.kill(pr.pid, signal.SIGTERM)
except OSError:
# If the test failed the process may already be dead
# The error here is only noise
pass
return self._onClose()
def kill(self, num):
if self.verbose: print "kill [%d] with SIGTERM" % num
p = self.processes[num]
pp = self.pp[num]
self.failIf(pp.finished, "Process finished too early")
os.kill(p.pid, signal.SIGTERM)
if self.verbose: print self.pp[0].finished, self.pp[1].finished
def testKill(self):
if self.verbose: print "starting processes"
self.createProcesses(usePTY=0)
reactor.callLater(1, self.kill, 0)
reactor.callLater(2, self.kill, 1)
return self._onClose()
def testClosePty(self):
if self.verbose: print "starting processes"
self.createProcesses(usePTY=1)
reactor.callLater(1, self.close, 0)
reactor.callLater(2, self.close, 1)
return self._onClose()
def testKillPty(self):
if self.verbose: print "starting processes"
self.createProcesses(usePTY=1)
reactor.callLater(1, self.kill, 0)
reactor.callLater(2, self.kill, 1)
return self._onClose()
class FDChecker(protocol.ProcessProtocol):
state = 0
data = ""
failed = None
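    # Added descriptive note (not in the original): this checker walks
    # process_fds.py through four states:
    #   1: wrote "abcd" to child fd 0; expect "righto" back on fd 1.
    #   2: wrote "efgh" to child fd 3; expect the child to close fd 4.
    #   3: closed child fd 5; expect "closed" back on fd 1.
    #   4: wait for a clean exit (exit code 0, no signal).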
def __init__(self, d):
self.deferred = d
def fail(self, why):
self.failed = why
self.deferred.callback(None)
def connectionMade(self):
self.transport.writeToChild(0, "abcd")
self.state = 1
def childDataReceived(self, childFD, data):
if self.state == 1:
if childFD != 1:
                self.fail("read '%s' on fd %d (not 1) during state 1" \
                          % (data, childFD))
return
self.data += data
#print "len", len(self.data)
if len(self.data) == 6:
if self.data != "righto":
self.fail("got '%s' on fd1, expected 'righto'" \
% self.data)
return
self.data = ""
self.state = 2
#print "state2", self.state
self.transport.writeToChild(3, "efgh")
return
if self.state == 2:
            self.fail("read '%s' on fd %s during state 2" % (data, childFD))
return
if self.state == 3:
if childFD != 1:
                self.fail("read '%s' on fd %s (not 1) during state 3" \
                          % (data, childFD))
return
self.data += data
if len(self.data) == 6:
if self.data != "closed":
self.fail("got '%s' on fd1, expected 'closed'" \
% self.data)
return
self.state = 4
return
if self.state == 4:
            self.fail("read '%s' on fd %s during state 4" % (data, childFD))
return
def childConnectionLost(self, childFD):
if self.state == 1:
self.fail("got connectionLost(%d) during state 1" % childFD)
return
if self.state == 2:
if childFD != 4:
self.fail("got connectionLost(%d) (not 4) during state 2" \
% childFD)
return
self.state = 3
self.transport.closeChildFD(5)
return
def processEnded(self, status):
rc = status.value.exitCode
if self.state != 4:
self.fail("processEnded early, rc %d" % rc)
return
        if status.value.signal is not None:
self.fail("processEnded with signal %s" % status.value.signal)
return
if rc != 0:
self.fail("processEnded with rc %d" % rc)
return
self.deferred.callback(None)
class FDTest(unittest.TestCase):
def testFD(self):
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_fds.py")
d = defer.Deferred()
p = FDChecker(d)
reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
path=None,
childFDs={0:"w", 1:"r", 2:2,
3:"w", 4:"r", 5:"w"})
        d.addCallback(lambda x: self.failIf(p.failed, p.failed))
return d
def testLinger(self):
# See what happens when all the pipes close before the process
# actually stops. This test *requires* SIGCHLD catching to work,
# as there is no other way to find out the process is done.
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_linger.py")
p = Accumulator()
d = p.endedDeferred = defer.Deferred()
reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
path=None,
childFDs={1:"r", 2:2},
)
def processEnded(ign):
self.failUnlessEqual(p.outF.getvalue(),
"here is some text\ngoodbye\n")
return d.addCallback(processEnded)
class Accumulator(protocol.ProcessProtocol):
"""Accumulate data from a process."""
closed = 0
endedDeferred = None
def connectionMade(self):
self.outF = StringIO.StringIO()
self.errF = StringIO.StringIO()
def outReceived(self, d):
self.outF.write(d)
def errReceived(self, d):
self.errF.write(d)
def outConnectionLost(self):
pass
def errConnectionLost(self):
pass
def processEnded(self, reason):
self.closed = 1
if self.endedDeferred is not None:
d, self.endedDeferred = self.endedDeferred, None
d.callback(None)
class PosixProcessBase:
"""
Test running processes.
"""
usePTY = False
def getCommand(self, commandName):
"""
Return the path of the shell command named C{commandName}, looking at
common locations.
"""
if os.path.exists('/bin/%s' % (commandName,)):
cmd = '/bin/%s' % (commandName,)
elif os.path.exists('/usr/bin/%s' % (commandName,)):
cmd = '/usr/bin/%s' % (commandName,)
else:
raise RuntimeError(
"%s not found in /bin or /usr/bin" % (commandName,))
return cmd
def testNormalTermination(self):
cmd = self.getCommand('true')
d = defer.Deferred()
p = TrivialProcessProtocol(d)
reactor.spawnProcess(p, cmd, ['true'], env=None,
usePTY=self.usePTY)
def check(ignored):
p.reason.trap(error.ProcessDone)
self.assertEquals(p.reason.value.exitCode, 0)
self.assertEquals(p.reason.value.signal, None)
d.addCallback(check)
return d
def test_abnormalTermination(self):
"""
When a process terminates with a system exit code set to 1,
C{processEnded} is called with a L{error.ProcessTerminated} error,
the C{exitCode} attribute reflecting the system exit code.
"""
exe = sys.executable
d = defer.Deferred()
p = TrivialProcessProtocol(d)
reactor.spawnProcess(p, exe, [exe, '-c', 'import sys; sys.exit(1)'],
env=None, usePTY=self.usePTY)
def check(ignored):
p.reason.trap(error.ProcessTerminated)
self.assertEquals(p.reason.value.exitCode, 1)
self.assertEquals(p.reason.value.signal, None)
d.addCallback(check)
return d
def _testSignal(self, sig):
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_signal.py")
d = defer.Deferred()
p = SignalProtocol(d, sig)
reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
usePTY=self.usePTY)
return d
def test_signalHUP(self):
"""
Sending the SIGHUP signal to a running process interrupts it, and
C{processEnded} is called with a L{error.ProcessTerminated} instance
with the C{exitCode} set to C{None} and the C{signal} attribute set to
C{signal.SIGHUP}. C{os.WTERMSIG} can also be used on the C{status}
attribute to extract the signal value.
"""
return self._testSignal('HUP')
def test_signalINT(self):
"""
Sending the SIGINT signal to a running process interrupts it, and
C{processEnded} is called with a L{error.ProcessTerminated} instance
with the C{exitCode} set to C{None} and the C{signal} attribute set to
C{signal.SIGINT}. C{os.WTERMSIG} can also be used on the C{status}
attribute to extract the signal value.
"""
return self._testSignal('INT')
def test_signalKILL(self):
"""
Sending the SIGKILL signal to a running process interrupts it, and
C{processEnded} is called with a L{error.ProcessTerminated} instance
with the C{exitCode} set to C{None} and the C{signal} attribute set to
C{signal.SIGKILL}. C{os.WTERMSIG} can also be used on the C{status}
attribute to extract the signal value.
"""
return self._testSignal('KILL')
def test_signalTERM(self):
"""
Sending the SIGTERM signal to a running process interrupts it, and
C{processEnded} is called with a L{error.ProcessTerminated} instance
with the C{exitCode} set to C{None} and the C{signal} attribute set to
C{signal.SIGTERM}. C{os.WTERMSIG} can also be used on the C{status}
attribute to extract the signal value.
"""
return self._testSignal('TERM')
def test_childSignalHandling(self):
"""
The disposition of signals which are ignored in the parent
process is reset to the default behavior for the child
process.
"""
# Somewhat arbitrarily select SIGUSR1 here. It satisfies our
# requirements that:
# - The interpreter not fiddle around with the handler
# behind our backs at startup time (this disqualifies
# signals like SIGINT and SIGPIPE).
# - The default behavior is to exit.
#
# This lets us send the signal to the child and then verify
# that it exits with a status code indicating that it was
# indeed the signal which caused it to exit.
which = signal.SIGUSR1
# Ignore the signal in the parent (and make sure we clean it
# up).
handler = signal.signal(which, signal.SIG_IGN)
self.addCleanup(signal.signal, signal.SIGUSR1, handler)
# Now do the test.
return self._testSignal(signal.SIGUSR1)
def test_executionError(self):
"""
Raise an error during execvpe to check error management.
"""
cmd = self.getCommand('false')
d = defer.Deferred()
p = TrivialProcessProtocol(d)
def buggyexecvpe(command, args, environment):
raise RuntimeError("Ouch")
oldexecvpe = os.execvpe
os.execvpe = buggyexecvpe
try:
reactor.spawnProcess(p, cmd, ['false'], env=None,
usePTY=self.usePTY)
def check(ignored):
errData = "".join(p.errData + p.outData)
self.assertIn("Upon execvpe", errData)
self.assertIn("Ouch", errData)
d.addCallback(check)
finally:
os.execvpe = oldexecvpe
return d
def test_errorInProcessEnded(self):
"""
The handler which reaps a process is removed when the process is
reaped, even if the protocol's C{processEnded} method raises an
exception.
"""
connected = defer.Deferred()
ended = defer.Deferred()
# This script runs until we disconnect its transport.
pythonExecutable = sys.executable
scriptPath = util.sibpath(__file__, "process_twisted.py")
class ErrorInProcessEnded(protocol.ProcessProtocol):
"""
A protocol that raises an error in C{processEnded}.
"""
def makeConnection(self, transport):
connected.callback(transport)
def processEnded(self, reason):
reactor.callLater(0, ended.callback, None)
raise RuntimeError("Deliberate error")
# Launch the process.
reactor.spawnProcess(
ErrorInProcessEnded(), pythonExecutable,
[pythonExecutable, scriptPath],
env=None, path=None)
pid = []
def cbConnected(transport):
pid.append(transport.pid)
# There's now a reap process handler registered.
self.assertIn(transport.pid, process.reapProcessHandlers)
# Kill the process cleanly, triggering an error in the protocol.
transport.loseConnection()
connected.addCallback(cbConnected)
def checkTerminated(ignored):
# The exception was logged.
excs = self.flushLoggedErrors(RuntimeError)
self.assertEqual(len(excs), 1)
# The process is no longer scheduled for reaping.
self.assertNotIn(pid[0], process.reapProcessHandlers)
ended.addCallback(checkTerminated)
return ended
class MockSignal(object):
"""
Neuter L{signal.signal}, but pass other attributes unscathed
"""
def signal(self, sig, action):
return signal.getsignal(sig)
def __getattr__(self, attr):
return getattr(signal, attr)
class MockOS(object):
"""
    The mock OS: overwrite L{os}, L{fcntl} and L{sys} functions with fake ones.
@ivar exited: set to True when C{_exit} is called.
@type exited: C{bool}
@ivar O_RDWR: dumb value faking C{os.O_RDWR}.
@type O_RDWR: C{int}
@ivar O_NOCTTY: dumb value faking C{os.O_NOCTTY}.
@type O_NOCTTY: C{int}
@ivar WNOHANG: dumb value faking C{os.WNOHANG}.
@type WNOHANG: C{int}
@ivar raiseFork: if not C{None}, subsequent calls to fork will raise this
object.
@type raiseFork: C{NoneType} or C{Exception}
@ivar raiseExec: if set, subsequent calls to execvpe will raise an error.
@type raiseExec: C{bool}
@ivar fdio: fake file object returned by calls to fdopen.
@type fdio: C{StringIO.StringIO}
@ivar actions: hold names of some actions executed by the object, in order
of execution.
@type actions: C{list} of C{str}
    @ivar closed: keep track of the file descriptors closed.
    @type closed: C{list} of C{int}
    @ivar child: whether fork returns as the child or the parent.
@type child: C{bool}
    @ivar pipeCount: count the number of times that C{os.pipe} has been called.
@type pipeCount: C{int}
    @ivar raiseWaitPid: if set, subsequent calls to waitpid will raise the
        error specified.
@type raiseWaitPid: C{None} or a class
@ivar waitChild: if set, subsequent calls to waitpid will return it.
@type waitChild: C{None} or a tuple
@ivar euid: the uid returned by the fake C{os.geteuid}
@type euid: C{int}
@ivar egid: the gid returned by the fake C{os.getegid}
@type egid: C{int}
@ivar seteuidCalls: stored results of C{os.seteuid} calls.
@type seteuidCalls: C{list}
@ivar setegidCalls: stored results of C{os.setegid} calls.
@type setegidCalls: C{list}
@ivar path: the path returned by C{os.path.expanduser}.
@type path: C{str}
"""
exited = False
raiseExec = False
fdio = None
child = True
raiseWaitPid = None
raiseFork = None
waitChild = None
euid = 0
egid = 0
path = None
def __init__(self):
"""
Initialize data structures.
"""
self.actions = []
self.closed = []
self.pipeCount = 0
self.O_RDWR = -1
self.O_NOCTTY = -2
self.WNOHANG = -4
self.WEXITSTATUS = lambda x: 0
self.WIFEXITED = lambda x: 1
self.seteuidCalls = []
self.setegidCalls = []
def open(self, dev, flags):
"""
        Fake C{os.open}. Return a non-fd number to be sure it's not used
        elsewhere.
"""
return -3
def fstat(self, fd):
"""
Fake C{os.fstat}. Return a C{os.stat_result} filled with garbage.
"""
return os.stat_result((0,) * 10)
def fdopen(self, fd, flag):
"""
Fake C{os.fdopen}. Return a StringIO object whose content can be tested
later via C{self.fdio}.
"""
self.fdio = StringIO.StringIO()
return self.fdio
def setsid(self):
"""
Fake C{os.setsid}. Do nothing.
"""
def fork(self):
"""
Fake C{os.fork}. Save the action in C{self.actions}, and return 0 if
C{self.child} is set, or a dumb number.
"""
self.actions.append(('fork', gc.isenabled()))
if self.raiseFork is not None:
raise self.raiseFork
elif self.child:
# Child result is 0
return 0
else:
return 21
def close(self, fd):
"""
Fake C{os.close}, saving the closed fd in C{self.closed}.
"""
self.closed.append(fd)
def dup2(self, fd1, fd2):
"""
Fake C{os.dup2}. Do nothing.
"""
def write(self, fd, data):
"""
Fake C{os.write}. Do nothing.
"""
def execvpe(self, command, args, env):
"""
Fake C{os.execvpe}. Save the action, and raise an error if
C{self.raiseExec} is set.
"""
self.actions.append('exec')
if self.raiseExec:
raise RuntimeError("Bar")
def pipe(self):
"""
        Fake C{os.pipe}. Return non-fd numbers to be sure they're not used
        elsewhere, and increment C{self.pipeCount}. This is used to uniquify
the result.
"""
self.pipeCount += 1
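        # Added note (not in the original): successive calls yield (-1, -2),
        # (-3, -4), (-5, -6), ...; the fd-closing assertions in
        # MockProcessTestCase match against these values.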
        return -2 * self.pipeCount + 1, -2 * self.pipeCount
def ttyname(self, fd):
"""
Fake C{os.ttyname}. Return a dumb string.
"""
return "foo"
def _exit(self, code):
"""
Fake C{os._exit}. Save the action, set the C{self.exited} flag, and
raise C{SystemError}.
"""
self.actions.append('exit')
self.exited = True
        # Don't forget to raise an error, or you'll end up in the parent
        # code path.
raise SystemError()
def ioctl(self, fd, flags, arg):
"""
Override C{fcntl.ioctl}. Do nothing.
"""
def setNonBlocking(self, fd):
"""
Override C{fdesc.setNonBlocking}. Do nothing.
"""
def waitpid(self, pid, options):
"""
Override C{os.waitpid}. Return values meaning that the child process
has exited, save executed action.
"""
self.actions.append('waitpid')
if self.raiseWaitPid is not None:
raise self.raiseWaitPid
if self.waitChild is not None:
return self.waitChild
return 1, 0
def settrace(self, arg):
"""
Override C{sys.settrace} to keep coverage working.
"""
def getgid(self):
"""
Override C{os.getgid}. Return a dumb number.
"""
return 1235
def getuid(self):
"""
Override C{os.getuid}. Return a dumb number.
"""
return 1237
def setuid(self, val):
"""
        Override C{os.setuid}. Save the action.
"""
self.actions.append(('setuid', val))
def setgid(self, val):
"""
        Override C{os.setgid}. Save the action.
"""
self.actions.append(('setgid', val))
def setregid(self, val1, val2):
"""
        Override C{os.setregid}. Save the action.
"""
self.actions.append(('setregid', val1, val2))
def setreuid(self, val1, val2):
"""
Override C{os.setreuid}. Save the action.
"""
self.actions.append(('setreuid', val1, val2))
def switchUID(self, uid, gid):
"""
Override C{util.switchuid}. Save the action.
"""
self.actions.append(('switchuid', uid, gid))
def openpty(self):
"""
Override C{pty.openpty}, returning fake file descriptors.
"""
return -12, -13
def geteuid(self):
"""
Mock C{os.geteuid}, returning C{self.euid} instead.
"""
return self.euid
def getegid(self):
"""
Mock C{os.getegid}, returning C{self.egid} instead.
"""
return self.egid
def seteuid(self, egid):
"""
Mock C{os.seteuid}, store result.
"""
self.seteuidCalls.append(egid)
def setegid(self, egid):
"""
Mock C{os.setegid}, store result.
"""
self.setegidCalls.append(egid)
def expanduser(self, path):
"""
Mock C{os.path.expanduser}.
"""
return self.path
def getpwnam(self, user):
"""
Mock C{pwd.getpwnam}.
"""
return 0, 0, 1, 2
def listdir(self, path):
"""
Override C{os.listdir}, returning fake contents of '/dev/fd'
"""
return "-1", "-2"
if process is not None:
class DumbProcessWriter(process.ProcessWriter):
"""
A fake L{process.ProcessWriter} used for tests.
"""
def startReading(self):
"""
Here's the faking: don't do anything here.
"""
class DumbProcessReader(process.ProcessReader):
"""
A fake L{process.ProcessReader} used for tests.
"""
def startReading(self):
"""
Here's the faking: don't do anything here.
"""
class DumbPTYProcess(process.PTYProcess):
"""
A fake L{process.PTYProcess} used for tests.
"""
def startReading(self):
"""
Here's the faking: don't do anything here.
"""
class MockProcessTestCase(unittest.TestCase):
"""
Mock a process runner to test forked child code path.
"""
if process is None:
skip = "twisted.internet.process is never used on Windows"
def setUp(self):
"""
Replace L{process} os, fcntl, sys, switchUID, fdesc and pty modules
with the mock class L{MockOS}.
"""
if gc.isenabled():
self.addCleanup(gc.enable)
else:
self.addCleanup(gc.disable)
self.mockos = MockOS()
self.mockos.euid = 1236
self.mockos.egid = 1234
self.patch(process, "os", self.mockos)
self.patch(process, "fcntl", self.mockos)
self.patch(process, "sys", self.mockos)
self.patch(process, "switchUID", self.mockos.switchUID)
self.patch(process, "fdesc", self.mockos)
self.patch(process.Process, "processReaderFactory", DumbProcessReader)
self.patch(process.Process, "processWriterFactory", DumbProcessWriter)
self.patch(process, "pty", self.mockos)
self.mocksig = MockSignal()
self.patch(process, "signal", self.mocksig)
def tearDown(self):
"""
Reset processes registered for reap.
"""
process.reapProcessHandlers = {}
def test_mockFork(self):
"""
Test a classic spawnProcess. Check the path of the client code:
fork, exec, exit.
"""
gc.enable()
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
try:
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=False)
except SystemError:
self.assert_(self.mockos.exited)
self.assertEquals(
self.mockos.actions, [("fork", False), "exec", "exit"])
else:
self.fail("Should not be here")
# It should leave the garbage collector disabled.
self.assertFalse(gc.isenabled())
def _mockForkInParentTest(self):
"""
Assert that in the main process, spawnProcess disables the garbage
collector, calls fork, closes the pipe file descriptors it created for
the child process, and calls waitpid.
"""
self.mockos.child = False
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=False)
# It should close the first read pipe, and the last two write pipes
self.assertEqual(set(self.mockos.closed), set([-1, -4, -6]))
self.assertEquals(self.mockos.actions, [("fork", False), "waitpid"])
def test_mockForkInParentGarbageCollectorEnabled(self):
"""
The garbage collector should be enabled when L{reactor.spawnProcess}
returns if it was initially enabled.
@see L{_mockForkInParentTest}
"""
gc.enable()
self._mockForkInParentTest()
self.assertTrue(gc.isenabled())
def test_mockForkInParentGarbageCollectorDisabled(self):
"""
The garbage collector should be disabled when L{reactor.spawnProcess}
returns if it was initially disabled.
@see L{_mockForkInParentTest}
"""
gc.disable()
self._mockForkInParentTest()
self.assertFalse(gc.isenabled())
def test_mockForkTTY(self):
"""
Test a TTY spawnProcess: check the path of the client code:
fork, exec, exit.
"""
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
try:
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=True)
except SystemError:
self.assert_(self.mockos.exited)
self.assertEquals(
self.mockos.actions, [("fork", False), "exec", "exit"])
else:
self.fail("Should not be here")
def _mockWithForkError(self):
"""
Assert that if the fork call fails, no other process setup calls are
made and that spawnProcess raises the exception fork raised.
"""
self.mockos.raiseFork = OSError(errno.EAGAIN, None)
protocol = TrivialProcessProtocol(None)
self.assertRaises(OSError, reactor.spawnProcess, protocol, None)
self.assertEqual(self.mockos.actions, [("fork", False)])
def test_mockWithForkErrorGarbageCollectorEnabled(self):
"""
The garbage collector should be enabled when L{reactor.spawnProcess}
raises because L{os.fork} raised, if it was initially enabled.
"""
gc.enable()
self._mockWithForkError()
self.assertTrue(gc.isenabled())
def test_mockWithForkErrorGarbageCollectorDisabled(self):
"""
The garbage collector should be disabled when
L{reactor.spawnProcess} raises because L{os.fork} raised, if it was
initially disabled.
"""
gc.disable()
self._mockWithForkError()
self.assertFalse(gc.isenabled())
def test_mockForkErrorCloseFDs(self):
"""
When C{os.fork} raises an exception, the file descriptors created
before are closed and don't leak.
"""
self._mockWithForkError()
self.assertEqual(set(self.mockos.closed), set([-1, -4, -6, -2, -3, -5]))
def test_mockForkErrorGivenFDs(self):
"""
When C{os.fork} raises an exception and file descriptors have
been specified with the C{childFDs} argument of
L{reactor.spawnProcess}, they are not closed.
"""
self.mockos.raiseFork = OSError(errno.EAGAIN, None)
protocol = TrivialProcessProtocol(None)
self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
childFDs={0: -10, 1: -11, 2: -13})
self.assertEqual(self.mockos.actions, [("fork", False)])
self.assertEqual(self.mockos.closed, [])
# We can also put "r" or "w" to let twisted create the pipes
self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
childFDs={0: "r", 1: -11, 2: -13})
self.assertEqual(set(self.mockos.closed), set([-1, -2]))
def test_mockForkErrorClosePTY(self):
"""
When C{os.fork} raises an exception, the file descriptors created by
C{pty.openpty} are closed and don't leak, when C{usePTY} is set to
C{True}.
"""
self.mockos.raiseFork = OSError(errno.EAGAIN, None)
protocol = TrivialProcessProtocol(None)
self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
usePTY=True)
self.assertEqual(self.mockos.actions, [("fork", False)])
self.assertEqual(set(self.mockos.closed), set([-12, -13]))
def test_mockForkErrorPTYGivenFDs(self):
"""
If a tuple is passed to C{usePTY} to specify slave and master file
descriptors and that C{os.fork} raises an exception, these file
descriptors aren't closed.
"""
self.mockos.raiseFork = OSError(errno.EAGAIN, None)
protocol = TrivialProcessProtocol(None)
self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
usePTY=(-20, -21, 'foo'))
self.assertEqual(self.mockos.actions, [("fork", False)])
self.assertEqual(self.mockos.closed, [])
def test_mockWithExecError(self):
"""
Spawn a process but simulate an error during execution in the client
path: C{os.execvpe} raises an error. It should close all the standard
fds, try to print the error encountered, and exit cleanly.
"""
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
self.mockos.raiseExec = True
try:
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=False)
except SystemError:
self.assert_(self.mockos.exited)
self.assertEquals(
self.mockos.actions, [("fork", False), "exec", "exit"])
# Check that fd have been closed
self.assertIn(0, self.mockos.closed)
self.assertIn(1, self.mockos.closed)
self.assertIn(2, self.mockos.closed)
# Check content of traceback
self.assertIn("RuntimeError: Bar", self.mockos.fdio.getvalue())
else:
self.fail("Should not be here")
def test_mockSetUid(self):
"""
Try creating a process with setting its uid: it's almost the same path
as the standard path, but with a C{switchUID} call before the exec.
"""
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
try:
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=False, uid=8080)
except SystemError:
self.assert_(self.mockos.exited)
self.assertEquals(self.mockos.actions,
[('setuid', 0), ('setgid', 0), ('fork', False),
('switchuid', 8080, 1234), 'exec', 'exit'])
else:
self.fail("Should not be here")
def test_mockSetUidInParent(self):
"""
Try creating a process with setting its uid, in the parent path: it
should switch to root before fork, then restore initial uid/gids.
"""
self.mockos.child = False
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=False, uid=8080)
self.assertEquals(self.mockos.actions,
[('setuid', 0), ('setgid', 0), ('fork', False),
('setregid', 1235, 1234), ('setreuid', 1237, 1236), 'waitpid'])
def test_mockPTYSetUid(self):
"""
Try creating a PTY process with setting its uid: it's almost the same
path as the standard path, but with a C{switchUID} call before the
exec.
"""
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
try:
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=True, uid=8081)
except SystemError:
self.assert_(self.mockos.exited)
self.assertEquals(self.mockos.actions,
[('setuid', 0), ('setgid', 0), ('fork', False),
('switchuid', 8081, 1234), 'exec', 'exit'])
else:
self.fail("Should not be here")
def test_mockPTYSetUidInParent(self):
"""
Try creating a PTY process with setting its uid, in the parent path: it
should switch to root before fork, then restore initial uid/gids.
"""
self.mockos.child = False
cmd = '/mock/ouch'
d = defer.Deferred()
p = TrivialProcessProtocol(d)
oldPTYProcess = process.PTYProcess
try:
process.PTYProcess = DumbPTYProcess
reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=True, uid=8080)
finally:
process.PTYProcess = oldPTYProcess
self.assertEquals(self.mockos.actions,
[('setuid', 0), ('setgid', 0), ('fork', False),
('setregid', 1235, 1234), ('setreuid', 1237, 1236), 'waitpid'])
def test_mockWithWaitError(self):
"""
Test that reapProcess logs errors raised.
"""
self.mockos.child = False
cmd = '/mock/ouch'
self.mockos.waitChild = (0, 0)
d = defer.Deferred()
p = TrivialProcessProtocol(d)
proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=False)
self.assertEquals(self.mockos.actions, [("fork", False), "waitpid"])
self.mockos.raiseWaitPid = OSError()
proc.reapProcess()
errors = self.flushLoggedErrors()
self.assertEquals(len(errors), 1)
errors[0].trap(OSError)
def test_mockErrorECHILDInReapProcess(self):
"""
Test that reapProcess doesn't log anything when waitpid raises a
C{OSError} with errno C{ECHILD}.
"""
self.mockos.child = False
cmd = '/mock/ouch'
self.mockos.waitChild = (0, 0)
d = defer.Deferred()
p = TrivialProcessProtocol(d)
proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None,
usePTY=False)
self.assertEquals(self.mockos.actions, [("fork", False), "waitpid"])
self.mockos.raiseWaitPid = OSError()
self.mockos.raiseWaitPid.errno = errno.ECHILD
# This should not produce any errors
proc.reapProcess()
def test_mockErrorInPipe(self):
"""
If C{os.pipe} raises an exception after some pipes were created, the
created pipes are closed and don't leak.
"""
pipes = [-1, -2, -3, -4]
def pipe():
try:
return pipes.pop(0), pipes.pop(0)
except IndexError:
raise OSError()
self.mockos.pipe = pipe
protocol = TrivialProcessProtocol(None)
self.assertRaises(OSError, reactor.spawnProcess, protocol, None)
self.assertEqual(self.mockos.actions, [])
self.assertEqual(set(self.mockos.closed), set([-4, -3, -2, -1]))
def test_mockErrorInForkRestoreUID(self):
"""
If C{os.fork} raises an exception and a UID change has been made, the
previous UID and GID are restored.
"""
self.mockos.raiseFork = OSError(errno.EAGAIN, None)
protocol = TrivialProcessProtocol(None)
self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
uid=8080)
self.assertEqual(self.mockos.actions,
[('setuid', 0), ('setgid', 0), ("fork", False),
('setregid', 1235, 1234), ('setreuid', 1237, 1236)])
class PosixProcessTestCase(unittest.TestCase, PosixProcessBase):
# add two non-pty test cases
def test_stderr(self):
"""
Bytes written to stderr by the spawned process are passed to the
C{errReceived} callback on the C{ProcessProtocol} passed to
C{spawnProcess}.
"""
cmd = sys.executable
value = "42"
p = Accumulator()
d = p.endedDeferred = defer.Deferred()
reactor.spawnProcess(p, cmd,
[cmd, "-c",
"import sys; sys.stderr.write('%s')" % (value,)],
env=None, path="/tmp",
usePTY=self.usePTY)
def processEnded(ign):
self.assertEquals(value, p.errF.getvalue())
return d.addCallback(processEnded)
def testProcess(self):
cmd = self.getCommand('gzip')
s = "there's no place like home!\n" * 3
p = Accumulator()
d = p.endedDeferred = defer.Deferred()
reactor.spawnProcess(p, cmd, [cmd, "-c"], env=None, path="/tmp",
usePTY=self.usePTY)
p.transport.write(s)
p.transport.closeStdin()
def processEnded(ign):
f = p.outF
f.seek(0, 0)
gf = gzip.GzipFile(fileobj=f)
self.assertEquals(gf.read(), s)
return d.addCallback(processEnded)
class PosixProcessTestCasePTY(unittest.TestCase, PosixProcessBase):
"""
Just like PosixProcessTestCase, but use ptys instead of pipes.
"""
usePTY = True
# PTYs only offer one input and one output. What still makes sense?
# testNormalTermination
# test_abnormalTermination
# testSignal
# testProcess, but not without p.transport.closeStdin
# might be solvable: TODO: add test if so
def testOpeningTTY(self):
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_tty.py")
p = Accumulator()
d = p.endedDeferred = defer.Deferred()
reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
path=None, usePTY=self.usePTY)
p.transport.write("hello world!\n")
def processEnded(ign):
self.assertRaises(
error.ProcessExitedAlready, p.transport.signalProcess, 'HUP')
self.assertEquals(
p.outF.getvalue(),
"hello world!\r\nhello world!\r\n",
"Error message from process_tty follows:\n\n%s\n\n" % p.outF.getvalue())
return d.addCallback(processEnded)
def testBadArgs(self):
pyExe = sys.executable
pyArgs = [pyExe, "-u", "-c", "print 'hello'"]
p = Accumulator()
self.assertRaises(ValueError, reactor.spawnProcess, p, pyExe, pyArgs,
usePTY=1, childFDs={1:'r'})
class Win32SignalProtocol(SignalProtocol):
"""
A win32-specific process protocol that handles C{processEnded}
differently: processes should exit with exit code 1.
"""
def processEnded(self, reason):
"""
Callback C{self.deferred} with C{None} if C{reason} is a
L{error.ProcessTerminated} failure with C{exitCode} set to 1.
Otherwise, errback with a C{ValueError} describing the problem.
"""
if not reason.check(error.ProcessTerminated):
return self.deferred.errback(
ValueError("wrong termination: %s" % (reason,)))
v = reason.value
if v.exitCode != 1:
return self.deferred.errback(
ValueError("Wrong exit code: %s" % (reason.exitCode,)))
self.deferred.callback(None)
class Win32ProcessTestCase(unittest.TestCase):
"""
Test process programs that are packaged with twisted.
"""
def testStdinReader(self):
pyExe = sys.executable
scriptPath = util.sibpath(__file__, "process_stdinreader.py")
p = Accumulator()
d = p.endedDeferred = defer.Deferred()
reactor.spawnProcess(p, pyExe, [pyExe, "-u", scriptPath], env=None,
path=None)
p.transport.write("hello, world")
p.transport.closeStdin()
def processEnded(ign):
self.assertEquals(p.errF.getvalue(), "err\nerr\n")
self.assertEquals(p.outF.getvalue(), "out\nhello, world\nout\n")
return d.addCallback(processEnded)
def testBadArgs(self):
pyExe = sys.executable
pyArgs = [pyExe, "-u", "-c", "print 'hello'"]
p = Accumulator()
self.assertRaises(ValueError,
reactor.spawnProcess, p, pyExe, pyArgs, uid=1)
self.assertRaises(ValueError,
reactor.spawnProcess, p, pyExe, pyArgs, gid=1)
self.assertRaises(ValueError,
reactor.spawnProcess, p, pyExe, pyArgs, usePTY=1)
self.assertRaises(ValueError,
reactor.spawnProcess, p, pyExe, pyArgs, childFDs={1:'r'})
def _testSignal(self, sig):
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_signal.py")
d = defer.Deferred()
p = Win32SignalProtocol(d, sig)
reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None)
return d
def test_signalTERM(self):
"""
Sending the SIGTERM signal terminates a created process, and
C{processEnded} is called with a L{error.ProcessTerminated} instance
with the C{exitCode} attribute set to 1.
"""
return self._testSignal('TERM')
def test_signalINT(self):
"""
Sending the SIGINT signal terminates a created process, and
C{processEnded} is called with a L{error.ProcessTerminated} instance
with the C{exitCode} attribute set to 1.
"""
return self._testSignal('INT')
def test_signalKILL(self):
"""
Sending the SIGKILL signal terminates a created process, and
C{processEnded} is called with a L{error.ProcessTerminated} instance
with the C{exitCode} attribute set to 1.
"""
return self._testSignal('KILL')
def test_closeHandles(self):
"""
The win32 handles should be properly closed when the process exits.
"""
import win32api
connected = defer.Deferred()
ended = defer.Deferred()
class SimpleProtocol(protocol.ProcessProtocol):
"""
A protocol that fires deferreds when connected and disconnected.
"""
def makeConnection(self, transport):
connected.callback(transport)
def processEnded(self, reason):
ended.callback(None)
p = SimpleProtocol()
pyExe = sys.executable
pyArgs = [pyExe, "-u", "-c", "print 'hello'"]
proc = reactor.spawnProcess(p, pyExe, pyArgs)
def cbConnected(transport):
self.assertIdentical(transport, proc)
# perform a basic validity test on the handles
win32api.GetHandleInformation(proc.hProcess)
win32api.GetHandleInformation(proc.hThread)
# And save their values for later
self.hProcess = proc.hProcess
self.hThread = proc.hThread
connected.addCallback(cbConnected)
def checkTerminated(ignored):
# The attributes on the process object must be reset...
self.assertIdentical(proc.pid, None)
self.assertIdentical(proc.hProcess, None)
self.assertIdentical(proc.hThread, None)
# ...and the handles must be closed.
self.assertRaises(win32api.error,
win32api.GetHandleInformation, self.hProcess)
self.assertRaises(win32api.error,
win32api.GetHandleInformation, self.hThread)
ended.addCallback(checkTerminated)
return defer.gatherResults([connected, ended])
class Dumbwin32procPidTest(unittest.TestCase):
"""
Simple test for the pid attribute of Process on win32.
"""
def test_pid(self):
"""
Launch process with mock win32process. The only mock aspect of this
module is that the pid of the process created will always be 42.
"""
from twisted.internet import _dumbwin32proc
from twisted.test import mock_win32process
self.patch(_dumbwin32proc, "win32process", mock_win32process)
exe = sys.executable
scriptPath = util.sibpath(__file__, "process_cmdline.py")
d = defer.Deferred()
processProto = TrivialProcessProtocol(d)
comspec = str(os.environ["COMSPEC"])
cmd = [comspec, "/c", exe, scriptPath]
p = _dumbwin32proc.Process(reactor,
processProto,
None,
cmd,
{},
None)
self.assertEquals(42, p.pid)
self.assertEquals("<Process pid=42>", repr(p))
def pidCompleteCb(result):
self.assertEquals(None, p.pid)
return d.addCallback(pidCompleteCb)
class UtilTestCase(unittest.TestCase):
"""
Tests for process-related helper functions (currently only
L{procutils.which}).
"""
def setUp(self):
"""
Create several directories and files, some of which are executable
and some of which are not. Save the current PATH setting.
"""
j = os.path.join
base = self.mktemp()
self.foo = j(base, "foo")
self.baz = j(base, "baz")
self.foobar = j(self.foo, "bar")
self.foobaz = j(self.foo, "baz")
self.bazfoo = j(self.baz, "foo")
self.bazbar = j(self.baz, "bar")
for d in self.foobar, self.foobaz, self.bazfoo, self.bazbar:
os.makedirs(d)
for name, mode in [(j(self.foobaz, "executable"), 0700),
(j(self.foo, "executable"), 0700),
(j(self.bazfoo, "executable"), 0700),
(j(self.bazfoo, "executable.bin"), 0700),
(j(self.bazbar, "executable"), 0)]:
f = file(name, "w")
f.close()
os.chmod(name, mode)
self.oldPath = os.environ.get('PATH', None)
os.environ['PATH'] = os.pathsep.join((
self.foobar, self.foobaz, self.bazfoo, self.bazbar))
def tearDown(self):
"""
Restore the saved PATH setting, and set all created files readable
again so that they can be deleted easily.
"""
os.chmod(os.path.join(self.bazbar, "executable"), stat.S_IWUSR)
if self.oldPath is None:
try:
del os.environ['PATH']
except KeyError:
pass
else:
os.environ['PATH'] = self.oldPath
def test_whichWithoutPATH(self):
"""
Test that if C{os.environ} does not have a C{'PATH'} key,
L{procutils.which} returns an empty list.
"""
del os.environ['PATH']
self.assertEqual(procutils.which("executable"), [])
def testWhich(self):
j = os.path.join
paths = procutils.which("executable")
expectedPaths = [j(self.foobaz, "executable"),
j(self.bazfoo, "executable")]
if runtime.platform.isWindows():
expectedPaths.append(j(self.bazbar, "executable"))
self.assertEquals(paths, expectedPaths)
def testWhichPathExt(self):
j = os.path.join
old = os.environ.get('PATHEXT', None)
os.environ['PATHEXT'] = os.pathsep.join(('.bin', '.exe', '.sh'))
try:
paths = procutils.which("executable")
finally:
if old is None:
del os.environ['PATHEXT']
else:
os.environ['PATHEXT'] = old
expectedPaths = [j(self.foobaz, "executable"),
j(self.bazfoo, "executable"),
j(self.bazfoo, "executable.bin")]
if runtime.platform.isWindows():
expectedPaths.append(j(self.bazbar, "executable"))
self.assertEquals(paths, expectedPaths)
class ClosingPipesProcessProtocol(protocol.ProcessProtocol):
output = ''
errput = ''
def __init__(self, outOrErr):
self.deferred = defer.Deferred()
self.outOrErr = outOrErr
def processEnded(self, reason):
self.deferred.callback(reason)
def outReceived(self, data):
self.output += data
def errReceived(self, data):
self.errput += data
class ClosingPipes(unittest.TestCase):
def doit(self, fd):
"""
Create a child process and close one of its output descriptors using
L{IProcessTransport.closeStdout} or L{IProcessTransport.closeStderr}.
Return a L{Deferred} which fires after verifying that the descriptor was
really closed.
"""
p = ClosingPipesProcessProtocol(True)
self.assertFailure(p.deferred, error.ProcessTerminated)
p.deferred.addCallback(self._endProcess, p)
reactor.spawnProcess(
p, sys.executable, [
sys.executable, '-u', '-c',
'raw_input()\n'
'import sys, os, time\n'
# Give the system a bit of time to notice the closed
# descriptor. Another option would be to poll() for HUP
# instead of relying on an os.write to fail with SIGPIPE.
# However, that wouldn't work on OS X (or Windows?).
'for i in range(1000):\n'
' os.write(%d, "foo\\n")\n'
' time.sleep(0.01)\n'
'sys.exit(42)\n' % (fd,)
],
env=None)
if fd == 1:
p.transport.closeStdout()
elif fd == 2:
p.transport.closeStderr()
else:
raise RuntimeError
# Give the close time to propagate
p.transport.write('go\n')
# make the buggy case not hang
p.transport.closeStdin()
return p.deferred
def _endProcess(self, reason, p):
"""
Check that a failed write prevented the process from getting to its
custom exit code.
"""
# child must not get past that write without raising
self.assertNotEquals(
reason.exitCode, 42, 'process reason was %r' % reason)
self.assertEquals(p.output, '')
return p.errput
def test_stdout(self):
"""
ProcessProtocol.transport.closeStdout actually closes the pipe.
"""
d = self.doit(1)
def _check(errput):
self.assertIn('OSError', errput)
if runtime.platform.getType() != 'win32':
self.assertIn('Broken pipe', errput)
d.addCallback(_check)
return d
def test_stderr(self):
"""
ProcessProtocol.transport.closeStderr actually closes the pipe.
"""
d = self.doit(2)
def _check(errput):
# there should be no stderr open, so nothing for it to
# write the error to.
self.assertEquals(errput, '')
d.addCallback(_check)
return d
skipMessage = "wrong platform or reactor doesn't support IReactorProcess"
if (runtime.platform.getType() != 'posix') or (not interfaces.IReactorProcess(reactor, None)):
PosixProcessTestCase.skip = skipMessage
PosixProcessTestCasePTY.skip = skipMessage
TestTwoProcessesPosix.skip = skipMessage
FDTest.skip = skipMessage
if (runtime.platform.getType() != 'win32') or (not interfaces.IReactorProcess(reactor, None)):
Win32ProcessTestCase.skip = skipMessage
TestTwoProcessesNonPosix.skip = skipMessage
Dumbwin32procPidTest.skip = skipMessage
if not interfaces.IReactorProcess(reactor, None):
ProcessTestCase.skip = skipMessage
ClosingPipes.skip = skipMessage
| apache-2.0 |
staslev/incubator-beam | sdks/python/apache_beam/utils/processes.py | 18 | 1744 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Cross-platform utilities for creating subprocesses.
For internal use only; no backwards-compatibility guarantees.
"""
import platform
import subprocess
# On Windows, we need to use shell=True when creating subprocesses for binary
# paths to be resolved correctly.
force_shell = platform.system() == 'Windows'
# We mimic the interface of the standard Python subprocess module.
PIPE = subprocess.PIPE
STDOUT = subprocess.STDOUT
def call(*args, **kwargs):
if force_shell:
kwargs['shell'] = True
return subprocess.call(*args, **kwargs)
def check_call(*args, **kwargs):
if force_shell:
kwargs['shell'] = True
return subprocess.check_call(*args, **kwargs)
def check_output(*args, **kwargs):
if force_shell:
kwargs['shell'] = True
return subprocess.check_output(*args, **kwargs)
def Popen(*args, **kwargs): # pylint: disable=invalid-name
if force_shell:
kwargs['shell'] = True
return subprocess.Popen(*args, **kwargs)
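# A minimal usage sketch (not part of the original module): these wrappers
# are drop-in replacements for the subprocess functions of the same name,
# so existing call sites only need to change their import. For example:
#
#   from apache_beam.utils import processes
#   processes.check_call(['gzip', '-k', 'data.txt'])
#   out = processes.check_output(['echo', 'hello'])
#
# On Windows, force_shell transparently adds shell=True so that binary
# paths are resolved the same way as on POSIX systems.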
| apache-2.0 |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/scipy/io/wavfile.py | 13 | 9734 | """
Module to read / write wav files using numpy arrays
Functions
---------
`read`: Return the sample rate (in samples/sec) and data from a WAV file.
`write`: Write a numpy array as a WAV file.
"""
from __future__ import division, print_function, absolute_import
import sys
import numpy
import struct
import warnings
__all__ = [
'WavFileWarning',
'read',
'write'
]
class WavFileWarning(UserWarning):
pass
WAVE_FORMAT_PCM = 0x0001
WAVE_FORMAT_IEEE_FLOAT = 0x0003
WAVE_FORMAT_EXTENSIBLE = 0xfffe
KNOWN_WAVE_FORMATS = (WAVE_FORMAT_PCM, WAVE_FORMAT_IEEE_FLOAT)
# assumes file pointer is immediately
# after the 'fmt ' id
def _read_fmt_chunk(fid, is_big_endian):
if is_big_endian:
fmt = '>'
else:
fmt = '<'
size = struct.unpack(fmt+'I', fid.read(4))[0]
bytes_read = 0
if size < 16:
raise ValueError("Binary structure of wave file is not compliant")
res = struct.unpack(fmt+'HHIIHH', fid.read(16))
bytes_read += 16
comp, noc, rate, sbytes, ba, bits = res
if comp == WAVE_FORMAT_EXTENSIBLE and size >= (16+2):
ext_chunk_size = struct.unpack(fmt+'H', fid.read(2))[0]
bytes_read += 2
if ext_chunk_size >= 22:
extensible_chunk_data = fid.read(22)
bytes_read += 22
raw_guid = extensible_chunk_data[2+4:2+4+16]
# GUID template {XXXXXXXX-0000-0010-8000-00AA00389B71} (RFC-2361)
# MS GUID byte order: first three groups are native byte order, rest is Big Endian
if is_big_endian:
tail = b'\x00\x00\x00\x10\x80\x00\x00\xAA\x00\x38\x9B\x71'
else:
tail = b'\x00\x00\x10\x00\x80\x00\x00\xAA\x00\x38\x9B\x71'
if raw_guid.endswith(tail):
comp = struct.unpack(fmt+'I', raw_guid[:4])[0]
else:
raise ValueError("Binary structure of wave file is not compliant")
if comp not in KNOWN_WAVE_FORMATS:
raise ValueError("Unknown wave file format")
# move file pointer to next chunk
if size > bytes_read:
fid.read(size - bytes_read)
return size, comp, noc, rate, sbytes, ba, bits
# assumes file pointer is immediately
# after the 'data' id
def _read_data_chunk(fid, comp, noc, bits, is_big_endian, mmap=False):
if is_big_endian:
fmt = '>I'
else:
fmt = '<I'
size = struct.unpack(fmt, fid.read(4))[0]
bytes = bits//8
if bits == 8:
dtype = 'u1'
else:
if is_big_endian:
dtype = '>'
else:
dtype = '<'
if comp == 1:
dtype += 'i%d' % bytes
else:
dtype += 'f%d' % bytes
if not mmap:
data = numpy.fromstring(fid.read(size), dtype=dtype)
else:
start = fid.tell()
data = numpy.memmap(fid, dtype=dtype, mode='c', offset=start,
shape=(size//bytes,))
fid.seek(start + size)
if noc > 1:
data = data.reshape(-1, noc)
return data
def _skip_unknown_chunk(fid, is_big_endian):
if is_big_endian:
fmt = '>I'
else:
fmt = '<I'
data = fid.read(4)
# Call unpack() and seek() only if we have actually read data from the
# file; otherwise an empty read at the end of the file would trigger an
# unnecessary exception at the unpack() call. If the size happens to
# unpack to 0, there is no need to seek() anyway.
if data:
size = struct.unpack(fmt, data)[0]
fid.seek(size, 1)
def _read_riff_chunk(fid):
str1 = fid.read(4)
if str1 == b'RIFF':
is_big_endian = False
elif str1 == b'RIFX':
is_big_endian = True
else:
raise ValueError("Not a WAV file.")
if is_big_endian:
fmt = '>I'
else:
fmt = '<I'
fsize = struct.unpack(fmt, fid.read(4))[0] + 8
str2 = fid.read(4)
if (str2 != b'WAVE'):
raise ValueError("Not a WAV file.")
return fsize, is_big_endian
# open a wave-file
def read(filename, mmap=False):
"""
Return the sample rate (in samples/sec) and data from a WAV file
Parameters
----------
filename : string or open file handle
Input wav file.
mmap : bool, optional
Whether to read data as memory mapped.
Only to be used on real files (Default: False)
.. versionadded:: 0.12.0
Returns
-------
rate : int
Sample rate of wav file
data : numpy array
Data read from wav file
Notes
-----
* The file can be an open file or a filename.
* The returned sample rate is a Python integer.
* The data is returned as a numpy array with a data-type determined
from the file.
* This function cannot read wav files with 24 bit data.
"""
if hasattr(filename, 'read'):
fid = filename
mmap = False
else:
fid = open(filename, 'rb')
try:
fsize, is_big_endian = _read_riff_chunk(fid)
fmt_chunk_received = False
noc = 1
bits = 8
comp = WAVE_FORMAT_PCM
while (fid.tell() < fsize):
# read the next chunk
chunk_id = fid.read(4)
if chunk_id == b'fmt ':
fmt_chunk_received = True
size, comp, noc, rate, sbytes, ba, bits = _read_fmt_chunk(fid, is_big_endian=is_big_endian)
if bits == 24:
raise ValueError("Unsupported bit depth: the wav file "
"has 24 bit data.")
elif chunk_id == b'fact':
_skip_unknown_chunk(fid, is_big_endian=is_big_endian)
elif chunk_id == b'data':
if not fmt_chunk_received:
raise ValueError("No fmt chunk before data")
data = _read_data_chunk(fid, comp, noc, bits, is_big_endian=is_big_endian, mmap=mmap)
elif chunk_id == b'LIST':
# Someday this could be handled properly but for now skip it
_skip_unknown_chunk(fid, is_big_endian=is_big_endian)
elif chunk_id in (b'JUNK', b'Fake'):
# Skip alignment chunks without warning
_skip_unknown_chunk(fid, is_big_endian=is_big_endian)
else:
warnings.warn("Chunk (non-data) not understood, skipping it.",
WavFileWarning)
_skip_unknown_chunk(fid, is_big_endian=is_big_endian)
finally:
if not hasattr(filename, 'read'):
fid.close()
else:
fid.seek(0)
return rate, data
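# A hypothetical usage sketch (assumes 'example.wav' exists): read() returns
# the rate as a Python int and the samples as a numpy array whose dtype
# matches the file's sample format.
#
#   rate, data = read('example.wav')
#   print(rate, data.dtype, data.shape)   # e.g. 44100 int16 (220500, 2)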
# Write a wave-file
# sample rate, data
def write(filename, rate, data):
"""
Write a numpy array as a WAV file
Parameters
----------
filename : string or open file handle
Output wav file
rate : int
The sample rate (in samples/sec).
data : ndarray
A 1-D or 2-D numpy array of either integer or float data-type.
Notes
-----
* The file can be an open file or a filename.
* Writes a simple uncompressed WAV file.
* The bits-per-sample will be determined by the data-type.
* To write multiple-channels, use a 2-D array of shape
(Nsamples, Nchannels).
"""
if hasattr(filename, 'write'):
fid = filename
else:
fid = open(filename, 'wb')
try:
dkind = data.dtype.kind
if not (dkind == 'i' or dkind == 'f' or (dkind == 'u' and
data.dtype.itemsize == 1)):
raise ValueError("Unsupported data type '%s'" % data.dtype)
header_data = b''
header_data += b'RIFF'
header_data += b'\x00\x00\x00\x00'
header_data += b'WAVE'
# fmt chunk
header_data += b'fmt '
if dkind == 'f':
comp = 3
else:
comp = 1
if data.ndim == 1:
noc = 1
else:
noc = data.shape[1]
bits = data.dtype.itemsize * 8
sbytes = rate*(bits // 8)*noc
ba = noc * (bits // 8)
fmt_chunk_data = struct.pack('<HHIIHH', comp, noc, rate, sbytes,
ba, bits)
if not (dkind == 'i' or dkind == 'u'):
# add cbSize field for non-PCM files
fmt_chunk_data += b'\x00\x00'
header_data += struct.pack('<I', len(fmt_chunk_data))
header_data += fmt_chunk_data
# fact chunk (non-PCM files)
if not (dkind == 'i' or dkind == 'u'):
header_data += b'fact'
header_data += struct.pack('<II', 4, data.shape[0])
# check data size (needs to be immediately before the data chunk)
if ((len(header_data)-4-4) + (4+4+data.nbytes)) > 0xFFFFFFFF:
raise ValueError("Data exceeds wave file size limit")
fid.write(header_data)
# data chunk
fid.write(b'data')
fid.write(struct.pack('<I', data.nbytes))
if data.dtype.byteorder == '>' or (data.dtype.byteorder == '=' and
sys.byteorder == 'big'):
data = data.byteswap()
_array_tofile(fid, data)
# Determine file size and place it in correct
# position at start of the file.
size = fid.tell()
fid.seek(4)
fid.write(struct.pack('<I', size-8))
finally:
if not hasattr(filename, 'write'):
fid.close()
else:
fid.seek(0)
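# A minimal round-trip sketch (assumption: one second of mono 16-bit audio):
#
#   t = numpy.linspace(0., 1., 44100)
#   tone = (0.5 * numpy.sin(2 * numpy.pi * 440. * t) * 32767).astype(numpy.int16)
#   write('tone.wav', 44100, tone)        # written as 16-bit PCM, mono
#   rate, back = read('tone.wav')
#   assert rate == 44100 and back.dtype == numpy.int16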
if sys.version_info[0] >= 3:
def _array_tofile(fid, data):
# ravel gives a c-contiguous buffer
fid.write(data.ravel().view('b').data)
else:
def _array_tofile(fid, data):
fid.write(data.tostring())
| apache-2.0 |
dmick/teuthology | teuthology/repo_utils.py | 1 | 14228 | import logging
import os
import re
import shutil
import subprocess
import time
from teuthology import misc
from teuthology.util.flock import FileLock
from teuthology.config import config
from teuthology.contextutil import MaxWhileTries, safe_while
from teuthology.exceptions import BootstrapError, BranchNotFoundError, GitError
log = logging.getLogger(__name__)
# Repos must not have been fetched in the last X seconds to get fetched again.
# Similar for teuthology's bootstrap
FRESHNESS_INTERVAL = 60
def touch_file(path):
out = subprocess.check_output(('touch', path))
if out:
log.info(out)
def is_fresh(path):
"""
Has this file been modified in the last FRESHNESS_INTERVAL seconds?
Returns False if the file does not exist
"""
if not os.path.exists(path):
return False
elif time.time() - os.stat(path).st_mtime < FRESHNESS_INTERVAL:
return True
return False
def build_git_url(project, project_owner='ceph'):
"""
Return the git URL to clone the project
"""
if project == 'ceph-qa-suite':
base = config.get_ceph_qa_suite_git_url()
elif project == 'ceph-cm-ansible':
base = config.get_ceph_cm_ansible_git_url()
elif project == 'ceph':
base = config.get_ceph_git_url()
else:
base = 'https://github.com/{project_owner}/{project}'
url_templ = re.sub(r'\.git$', '', base)
return url_templ.format(project_owner=project_owner, project=project)
def ls_remote(url, ref):
"""
Return the current sha1 for a given repository and ref
:returns: The sha1 if found; else None
"""
sha1 = None
cmd = "git ls-remote {} {}".format(url, ref)
result = subprocess.check_output(
cmd, shell=True).split()
if result:
sha1 = result[0].decode()
log.debug("{} -> {}".format(cmd, sha1))
return sha1
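# A hypothetical example of resolving a branch head on a public repository:
#
#   sha1 = ls_remote('https://github.com/ceph/teuthology.git', 'master')
#   if sha1 is None:
#       ...  # the ref does not exist on the remote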
def enforce_repo_state(repo_url, dest_path, branch, remove_on_error=True):
"""
Use git to either clone or update a given repo, forcing it to switch to the
specified branch.
:param repo_url: The full URL to the repo (not including the branch)
:param dest_path: The full path to the destination directory
:param branch: The branch.
:param remove_on_error: Whether or not to remove dest_path when an error occurs
:raises: BranchNotFoundError if the branch is not found;
GitError for other errors
"""
validate_branch(branch)
sentinel = os.path.join(dest_path, '.fetched')
try:
if not os.path.isdir(dest_path):
clone_repo(repo_url, dest_path, branch)
elif not is_fresh(sentinel):
set_remote(dest_path, repo_url)
fetch_branch(dest_path, branch)
touch_file(sentinel)
else:
log.info("%s was just updated; assuming it is current", dest_path)
reset_repo(repo_url, dest_path, branch)
# remove_pyc_files(dest_path)
except BranchNotFoundError:
if remove_on_error:
shutil.rmtree(dest_path, ignore_errors=True)
raise
def clone_repo(repo_url, dest_path, branch, shallow=True):
"""
Clone a repo into a path
:param repo_url: The full URL to the repo (not including the branch)
:param dest_path: The full path to the destination directory
:param branch: The branch.
:param shallow: Whether to perform a shallow clone (--depth 1)
:raises: BranchNotFoundError if the branch is not found;
GitError for other errors
"""
validate_branch(branch)
log.info("Cloning %s %s from upstream", repo_url, branch)
if branch.startswith('refs/'):
clone_repo_ref(repo_url, dest_path, branch)
return
args = ['git', 'clone']
if shallow:
args.extend(['--depth', '1'])
args.extend(['--branch', branch, repo_url, dest_path])
proc = subprocess.Popen(
args,
cwd=os.path.dirname(dest_path),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
not_found_str = "Remote branch %s not found" % branch
out = proc.stdout.read().decode()
result = proc.wait()
# Newer git versions will bail if the branch is not found, but older ones
# will not. Fortunately they both output similar text.
if not_found_str in out:
log.error(out)
if result == 0:
# Old git left a repo with the wrong branch. Remove it.
shutil.rmtree(dest_path, ignore_errors=True)
raise BranchNotFoundError(branch, repo_url)
elif result != 0:
# Unknown error
raise GitError("git clone failed!")
def rsstrip(s, suffix):
return s[:-len(suffix)] if s.endswith(suffix) else s
def lsstrip(s, prefix):
return s[len(prefix):] if s.startswith(prefix) else s
def remote_ref_from_ref(ref, remote='origin'):
if ref.startswith('refs/pull/'):
return 'refs/remotes/' + remote + lsstrip(ref, 'refs')
elif ref.startswith('refs/heads/'):
return 'refs/remotes/' + remote + lsstrip(ref, 'refs/heads')
raise GitError("Unsupported ref '%s'" % ref)
def local_branch_from_ref(ref):
if ref.startswith('refs/pull/'):
s = lsstrip(ref, 'refs/pull/')
s = rsstrip(s, '/merge')
s = rsstrip(s, '/head')
return "PR#%s" % s
elif ref.startswith('refs/heads/'):
return lsstrip(ref, 'refs/heads/')
raise GitError("Unsupported ref '%s', try 'refs/heads/' or 'refs/pull/'" % ref)
def fetch_refspec(ref):
if '/' in ref:
remote_ref = remote_ref_from_ref(ref)
return "+%s:%s" % (ref, remote_ref)
else:
# looks like a branch name
return ref
def clone_repo_ref(repo_url, dest_path, ref):
branch_name = local_branch_from_ref(ref)
remote_ref = remote_ref_from_ref(ref)
misc.sh('git init %s' % dest_path)
misc.sh('git remote add origin %s' % repo_url, cwd=dest_path)
#misc.sh('git fetch --depth 1 origin %s' % fetch_refspec(ref),
# cwd=dest_path)
fetch_branch(dest_path, ref)
misc.sh('git checkout -b %s %s' % (branch_name, remote_ref),
cwd=dest_path)
def set_remote(repo_path, repo_url):
"""
Call "git remote set-url origin <repo_url>"
:param repo_url: The full URL to the repo (not including the branch)
:param repo_path: The full path to the repository
:raises: GitError if the operation fails
"""
log.debug("Setting repo remote to %s", repo_url)
proc = subprocess.Popen(
('git', 'remote', 'set-url', 'origin', repo_url),
cwd=repo_path,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if proc.wait() != 0:
out = proc.stdout.read()
log.error(out)
raise GitError("git remote set-url failed!")
def fetch(repo_path):
"""
Call "git fetch -p origin"
:param repo_path: The full path to the repository
:raises: GitError if the operation fails
"""
log.info("Fetching from upstream into %s", repo_path)
proc = subprocess.Popen(
('git', 'fetch', '-p', 'origin'),
cwd=repo_path,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if proc.wait() != 0:
out = proc.stdout.read().decode()
log.error(out)
raise GitError("git fetch failed!")
def fetch_branch(repo_path, branch, shallow=True):
"""
Call "git fetch -p origin <branch>"
:param repo_path: The full path to the repository on-disk
:param branch: The branch.
:param shallow: Whether to perform a shallow fetch (--depth 1)
:raises: BranchNotFoundError if the branch is not found;
GitError for other errors
"""
validate_branch(branch)
log.info("Fetching %s from origin", branch)
args = ['git', 'fetch']
if shallow:
args.extend(['--depth', '1'])
args.extend(['-p', 'origin', fetch_refspec(branch)])
proc = subprocess.Popen(
args,
cwd=repo_path,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if proc.wait() != 0:
not_found_str = "fatal: couldn't find remote ref %s" % branch
out = proc.stdout.read().decode()
log.error(out)
if not_found_str.lower() in out.lower():
raise BranchNotFoundError(branch)
else:
raise GitError("git fetch failed!")
def reset_repo(repo_url, dest_path, branch):
"""
:param repo_url: The full URL to the repo (not including the branch)
:param dest_path: The full path to the destination directory
:param branch: The branch.
:raises: BranchNotFoundError if the branch is not found;
GitError for other errors
"""
validate_branch(branch)
if '/' in branch:
reset_branch = lsstrip(remote_ref_from_ref(branch), 'refs/remotes/')
else:
reset_branch = 'origin/%s' % branch
log.info('Resetting repo at %s to branch %s', dest_path, reset_branch)
# This try/except block will notice if the requested branch doesn't
# exist, whether it was cloned or fetched.
try:
subprocess.check_output(
('git', 'reset', '--hard', reset_branch),
cwd=dest_path,
)
except subprocess.CalledProcessError:
raise BranchNotFoundError(branch, repo_url)
def remove_pyc_files(dest_path):
subprocess.check_call(
['find', dest_path, '-name', '*.pyc', '-exec', 'rm', '{}', ';']
)
def validate_branch(branch):
if ' ' in branch:
raise ValueError("Illegal branch name: '%s'" % branch)
def fetch_repo(url, branch, bootstrap=None, lock=True):
"""
Make sure we have a given project's repo checked out and up-to-date with
the current branch requested
:param url: The URL to the repo
:param bootstrap: An optional callback function to execute. Gets passed a
dest_dir argument: the path to the repo on-disk.
:param branch: The branch we want
:returns: The destination path
"""
src_base_path = config.src_base_path
if not os.path.exists(src_base_path):
os.mkdir(src_base_path)
branch_dir = ref_to_dirname(branch)
dirname = '%s_%s' % (url_to_dirname(url), branch_dir)
dest_path = os.path.join(src_base_path, dirname)
# only let one worker create/update the checkout at a time
lock_path = dest_path.rstrip('/') + '.lock'
with FileLock(lock_path, noop=not lock):
with safe_while(sleep=10, tries=60) as proceed:
try:
while proceed():
try:
enforce_repo_state(url, dest_path, branch)
if bootstrap:
bootstrap(dest_path)
break
except GitError:
log.exception("Git error encountered; retrying")
except BootstrapError:
log.exception("Bootstrap error encountered; retrying")
except MaxWhileTries:
shutil.rmtree(dest_path, ignore_errors=True)
raise
return dest_path
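# A hedged usage sketch (URL and branch are illustrative): fetch_repo() is
# the high-level entry point; it serializes checkout updates with a file
# lock and retries transient git/bootstrap failures before giving up.
#
#   path = fetch_repo('https://github.com/ceph/ceph-qa-suite.git', 'master')
#   # path now points at a checkout under config.src_base_path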
def ref_to_dirname(branch):
if '/' in branch:
return local_branch_from_ref(branch)
else:
return branch
def url_to_dirname(url):
"""
Given a URL, returns a string that's safe to use as a directory name.
Examples:
[email protected]/ceph-qa-suite.git -> git.ceph.com_ceph-qa-suite
git://git.ceph.com/ceph-qa-suite.git -> git.ceph.com_ceph-qa-suite
https://github.com/ceph/ceph -> github.com_ceph_ceph
https://github.com/liewegas/ceph.git -> github.com_liewegas_ceph
file:///my/dir/has/ceph.git -> my_dir_has_ceph
"""
# Strip protocol from left-hand side
string = re.match('(?:.*://|.*@)(.*)', url).groups()[0]
# Strip a trailing '.git' suffix (rstrip() strips a character set, not
# a suffix, so it would mangle names like 'digit.git')
string = re.sub(r'\.git$', '', string)
# Replace certain characters with underscores
string = re.sub('[:/]', '_', string)
# Remove duplicate underscores
string = re.sub('_+', '_', string)
# Remove leading or trailing underscore
string = string.strip('_')
return string
def fetch_qa_suite(branch, lock=True):
"""
Make sure ceph-qa-suite is checked out.
:param branch: The branch to fetch
:returns: The destination path
"""
return fetch_repo(config.get_ceph_qa_suite_git_url(),
branch, lock=lock)
def fetch_teuthology(branch, lock=True):
"""
Make sure we have the correct teuthology branch checked out and up-to-date
:param branch: The branch we want
:returns: The destination path
"""
url = config.ceph_git_base_url + 'teuthology.git'
return fetch_repo(url, branch, bootstrap_teuthology, lock)
def bootstrap_teuthology(dest_path):
sentinel = os.path.join(dest_path, '.bootstrapped')
if is_fresh(sentinel):
log.info(
"Skipping bootstrap as it was already done in the last %ss",
FRESHNESS_INTERVAL,
)
return
log.info("Bootstrapping %s", dest_path)
# This magic makes the bootstrap script not attempt to clobber an
# existing virtualenv. But the branch's bootstrap needs to actually
# check for the NO_CLOBBER variable.
env = os.environ.copy()
env['NO_CLOBBER'] = '1'
cmd = './bootstrap'
boot_proc = subprocess.Popen(cmd, shell=True, cwd=dest_path, env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, err = boot_proc.communicate()
returncode = boot_proc.wait()
log.info("Bootstrap exited with status %s", returncode)
if returncode != 0:
for line in out.splitlines():
log.warn(line.strip())
venv_path = os.path.join(dest_path, 'virtualenv')
log.info("Removing %s", venv_path)
shutil.rmtree(venv_path, ignore_errors=True)
raise BootstrapError("Bootstrap failed!")
touch_file(sentinel)
| mit |
arquetype/thus | src/canonical/i18n.py | 3 | 4162 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Copyright (C) 2012 Canonical Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import gettext
import canonical.misc as misc
def utf8(s, errors="strict"):
"""Decode a string as UTF-8 if it isn't already Unicode."""
if isinstance(s, str):
return s
else:
return str(s, "utf-8", errors)
# Returns a tuple of (current language, sorted choices, display map).
def get_languages(language_list="data/languagelist.data.gz", current_language_index=-1, only_installable=False):
import gzip
#import icu
current_language = "English"
if only_installable:
from apt.cache import Cache
# Workaround for an issue where euid != uid and the apt cache has
# not yet been loaded, causing a SystemError when libapt-pkg tries
# to load the Cache for the first time.
with misc.raised_privileges():
cache = Cache()
languagelist = gzip.open(language_list)
language_display_map = {}
i = 0
for line in languagelist:
line = utf8(line)
if line == '' or line == '\n':
continue
code, name, trans = line.strip('\n').split(':')[1:]
if code in ('C', 'dz', 'km'):
i += 1
continue
# KDE fails to round-trip strings containing U+FEFF ZERO WIDTH
# NO-BREAK SPACE, and we don't care about the NBSP anyway, so strip
# it.
# https://bugs.launchpad.net/bugs/1001542
# (comment #5 and on)
trans = trans.strip(" \ufeff")
if only_installable:
pkg_name = 'language-pack-%s' % code
#special case these
if pkg_name.endswith('_CN'):
pkg_name = 'language-pack-zh-hans'
elif pkg_name.endswith('_TW'):
pkg_name = 'language-pack-zh-hant'
elif pkg_name.endswith('_NO'):
pkg_name = pkg_name.split('_NO')[0]
elif pkg_name.endswith('_BR'):
pkg_name = pkg_name.split('_BR')[0]
try:
pkg = cache[pkg_name]
if not (pkg.installed or pkg.candidate):
i += 1
continue
except KeyError:
i += 1
continue
language_display_map[trans] = (name, code)
if i == current_language_index:
current_language = trans
i += 1
languagelist.close()
if only_installable:
del cache
#try:
# Note that we always collate with the 'C' locale. This is far
# from ideal. But proper collation always requires a specific
# language for its collation rules (languages frequently have
# custom sorting). This at least gives us common sorting rules,
# like stripping accents.
#collator = icu.Collator.createInstance(icu.Locale('C'))
#except:
# collator = None
collator = None
def compare_choice(x):
if language_display_map[x][1] == 'C':
return None # place C first
if collator:
try:
return collator.getCollationKey(x).getByteArray()
except:
pass
# Else sort by unicode code point, which isn't ideal either,
# but also has the virtue of sorting like-glyphs together
return x
sorted_choices = sorted(language_display_map, key=compare_choice)
return current_language, sorted_choices, language_display_map
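# Illustrative call (values are made up): the display-map keys are the
# translated names shown in the UI, each mapping to (english_name, code).
#
#   current, choices, display_map = get_languages(current_language_index=30)
#   # display_map might contain e.g. {'Deutsch': ('German', 'de_DE'), ...}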
| gpl-3.0 |
google/objectfilter | objectfilter/lexer.py | 1 | 14047 | #!/bin/env python
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An LL(1) lexer. This lexer is very tolerant of errors and can resync."""
import logging
import re
import utils
class Token(object):
"""A token action."""
state_regex = None
def __init__(self, state_regex, regex, actions, next_state, flags=re.I):
"""Constructor.
Args:
state_regex: If this regular expression matches the current state this
rule is considered.
regex: A regular expression to try and match from the current point.
actions: A command separated list of method names in the Lexer to call.
next_state: The next state we transition to if this Token matches.
flags: re flags.
"""
if state_regex:
self.state_regex = re.compile(state_regex,
re.DOTALL | re.M | re.S | re.U | flags)
self.regex = re.compile(regex, re.DOTALL | re.M | re.S | re.U | flags)
self.re_str = regex
self.actions = []
if actions:
self.actions = actions.split(",")
self.next_state = next_state
def Action(self, lexer):
"""Method is called when the token matches."""
class Error(Exception):
"""Module exception."""
class ParseError(Error):
"""A parse error occured."""
class Lexer(object):
"""A generic feed lexer."""
# A list of Token() instances.
tokens = []
# The first state
state = "INITIAL"
# The buffer we are parsing now
buffer = ""
error = 0
verbose = 0
# The index into the buffer where we are currently pointing
processed = 0
processed_buffer = ""
# Regex flags
flags = 0
def __init__(self, data=""):
self.buffer = data
self.state_stack = []
def NextToken(self):
"""Fetch the next token by trying to match any of the regexes in order."""
current_state = self.state
for token in self.tokens:
# Does the rule apply to us?
if token.state_regex and not token.state_regex.match(current_state):
continue
if self.verbose:
logging.debug("%s: Trying to match %r with %r",
self.state, self.buffer[:10], token.re_str)
# Try to match the rule
m = token.regex.match(self.buffer)
if not m: continue
if self.verbose:
logging.debug("%s matched %s", token.re_str, m.group(0))
# The match consumes the data off the buffer (the handler can put it back
# if it likes)
self.processed_buffer += self.buffer[:m.end()]
self.buffer = self.buffer[m.end():]
self.processed += m.end()
next_state = token.next_state
for action in token.actions:
if self.verbose:
logging.debug("Calling %s with %s", action, m.group(0))
# Is there a callback to handle this action?
cb = getattr(self, action, self.Default)
# Allow a callback to skip other callbacks.
try:
possible_next_state = cb(string=m.group(0), match=m)
if possible_next_state == "CONTINUE":
continue
# Override the state from the Token
elif possible_next_state:
next_state = possible_next_state
except ParseError as e:
self.Error(e)
# Update the next state
if next_state:
self.state = next_state
return token
# No token matched: consume one character so the lexer can resync
# instead of looping forever on unparseable input.
self.Error("Expected %s" % self.state)
self.processed_buffer += self.buffer[:1]
self.buffer = self.buffer[1:]
return "Error"
def Feed(self, data):
self.buffer += data
def Empty(self):
return not self.buffer
def Default(self, **kwarg):
logging.debug("Default handler: %s", kwarg)
def Error(self, message=None, weight=1):
logging.debug("Error(%s): %s", weight, message)
# Keep a count of errors
self.error += weight
def PushState(self, **_):
"""Push the current state on the state stack."""
if self.verbose:
logging.debug("Storing state %r", self.state)
self.state_stack.append(self.state)
def PopState(self, **_):
"""Pop the previous state from the stack."""
try:
self.state = self.state_stack.pop()
if self.verbose:
logging.debug("Returned state to %s", self.state)
return self.state
except IndexError:
self.Error("Tried to pop the state but failed - possible recursion error")
def PushBack(self, string="", **_):
"""Push the match back on the stream."""
self.buffer = string + self.buffer
self.processed_buffer = self.processed_buffer[:-len(string)]
def Close(self):
"""A convenience function to force us to parse all the data."""
while self.NextToken():
if not self.buffer:
return
class SelfFeederMixIn(Lexer):
"""This mixin is used to make a lexer which feeds itself.
Note that self.fd must be the fd we read from.
"""
def __init__(self, fd=""):
self.fd = fd
super(SelfFeederMixIn, self).__init__()
def NextToken(self, end=True):
# If we don't have enough data, feed ourselves: we assume that we
# must have at least one sector in our buffer.
if len(self.buffer) < 512:
if self.Feed() == 0 and not self.buffer:
return None
return Lexer.NextToken(self)
def Feed(self, size=512):
data = self.fd.read(size)
Lexer.Feed(self, data)
return len(data)
class Expression(object):
"""A class representing an expression."""
attribute = None
args = None
operator = None
# The expected number of args
number_of_args = 1
def __init__(self):
self.args = []
def SetAttribute(self, attribute):
self.attribute = attribute
def SetOperator(self, operator):
self.operator = operator
def AddArg(self, arg):
"""Adds a new arg to this expression.
Args:
arg: The argument to add (string).
Returns:
True if this arg is the last arg, False otherwise.
Raises:
ParseError: If there are too many args.
"""
self.args.append(arg)
if len(self.args) > self.number_of_args:
raise ParseError("Too many args for this expression.")
elif len(self.args) == self.number_of_args:
return True
return False
def __str__(self):
return "Expression: (%s) (%s) %s" % (
self.attribute, self.operator, self.args)
def PrintTree(self, depth=""):
return "%s %s" % (depth, self)
def Compile(self, filter_implemention):
"""Given a filter implementation, compile this expression."""
raise NotImplementedError("%s does not implement Compile." %
self.__class__.__name__)
class BinaryExpression(Expression):
"""An expression which takes two other expressions."""
def __init__(self, operator="", part=None):
self.operator = operator
self.args = []
if part: self.args.append(part)
super(BinaryExpression, self).__init__()
def __str__(self):
return "Binary Expression: %s %s" % (
self.operator, [str(x) for x in self.args])
def AddOperands(self, lhs, rhs):
if isinstance(lhs, Expression) and isinstance(rhs, Expression):
self.args = [lhs, rhs]
else:
raise ParseError("Expected expression, got %s %s %s" % (
lhs, self.operator, rhs))
def PrintTree(self, depth=""):
result = "%s%s\n" % (depth, self.operator)
for part in self.args:
result += "%s-%s\n" % (depth, part.PrintTree(depth + " "))
return result
def Compile(self, filter_implemention):
"""Compile the binary expression into a filter object."""
operator = self.operator.lower()
if operator == "and" or operator == "&&":
method = "AndFilter"
elif operator == "or" or operator == "||":
method = "OrFilter"
else:
raise ParseError("Invalid binary operator %s" % operator)
args = [x.Compile(filter_implemention) for x in self.args]
return getattr(filter_implemention, method)(*args)
class IdentityExpression(Expression):
"""An Expression which always evaluates to True."""
def Compile(self, filter_implemention):
return filter_implemention.IdentityFilter()
class SearchParser(Lexer):
"""This parser can parse the mini query language and build an AST.
Examples of valid syntax:
filename contains "foo" and (size > 100k or date before "2011-10")
date between 2011 and 2010
files older than 1 year
"""
expression_cls = Expression
binary_expression_cls = BinaryExpression
identity_expression_cls = IdentityExpression
tokens = [
# Double quoted string
Token("STRING", "\"", "PopState,StringFinish", None),
Token("STRING", r"\\(.)", "StringEscape", None),
Token("STRING", r"[^\\\"]+", "StringInsert", None),
# Single quoted string
Token("SQ_STRING", "'", "PopState,StringFinish", None),
Token("SQ_STRING", r"\\(.)", "StringEscape", None),
Token("SQ_STRING", r"[^\\']+", "StringInsert", None),
# TODO(scudette): Implement a unary not operator.
# The first thing we see in the initial state takes up to the ATTRIBUTE
Token("INITIAL", r"(and|or|\&\&|\|\|)", "BinaryOperator", None),
Token("INITIAL", r"[^\s\(\)]", "PushState,PushBack", "ATTRIBUTE"),
Token("INITIAL", r"\(", "BracketOpen", None),
Token("INITIAL", r"\)", "BracketClose", None),
Token("ATTRIBUTE", r"[\w._0-9]+", "StoreAttribute", "OPERATOR"),
Token("OPERATOR",
r"[a-z0-9<>=\-\+\!\^\&%]+", "StoreOperator", "ARG_LIST"),
Token("OPERATOR", "(!=|[<>=])", "StoreSpecialOperator", "ARG_LIST"),
Token("ARG_LIST", r"[^\s'\"]+", "InsertArg", None),
# Start a string.
Token(".", "\"", "PushState,StringStart", "STRING"),
Token(".", "'", "PushState,StringStart", "SQ_STRING"),
# Skip whitespace.
Token(".", r"\s+", None, None),
]
def __init__(self, data):
# Holds expression
self.current_expression = self.expression_cls()
self.filter_string = data
# The token stack
self.stack = []
Lexer.__init__(self, data)
def BinaryOperator(self, string=None, **_):
self.stack.append(self.binary_expression_cls(string))
def BracketOpen(self, **_):
self.stack.append("(")
def BracketClose(self, **_):
self.stack.append(")")
def StringStart(self, **_):
self.string = ""
def StringEscape(self, string, match, **_):
"""Escape backslashes found inside a string quote.
Backslashes followed by anything other than ['"rnbt] will just be included
in the string.
Args:
string: The string that matched.
match: The match object (m.group(1) is the escaped code)
"""
if match.group(1) in "'\"rnbt":
self.string += string.decode("string_escape")
else:
self.string += string
def StringInsert(self, string="", **_):
self.string += string
def StringFinish(self, **_):
if self.state == "ATTRIBUTE":
return self.StoreAttribute(string=self.string)
elif self.state == "ARG_LIST":
return self.InsertArg(string=self.string)
def StoreAttribute(self, string="", **_):
logging.debug("Storing attribute %r", string)
try:
self.current_expression.SetAttribute(string)
except AttributeError:
raise ParseError("Invalid attribute '%s'" % string)
return "OPERATOR"
def StoreOperator(self, string="", **_):
logging.debug("Storing operator %r", string)
self.current_expression.SetOperator(string)
def InsertArg(self, string="", **_):
"""Insert an arg to the current expression."""
logging.debug("Storing Argument %s", string)
# This expression is complete
if self.current_expression.AddArg(string):
self.stack.append(self.current_expression)
self.current_expression = self.expression_cls()
return self.PopState()
def _CombineBinaryExpressions(self, operator):
for i in range(1, len(self.stack)-1):
item = self.stack[i]
if (isinstance(item, BinaryExpression) and item.operator == operator and
isinstance(self.stack[i-1], Expression) and
isinstance(self.stack[i+1], Expression)):
lhs = self.stack[i-1]
rhs = self.stack[i+1]
self.stack[i].AddOperands(lhs, rhs)
self.stack[i-1] = None
self.stack[i+1] = None
    self.stack = list(filter(None, self.stack))
def _CombineParenthesis(self):
for i in range(len(self.stack)-2):
if (self.stack[i] == "(" and self.stack[i+2] == ")" and
isinstance(self.stack[i+1], Expression)):
self.stack[i] = None
self.stack[i+2] = None
    self.stack = list(filter(None, self.stack))
def Reduce(self):
"""Reduce the token stack into an AST."""
# Check for sanity
if self.state != "INITIAL":
self.Error("Premature end of expression")
length = len(self.stack)
while length > 1:
      # Precedence order
self._CombineParenthesis()
self._CombineBinaryExpressions("and")
self._CombineBinaryExpressions("or")
# No change
if len(self.stack) == length: break
length = len(self.stack)
if length != 1:
self.Error("Illegal query expression")
return self.stack[0]
def Error(self, message=None, weight=1):
raise ParseError(u"%s in position %s: %s <----> %s )" % (
utils.SmartUnicode(message), len(self.processed_buffer),
self.processed_buffer, self.buffer))
def Parse(self):
if not self.filter_string:
return self.identity_expression_cls()
self.Close()
return self.Reduce()
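# Illustrative usage sketch (not part of the original module; the filter
# implementation object below is a hypothetical stand-in exposing the
# AndFilter, OrFilter and IdentityFilter factories that the Compile()
# methods here expect):
#   ast = SearchParser('filename contains "foo"').Parse()
#   search_filter = ast.Compile(some_filter_implementation)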
| apache-2.0 |
ioana-delaney/spark | examples/src/main/python/avro_inputformat.py | 51 | 3170 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Read data file users.avro in local Spark distro:
$ cd $SPARK_HOME
$ ./bin/spark-submit --driver-class-path /path/to/example/jar \
> ./examples/src/main/python/avro_inputformat.py \
> examples/src/main/resources/users.avro
{u'favorite_color': None, u'name': u'Alyssa', u'favorite_numbers': [3, 9, 15, 20]}
{u'favorite_color': u'red', u'name': u'Ben', u'favorite_numbers': []}
To read name and favorite_color fields only, specify the following reader schema:
$ cat examples/src/main/resources/user.avsc
{"namespace": "example.avro",
"type": "record",
"name": "User",
"fields": [
{"name": "name", "type": "string"},
{"name": "favorite_color", "type": ["string", "null"]}
]
}
$ ./bin/spark-submit --driver-class-path /path/to/example/jar \
> ./examples/src/main/python/avro_inputformat.py \
> examples/src/main/resources/users.avro examples/src/main/resources/user.avsc
{u'favorite_color': None, u'name': u'Alyssa'}
{u'favorite_color': u'red', u'name': u'Ben'}
"""
from __future__ import print_function
import sys
from functools import reduce
from pyspark.sql import SparkSession
if __name__ == "__main__":
if len(sys.argv) != 2 and len(sys.argv) != 3:
print("""
Usage: avro_inputformat <data_file> [reader_schema_file]
Run with example jar:
./bin/spark-submit --driver-class-path /path/to/example/jar \
/path/to/examples/avro_inputformat.py <data_file> [reader_schema_file]
Assumes you have Avro data stored in <data_file>. Reader schema can be optionally specified
in [reader_schema_file].
""", file=sys.stderr)
sys.exit(-1)
path = sys.argv[1]
spark = SparkSession\
.builder\
.appName("AvroKeyInputFormat")\
.getOrCreate()
sc = spark.sparkContext
conf = None
if len(sys.argv) == 3:
schema_rdd = sc.textFile(sys.argv[2], 1).collect()
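        # Pass the reader schema's full .avsc text to AvroKeyInputFormat via
        # the Hadoop configuration, so records are projected onto the
        # (possibly narrower) reader schema instead of the writer schema.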
conf = {"avro.schema.input.key": reduce(lambda x, y: x + y, schema_rdd)}
avro_rdd = sc.newAPIHadoopFile(
path,
"org.apache.avro.mapreduce.AvroKeyInputFormat",
"org.apache.avro.mapred.AvroKey",
"org.apache.hadoop.io.NullWritable",
keyConverter="org.apache.spark.examples.pythonconverters.AvroWrapperToJavaConverter",
conf=conf)
output = avro_rdd.map(lambda x: x[0]).collect()
for k in output:
print(k)
spark.stop()
| apache-2.0 |
atombrella/django-rest-framework | tests/test_relations_pk.py | 3 | 19626 | from __future__ import unicode_literals
from django.test import TestCase
from django.utils import six
from rest_framework import serializers
from tests.models import (
ForeignKeySource, ForeignKeyTarget, ManyToManySource, ManyToManyTarget,
NullableForeignKeySource, NullableOneToOneSource,
NullableUUIDForeignKeySource, OneToOneTarget, UUIDForeignKeyTarget
)
# ManyToMany
class ManyToManyTargetSerializer(serializers.ModelSerializer):
class Meta:
model = ManyToManyTarget
fields = ('id', 'name', 'sources')
class ManyToManySourceSerializer(serializers.ModelSerializer):
class Meta:
model = ManyToManySource
fields = ('id', 'name', 'targets')
# ForeignKey
class ForeignKeyTargetSerializer(serializers.ModelSerializer):
class Meta:
model = ForeignKeyTarget
fields = ('id', 'name', 'sources')
class ForeignKeySourceSerializer(serializers.ModelSerializer):
class Meta:
model = ForeignKeySource
fields = ('id', 'name', 'target')
# Nullable ForeignKey
class NullableForeignKeySourceSerializer(serializers.ModelSerializer):
class Meta:
model = NullableForeignKeySource
fields = ('id', 'name', 'target')
# Nullable UUIDForeignKey
class NullableUUIDForeignKeySourceSerializer(serializers.ModelSerializer):
target = serializers.PrimaryKeyRelatedField(
pk_field=serializers.UUIDField(),
queryset=UUIDForeignKeyTarget.objects.all(),
allow_null=True)
class Meta:
model = NullableUUIDForeignKeySource
fields = ('id', 'name', 'target')
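# The explicit PrimaryKeyRelatedField above wraps the related UUID primary
# key in a UUIDField, so representations render as UUID strings, and
# allow_null=True lets a source exist without a target.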
# Nullable OneToOne
class NullableOneToOneTargetSerializer(serializers.ModelSerializer):
class Meta:
model = OneToOneTarget
fields = ('id', 'name', 'nullable_source')
# TODO: Add test that .data cannot be accessed prior to .is_valid
class PKManyToManyTests(TestCase):
def setUp(self):
for idx in range(1, 4):
target = ManyToManyTarget(name='target-%d' % idx)
target.save()
source = ManyToManySource(name='source-%d' % idx)
source.save()
for target in ManyToManyTarget.objects.all():
source.targets.add(target)
def test_many_to_many_retrieve(self):
queryset = ManyToManySource.objects.all()
serializer = ManyToManySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'targets': [1]},
{'id': 2, 'name': 'source-2', 'targets': [1, 2]},
{'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]}
]
with self.assertNumQueries(4):
assert serializer.data == expected
def test_many_to_many_retrieve_prefetch_related(self):
queryset = ManyToManySource.objects.all().prefetch_related('targets')
serializer = ManyToManySourceSerializer(queryset, many=True)
with self.assertNumQueries(2):
serializer.data
def test_reverse_many_to_many_retrieve(self):
queryset = ManyToManyTarget.objects.all()
serializer = ManyToManyTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
{'id': 2, 'name': 'target-2', 'sources': [2, 3]},
{'id': 3, 'name': 'target-3', 'sources': [3]}
]
with self.assertNumQueries(4):
assert serializer.data == expected
def test_many_to_many_update(self):
data = {'id': 1, 'name': 'source-1', 'targets': [1, 2, 3]}
instance = ManyToManySource.objects.get(pk=1)
serializer = ManyToManySourceSerializer(instance, data=data)
assert serializer.is_valid()
serializer.save()
assert serializer.data == data
# Ensure source 1 is updated, and everything else is as expected
queryset = ManyToManySource.objects.all()
serializer = ManyToManySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'targets': [1, 2, 3]},
{'id': 2, 'name': 'source-2', 'targets': [1, 2]},
{'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]}
]
assert serializer.data == expected
def test_reverse_many_to_many_update(self):
data = {'id': 1, 'name': 'target-1', 'sources': [1]}
instance = ManyToManyTarget.objects.get(pk=1)
serializer = ManyToManyTargetSerializer(instance, data=data)
assert serializer.is_valid()
serializer.save()
assert serializer.data == data
# Ensure target 1 is updated, and everything else is as expected
queryset = ManyToManyTarget.objects.all()
serializer = ManyToManyTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'sources': [1]},
{'id': 2, 'name': 'target-2', 'sources': [2, 3]},
{'id': 3, 'name': 'target-3', 'sources': [3]}
]
assert serializer.data == expected
def test_many_to_many_create(self):
data = {'id': 4, 'name': 'source-4', 'targets': [1, 3]}
serializer = ManyToManySourceSerializer(data=data)
assert serializer.is_valid()
obj = serializer.save()
assert serializer.data == data
assert obj.name == 'source-4'
# Ensure source 4 is added, and everything else is as expected
queryset = ManyToManySource.objects.all()
serializer = ManyToManySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'targets': [1]},
{'id': 2, 'name': 'source-2', 'targets': [1, 2]},
{'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]},
{'id': 4, 'name': 'source-4', 'targets': [1, 3]},
]
assert serializer.data == expected
def test_many_to_many_unsaved(self):
source = ManyToManySource(name='source-unsaved')
serializer = ManyToManySourceSerializer(source)
expected = {'id': None, 'name': 'source-unsaved', 'targets': []}
# no query if source hasn't been created yet
with self.assertNumQueries(0):
assert serializer.data == expected
def test_reverse_many_to_many_create(self):
data = {'id': 4, 'name': 'target-4', 'sources': [1, 3]}
serializer = ManyToManyTargetSerializer(data=data)
assert serializer.is_valid()
obj = serializer.save()
assert serializer.data == data
assert obj.name == 'target-4'
# Ensure target 4 is added, and everything else is as expected
queryset = ManyToManyTarget.objects.all()
serializer = ManyToManyTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
{'id': 2, 'name': 'target-2', 'sources': [2, 3]},
{'id': 3, 'name': 'target-3', 'sources': [3]},
{'id': 4, 'name': 'target-4', 'sources': [1, 3]}
]
assert serializer.data == expected
class PKForeignKeyTests(TestCase):
def setUp(self):
target = ForeignKeyTarget(name='target-1')
target.save()
new_target = ForeignKeyTarget(name='target-2')
new_target.save()
for idx in range(1, 4):
source = ForeignKeySource(name='source-%d' % idx, target=target)
source.save()
def test_foreign_key_retrieve(self):
queryset = ForeignKeySource.objects.all()
serializer = ForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': 1},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': 1}
]
with self.assertNumQueries(1):
assert serializer.data == expected
def test_reverse_foreign_key_retrieve(self):
queryset = ForeignKeyTarget.objects.all()
serializer = ForeignKeyTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
{'id': 2, 'name': 'target-2', 'sources': []},
]
with self.assertNumQueries(3):
assert serializer.data == expected
def test_reverse_foreign_key_retrieve_prefetch_related(self):
queryset = ForeignKeyTarget.objects.all().prefetch_related('sources')
serializer = ForeignKeyTargetSerializer(queryset, many=True)
with self.assertNumQueries(2):
serializer.data
def test_foreign_key_update(self):
data = {'id': 1, 'name': 'source-1', 'target': 2}
instance = ForeignKeySource.objects.get(pk=1)
serializer = ForeignKeySourceSerializer(instance, data=data)
assert serializer.is_valid()
serializer.save()
assert serializer.data == data
# Ensure source 1 is updated, and everything else is as expected
queryset = ForeignKeySource.objects.all()
serializer = ForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': 2},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': 1}
]
assert serializer.data == expected
def test_foreign_key_update_incorrect_type(self):
data = {'id': 1, 'name': 'source-1', 'target': 'foo'}
instance = ForeignKeySource.objects.get(pk=1)
serializer = ForeignKeySourceSerializer(instance, data=data)
assert not serializer.is_valid()
assert serializer.errors == {'target': ['Incorrect type. Expected pk value, received %s.' % six.text_type.__name__]}
def test_reverse_foreign_key_update(self):
data = {'id': 2, 'name': 'target-2', 'sources': [1, 3]}
instance = ForeignKeyTarget.objects.get(pk=2)
serializer = ForeignKeyTargetSerializer(instance, data=data)
assert serializer.is_valid()
# We shouldn't have saved anything to the db yet since save
# hasn't been called.
queryset = ForeignKeyTarget.objects.all()
new_serializer = ForeignKeyTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
{'id': 2, 'name': 'target-2', 'sources': []},
]
assert new_serializer.data == expected
serializer.save()
assert serializer.data == data
        # Ensure target 2 is updated, and everything else is as expected
queryset = ForeignKeyTarget.objects.all()
serializer = ForeignKeyTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'sources': [2]},
{'id': 2, 'name': 'target-2', 'sources': [1, 3]},
]
assert serializer.data == expected
def test_foreign_key_create(self):
data = {'id': 4, 'name': 'source-4', 'target': 2}
serializer = ForeignKeySourceSerializer(data=data)
assert serializer.is_valid()
obj = serializer.save()
assert serializer.data == data
assert obj.name == 'source-4'
# Ensure source 4 is added, and everything else is as expected
queryset = ForeignKeySource.objects.all()
serializer = ForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': 1},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': 1},
{'id': 4, 'name': 'source-4', 'target': 2},
]
assert serializer.data == expected
def test_reverse_foreign_key_create(self):
data = {'id': 3, 'name': 'target-3', 'sources': [1, 3]}
serializer = ForeignKeyTargetSerializer(data=data)
assert serializer.is_valid()
obj = serializer.save()
assert serializer.data == data
assert obj.name == 'target-3'
# Ensure target 3 is added, and everything else is as expected
queryset = ForeignKeyTarget.objects.all()
serializer = ForeignKeyTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'sources': [2]},
{'id': 2, 'name': 'target-2', 'sources': []},
{'id': 3, 'name': 'target-3', 'sources': [1, 3]},
]
assert serializer.data == expected
def test_foreign_key_update_with_invalid_null(self):
data = {'id': 1, 'name': 'source-1', 'target': None}
instance = ForeignKeySource.objects.get(pk=1)
serializer = ForeignKeySourceSerializer(instance, data=data)
assert not serializer.is_valid()
assert serializer.errors == {'target': ['This field may not be null.']}
def test_foreign_key_with_unsaved(self):
source = ForeignKeySource(name='source-unsaved')
expected = {'id': None, 'name': 'source-unsaved', 'target': None}
serializer = ForeignKeySourceSerializer(source)
# no query if source hasn't been created yet
with self.assertNumQueries(0):
assert serializer.data == expected
def test_foreign_key_with_empty(self):
"""
Regression test for #1072
https://github.com/encode/django-rest-framework/issues/1072
"""
serializer = NullableForeignKeySourceSerializer()
assert serializer.data['target'] is None
def test_foreign_key_not_required(self):
"""
        Let's say we wanted to fill the non-nullable model field inside
        Model.save(); we would make it empty and not required.
"""
class ModelSerializer(ForeignKeySourceSerializer):
class Meta(ForeignKeySourceSerializer.Meta):
extra_kwargs = {'target': {'required': False}}
serializer = ModelSerializer(data={'name': 'test'})
serializer.is_valid(raise_exception=True)
assert 'target' not in serializer.validated_data
class PKNullableForeignKeyTests(TestCase):
def setUp(self):
target = ForeignKeyTarget(name='target-1')
target.save()
for idx in range(1, 4):
if idx == 3:
target = None
source = NullableForeignKeySource(name='source-%d' % idx, target=target)
source.save()
def test_foreign_key_retrieve_with_null(self):
queryset = NullableForeignKeySource.objects.all()
serializer = NullableForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': 1},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': None},
]
assert serializer.data == expected
def test_foreign_key_create_with_valid_null(self):
data = {'id': 4, 'name': 'source-4', 'target': None}
serializer = NullableForeignKeySourceSerializer(data=data)
assert serializer.is_valid()
obj = serializer.save()
assert serializer.data == data
assert obj.name == 'source-4'
# Ensure source 4 is created, and everything else is as expected
queryset = NullableForeignKeySource.objects.all()
serializer = NullableForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': 1},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': None},
{'id': 4, 'name': 'source-4', 'target': None}
]
assert serializer.data == expected
def test_foreign_key_create_with_valid_emptystring(self):
"""
        The empty string should be interpreted as null in the context
of relationships.
"""
data = {'id': 4, 'name': 'source-4', 'target': ''}
expected_data = {'id': 4, 'name': 'source-4', 'target': None}
serializer = NullableForeignKeySourceSerializer(data=data)
assert serializer.is_valid()
obj = serializer.save()
assert serializer.data == expected_data
assert obj.name == 'source-4'
# Ensure source 4 is created, and everything else is as expected
queryset = NullableForeignKeySource.objects.all()
serializer = NullableForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': 1},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': None},
{'id': 4, 'name': 'source-4', 'target': None}
]
assert serializer.data == expected
def test_foreign_key_update_with_valid_null(self):
data = {'id': 1, 'name': 'source-1', 'target': None}
instance = NullableForeignKeySource.objects.get(pk=1)
serializer = NullableForeignKeySourceSerializer(instance, data=data)
assert serializer.is_valid()
serializer.save()
assert serializer.data == data
# Ensure source 1 is updated, and everything else is as expected
queryset = NullableForeignKeySource.objects.all()
serializer = NullableForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': None},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': None}
]
assert serializer.data == expected
def test_foreign_key_update_with_valid_emptystring(self):
"""
        The empty string should be interpreted as null in the context
of relationships.
"""
data = {'id': 1, 'name': 'source-1', 'target': ''}
expected_data = {'id': 1, 'name': 'source-1', 'target': None}
instance = NullableForeignKeySource.objects.get(pk=1)
serializer = NullableForeignKeySourceSerializer(instance, data=data)
assert serializer.is_valid()
serializer.save()
assert serializer.data == expected_data
# Ensure source 1 is updated, and everything else is as expected
queryset = NullableForeignKeySource.objects.all()
serializer = NullableForeignKeySourceSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'source-1', 'target': None},
{'id': 2, 'name': 'source-2', 'target': 1},
{'id': 3, 'name': 'source-3', 'target': None}
]
assert serializer.data == expected
def test_null_uuid_foreign_key_serializes_as_none(self):
source = NullableUUIDForeignKeySource(name='Source')
serializer = NullableUUIDForeignKeySourceSerializer(source)
data = serializer.data
assert data["target"] is None
def test_nullable_uuid_foreign_key_is_valid_when_none(self):
data = {"name": "Source", "target": None}
serializer = NullableUUIDForeignKeySourceSerializer(data=data)
assert serializer.is_valid(), serializer.errors
class PKNullableOneToOneTests(TestCase):
def setUp(self):
target = OneToOneTarget(name='target-1')
target.save()
new_target = OneToOneTarget(name='target-2')
new_target.save()
source = NullableOneToOneSource(name='source-1', target=new_target)
source.save()
def test_reverse_foreign_key_retrieve_with_null(self):
queryset = OneToOneTarget.objects.all()
serializer = NullableOneToOneTargetSerializer(queryset, many=True)
expected = [
{'id': 1, 'name': 'target-1', 'nullable_source': None},
{'id': 2, 'name': 'target-2', 'nullable_source': 1},
]
assert serializer.data == expected
| bsd-2-clause |
rickerc/glance_audit | glance/api/cached_images.py | 5 | 3619 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Controller for Image Cache Management API
"""
import webob.exc
from glance.api import policy
from glance.api.v1 import controller
from glance.common import exception
from glance.common import wsgi
from glance import image_cache
class Controller(controller.BaseController):
"""
A controller for managing cached images.
"""
def __init__(self):
self.cache = image_cache.ImageCache()
self.policy = policy.Enforcer()
def _enforce(self, req):
"""Authorize request against 'manage_image_cache' policy"""
try:
self.policy.enforce(req.context, 'manage_image_cache', {})
except exception.Forbidden:
raise webob.exc.HTTPForbidden()
def get_cached_images(self, req):
"""
GET /cached_images
Returns a mapping of records about cached images.
"""
self._enforce(req)
images = self.cache.get_cached_images()
return dict(cached_images=images)
def delete_cached_image(self, req, image_id):
"""
DELETE /cached_images/<IMAGE_ID>
Removes an image from the cache.
"""
self._enforce(req)
self.cache.delete_cached_image(image_id)
def delete_cached_images(self, req):
"""
DELETE /cached_images - Clear all active cached images
Removes all images from the cache.
"""
self._enforce(req)
return dict(num_deleted=self.cache.delete_all_cached_images())
def get_queued_images(self, req):
"""
GET /queued_images
Returns a mapping of records about queued images.
"""
self._enforce(req)
images = self.cache.get_queued_images()
return dict(queued_images=images)
def queue_image(self, req, image_id):
"""
PUT /queued_images/<IMAGE_ID>
Queues an image for caching. We do not check to see if
the image is in the registry here. That is done by the
prefetcher...
"""
self._enforce(req)
self.cache.queue_image(image_id)
def delete_queued_image(self, req, image_id):
"""
DELETE /queued_images/<IMAGE_ID>
        Removes an image from the queue.
"""
self._enforce(req)
self.cache.delete_queued_image(image_id)
def delete_queued_images(self, req):
"""
DELETE /queued_images - Clear all active queued images
        Removes all images from the queue.
"""
self._enforce(req)
return dict(num_deleted=self.cache.delete_all_queued_images())
class CachedImageDeserializer(wsgi.JSONRequestDeserializer):
pass
class CachedImageSerializer(wsgi.JSONResponseSerializer):
pass
def create_resource():
"""Cached Images resource factory method"""
deserializer = CachedImageDeserializer()
serializer = CachedImageSerializer()
return wsgi.Resource(Controller(), deserializer, serializer)
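# Illustrative client calls against a running Glance API (host and port are
# hypothetical; actual routing depends on the deployment's paste pipeline):
#   curl -X GET    http://glance-host:9292/v1/cached_images
#   curl -X DELETE http://glance-host:9292/v1/cached_images/<IMAGE_ID>
#   curl -X PUT    http://glance-host:9292/v1/queued_images/<IMAGE_ID>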
| apache-2.0 |
walty8/trac | trac/ticket/tests/admin.py | 1 | 12046 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import unittest
from trac.resource import ResourceNotFound
from trac.test import EnvironmentStub, MockRequest
from trac.ticket.admin import ComponentAdminPanel, MilestoneAdminPanel, \
PriorityAdminPanel, ResolutionAdminPanel, \
SeverityAdminPanel, TicketTypeAdminPanel, \
VersionAdminPanel
from trac.ticket.model import Component, Milestone, Priority, Resolution,\
Severity, Type, Version
from trac.web.api import RequestDone
class BaseTestCase(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub(default_data=True)
def tearDown(self):
self.env.reset_db()
class ComponentAdminPanelTestCase(BaseTestCase):
def test_add_component(self):
cap = ComponentAdminPanel(self.env)
name, owner = 'component3', 'user3'
req = MockRequest(self.env, method='POST',
args={'name': name, 'owner': owner, 'add': True})
self.assertRaises(ResourceNotFound, Component, self.env, name)
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
component = Component(self.env, name)
self.assertEqual(name, component.name)
self.assertEqual(owner, component.owner)
def test_remove_component(self):
cap = ComponentAdminPanel(self.env)
name = 'component2'
req = MockRequest(self.env, method='POST',
args={'sel': name, 'remove': True})
component = Component(self.env, name)
self.assertEqual(name, component.name)
self.assertEqual('somebody', component.owner)
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
self.assertRaises(ResourceNotFound, Component, self.env, name)
def test_remove_multiple_components(self):
cap = ComponentAdminPanel(self.env)
names = ['component1', 'component2']
req = MockRequest(self.env, method='POST',
args={'sel': names, 'remove': True})
for name in names:
component = Component(self.env, name)
self.assertEqual(name, component.name)
self.assertEqual('somebody', component.owner)
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
for name in names:
self.assertRaises(ResourceNotFound, Component, self.env, name)
def test_set_default_component(self):
name = 'component2'
config_key = 'default_component'
cap = ComponentAdminPanel(self.env)
req = MockRequest(self.env, method='POST',
args={'default': name, 'apply': True})
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def test_remove_default_component(self):
name = 'component2'
cap = ComponentAdminPanel(self.env)
config_key = 'default_component'
self.env.config.set('ticket', config_key, name)
req = MockRequest(self.env, method='POST',
args={'sel': name, 'remove': True})
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
self.assertEqual('', self.env.config.get('ticket', config_key))
class MilestoneAdminPanelTestCase(BaseTestCase):
def test_add_milestone(self):
name = 'milestone5'
map = MilestoneAdminPanel(self.env)
req = MockRequest(self.env, method='POST',
args={'name': name, 'add': True})
self.assertRaises(ResourceNotFound, Milestone, self.env, name)
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
milestone = Milestone(self.env, name)
self.assertEqual(name, milestone.name)
def test_set_default_milestone(self):
name = 'milestone2'
config_key = 'default_milestone'
map = MilestoneAdminPanel(self.env)
req = MockRequest(self.env, method='POST',
args={'ticket_default': name, 'apply': True})
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def test_set_default_retarget_to(self):
name = 'milestone2'
config_key = 'default_retarget_to'
map = MilestoneAdminPanel(self.env)
req = MockRequest(self.env, method='POST',
args={'retarget_default': name, 'apply': True})
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
self.assertEqual(name, self.env.config.get('milestone', config_key))
def test_remove_default_milestone(self):
name = 'milestone2'
map = MilestoneAdminPanel(self.env)
self.env.config.set('ticket', 'default_milestone', 'milestone2')
self.env.config.set('milestone', 'default_retarget_to', 'milestone2')
req = MockRequest(self.env, method='POST',
args={'sel': name, 'remove': True})
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
self.assertEqual('', self.env.config.get('ticket',
'default_milestone'))
self.assertEqual('', self.env.config.get('milestone',
'default_retarget_to'))
class AbstractEnumTestCase(BaseTestCase):
type = None
cls = None
def _test_add(self, panel, name):
req = MockRequest(self.env, method='POST',
args={'name': name, 'add': True})
self.assertRaises(ResourceNotFound, self.cls, self.env, name)
self.assertRaises(RequestDone, panel.render_admin_panel, req,
'ticket', self.type, None)
item = self.cls(self.env, name)
self.assertEqual(name, item.name)
def _test_set_default(self, panel, name):
config_key = 'default_' + self.type
req = MockRequest(self.env, method='POST',
args={'default': name, 'apply': True})
for item in self.cls.select(self.env):
req.args.update({'value_' + str(item.value): str(item.value)})
self.assertRaises(RequestDone, panel.render_admin_panel, req,
'ticket', self.type, None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def _test_remove_default(self, panel, name):
config_key = 'default_' + self.type
self.env.config.set('ticket', config_key, name)
req = MockRequest(self.env, method='POST',
args={'sel': name, 'remove': True})
self.assertRaises(RequestDone, panel.render_admin_panel, req,
'ticket', self.type, None)
self.assertEqual('', self.env.config.get('ticket', config_key))
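# Concrete subclasses below bind `type` and `cls` and delegate to the three
# helpers above, giving each enum panel (priority, resolution, severity,
# ticket type) the same add / set-default / remove-default coverage.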
class PriorityAdminPanelTestCase(AbstractEnumTestCase):
type = 'priority'
cls = Priority
def test_add_priority(self):
ap = PriorityAdminPanel(self.env)
self._test_add(ap, 'priority 1')
def test_set_default_priority(self):
ap = PriorityAdminPanel(self.env)
self._test_set_default(ap, 'critical')
def test_remove_default_priority(self):
ap = PriorityAdminPanel(self.env)
self._test_remove_default(ap, 'critical')
class ResolutionAdminPanelTestCase(AbstractEnumTestCase):
type = 'resolution'
cls = Resolution
def test_add_resolution(self):
ap = ResolutionAdminPanel(self.env)
self._test_add(ap, 'resolution 1')
def test_set_default_resolution(self):
ap = ResolutionAdminPanel(self.env)
self._test_set_default(ap, 'invalid')
def test_remove_default_resolution(self):
ap = ResolutionAdminPanel(self.env)
self._test_remove_default(ap, 'invalid')
class SeverityAdminPanelTestCase(AbstractEnumTestCase):
type = 'severity'
cls = Severity
def test_add_severity(self):
ap = SeverityAdminPanel(self.env)
self._test_add(ap, 'severity 1')
def test_set_default_severity(self):
s = Severity(self.env)
s.name = 'severity 1'
s.insert()
ap = SeverityAdminPanel(self.env)
self._test_set_default(ap, 'severity 1')
def test_remove_default_severity(self):
s = Severity(self.env)
s.name = 'severity 1'
s.insert()
ap = SeverityAdminPanel(self.env)
self._test_remove_default(ap, 'severity 1')
class TicketTypeAdminPanelTestCase(AbstractEnumTestCase):
type = 'type'
cls = Type
def test_add_type(self):
ap = TicketTypeAdminPanel(self.env)
self._test_add(ap, 'improvement')
def test_set_default_type(self):
ap = TicketTypeAdminPanel(self.env)
self._test_set_default(ap, 'task')
def test_remove_default_type(self):
ap = TicketTypeAdminPanel(self.env)
self._test_remove_default(ap, 'task')
class VersionAdminPanelTestCase(BaseTestCase):
def test_add_version(self):
name = '3.0'
ap = VersionAdminPanel(self.env)
req = MockRequest(self.env, method='POST',
args={'name': name, 'add': True})
self.assertRaises(ResourceNotFound, Version, self.env, name)
self.assertRaises(RequestDone, ap.render_admin_panel, req,
'ticket', 'version', None)
version = Version(self.env, name)
self.assertEqual(name, version.name)
def test_set_default_version(self):
name = '1.0'
ap = VersionAdminPanel(self.env)
config_key = 'default_version'
req = MockRequest(self.env, method='POST',
args={'default': name, 'apply': True})
self.assertRaises(RequestDone, ap.render_admin_panel, req,
'ticket', 'version', None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def test_remove_default_version(self):
name = '1.0'
ap = VersionAdminPanel(self.env)
config_key = 'default_version'
self.env.config.set('ticket', config_key, name)
req = MockRequest(self.env, method='POST',
args={'sel': name, 'remove': True})
self.assertRaises(RequestDone, ap.render_admin_panel, req,
'ticket', 'version', None)
self.assertEqual(self.env.config.get('ticket', config_key), '')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ComponentAdminPanelTestCase))
suite.addTest(unittest.makeSuite(MilestoneAdminPanelTestCase))
suite.addTest(unittest.makeSuite(PriorityAdminPanelTestCase))
suite.addTest(unittest.makeSuite(ResolutionAdminPanelTestCase))
suite.addTest(unittest.makeSuite(SeverityAdminPanelTestCase))
suite.addTest(unittest.makeSuite(TicketTypeAdminPanelTestCase))
suite.addTest(unittest.makeSuite(VersionAdminPanelTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| bsd-3-clause |
jsternberg/ansible-modules-core | network/basics/get_url.py | 6 | 16581 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# see examples/playbooks/get_url.yml
import shutil
import datetime
import re
import tempfile
DOCUMENTATION = '''
---
module: get_url
short_description: Downloads files from HTTP, HTTPS, or FTP to node
description:
- Downloads files from HTTP, HTTPS, or FTP to the remote server. The remote
server I(must) have direct access to the remote resource.
- By default, if an environment variable C(<protocol>_proxy) is set on
the target host, requests will be sent through that proxy. This
behaviour can be overridden by setting a variable for this task
(see `setting the environment
<http://docs.ansible.com/playbooks_environment.html>`_),
or by using the use_proxy option.
- HTTP redirects can redirect from HTTP to HTTPS so you should be sure that
your proxy environment for both protocols is correct.
version_added: "0.6"
options:
url:
description:
- HTTP, HTTPS, or FTP URL in the form (http|https|ftp)://[user[:pass]]@host.domain[:port]/path
required: true
dest:
description:
- absolute path of where to download the file to.
- If C(dest) is a directory, either the server provided filename or, if
none provided, the base name of the URL on the remote server will be
used. If a directory, C(force) has no effect.
If C(dest) is a directory, the file will always be
downloaded (regardless of the force option), but replaced only if the contents changed.
required: true
tmp_dest:
description:
- absolute path of where temporary file is downloaded to.
- Defaults to TMPDIR, TEMP or TMP env variables or a platform specific value
- https://docs.python.org/2/library/tempfile.html#tempfile.tempdir
required: false
default: ''
version_added: '2.1'
force:
description:
- If C(yes) and C(dest) is not a directory, will download the file every
time and replace the file if the contents change. If C(no), the file
will only be downloaded if the destination does not exist. Generally
should be C(yes) only for small local files. Prior to 0.6, this module
behaved as if C(yes) was the default.
version_added: "0.7"
required: false
choices: [ "yes", "no" ]
default: "no"
aliases: [ "thirsty" ]
backup:
description:
- Create a backup file including the timestamp information so you can get
the original file back if you somehow clobbered it incorrectly.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: '2.1'
sha256sum:
description:
- If a SHA-256 checksum is passed to this parameter, the digest of the
destination file will be calculated after it is downloaded to ensure
its integrity and verify that the transfer completed successfully.
This option is deprecated. Use 'checksum'.
version_added: "1.3"
required: false
default: null
checksum:
description:
- 'If a checksum is passed to this parameter, the digest of the
destination file will be calculated after it is downloaded to ensure
its integrity and verify that the transfer completed successfully.
Format: <algorithm>:<checksum>, e.g.: checksum="sha256:D98291AC[...]B6DC7B97"
If you worry about portability, only the sha1 algorithm is available
on all platforms and python versions. The third party hashlib
library can be installed for access to additional algorithms.
        Additionally, if a checksum is passed to this parameter and the file exists under
        the C(dest) location, the destination_checksum will be calculated, and if
        checksum equals destination_checksum, the file download will be skipped
        (unless C(force) is true). '
version_added: "2.0"
required: false
default: null
use_proxy:
description:
- if C(no), it will not use a proxy, even if one is defined in
an environment variable on the target hosts.
required: false
default: 'yes'
choices: ['yes', 'no']
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
timeout:
description:
- Timeout in seconds for URL request
required: false
default: 10
version_added: '1.8'
headers:
description:
- 'Add custom HTTP headers to a request in the format "key:value,key:value"'
required: false
default: null
version_added: '2.0'
url_username:
description:
- The username for use in HTTP basic authentication. This parameter can be used
without C(url_password) for sites that allow empty passwords.
required: false
version_added: '1.6'
url_password:
description:
- The password for use in HTTP basic authentication. If the C(url_username)
parameter is not specified, the C(url_password) parameter will not be used.
required: false
version_added: '1.6'
force_basic_auth:
version_added: '2.0'
description:
      - httplib2, the library used by the uri module, only sends authentication information when a webservice
responds to an initial request with a 401 status. Since some basic auth services do not properly
send a 401, logins will fail. This option forces the sending of the Basic authentication header
upon initial request.
required: false
choices: [ "yes", "no" ]
default: "no"
others:
description:
- all arguments accepted by the M(file) module also work here
required: false
# informational: requirements for nodes
requirements: [ ]
author: "Jan-Piet Mens (@jpmens)"
'''
EXAMPLES='''
- name: download foo.conf
get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf mode=0440
- name: download file and force basic auth
get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf force_basic_auth=yes
- name: download file with custom HTTP headers
get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf headers='key:value,key:value'
- name: download file with check
get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf checksum=sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf checksum=md5:66dffb5228a211e61d6d7ef4a86f5758
- name: download file from a file path
get_url: url="file:///tmp/afile.txt" dest=/tmp/afilecopy.txt
'''
import urlparse
# ==============================================================
# url handling
def url_filename(url):
fn = os.path.basename(urlparse.urlsplit(url)[2])
if fn == '':
return 'index.html'
return fn
def url_get(module, url, dest, use_proxy, last_mod_time, force, timeout=10, headers=None, tmp_dest=''):
"""
Download data from the url and store in a temporary file.
Return (tempfile, info about the request)
"""
rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout, headers=headers)
if info['status'] == 304:
module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', ''))
# create a temporary file and copy content to do checksum-based replacement
if info['status'] != 200 and not url.startswith('file:/'):
module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], url=url, dest=dest)
if tmp_dest != '':
# tmp_dest should be an existing dir
tmp_dest_is_dir = os.path.isdir(tmp_dest)
if not tmp_dest_is_dir:
if os.path.exists(tmp_dest):
module.fail_json(msg="%s is a file but should be a directory." % tmp_dest)
else:
                module.fail_json(msg="%s directory does not exist." % tmp_dest)
fd, tempname = tempfile.mkstemp(dir=tmp_dest)
else:
fd, tempname = tempfile.mkstemp()
f = os.fdopen(fd, 'wb')
try:
shutil.copyfileobj(rsp, f)
except Exception:
err = get_exception()
os.remove(tempname)
module.fail_json(msg="failed to create temporary content file: %s" % str(err))
f.close()
rsp.close()
return tempname, info
def extract_filename_from_headers(headers):
"""
Extracts a filename from the given dict of HTTP headers.
Looks for the content-disposition header and applies a regex.
Returns the filename if successful, else None."""
cont_disp_regex = 'attachment; ?filename="?([^"]+)'
res = None
if 'content-disposition' in headers:
cont_disp = headers['content-disposition']
match = re.match(cont_disp_regex, cont_disp)
if match:
res = match.group(1)
# Try preventing any funny business.
res = os.path.basename(res)
return res
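# For example (illustrative), a headers dict such as
#   {'content-disposition': 'attachment; filename="report.pdf"'}
# yields 'report.pdf', while headers lacking content-disposition yield None.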
# ==============================================================
# main
def main():
argument_spec = url_argument_spec()
argument_spec.update(
url = dict(required=True),
dest = dict(required=True),
backup = dict(default=False, type='bool'),
sha256sum = dict(default=''),
checksum = dict(default=''),
timeout = dict(required=False, type='int', default=10),
headers = dict(required=False, default=None),
tmp_dest = dict(required=False, default=''),
)
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec = argument_spec,
add_file_common_args=True
)
url = module.params['url']
dest = os.path.expanduser(module.params['dest'])
backup = module.params['backup']
force = module.params['force']
sha256sum = module.params['sha256sum']
checksum = module.params['checksum']
use_proxy = module.params['use_proxy']
timeout = module.params['timeout']
tmp_dest = os.path.expanduser(module.params['tmp_dest'])
# Parse headers to dict
if module.params['headers']:
try:
headers = dict(item.split(':') for item in module.params['headers'].split(','))
except:
module.fail_json(msg="The header parameter requires a key:value,key:value syntax to be properly parsed.")
else:
headers = None
dest_is_dir = os.path.isdir(dest)
last_mod_time = None
# workaround for usage of deprecated sha256sum parameter
if sha256sum != '':
checksum = 'sha256:%s' % (sha256sum)
# checksum specified, parse for algorithm and checksum
if checksum != '':
try:
algorithm, checksum = checksum.rsplit(':', 1)
# Remove any non-alphanumeric characters, including the infamous
# Unicode zero-width space
checksum = re.sub(r'\W+', '', checksum).lower()
# Ensure the checksum portion is a hexdigest
int(checksum, 16)
except ValueError:
module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>")
if not dest_is_dir and os.path.exists(dest):
checksum_mismatch = False
# If the download is not forced and there is a checksum, allow
# checksum match to skip the download.
if not force and checksum != '':
destination_checksum = module.digest_from_file(dest, algorithm)
if checksum == destination_checksum:
module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)
checksum_mismatch = True
# Not forcing redownload, unless checksum does not match
if not force and not checksum_mismatch:
# allow file attribute changes
module.params['path'] = dest
file_args = module.load_file_common_arguments(module.params)
file_args['path'] = dest
changed = module.set_fs_attributes_if_different(file_args, False)
if changed:
module.exit_json(msg="file already exists but file attributes changed", dest=dest, url=url, changed=changed)
module.exit_json(msg="file already exists", dest=dest, url=url, changed=changed)
# If the file already exists, prepare the last modified time for the
# request.
mtime = os.path.getmtime(dest)
last_mod_time = datetime.datetime.utcfromtimestamp(mtime)
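        # fetch_url() turns this timestamp into an If-Modified-Since header;
        # a 304 response then exits early inside url_get() with changed=False.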
# download to tmpsrc
tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest)
# Now the request has completed, we can finally generate the final
# destination file name from the info dict.
if dest_is_dir:
filename = extract_filename_from_headers(info)
if not filename:
# Fall back to extracting the filename from the URL.
# Pluck the URL from the info, since a redirect could have changed
# it.
filename = url_filename(info['url'])
dest = os.path.join(dest, filename)
checksum_src = None
checksum_dest = None
# raise an error if there is no tmpsrc file
if not os.path.exists(tmpsrc):
os.remove(tmpsrc)
module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
if not os.access(tmpsrc, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Source %s not readable" % (tmpsrc))
checksum_src = module.sha1(tmpsrc)
# check if there is no dest file
if os.path.exists(dest):
# raise an error if copy has no permission on dest
if not os.access(dest, os.W_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not writable" % (dest))
if not os.access(dest, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not readable" % (dest))
checksum_dest = module.sha1(dest)
else:
if not os.access(os.path.dirname(dest), os.W_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not writable" % (os.path.dirname(dest)))
backup_file = None
if checksum_src != checksum_dest:
try:
if backup:
if os.path.exists(dest):
backup_file = module.backup_local(dest)
shutil.copyfile(tmpsrc, dest)
except Exception:
err = get_exception()
os.remove(tmpsrc)
module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
changed = True
else:
changed = False
if checksum != '':
destination_checksum = module.digest_from_file(dest, algorithm)
if checksum != destination_checksum:
os.remove(dest)
module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum))
os.remove(tmpsrc)
# allow file attribute changes
module.params['path'] = dest
file_args = module.load_file_common_arguments(module.params)
file_args['path'] = dest
changed = module.set_fs_attributes_if_different(file_args, changed)
# Backwards compat only. We'll return None on FIPS enabled systems
try:
md5sum = module.md5(dest)
except ValueError:
md5sum = None
res_args = dict(
url = url, dest = dest, src = tmpsrc, md5sum = md5sum, checksum_src = checksum_src,
checksum_dest = checksum_dest, changed = changed, msg = info.get('msg', '')
)
if backup_file:
res_args['backup_file'] = backup_file
# Mission complete
module.exit_json(**res_args)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |