repo_name | path | copies | size | content | license
---|---|---|---|---|---|
technicalpickles/zulip | scripts/setup/generate_secrets.py | 114 | 2179 |
#!/usr/bin/env python
# This tool generates local_settings_generated.py using the template
import sys, os, os.path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
os.environ['DJANGO_SETTINGS_MODULE'] = 'zproject.settings'
from django.utils.crypto import get_random_string
from zerver.lib.utils import generate_random_token
os.chdir(os.path.join(os.path.dirname(__file__), '..', '..'))
CAMO_CONFIG_FILENAME = '/etc/default/camo'
AUTOGENERATED_SETTINGS = ['shared_secret', 'avatar_salt', 'rabbitmq_password', 'local_database_password',
'initial_password_salt']
def generate_camo_config_file(camo_key):
camo_config = """ENABLED=yes
PORT=9292
CAMO_KEY=%s
""" % (camo_key,)
with open(CAMO_CONFIG_FILENAME, 'w') as camo_file:
camo_file.write(camo_config)
print "Generated Camo config file %s" % (CAMO_CONFIG_FILENAME,)
def generate_django_secretkey():
# Secret key generation taken from Django's startproject.py
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
return get_random_string(50, chars)
def generate_secrets(development=False):
if development:
OUTPUT_SETTINGS_FILENAME = "zproject/dev-secrets.conf"
else:
OUTPUT_SETTINGS_FILENAME = "/etc/zulip/zulip-secrets.conf"
lines = ['[secrets]\n']
def config_line(var, value):
return "%s = %s\n" % (var, value)
for name in AUTOGENERATED_SETTINGS:
lines.append(config_line(name, generate_random_token(64)))
lines.append(config_line('secret_key', generate_django_secretkey()))
camo_key = get_random_string(64)
lines.append(config_line('camo_key', camo_key))
if not development:
# Write the Camo config file directly
generate_camo_config_file(camo_key)
out = open(OUTPUT_SETTINGS_FILENAME, 'w')
out.write("".join(lines))
out.close()
print "Generated %s with auto-generated secrets!" % (OUTPUT_SETTINGS_FILENAME,)
if __name__ == '__main__':
development = False
extra_args = sys.argv[1:]
if len(extra_args) and extra_args[0] in ('-d', '--development'):
development = True
generate_secrets(development)
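# Usage sketch (hypothetical invocations, inferred from the argument
# handling above):
#   ./scripts/setup/generate_secrets.py       # writes /etc/zulip/zulip-secrets.conf
#   ./scripts/setup/generate_secrets.py -d    # writes zproject/dev-secrets.conf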
| apache-2.0 |
YOTOV-LIMITED/kuma | vendor/packages/nose/plugins/cover.py | 48 | 11673 |
"""If you have Ned Batchelder's coverage_ module installed, you may activate a
coverage report with the ``--with-coverage`` switch or NOSE_WITH_COVERAGE
environment variable. The coverage report will cover any python source module
imported after the start of the test run, excluding modules that match
testMatch. If you want to include those modules too, use the ``--cover-tests``
switch, or set the NOSE_COVER_TESTS environment variable to a true value. To
restrict the coverage report to modules from a particular package or packages,
use the ``--cover-package`` switch or the NOSE_COVER_PACKAGE environment
variable.
.. _coverage: http://www.nedbatchelder.com/code/modules/coverage.html
"""
import logging
import re
import sys
import StringIO
from nose.plugins.base import Plugin
from nose.util import src, tolist
log = logging.getLogger(__name__)
class Coverage(Plugin):
"""
Activate a coverage report using Ned Batchelder's coverage module.
"""
coverTests = False
coverPackages = None
coverInstance = None
coverErase = False
coverMinPercentage = None
score = 200
status = {}
def options(self, parser, env):
"""
Add options to command line.
"""
super(Coverage, self).options(parser, env)
parser.add_option("--cover-package", action="append",
default=env.get('NOSE_COVER_PACKAGE'),
metavar="PACKAGE",
dest="cover_packages",
help="Restrict coverage output to selected packages "
"[NOSE_COVER_PACKAGE]")
parser.add_option("--cover-erase", action="store_true",
default=env.get('NOSE_COVER_ERASE'),
dest="cover_erase",
help="Erase previously collected coverage "
"statistics before run")
parser.add_option("--cover-tests", action="store_true",
dest="cover_tests",
default=env.get('NOSE_COVER_TESTS'),
help="Include test modules in coverage report "
"[NOSE_COVER_TESTS]")
parser.add_option("--cover-min-percentage", action="store",
dest="cover_min_percentage",
default=env.get('NOSE_COVER_MIN_PERCENTAGE'),
help="Minimum percentage of coverage for tests "
"to pass [NOSE_COVER_MIN_PERCENTAGE]")
parser.add_option("--cover-inclusive", action="store_true",
dest="cover_inclusive",
default=env.get('NOSE_COVER_INCLUSIVE'),
help="Include all python files under working "
"directory in coverage report. Useful for "
"discovering holes in test coverage if not all "
"files are imported by the test suite. "
"[NOSE_COVER_INCLUSIVE]")
parser.add_option("--cover-html", action="store_true",
default=env.get('NOSE_COVER_HTML'),
dest='cover_html',
help="Produce HTML coverage information")
parser.add_option('--cover-html-dir', action='store',
default=env.get('NOSE_COVER_HTML_DIR', 'cover'),
dest='cover_html_dir',
metavar='DIR',
help='Produce HTML coverage information in dir')
parser.add_option("--cover-branches", action="store_true",
default=env.get('NOSE_COVER_BRANCHES'),
dest="cover_branches",
help="Include branch coverage in coverage report "
"[NOSE_COVER_BRANCHES]")
parser.add_option("--cover-xml", action="store_true",
default=env.get('NOSE_COVER_XML'),
dest="cover_xml",
help="Produce XML coverage information")
parser.add_option("--cover-xml-file", action="store",
default=env.get('NOSE_COVER_XML_FILE', 'coverage.xml'),
dest="cover_xml_file",
metavar="FILE",
help="Produce XML coverage information in file")
def configure(self, options, conf):
"""
Configure plugin.
"""
try:
self.status.pop('active')
except KeyError:
pass
super(Coverage, self).configure(options, conf)
if self.enabled:
try:
import coverage
if not hasattr(coverage, 'coverage'):
raise ImportError("Unable to import coverage module")
except ImportError:
log.error("Coverage not available: "
"unable to import coverage module")
self.enabled = False
return
self.conf = conf
self.coverErase = options.cover_erase
self.coverTests = options.cover_tests
self.coverPackages = []
if options.cover_packages:
if isinstance(options.cover_packages, (list, tuple)):
cover_packages = options.cover_packages
else:
cover_packages = [options.cover_packages]
for pkgs in [tolist(x) for x in cover_packages]:
self.coverPackages.extend(pkgs)
self.coverInclusive = options.cover_inclusive
if self.coverPackages:
log.info("Coverage report will include only packages: %s",
self.coverPackages)
self.coverHtmlDir = None
if options.cover_html:
self.coverHtmlDir = options.cover_html_dir
log.debug('Will put HTML coverage report in %s', self.coverHtmlDir)
self.coverBranches = options.cover_branches
self.coverXmlFile = None
if options.cover_min_percentage:
self.coverMinPercentage = int(options.cover_min_percentage.rstrip('%'))
if options.cover_xml:
self.coverXmlFile = options.cover_xml_file
log.debug('Will put XML coverage report in %s', self.coverXmlFile)
if self.enabled:
self.status['active'] = True
self.coverInstance = coverage.coverage(auto_data=False,
branch=self.coverBranches, data_suffix=conf.worker,
source=self.coverPackages)
self.coverInstance._warn_no_data = False
self.coverInstance.is_worker = conf.worker
self.coverInstance.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
log.debug("Coverage begin")
self.skipModules = sys.modules.keys()[:]
if self.coverErase:
log.debug("Clearing previously collected coverage statistics")
self.coverInstance.combine()
self.coverInstance.erase()
if not self.coverInstance.is_worker:
self.coverInstance.load()
self.coverInstance.start()
def beforeTest(self, *args, **kwargs):
"""
Begin recording coverage information.
"""
if self.coverInstance.is_worker:
self.coverInstance.load()
self.coverInstance.start()
def afterTest(self, *args, **kwargs):
"""
Stop recording coverage information.
"""
if self.coverInstance.is_worker:
self.coverInstance.stop()
self.coverInstance.save()
def report(self, stream):
"""
Output code coverage report.
"""
log.debug("Coverage report")
self.coverInstance.stop()
self.coverInstance.combine()
self.coverInstance.save()
modules = [module
for name, module in sys.modules.items()
if self.wantModuleCoverage(name, module)]
log.debug("Coverage report will cover modules: %s", modules)
self.coverInstance.report(modules, file=stream)
import coverage
if self.coverHtmlDir:
log.debug("Generating HTML coverage report")
try:
self.coverInstance.html_report(modules, self.coverHtmlDir)
except coverage.misc.CoverageException, e:
log.warning("Failed to generate HTML report: %s" % str(e))
if self.coverXmlFile:
log.debug("Generating XML coverage report")
try:
self.coverInstance.xml_report(modules, self.coverXmlFile)
except coverage.misc.CoverageException, e:
log.warning("Failed to generate XML report: %s" % str(e))
# make sure we have minimum required coverage
if self.coverMinPercentage:
f = StringIO.StringIO()
self.coverInstance.report(modules, file=f)
multiPackageRe = (r'-------\s\w+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
r'\s+(\d+)%\s+\d*\s{0,1}$')
singlePackageRe = (r'-------\s[\w./]+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
r'\s+(\d+)%(?:\s+[-\d, ]+)\s{0,1}$')
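# Both patterns pull the percentage out of the TOTAL row at the end of
# coverage's text report; an illustrative (made-up) report tail:
#   ----------------------------------------
#   TOTAL        120     30     75%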
m = re.search(multiPackageRe, f.getvalue())
if m is None:
m = re.search(singlePackageRe, f.getvalue())
if m:
percentage = int(m.groups()[0])
if percentage < self.coverMinPercentage:
log.error('TOTAL Coverage did not reach minimum '
'required: %d%%' % self.coverMinPercentage)
sys.exit(1)
else:
log.error("No total percentage was found in coverage output, "
"something went wrong.")
def wantModuleCoverage(self, name, module):
if not hasattr(module, '__file__'):
log.debug("no coverage of %s: no __file__", name)
return False
module_file = src(module.__file__)
if not module_file or not module_file.endswith('.py'):
log.debug("no coverage of %s: not a python file", name)
return False
if self.coverPackages:
for package in self.coverPackages:
if (re.findall(r'^%s\b' % re.escape(package), name)
and (self.coverTests
or not self.conf.testMatch.search(name))):
log.debug("coverage for %s", name)
return True
if name in self.skipModules:
log.debug("no coverage for %s: loaded before coverage start",
name)
return False
if self.conf.testMatch.search(name) and not self.coverTests:
log.debug("no coverage for %s: is a test", name)
return False
# accept any package that passed the previous tests, unless
# coverPackages is on -- in that case, if we wanted this
# module, we would have already returned True
return not self.coverPackages
def wantFile(self, file, package=None):
"""If inclusive coverage enabled, return true for all source files
in wanted packages.
"""
if self.coverInclusive:
if file.endswith(".py"):
if package and self.coverPackages:
for want in self.coverPackages:
if package.startswith(want):
return True
else:
return True
return None
| mpl-2.0 |
EchO-KID/google-breakpad | src/testing/gtest/test/gtest_filter_unittest.py | 2826 | 21261 |
#!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test test filters.
A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.
Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import sets
import sys
import gtest_test_utils
# Constants.
# Checks if this platform can pass empty environment variables to child
# processes. We set an env variable to an empty string and invoke a python
# script in a subprocess to print whether the variable is STILL in
# os.environ. We then use 'eval' to parse the child's output so that an
# exception is thrown if the input is anything other than 'True' or 'False'.
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
CAN_PASS_EMPTY_ENV = eval(child.output)
# Check if this platform can unset environment variables in child processes.
# We set an env variable to a non-empty string, unset it, and invoke
# a python script in a subprocess to print whether the variable
# is NO LONGER in os.environ.
# We use 'eval' to parse the child's output so that an exception
# is thrown if the input is neither 'True' nor 'False'.
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'])
CAN_UNSET_ENV = eval(child.output)
# Checks if we should test with an empty filter. This doesn't
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'
# The command line flag for including disabled tests.
ALSO_RUN_DISABED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')
# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'
# Indicates whether Google Test supports death tests.
SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
[COMMAND, LIST_TESTS_FLAG]).output
# Full names of all tests in gtest_filter_unittests_.
PARAM_TESTS = [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestX/1',
'SeqQ/ParamTest.TestY/0',
'SeqQ/ParamTest.TestY/1',
]
DISABLED_TESTS = [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
]
if SUPPORTS_DEATH_TESTS:
DEATH_TESTS = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
]
else:
DEATH_TESTS = []
# All the non-disabled tests.
ACTIVE_TESTS = [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS
param_tests_present = None
# Utilities.
environ = os.environ.copy()
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def RunAndReturnOutput(args = None):
"""Runs the test program and returns its output."""
return gtest_test_utils.Subprocess([COMMAND] + (args or []),
env=environ).output
def RunAndExtractTestList(args = None):
"""Runs the test program and returns its exit code and a list of tests run."""
p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
tests_run = []
test_case = ''
test = ''
for line in p.output.split('\n'):
match = TEST_CASE_REGEX.match(line)
if match is not None:
test_case = match.group(1)
else:
match = TEST_REGEX.match(line)
if match is not None:
test = match.group(1)
tests_run.append(test_case + '.' + test)
return (tests_run, p.exit_code)
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
"""Runs the given function and arguments in a modified environment."""
try:
original_env = environ.copy()
environ.update(extra_env)
return function(*args, **kwargs)
finally:
environ.clear()
environ.update(original_env)
def RunWithSharding(total_shards, shard_index, command):
"""Runs a test program shard and returns exit code and a list of tests run."""
extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
TOTAL_SHARDS_ENV_VAR: str(total_shards)}
return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
# The unit test.
class GTestFilterUnitTest(gtest_test_utils.TestCase):
"""Tests the env variable or the command line flag to filter tests."""
# Utilities.
def AssertSetEqual(self, lhs, rhs):
"""Asserts that two sets are equal."""
for elem in lhs:
self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
for elem in rhs:
self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
def AssertPartitionIsValid(self, set_var, list_of_sets):
"""Asserts that list_of_sets is a valid partition of set_var."""
full_partition = []
for slice_var in list_of_sets:
full_partition.extend(slice_var)
self.assertEqual(len(set_var), len(full_partition))
self.assertEqual(sets.Set(set_var), sets.Set(full_partition))
def AdjustForParameterizedTests(self, tests_to_run):
"""Adjust tests_to_run in case value parameterized tests are disabled."""
global param_tests_present
if not param_tests_present:
return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
else:
return tests_to_run
def RunAndVerify(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for a given filter."""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# First, tests using the environment variable.
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
tests_run = RunAndExtractTestList()[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, tests_to_run)
# pylint: enable-msg=C6403
# Next, tests using the command line flag.
if gtest_filter is None:
args = []
else:
args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
args=None, check_exit_0=False):
"""Checks that binary runs correct tests for the given filter and shard.
Runs all shards of gtest_filter_unittest_ with the given filter, and
verifies that the right set of tests were run. The union of tests run
on each shard should be identical to tests_to_run, without duplicates.
Args:
gtest_filter: A filter to apply to the tests.
total_shards: A total number of shards to split test run into.
tests_to_run: A set of tests expected to run.
args: Arguments to pass to the test binary.
check_exit_0: When set to a true value, make sure that all shards
return 0.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
partition = []
for i in range(0, total_shards):
(tests_run, exit_code) = RunWithSharding(total_shards, i, args)
if check_exit_0:
self.assertEqual(0, exit_code)
partition.append(tests_run)
self.AssertPartitionIsValid(tests_to_run, partition)
SetEnvVar(FILTER_ENV_VAR, None)
# pylint: enable-msg=C6403
def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for the given filter.
Runs gtest_filter_unittest_ with the given filter, and enables
disabled tests. Verifies that the right set of tests were run.
Args:
gtest_filter: A filter to apply to the tests.
tests_to_run: A set of tests expected to run.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# Construct the command line.
args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
if gtest_filter is not None:
args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
def setUp(self):
"""Sets up test case.
Determines whether value-parameterized tests are enabled in the binary and
sets the flags accordingly.
"""
global param_tests_present
if param_tests_present is None:
param_tests_present = PARAM_TEST_REGEX.search(
RunAndReturnOutput()) is not None
def testDefaultBehavior(self):
"""Tests the behavior of not specifying the filter."""
self.RunAndVerify(None, ACTIVE_TESTS)
def testDefaultBehaviorWithShards(self):
"""Tests the behavior without the filter, with sharding enabled."""
self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)
def testEmptyFilter(self):
"""Tests an empty filter."""
self.RunAndVerify('', [])
self.RunAndVerifyWithSharding('', 1, [])
self.RunAndVerifyWithSharding('', 2, [])
def testBadFilter(self):
"""Tests a filter that matches nothing."""
self.RunAndVerify('BadFilter', [])
self.RunAndVerifyAllowingDisabled('BadFilter', [])
def testFullName(self):
"""Tests filtering by full name."""
self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])
def testUniversalFilters(self):
"""Tests filters that match everything."""
self.RunAndVerify('*', ACTIVE_TESTS)
self.RunAndVerify('*.*', ACTIVE_TESTS)
self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)
def testFilterByTestCase(self):
"""Tests filtering by test case name."""
self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])
BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
self.RunAndVerify('BazTest.*', BAZ_TESTS)
self.RunAndVerifyAllowingDisabled('BazTest.*',
BAZ_TESTS + ['BazTest.DISABLED_TestC'])
def testFilterByTest(self):
"""Tests filtering by test name."""
self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])
def testFilterDisabledTests(self):
"""Select only the disabled tests to run."""
self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
['DISABLED_FoobarTest.Test1'])
self.RunAndVerify('*DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
self.RunAndVerify('*.DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.DISABLED_Test2',
])
self.RunAndVerify('DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('DISABLED_*', [
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
])
def testWildcardInTestCaseName(self):
"""Tests using wildcard in the test case name."""
self.RunAndVerify('*a*.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
def testWildcardInTestName(self):
"""Tests using wildcard in the test name."""
self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testFilterWithoutDot(self):
"""Tests a filter that has no '.' in it."""
self.RunAndVerify('*z*', [
'FooTest.Xyz',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
])
def testTwoPatterns(self):
"""Tests filters that consist of two patterns."""
self.RunAndVerify('Foo*.*:*A*', [
'FooTest.Abc',
'FooTest.Xyz',
'BazTest.TestA',
])
# An empty pattern + a non-empty one
self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testThreePatterns(self):
"""Tests filters that consist of three patterns."""
self.RunAndVerify('*oo*:*A*:*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
'BazTest.TestA',
])
# The 2nd pattern is empty.
self.RunAndVerify('*oo*::*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
])
# The last 2 patterns are empty.
self.RunAndVerify('*oo*::', [
'FooTest.Abc',
'FooTest.Xyz',
])
def testNegativeFilters(self):
self.RunAndVerify('*-BazTest.TestOne', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('BarTest.*-BarTest.TestOne', [
'BarTest.TestTwo',
'BarTest.TestThree',
])
# Tests without leading '*'.
self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
# Value parameterized tests.
self.RunAndVerify('*/*', PARAM_TESTS)
# Value parameterized tests filtering by the sequence name.
self.RunAndVerify('SeqP/*', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
])
# Value parameterized tests filtering by the test name.
self.RunAndVerify('*/0', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestY/0',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestY/0',
])
def testFlagOverridesEnvVar(self):
"""Tests that the filter flag overrides the filtering env. variable."""
SetEnvVar(FILTER_ENV_VAR, 'Foo*')
args = ['--%s=%s' % (FILTER_FLAG, '*One')]
tests_run = RunAndExtractTestList(args)[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
def testShardStatusFileIsCreated(self):
"""Tests that the shard file is created if specified in the environment."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
finally:
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
def testShardStatusFileIsCreatedWithListTests(self):
"""Tests that the shard file is created with the "list_tests" flag."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file2')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
output = InvokeWithModifiedEnv(extra_env,
RunAndReturnOutput,
[LIST_TESTS_FLAG])
finally:
# This assertion ensures that Google Test enumerated the tests as
# opposed to running them.
self.assert_('[==========]' not in output,
'Unexpected output during test enumeration.\n'
'Please ensure that LIST_TESTS_FLAG is assigned the\n'
'correct flag value for listing Google Test tests.')
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
if SUPPORTS_DEATH_TESTS:
def testShardingWorksWithDeathTests(self):
"""Tests integration with death tests and sharding."""
gtest_filter = 'HasDeathTest.*:SeqP/*'
expected_tests = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
]
for flag in ['--gtest_death_test_style=threadsafe',
'--gtest_death_test_style=fast']:
self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
check_exit_0=True, args=[flag])
self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
check_exit_0=True, args=[flag])
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause |
boomsbloom/dtm-fmri | DTM/for_gensim/lib/python2.7/site-packages/sklearn/manifold/tests/test_locally_linear.py | 85 | 5600 |
from itertools import product
import numpy as np
from numpy.testing import assert_almost_equal, assert_array_almost_equal
from scipy import linalg
from sklearn import neighbors, manifold
from sklearn.manifold.locally_linear import barycenter_kneighbors_graph
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
eigen_solvers = ['dense', 'arpack']
# ----------------------------------------------------------------------
# Test utility routines
def test_barycenter_kneighbors_graph():
X = np.array([[0, 1], [1.01, 1.], [2, 0]])
A = barycenter_kneighbors_graph(X, 1)
assert_array_almost_equal(
A.toarray(),
[[0., 1., 0.],
[1., 0., 0.],
[0., 1., 0.]])
A = barycenter_kneighbors_graph(X, 2)
# check that columns sum to one
assert_array_almost_equal(np.sum(A.toarray(), 1), np.ones(3))
pred = np.dot(A.toarray(), X)
assert_less(linalg.norm(pred - X) / X.shape[0], 1)
# ----------------------------------------------------------------------
# Test LLE by computing the reconstruction error on some manifolds.
def test_lle_simple_grid():
# note: ARPACK is numerically unstable, so this test will fail for
# some random seeds. We choose 2 because the tests pass.
rng = np.random.RandomState(2)
# grid of equidistant points in 2D, n_components = n_dim
X = np.array(list(product(range(5), repeat=2)))
X = X + 1e-10 * rng.uniform(size=X.shape)
n_components = 2
clf = manifold.LocallyLinearEmbedding(n_neighbors=5,
n_components=n_components,
random_state=rng)
tol = 0.1
N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
reconstruction_error = linalg.norm(np.dot(N, X) - X, 'fro')
assert_less(reconstruction_error, tol)
for solver in eigen_solvers:
clf.set_params(eigen_solver=solver)
clf.fit(X)
assert_true(clf.embedding_.shape[1] == n_components)
reconstruction_error = linalg.norm(
np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
assert_less(reconstruction_error, tol)
assert_almost_equal(clf.reconstruction_error_,
reconstruction_error, decimal=1)
# re-embed a noisy version of X using the transform method
noise = rng.randn(*X.shape) / 100
X_reembedded = clf.transform(X + noise)
assert_less(linalg.norm(X_reembedded - clf.embedding_), tol)
def test_lle_manifold():
rng = np.random.RandomState(0)
# similar test on a slightly more complex manifold
X = np.array(list(product(np.arange(18), repeat=2)))
X = np.c_[X, X[:, 0] ** 2 / 18]
X = X + 1e-10 * rng.uniform(size=X.shape)
n_components = 2
for method in ["standard", "hessian", "modified", "ltsa"]:
clf = manifold.LocallyLinearEmbedding(n_neighbors=6,
n_components=n_components,
method=method, random_state=0)
tol = 1.5 if method == "standard" else 3
N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
reconstruction_error = linalg.norm(np.dot(N, X) - X)
assert_less(reconstruction_error, tol)
for solver in eigen_solvers:
clf.set_params(eigen_solver=solver)
clf.fit(X)
assert_true(clf.embedding_.shape[1] == n_components)
reconstruction_error = linalg.norm(
np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
details = ("solver: %s, method: %s" % (solver, method))
assert_less(reconstruction_error, tol, msg=details)
assert_less(np.abs(clf.reconstruction_error_ -
reconstruction_error),
tol * reconstruction_error, msg=details)
# Test the error raised when parameter passed to lle is invalid
def test_lle_init_parameters():
X = np.random.rand(5, 3)
clf = manifold.LocallyLinearEmbedding(eigen_solver="error")
msg = "unrecognized eigen_solver 'error'"
assert_raise_message(ValueError, msg, clf.fit, X)
clf = manifold.LocallyLinearEmbedding(method="error")
msg = "unrecognized method 'error'"
assert_raise_message(ValueError, msg, clf.fit, X)
def test_pipeline():
# check that LocallyLinearEmbedding works fine as a Pipeline
# only checks that no error is raised.
# TODO check that it actually does something useful
from sklearn import pipeline, datasets
X, y = datasets.make_blobs(random_state=0)
clf = pipeline.Pipeline(
[('filter', manifold.LocallyLinearEmbedding(random_state=0)),
('clf', neighbors.KNeighborsClassifier())])
clf.fit(X, y)
assert_less(.9, clf.score(X, y))
# Test the error raised when the weight matrix is singular
def test_singular_matrix():
M = np.ones((10, 3))
f = ignore_warnings
assert_raises(ValueError, f(manifold.locally_linear_embedding),
M, 2, 1, method='standard', eigen_solver='arpack')
# regression test for #6033
def test_integer_input():
rand = np.random.RandomState(0)
X = rand.randint(0, 100, size=(20, 3))
for method in ["standard", "hessian", "modified", "ltsa"]:
clf = manifold.LocallyLinearEmbedding(method=method, n_neighbors=10)
clf.fit(X) # this previously raised a TypeError
| mit |
arraypan/Food-Stickers | FoodStickersMessages/lib/python3.5/site-packages/PIL/ImageQt.py | 14 | 6051 |
#
# The Python Imaging Library.
# $Id$
#
# a simple Qt image interface.
#
# history:
# 2006-06-03 fl: created
# 2006-06-04 fl: inherit from QImage instead of wrapping it
# 2006-06-05 fl: removed toimage helper; move string support to ImageQt
# 2013-11-13 fl: add support for Qt5 ([email protected])
#
# Copyright (c) 2006 by Secret Labs AB
# Copyright (c) 2006 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from PIL import Image
from PIL._util import isPath
from io import BytesIO
qt_is_installed = True
qt_version = None
try:
from PyQt5.QtGui import QImage, qRgba, QPixmap
from PyQt5.QtCore import QBuffer, QIODevice
qt_version = '5'
except (ImportError, RuntimeError):
try:
from PyQt4.QtGui import QImage, qRgba, QPixmap
from PyQt4.QtCore import QBuffer, QIODevice
qt_version = '4'
except (ImportError, RuntimeError):
try:
from PySide.QtGui import QImage, qRgba, QPixmap
from PySide.QtCore import QBuffer, QIODevice
qt_version = 'side'
except ImportError:
qt_is_installed = False
def rgb(r, g, b, a=255):
"""(Internal) Turns an RGB color into a Qt compatible color integer."""
# use qRgb to pack the colors, and then turn the resulting long
# into a negative integer with the same bitpattern.
return (qRgba(r, g, b, a) & 0xffffffff)
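# Illustrative: rgb(255, 0, 0) returns 0xffff0000 (opaque red in Qt's
# AARRGGBB packing), i.e. 4294901760 as an unsigned 32-bit value.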
def fromqimage(im):
"""
:param im: A PIL Image object, or a file name
(given either as Python string or a PyQt string object)
"""
buffer = QBuffer()
buffer.open(QIODevice.ReadWrite)
# preserve the alpha channel with png
# otherwise ppm is more friendly with Image.open
if im.hasAlphaChannel():
im.save(buffer, 'png')
else:
im.save(buffer, 'ppm')
b = BytesIO()
try:
b.write(buffer.data())
except TypeError:
# workaround for Python 2
b.write(str(buffer.data()))
buffer.close()
b.seek(0)
return Image.open(b)
def fromqpixmap(im):
return fromqimage(im)
# buffer = QBuffer()
# buffer.open(QIODevice.ReadWrite)
# # im.save(buffer)
# # What if png doesn't support some image features like animation?
# im.save(buffer, 'ppm')
# bytes_io = BytesIO()
# bytes_io.write(buffer.data())
# buffer.close()
# bytes_io.seek(0)
# return Image.open(bytes_io)
def align8to32(bytes, width, mode):
"""
converts each scanline of data from 8 bit to 32 bit aligned
"""
bits_per_pixel = {
'1': 1,
'L': 8,
'P': 8,
}[mode]
# calculate bytes per line and the extra padding if needed
bits_per_line = bits_per_pixel * width
full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8)
bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0)
extra_padding = -bytes_per_line % 4
# already 32 bit aligned by luck
if not extra_padding:
return bytes
new_data = []
for i in range(len(bytes) // bytes_per_line):
new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding)
return b''.join(new_data)
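# Illustrative: a 9-pixel-wide mode '1' image packs to 2 bytes per
# scanline (9 bits -> 2 bytes), so align8to32 appends 2 zero bytes to
# each line to reach the 4-byte (32-bit) alignment QImage expects.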
def _toqclass_helper(im):
data = None
colortable = None
# handle filename, if given instead of image name
if hasattr(im, "toUtf8"):
# FIXME - is this really the best way to do this?
if str is bytes:
im = unicode(im.toUtf8(), "utf-8")
else:
im = str(im.toUtf8(), "utf-8")
if isPath(im):
im = Image.open(im)
if im.mode == "1":
format = QImage.Format_Mono
elif im.mode == "L":
format = QImage.Format_Indexed8
colortable = []
for i in range(256):
colortable.append(rgb(i, i, i))
elif im.mode == "P":
format = QImage.Format_Indexed8
colortable = []
palette = im.getpalette()
for i in range(0, len(palette), 3):
colortable.append(rgb(*palette[i:i+3]))
elif im.mode == "RGB":
data = im.tobytes("raw", "BGRX")
format = QImage.Format_RGB32
elif im.mode == "RGBA":
try:
data = im.tobytes("raw", "BGRA")
except SystemError:
# workaround for earlier versions
r, g, b, a = im.split()
im = Image.merge("RGBA", (b, g, r, a))
format = QImage.Format_ARGB32
else:
raise ValueError("unsupported image mode %r" % im.mode)
__data = data or align8to32(im.tobytes(), im.size[0], im.mode)
return {
'data': __data, 'im': im, 'format': format, 'colortable': colortable
}
if qt_is_installed:
class ImageQt(QImage):
def __init__(self, im):
"""
A PIL Image wrapper for Qt. This is a subclass of PyQt's QImage
class.
:param im: A PIL Image object, or a file name (given either as Python
string or a PyQt string object).
"""
im_data = _toqclass_helper(im)
# must keep a reference, or Qt will crash!
# All QImage constructors that take data operate on an existing
# buffer, so this buffer has to hang on for the life of the image.
# Fixes https://github.com/python-pillow/Pillow/issues/1370
self.__data = im_data['data']
QImage.__init__(self,
self.__data, im_data['im'].size[0],
im_data['im'].size[1], im_data['format'])
if im_data['colortable']:
self.setColorTable(im_data['colortable'])
def toqimage(im):
return ImageQt(im)
def toqpixmap(im):
# # This doesn't work. For now using a dumb approach.
# im_data = _toqclass_helper(im)
# result = QPixmap(im_data['im'].size[0], im_data['im'].size[1])
# result.loadFromData(im_data['data'])
# Work around a rendering bug by converting RGB images to RGBA
# before building the QPixmap.
if im.mode == 'RGB':
im = im.convert('RGBA')
qimage = toqimage(im)
return QPixmap.fromImage(qimage)
| mit |
DavidPurcell/murano_temp | murano/tests/unit/policy/test_congress_rules.py | 1 | 10637 |
# Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os.path
import unittest2 as unittest
import yaml
from murano.common import uuidutils
from murano.dsl import helpers
from murano.dsl import package_loader
import murano.policy.congress_rules as congress
TENANT_ID = 'de305d5475b4431badb2eb6b9e546013'
class MockPackageLoader(package_loader.MuranoPackageLoader):
def __init__(self, rules):
"""Create rules like this: ['child->parent', 'child->parent2']."""
self._classes = {}
rules_dict = {}
for rule in rules:
split = rule.split('->')
rules_dict.setdefault(split[0], []).append(split[1])
classes = (self.get_class(cls, rules_dict) for cls in rules_dict)
self._package = MockPackage(classes)
def get_class(self, name, rules_dict):
if name in self._classes:
return self._classes[name]
parents = [self.get_class(parent, rules_dict)
for parent in rules_dict.get(name, [])]
result = MockClass({'name': name, 'declared_parents': parents})
self._classes[name] = result
return result
def register_package(self, package):
pass
def load_class_package(self, class_name, version_spec):
return self._package
def load_package(self, package_name, version_spec):
return self._package
def export_fixation_table(self):
pass
def import_fixation_table(self, fixations):
pass
def compact_fixation_table(self):
pass
class MockPackage(object):
def __init__(self, classes):
self._classes = {}
for cls in classes:
self._classes[cls.name] = cls
@property
def classes(self):
return self._classes.keys()
def find_class(self, name, *args, **kwargs):
return self._classes.get(name)
class MockClass(object):
def __init__(self, entries):
self.__dict__.update(entries)
def ancestors(self):
return helpers.traverse(self, lambda t: t.declared_parents)
class TestCongressRules(unittest.TestCase):
def _load_file(self, file_name):
model_file = os.path.join(
os.path.dirname(inspect.getfile(self.__class__)), file_name)
with open(model_file) as stream:
return yaml.safe_load(stream)
def _create_rules_str(self, model_file, package_loader=None):
model = self._load_file(model_file)
congress_rules = congress.CongressRulesManager()
rules = congress_rules.convert(model, package_loader,
tenant_id=TENANT_ID)
rules_str = ", \n".join(map(str, rules))
return rules_str
def test_transitive_closure(self):
closure = congress.CongressRulesManager.transitive_closure(
[(1, 2), (2, 3), (3, 4)])
self.assertIn((1, 4), closure)
self.assertIn((2, 4), closure)
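# For the chain 1 -> 2 -> 3 -> 4 above, the closure also adds (1, 3):
# every pair connected through intermediate edges becomes an edge.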
def test_empty_model(self):
congress_rules = congress.CongressRulesManager()
rules = congress_rules.convert(None)
self.assertEqual(0, len(rules))
def test_convert_simple_app(self):
rules_str = self._create_and_check_rules_str('model')
self.assertNotIn("instance.", rules_str)
def test_convert_model_two_instances(self):
rules_str = self._create_and_check_rules_str('model_two_instances')
self.assertNotIn("\"instances\"", rules_str)
def test_convert_model_with_relations(self):
rules_str = self._create_rules_str('model_with_relations.yaml')
self.assertNotIn(
'murano:properties+("50fa68ff-cd9a-4845-b573-2c80879d158d", '
'"server", "8ce94f23-f16a-40a1-9d9d-a877266c315d")', rules_str)
self.assertIn(
'murano:relationships+("50fa68ff-cd9a-4845-b573-2c80879d158d", '
'"8ce94f23-f16a-40a1-9d9d-a877266c315d", "server")', rules_str)
self.assertIn(
'murano:relationships+("0aafd67e-72e9-4ae0-bb62-fe724f77df2a", '
'"ed8df2b0-ddd2-4009-b3c9-2e7a368f3cb8", "instance")', rules_str)
def test_convert_model_transitive_relationships(self):
rules_str = self._create_rules_str('model_with_relations.yaml')
self.assertIn(
'murano:connected+("50fa68ff-cd9a-4845-b573-2c80879d158d", '
'"8ce94f23-f16a-40a1-9d9d-a877266c315d")', rules_str)
self.assertIn(
'murano:connected+("8ce94f23-f16a-40a1-9d9d-a877266c315d", '
'"fc6b8c41-166f-4fc9-a640-d82009e0a03d")', rules_str)
def test_convert_model_services_relationship(self):
rules_str = self._create_rules_str('model_with_relations.yaml')
self.assertIn(
'murano:relationships+("3409bdd0590e4c60b70fda5e6777ff96", '
'"8ce94f23-f16a-40a1-9d9d-a877266c315d", "services")', rules_str)
self.assertIn(
'murano:relationships+("3409bdd0590e4c60b70fda5e6777ff96", '
'"50fa68ff-cd9a-4845-b573-2c80879d158d", "services")', rules_str)
def test_convert_model_complex(self):
self._create_and_check_rules_str('model_complex')
def test_convert_renamed_app(self):
self._create_and_check_rules_str('model_renamed')
def test_parent_types(self):
# grand-parent
# / \
# parent1 parent2
# \ /
# io.murano.apps.linux.Git
package_loader = MockPackageLoader([
'io.murano.apps.linux.Git->parent1',
'io.murano.apps.linux.Git->parent2',
'parent1->grand-parent',
'parent2->grand-parent'
])
rules_str = self._create_rules_str('model.yaml', package_loader)
self.assertIn(
'murano:parent_types+("0c810278-7282-4e4a-9d69-7b4c36b6ce6f",'
' "parent1")', rules_str)
self.assertIn(
'murano:parent_types+("0c810278-7282-4e4a-9d69-7b4c36b6ce6f",'
' "parent2")', rules_str)
self.assertIn(
'murano:parent_types+("0c810278-7282-4e4a-9d69-7b4c36b6ce6f",'
' "grand-parent")', rules_str)
self.assertIn(
'murano:parent_types+("0c810278-7282-4e4a-9d69-7b4c36b6ce6f",'
' "io.murano.apps.linux.Git")', rules_str)
def test_to_dictionary(self):
"""test to_dictionary
If model contains object entry (not dict)
we try to convert to dict using 'to_dictionary' method.
"""
class Struct(object):
def __init__(self, d):
self.__dict__ = d
def to_dictionary(self):
return self.__dict__
def __getitem__(self, item):
return self.__dict__[item]
d = {'?': {'id': '1', 'type': 't1'},
'apps': [Struct({'?': {'id': '2', 'type': 't2'},
'instances': [Struct(
{'?': {'id': '3', 'type': 't3'}})]})]
}
model = Struct(d)
congress_rules = congress.CongressRulesManager()
tenant_id = uuidutils.generate_uuid()
rules = congress_rules.convert(model, tenant_id=tenant_id)
rules_str = ", \n".join(map(str, rules))
self.assertIn('murano:objects+("1", "{0}", "t1")'.format(tenant_id),
rules_str)
self.assertIn('murano:objects+("2", "1", "t2")', rules_str)
self.assertIn('murano:objects+("3", "2", "t3")', rules_str)
def test_environment_owner(self):
model = self._load_file("model.yaml")
congress_rules = congress.CongressRulesManager()
rules = congress_rules.convert(model, tenant_id='tenant1')
rules_str = ", \n".join(map(str, rules))
self.assertIn('murano:objects+("c86104748a0c4907b4c5981e6d3bce9f", '
'"tenant1", "io.murano.Environment")', rules_str)
def test_wordpress(self):
package_loader = MockPackageLoader([
'io.murano.Environment->io.murano.Object',
'io.murano.resources.NeutronNetwork->io.murano.resources.Network',
'io.murano.resources.Network->io.murano.Object',
'io.murano.databases.MySql->io.murano.databases.SqlDatabase',
'io.murano.databases.MySql->io.murano.Application',
'io.murano.databases.SqlDatabase->io.murano.Object',
'io.murano.resources.LinuxInstance->io.murano.resources.Instance',
'io.murano.resources.Instance->io.murano.Object',
'io.murano.Application->io.murano.Object',
'io.murano.apps.apache.ApacheHttpServer->io.murano.Application',
'io.murano.apps.ZabbixServer->io.murano.Application',
'io.murano.apps.ZabbixAgent->io.murano.Application',
'io.murano.apps.WordPress->io.murano.Application',
'io.murano.resources.LinuxMuranoInstance->'
'io.murano.resources.LinuxInstance'
])
self._create_and_check_rules_str('wordpress', package_loader)
def _create_and_check_rules_str(self, model_name, package_loader=None):
rules_str = self._create_rules_str(
'{0}.yaml'.format(model_name), package_loader)
self._check_expected_rules(rules_str,
'expected_rules_{0}.txt'.format(model_name))
return rules_str
def _check_expected_rules(self, rules_str, expected_rules_file_name):
expected_rules_file = os.path.join(
os.path.dirname(inspect.getfile(self.__class__)),
expected_rules_file_name)
s = ''
with open(expected_rules_file) as f:
for line in f:
line = line.rstrip('\n')
if line not in rules_str:
s += 'Expected rule not found:\n\t' + line + '\n'
if len(s) > 0:
self.fail(s)
def test_state_rule(self):
rules_str = self._create_rules_str('model.yaml')
self.assertIn(
'murano:states+("c86104748a0c4907b4c5981e6d3bce9f", "pending")',
rules_str)
| apache-2.0 |
xen0l/ansible | lib/ansible/modules/remote_management/oneview/oneview_san_manager.py | 146 | 7717 |
#!/usr/bin/python
# Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: oneview_san_manager
short_description: Manage OneView SAN Manager resources
description:
- Provides an interface to manage SAN Manager resources. Can create, update, or delete.
version_added: "2.4"
requirements:
- hpOneView >= 3.1.1
author:
- Felipe Bulsoni (@fgbulsoni)
- Thiago Miotto (@tmiotto)
- Adriane Cardozo (@adriane-cardozo)
options:
state:
description:
- Indicates the desired state for the SAN Manager resource.
- C(present) ensures data properties are compliant with OneView.
- C(absent) removes the resource from OneView, if it exists.
- C(connection_information_set) updates the connection information for the SAN Manager. This operation is non-idempotent.
default: present
choices: [present, absent, connection_information_set]
data:
description:
- List with SAN Manager properties.
required: true
extends_documentation_fragment:
- oneview
- oneview.validateetag
'''
EXAMPLES = '''
- name: Creates a Device Manager for the Brocade SAN provider with the given hostname and credentials
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: present
data:
providerDisplayName: Brocade Network Advisor
connectionInfo:
- name: Host
value: 172.18.15.1
- name: Port
value: 5989
- name: Username
value: username
- name: Password
value: password
- name: UseSsl
value: true
delegate_to: localhost
- name: Ensure a Device Manager for the Cisco SAN Provider is present
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: present
data:
name: 172.18.20.1
providerDisplayName: Cisco
connectionInfo:
- name: Host
value: 172.18.20.1
- name: SnmpPort
value: 161
- name: SnmpUserName
value: admin
- name: SnmpAuthLevel
value: authnopriv
- name: SnmpAuthProtocol
value: sha
- name: SnmpAuthString
value: password
delegate_to: localhost
- name: Sets the SAN Manager connection information
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: connection_information_set
data:
connectionInfo:
- name: Host
value: '172.18.15.1'
- name: Port
value: '5989'
- name: Username
value: 'username'
- name: Password
value: 'password'
- name: UseSsl
value: true
delegate_to: localhost
- name: Refreshes the SAN Manager
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: present
data:
name: 172.18.15.1
refreshState: RefreshPending
delegate_to: localhost
- name: Delete the SAN Manager recently created
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: absent
data:
name: '172.18.15.1'
delegate_to: localhost
'''
RETURN = '''
san_manager:
description: Has the OneView facts about the SAN Manager.
returned: On state 'present'. Can be null.
type: dict
'''
from ansible.module_utils.oneview import OneViewModuleBase, OneViewModuleValueError
class SanManagerModule(OneViewModuleBase):
MSG_CREATED = 'SAN Manager created successfully.'
MSG_UPDATED = 'SAN Manager updated successfully.'
MSG_DELETED = 'SAN Manager deleted successfully.'
MSG_ALREADY_PRESENT = 'SAN Manager is already present.'
MSG_ALREADY_ABSENT = 'SAN Manager is already absent.'
MSG_SAN_MANAGER_PROVIDER_DISPLAY_NAME_NOT_FOUND = "The provider '{0}' was not found."
argument_spec = dict(
state=dict(type='str', default='present', choices=['absent', 'present', 'connection_information_set']),
data=dict(type='dict', required=True)
)
def __init__(self):
super(SanManagerModule, self).__init__(additional_arg_spec=self.argument_spec, validate_etag_support=True)
self.resource_client = self.oneview_client.san_managers
def execute_module(self):
if self.data.get('connectionInfo'):
for connection_hash in self.data.get('connectionInfo'):
if connection_hash.get('name') == 'Host':
resource_name = connection_hash.get('value')
elif self.data.get('name'):
resource_name = self.data.get('name')
else:
msg = 'A "name" or "connectionInfo" must be provided inside the "data" field for this operation. '
msg += 'If a "connectionInfo" is provided, the "Host" name is considered as the "name" for the resource.'
raise OneViewModuleValueError(msg)
resource = self.resource_client.get_by_name(resource_name)
if self.state == 'present':
changed, msg, san_manager = self._present(resource)
return dict(changed=changed, msg=msg, ansible_facts=dict(san_manager=san_manager))
elif self.state == 'absent':
return self.resource_absent(resource, method='remove')
elif self.state == 'connection_information_set':
changed, msg, san_manager = self._connection_information_set(resource)
return dict(changed=changed, msg=msg, ansible_facts=dict(san_manager=san_manager))
def _present(self, resource):
if not resource:
provider_uri = self.data.get('providerUri', self._get_provider_uri_by_display_name(self.data))
return True, self.MSG_CREATED, self.resource_client.add(self.data, provider_uri)
else:
merged_data = resource.copy()
merged_data.update(self.data)
# Remove 'connectionInfo' from comparison, since it is not possible to validate it.
resource.pop('connectionInfo', None)
merged_data.pop('connectionInfo', None)
if self.compare(resource, merged_data):
return False, self.MSG_ALREADY_PRESENT, resource
else:
updated_san_manager = self.resource_client.update(resource=merged_data, id_or_uri=resource['uri'])
return True, self.MSG_UPDATED, updated_san_manager
def _connection_information_set(self, resource):
if not resource:
return self._present(resource)
else:
merged_data = resource.copy()
merged_data.update(self.data)
merged_data.pop('refreshState', None)
if not self.data.get('connectionInfo', None):
raise OneViewModuleValueError('A connectionInfo field is required for this operation.')
updated_san_manager = self.resource_client.update(resource=merged_data, id_or_uri=resource['uri'])
return True, self.MSG_UPDATED, updated_san_manager
def _get_provider_uri_by_display_name(self, data):
display_name = data.get('providerDisplayName')
provider_uri = self.resource_client.get_provider_uri(display_name)
if not provider_uri:
raise OneViewModuleValueError(self.MSG_SAN_MANAGER_PROVIDER_DISPLAY_NAME_NOT_FOUND.format(display_name))
return provider_uri
def main():
SanManagerModule().run()
if __name__ == '__main__':
main()
| gpl-3.0 |
clovertrail/cloudinit-bis | tests/unittests/test_builtin_handlers.py | 3 | 2366 |
"""Tests of the built-in user data handlers."""
import os
import shutil
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from . import helpers as test_helpers
from cloudinit import handlers
from cloudinit import helpers
from cloudinit import util
from cloudinit.handlers import upstart_job
from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE)
class TestBuiltins(test_helpers.FilesystemMockingTestCase):
def test_upstart_frequency_no_out(self):
c_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, c_root)
up_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, up_root)
paths = helpers.Paths({
'cloud_dir': c_root,
'upstart_dir': up_root,
})
freq = PER_ALWAYS
h = upstart_job.UpstartJobPartHandler(paths)
# No files should be written out when
# the frequency is not per-instance
h.handle_part('', handlers.CONTENT_START,
None, None, None)
h.handle_part('blah', 'text/upstart-job',
'test.conf', 'blah', freq)
h.handle_part('', handlers.CONTENT_END,
None, None, None)
self.assertEqual(0, len(os.listdir(up_root)))
def test_upstart_frequency_single(self):
# files should be written out when the frequency is per-instance
new_root = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, new_root)
freq = PER_INSTANCE
self.patchOS(new_root)
self.patchUtils(new_root)
paths = helpers.Paths({
'upstart_dir': "/etc/upstart",
})
upstart_job.SUITABLE_UPSTART = True
util.ensure_dir("/run")
util.ensure_dir("/etc/upstart")
with mock.patch.object(util, 'subp') as mockobj:
h = upstart_job.UpstartJobPartHandler(paths)
h.handle_part('', handlers.CONTENT_START,
None, None, None)
h.handle_part('blah', 'text/upstart-job',
'test.conf', 'blah', freq)
h.handle_part('', handlers.CONTENT_END,
None, None, None)
self.assertEqual(len(os.listdir('/etc/upstart')), 1)
mockobj.assert_called_once_with(
['initctl', 'reload-configuration'], capture=False)
|
gpl-3.0
|
minhphung171093/OpenERP_V8
|
openerp/addons/stock_dropshipping/__init__.py
|
223
|
1085
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import stock_dropshipping
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ralphbean/moksha
|
moksha/apps/docs/moksha/apps/docs.py
|
2
|
1876
|
# This file is part of Moksha.
# Copyright (C) 2008-2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`moksha.apps.docs.docs` - Moksha Documentation & Widget Demo App
=====================================================================
This Moksha app integrates your Sphinx documentation with the ToscaWidgets
WidgetBrowser, letting you write your documentation in reStructuredText and
easily expose it in your application via the `/apps/docs` URL. Because the
WidgetBrowser is integrated, you can also create widget demos and embed
them in your documentation.
.. seealso::
See the ToscaWidgets WidgetBrowser documentation for more information
http://toscawidgets.org/documentation/WidgetBrowser/widget_demo_howto.html
.. moduleauthor:: Luke Macken <[email protected]>
"""
import os
from tg import config
from tg.controllers import WSGIAppController
from pkg_resources import resource_filename
from moksha.widgetbrowser import WidgetBrowser
os.environ['TW_BROWSER_PREFIX'] = '/apps/docs'
docs = WSGIAppController(
WidgetBrowser(
template_dirs=[
resource_filename('moksha.widgetbrowser','templates')],
docs_dir=config.get('docs_dir', 'docs'),
full_stack=False,
interactive=False))
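# Illustrative usage sketch (hypothetical, not part of this module): in a
# TurboGears application, the `docs` WSGIAppController above would be exposed
# by attaching it to a controller attribute, e.g. an AppsController mounted
# at /apps, so the WidgetBrowser answers at /apps/docs:
#
#     class AppsController(BaseController):   # hypothetical controller
#         docs = docs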
|
apache-2.0
|
mikeh77/mi-instrument
|
mi/instrument/teledyne/test/test_driver.py
|
2
|
32748
|
"""
@package mi.instrument.teledyne.test.test_driver
@file marine-integrations/mi/instrument/teledyne/test/test_driver.py
@author Roger Unwin
@brief Driver for the teledyne family
Release notes:
"""
__author__ = 'Roger Unwin'
__license__ = 'Apache 2.0'
import time
import unittest
from mi.core.log import get_logger
log = get_logger()
from nose.plugins.attrib import attr
from mi.idk.unit_test import InstrumentDriverUnitTestCase
from mi.idk.unit_test import InstrumentDriverIntegrationTestCase
from mi.idk.unit_test import InstrumentDriverQualificationTestCase
from mi.idk.unit_test import InstrumentDriverPublicationTestCase
from mi.core.exceptions import NotImplementedException
from mi.instrument.teledyne.particles import DataParticleType
from mi.instrument.teledyne.driver import TeledyneProtocolState
from mi.instrument.teledyne.driver import TeledyneProtocolEvent
from mi.instrument.teledyne.driver import TeledyneParameter
DEFAULT_CLOCK_DIFF = 5
###############################################################################
# UNIT TESTS #
# Unit tests test the method calls and parameters using Mock. #
# 1. Pick a single method within the class. #
# 2. Create an instance of the class #
# 3. If the method to be tested tries to call out, over-ride the offending #
# method with a mock #
# 4. Using above, try to cover all paths through the functions #
# 5. Negative testing if at all possible. #
###############################################################################
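# Illustrative sketch of the pattern described above (hypothetical, not a real
# driver test): step 3 replaces the outbound call with a mock.Mock so step 4
# can exercise the method without touching hardware.
#
#     driver = SomeDriver()                          # hypothetical class
#     driver._do_cmd_resp = Mock(return_value='OK')  # over-ride the out-call
#     driver.ping()                                  # exercise one method
#     driver._do_cmd_resp.assert_called_once_with('ping')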
@attr('UNIT', group='mi')
class TeledyneUnitTest(InstrumentDriverUnitTestCase):
def setUp(self):
InstrumentDriverUnitTestCase.setUp(self)
###############################################################################
# INTEGRATION TESTS #
# Integration test test the direct driver / instrument interaction #
# but making direct calls via zeromq. #
# - Common Integration tests test the driver through the instrument agent #
# and common for all drivers (minimum requirement for ION ingestion) #
###############################################################################
@attr('INT', group='mi')
class TeledyneIntegrationTest(InstrumentDriverIntegrationTestCase):
def setUp(self):
InstrumentDriverIntegrationTestCase.setUp(self)
def _is_time_set(self, time_param, expected_time, time_format="%d %b %Y %H:%M:%S", tolerance=DEFAULT_CLOCK_DIFF):
"""
        Verify the reported instrument time is what we expect, within a given tolerance
@param time_param: driver parameter
@param expected_time: what the time should be in seconds since unix epoch or formatted time string
@param time_format: date time format
@param tolerance: how close to the set time should the get be?
"""
log.debug("Expected time un-formatted: %s", expected_time)
result_time = self.assert_get(time_param)
log.debug("RESULT TIME = " + str(result_time))
log.debug("TIME FORMAT = " + time_format)
result_time_struct = time.strptime(result_time, time_format)
converted_time = time.mktime(result_time_struct)
if isinstance(expected_time, float):
expected_time_struct = time.localtime(expected_time)
else:
expected_time_struct = time.strptime(expected_time, time_format)
log.debug("Current Time: %s, Expected Time: %s", time.strftime("%d %b %y %H:%M:%S", result_time_struct),
time.strftime("%d %b %y %H:%M:%S", expected_time_struct))
log.debug("Current Time: %s, Expected Time: %s, Tolerance: %s",
converted_time, time.mktime(expected_time_struct), tolerance)
# Verify the clock is set within the tolerance
return abs(converted_time - time.mktime(expected_time_struct)) <= tolerance
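    # Illustrative use of _is_time_set (assumption: called right after a
    # clock sync, comparing against the host's current time):
    #
    #     self.assertTrue(self._is_time_set(TeledyneParameter.TIME, time.time()))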
###
# Test scheduled events
###
def assert_compass_calibration(self):
"""
Verify a calibration particle was generated
"""
raise NotImplementedException()
def assert_clock_sync(self):
"""
Verify the clock is set to at least the current date
"""
dt = self.assert_get(TeledyneParameter.TIME)
lt = time.strftime("%Y/%m/%d,%H:%M:%S", time.gmtime(time.mktime(time.localtime())))
self.assertTrue(lt[:13].upper() in dt.upper())
def assert_acquire_status(self):
"""
        Assert that ACQUIRE_STATUS generates the expected async particles
"""
self.assert_async_particle_generation(DataParticleType.ADCP_COMPASS_CALIBRATION, self.assert_calibration,
timeout=60)
self.assert_async_particle_generation(DataParticleType.ADCP_ANCILLARY_SYSTEM_DATA, self.assert_ancillary_data,
timeout=60)
self.assert_async_particle_generation(DataParticleType.ADCP_TRANSMIT_PATH, self.assert_transmit_data,
timeout=60)
def assert_transmit_data(self, data_particle, verify_values=True):
"""
Verify an adcpt ps0 data particle
@param data_particle: ADCP_PS0DataParticle data particle
@param verify_values: bool, should we verify parameter values
"""
self.assert_data_particle_header(data_particle, DataParticleType.ADCP_TRANSMIT_PATH)
def assert_ancillary_data(self, data_particle, verify_values=True):
"""
Verify an adcp ps0 data particle
@param data_particle: ADCP_PS0DataParticle data particle
@param verify_values: bool, should we verify parameter values
"""
self.assert_data_particle_header(data_particle, DataParticleType.ADCP_ANCILLARY_SYSTEM_DATA)
def assert_calibration(self, data_particle, verify_values=True):
self.assert_data_particle_header(data_particle, DataParticleType.ADCP_COMPASS_CALIBRATION)
def test_scheduled_interval_clock_sync_command(self):
"""
Verify the scheduled clock sync is triggered and functions as expected
"""
self.assert_initialize_driver()
self.assert_set(TeledyneParameter.CLOCK_SYNCH_INTERVAL, '00:00:04')
time.sleep(10)
self.assert_set(TeledyneParameter.CLOCK_SYNCH_INTERVAL, '00:00:00')
self.assert_current_state(TeledyneProtocolState.COMMAND)
def test_scheduled_interval_acquire_status_command(self):
"""
        Verify the scheduled acquire status is triggered and functions as expected
"""
self.assert_initialize_driver()
self.assert_set(TeledyneParameter.GET_STATUS_INTERVAL, '00:00:04')
time.sleep(10)
self.assert_acquire_status()
self.assert_set(TeledyneParameter.GET_STATUS_INTERVAL, '00:00:00')
self.assert_current_state(TeledyneProtocolState.COMMAND)
        # With the interval cleared, acquire status should no longer fire.
        with self.assertRaises(AssertionError):
            self.assert_acquire_status()
    @unittest.skip('This test takes many hours to run')
def test_scheduled_acquire_status_autosample(self):
"""
Verify the scheduled acquire status is triggered and functions as expected
"""
self.assert_initialize_driver()
self.assert_current_state(TeledyneProtocolState.COMMAND)
self.assert_set(TeledyneParameter.GET_STATUS_INTERVAL, '00:00:04')
self.assert_driver_command(TeledyneProtocolEvent.START_AUTOSAMPLE)
self.assert_current_state(TeledyneProtocolState.AUTOSAMPLE)
time.sleep(10)
self.assert_acquire_status()
self.assert_driver_command(TeledyneProtocolEvent.STOP_AUTOSAMPLE)
self.assert_current_state(TeledyneProtocolState.COMMAND)
self.assert_set(TeledyneParameter.GET_STATUS_INTERVAL, '00:00:00')
self.assert_current_state(TeledyneProtocolState.COMMAND)
    @unittest.skip('This test takes many hours to run')
def test_scheduled_clock_sync_autosample(self):
"""
Verify the scheduled clock sync is triggered and functions as expected
"""
self.assert_initialize_driver()
self.assert_current_state(TeledyneProtocolState.COMMAND)
self.assert_set(TeledyneParameter.CLOCK_SYNCH_INTERVAL, '00:00:04')
self.assert_driver_command(TeledyneProtocolEvent.START_AUTOSAMPLE)
self.assert_current_state(TeledyneProtocolState.AUTOSAMPLE)
time.sleep(10)
self.assert_driver_command(TeledyneProtocolEvent.STOP_AUTOSAMPLE)
self.assert_current_state(TeledyneProtocolState.COMMAND)
self.assert_set(TeledyneParameter.CLOCK_SYNCH_INTERVAL, '00:00:00')
self.assert_current_state(TeledyneProtocolState.COMMAND)
    @unittest.skip('This test takes a long time to run')
def test_acquire_status(self):
"""
Verify the acquire_status command is functional
"""
self.assert_initialize_driver()
self.assert_driver_command(TeledyneProtocolEvent.ACQUIRE_STATUS)
self.assert_acquire_status()
# This will be called by test_set_range()
def _tst_set_xmit_power(self):
###
# test get set of a variety of parameter ranges
###
# XMIT_POWER: -- Int 0-255
self.assert_set(TeledyneParameter.XMIT_POWER, 0)
self.assert_set(TeledyneParameter.XMIT_POWER, 128)
self.assert_set(TeledyneParameter.XMIT_POWER, 254)
self.assert_set_exception(TeledyneParameter.XMIT_POWER, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.XMIT_POWER, 256)
self.assert_set_exception(TeledyneParameter.XMIT_POWER, -1)
self.assert_set_exception(TeledyneParameter.XMIT_POWER, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.XMIT_POWER, self._driver_parameters[TeledyneParameter.XMIT_POWER][self.VALUE])
# This will be called by test_set_range()
def _tst_set_speed_of_sound(self):
###
# test get set of a variety of parameter ranges
###
# SPEED_OF_SOUND: -- Int 1485 (1400 - 1600)
self.assert_set(TeledyneParameter.SPEED_OF_SOUND, 1400)
self.assert_set(TeledyneParameter.SPEED_OF_SOUND, 1450)
self.assert_set(TeledyneParameter.SPEED_OF_SOUND, 1500)
self.assert_set(TeledyneParameter.SPEED_OF_SOUND, 1550)
self.assert_set(TeledyneParameter.SPEED_OF_SOUND, 1600)
self.assert_set_exception(TeledyneParameter.SPEED_OF_SOUND, 0)
self.assert_set_exception(TeledyneParameter.SPEED_OF_SOUND, 1399)
self.assert_set_exception(TeledyneParameter.SPEED_OF_SOUND, 1601)
self.assert_set_exception(TeledyneParameter.SPEED_OF_SOUND, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.SPEED_OF_SOUND, -256)
self.assert_set_exception(TeledyneParameter.SPEED_OF_SOUND, -1)
self.assert_set_exception(TeledyneParameter.SPEED_OF_SOUND, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.SPEED_OF_SOUND,
self._driver_parameters[TeledyneParameter.SPEED_OF_SOUND][self.VALUE])
# This will be called by test_set_range()
def _tst_set_salinity(self):
###
# test get set of a variety of parameter ranges
###
# SALINITY: -- Int (0 - 40)
self.assert_set(TeledyneParameter.SALINITY, 1)
self.assert_set(TeledyneParameter.SALINITY, 10)
self.assert_set(TeledyneParameter.SALINITY, 20)
self.assert_set(TeledyneParameter.SALINITY, 30)
self.assert_set(TeledyneParameter.SALINITY, 40)
self.assert_set_exception(TeledyneParameter.SALINITY, "LEROY JENKINS")
# AssertionError: Unexpected exception: ES no value match (40 != -1)
self.assert_set_exception(TeledyneParameter.SALINITY, -1)
# AssertionError: Unexpected exception: ES no value match (35 != 41)
self.assert_set_exception(TeledyneParameter.SALINITY, 41)
self.assert_set_exception(TeledyneParameter.SALINITY, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.SALINITY, self._driver_parameters[TeledyneParameter.SALINITY][self.VALUE])
# This will be called by test_set_range()
def _tst_set_sensor_source(self):
###
# test get set of a variety of parameter ranges
###
        # SENSOR_SOURCE: -- a string of 7 positions, each 0 or 1.
        # Note: the instrument does not accept a 1 in position 6.
self.assert_set(TeledyneParameter.SENSOR_SOURCE, "0000000")
self.assert_set(TeledyneParameter.SENSOR_SOURCE, "1111101")
self.assert_set(TeledyneParameter.SENSOR_SOURCE, "1010101")
self.assert_set(TeledyneParameter.SENSOR_SOURCE, "0101000")
self.assert_set(TeledyneParameter.SENSOR_SOURCE, "1100100")
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.SENSOR_SOURCE, "1111101")
self.assert_set_exception(TeledyneParameter.SENSOR_SOURCE, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.SENSOR_SOURCE, 2)
self.assert_set_exception(TeledyneParameter.SENSOR_SOURCE, -1)
self.assert_set_exception(TeledyneParameter.SENSOR_SOURCE, "1111112")
self.assert_set_exception(TeledyneParameter.SENSOR_SOURCE, "11111112")
self.assert_set_exception(TeledyneParameter.SENSOR_SOURCE, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.SENSOR_SOURCE,
self._driver_parameters[TeledyneParameter.SENSOR_SOURCE][self.VALUE])
# This will be called by test_set_range()
def _tst_set_time_per_ensemble(self):
###
# test get set of a variety of parameter ranges
###
# TIME_PER_ENSEMBLE: -- String 01:00:00.00 (hrs:min:sec.sec/100)
self.assert_set(TeledyneParameter.TIME_PER_ENSEMBLE, "00:00:00.00")
self.assert_set(TeledyneParameter.TIME_PER_ENSEMBLE, "00:00:01.00")
self.assert_set(TeledyneParameter.TIME_PER_ENSEMBLE, "00:01:00.00")
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, '30:30:30.30')
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, '59:59:59.99')
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, 2)
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, -1)
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, '99:99:99.99')
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, '-1:-1:-1.+1')
self.assert_set_exception(TeledyneParameter.TIME_PER_ENSEMBLE, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.TIME_PER_ENSEMBLE,
self._driver_parameters[TeledyneParameter.TIME_PER_ENSEMBLE][self.VALUE])
# This will be called by test_set_range()
def _tst_set_pitch(self):
###
# test get set of a variety of parameter ranges
###
# PITCH: -- Int -6000 to 6000
self.assert_set(TeledyneParameter.PITCH, -6000)
self.assert_set(TeledyneParameter.PITCH, -4000)
self.assert_set(TeledyneParameter.PITCH, -2000)
self.assert_set(TeledyneParameter.PITCH, -1)
self.assert_set(TeledyneParameter.PITCH, 0)
self.assert_set(TeledyneParameter.PITCH, 1)
self.assert_set(TeledyneParameter.PITCH, 2000)
self.assert_set(TeledyneParameter.PITCH, 4000)
self.assert_set(TeledyneParameter.PITCH, 6000)
self.assert_set_exception(TeledyneParameter.PITCH, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.PITCH, -6001)
self.assert_set_exception(TeledyneParameter.PITCH, 6001)
self.assert_set_exception(TeledyneParameter.PITCH, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.PITCH, self._driver_parameters[TeledyneParameter.PITCH][self.VALUE])
# This will be called by test_set_range()
def _tst_set_roll(self):
###
# test get set of a variety of parameter ranges
###
# ROLL: -- Int -6000 to 6000
self.assert_set(TeledyneParameter.ROLL, -6000)
self.assert_set(TeledyneParameter.ROLL, -4000)
self.assert_set(TeledyneParameter.ROLL, -2000)
self.assert_set(TeledyneParameter.ROLL, -1)
self.assert_set(TeledyneParameter.ROLL, 0)
self.assert_set(TeledyneParameter.ROLL, 1)
self.assert_set(TeledyneParameter.ROLL, 2000)
self.assert_set(TeledyneParameter.ROLL, 4000)
self.assert_set(TeledyneParameter.ROLL, 6000)
self.assert_set_exception(TeledyneParameter.ROLL, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.ROLL, -6001)
self.assert_set_exception(TeledyneParameter.ROLL, 6001)
self.assert_set_exception(TeledyneParameter.ROLL, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.ROLL, self._driver_parameters[TeledyneParameter.ROLL][self.VALUE])
# This will be called by test_set_range()
def _tst_set_time_per_ping(self):
###
# test get set of a variety of parameter ranges
###
# TIME_PER_PING: '00:01.00'
self.assert_set(TeledyneParameter.TIME_PER_PING, '01:00.00')
self.assert_set(TeledyneParameter.TIME_PER_PING, '59:59.99')
self.assert_set(TeledyneParameter.TIME_PER_PING, '30:30.30')
self.assert_set_exception(TeledyneParameter.TIME_PER_PING, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.TIME_PER_PING, 2)
self.assert_set_exception(TeledyneParameter.TIME_PER_PING, -1)
self.assert_set_exception(TeledyneParameter.TIME_PER_PING, '99:99.99')
self.assert_set_exception(TeledyneParameter.TIME_PER_PING, '-1:-1.+1')
self.assert_set_exception(TeledyneParameter.TIME_PER_PING, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.TIME_PER_PING,
self._driver_parameters[TeledyneParameter.TIME_PER_PING][self.VALUE])
# This will be called by test_set_range()
def _tst_set_false_target_threshold(self):
###
# test get set of a variety of parameter ranges
###
# FALSE_TARGET_THRESHOLD: string of 0-255,0-255
self.assert_set(TeledyneParameter.FALSE_TARGET_THRESHOLD, "000,000")
self.assert_set(TeledyneParameter.FALSE_TARGET_THRESHOLD, "255,000")
self.assert_set(TeledyneParameter.FALSE_TARGET_THRESHOLD, "000,255")
self.assert_set(TeledyneParameter.FALSE_TARGET_THRESHOLD, "255,255")
self.assert_set_exception(TeledyneParameter.FALSE_TARGET_THRESHOLD, "256,000")
self.assert_set_exception(TeledyneParameter.FALSE_TARGET_THRESHOLD, "256,255")
self.assert_set_exception(TeledyneParameter.FALSE_TARGET_THRESHOLD, "000,256")
self.assert_set_exception(TeledyneParameter.FALSE_TARGET_THRESHOLD, "255,256")
self.assert_set_exception(TeledyneParameter.FALSE_TARGET_THRESHOLD, -1)
self.assert_set_exception(TeledyneParameter.FALSE_TARGET_THRESHOLD, "LEROY JENKINS")
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.FALSE_TARGET_THRESHOLD,
self._driver_parameters[TeledyneParameter.FALSE_TARGET_THRESHOLD][self.VALUE])
# This will be called by test_set_range()
def _tst_set_bandwidth_control(self):
###
# test get set of a variety of parameter ranges
###
# BANDWIDTH_CONTROL: 0/1,
self.assert_set(TeledyneParameter.BANDWIDTH_CONTROL, 1)
self.assert_set_exception(TeledyneParameter.BANDWIDTH_CONTROL, -1)
self.assert_set_exception(TeledyneParameter.BANDWIDTH_CONTROL, 2)
self.assert_set_exception(TeledyneParameter.BANDWIDTH_CONTROL, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.BANDWIDTH_CONTROL, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.BANDWIDTH_CONTROL,
self._driver_parameters[TeledyneParameter.BANDWIDTH_CONTROL][self.VALUE])
# This will be called by test_set_range()
def _tst_set_correlation_threshold(self):
###
# test get set of a variety of parameter ranges
###
# CORRELATION_THRESHOLD: int 064, 0 - 255
self.assert_set(TeledyneParameter.CORRELATION_THRESHOLD, 50)
self.assert_set(TeledyneParameter.CORRELATION_THRESHOLD, 100)
self.assert_set(TeledyneParameter.CORRELATION_THRESHOLD, 150)
self.assert_set(TeledyneParameter.CORRELATION_THRESHOLD, 200)
self.assert_set(TeledyneParameter.CORRELATION_THRESHOLD, 255)
self.assert_set_exception(TeledyneParameter.CORRELATION_THRESHOLD, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.CORRELATION_THRESHOLD, -256)
self.assert_set_exception(TeledyneParameter.CORRELATION_THRESHOLD, -1)
self.assert_set_exception(TeledyneParameter.CORRELATION_THRESHOLD, 3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.CORRELATION_THRESHOLD,
self._driver_parameters[TeledyneParameter.CORRELATION_THRESHOLD][self.VALUE])
# This will be called by test_set_range()
def _tst_set_error_velocity_threshold(self):
###
# test get set of a variety of parameter ranges
###
        # ERROR_VELOCITY_THRESHOLD: int (0-5000 mm/s). NOTE: the instrument
        # enforces 0-9999, and decimal values are truncated to ints.
self.assert_set(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, 0)
self.assert_set(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, 128)
self.assert_set(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, 2000)
self.assert_set(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, 5000)
self.assert_set_exception(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, -1)
self.assert_set_exception(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, 10000)
self.assert_set_exception(TeledyneParameter.ERROR_VELOCITY_THRESHOLD, -3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.ERROR_VELOCITY_THRESHOLD,
self._driver_parameters[TeledyneParameter.ERROR_VELOCITY_THRESHOLD][self.VALUE])
# This will be called by test_set_range()
def _tst_set_blank_after_transmit(self):
###
# test get set of a variety of parameter ranges
###
# BLANK_AFTER_TRANSMIT: int 704, (0 - 9999)
self.assert_set(TeledyneParameter.BLANK_AFTER_TRANSMIT, 0)
self.assert_set(TeledyneParameter.BLANK_AFTER_TRANSMIT, 128)
self.assert_set(TeledyneParameter.BLANK_AFTER_TRANSMIT, 9999)
self.assert_set_exception(TeledyneParameter.BLANK_AFTER_TRANSMIT, "LEROY JENKINS")
self.assert_set_exception(TeledyneParameter.BLANK_AFTER_TRANSMIT, -1)
self.assert_set_exception(TeledyneParameter.BLANK_AFTER_TRANSMIT, 10000)
self.assert_set_exception(TeledyneParameter.BLANK_AFTER_TRANSMIT, -3.1415926)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.BLANK_AFTER_TRANSMIT,
self._driver_parameters[TeledyneParameter.BLANK_AFTER_TRANSMIT][self.VALUE])
# This will be called by test_set_range()
def _tst_set_clip_data_past_bottom(self):
###
# test get set of a variety of parameter ranges
###
# CLIP_DATA_PAST_BOTTOM: True/False,
self.assert_set(TeledyneParameter.CLIP_DATA_PAST_BOTTOM, True)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.CLIP_DATA_PAST_BOTTOM,
self._driver_parameters[TeledyneParameter.CLIP_DATA_PAST_BOTTOM][self.VALUE])
# This will be called by test_set_range()
def _tst_set_receiver_gain_select(self):
###
# test get set of a variety of parameter ranges
###
# RECEIVER_GAIN_SELECT: (0/1),
self.assert_set(TeledyneParameter.RECEIVER_GAIN_SELECT, 0)
self.assert_set(TeledyneParameter.RECEIVER_GAIN_SELECT, 1)
self.assert_set_exception(TeledyneParameter.RECEIVER_GAIN_SELECT, 2)
self.assert_set_exception(TeledyneParameter.RECEIVER_GAIN_SELECT, -1)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.RECEIVER_GAIN_SELECT,
self._driver_parameters[TeledyneParameter.RECEIVER_GAIN_SELECT][self.VALUE])
# This will be called by test_set_range()
def _tst_set_number_of_depth_cells(self):
###
# test get set of a variety of parameter ranges
###
# NUMBER_OF_DEPTH_CELLS: -- int (1-255) 100,
self.assert_set(TeledyneParameter.NUMBER_OF_DEPTH_CELLS, 1)
self.assert_set(TeledyneParameter.NUMBER_OF_DEPTH_CELLS, 128)
self.assert_set_exception(TeledyneParameter.NUMBER_OF_DEPTH_CELLS, 256)
self.assert_set_exception(TeledyneParameter.NUMBER_OF_DEPTH_CELLS, 0)
self.assert_set_exception(TeledyneParameter.NUMBER_OF_DEPTH_CELLS, -1)
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.NUMBER_OF_DEPTH_CELLS,
self._driver_parameters[TeledyneParameter.NUMBER_OF_DEPTH_CELLS][self.VALUE])
# This will be called by test_set_range()
def _tst_set_pings_per_ensemble(self):
###
# test get set of a variety of parameter ranges
###
# PINGS_PER_ENSEMBLE: -- int (0-16384) 1,
self.assert_set(TeledyneParameter.PINGS_PER_ENSEMBLE, 0)
self.assert_set(TeledyneParameter.PINGS_PER_ENSEMBLE, 16384)
self.assert_set_exception(TeledyneParameter.PINGS_PER_ENSEMBLE, 16385)
self.assert_set_exception(TeledyneParameter.PINGS_PER_ENSEMBLE, -1)
self.assert_set_exception(TeledyneParameter.PINGS_PER_ENSEMBLE, 32767)
self.assert_set_exception(TeledyneParameter.PINGS_PER_ENSEMBLE, 3.1415926)
self.assert_set_exception(TeledyneParameter.PINGS_PER_ENSEMBLE, "LEROY JENKINS")
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.PINGS_PER_ENSEMBLE,
self._driver_parameters[TeledyneParameter.PINGS_PER_ENSEMBLE][self.VALUE])
# This will be called by test_set_range()
def _tst_set_depth_cell_size(self):
###
# test get set of a variety of parameter ranges
###
# DEPTH_CELL_SIZE: int 80 - 3200
self.assert_set(TeledyneParameter.DEPTH_CELL_SIZE, 80)
self.assert_set_exception(TeledyneParameter.DEPTH_CELL_SIZE, 3201)
self.assert_set_exception(TeledyneParameter.DEPTH_CELL_SIZE, -1)
self.assert_set_exception(TeledyneParameter.DEPTH_CELL_SIZE, 2)
self.assert_set_exception(TeledyneParameter.DEPTH_CELL_SIZE, 3.1415926)
self.assert_set_exception(TeledyneParameter.DEPTH_CELL_SIZE, "LEROY JENKINS")
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.DEPTH_CELL_SIZE,
self._driver_parameters[TeledyneParameter.DEPTH_CELL_SIZE][self.VALUE])
# This will be called by test_set_range()
def _tst_set_transmit_length(self):
###
# test get set of a variety of parameter ranges
###
# TRANSMIT_LENGTH: int 0 to 3200
self.assert_set(TeledyneParameter.TRANSMIT_LENGTH, 80)
self.assert_set(TeledyneParameter.TRANSMIT_LENGTH, 3200)
self.assert_set_exception(TeledyneParameter.TRANSMIT_LENGTH, 3201)
self.assert_set_exception(TeledyneParameter.TRANSMIT_LENGTH, -1)
self.assert_set_exception(TeledyneParameter.TRANSMIT_LENGTH, 3.1415926)
self.assert_set_exception(TeledyneParameter.TRANSMIT_LENGTH, "LEROY JENKINS")
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.TRANSMIT_LENGTH,
self._driver_parameters[TeledyneParameter.TRANSMIT_LENGTH][self.VALUE])
# This will be called by test_set_range()
def _tst_set_ping_weight(self):
###
# test get set of a variety of parameter ranges
###
# PING_WEIGHT: (0/1),
self.assert_set(TeledyneParameter.PING_WEIGHT, 0)
self.assert_set(TeledyneParameter.PING_WEIGHT, 1)
self.assert_set_exception(TeledyneParameter.PING_WEIGHT, 2)
self.assert_set_exception(TeledyneParameter.PING_WEIGHT, -1)
self.assert_set_exception(TeledyneParameter.PING_WEIGHT, 3.1415926)
self.assert_set_exception(TeledyneParameter.PING_WEIGHT, "LEROY JENKINS")
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.PING_WEIGHT,
self._driver_parameters[TeledyneParameter.PING_WEIGHT][self.VALUE])
# This will be called by test_set_range()
def _tst_set_ambiguity_velocity(self):
###
# test get set of a variety of parameter ranges
###
# AMBIGUITY_VELOCITY: int 2 - 700
self.assert_set(TeledyneParameter.AMBIGUITY_VELOCITY, 2)
self.assert_set(TeledyneParameter.AMBIGUITY_VELOCITY, 333)
self.assert_set(TeledyneParameter.AMBIGUITY_VELOCITY, 700)
self.assert_set_exception(TeledyneParameter.AMBIGUITY_VELOCITY, 0)
self.assert_set_exception(TeledyneParameter.AMBIGUITY_VELOCITY, 1)
self.assert_set_exception(TeledyneParameter.AMBIGUITY_VELOCITY, -1)
self.assert_set_exception(TeledyneParameter.AMBIGUITY_VELOCITY, 3.1415926)
self.assert_set_exception(TeledyneParameter.AMBIGUITY_VELOCITY, "LEROY JENKINS")
#
# Reset to good value.
#
self.assert_set(TeledyneParameter.AMBIGUITY_VELOCITY,
self._driver_parameters[TeledyneParameter.AMBIGUITY_VELOCITY][self.VALUE])
    # Read-only parameters: setting any of these should raise an exception.
    # @unittest.skip('This test takes many hours to run')
def test_set_parameter_test(self):
self.assert_initialize_driver()
self.assert_set_exception(TeledyneParameter.HEADING_ALIGNMENT, +10000)
self.assert_set_exception(TeledyneParameter.HEADING_ALIGNMENT, +40000)
self.assert_set_exception(TeledyneParameter.ENSEMBLE_PER_BURST, 600)
self.assert_set_exception(TeledyneParameter.ENSEMBLE_PER_BURST, 70000)
self.assert_set_exception(TeledyneParameter.LATENCY_TRIGGER, 1)
self.assert_set_exception(TeledyneParameter.DATA_STREAM_SELECTION, 10)
self.assert_set_exception(TeledyneParameter.DATA_STREAM_SELECTION, 19)
self.assert_set_exception(TeledyneParameter.BUFFERED_OUTPUT_PERIOD, "00:00:11")
###############################################################################
# QUALIFICATION TESTS #
# Device specific qualification tests are for #
# testing device specific capabilities #
###############################################################################
@attr('QUAL', group='mi')
class TeledyneQualificationTest(InstrumentDriverQualificationTestCase):
def setUp(self):
InstrumentDriverQualificationTestCase.setUp(self)
###############################################################################
# PUBLICATION TESTS #
# Device specific publication tests are for #
# testing device specific capabilities #
###############################################################################
@attr('PUB', group='mi')
class TeledynePublicationTest(InstrumentDriverPublicationTestCase):
def setUp(self):
InstrumentDriverPublicationTestCase.setUp(self)
|
bsd-2-clause
|
vaidap/zulip
|
zerver/webhooks/teamcity/tests.py
|
43
|
2927
|
# -*- coding: utf-8 -*-
import ujson
from zerver.models import Recipient
from zerver.lib.test_classes import WebhookTestCase
class TeamcityHookTests(WebhookTestCase):
STREAM_NAME = 'teamcity'
URL_TEMPLATE = u"/api/v1/external/teamcity?stream={stream}&api_key={api_key}"
SUBJECT = u"Project :: Compile"
FIXTURE_DIR_NAME = 'teamcity'
def test_teamcity_success(self):
# type: () -> None
expected_message = u"Project :: Compile build 5535 - CL 123456 was successful! :thumbsup:\nDetails: [changes](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952&tab=buildChangesDiv), [build log](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952)"
self.send_and_test_stream_message('success', self.SUBJECT, expected_message)
def test_teamcity_broken(self):
# type: () -> None
expected_message = u"Project :: Compile build 5535 - CL 123456 is broken with status Exit code 1 (new)! :thumbsdown:\nDetails: [changes](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952&tab=buildChangesDiv), [build log](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952)"
self.send_and_test_stream_message('broken', self.SUBJECT, expected_message)
def test_teamcity_failure(self):
# type: () -> None
expected_message = u"Project :: Compile build 5535 - CL 123456 is still broken with status Exit code 1! :thumbsdown:\nDetails: [changes](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952&tab=buildChangesDiv), [build log](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952)"
self.send_and_test_stream_message('failure', self.SUBJECT, expected_message)
def test_teamcity_fixed(self):
# type: () -> None
expected_message = u"Project :: Compile build 5535 - CL 123456 has been fixed! :thumbsup:\nDetails: [changes](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952&tab=buildChangesDiv), [build log](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952)"
self.send_and_test_stream_message('fixed', self.SUBJECT, expected_message)
def test_teamcity_personal(self):
# type: () -> None
expected_message = u"Your personal build of Project :: Compile build 5535 - CL 123456 is broken with status Exit code 1 (new)! :thumbsdown:\nDetails: [changes](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952&tab=buildChangesDiv), [build log](http://teamcity/viewLog.html?buildTypeId=Project_Compile&buildId=19952)"
payload = ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, 'personal')))
self.client_post(self.url, payload, content_type="application/json")
msg = self.get_last_message()
self.assertEqual(msg.content, expected_message)
self.assertEqual(msg.recipient.type, Recipient.PERSONAL)
|
apache-2.0
|
Chilledheart/chromium
|
build/android/pylib/monkey/test_runner.py
|
26
|
4186
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs a monkey test on a single device."""
import logging
import random
from devil.android import device_errors
from devil.android.sdk import intent
from pylib import constants
from pylib.base import base_test_result
from pylib.base import base_test_runner
_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package
class TestRunner(base_test_runner.BaseTestRunner):
"""A TestRunner instance runs a monkey test on a single device."""
def __init__(self, test_options, device, _):
super(TestRunner, self).__init__(device, None)
self._options = test_options
self._package = constants.PACKAGE_INFO[self._options.package].package
self._activity = constants.PACKAGE_INFO[self._options.package].activity
def _LaunchMonkeyTest(self):
"""Runs monkey test for a given package.
Returns:
Output from the monkey command on the device.
"""
timeout_ms = self._options.event_count * self._options.throttle * 1.5
cmd = ['monkey',
'-p %s' % self._package,
' '.join(['-c %s' % c for c in self._options.category]),
'--throttle %d' % self._options.throttle,
'-s %d' % (self._options.seed or random.randint(1, 100)),
'-v ' * self._options.verbose_count,
'--monitor-native-crashes',
'--kill-process-after-error',
self._options.extra_args,
'%d' % self._options.event_count]
return self.device.RunShellCommand(' '.join(cmd), timeout=timeout_ms)
def RunTest(self, test_name):
"""Run a Monkey test on the device.
Args:
test_name: String to use for logging the test result.
Returns:
A tuple of (TestRunResults, retry).
"""
self.device.StartActivity(
intent.Intent(package=self._package, activity=self._activity,
action='android.intent.action.MAIN'),
blocking=True, force_stop=True)
# Chrome crashes are not always caught by Monkey test runner.
# Verify Chrome has the same PID before and after the test.
before_pids = self.device.GetPids(self._package)
# Run the test.
output = ''
if before_pids:
if len(before_pids.get(self._package, [])) > 1:
raise Exception(
'At most one instance of process %s expected but found pids: '
'%s' % (self._package, before_pids))
output = '\n'.join(self._LaunchMonkeyTest())
after_pids = self.device.GetPids(self._package)
crashed = True
if not self._package in before_pids:
logging.error('Failed to start the process.')
elif not self._package in after_pids:
logging.error('Process %s has died.', before_pids[self._package])
elif before_pids[self._package] != after_pids[self._package]:
logging.error('Detected process restart %s -> %s',
before_pids[self._package], after_pids[self._package])
else:
crashed = False
results = base_test_result.TestRunResults()
success_pattern = 'Events injected: %d' % self._options.event_count
if success_pattern in output and not crashed:
result = base_test_result.BaseTestResult(
test_name, base_test_result.ResultType.PASS, log=output)
else:
result = base_test_result.BaseTestResult(
test_name, base_test_result.ResultType.FAIL, log=output)
if 'chrome' in self._options.package:
logging.warning('Starting MinidumpUploadService...')
# TODO(jbudorick): Update this after upstreaming.
minidump_intent = intent.Intent(
action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
package=self._package,
activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
try:
self.device.RunShellCommand(
['am', 'startservice'] + minidump_intent.am_args,
as_root=True, check_return=True)
except device_errors.CommandFailedError:
logging.exception('Failed to start MinidumpUploadService')
results.AddResult(result)
return results, False
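# Illustrative example (hypothetical values): with event_count=1000,
# throttle=100, seed=42 and verbose_count=1, _LaunchMonkeyTest above builds
# a shell command roughly like
#
#     monkey -p <package> --throttle 100 -s 42 -v \
#         --monitor-native-crashes --kill-process-after-error 1000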
|
bsd-3-clause
|
eawag-rdm/ckanapi
|
ckanapi/tests/mock/mock_ckan.py
|
7
|
2184
|
import json
import cgi
import csv
from wsgiref.util import setup_testing_defaults
from wsgiref.simple_server import make_server
def mock_ckan(environ, start_response):
status = '200 OK'
headers = [
('Content-type', 'application/json;charset=utf-8'),
]
if environ['PATH_INFO'] == '/api/action/site_read':
start_response(status, headers)
return [json.dumps(True).encode('utf-8')]
if environ['PATH_INFO'] == '/api/action/organization_list':
start_response(status, headers)
return [json.dumps({
"help": "none",
"success": True,
"result": ["aa", "bb", "cc"]
}).encode('utf-8')]
if environ['PATH_INFO'] == '/api/action/test_echo_user_agent':
start_response(status, headers)
return [json.dumps({
"help": "none",
"success": True,
"result": environ['HTTP_USER_AGENT']
}).encode('utf-8')]
if environ['PATH_INFO'] == '/api/action/test_echo_content_type':
start_response(status, headers)
return [json.dumps({
"help": "none",
"success": True,
"result": environ['CONTENT_TYPE']
}).encode('utf-8')]
if environ['PATH_INFO'] == '/api/action/test_upload':
fs = cgi.FieldStorage(
fp=environ['wsgi.input'],
environ=environ,
keep_blank_values=True,
)
records = list(csv.reader(fs['upload'].file))
start_response(status, headers)
return [json.dumps({
"help": "none",
"success": True,
"result": {
'option': fs.getvalue('option'),
'last_row': records[-1],
},
}).encode('utf-8')]
if environ['PATH_INFO'].startswith('/api/action/'):
start_response(status, headers)
return [json.dumps({
"help": "none",
"success": False,
"error": {'__type': 'Not Found Error'},
}).encode('utf-8')]
start_response('404 Not Found', headers)
return []
if __name__ == '__main__':
    httpd = make_server('localhost', 8901, mock_ckan)
    httpd.serve_forever()
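# Illustrative client call against the running mock (stdlib only, Python 3):
#
#     import urllib.request
#     body = urllib.request.urlopen(
#         'http://localhost:8901/api/action/site_read').read()
#     assert body == b'true'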
|
mit
|
elfi-dev/elfi
|
elfi/loader.py
|
2
|
5463
|
"""Loading makes precomputed data accessible to nodes."""
import numpy as np
from elfi.utils import get_sub_seed, observed_name
class Loader:
"""Base class for Loaders."""
@classmethod
def load(cls, context, compiled_net, batch_index):
"""Load precomputed data into nodes of `compiled_net`.
Parameters
----------
context : ComputationContext
compiled_net : nx.DiGraph
batch_index : int
Returns
-------
net : nx.DiGraph
Loaded net, which is the `compiled_net` that has been loaded with data that
can depend on the batch_index.
"""
raise NotImplementedError
class ObservedLoader(Loader): # noqa: D101
@classmethod
def load(cls, context, compiled_net, batch_index):
"""Add the observed data to the `compiled_net`.
Parameters
----------
context : ComputationContext
compiled_net : nx.DiGraph
batch_index : int
Returns
-------
net : nx.DiGraph
Loaded net, which is the `compiled_net` that has been loaded with data that
can depend on the batch_index.
"""
observed = compiled_net.graph['observed']
for name, obs in observed.items():
obs_name = observed_name(name)
if not compiled_net.has_node(obs_name):
continue
compiled_net.nodes[obs_name].update(dict(output=obs))
del compiled_net.nodes[obs_name]['operation']
del compiled_net.graph['observed']
return compiled_net
class AdditionalNodesLoader(Loader): # noqa: D101
@classmethod
def load(cls, context, compiled_net, batch_index):
"""Add runtime information to instruction nodes.
Parameters
----------
context : ComputationContext
compiled_net : nx.DiGraph
batch_index : int
Returns
-------
net : nx.DiGraph
Loaded net, which is the `compiled_net` that has been loaded with data that
can depend on the batch_index.
"""
meta_dict = {
'batch_index': batch_index,
'submission_index': context.num_submissions,
'master_seed': context.seed,
'model_name': compiled_net.graph['name']
}
details = dict(_batch_size=context.batch_size, _meta=meta_dict)
for node, v in details.items():
if node in compiled_net:
compiled_net.nodes[node]['output'] = v
return compiled_net
class PoolLoader(Loader): # noqa: D101
@classmethod
def load(cls, context, compiled_net, batch_index):
"""Add data from the pools in `context`.
Parameters
----------
context : ComputationContext
compiled_net : nx.DiGraph
batch_index : int
Returns
-------
net : nx.DiGraph
Loaded net, which is the `compiled_net` that has been loaded with data that
can depend on the batch_index.
"""
if context.pool is None:
return compiled_net
batch = context.pool.get_batch(batch_index)
for node in context.pool.stores:
if not compiled_net.has_node(node):
continue
elif node in batch:
compiled_net.nodes[node]['output'] = batch[node]
compiled_net.nodes[node].pop('operation', None)
elif node not in compiled_net.graph['outputs']:
# We are missing this item from the batch so add the output to the
# requested outputs so that it can be stored when the results arrive
compiled_net.graph['outputs'].add(node)
return compiled_net
# We use a getter function so that the local process np.random doesn't get
# copied to the loaded_net.
def get_np_random():
"""Get RandomState."""
return np.random.mtrand._rand
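# Note: RandomStateLoader below stores this getter under the 'operation' key
# when seed == 'global', so each worker resolves np.random.mtrand._rand in
# its own process at execution time rather than unpickling this process's
# random state.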
class RandomStateLoader(Loader): # noqa: D101
@classmethod
def load(cls, context, compiled_net, batch_index):
"""Add an instance of random state to the corresponding node.
Parameters
----------
context : ComputationContext
compiled_net : nx.DiGraph
batch_index : int
Returns
-------
net : nx.DiGraph
Loaded net, which is the `compiled_net` that has been loaded with data that
can depend on the batch_index.
"""
key = 'output'
seed = context.seed
if seed == 'global':
# Get the random_state of the respective worker by delaying the evaluation
random_state = get_np_random
key = 'operation'
elif isinstance(seed, (int, np.int32, np.uint32)):
# TODO: In the future, we could use https://pypi.python.org/pypi/randomstate to enable
# jumps?
cache = context.caches.get('sub_seed', None)
sub_seed = get_sub_seed(seed, batch_index, cache=cache)
random_state = np.random.RandomState(sub_seed)
else:
raise ValueError("Seed of type {} is not supported".format(seed))
# Assign the random state or its acquirer function to the corresponding node
node_name = '_random_state'
if compiled_net.has_node(node_name):
compiled_net.nodes[node_name][key] = random_state
return compiled_net
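# Illustrative sketch (an assumption, not elfi's public API): the loaders
# above are applied in sequence to the same compiled net, e.g.
#
#     net = compiled_net
#     for loader in (ObservedLoader, AdditionalNodesLoader,
#                    PoolLoader, RandomStateLoader):
#         net = loader.load(context, net, batch_index)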
|
bsd-3-clause
|
saneyuki/servo
|
tests/wpt/web-platform-tests/tools/third_party/hyperframe/test/test_frames.py
|
24
|
25009
|
# -*- coding: utf-8 -*-
from hyperframe.frame import (
Frame, Flags, DataFrame, PriorityFrame, RstStreamFrame, SettingsFrame,
PushPromiseFrame, PingFrame, GoAwayFrame, WindowUpdateFrame, HeadersFrame,
ContinuationFrame, AltSvcFrame, ExtensionFrame
)
from hyperframe.exceptions import (
UnknownFrameError, InvalidPaddingError, InvalidFrameError
)
import pytest
def decode_frame(frame_data):
f, length = Frame.parse_frame_header(frame_data[:9])
f.parse_body(memoryview(frame_data[9:9 + length]))
assert 9 + length == len(frame_data)
return f
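# Illustrative round trip through the helper above (mirrors the tests below):
#
#     f = DataFrame(1)
#     f.data = b'testdata'
#     assert decode_frame(f.serialize()).data == b'testdata'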
class TestGeneralFrameBehaviour(object):
def test_base_frame_ignores_flags(self):
f = Frame(stream_id=0)
flags = f.parse_flags(0xFF)
assert not flags
assert isinstance(flags, Flags)
def test_base_frame_cant_serialize(self):
f = Frame(stream_id=0)
with pytest.raises(NotImplementedError):
f.serialize()
def test_base_frame_cant_parse_body(self):
data = b''
f = Frame(stream_id=0)
with pytest.raises(NotImplementedError):
f.parse_body(data)
def test_parse_frame_header_unknown_type_strict(self):
with pytest.raises(UnknownFrameError) as excinfo:
Frame.parse_frame_header(
b'\x00\x00\x59\xFF\x00\x00\x00\x00\x01',
strict=True
)
exception = excinfo.value
assert exception.frame_type == 0xFF
assert exception.length == 0x59
assert str(exception) == (
"UnknownFrameError: Unknown frame type 0xFF received, "
"length 89 bytes"
)
def test_parse_frame_header_ignore_first_bit_of_stream_id(self):
s = b'\x00\x00\x00\x06\x01\x80\x00\x00\x00'
f, _ = Frame.parse_frame_header(s)
assert f.stream_id == 0
def test_parse_frame_header_unknown_type(self):
f, l = Frame.parse_frame_header(
b'\x00\x00\x59\xFF\x00\x00\x00\x00\x01'
)
assert f.type == 0xFF
assert l == 0x59
assert isinstance(f, ExtensionFrame)
assert f.stream_id == 1
def test_flags_are_persisted(self):
f, l = Frame.parse_frame_header(
b'\x00\x00\x59\xFF\x09\x00\x00\x00\x01'
)
assert f.type == 0xFF
assert l == 0x59
assert f.flag_byte == 0x09
def test_parse_body_unknown_type(self):
f = decode_frame(
b'\x00\x00\x0C\xFF\x00\x00\x00\x00\x01hello world!'
)
assert f.body == b'hello world!'
assert f.body_len == 12
assert f.stream_id == 1
def test_can_round_trip_unknown_frames(self):
frame_data = b'\x00\x00\x0C\xFF\x00\x00\x00\x00\x01hello world!'
f = decode_frame(frame_data)
assert f.serialize() == frame_data
def test_repr(self, monkeypatch):
f = Frame(stream_id=0)
monkeypatch.setattr(Frame, "serialize_body", lambda _: b"body")
assert repr(f) == "Frame(Stream: 0; Flags: None): 626f6479"
monkeypatch.setattr(Frame, "serialize_body", lambda _: b"A"*25)
assert repr(f) == (
"Frame(Stream: 0; Flags: None): {}...".format("41"*10)
)
def test_cannot_parse_invalid_frame_header(self):
with pytest.raises(InvalidFrameError):
Frame.parse_frame_header(b'\x00\x00\x08\x00\x01\x00\x00\x00')
class TestDataFrame(object):
payload = b'\x00\x00\x08\x00\x01\x00\x00\x00\x01testdata'
payload_with_padding = (
b'\x00\x00\x13\x00\x09\x00\x00\x00\x01\x0Atestdata' + b'\0' * 10
)
def test_data_frame_has_correct_flags(self):
f = DataFrame(1)
flags = f.parse_flags(0xFF)
assert flags == set([
'END_STREAM', 'PADDED'
])
@pytest.mark.parametrize('data', [
b'testdata',
memoryview(b'testdata')
])
def test_data_frame_serializes_properly(self, data):
f = DataFrame(1)
f.flags = set(['END_STREAM'])
f.data = data
s = f.serialize()
assert s == self.payload
def test_data_frame_with_padding_serializes_properly(self):
f = DataFrame(1)
f.flags = set(['END_STREAM', 'PADDED'])
f.data = b'testdata'
f.pad_length = 10
s = f.serialize()
assert s == self.payload_with_padding
def test_data_frame_parses_properly(self):
f = decode_frame(self.payload)
assert isinstance(f, DataFrame)
assert f.flags == set(['END_STREAM'])
assert f.pad_length == 0
assert f.data == b'testdata'
assert f.body_len == 8
def test_data_frame_with_padding_parses_properly(self):
f = decode_frame(self.payload_with_padding)
assert isinstance(f, DataFrame)
assert f.flags == set(['END_STREAM', 'PADDED'])
assert f.pad_length == 10
assert f.data == b'testdata'
assert f.body_len == 19
def test_data_frame_with_invalid_padding_errors(self):
with pytest.raises(InvalidFrameError):
decode_frame(self.payload_with_padding[:9])
def test_data_frame_with_padding_calculates_flow_control_len(self):
f = DataFrame(1)
f.flags = set(['PADDED'])
f.data = b'testdata'
f.pad_length = 10
assert f.flow_controlled_length == 19
def test_data_frame_zero_length_padding_calculates_flow_control_len(self):
f = DataFrame(1)
f.flags = set(['PADDED'])
f.data = b'testdata'
f.pad_length = 0
assert f.flow_controlled_length == len(b'testdata') + 1
def test_data_frame_without_padding_calculates_flow_control_len(self):
f = DataFrame(1)
f.data = b'testdata'
assert f.flow_controlled_length == 8
def test_data_frame_comes_on_a_stream(self):
with pytest.raises(ValueError):
DataFrame(0)
def test_long_data_frame(self):
f = DataFrame(1)
# Use more than 256 bytes of data to force setting higher bits.
f.data = b'\x01' * 300
data = f.serialize()
# The top three bytes should be numerically equal to 300. That means
# they should read 00 01 2C.
# The weird double index trick is to ensure this test behaves equally
# on Python 2 and Python 3.
assert data[0] == b'\x00'[0]
assert data[1] == b'\x01'[0]
assert data[2] == b'\x2C'[0]
def test_body_length_behaves_correctly(self):
f = DataFrame(1)
f.data = b'\x01' * 300
# Initially the body length is zero. For now this is incidental, but
# I'm going to test it to ensure that the behaviour is codified. We
# should change this test if we change that.
assert f.body_len == 0
f.serialize()
assert f.body_len == 300
def test_data_frame_with_invalid_padding_fails_to_parse(self):
# This frame has a padding length of 6 bytes, but a total length of
# only 5.
data = b'\x00\x00\x05\x00\x0b\x00\x00\x00\x01\x06\x54\x65\x73\x74'
with pytest.raises(InvalidPaddingError):
decode_frame(data)
def test_data_frame_with_no_length_parses(self):
# Fixes issue with empty data frames raising InvalidPaddingError.
f = DataFrame(1)
f.data = b''
data = f.serialize()
new_frame = decode_frame(data)
assert new_frame.data == b''
class TestPriorityFrame(object):
payload = b'\x00\x00\x05\x02\x00\x00\x00\x00\x01\x80\x00\x00\x04\x40'
def test_priority_frame_has_no_flags(self):
f = PriorityFrame(1)
flags = f.parse_flags(0xFF)
assert flags == set()
assert isinstance(flags, Flags)
def test_priority_frame_default_serializes_properly(self):
f = PriorityFrame(1)
assert f.serialize() == (
b'\x00\x00\x05\x02\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00'
)
def test_priority_frame_with_all_data_serializes_properly(self):
f = PriorityFrame(1)
f.depends_on = 0x04
f.stream_weight = 64
f.exclusive = True
assert f.serialize() == self.payload
def test_priority_frame_with_all_data_parses_properly(self):
f = decode_frame(self.payload)
assert isinstance(f, PriorityFrame)
assert f.flags == set()
assert f.depends_on == 4
assert f.stream_weight == 64
assert f.exclusive is True
assert f.body_len == 5
def test_priority_frame_comes_on_a_stream(self):
with pytest.raises(ValueError):
PriorityFrame(0)
def test_short_priority_frame_errors(self):
with pytest.raises(InvalidFrameError):
decode_frame(self.payload[:-2])
class TestRstStreamFrame(object):
def test_rst_stream_frame_has_no_flags(self):
f = RstStreamFrame(1)
flags = f.parse_flags(0xFF)
assert not flags
assert isinstance(flags, Flags)
def test_rst_stream_frame_serializes_properly(self):
f = RstStreamFrame(1)
f.error_code = 420
s = f.serialize()
assert s == b'\x00\x00\x04\x03\x00\x00\x00\x00\x01\x00\x00\x01\xa4'
def test_rst_stream_frame_parses_properly(self):
s = b'\x00\x00\x04\x03\x00\x00\x00\x00\x01\x00\x00\x01\xa4'
f = decode_frame(s)
assert isinstance(f, RstStreamFrame)
assert f.flags == set()
assert f.error_code == 420
assert f.body_len == 4
def test_rst_stream_frame_comes_on_a_stream(self):
with pytest.raises(ValueError):
RstStreamFrame(0)
def test_rst_stream_frame_must_have_body_length_four(self):
f = RstStreamFrame(1)
with pytest.raises(ValueError):
f.parse_body(b'\x01')
class TestSettingsFrame(object):
serialized = (
b'\x00\x00\x24\x04\x01\x00\x00\x00\x00' + # Frame header
b'\x00\x01\x00\x00\x10\x00' + # HEADER_TABLE_SIZE
b'\x00\x02\x00\x00\x00\x00' + # ENABLE_PUSH
b'\x00\x03\x00\x00\x00\x64' + # MAX_CONCURRENT_STREAMS
b'\x00\x04\x00\x00\xFF\xFF' + # INITIAL_WINDOW_SIZE
b'\x00\x05\x00\x00\x40\x00' + # MAX_FRAME_SIZE
b'\x00\x06\x00\x00\xFF\xFF' # MAX_HEADER_LIST_SIZE
)
settings = {
SettingsFrame.HEADER_TABLE_SIZE: 4096,
SettingsFrame.ENABLE_PUSH: 0,
SettingsFrame.MAX_CONCURRENT_STREAMS: 100,
SettingsFrame.INITIAL_WINDOW_SIZE: 65535,
SettingsFrame.MAX_FRAME_SIZE: 16384,
SettingsFrame.MAX_HEADER_LIST_SIZE: 65535,
}
def test_settings_frame_has_only_one_flag(self):
f = SettingsFrame()
flags = f.parse_flags(0xFF)
assert flags == set(['ACK'])
def test_settings_frame_serializes_properly(self):
f = SettingsFrame()
f.parse_flags(0xFF)
f.settings = self.settings
s = f.serialize()
assert s == self.serialized
def test_settings_frame_with_settings(self):
f = SettingsFrame(settings=self.settings)
assert f.settings == self.settings
def test_settings_frame_without_settings(self):
f = SettingsFrame()
assert f.settings == {}
def test_settings_frame_with_ack(self):
f = SettingsFrame(flags=('ACK',))
assert 'ACK' in f.flags
def test_settings_frame_ack_and_settings(self):
with pytest.raises(ValueError):
SettingsFrame(settings=self.settings, flags=('ACK',))
def test_settings_frame_parses_properly(self):
f = decode_frame(self.serialized)
assert isinstance(f, SettingsFrame)
assert f.flags == set(['ACK'])
assert f.settings == self.settings
assert f.body_len == 36
def test_settings_frames_never_have_streams(self):
with pytest.raises(ValueError):
SettingsFrame(stream_id=1)
def test_short_settings_frame_errors(self):
with pytest.raises(InvalidFrameError):
decode_frame(self.serialized[:-2])
class TestPushPromiseFrame(object):
def test_push_promise_frame_flags(self):
f = PushPromiseFrame(1)
flags = f.parse_flags(0xFF)
assert flags == set(['END_HEADERS', 'PADDED'])
def test_push_promise_frame_serializes_properly(self):
f = PushPromiseFrame(1)
f.flags = set(['END_HEADERS'])
f.promised_stream_id = 4
f.data = b'hello world'
s = f.serialize()
assert s == (
b'\x00\x00\x0F\x05\x04\x00\x00\x00\x01' +
b'\x00\x00\x00\x04' +
b'hello world'
)
def test_push_promise_frame_parses_properly(self):
s = (
b'\x00\x00\x0F\x05\x04\x00\x00\x00\x01' +
b'\x00\x00\x00\x04' +
b'hello world'
)
f = decode_frame(s)
assert isinstance(f, PushPromiseFrame)
assert f.flags == set(['END_HEADERS'])
assert f.promised_stream_id == 4
assert f.data == b'hello world'
assert f.body_len == 15
def test_push_promise_frame_with_invalid_padding_fails_to_parse(self):
# This frame has a padding length of 6 bytes, but a total length of
# only 5.
data = b'\x00\x00\x05\x05\x08\x00\x00\x00\x01\x06\x54\x65\x73\x74'
with pytest.raises(InvalidPaddingError):
decode_frame(data)
def test_push_promise_frame_with_no_length_parses(self):
# Fixes issue with empty data frames raising InvalidPaddingError.
f = PushPromiseFrame(1)
f.data = b''
data = f.serialize()
new_frame = decode_frame(data)
assert new_frame.data == b''
def test_short_push_promise_errors(self):
s = (
b'\x00\x00\x0F\x05\x04\x00\x00\x00\x01' +
b'\x00\x00\x00' # One byte short
)
with pytest.raises(InvalidFrameError):
decode_frame(s)
class TestPingFrame(object):
def test_ping_frame_has_only_one_flag(self):
f = PingFrame()
flags = f.parse_flags(0xFF)
assert flags == set(['ACK'])
def test_ping_frame_serializes_properly(self):
f = PingFrame()
f.parse_flags(0xFF)
f.opaque_data = b'\x01\x02'
s = f.serialize()
assert s == (
b'\x00\x00\x08\x06\x01\x00\x00\x00\x00\x01\x02\x00\x00\x00\x00\x00'
b'\x00'
)
def test_no_more_than_8_octets(self):
f = PingFrame()
f.opaque_data = b'\x01\x02\x03\x04\x05\x06\x07\x08\x09'
with pytest.raises(ValueError):
f.serialize()
def test_ping_frame_parses_properly(self):
s = (
b'\x00\x00\x08\x06\x01\x00\x00\x00\x00\x01\x02\x00\x00\x00\x00\x00'
b'\x00'
)
f = decode_frame(s)
assert isinstance(f, PingFrame)
assert f.flags == set(['ACK'])
assert f.opaque_data == b'\x01\x02\x00\x00\x00\x00\x00\x00'
assert f.body_len == 8
def test_ping_frame_never_has_a_stream(self):
with pytest.raises(ValueError):
PingFrame(stream_id=1)
def test_ping_frame_has_no_more_than_body_length_8(self):
f = PingFrame()
with pytest.raises(ValueError):
f.parse_body(b'\x01\x02\x03\x04\x05\x06\x07\x08\x09')
def test_ping_frame_has_no_less_than_body_length_8(self):
f = PingFrame()
with pytest.raises(ValueError):
f.parse_body(b'\x01\x02\x03\x04\x05\x06\x07')
class TestGoAwayFrame(object):
def test_go_away_has_no_flags(self):
f = GoAwayFrame()
flags = f.parse_flags(0xFF)
assert not flags
assert isinstance(flags, Flags)
def test_goaway_serializes_properly(self):
f = GoAwayFrame()
f.last_stream_id = 64
f.error_code = 32
f.additional_data = b'hello'
s = f.serialize()
assert s == (
b'\x00\x00\x0D\x07\x00\x00\x00\x00\x00' + # Frame header
b'\x00\x00\x00\x40' + # Last Stream ID
b'\x00\x00\x00\x20' + # Error Code
b'hello' # Additional data
)
def test_goaway_frame_parses_properly(self):
s = (
b'\x00\x00\x0D\x07\x00\x00\x00\x00\x00' + # Frame header
b'\x00\x00\x00\x40' + # Last Stream ID
b'\x00\x00\x00\x20' + # Error Code
b'hello' # Additional data
)
f = decode_frame(s)
assert isinstance(f, GoAwayFrame)
assert f.flags == set()
assert f.additional_data == b'hello'
assert f.body_len == 13
def test_goaway_frame_never_has_a_stream(self):
with pytest.raises(ValueError):
GoAwayFrame(stream_id=1)
def test_short_goaway_frame_errors(self):
s = (
b'\x00\x00\x0D\x07\x00\x00\x00\x00\x00' + # Frame header
b'\x00\x00\x00\x40' + # Last Stream ID
b'\x00\x00\x00' # short Error Code
)
with pytest.raises(InvalidFrameError):
decode_frame(s)
class TestWindowUpdateFrame(object):
def test_window_update_has_no_flags(self):
f = WindowUpdateFrame(0)
flags = f.parse_flags(0xFF)
assert not flags
assert isinstance(flags, Flags)
def test_window_update_serializes_properly(self):
f = WindowUpdateFrame(0)
f.window_increment = 512
s = f.serialize()
assert s == b'\x00\x00\x04\x08\x00\x00\x00\x00\x00\x00\x00\x02\x00'
def test_windowupdate_frame_parses_properly(self):
s = b'\x00\x00\x04\x08\x00\x00\x00\x00\x00\x00\x00\x02\x00'
f = decode_frame(s)
assert isinstance(f, WindowUpdateFrame)
assert f.flags == set()
assert f.window_increment == 512
assert f.body_len == 4
def test_short_windowupdate_frame_errors(self):
s = b'\x00\x00\x04\x08\x00\x00\x00\x00\x00\x00\x00\x02' # -1 byte
with pytest.raises(InvalidFrameError):
decode_frame(s)
class TestHeadersFrame(object):
def test_headers_frame_flags(self):
f = HeadersFrame(1)
flags = f.parse_flags(0xFF)
assert flags == set(['END_STREAM', 'END_HEADERS',
'PADDED', 'PRIORITY'])
def test_headers_frame_serializes_properly(self):
f = HeadersFrame(1)
f.flags = set(['END_STREAM', 'END_HEADERS'])
f.data = b'hello world'
s = f.serialize()
assert s == (
b'\x00\x00\x0B\x01\x05\x00\x00\x00\x01' +
b'hello world'
)
def test_headers_frame_parses_properly(self):
s = (
b'\x00\x00\x0B\x01\x05\x00\x00\x00\x01' +
b'hello world'
)
f = decode_frame(s)
assert isinstance(f, HeadersFrame)
assert f.flags == set(['END_STREAM', 'END_HEADERS'])
assert f.data == b'hello world'
assert f.body_len == 11
def test_headers_frame_with_priority_parses_properly(self):
# This test also tests that we can receive a HEADERS frame with no
# actual headers on it. This is technically possible.
s = (
b'\x00\x00\x05\x01\x20\x00\x00\x00\x01' +
b'\x80\x00\x00\x04\x40'
)
f = decode_frame(s)
assert isinstance(f, HeadersFrame)
assert f.flags == set(['PRIORITY'])
assert f.data == b''
assert f.depends_on == 4
assert f.stream_weight == 64
assert f.exclusive is True
assert f.body_len == 5
def test_headers_frame_with_priority_serializes_properly(self):
        # This test also checks that we can serialize a HEADERS frame with
        # no actual headers on it. This is technically possible.
s = (
b'\x00\x00\x05\x01\x20\x00\x00\x00\x01' +
b'\x80\x00\x00\x04\x40'
)
f = HeadersFrame(1)
f.flags = set(['PRIORITY'])
f.data = b''
f.depends_on = 4
f.stream_weight = 64
f.exclusive = True
assert f.serialize() == s
def test_headers_frame_with_invalid_padding_fails_to_parse(self):
# This frame has a padding length of 6 bytes, but a total length of
# only 5.
data = b'\x00\x00\x05\x01\x08\x00\x00\x00\x01\x06\x54\x65\x73\x74'
with pytest.raises(InvalidPaddingError):
decode_frame(data)
def test_headers_frame_with_no_length_parses(self):
# Fixes issue with empty data frames raising InvalidPaddingError.
f = HeadersFrame(1)
f.data = b''
data = f.serialize()
new_frame = decode_frame(data)
assert new_frame.data == b''
class TestContinuationFrame(object):
def test_continuation_frame_flags(self):
f = ContinuationFrame(1)
flags = f.parse_flags(0xFF)
assert flags == set(['END_HEADERS'])
def test_continuation_frame_serializes(self):
f = ContinuationFrame(1)
f.parse_flags(0x04)
f.data = b'hello world'
s = f.serialize()
assert s == (
b'\x00\x00\x0B\x09\x04\x00\x00\x00\x01' +
b'hello world'
)
def test_continuation_frame_parses_properly(self):
s = b'\x00\x00\x0B\x09\x04\x00\x00\x00\x01hello world'
f = decode_frame(s)
assert isinstance(f, ContinuationFrame)
assert f.flags == set(['END_HEADERS'])
assert f.data == b'hello world'
assert f.body_len == 11
class TestAltSvcFrame(object):
payload_with_origin = (
b'\x00\x00\x31' # Length
b'\x0A' # Type
b'\x00' # Flags
b'\x00\x00\x00\x00' # Stream ID
b'\x00\x0B' # Origin len
b'example.com' # Origin
b'h2="alt.example.com:8000", h2=":443"' # Field Value
)
payload_without_origin = (
b'\x00\x00\x13' # Length
b'\x0A' # Type
b'\x00' # Flags
b'\x00\x00\x00\x01' # Stream ID
b'\x00\x00' # Origin len
b'' # Origin
b'h2=":8000"; ma=60' # Field Value
)
payload_with_origin_and_stream = (
b'\x00\x00\x36' # Length
b'\x0A' # Type
b'\x00' # Flags
b'\x00\x00\x00\x01' # Stream ID
b'\x00\x0B' # Origin len
b'example.com' # Origin
b'Alt-Svc: h2=":443"; ma=2592000; persist=1' # Field Value
)
def test_altsvc_frame_flags(self):
f = AltSvcFrame(stream_id=0)
flags = f.parse_flags(0xFF)
assert flags == set()
def test_altsvc_frame_with_origin_serializes_properly(self):
f = AltSvcFrame(stream_id=0)
f.origin = b'example.com'
f.field = b'h2="alt.example.com:8000", h2=":443"'
s = f.serialize()
assert s == self.payload_with_origin
def test_altsvc_frame_with_origin_parses_properly(self):
f = decode_frame(self.payload_with_origin)
assert isinstance(f, AltSvcFrame)
assert f.origin == b'example.com'
assert f.field == b'h2="alt.example.com:8000", h2=":443"'
assert f.body_len == 49
assert f.stream_id == 0
def test_altsvc_frame_without_origin_serializes_properly(self):
f = AltSvcFrame(stream_id=1, origin=b'', field=b'h2=":8000"; ma=60')
s = f.serialize()
assert s == self.payload_without_origin
def test_altsvc_frame_without_origin_parses_properly(self):
f = decode_frame(self.payload_without_origin)
assert isinstance(f, AltSvcFrame)
assert f.origin == b''
assert f.field == b'h2=":8000"; ma=60'
assert f.body_len == 19
assert f.stream_id == 1
def test_altsvc_frame_without_origin_parses_with_good_repr(self):
f = decode_frame(self.payload_without_origin)
assert repr(f) == (
"AltSvcFrame(Stream: 1; Flags: None): 000068323d223a383030..."
)
def test_altsvc_frame_with_origin_and_stream_serializes_properly(self):
# This frame is not valid, but we allow it to be serialized anyway.
f = AltSvcFrame(stream_id=1)
f.origin = b'example.com'
f.field = b'Alt-Svc: h2=":443"; ma=2592000; persist=1'
assert f.serialize() == self.payload_with_origin_and_stream
def test_short_altsvc_frame_errors(self):
with pytest.raises(InvalidFrameError):
decode_frame(self.payload_with_origin[:12])
with pytest.raises(InvalidFrameError):
decode_frame(self.payload_with_origin[:10])
def test_altsvc_with_unicode_origin_fails(self):
with pytest.raises(ValueError):
AltSvcFrame(
stream_id=0, origin=u'hello', field=b'h2=":8000"; ma=60'
)
def test_altsvc_with_unicode_field_fails(self):
with pytest.raises(ValueError):
AltSvcFrame(
stream_id=0, origin=b'hello', field=u'h2=":8000"; ma=60'
)
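# A minimal round-trip sketch of the pattern the tests above exercise
# (assuming the same decode_frame helper used throughout this module):
# serialize a frame, decode the bytes back, and compare recovered fields.
def _example_roundtrip():
    f = PingFrame()
    f.opaque_data = b'\x01\x02'
    decoded = decode_frame(f.serialize())
    assert isinstance(decoded, PingFrame)
    assert decoded.opaque_data.startswith(b'\x01\x02')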
|
mpl-2.0
|
neteler/QGIS
|
python/plugins/processing/algs/gdal/ClipByExtent.py
|
3
|
3290
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ClipByExtent.py
---------------------
Date : September 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.core.parameters import ParameterRaster
from processing.core.parameters import ParameterExtent
from processing.core.parameters import ParameterString
from processing.core.outputs import OutputRaster
from processing.algs.gdal.GdalUtils import GdalUtils
class ClipByExtent(GdalAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
NO_DATA = 'NO_DATA'
PROJWIN = 'PROJWIN'
EXTRA = 'EXTRA'
def defineCharacteristics(self):
self.name = 'Clip raster by extent'
self.group = '[GDAL] Extraction'
self.addParameter(ParameterRaster(
self.INPUT, self.tr('Input layer'), False))
self.addParameter(ParameterString(self.NO_DATA,
self.tr("Nodata value, leave blank to take the nodata value from input"),
''))
self.addParameter(ParameterExtent(self.PROJWIN, self.tr('Clipping extent')))
self.addParameter(ParameterString(self.EXTRA,
self.tr('Additional creation parameters'), '', optional=True))
self.addOutput(OutputRaster(self.OUTPUT, self.tr('Output layer')))
def processAlgorithm(self, progress):
out = self.getOutputValue(self.OUTPUT)
noData = str(self.getParameterValue(self.NO_DATA))
projwin = str(self.getParameterValue(self.PROJWIN))
extra = str(self.getParameterValue(self.EXTRA))
arguments = []
arguments.append('-of')
arguments.append(GdalUtils.getFormatShortNameFromFilename(out))
if len(noData) > 0:
arguments.append('-a_nodata')
arguments.append(noData)
regionCoords = projwin.split(',')
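        # The extent string is "xmin,xmax,ymin,ymax", while gdal_translate's
        # -projwin expects ulx uly lrx lry, hence the 0, 3, 1, 2 ordering.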
arguments.append('-projwin')
arguments.append(regionCoords[0])
arguments.append(regionCoords[3])
arguments.append(regionCoords[1])
arguments.append(regionCoords[2])
if len(extra) > 0:
arguments.append(extra)
arguments.append(self.getParameterValue(self.INPUT))
arguments.append(out)
GdalUtils.runGdal(['gdal_translate',
GdalUtils.escapeAndJoin(arguments)], progress)
|
gpl-2.0
|
tarzan0820/addons-yelizariev
|
sale_mediation_custom/wizard/sale_case.py
|
16
|
1959
|
from openerp import api,models,fields,exceptions
from openerp.osv import fields as old_fields
from openerp.osv import osv
from openerp.tools.translate import _
def _get_active_id(self):
return self._context.get('active_id')
def _get_active_ids(self):
return self._context.get('active_ids')
SIGNAL_SELECTION = [
('fake_lead_signal', 'lead'),
('new', 'new'),
('qualified', 'qualified'),
('proposal_created', 'proposal_created'),
('proposal_sent', 'proposal_sent'),
('proposal_confirmed', 'proposal_confirmed'),
]
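# Rebuild the record's workflow from scratch and replay the signals in order
# up to (and including) new_signal, restoring the expected workflow state.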
def fix_sale_case_workflow(sale_case, new_signal):
print 'fix_sale_case_workflow', sale_case, new_signal
sale_case.delete_workflow()
sale_case.create_workflow()
for signal,label in SIGNAL_SELECTION:
sale_case.signal_workflow(signal)
if signal == new_signal:
break
class create_proposal_lead(models.TransientModel):
_name = 'sale_mediation_custom.create_proposal_lead'
sale_case_id = fields.Many2one('crm.lead', default=_get_active_id)
proposal_template_id = fields.Many2one('website_proposal.template', string='Quotation template')
@api.multi
def action_apply(self):
assert len(self.ids) == 1, 'This option should only be used for a single id at a time.'
#context.pop('default_state', False)
for r in self:
assert r.proposal_template_id, 'You have to specify template'
            sale_order = r.sale_case_id.create_sale_order()
#message = _("Opportunity has been <b>converted</b> to the quotation <em>%s</em>.") % (sale_order.name)
#r.sale_case_id.message_post(body=message)
## CREATE proposal
proposal_id = self.env['website_proposal.template'].with_context(default_state='draft').create_proposal(r.proposal_template_id.id, r.sale_case_id.id)
## SAVE new status and sale_order
r.sale_case_id.signal_workflow('proposal_created')
|
lgpl-3.0
|
KevinPike/pycache
|
tests/integration/test_cache_router.py
|
1
|
2491
|
from time import sleep
import unittest
import subprocess
import os
from hamcrest import assert_that, is_
from requests import request
current_directory = os.path.dirname(os.path.realpath(__file__))
class TestCacheRouter(unittest.TestCase):
"""Test running a external server and pointing the cache at it"""
def setUp(self):
self.p = subprocess.Popen(['python', "/".join([current_directory, 'run.py'])])
self.external = 'http://localhost:1123/'
self.cache = 'http://localhost:9090/'
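        # Poll until the external server accepts connections (up to ~5 s);
        # the POST afterwards presumably resets the test server's state.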
tries = 0
while True:
try:
request('GET', self.external)
break
except Exception:
tries += 1
if tries > 10:
break
sleep(.5)
request('POST', self.external)
def tearDown(self):
self.p.terminate()
# Make a couple requests, make sure we get the fib numbers we expected
def test_no_store(self):
fib = [0, 1, 1, 2, 3]
for i in fib:
response = request('GET', self.cache, headers={'cache-control': 'no-store'})
assert_that(response.status_code, is_(200))
assert_that(response.text, is_(str(i)))
def test_max_age(self):
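        # max-age=0 forces a fresh fetch (the backend serves successive
        # Fibonacci numbers), while a larger max-age is served from cache.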
response = request('GET', self.cache, headers={'cache-control': 'max-age=0'})
assert_that(response.status_code, is_(200))
assert_that(response.text, is_(str(0)))
response = request('GET', self.cache, headers={'cache-control': 'max-age=5'})
assert_that(response.status_code, is_(200))
assert_that(response.text, is_(str(0)))
response = request('GET', self.cache, headers={'cache-control': 'max-age=5'})
assert_that(response.status_code, is_(200))
assert_that(response.text, is_(str(0)))
response = request('GET', self.cache, headers={'cache-control': 'max-age=0'})
assert_that(response.status_code, is_(200))
assert_that(response.text, is_(str(1)))
        for _ in range(3):
response = request('GET', self.cache, headers={'cache-control': 'max-age=10'})
assert_that(response.status_code, is_(200))
assert_that(response.text, is_(str(1)))
def test_no_cache_control_headers(self):
fib = [0, 1, 1, 2, 3]
for i in fib:
response = request('GET', self.cache)
assert_that(response.status_code, is_(200))
assert_that(response.text, is_(str(i)))
|
mit
|
quodlibet/quodlibet
|
quodlibet/plugins/__init__.py
|
2
|
17198
|
# Copyright 2012 - 2020 Christoph Reiter, Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from typing import Optional, Iterable
from quodlibet import _
from quodlibet import config
from quodlibet import util
from quodlibet.qltk.ccb import ConfigCheckButton
from quodlibet.util.config import ConfigProxy
from quodlibet.util.dprint import print_d
from quodlibet.util.modulescanner import ModuleScanner
def init(folders=None, disable_plugins=False):
"""folders: list of paths to look for plugins
disable_plugins: disables all plugins, but does not forget which
plugins are enabled.
"""
if disable_plugins:
folders = []
manager = PluginManager.instance = PluginManager(folders)
return manager
def quit():
PluginManager.instance.save()
PluginManager.instance.quit()
PluginManager.instance = None
class PluginImportException(Exception):
desc = ""
def __init__(self, desc, *args, **kwargs):
super().__init__(desc)
self.desc = desc
def should_show(self):
"""If the error description should be shown to the user"""
return True
class PluginNotSupportedError(PluginImportException):
"""To hide the plugin (e.g. on Windows)"""
def __init__(self, msg=None):
msg = "not supported: %s" % (msg or "unknown reason")
super().__init__(msg)
def should_show(self):
return False
class MissingModulePluginException(PluginImportException):
"""Consistent Exception for reporting missing modules for plugins"""
def __init__(self, module_name):
msg = (_("Couldn't find module '{module}'. Perhaps you need to "
"install the package?").format(module=module_name))
super().__init__(msg)
class MissingGstreamerElementPluginException(PluginImportException):
"""Consistent Exception for reporting missing Gstreamer elements for
plugins"""
def __init__(self, element_name):
msg = (_("Couldn't find GStreamer element '{element}'.")
.format(element=element_name))
super().__init__(msg)
def migrate_old_config():
active = []
old_keys = ["songsmenuplugins", "eventplugins", "editingplugins",
"playorderplugins"]
for key in old_keys:
key = "active_" + key
try:
active.extend(config.get("plugins", key).splitlines())
except config.Error:
pass
else:
config._config.remove_option("plugins", key)
if active:
config.set("plugins", "active_plugins", "\n".join(active))
def list_plugins(module):
"""Return all objects of the module that satisfy the basic
plugin needs: id, name and don't start with '_'
If '__all__' is defined, only plugins in '__all__' will be loaded.
"""
try:
objs = [getattr(module, attr) for attr in module.__all__]
except AttributeError:
objs = [getattr(module, attr) for attr in vars(module)
if not attr.startswith("_")]
ok = []
for obj in objs:
if hasattr(obj, "PLUGIN_ID"):
if not hasattr(obj, "PLUGIN_NAME"):
obj.PLUGIN_NAME = obj.PLUGIN_ID
ok.append(obj)
return ok
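# Example (sketch): a module defining __all__ = ["MyPlugin"] exposes only
# MyPlugin; without __all__, every public attribute carrying a PLUGIN_ID
# is picked up.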
class PluginModule:
def __init__(self, name, module):
self.name = name
self.module = module
self.plugins = [Plugin(cls) for cls in list_plugins(module)]
class Plugin:
def __init__(self, plugin_cls):
self.cls = plugin_cls
self.handlers = []
self.instance = None
def __repr__(self):
return "<%s id=%r name=%r>" % (type(self).__name__, self.id, self.name)
@property
def can_enable(self):
return getattr(self.cls, "PLUGIN_CAN_ENABLE", True)
@property
def id(self):
return self.cls.PLUGIN_ID
@property
def name(self):
return self.cls.PLUGIN_NAME
@property
def description(self):
return getattr(self.cls, "PLUGIN_DESC", None)
@property
def tags(self):
tags = getattr(self.cls, "PLUGIN_TAGS", [])
if isinstance(tags, str):
tags = [tags]
return tags
@property
def icon(self):
return getattr(self.cls, "PLUGIN_ICON", None)
def get_instance(self):
"""A singleton"""
if not getattr(self.cls, "PLUGIN_INSTANCE", False):
return
if self.instance is None:
try:
obj = self.cls()
            except Exception:
util.print_exc()
return
self.instance = obj
return self.instance
class PluginHandler:
"""A plugin handler can choose to handle plugins, as well as control
their enabled state."""
def plugin_handle(self, plugin):
"""Returns `True` IFF this handler can handle `plugin`"""
raise NotImplementedError
def plugin_enable(self, plugin):
"""Called to enable / register `plugin`"""
raise NotImplementedError
def plugin_disable(self, plugin):
"""Called to disable / de-register `plugin`"""
raise NotImplementedError
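# A minimal handler sketch (hypothetical, for illustration only): it accepts
# any plugin whose class defines a PLUGIN_EXAMPLE_MARKER attribute and keeps
# track of the ones that are enabled.
class _ExamplePluginHandler(PluginHandler):
    def __init__(self):
        self.active = set()
    def plugin_handle(self, plugin):
        return hasattr(plugin.cls, "PLUGIN_EXAMPLE_MARKER")
    def plugin_enable(self, plugin):
        self.active.add(plugin.id)
    def plugin_disable(self, plugin):
        self.active.discard(plugin.id)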
class PluginManager:
"""
    The manager takes care of plugin loading/reloading. Interested plugin
    handlers can register themselves to get called when plugins get
    enabled or disabled.
Plugins get exposed when at least one handler shows interest
in them (by returning True in the handle method).
    Each plugin has to be a class which defines PLUGIN_ID and PLUGIN_NAME.
Plugins that have a true PLUGIN_INSTANCE attribute get instantiated on
enable and the enabled/disabled methods get called.
If plugin handlers want a plugin instance, they have to call
Plugin.get_instance() to get a singleton.
handlers need to implement the following methods:
handler.plugin_handle(plugin)
Needs to return True if the handler should be called
whenever the plugin's enabled status changes.
handler.plugin_enable(plugin)
Gets called if the plugin gets enabled.
handler.plugin_disable(plugin)
Gets called if the plugin gets disabled.
Should remove all references.
"""
CONFIG_SECTION = "plugins"
CONFIG_OPTION = "active_plugins"
instance: Optional["PluginManager"] = None
"""Default instance"""
def __init__(self, folders=None):
"""folders is a list of paths that will be scanned for plugins.
Plugins in later paths will be preferred if they share a name.
"""
super().__init__()
if folders is None:
folders = []
self.__scanner = ModuleScanner(folders)
self.__modules = {} # name: PluginModule
self.__handlers = [] # handler list
self.__enabled = set() # (possibly) enabled plugin IDs
self.__restore()
def rescan(self):
"""Scan for plugin changes or to initially load all plugins"""
print_d("Rescanning..")
removed, added = self.__scanner.rescan()
# remember IDs of enabled plugin that get reloaded, so we can enable
# them again
reload_ids = []
for name in removed:
if name not in added:
continue
mod = self.__modules[name]
for plugin in mod.plugins:
if self.enabled(plugin):
reload_ids.append(plugin.id)
for name in removed:
# share the namespace with ModuleScanner for now
self.__remove_module(name)
# restore enabled state
self.__enabled.update(reload_ids)
for name in added:
new_module = self.__scanner.modules[name]
self.__add_module(name, new_module.module)
print_d("Rescanning done.")
@property
def _modules(self):
return self.__scanner.modules.values()
@property
def _plugins(self) -> Iterable[Plugin]:
"""All registered plugins"""
return (plugin
for module in self.__modules.values()
for plugin in module.plugins)
@property
def plugins(self):
"""Returns a list of plugins with active handlers"""
return [p for p in self._plugins if p.handlers]
def register_handler(self, handler):
"""
Registers a handler, attaching it to any current plugins it
advertises that it can handle
`handler` should probably be a `PluginHandler`
"""
print_d("Registering handler: %r" % type(handler).__name__)
self.__handlers.append(handler)
for plugin in self._plugins:
if not handler.plugin_handle(plugin):
continue
if plugin.handlers:
plugin.handlers.append(handler)
if self.enabled(plugin):
handler.plugin_enable(plugin)
else:
plugin.handlers.append(handler)
if self.enabled(plugin):
self.enable(plugin, True, force=True)
def save(self):
print_d("Saving plugins: %d active" % len(self.__enabled))
config.set(self.CONFIG_SECTION,
self.CONFIG_OPTION,
"\n".join(self.__enabled))
def enabled(self, plugin):
"""Returns if the plugin is enabled."""
if not plugin.handlers:
return False
return plugin.id in self.__enabled
def enable(self, plugin, status, force=False):
"""Enable or disable a plugin."""
if not force and self.enabled(plugin) == bool(status):
return
if not status:
print_d("Disable %r" % plugin.id)
for handler in plugin.handlers:
handler.plugin_disable(plugin)
self.__enabled.discard(plugin.id)
instance = plugin.instance
if instance and hasattr(instance, "disabled"):
try:
instance.disabled()
except Exception:
util.print_exc()
else:
print_d("Enable %r" % plugin.id)
obj = plugin.get_instance()
if obj and hasattr(obj, "enabled"):
try:
obj.enabled()
except Exception:
util.print_exc()
for handler in plugin.handlers:
handler.plugin_enable(plugin)
self.__enabled.add(plugin.id)
@property
def failures(self):
"""module name: list of error message text lines"""
errors = {}
for name, error in self.__scanner.failures.items():
exception = error.exception
if isinstance(exception, PluginImportException):
if not exception.should_show():
continue
errors[name] = [exception.desc]
else:
errors[name] = error.traceback
return errors
def quit(self):
"""Disable plugins and tell all handlers to clean up"""
for name in list(self.__modules.keys()):
self.__remove_module(name)
def __remove_module(self, name):
plugin_module = self.__modules.pop(name)
for plugin in plugin_module.plugins:
if plugin.handlers:
self.enable(plugin, False)
def __add_module(self, name, module):
plugin_mod = PluginModule(name, module)
self.__modules[name] = plugin_mod
for plugin in plugin_mod.plugins:
handlers = []
for handler in self.__handlers:
if handler.plugin_handle(plugin):
handlers.append(handler)
if handlers:
plugin.handlers = handlers
if self.enabled(plugin):
self.enable(plugin, True, force=True)
def __restore(self):
migrate_old_config()
active = config.get(self.CONFIG_SECTION,
self.CONFIG_OPTION, "").splitlines()
self.__enabled.update(active)
print_d("Restoring plugins: %d" % len(self.__enabled))
for plugin in self._plugins:
if self.enabled(plugin):
self.enable(plugin, True, force=True)
PM = PluginManager
def plugin_enabled(plugin):
"""Returns true if the plugin is enabled (or "always" enabled)"""
pm = PluginManager.instance
enabled = pm.enabled(plugin) or not plugin.can_enable
return enabled
class PluginConfig(ConfigProxy):
"""A proxy for a Config object that can be used by plugins.
Provides some methods of the Config class but doesn't need a
section and prefixes the config option name.
"""
def __init__(self, prefix, _config=None, _defaults=True):
self._prefix = prefix
if _config is None:
_config = config._config
super().__init__(
_config, PM.CONFIG_SECTION, _defaults)
def _new_defaults(self, real_default_config):
return PluginConfig(self._prefix, real_default_config, False)
def _option(self, name):
return "%s_%s" % (self._prefix, name)
def ConfigCheckButton(self, label, option, **kwargs):
return ConfigCheckButton(label, PM.CONFIG_SECTION,
self._option(option), **kwargs)
class PluginConfigMixin:
"""
Mixin for storage and editing of plugin config in a standard way.
"""
CONFIG_SECTION = ""
"""If defined, the section for storing config,
    otherwise it will be based on a munged `PLUGIN_ID`"""
@classmethod
def _config_key(cls, name):
return cls._get_config_option(name)
@classmethod
def _get_config_option(cls, option):
prefix = cls.CONFIG_SECTION
if not prefix:
prefix = cls.PLUGIN_ID.lower().replace(" ", "_")
return "%s_%s" % (prefix, option)
@classmethod
def config_get(cls, name, default=""):
"""Gets a config string value for this plugin"""
return config.get(PM.CONFIG_SECTION, cls._config_key(name), default)
@classmethod
def config_set(cls, name, value):
"""Saves a config string value for this plugin"""
try:
config.set(PM.CONFIG_SECTION, cls._config_key(name), value)
except config.Error:
print_d("Couldn't set config item '%s' to %r" % (name, value))
@classmethod
def config_get_bool(cls, name, default=False):
"""Gets a config boolean for this plugin"""
return config.getboolean(PM.CONFIG_SECTION, cls._config_key(name),
default)
@classmethod
def config_get_stringlist(cls, name, default=False):
"""Gets a config string list for this plugin"""
return config.getstringlist(PM.CONFIG_SECTION, cls._config_key(name),
default)
def config_entry_changed(self, entry, key):
"""React to a change in an gtk.Entry (by saving it to config)"""
if entry.get_property('sensitive'):
self.config_set(key, entry.get_text())
@classmethod
def ConfigCheckButton(cls, label, name, default=False):
"""
Create a new `ConfigCheckButton` for `name`, pre-populated correctly
"""
option = cls._config_key(name)
try:
config.getboolean(PM.CONFIG_SECTION, option)
except config.Error:
cls.config_set(name, default)
return ConfigCheckButton(label, PM.CONFIG_SECTION,
option, populate=True)
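# Usage sketch (hypothetical plugin): a class mixing in PluginConfigMixin
# can call cls.config_set("greeting", "hi") and read it back with
# cls.config_get("greeting"); the option key is stored as
# "<CONFIG_SECTION or munged PLUGIN_ID>_greeting".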
class ConfProp:
def __init__(self, conf, name, default):
self._conf = conf
self._name = name
self._conf.defaults.set(name, default)
def __get__(self, *args, **kwargs):
return self._conf.get(self._name)
def __set__(self, obj, value):
self._conf.set(self._name, value)
class BoolConfProp(ConfProp):
def __get__(self, *args, **kwargs):
return self._conf.getboolean(self._name)
class IntConfProp(ConfProp):
def __get__(self, *args, **kwargs):
return self._conf.getint(self._name)
class FloatConfProp(ConfProp):
def __get__(self, *args, **kwargs):
return self._conf.getfloat(self._name)
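# Descriptor usage sketch (hypothetical names): assigned at class level,
# these properties read and write through a PluginConfig, e.g.
#   class Prefs:
#       enabled = BoolConfProp(plugin_config, "enabled", True)
#       opacity = FloatConfProp(plugin_config, "opacity", 0.9)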
def str_to_color_tuple(s):
"""Raises ValueError"""
lst = [float(p) for p in s.split()]
while len(lst) < 4:
lst.append(0.0)
return tuple(lst)
def color_tuple_to_str(t):
return " ".join(map(str, t))
class ColorConfProp(ConfProp):
def __init__(self, conf, name, default):
self._conf = conf
self._name = name
self._conf.defaults.set(name, color_tuple_to_str(default))
def __get__(self, *args, **kwargs):
s = self._conf.get(self._name)
try:
return str_to_color_tuple(s)
except ValueError:
return str_to_color_tuple(self._conf.defaults.get(self._name))
def __set__(self, obj, value):
self._conf.set(self._name, color_tuple_to_str(value))
|
gpl-2.0
|
mdsitton/pyogl
|
opengl/glx.py
|
1
|
35603
|
'''
OpenGL binding for Python
WARNING - This is generated code, do not modify directly.
'''
import ctypes as ct
from opengl.bindutils import gl_func
from opengl import gltypes as t
def set_func(name, returnType, paramTypes):
    '''gl_func wrapper that inserts the function into this module's globals.'''
globals()[name] = gl_func(name, returnType, paramTypes)
def set_enum(name, value):
globals()[name] = value
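# Example (sketch): once init() has run, set_func('glXWaitGL', t.void, ())
# has made glXWaitGL() callable from this module, and set_enum has exposed
# constants such as GLX_RGBA at module level.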
noParms = ()
# GLXFBConfigID = t.XID
# GLXFBConfig = ct.POINTER(t._GLXFBConfigRec)
# GLXContextID = t.XID
# GLXContext = ct.POINTER(t._GLXcontextRec)
# GLXPixmap = t.XID
# GLXDrawable = t.XID
# GLXWindow = t.XID
# GLXPbuffer = t.XID
# GLXVideoCaptureDeviceNV = t.XID
# GLXVideoDeviceNV = t.UINT
# GLXVideoSourceSGIX = t.XID
# GLXFBConfigIDSGIX = t.XID
# GLXFBConfigSGIX = ct.POINTER(t._GLXFBConfigRec)
# GLXPbufferSGIX = t.XID
#### GLX VERSION 1.0 ####
def init_glx_version_1_0():
# set_func('glXChooseVisual', ct.POINTER(t.XVisualInfo), (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT)))
# set_func('glXCreateContext', t.GLXContext, (ct.POINTER(t.Display), ct.POINTER(t.XVisualInfo), t.GLXContext, t.Bool))
# set_func('glXDestroyContext', t.void, (ct.POINTER(t.Display), t.GLXContext))
# set_func('glXMakeCurrent', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, t.GLXContext))
# set_func('glXCopyContext', t.void, (ct.POINTER(t.Display), t.GLXContext, t.GLXContext, t.ULONG))
# set_func('glXSwapBuffers', t.void, (ct.POINTER(t.Display), t.GLXDrawable))
# set_func('glXCreateGLXPixmap', t.GLXPixmap, (ct.POINTER(t.Display), ct.POINTER(t.XVisualInfo), t.Pixmap))
# set_func('glXDestroyGLXPixmap', t.void, (ct.POINTER(t.Display), t.GLXPixmap))
# set_func('glXQueryExtension', t.Bool, (ct.POINTER(t.Display), ct.POINTER(t.INT), ct.POINTER(t.INT)))
# set_func('glXQueryVersion', t.Bool, (ct.POINTER(t.Display), ct.POINTER(t.INT), ct.POINTER(t.INT)))
# set_func('glXIsDirect', t.Bool, (ct.POINTER(t.Display), t.GLXContext))
# set_func('glXGetConfig', t.INT, (ct.POINTER(t.Display), ct.POINTER(t.XVisualInfo), t.INT, ct.POINTER(t.INT)))
# set_func('glXGetCurrentContext', t.GLXContext, ())
# set_func('glXGetCurrentDrawable', t.GLXDrawable, ())
set_func('glXWaitGL', t.void, ())
set_func('glXWaitX', t.void, ())
# set_func('glXUseXFont', t.void, (t.Font, t.INT, t.INT, t.INT))
set_enum("GLX_EXTENSION_NAME", "GLX")
set_enum("GLX_PbufferClobber", 0)
set_enum("GLX_BufferSwapComplete", 1)
set_enum("__GLX_NUMBER_EVENTS", 17)
set_enum("GLX_BAD_SCREEN", 1)
set_enum("GLX_BAD_ATTRIBUTE", 2)
set_enum("GLX_NO_EXTENSION", 3)
set_enum("GLX_BAD_VISUAL", 4)
set_enum("GLX_BAD_CONTEXT", 5)
set_enum("GLX_BAD_VALUE", 6)
set_enum("GLX_BAD_ENUM", 7)
set_enum("GLX_USE_GL", 1)
set_enum("GLX_BUFFER_SIZE", 2)
set_enum("GLX_LEVEL", 3)
set_enum("GLX_RGBA", 4)
set_enum("GLX_DOUBLEBUFFER", 5)
set_enum("GLX_STEREO", 6)
set_enum("GLX_AUX_BUFFERS", 7)
set_enum("GLX_RED_SIZE", 8)
set_enum("GLX_GREEN_SIZE", 9)
set_enum("GLX_BLUE_SIZE", 10)
set_enum("GLX_ALPHA_SIZE", 11)
set_enum("GLX_DEPTH_SIZE", 12)
set_enum("GLX_STENCIL_SIZE", 13)
set_enum("GLX_ACCUM_RED_SIZE", 14)
set_enum("GLX_ACCUM_GREEN_SIZE", 15)
set_enum("GLX_ACCUM_BLUE_SIZE", 16)
set_enum("GLX_ACCUM_ALPHA_SIZE", 17)
#### GLX VERSION 1.1 ####
def init_glx_version_1_1():
# set_func('glXQueryExtensionsString', ct.POINTER(t.CHAR), (ct.POINTER(t.Display), t.INT))
# set_func('glXQueryServerString', ct.POINTER(t.CHAR), (ct.POINTER(t.Display), t.INT, t.INT))
# set_func('glXGetClientString', ct.POINTER(t.CHAR), (ct.POINTER(t.Display), t.INT))
set_enum("GLX_VENDOR", 0x1)
set_enum("GLX_VERSION", 0x2)
set_enum("GLX_EXTENSIONS", 0x3)
#### GLX VERSION 1.2 ####
def init_glx_version_1_2():
set_func('glXGetCurrentDisplay', ct.POINTER(t.Display), ())
#### GLX VERSION 1.3 ####
def init_glx_version_1_3():
# set_func('glXGetFBConfigs', ct.POINTER(t.GLXFBConfig), (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT)))
# set_func('glXChooseFBConfig', ct.POINTER(t.GLXFBConfig), (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT), ct.POINTER(t.INT)))
# set_func('glXGetFBConfigAttrib', t.INT, (ct.POINTER(t.Display), t.GLXFBConfig, t.INT, ct.POINTER(t.INT)))
# set_func('glXGetVisualFromFBConfig', ct.POINTER(t.XVisualInfo), (ct.POINTER(t.Display), t.GLXFBConfig))
# set_func('glXCreateWindow', t.GLXWindow, (ct.POINTER(t.Display), t.GLXFBConfig, t.Window, ct.POINTER(t.INT)))
# set_func('glXDestroyWindow', t.void, (ct.POINTER(t.Display), t.GLXWindow))
# set_func('glXCreatePixmap', t.GLXPixmap, (ct.POINTER(t.Display), t.GLXFBConfig, t.Pixmap, ct.POINTER(t.INT)))
# set_func('glXDestroyPixmap', t.void, (ct.POINTER(t.Display), t.GLXPixmap))
# set_func('glXCreatePbuffer', t.GLXPbuffer, (ct.POINTER(t.Display), t.GLXFBConfig, ct.POINTER(t.INT)))
# set_func('glXDestroyPbuffer', t.void, (ct.POINTER(t.Display), t.GLXPbuffer))
# set_func('glXQueryDrawable', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.INT, ct.POINTER(t.UINT)))
# set_func('glXCreateNewContext', t.GLXContext, (ct.POINTER(t.Display), t.GLXFBConfig, t.INT, t.GLXContext, t.Bool))
# set_func('glXMakeContextCurrent', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, t.GLXDrawable, t.GLXContext))
# set_func('glXGetCurrentReadDrawable', t.GLXDrawable, ())
# set_func('glXQueryContext', t.INT, (ct.POINTER(t.Display), t.GLXContext, t.INT, ct.POINTER(t.INT)))
# set_func('glXSelectEvent', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.ULONG))
# set_func('glXGetSelectedEvent', t.void, (ct.POINTER(t.Display), t.GLXDrawable, ct.POINTER(t.ULONG)))
set_enum("GLX_WINDOW_BIT", 0x00000001)
set_enum("GLX_PIXMAP_BIT", 0x00000002)
set_enum("GLX_PBUFFER_BIT", 0x00000004)
set_enum("GLX_RGBA_BIT", 0x00000001)
set_enum("GLX_COLOR_INDEX_BIT", 0x00000002)
set_enum("GLX_PBUFFER_CLOBBER_MASK", 0x08000000)
set_enum("GLX_FRONT_LEFT_BUFFER_BIT", 0x00000001)
set_enum("GLX_FRONT_RIGHT_BUFFER_BIT", 0x00000002)
set_enum("GLX_BACK_LEFT_BUFFER_BIT", 0x00000004)
set_enum("GLX_BACK_RIGHT_BUFFER_BIT", 0x00000008)
set_enum("GLX_AUX_BUFFERS_BIT", 0x00000010)
set_enum("GLX_DEPTH_BUFFER_BIT", 0x00000020)
set_enum("GLX_STENCIL_BUFFER_BIT", 0x00000040)
set_enum("GLX_ACCUM_BUFFER_BIT", 0x00000080)
set_enum("GLX_CONFIG_CAVEAT", 0x20)
set_enum("GLX_X_VISUAL_TYPE", 0x22)
set_enum("GLX_TRANSPARENT_TYPE", 0x23)
set_enum("GLX_TRANSPARENT_INDEX_VALUE", 0x24)
set_enum("GLX_TRANSPARENT_RED_VALUE", 0x25)
set_enum("GLX_TRANSPARENT_GREEN_VALUE", 0x26)
set_enum("GLX_TRANSPARENT_BLUE_VALUE", 0x27)
set_enum("GLX_TRANSPARENT_ALPHA_VALUE", 0x28)
set_enum("GLX_DONT_CARE", 0xFFFFFFFF)
set_enum("GLX_NONE", 0x8000)
set_enum("GLX_SLOW_CONFIG", 0x8001)
set_enum("GLX_TRUE_COLOR", 0x8002)
set_enum("GLX_DIRECT_COLOR", 0x8003)
set_enum("GLX_PSEUDO_COLOR", 0x8004)
set_enum("GLX_STATIC_COLOR", 0x8005)
set_enum("GLX_GRAY_SCALE", 0x8006)
set_enum("GLX_STATIC_GRAY", 0x8007)
set_enum("GLX_TRANSPARENT_RGB", 0x8008)
set_enum("GLX_TRANSPARENT_INDEX", 0x8009)
set_enum("GLX_VISUAL_ID", 0x800B)
set_enum("GLX_SCREEN", 0x800C)
set_enum("GLX_NON_CONFORMANT_CONFIG", 0x800D)
set_enum("GLX_DRAWABLE_TYPE", 0x8010)
set_enum("GLX_RENDER_TYPE", 0x8011)
set_enum("GLX_X_RENDERABLE", 0x8012)
set_enum("GLX_FBCONFIG_ID", 0x8013)
set_enum("GLX_RGBA_TYPE", 0x8014)
set_enum("GLX_COLOR_INDEX_TYPE", 0x8015)
set_enum("GLX_MAX_PBUFFER_WIDTH", 0x8016)
set_enum("GLX_MAX_PBUFFER_HEIGHT", 0x8017)
set_enum("GLX_MAX_PBUFFER_PIXELS", 0x8018)
set_enum("GLX_PRESERVED_CONTENTS", 0x801B)
set_enum("GLX_LARGEST_PBUFFER", 0x801C)
set_enum("GLX_WIDTH", 0x801D)
set_enum("GLX_HEIGHT", 0x801E)
set_enum("GLX_EVENT_MASK", 0x801F)
set_enum("GLX_DAMAGED", 0x8020)
set_enum("GLX_SAVED", 0x8021)
set_enum("GLX_WINDOW", 0x8022)
set_enum("GLX_PBUFFER", 0x8023)
set_enum("GLX_PBUFFER_HEIGHT", 0x8040)
set_enum("GLX_PBUFFER_WIDTH", 0x8041)
#### GLX VERSION 1.4 ####
def init_glx_version_1_4():
# set_func('glXGetProcAddress', t.__GLXextFuncPtr, (ct.POINTER(t.GLubyte),))
set_enum("GLX_SAMPLE_BUFFERS", 100000)
set_enum("GLX_SAMPLES", 100001)
#### GLX_3DFX_MULTISAMPLE ####
def init_glx_3dfx_multisample():
set_enum("GLX_SAMPLE_BUFFERS_3DFX", 0x8050)
set_enum("GLX_SAMPLES_3DFX", 0x8051)
#### GLX_AMD_GPU_ASSOCIATION ####
def init_glx_amd_gpu_association():
# set_func('glXGetGPUIDsAMD', t.UINT, (t.UINT, ct.POINTER(t.UINT)))
# set_func('glXGetGPUInfoAMD', t.INT, (t.UINT, t.INT, t.GLenum, t.UINT, ct.POINTER(t.void)))
# set_func('glXGetContextGPUIDAMD', t.UINT, (t.GLXContext,))
# set_func('glXCreateAssociatedContextAMD', t.GLXContext, (t.UINT, t.GLXContext))
# set_func('glXCreateAssociatedContextAttribsAMD', t.GLXContext, (t.UINT, t.GLXContext, ct.POINTER(t.INT)))
# set_func('glXDeleteAssociatedContextAMD', t.Bool, (t.GLXContext,))
# set_func('glXMakeAssociatedContextCurrentAMD', t.Bool, (t.GLXContext,))
# set_func('glXGetCurrentAssociatedContextAMD', t.GLXContext, ())
# set_func('glXBlitContextFramebufferAMD', t.void, (t.GLXContext, t.GLint, t.GLint, t.GLint, t.GLint, t.GLint, t.GLint, t.GLint, t.GLint, t.GLbitfield, t.GLenum))
set_enum("GLX_GPU_VENDOR_AMD", 0x1F00)
set_enum("GLX_GPU_RENDERER_STRING_AMD", 0x1F01)
set_enum("GLX_GPU_OPENGL_VERSION_STRING_AMD", 0x1F02)
set_enum("GLX_GPU_FASTEST_TARGET_GPUS_AMD", 0x21A2)
set_enum("GLX_GPU_RAM_AMD", 0x21A3)
set_enum("GLX_GPU_CLOCK_AMD", 0x21A4)
set_enum("GLX_GPU_NUM_PIPES_AMD", 0x21A5)
set_enum("GLX_GPU_NUM_SIMD_AMD", 0x21A6)
set_enum("GLX_GPU_NUM_RB_AMD", 0x21A7)
set_enum("GLX_GPU_NUM_SPI_AMD", 0x21A8)
#### GLX_ARB_CONTEXT_FLUSH_CONTROL ####
def init_glx_arb_context_flush_control():
set_enum("GLX_CONTEXT_RELEASE_BEHAVIOR_ARB", 0x2097)
set_enum("GLX_CONTEXT_RELEASE_BEHAVIOR_NONE_ARB", 0)
set_enum("GLX_CONTEXT_RELEASE_BEHAVIOR_FLUSH_ARB", 0x2098)
#### GLX_ARB_CREATE_CONTEXT ####
def init_glx_arb_create_context():
# set_func('glXCreateContextAttribsARB', t.GLXContext, (ct.POINTER(t.Display), t.GLXFBConfig, t.GLXContext, t.Bool, ct.POINTER(t.INT)))
set_enum("GLX_CONTEXT_DEBUG_BIT_ARB", 0x00000001)
set_enum("GLX_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB", 0x00000002)
set_enum("GLX_CONTEXT_MAJOR_VERSION_ARB", 0x2091)
set_enum("GLX_CONTEXT_MINOR_VERSION_ARB", 0x2092)
set_enum("GLX_CONTEXT_FLAGS_ARB", 0x2094)
#### GLX_ARB_CREATE_CONTEXT_PROFILE ####
def init_glx_arb_create_context_profile():
set_enum("GLX_CONTEXT_CORE_PROFILE_BIT_ARB", 0x00000001)
set_enum("GLX_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB", 0x00000002)
set_enum("GLX_CONTEXT_PROFILE_MASK_ARB", 0x9126)
#### GLX_ARB_CREATE_CONTEXT_ROBUSTNESS ####
def init_glx_arb_create_context_robustness():
set_enum("GLX_CONTEXT_ROBUST_ACCESS_BIT_ARB", 0x00000004)
set_enum("GLX_LOSE_CONTEXT_ON_RESET_ARB", 0x8252)
set_enum("GLX_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB", 0x8256)
set_enum("GLX_NO_RESET_NOTIFICATION_ARB", 0x8261)
#### GLX_ARB_FBCONFIG_FLOAT ####
def init_glx_arb_fbconfig_float():
set_enum("GLX_RGBA_FLOAT_TYPE_ARB", 0x20B9)
set_enum("GLX_RGBA_FLOAT_BIT_ARB", 0x00000004)
#### GLX_ARB_FRAMEBUFFER_SRGB ####
def init_glx_arb_framebuffer_srgb():
set_enum("GLX_FRAMEBUFFER_SRGB_CAPABLE_ARB", 0x20B2)
#### GLX_ARB_GET_PROC_ADDRESS ####
def init_glx_arb_get_proc_address():
# set_func('glXGetProcAddressARB', t.__GLXextFuncPtr, (ct.POINTER(t.GLubyte),))
pass
#### GLX_ARB_MULTISAMPLE ####
def init_glx_arb_multisample():
set_enum("GLX_SAMPLE_BUFFERS_ARB", 100000)
set_enum("GLX_SAMPLES_ARB", 100001)
#### GLX_ARB_ROBUSTNESS_APPLICATION_ISOLATION ####
def init_glx_arb_robustness_application_isolation():
set_enum("GLX_CONTEXT_RESET_ISOLATION_BIT_ARB", 0x00000008)
#### GLX_ARB_ROBUSTNESS_SHARE_GROUP_ISOLATION ####
def init_glx_arb_robustness_share_group_isolation():
set_enum("GLX_CONTEXT_RESET_ISOLATION_BIT_ARB", 0x00000008)
#### GLX_ARB_VERTEX_BUFFER_OBJECT ####
def init_glx_arb_vertex_buffer_object():
set_enum("GLX_CONTEXT_ALLOW_BUFFER_BYTE_ORDER_MISMATCH_ARB", 0x2095)
#### GLX_EXT_BUFFER_AGE ####
def init_glx_ext_buffer_age():
set_enum("GLX_BACK_BUFFER_AGE_EXT", 0x20F4)
#### GLX_EXT_CREATE_CONTEXT_ES_PROFILE ####
def init_glx_ext_create_context_es_profile():
set_enum("GLX_CONTEXT_ES_PROFILE_BIT_EXT", 0x00000004)
#### GLX_EXT_CREATE_CONTEXT_ES2_PROFILE ####
def init_glx_ext_create_context_es2_profile():
set_enum("GLX_CONTEXT_ES2_PROFILE_BIT_EXT", 0x00000004)
#### GLX_EXT_FBCONFIG_PACKED_FLOAT ####
def init_glx_ext_fbconfig_packed_float():
set_enum("GLX_RGBA_UNSIGNED_FLOAT_TYPE_EXT", 0x20B1)
set_enum("GLX_RGBA_UNSIGNED_FLOAT_BIT_EXT", 0x00000008)
#### GLX_EXT_FRAMEBUFFER_SRGB ####
def init_glx_ext_framebuffer_srgb():
set_enum("GLX_FRAMEBUFFER_SRGB_CAPABLE_EXT", 0x20B2)
#### GLX_EXT_IMPORT_CONTEXT ####
def init_glx_ext_import_context():
set_func('glXGetCurrentDisplayEXT', ct.POINTER(t.Display), ())
# set_func('glXQueryContextInfoEXT', t.INT, (ct.POINTER(t.Display), t.GLXContext, t.INT, ct.POINTER(t.INT)))
# set_func('glXGetContextIDEXT', t.GLXContextID, (t.GLXContext,))
# set_func('glXImportContextEXT', t.GLXContext, (ct.POINTER(t.Display), t.GLXContextID))
# set_func('glXFreeContextEXT', t.void, (ct.POINTER(t.Display), t.GLXContext))
set_enum("GLX_SHARE_CONTEXT_EXT", 0x800A)
set_enum("GLX_VISUAL_ID_EXT", 0x800B)
set_enum("GLX_SCREEN_EXT", 0x800C)
#### GLX_EXT_LIBGLVND ####
def init_glx_ext_libglvnd():
set_enum("GLX_VENDOR_NAMES_EXT", 0x20F6)
#### GLX_EXT_STEREO_TREE ####
def init_glx_ext_stereo_tree():
set_enum("GLX_STEREO_TREE_EXT", 0x20F5)
set_enum("GLX_STEREO_NOTIFY_MASK_EXT", 0x00000001)
set_enum("GLX_STEREO_NOTIFY_EXT", 0x00000000)
#### GLX_EXT_SWAP_CONTROL ####
def init_glx_ext_swap_control():
# set_func('glXSwapIntervalEXT', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.INT))
set_enum("GLX_SWAP_INTERVAL_EXT", 0x20F1)
set_enum("GLX_MAX_SWAP_INTERVAL_EXT", 0x20F2)
#### GLX_EXT_SWAP_CONTROL_TEAR ####
def init_glx_ext_swap_control_tear():
set_enum("GLX_LATE_SWAPS_TEAR_EXT", 0x20F3)
#### GLX_EXT_TEXTURE_FROM_PIXMAP ####
def init_glx_ext_texture_from_pixmap():
# set_func('glXBindTexImageEXT', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.INT, ct.POINTER(t.INT)))
# set_func('glXReleaseTexImageEXT', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.INT))
set_enum("GLX_TEXTURE_1D_BIT_EXT", 0x00000001)
set_enum("GLX_TEXTURE_2D_BIT_EXT", 0x00000002)
set_enum("GLX_TEXTURE_RECTANGLE_BIT_EXT", 0x00000004)
set_enum("GLX_BIND_TO_TEXTURE_RGB_EXT", 0x20D0)
set_enum("GLX_BIND_TO_TEXTURE_RGBA_EXT", 0x20D1)
set_enum("GLX_BIND_TO_MIPMAP_TEXTURE_EXT", 0x20D2)
set_enum("GLX_BIND_TO_TEXTURE_TARGETS_EXT", 0x20D3)
set_enum("GLX_Y_INVERTED_EXT", 0x20D4)
set_enum("GLX_TEXTURE_FORMAT_EXT", 0x20D5)
set_enum("GLX_TEXTURE_TARGET_EXT", 0x20D6)
set_enum("GLX_MIPMAP_TEXTURE_EXT", 0x20D7)
set_enum("GLX_TEXTURE_FORMAT_NONE_EXT", 0x20D8)
set_enum("GLX_TEXTURE_FORMAT_RGB_EXT", 0x20D9)
set_enum("GLX_TEXTURE_FORMAT_RGBA_EXT", 0x20DA)
set_enum("GLX_TEXTURE_1D_EXT", 0x20DB)
set_enum("GLX_TEXTURE_2D_EXT", 0x20DC)
set_enum("GLX_TEXTURE_RECTANGLE_EXT", 0x20DD)
set_enum("GLX_FRONT_LEFT_EXT", 0x20DE)
set_enum("GLX_FRONT_RIGHT_EXT", 0x20DF)
set_enum("GLX_BACK_LEFT_EXT", 0x20E0)
set_enum("GLX_BACK_RIGHT_EXT", 0x20E1)
set_enum("GLX_FRONT_EXT", 0x20DE)
set_enum("GLX_BACK_EXT", 0x20E0)
set_enum("GLX_AUX0_EXT", 0x20E2)
set_enum("GLX_AUX1_EXT", 0x20E3)
set_enum("GLX_AUX2_EXT", 0x20E4)
set_enum("GLX_AUX3_EXT", 0x20E5)
set_enum("GLX_AUX4_EXT", 0x20E6)
set_enum("GLX_AUX5_EXT", 0x20E7)
set_enum("GLX_AUX6_EXT", 0x20E8)
set_enum("GLX_AUX7_EXT", 0x20E9)
set_enum("GLX_AUX8_EXT", 0x20EA)
set_enum("GLX_AUX9_EXT", 0x20EB)
#### GLX_EXT_VISUAL_INFO ####
def init_glx_ext_visual_info():
set_enum("GLX_X_VISUAL_TYPE_EXT", 0x22)
set_enum("GLX_TRANSPARENT_TYPE_EXT", 0x23)
set_enum("GLX_TRANSPARENT_INDEX_VALUE_EXT", 0x24)
set_enum("GLX_TRANSPARENT_RED_VALUE_EXT", 0x25)
set_enum("GLX_TRANSPARENT_GREEN_VALUE_EXT", 0x26)
set_enum("GLX_TRANSPARENT_BLUE_VALUE_EXT", 0x27)
set_enum("GLX_TRANSPARENT_ALPHA_VALUE_EXT", 0x28)
set_enum("GLX_NONE_EXT", 0x8000)
set_enum("GLX_TRUE_COLOR_EXT", 0x8002)
set_enum("GLX_DIRECT_COLOR_EXT", 0x8003)
set_enum("GLX_PSEUDO_COLOR_EXT", 0x8004)
set_enum("GLX_STATIC_COLOR_EXT", 0x8005)
set_enum("GLX_GRAY_SCALE_EXT", 0x8006)
set_enum("GLX_STATIC_GRAY_EXT", 0x8007)
set_enum("GLX_TRANSPARENT_RGB_EXT", 0x8008)
set_enum("GLX_TRANSPARENT_INDEX_EXT", 0x8009)
#### GLX_EXT_VISUAL_RATING ####
def init_glx_ext_visual_rating():
set_enum("GLX_VISUAL_CAVEAT_EXT", 0x20)
set_enum("GLX_SLOW_VISUAL_EXT", 0x8001)
set_enum("GLX_NON_CONFORMANT_VISUAL_EXT", 0x800D)
set_enum("GLX_NONE_EXT", 0x8000)
#### GLX_INTEL_SWAP_EVENT ####
def init_glx_intel_swap_event():
set_enum("GLX_BUFFER_SWAP_COMPLETE_INTEL_MASK", 0x04000000)
set_enum("GLX_EXCHANGE_COMPLETE_INTEL", 0x8180)
set_enum("GLX_COPY_COMPLETE_INTEL", 0x8181)
set_enum("GLX_FLIP_COMPLETE_INTEL", 0x8182)
#### GLX_MESA_AGP_OFFSET ####
def init_glx_mesa_agp_offset():
# set_func('glXGetAGPOffsetMESA', t.UINT, (ct.POINTER(t.void),))
pass
#### GLX_MESA_COPY_SUB_BUFFER ####
def init_glx_mesa_copy_sub_buffer():
# set_func('glXCopySubBufferMESA', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.INT, t.INT, t.INT, t.INT))
pass
#### GLX_MESA_PIXMAP_COLORMAP ####
def init_glx_mesa_pixmap_colormap():
# set_func('glXCreateGLXPixmapMESA', t.GLXPixmap, (ct.POINTER(t.Display), ct.POINTER(t.XVisualInfo), t.Pixmap, t.Colormap))
pass
#### GLX_MESA_QUERY_RENDERER ####
def init_glx_mesa_query_renderer():
# set_func('glXQueryCurrentRendererIntegerMESA', t.Bool, (t.INT, ct.POINTER(t.UINT)))
# set_func('glXQueryCurrentRendererStringMESA', ct.POINTER(t.CHAR), (t.INT,))
# set_func('glXQueryRendererIntegerMESA', t.Bool, (ct.POINTER(t.Display), t.INT, t.INT, t.INT, ct.POINTER(t.UINT)))
# set_func('glXQueryRendererStringMESA', ct.POINTER(t.CHAR), (ct.POINTER(t.Display), t.INT, t.INT, t.INT))
set_enum("GLX_RENDERER_VENDOR_ID_MESA", 0x8183)
set_enum("GLX_RENDERER_DEVICE_ID_MESA", 0x8184)
set_enum("GLX_RENDERER_VERSION_MESA", 0x8185)
set_enum("GLX_RENDERER_ACCELERATED_MESA", 0x8186)
set_enum("GLX_RENDERER_VIDEO_MEMORY_MESA", 0x8187)
set_enum("GLX_RENDERER_UNIFIED_MEMORY_ARCHITECTURE_MESA", 0x8188)
set_enum("GLX_RENDERER_PREFERRED_PROFILE_MESA", 0x8189)
set_enum("GLX_RENDERER_OPENGL_CORE_PROFILE_VERSION_MESA", 0x818A)
set_enum("GLX_RENDERER_OPENGL_COMPATIBILITY_PROFILE_VERSION_MESA", 0x818B)
set_enum("GLX_RENDERER_OPENGL_ES_PROFILE_VERSION_MESA", 0x818C)
set_enum("GLX_RENDERER_OPENGL_ES2_PROFILE_VERSION_MESA", 0x818D)
set_enum("GLX_RENDERER_ID_MESA", 0x818E)
#### GLX_MESA_RELEASE_BUFFERS ####
def init_glx_mesa_release_buffers():
# set_func('glXReleaseBuffersMESA', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable))
pass
#### GLX_MESA_SET_3DFX_MODE ####
def init_glx_mesa_set_3dfx_mode():
# set_func('glXSet3DfxModeMESA', t.Bool, (t.INT,))
set_enum("GLX_3DFX_WINDOW_MODE_MESA", 0x1)
set_enum("GLX_3DFX_FULLSCREEN_MODE_MESA", 0x2)
#### GLX_NV_COPY_BUFFER ####
def init_glx_nv_copy_buffer():
# set_func('glXCopyBufferSubDataNV', t.void, (ct.POINTER(t.Display), t.GLXContext, t.GLXContext, t.GLenum, t.GLenum, t.GLintptr, t.GLintptr, t.GLsizeiptr))
# set_func('glXNamedCopyBufferSubDataNV', t.void, (ct.POINTER(t.Display), t.GLXContext, t.GLXContext, t.GLuint, t.GLuint, t.GLintptr, t.GLintptr, t.GLsizeiptr))
pass
#### GLX_NV_COPY_IMAGE ####
def init_glx_nv_copy_image():
# set_func('glXCopyImageSubDataNV', t.void, (ct.POINTER(t.Display), t.GLXContext, t.GLuint, t.GLenum, t.GLint, t.GLint, t.GLint, t.GLint, t.GLXContext, t.GLuint, t.GLenum, t.GLint, t.GLint, t.GLint, t.GLint, t.GLsizei, t.GLsizei, t.GLsizei))
pass
#### GLX_NV_DELAY_BEFORE_SWAP ####
def init_glx_nv_delay_before_swap():
# set_func('glXDelayBeforeSwapNV', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, t.GLfloat))
pass
#### GLX_NV_FLOAT_BUFFER ####
def init_glx_nv_float_buffer():
set_enum("GLX_FLOAT_COMPONENTS_NV", 0x20B0)
#### GLX_NV_MULTISAMPLE_COVERAGE ####
def init_glx_nv_multisample_coverage():
set_enum("GLX_COVERAGE_SAMPLES_NV", 100001)
set_enum("GLX_COLOR_SAMPLES_NV", 0x20B3)
#### GLX_NV_PRESENT_VIDEO ####
def init_glx_nv_present_video():
# set_func('glXEnumerateVideoDevicesNV', ct.POINTER(t.UINT), (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT)))
# set_func('glXBindVideoDeviceNV', t.INT, (ct.POINTER(t.Display), t.UINT, t.UINT, ct.POINTER(t.INT)))
set_enum("GLX_NUM_VIDEO_SLOTS_NV", 0x20F0)
#### GLX_NV_ROBUSTNESS_VIDEO_MEMORY_PURGE ####
def init_glx_nv_robustness_video_memory_purge():
set_enum("GLX_GENERATE_RESET_ON_VIDEO_MEMORY_PURGE_NV", 0x20F7)
#### GLX_NV_SWAP_GROUP ####
def init_glx_nv_swap_group():
# set_func('glXJoinSwapGroupNV', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, t.GLuint))
set_func('glXBindSwapBarrierNV', t.Bool, (ct.POINTER(t.Display), t.GLuint, t.GLuint))
# set_func('glXQuerySwapGroupNV', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, ct.POINTER(t.GLuint), ct.POINTER(t.GLuint)))
# set_func('glXQueryMaxSwapGroupsNV', t.Bool, (ct.POINTER(t.Display), t.INT, ct.POINTER(t.GLuint), ct.POINTER(t.GLuint)))
# set_func('glXQueryFrameCountNV', t.Bool, (ct.POINTER(t.Display), t.INT, ct.POINTER(t.GLuint)))
# set_func('glXResetFrameCountNV', t.Bool, (ct.POINTER(t.Display), t.INT))
#### GLX_NV_VIDEO_CAPTURE ####
def init_glx_nv_video_capture():
# set_func('glXBindVideoCaptureDeviceNV', t.INT, (ct.POINTER(t.Display), t.UINT, t.GLXVideoCaptureDeviceNV))
# set_func('glXEnumerateVideoCaptureDevicesNV', ct.POINTER(t.GLXVideoCaptureDeviceNV), (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT)))
# set_func('glXLockVideoCaptureDeviceNV', t.void, (ct.POINTER(t.Display), t.GLXVideoCaptureDeviceNV))
# set_func('glXQueryVideoCaptureDeviceNV', t.INT, (ct.POINTER(t.Display), t.GLXVideoCaptureDeviceNV, t.INT, ct.POINTER(t.INT)))
# set_func('glXReleaseVideoCaptureDeviceNV', t.void, (ct.POINTER(t.Display), t.GLXVideoCaptureDeviceNV))
set_enum("GLX_DEVICE_ID_NV", 0x20CD)
set_enum("GLX_UNIQUE_ID_NV", 0x20CE)
set_enum("GLX_NUM_VIDEO_CAPTURE_SLOTS_NV", 0x20CF)
#### GLX_NV_VIDEO_OUT ####
def init_glx_nv_video_out():
# set_func('glXGetVideoDeviceNV', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, ct.POINTER(t.GLXVideoDeviceNV)))
# set_func('glXReleaseVideoDeviceNV', t.INT, (ct.POINTER(t.Display), t.INT, t.GLXVideoDeviceNV))
# set_func('glXBindVideoImageNV', t.INT, (ct.POINTER(t.Display), t.GLXVideoDeviceNV, t.GLXPbuffer, t.INT))
# set_func('glXReleaseVideoImageNV', t.INT, (ct.POINTER(t.Display), t.GLXPbuffer))
# set_func('glXSendPbufferToVideoNV', t.INT, (ct.POINTER(t.Display), t.GLXPbuffer, t.INT, ct.POINTER(t.ULONG), t.GLboolean))
# set_func('glXGetVideoInfoNV', t.INT, (ct.POINTER(t.Display), t.INT, t.GLXVideoDeviceNV, ct.POINTER(t.ULONG), ct.POINTER(t.ULONG)))
set_enum("GLX_VIDEO_OUT_COLOR_NV", 0x20C3)
set_enum("GLX_VIDEO_OUT_ALPHA_NV", 0x20C4)
set_enum("GLX_VIDEO_OUT_DEPTH_NV", 0x20C5)
set_enum("GLX_VIDEO_OUT_COLOR_AND_ALPHA_NV", 0x20C6)
set_enum("GLX_VIDEO_OUT_COLOR_AND_DEPTH_NV", 0x20C7)
set_enum("GLX_VIDEO_OUT_FRAME_NV", 0x20C8)
set_enum("GLX_VIDEO_OUT_FIELD_1_NV", 0x20C9)
set_enum("GLX_VIDEO_OUT_FIELD_2_NV", 0x20CA)
set_enum("GLX_VIDEO_OUT_STACKED_FIELDS_1_2_NV", 0x20CB)
set_enum("GLX_VIDEO_OUT_STACKED_FIELDS_2_1_NV", 0x20CC)
#### GLX_OML_SWAP_METHOD ####
def init_glx_oml_swap_method():
set_enum("GLX_SWAP_METHOD_OML", 0x8060)
set_enum("GLX_SWAP_EXCHANGE_OML", 0x8061)
set_enum("GLX_SWAP_COPY_OML", 0x8062)
set_enum("GLX_SWAP_UNDEFINED_OML", 0x8063)
#### GLX_OML_SYNC_CONTROL ####
def init_glx_oml_sync_control():
# set_func('glXGetSyncValuesOML', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, ct.POINTER(t.int64_t), ct.POINTER(t.int64_t), ct.POINTER(t.int64_t)))
# set_func('glXGetMscRateOML', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, ct.POINTER(t.int32_t), ct.POINTER(t.int32_t)))
# set_func('glXSwapBuffersMscOML', t.int64_t, (ct.POINTER(t.Display), t.GLXDrawable, t.int64_t, t.int64_t, t.int64_t))
# set_func('glXWaitForMscOML', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, t.int64_t, t.int64_t, t.int64_t, ct.POINTER(t.int64_t), ct.POINTER(t.int64_t), ct.POINTER(t.int64_t)))
# set_func('glXWaitForSbcOML', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, t.int64_t, ct.POINTER(t.int64_t), ct.POINTER(t.int64_t), ct.POINTER(t.int64_t)))
pass
#### GLX_SGI_CUSHION ####
def init_glx_sgi_cushion():
# set_func('glXCushionSGI', t.void, (ct.POINTER(t.Display), t.Window, t.FLOAT))
pass
#### GLX_SGI_MAKE_CURRENT_READ ####
def init_glx_sgi_make_current_read():
# set_func('glXMakeCurrentReadSGI', t.Bool, (ct.POINTER(t.Display), t.GLXDrawable, t.GLXDrawable, t.GLXContext))
# set_func('glXGetCurrentReadDrawableSGI', t.GLXDrawable, ())
pass
#### GLX_SGI_SWAP_CONTROL ####
def init_glx_sgi_swap_control():
# set_func('glXSwapIntervalSGI', t.INT, (t.INT,))
pass
#### GLX_SGI_VIDEO_SYNC ####
def init_glx_sgi_video_sync():
# set_func('glXGetVideoSyncSGI', t.INT, (ct.POINTER(t.UINT),))
# set_func('glXWaitVideoSyncSGI', t.INT, (t.INT, t.INT, ct.POINTER(t.UINT)))
pass
#### GLX_SGIS_BLENDED_OVERLAY ####
def init_glx_sgis_blended_overlay():
set_enum("GLX_BLENDED_RGBA_SGIS", 0x8025)
#### GLX_SGIS_MULTISAMPLE ####
def init_glx_sgis_multisample():
set_enum("GLX_SAMPLE_BUFFERS_SGIS", 100000)
set_enum("GLX_SAMPLES_SGIS", 100001)
#### GLX_SGIS_SHARED_MULTISAMPLE ####
def init_glx_sgis_shared_multisample():
set_enum("GLX_MULTISAMPLE_SUB_RECT_WIDTH_SGIS", 0x8026)
set_enum("GLX_MULTISAMPLE_SUB_RECT_HEIGHT_SGIS", 0x8027)
#### GLX_SGIX_DMBUFFER ####
def init_glx_sgix_dmbuffer():
# set_func('glXAssociateDMPbufferSGIX', t.Bool, (ct.POINTER(t.Display), t.GLXPbufferSGIX, ct.POINTER(t.DMparams), t.DMbuffer))
set_enum("GLX_DIGITAL_MEDIA_PBUFFER_SGIX", 0x8024)
#### GLX_SGIX_FBCONFIG ####
def init_glx_sgix_fbconfig():
# set_func('glXGetFBConfigAttribSGIX', t.INT, (ct.POINTER(t.Display), t.GLXFBConfigSGIX, t.INT, ct.POINTER(t.INT)))
# set_func('glXChooseFBConfigSGIX', ct.POINTER(t.GLXFBConfigSGIX), (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT), ct.POINTER(t.INT)))
# set_func('glXCreateGLXPixmapWithConfigSGIX', t.GLXPixmap, (ct.POINTER(t.Display), t.GLXFBConfigSGIX, t.Pixmap))
# set_func('glXCreateContextWithConfigSGIX', t.GLXContext, (ct.POINTER(t.Display), t.GLXFBConfigSGIX, t.INT, t.GLXContext, t.Bool))
# set_func('glXGetVisualFromFBConfigSGIX', ct.POINTER(t.XVisualInfo), (ct.POINTER(t.Display), t.GLXFBConfigSGIX))
# set_func('glXGetFBConfigFromVisualSGIX', t.GLXFBConfigSGIX, (ct.POINTER(t.Display), ct.POINTER(t.XVisualInfo)))
set_enum("GLX_WINDOW_BIT_SGIX", 0x00000001)
set_enum("GLX_PIXMAP_BIT_SGIX", 0x00000002)
set_enum("GLX_RGBA_BIT_SGIX", 0x00000001)
set_enum("GLX_COLOR_INDEX_BIT_SGIX", 0x00000002)
set_enum("GLX_DRAWABLE_TYPE_SGIX", 0x8010)
set_enum("GLX_RENDER_TYPE_SGIX", 0x8011)
set_enum("GLX_X_RENDERABLE_SGIX", 0x8012)
set_enum("GLX_FBCONFIG_ID_SGIX", 0x8013)
set_enum("GLX_RGBA_TYPE_SGIX", 0x8014)
set_enum("GLX_COLOR_INDEX_TYPE_SGIX", 0x8015)
set_enum("GLX_SCREEN_EXT", 0x800C)
#### GLX_SGIX_HYPERPIPE ####
def init_glx_sgix_hyperpipe():
# set_func('glXQueryHyperpipeNetworkSGIX', ct.POINTER(t.GLXHyperpipeNetworkSGIX), (ct.POINTER(t.Display), ct.POINTER(t.INT)))
# set_func('glXHyperpipeConfigSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, ct.POINTER(t.GLXHyperpipeConfigSGIX), ct.POINTER(t.INT)))
# set_func('glXQueryHyperpipeConfigSGIX', ct.POINTER(t.GLXHyperpipeConfigSGIX), (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT)))
# set_func('glXDestroyHyperpipeConfigSGIX', t.INT, (ct.POINTER(t.Display), t.INT))
# set_func('glXBindHyperpipeSGIX', t.INT, (ct.POINTER(t.Display), t.INT))
# set_func('glXQueryHyperpipeBestAttribSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, t.INT, ct.POINTER(t.void), ct.POINTER(t.void)))
# set_func('glXHyperpipeAttribSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, t.INT, ct.POINTER(t.void)))
# set_func('glXQueryHyperpipeAttribSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, t.INT, ct.POINTER(t.void)))
set_enum("GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX", 80)
set_enum("GLX_BAD_HYPERPIPE_CONFIG_SGIX", 91)
set_enum("GLX_BAD_HYPERPIPE_SGIX", 92)
set_enum("GLX_HYPERPIPE_DISPLAY_PIPE_SGIX", 0x00000001)
set_enum("GLX_HYPERPIPE_RENDER_PIPE_SGIX", 0x00000002)
set_enum("GLX_PIPE_RECT_SGIX", 0x00000001)
set_enum("GLX_PIPE_RECT_LIMITS_SGIX", 0x00000002)
set_enum("GLX_HYPERPIPE_STEREO_SGIX", 0x00000003)
set_enum("GLX_HYPERPIPE_PIXEL_AVERAGE_SGIX", 0x00000004)
set_enum("GLX_HYPERPIPE_ID_SGIX", 0x8030)
#### GLX_SGIX_PBUFFER ####
def init_glx_sgix_pbuffer():
# set_func('glXCreateGLXPbufferSGIX', t.GLXPbufferSGIX, (ct.POINTER(t.Display), t.GLXFBConfigSGIX, t.UINT, t.UINT, ct.POINTER(t.INT)))
# set_func('glXDestroyGLXPbufferSGIX', t.void, (ct.POINTER(t.Display), t.GLXPbufferSGIX))
# set_func('glXQueryGLXPbufferSGIX', t.INT, (ct.POINTER(t.Display), t.GLXPbufferSGIX, t.INT, ct.POINTER(t.UINT)))
# set_func('glXSelectEventSGIX', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.ULONG))
# set_func('glXGetSelectedEventSGIX', t.void, (ct.POINTER(t.Display), t.GLXDrawable, ct.POINTER(t.ULONG)))
set_enum("GLX_PBUFFER_BIT_SGIX", 0x00000004)
set_enum("GLX_BUFFER_CLOBBER_MASK_SGIX", 0x08000000)
set_enum("GLX_FRONT_LEFT_BUFFER_BIT_SGIX", 0x00000001)
set_enum("GLX_FRONT_RIGHT_BUFFER_BIT_SGIX", 0x00000002)
set_enum("GLX_BACK_LEFT_BUFFER_BIT_SGIX", 0x00000004)
set_enum("GLX_BACK_RIGHT_BUFFER_BIT_SGIX", 0x00000008)
set_enum("GLX_AUX_BUFFERS_BIT_SGIX", 0x00000010)
set_enum("GLX_DEPTH_BUFFER_BIT_SGIX", 0x00000020)
set_enum("GLX_STENCIL_BUFFER_BIT_SGIX", 0x00000040)
set_enum("GLX_ACCUM_BUFFER_BIT_SGIX", 0x00000080)
set_enum("GLX_SAMPLE_BUFFERS_BIT_SGIX", 0x00000100)
set_enum("GLX_MAX_PBUFFER_WIDTH_SGIX", 0x8016)
set_enum("GLX_MAX_PBUFFER_HEIGHT_SGIX", 0x8017)
set_enum("GLX_MAX_PBUFFER_PIXELS_SGIX", 0x8018)
set_enum("GLX_OPTIMAL_PBUFFER_WIDTH_SGIX", 0x8019)
set_enum("GLX_OPTIMAL_PBUFFER_HEIGHT_SGIX", 0x801A)
set_enum("GLX_PRESERVED_CONTENTS_SGIX", 0x801B)
set_enum("GLX_LARGEST_PBUFFER_SGIX", 0x801C)
set_enum("GLX_WIDTH_SGIX", 0x801D)
set_enum("GLX_HEIGHT_SGIX", 0x801E)
set_enum("GLX_EVENT_MASK_SGIX", 0x801F)
set_enum("GLX_DAMAGED_SGIX", 0x8020)
set_enum("GLX_SAVED_SGIX", 0x8021)
set_enum("GLX_WINDOW_SGIX", 0x8022)
set_enum("GLX_PBUFFER_SGIX", 0x8023)
#### GLX_SGIX_SWAP_BARRIER ####
def init_glx_sgix_swap_barrier():
# set_func('glXBindSwapBarrierSGIX', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.INT))
# set_func('glXQueryMaxSwapBarriersSGIX', t.Bool, (ct.POINTER(t.Display), t.INT, ct.POINTER(t.INT)))
pass
#### GLX_SGIX_SWAP_GROUP ####
def init_glx_sgix_swap_group():
# set_func('glXJoinSwapGroupSGIX', t.void, (ct.POINTER(t.Display), t.GLXDrawable, t.GLXDrawable))
pass
#### GLX_SGIX_VIDEO_RESIZE ####
def init_glx_sgix_video_resize():
# set_func('glXBindChannelToWindowSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, t.Window))
# set_func('glXChannelRectSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, t.INT, t.INT, t.INT, t.INT))
# set_func('glXQueryChannelRectSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, ct.POINTER(t.INT), ct.POINTER(t.INT), ct.POINTER(t.INT), ct.POINTER(t.INT)))
# set_func('glXQueryChannelDeltasSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, ct.POINTER(t.INT), ct.POINTER(t.INT), ct.POINTER(t.INT), ct.POINTER(t.INT)))
# set_func('glXChannelRectSyncSGIX', t.INT, (ct.POINTER(t.Display), t.INT, t.INT, t.GLenum))
set_enum("GLX_SYNC_FRAME_SGIX", 0x00000000)
set_enum("GLX_SYNC_SWAP_SGIX", 0x00000001)
#### GLX_SGIX_VIDEO_SOURCE ####
def init_glx_sgix_video_source():
# set_func('glXCreateGLXVideoSourceSGIX', t.GLXVideoSourceSGIX, (ct.POINTER(t.Display), t.INT, t.VLServer, t.VLPath, t.INT, t.VLNode))
# set_func('glXDestroyGLXVideoSourceSGIX', t.void, (ct.POINTER(t.Display), t.GLXVideoSourceSGIX))
pass
#### GLX_SGIX_VISUAL_SELECT_GROUP ####
def init_glx_sgix_visual_select_group():
set_enum("GLX_VISUAL_SELECT_GROUP_SGIX", 0x8028)
#### GLX_SUN_GET_TRANSPARENT_INDEX ####
def init_glx_sun_get_transparent_index():
# set_func('glXGetTransparentIndexSUN', t.Status, (ct.POINTER(t.Display), t.Window, t.Window, ct.POINTER(t.long)))
pass
def init():
init_glx_version_1_0()
init_glx_version_1_1()
init_glx_version_1_2()
init_glx_version_1_3()
init_glx_version_1_4()
init_glx_3dfx_multisample()
init_glx_amd_gpu_association()
init_glx_arb_context_flush_control()
init_glx_arb_create_context()
init_glx_arb_create_context_profile()
init_glx_arb_create_context_robustness()
init_glx_arb_fbconfig_float()
init_glx_arb_framebuffer_srgb()
init_glx_arb_get_proc_address()
init_glx_arb_multisample()
init_glx_arb_robustness_application_isolation()
init_glx_arb_robustness_share_group_isolation()
init_glx_arb_vertex_buffer_object()
init_glx_ext_buffer_age()
init_glx_ext_create_context_es_profile()
init_glx_ext_create_context_es2_profile()
init_glx_ext_fbconfig_packed_float()
init_glx_ext_framebuffer_srgb()
init_glx_ext_import_context()
init_glx_ext_libglvnd()
init_glx_ext_stereo_tree()
init_glx_ext_swap_control()
init_glx_ext_swap_control_tear()
init_glx_ext_texture_from_pixmap()
init_glx_ext_visual_info()
init_glx_ext_visual_rating()
init_glx_intel_swap_event()
init_glx_mesa_agp_offset()
init_glx_mesa_copy_sub_buffer()
init_glx_mesa_pixmap_colormap()
init_glx_mesa_query_renderer()
init_glx_mesa_release_buffers()
init_glx_mesa_set_3dfx_mode()
init_glx_nv_copy_buffer()
init_glx_nv_copy_image()
init_glx_nv_delay_before_swap()
init_glx_nv_float_buffer()
init_glx_nv_multisample_coverage()
init_glx_nv_present_video()
init_glx_nv_robustness_video_memory_purge()
init_glx_nv_swap_group()
init_glx_nv_video_capture()
init_glx_nv_video_out()
init_glx_oml_swap_method()
init_glx_oml_sync_control()
init_glx_sgi_cushion()
init_glx_sgi_make_current_read()
init_glx_sgi_swap_control()
init_glx_sgi_video_sync()
init_glx_sgis_blended_overlay()
init_glx_sgis_multisample()
init_glx_sgis_shared_multisample()
init_glx_sgix_dmbuffer()
init_glx_sgix_fbconfig()
init_glx_sgix_hyperpipe()
init_glx_sgix_pbuffer()
init_glx_sgix_swap_barrier()
init_glx_sgix_swap_group()
init_glx_sgix_video_resize()
init_glx_sgix_video_source()
init_glx_sgix_visual_select_group()
init_glx_sun_get_transparent_index()
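# Usage sketch (assumption, not part of the generated bindings): a loader would
# call init() once, after the ctypes type module `t` and the set_func/set_enum
# helpers used above are available, to register every GLX entry point and enum.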
|
bsd-2-clause
|
lra/mackup
|
setup.py
|
1
|
1084
|
"""Setup file to automate the install of Mackup in the Python environment."""
from setuptools import setup
from mackup.constants import VERSION
setup(
name="mackup",
version=VERSION,
author="Laurent Raufaste",
author_email="[email protected]",
url="https://github.com/lra/mackup",
description="Keep your application settings in sync (macOS/Linux)",
keywords="configuration config dotfiles sync backup dropbox gdrive box",
license="GPLv3",
packages=["mackup"],
install_requires=["docopt", "six"],
entry_points={"console_scripts": ["mackup=mackup.main:main"]},
package_data={"mackup": ["applications/*.cfg"]},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
(
"License :: OSI Approved :: "
"GNU General Public License v3 or later (GPLv3+)"
),
"Natural Language :: English",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Utilities",
],
)
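# Usage sketch (assumption, not part of the original file): installing with
#   pip install .
# exposes the `mackup` console script declared in entry_points above.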
|
gpl-3.0
|
diydrones/ardupilot
|
Tools/scripts/build_binaries_history.py
|
18
|
3381
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sqlite3
class BuildBinariesHistory():
def __init__(self, db_filepath):
self.db_filepath = db_filepath
self.assure_db_present()
def progress(self, msg):
print("BBHIST: %s" % msg)
def conn(self):
return sqlite3.connect(self.db_filepath)
def create_schema(self, c):
'''create our tables and whatnot'''
schema_version = 1
c.execute("create table version (version integer)")
c.execute("insert into version (version) values (?)", (schema_version,))
# at some stage we should probably directly associate build with runs....
c.execute("create table build (hash text, tag text, vehicle text, board text, "
"frame text, text integer, data integer, bss integer, start_time real, duration real)")
c.execute("create table run (hash text, tag text, start_time real, duration real)")
c.commit()
def sizes_for_file(self, filepath):
cmd = "size %s" % (filepath,)
stuff = os.popen(cmd).read()
lines = stuff.split("\n")
sizes = lines[1].split("\t")
text = int(sizes[0])
data = int(sizes[1])
bss = int(sizes[2])
self.progress("Binary size of %s:" % filepath)
self.progress("text=%u" % text)
self.progress("data=%u" % data)
self.progress("bss=%u" % bss)
return (text, data, bss)
def assure_db_present(self):
c = self.conn()
need_schema_create = False
try:
version_cursor = c.execute("select version from version")
except sqlite3.OperationalError as e:
if "no such table" in str(e): # FIXME: do better here? what's in "e"?
print("need schema create")
need_schema_create = True
if need_schema_create:
self.create_schema(c)
version_cursor = c.execute("select version from version")
version_results = version_cursor.fetchall()
if len(version_results) == 0:
raise IOError("No version number?")
if len(version_results) > 1:
raise IOError("More than one version result?")
first = version_results[0]
want_version = 1
got_version = first[0]
if got_version != want_version:
raise IOError("Bad version number (want=%u got=%u" %
(want_version, got_version))
self.progress("Got history version %u" % got_version)
def record_build(self, hash, tag, vehicle, board, frame, bare_path, start_time, duration):
if bare_path is None:
(text, data, bss) = (None, None, None)
else:
(text, data, bss) = self.sizes_for_file(bare_path)
c = self.conn()
c.execute("replace into build (hash, tag, vehicle, board, frame, text, data, bss, start_time, duration) "
"values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(hash, tag, vehicle, board, frame, text, data, bss, start_time, duration))
c.commit()
def record_run(self, hash, tag, start_time, duration):
c = self.conn()
c.execute("replace into run (hash, tag, start_time, duration) "
"values (?, ?, ?, ?)",
(hash, tag, start_time, duration))
c.commit()
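
# Minimal usage sketch (assumption, not part of the original module); the hash,
# tag, vehicle/board/frame names and db path below are made up for illustration.
if __name__ == '__main__':
    import time
    history = BuildBinariesHistory("/tmp/build_binaries_history.db")
    now = time.time()
    # bare_path=None skips the external `size` invocation and stores NULL sizes
    history.record_build("deadbeef", "latest", "Copter", "Pixhawk1", "quad",
                         None, now, 120.0)
    history.record_run("deadbeef", "latest", now, 300.0)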
|
gpl-3.0
|
alexandrucoman/labs
|
python/solutii/vlad_cristia_avram/from_icao.py
|
5
|
1254
|
"""Rezolvarea problemei from_icao"""
from __future__ import print_function
import os
ICAO = {
'a': 'alfa', 'b': 'bravo', 'c': 'charlie', 'd': 'delta', 'e': 'echo',
'f': 'foxtrot', 'g': 'golf', 'h': 'hotel', 'i': 'india', 'j': 'juliett',
'k': 'kilo', 'l': 'lima', 'm': 'mike', 'n': 'november', 'o': 'oscar',
'p': 'papa', 'q': 'quebec', 'r': 'romeo', 's': 'sierra', 't': 'tango',
'u': 'uniform', 'v': 'victor', 'w': 'whiskey', 'x': 'x-ray', 'y': 'yankee',
'z': 'zulu'
}
def din_icao(mesaj):
"""Functia de traducere"""
try:
fisier = open(mesaj, "r")
mesaj = fisier.read()
fisier.close()
except IOError:
print("Nu exista fisierul din care doriti sa faceti traducerea.")
return
if mesaj == "":
print("Nu exista niciun text de tradus in fisierul dorit.")
for linie in mesaj.splitlines():
for i in linie.split():
            if ICAO.get(i[0]) == i:  # .get avoids a KeyError on non-letter words
print(i[0], end="")
else:
print()
os.system('cls' if os.name == 'nt' else 'clear')
print("Fisierul contine cuvinte ce nu sunt codate ICAO")
return
print()
if __name__ == "__main__":
din_icao("mesaj.icao")
|
mit
|
atruberg/django-custom
|
django/conf/locale/ko/formats.py
|
118
|
2323
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y년 n월 j일'
TIME_FORMAT = 'A g:i:s'
DATETIME_FORMAT = 'Y년 n월 j일 g:i:s A'
YEAR_MONTH_FORMAT = 'Y년 F월'
MONTH_DAY_FORMAT = 'F월 j일'
SHORT_DATE_FORMAT = 'Y-n-j.'
SHORT_DATETIME_FORMAT = 'Y-n-j H:i'
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
# '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
# '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
# '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
# '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
'%Y년 %m월 %d일', # '2006년 10월 25일', with localized suffix.
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M:%S.%f', # '14:30:59.000200'
'%H:%M', # '14:30'
'%H시 %M분 %S초', # '14시 30분 59초'
'%H시 %M분', # '14시 30분'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
'%Y년 %m월 %d일 %H시 %M분 %S초', # '2006년 10월 25일 14시 30분 59초'
'%Y년 %m월 %d일 %H시 %M분', # '2006년 10월 25일 14시 30분'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
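# Usage sketch (assumption, not part of Django): input formats feed strptime-style
# parsing, so the localized pattern matches its literal suffixes, e.g.
#   datetime.datetime.strptime('2006년 10월 25일', '%Y년 %m월 %d일')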
|
bsd-3-clause
|
marrow/WebCore
|
web/core/context.py
|
1
|
4200
|
# encoding: utf-8
"""A `MutableMapping` subclass for use as a request-local context object."""
# ## Imports
from __future__ import unicode_literals
from collections import MutableMapping
# ## Mapping Class
class Context(MutableMapping):
"""An attribute access dictionary, of a kind.
This utility class is used to cooperatively construct the ApplicationContext (and subsequent RequestContext)
from the contributions of multiple extensions. The concept of "promotion to a class" is needed in order to enable
the use of descriptor protocol attributes; without promotion the protocol would not be utilized.
"""
# M-Morty! We're, *belch*, gonna have to go in deep, Morty! Elbow deep!
def _promote(self, name, instantiate=True):
"""Create a new subclass of Context which incorporates instance attributes and new descriptors.
This promotes an instance and its instance attributes up to being a class with class attributes, then
returns an instance of that class.
"""
metaclass = type(self.__class__)
contents = self.__dict__.copy()
cls = metaclass(str(name), (self.__class__, ), contents)
if instantiate:
return cls()
return cls
def __init__(self, **kw):
"""Construct a new Context instance.
All keyword arguments are applied to the instance as attributes through direct assignment to `__dict__`.
"""
self.__dict__.update(kw)
super(Context, self).__init__()
def __len__(self):
"""Get a list of the public data attributes."""
return len([i for i in (set(dir(self)) - self._STANDARD_ATTRS) if i[0] != '_'])
def __iter__(self):
"""Iterate all valid (public) attributes/keys."""
return (i for i in (set(dir(self)) - self._STANDARD_ATTRS) if i[0] != '_')
def __getitem__(self, name):
"""Retrieve an attribute through dictionary access."""
try:
return getattr(self, name)
except AttributeError:
pass
# We do this here to avoid Python 3's nested exception support.
raise KeyError(name)
def __setitem__(self, name, value):
"""Assign an attribute through dictionary access."""
setattr(self, name, value)
def __delitem__(self, name):
"""Delete an attribute through dictionary access."""
try:
return delattr(self, name)
except AttributeError:
pass
# We do this here to avoid Python 3's nested exception support.
raise KeyError(name)
# We generally want to exclude "default object attributes" from the context's list of attributes.
# This auto-detects the basic set of them for exclusion from iteration in the above methods.
Context._STANDARD_ATTRS = set(dir(Context()))
class ContextGroup(Context):
"""A managed group of related context additions.
This proxies most attribute access through to the "default" group member.
Because of the possibility of conflicts, all attributes are accessible through dict-like subscripting.
Register new group members through dict-like subscript assignment as attribute assignment is passed through to the
default handler if assigned.
"""
default = None
def __init__(self, default=None, **kw):
if default is not None:
self.default = default
default.__name__ = 'default'
for name in kw:
kw[name].__name__ = name
self.__dict__[name] = kw[name]
def __repr__(self):
return "{0.__class__.__name__}({1})".format(self, ', '.join(sorted(self)))
def __len__(self):
return len(self.__dict__)
def __iter__(self):
return iter(set(dir(self)) - self._STANDARD_ATTRS)
def __getitem__(self, name):
try:
return getattr(self, name)
except AttributeError:
pass
        raise KeyError(name)
def __setitem__(self, name, value):
self.__dict__[name] = value
def __delitem__(self, name):
del self.__dict__[name]
def __getattr__(self, name):
if self.default is None:
            raise AttributeError(name)
return getattr(self.default, name)
def __setattr__(self, name, value):
if self.default is not None:
return setattr(self.default, name, value)
self.__dict__[name] = value
def __delattr__(self, name):
if self.default is not None:
return delattr(self.default, name)
self.__dict__[name] = None
del self.__dict__[name]
ContextGroup._STANDARD_ATTRS = set(dir(ContextGroup()))
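
# Minimal usage sketch (assumption, not part of the original module).
if __name__ == '__main__':
    ctx = Context(user='alice', theme='dark')
    assert ctx['user'] == 'alice'  # dict-style access resolves to the attribute
    ctx.locale = 'en'  # attribute assignment is equally visible as a key
    assert sorted(ctx) == ['locale', 'theme', 'user']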
|
mit
|
wolfskaempf/ga_statistics
|
lib/python2.7/site-packages/django/core/validators.py
|
50
|
11700
|
from __future__ import unicode_literals
import re
from django.core.exceptions import ValidationError
from django.utils import six
from django.utils.deconstruct import deconstructible
from django.utils.encoding import force_text
from django.utils.ipv6 import is_valid_ipv6_address
from django.utils.six.moves.urllib.parse import urlsplit, urlunsplit
from django.utils.translation import ugettext_lazy as _, ungettext_lazy
# These values, if given to validate(), will trigger the self.required check.
EMPTY_VALUES = (None, '', [], (), {})
@deconstructible
class RegexValidator(object):
regex = ''
message = _('Enter a valid value.')
code = 'invalid'
inverse_match = False
flags = 0
def __init__(self, regex=None, message=None, code=None, inverse_match=None, flags=None):
if regex is not None:
self.regex = regex
if message is not None:
self.message = message
if code is not None:
self.code = code
if inverse_match is not None:
self.inverse_match = inverse_match
if flags is not None:
self.flags = flags
if self.flags and not isinstance(self.regex, six.string_types):
raise TypeError("If the flags are set, regex must be a regular expression string.")
# Compile the regex if it was not passed pre-compiled.
if isinstance(self.regex, six.string_types):
self.regex = re.compile(self.regex, self.flags)
def __call__(self, value):
"""
Validates that the input matches the regular expression
if inverse_match is False, otherwise raises ValidationError.
"""
if not (self.inverse_match is not bool(self.regex.search(
force_text(value)))):
raise ValidationError(self.message, code=self.code)
def __eq__(self, other):
return (
isinstance(other, RegexValidator) and
self.regex.pattern == other.regex.pattern and
self.regex.flags == other.regex.flags and
(self.message == other.message) and
(self.code == other.code) and
(self.inverse_match == other.inverse_match)
)
def __ne__(self, other):
return not (self == other)
@deconstructible
class URLValidator(RegexValidator):
ul = '\u00a1-\uffff' # unicode letters range (must be a unicode string, not a raw string)
# IP patterns
ipv4_re = r'(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)(?:\.(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}'
ipv6_re = r'\[[0-9a-f:\.]+\]' # (simple regex, validated later)
# Host patterns
hostname_re = r'[a-z' + ul + r'0-9](?:[a-z' + ul + r'0-9-]*[a-z' + ul + r'0-9])?'
domain_re = r'(?:\.[a-z' + ul + r'0-9]+(?:[a-z' + ul + r'0-9-]*[a-z' + ul + r'0-9]+)*)*'
tld_re = r'\.[a-z' + ul + r']{2,}\.?'
host_re = '(' + hostname_re + domain_re + tld_re + '|localhost)'
regex = re.compile(
r'^(?:[a-z0-9\.\-]*)://' # scheme is validated separately
r'(?:\S+(?::\S*)?@)?' # user:pass authentication
r'(?:' + ipv4_re + '|' + ipv6_re + '|' + host_re + ')'
r'(?::\d{2,5})?' # port
r'(?:[/?#][^\s]*)?' # resource path
r'$', re.IGNORECASE)
message = _('Enter a valid URL.')
schemes = ['http', 'https', 'ftp', 'ftps']
def __init__(self, schemes=None, **kwargs):
super(URLValidator, self).__init__(**kwargs)
if schemes is not None:
self.schemes = schemes
def __call__(self, value):
value = force_text(value)
# Check first if the scheme is valid
scheme = value.split('://')[0].lower()
if scheme not in self.schemes:
raise ValidationError(self.message, code=self.code)
# Then check full URL
try:
super(URLValidator, self).__call__(value)
except ValidationError as e:
# Trivial case failed. Try for possible IDN domain
if value:
scheme, netloc, path, query, fragment = urlsplit(value)
try:
netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE
except UnicodeError: # invalid domain part
raise e
url = urlunsplit((scheme, netloc, path, query, fragment))
super(URLValidator, self).__call__(url)
else:
raise
else:
# Now verify IPv6 in the netloc part
host_match = re.search(r'^\[(.+)\](?::\d{2,5})?$', urlsplit(value).netloc)
if host_match:
potential_ip = host_match.groups()[0]
try:
validate_ipv6_address(potential_ip)
except ValidationError:
raise ValidationError(self.message, code=self.code)
url = value
def validate_integer(value):
try:
int(value)
except (ValueError, TypeError):
raise ValidationError(_('Enter a valid integer.'), code='invalid')
@deconstructible
class EmailValidator(object):
message = _('Enter a valid email address.')
code = 'invalid'
user_regex = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*$" # dot-atom
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"$)', # quoted-string
re.IGNORECASE)
domain_regex = re.compile(
# max length of the domain is 249: 254 (max email length) minus one
# period, two characters for the TLD, @ sign, & one character before @.
r'(?:[A-Z0-9](?:[A-Z0-9-]{0,247}[A-Z0-9])?\.)+(?:[A-Z]{2,6}|[A-Z0-9-]{2,}(?<!-))$',
re.IGNORECASE)
literal_regex = re.compile(
# literal form, ipv4 or ipv6 address (SMTP 4.1.3)
r'\[([A-f0-9:\.]+)\]$',
re.IGNORECASE)
domain_whitelist = ['localhost']
def __init__(self, message=None, code=None, whitelist=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
if whitelist is not None:
self.domain_whitelist = whitelist
def __call__(self, value):
value = force_text(value)
if not value or '@' not in value:
raise ValidationError(self.message, code=self.code)
user_part, domain_part = value.rsplit('@', 1)
if not self.user_regex.match(user_part):
raise ValidationError(self.message, code=self.code)
if (domain_part not in self.domain_whitelist and
not self.validate_domain_part(domain_part)):
# Try for possible IDN domain-part
try:
domain_part = domain_part.encode('idna').decode('ascii')
if self.validate_domain_part(domain_part):
return
except UnicodeError:
pass
raise ValidationError(self.message, code=self.code)
def validate_domain_part(self, domain_part):
if self.domain_regex.match(domain_part):
return True
literal_match = self.literal_regex.match(domain_part)
if literal_match:
ip_address = literal_match.group(1)
try:
validate_ipv46_address(ip_address)
return True
except ValidationError:
pass
return False
def __eq__(self, other):
return (
isinstance(other, EmailValidator) and
(self.domain_whitelist == other.domain_whitelist) and
(self.message == other.message) and
(self.code == other.code)
)
validate_email = EmailValidator()
slug_re = re.compile(r'^[-a-zA-Z0-9_]+$')
validate_slug = RegexValidator(
slug_re,
_("Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."),
'invalid'
)
ipv4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
validate_ipv4_address = RegexValidator(ipv4_re, _('Enter a valid IPv4 address.'), 'invalid')
def validate_ipv6_address(value):
if not is_valid_ipv6_address(value):
raise ValidationError(_('Enter a valid IPv6 address.'), code='invalid')
def validate_ipv46_address(value):
try:
validate_ipv4_address(value)
except ValidationError:
try:
validate_ipv6_address(value)
except ValidationError:
raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid')
ip_address_validator_map = {
'both': ([validate_ipv46_address], _('Enter a valid IPv4 or IPv6 address.')),
'ipv4': ([validate_ipv4_address], _('Enter a valid IPv4 address.')),
'ipv6': ([validate_ipv6_address], _('Enter a valid IPv6 address.')),
}
def ip_address_validators(protocol, unpack_ipv4):
"""
Depending on the given parameters returns the appropriate validators for
the GenericIPAddressField.
This code is here, because it is exactly the same for the model and the form field.
"""
if protocol != 'both' and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'")
try:
return ip_address_validator_map[protocol.lower()]
except KeyError:
raise ValueError("The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map)))
comma_separated_int_list_re = re.compile(r'^[\d,]+$')
validate_comma_separated_integer_list = RegexValidator(
comma_separated_int_list_re,
_('Enter only digits separated by commas.'),
'invalid'
)
@deconstructible
class BaseValidator(object):
compare = lambda self, a, b: a is not b
clean = lambda self, x: x
message = _('Ensure this value is %(limit_value)s (it is %(show_value)s).')
code = 'limit_value'
def __init__(self, limit_value, message=None):
self.limit_value = limit_value
if message:
self.message = message
def __call__(self, value):
cleaned = self.clean(value)
params = {'limit_value': self.limit_value, 'show_value': cleaned, 'value': value}
if self.compare(cleaned, self.limit_value):
raise ValidationError(self.message, code=self.code, params=params)
def __eq__(self, other):
return (
isinstance(other, self.__class__) and
(self.limit_value == other.limit_value)
and (self.message == other.message)
and (self.code == other.code)
)
@deconstructible
class MaxValueValidator(BaseValidator):
compare = lambda self, a, b: a > b
message = _('Ensure this value is less than or equal to %(limit_value)s.')
code = 'max_value'
@deconstructible
class MinValueValidator(BaseValidator):
compare = lambda self, a, b: a < b
message = _('Ensure this value is greater than or equal to %(limit_value)s.')
code = 'min_value'
@deconstructible
class MinLengthValidator(BaseValidator):
compare = lambda self, a, b: a < b
clean = lambda self, x: len(x)
message = ungettext_lazy(
'Ensure this value has at least %(limit_value)d character (it has %(show_value)d).',
'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).',
'limit_value')
code = 'min_length'
@deconstructible
class MaxLengthValidator(BaseValidator):
compare = lambda self, a, b: a > b
clean = lambda self, x: len(x)
message = ungettext_lazy(
'Ensure this value has at most %(limit_value)d character (it has %(show_value)d).',
'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).',
'limit_value')
code = 'max_length'
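
# Minimal usage sketch (assumption, not part of Django itself): a validator raises
# ValidationError for bad input and returns None for good input.
if __name__ == '__main__':
    validate_ipv4_address('192.168.0.1')  # passes silently
    try:
        validate_email('not-an-email')
    except ValidationError:
        print('rejected as expected')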
|
mit
|
witcxc/scipy
|
scipy/weave/tests/test_inline_tools.py
|
91
|
1523
|
from __future__ import absolute_import, print_function
from numpy.testing import TestCase, assert_, run_module_suite
from scipy.weave import inline_tools
from weave_test_utils import dec
class TestInline(TestCase):
"""These are long running tests...
Would be useful to benchmark these things somehow.
"""
@dec.slow
def test_exceptions(self):
a = 3
code = """
if (a < 2)
throw_error(PyExc_ValueError,
"the variable 'a' should not be less than 2");
else
return_val = PyInt_FromLong(a+1);
"""
result = inline_tools.inline(code,['a'])
assert_(result == 4)
## Unfortunately, it is not always possible to catch distutils compiler
## errors, since SystemExit is used. Until that is fixed, these tests
## cannot be run in the same process as the test suite.
## try:
## a = 1
## result = inline_tools.inline(code,['a'])
## assert_(1) # should've thrown a ValueError
## except ValueError:
## pass
## from distutils.errors import DistutilsError, CompileError
## try:
## a = 'string'
## result = inline_tools.inline(code,['a'])
## assert_(1) # should've gotten an error
## except:
## # ?CompileError is the error reported, but catching it doesn't work
## pass
if __name__ == "__main__":
run_module_suite()
|
bsd-3-clause
|
splav/servo
|
tests/wpt/web-platform-tests/tools/third_party/h2/examples/eventlet/eventlet-server.py
|
25
|
2846
|
# -*- coding: utf-8 -*-
"""
eventlet-server.py
~~~~~~~~~~~~~~~~~~
A fully-functional HTTP/2 server written for Eventlet.
"""
import collections
import json
import eventlet
from eventlet.green.OpenSSL import SSL, crypto
from h2.config import H2Configuration
from h2.connection import H2Connection
from h2.events import RequestReceived, DataReceived
class ConnectionManager(object):
"""
An object that manages a single HTTP/2 connection.
"""
def __init__(self, sock):
config = H2Configuration(client_side=False)
self.sock = sock
self.conn = H2Connection(config=config)
def run_forever(self):
self.conn.initiate_connection()
self.sock.sendall(self.conn.data_to_send())
while True:
data = self.sock.recv(65535)
if not data:
break
events = self.conn.receive_data(data)
for event in events:
if isinstance(event, RequestReceived):
self.request_received(event.headers, event.stream_id)
elif isinstance(event, DataReceived):
self.conn.reset_stream(event.stream_id)
self.sock.sendall(self.conn.data_to_send())
def request_received(self, headers, stream_id):
headers = collections.OrderedDict(headers)
data = json.dumps({'headers': headers}, indent=4).encode('utf-8')
response_headers = (
(':status', '200'),
('content-type', 'application/json'),
            ('content-length', str(len(data))),
('server', 'eventlet-h2'),
)
self.conn.send_headers(stream_id, response_headers)
self.conn.send_data(stream_id, data, end_stream=True)
def alpn_callback(conn, protos):
if b'h2' in protos:
return b'h2'
raise RuntimeError("No acceptable protocol offered!")
def npn_advertise_cb(conn):
return [b'h2']
# Let's set up SSL. This is a lot of work in PyOpenSSL.
options = (
SSL.OP_NO_COMPRESSION |
SSL.OP_NO_SSLv2 |
SSL.OP_NO_SSLv3 |
SSL.OP_NO_TLSv1 |
SSL.OP_NO_TLSv1_1
)
context = SSL.Context(SSL.SSLv23_METHOD)
context.set_options(options)
context.set_verify(SSL.VERIFY_NONE, lambda *args: True)
context.use_privatekey_file('server.key')
context.use_certificate_file('server.crt')
context.set_npn_advertise_callback(npn_advertise_cb)
context.set_alpn_select_callback(alpn_callback)
context.set_cipher_list(
"ECDHE+AESGCM"
)
context.set_tmp_ecdh(crypto.get_elliptic_curve(u'prime256v1'))
server = eventlet.listen(('0.0.0.0', 443))
server = SSL.Connection(context, server)
pool = eventlet.GreenPool()
while True:
try:
new_sock, _ = server.accept()
manager = ConnectionManager(new_sock)
pool.spawn_n(manager.run_forever)
except (SystemExit, KeyboardInterrupt):
break
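# Usage sketch (assumption): generate a self-signed server.key/server.crt, run the
# script with privileges for port 443, then exercise it with an HTTP/2 client, e.g.
#   curl --http2 -k https://localhost/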
|
mpl-2.0
|
ashray/VTK-EVM
|
ThirdParty/ZopeInterface/zope/interface/registry.py
|
40
|
18877
|
##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Basic components support
"""
try:
from zope.event import notify
except ImportError: #pragma NO COVER
def notify(*arg, **kw): pass
from zope.interface.interfaces import ISpecification
from zope.interface.interfaces import ComponentLookupError
from zope.interface.interfaces import IAdapterRegistration
from zope.interface.interfaces import IComponents
from zope.interface.interfaces import IHandlerRegistration
from zope.interface.interfaces import ISubscriptionAdapterRegistration
from zope.interface.interfaces import IUtilityRegistration
from zope.interface.interfaces import Registered
from zope.interface.interfaces import Unregistered
from zope.interface.interface import Interface
from zope.interface.declarations import implementedBy
from zope.interface.declarations import implementer
from zope.interface.declarations import implementer_only
from zope.interface.declarations import providedBy
from zope.interface.adapter import AdapterRegistry
from zope.interface._compat import _u
from zope.interface._compat import CLASS_TYPES
from zope.interface._compat import STRING_TYPES
@implementer(IComponents)
class Components(object):
def __init__(self, name='', bases=()):
assert isinstance(name, STRING_TYPES)
self.__name__ = name
self._init_registries()
self._init_registrations()
self.__bases__ = tuple(bases)
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.__name__)
def _init_registries(self):
self.adapters = AdapterRegistry()
self.utilities = AdapterRegistry()
def _init_registrations(self):
self._utility_registrations = {}
self._adapter_registrations = {}
self._subscription_registrations = []
self._handler_registrations = []
def _getBases(self):
# Subclasses might override
return self.__dict__.get('__bases__', ())
def _setBases(self, bases):
# Subclasses might override
self.adapters.__bases__ = tuple([
base.adapters for base in bases])
self.utilities.__bases__ = tuple([
base.utilities for base in bases])
self.__dict__['__bases__'] = tuple(bases)
__bases__ = property(
lambda self: self._getBases(),
lambda self, bases: self._setBases(bases),
)
def registerUtility(self, component=None, provided=None, name=_u(''),
info=_u(''), event=True, factory=None):
if factory:
if component:
raise TypeError("Can't specify factory and component.")
component = factory()
if provided is None:
provided = _getUtilityProvided(component)
if name == _u(''):
name = _getName(component)
reg = self._utility_registrations.get((provided, name))
if reg is not None:
if reg[:2] == (component, info):
# already registered
return
self.unregisterUtility(reg[0], provided, name)
subscribed = False
for ((p, _), data) in iter(self._utility_registrations.items()):
if p == provided and data[0] == component:
subscribed = True
break
self._utility_registrations[(provided, name)] = component, info, factory
self.utilities.register((), provided, name, component)
if not subscribed:
self.utilities.subscribe((), provided, component)
if event:
notify(Registered(
UtilityRegistration(self, provided, name, component, info,
factory)
))
def unregisterUtility(self, component=None, provided=None, name=_u(''),
factory=None):
if factory:
if component:
raise TypeError("Can't specify factory and component.")
component = factory()
if provided is None:
if component is None:
raise TypeError("Must specify one of component, factory and "
"provided")
provided = _getUtilityProvided(component)
old = self._utility_registrations.get((provided, name))
if (old is None) or ((component is not None) and
(component != old[0])):
return False
if component is None:
component = old[0]
# Note that component is now the old thing registered
del self._utility_registrations[(provided, name)]
self.utilities.unregister((), provided, name)
subscribed = False
for ((p, _), data) in iter(self._utility_registrations.items()):
if p == provided and data[0] == component:
subscribed = True
break
if not subscribed:
self.utilities.unsubscribe((), provided, component)
notify(Unregistered(
UtilityRegistration(self, provided, name, component, *old[1:])
))
return True
def registeredUtilities(self):
for ((provided, name), data
) in iter(self._utility_registrations.items()):
yield UtilityRegistration(self, provided, name, *data)
def queryUtility(self, provided, name=_u(''), default=None):
return self.utilities.lookup((), provided, name, default)
def getUtility(self, provided, name=_u('')):
utility = self.utilities.lookup((), provided, name)
if utility is None:
raise ComponentLookupError(provided, name)
return utility
def getUtilitiesFor(self, interface):
for name, utility in self.utilities.lookupAll((), interface):
yield name, utility
def getAllUtilitiesRegisteredFor(self, interface):
return self.utilities.subscriptions((), interface)
def registerAdapter(self, factory, required=None, provided=None,
name=_u(''), info=_u(''), event=True):
if provided is None:
provided = _getAdapterProvided(factory)
required = _getAdapterRequired(factory, required)
if name == _u(''):
name = _getName(factory)
self._adapter_registrations[(required, provided, name)
] = factory, info
self.adapters.register(required, provided, name, factory)
if event:
notify(Registered(
AdapterRegistration(self, required, provided, name,
factory, info)
))
def unregisterAdapter(self, factory=None,
required=None, provided=None, name=_u(''),
):
if provided is None:
if factory is None:
raise TypeError("Must specify one of factory and provided")
provided = _getAdapterProvided(factory)
if (required is None) and (factory is None):
raise TypeError("Must specify one of factory and required")
required = _getAdapterRequired(factory, required)
old = self._adapter_registrations.get((required, provided, name))
if (old is None) or ((factory is not None) and
(factory != old[0])):
return False
del self._adapter_registrations[(required, provided, name)]
self.adapters.unregister(required, provided, name)
notify(Unregistered(
AdapterRegistration(self, required, provided, name,
*old)
))
return True
def registeredAdapters(self):
for ((required, provided, name), (component, info)
) in iter(self._adapter_registrations.items()):
yield AdapterRegistration(self, required, provided, name,
component, info)
def queryAdapter(self, object, interface, name=_u(''), default=None):
return self.adapters.queryAdapter(object, interface, name, default)
def getAdapter(self, object, interface, name=_u('')):
adapter = self.adapters.queryAdapter(object, interface, name)
if adapter is None:
raise ComponentLookupError(object, interface, name)
return adapter
def queryMultiAdapter(self, objects, interface, name=_u(''),
default=None):
return self.adapters.queryMultiAdapter(
objects, interface, name, default)
def getMultiAdapter(self, objects, interface, name=_u('')):
adapter = self.adapters.queryMultiAdapter(objects, interface, name)
if adapter is None:
raise ComponentLookupError(objects, interface, name)
return adapter
def getAdapters(self, objects, provided):
for name, factory in self.adapters.lookupAll(
list(map(providedBy, objects)),
provided):
adapter = factory(*objects)
if adapter is not None:
yield name, adapter
def registerSubscriptionAdapter(self,
factory, required=None, provided=None,
name=_u(''), info=_u(''),
event=True):
if name:
raise TypeError("Named subscribers are not yet supported")
if provided is None:
provided = _getAdapterProvided(factory)
required = _getAdapterRequired(factory, required)
self._subscription_registrations.append(
(required, provided, name, factory, info)
)
self.adapters.subscribe(required, provided, factory)
if event:
notify(Registered(
SubscriptionRegistration(self, required, provided, name,
factory, info)
))
def registeredSubscriptionAdapters(self):
for data in self._subscription_registrations:
yield SubscriptionRegistration(self, *data)
def unregisterSubscriptionAdapter(self, factory=None,
required=None, provided=None, name=_u(''),
):
if name:
raise TypeError("Named subscribers are not yet supported")
if provided is None:
if factory is None:
raise TypeError("Must specify one of factory and provided")
provided = _getAdapterProvided(factory)
if (required is None) and (factory is None):
raise TypeError("Must specify one of factory and required")
required = _getAdapterRequired(factory, required)
if factory is None:
new = [(r, p, n, f, i)
for (r, p, n, f, i)
in self._subscription_registrations
if not (r == required and p == provided)
]
else:
new = [(r, p, n, f, i)
for (r, p, n, f, i)
in self._subscription_registrations
if not (r == required and p == provided and f == factory)
]
if len(new) == len(self._subscription_registrations):
return False
self._subscription_registrations[:] = new
self.adapters.unsubscribe(required, provided, factory)
notify(Unregistered(
SubscriptionRegistration(self, required, provided, name,
factory, '')
))
return True
def subscribers(self, objects, provided):
return self.adapters.subscribers(objects, provided)
def registerHandler(self,
factory, required=None,
name=_u(''), info=_u(''),
event=True):
if name:
raise TypeError("Named handlers are not yet supported")
required = _getAdapterRequired(factory, required)
self._handler_registrations.append(
(required, name, factory, info)
)
self.adapters.subscribe(required, None, factory)
if event:
notify(Registered(
HandlerRegistration(self, required, name, factory, info)
))
def registeredHandlers(self):
for data in self._handler_registrations:
yield HandlerRegistration(self, *data)
def unregisterHandler(self, factory=None, required=None, name=_u('')):
if name:
raise TypeError("Named subscribers are not yet supported")
if (required is None) and (factory is None):
raise TypeError("Must specify one of factory and required")
required = _getAdapterRequired(factory, required)
if factory is None:
new = [(r, n, f, i)
for (r, n, f, i)
in self._handler_registrations
if r != required
]
else:
new = [(r, n, f, i)
for (r, n, f, i)
in self._handler_registrations
if not (r == required and f == factory)
]
if len(new) == len(self._handler_registrations):
return False
self._handler_registrations[:] = new
self.adapters.unsubscribe(required, None, factory)
notify(Unregistered(
HandlerRegistration(self, required, name, factory, '')
))
return True
def handle(self, *objects):
self.adapters.subscribers(objects, None)
def _getName(component):
try:
return component.__component_name__
except AttributeError:
return _u('')
def _getUtilityProvided(component):
provided = list(providedBy(component))
if len(provided) == 1:
return provided[0]
raise TypeError(
"The utility doesn't provide a single interface "
"and no provided interface was specified.")
def _getAdapterProvided(factory):
provided = list(implementedBy(factory))
if len(provided) == 1:
return provided[0]
raise TypeError(
"The adapter factory doesn't implement a single interface "
"and no provided interface was specified.")
def _getAdapterRequired(factory, required):
if required is None:
try:
required = factory.__component_adapts__
except AttributeError:
raise TypeError(
"The adapter factory doesn't have a __component_adapts__ "
"attribute and no required specifications were specified"
)
elif ISpecification.providedBy(required):
raise TypeError("the required argument should be a list of "
"interfaces, not a single interface")
result = []
for r in required:
if r is None:
r = Interface
elif not ISpecification.providedBy(r):
if isinstance(r, CLASS_TYPES):
r = implementedBy(r)
else:
raise TypeError("Required specification must be a "
"specification or class."
)
result.append(r)
return tuple(result)
@implementer(IUtilityRegistration)
class UtilityRegistration(object):
def __init__(self, registry, provided, name, component, doc, factory=None):
(self.registry, self.provided, self.name, self.component, self.info,
self.factory
) = registry, provided, name, component, doc, factory
def __repr__(self):
return '%s(%r, %s, %r, %s, %r, %r)' % (
self.__class__.__name__,
self.registry,
getattr(self.provided, '__name__', None), self.name,
getattr(self.component, '__name__', repr(self.component)),
self.factory, self.info,
)
def __hash__(self):
return id(self)
def __eq__(self, other):
return repr(self) == repr(other)
def __ne__(self, other):
return repr(self) != repr(other)
def __lt__(self, other):
return repr(self) < repr(other)
def __le__(self, other):
return repr(self) <= repr(other)
def __gt__(self, other):
return repr(self) > repr(other)
def __ge__(self, other):
return repr(self) >= repr(other)
@implementer(IAdapterRegistration)
class AdapterRegistration(object):
def __init__(self, registry, required, provided, name, component, doc):
(self.registry, self.required, self.provided, self.name,
self.factory, self.info
) = registry, required, provided, name, component, doc
def __repr__(self):
return '%s(%r, %s, %s, %r, %s, %r)' % (
self.__class__.__name__,
self.registry,
'[' + ", ".join([r.__name__ for r in self.required]) + ']',
getattr(self.provided, '__name__', None), self.name,
getattr(self.factory, '__name__', repr(self.factory)), self.info,
)
def __hash__(self):
return id(self)
def __eq__(self, other):
return repr(self) == repr(other)
def __ne__(self, other):
return repr(self) != repr(other)
def __lt__(self, other):
return repr(self) < repr(other)
def __le__(self, other):
return repr(self) <= repr(other)
def __gt__(self, other):
return repr(self) > repr(other)
def __ge__(self, other):
return repr(self) >= repr(other)
@implementer_only(ISubscriptionAdapterRegistration)
class SubscriptionRegistration(AdapterRegistration):
pass
@implementer_only(IHandlerRegistration)
class HandlerRegistration(AdapterRegistration):
def __init__(self, registry, required, name, handler, doc):
(self.registry, self.required, self.name, self.handler, self.info
) = registry, required, name, handler, doc
@property
def factory(self):
return self.handler
provided = None
def __repr__(self):
return '%s(%r, %s, %r, %s, %r)' % (
self.__class__.__name__,
self.registry,
'[' + ", ".join([r.__name__ for r in self.required]) + ']',
self.name,
getattr(self.factory, '__name__', repr(self.factory)), self.info,
)
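
# Minimal usage sketch (assumption, not part of the original module).
if __name__ == '__main__':
    class IGreeter(Interface):
        """A hypothetical marker interface for this demo."""

    @implementer(IGreeter)
    class Greeter(object):
        def greet(self):
            return 'hello'

    registry = Components('demo')
    registry.registerUtility(Greeter(), IGreeter)
    assert registry.getUtility(IGreeter).greet() == 'hello'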
|
bsd-3-clause
|
chienlieu2017/it_management
|
odoo/addons/mrp_byproduct/models/mrp_subproduct.py
|
30
|
1445
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.addons import decimal_precision as dp
class MrpSubProduct(models.Model):
_name = 'mrp.subproduct'
_description = 'Byproduct'
product_id = fields.Many2one('product.product', 'Product', required=True)
product_qty = fields.Float(
'Product Qty',
default=1.0, digits=dp.get_precision('Product Unit of Measure'), required=True)
product_uom_id = fields.Many2one('product.uom', 'Unit of Measure', required=True)
bom_id = fields.Many2one('mrp.bom', 'BoM', ondelete='cascade')
operation_id = fields.Many2one('mrp.routing.workcenter', 'Produced at Operation')
@api.onchange('product_id')
def onchange_product_id(self):
""" Changes UoM if product_id changes. """
if self.product_id:
self.product_uom_id = self.product_id.uom_id.id
@api.onchange('product_uom_id')
def onchange_uom(self):
res = {}
if self.product_uom_id and self.product_id and self.product_uom_id.category_id != self.product_id.uom_id.category_id:
res['warning'] = {
'title': _('Warning'),
'message': _('The Product Unit of Measure you chose has a different category than in the product form.')
}
self.product_uom_id = self.product_id.uom_id.id
return res
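
# Usage sketch (assumption): inside an Odoo environment, a byproduct line could be
# attached to an existing BoM like so (the product/bom records are hypothetical):
#   env['mrp.subproduct'].create({
#       'product_id': product.id,
#       'product_uom_id': product.uom_id.id,
#       'product_qty': 2.0,
#       'bom_id': bom.id,
#   })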
|
gpl-3.0
|
ezequielpereira/Time-Line
|
libs/wx/lib/agw/flatmenu.py
|
2
|
170722
|
# --------------------------------------------------------------------------------- #
# FLATMENU wxPython IMPLEMENTATION
#
# Andrea Gavana, @ 03 Nov 2006
# Latest Revision: 26 Feb 2010, 21.00 GMT
#
#
# TODO List
#
# 1. Work is still in progress, so other functionalities may be added in the future;
# 2. No shadows under MAC, but it may be possible to create them using Carbon.
#
#
# For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please
# Write To Me At:
#
# [email protected]
# [email protected]
#
# Or, Obviously, To The wxPython Mailing List!!!
#
#
# End Of Comments
# --------------------------------------------------------------------------------- #
"""
FlatMenu is a generic menu implementation.
Description
===========
FlatMenu, as the name implies, is a generic menu implementation.
I tried to provide full functionality for menus, menu bars and toolbars.
FlatMenu supports the following features:
- Fires all the events (UI & Cmd);
- Check items;
- Separators;
- Enabled / Disabled menu items;
- Images on items;
- Toolbar support, with images and separators;
- Controls in toolbar (work in progress);
- Toolbar tools tooltips (done: thanks to Peter Kort);
- Accelerators for menus;
- Accelerators for menubar;
- Radio items in menus;
- Integration with AUI;
- Scrolling when menu is too big to fit the screen;
- Menu navigation with keyboard;
- Drop down arrow button to the right of the menu, it always contains the
"Customize" option, which will popup an options dialog. The dialog has the
following abilities:
(a) Ability to add/remove menus;
(b) Select different colour schemes for the menu bar / toolbar;
(c) Control various options, such as: colour for highlight menu item, draw
border around menus (classic look only);
(d) Toolbar floating appearance.
- Allows user to specify grey bitmap for disabled menus/toolbar tools;
- If no grey bitmap is provided, it generates one from the existing bitmap;
- Hidden toolbar items / menu bar items - will appear in a small popup menu
  to the right if they are hidden;
- 4 different colour schemes for the menu bar (more can easily be added);
- Scrolling is available if the menu height is greater than the screen height;
- Context menus for menu items;
- Show/hide the drop down arrow which allows the customization of FlatMenu;
- Multiple columns menu window;
- Tooltips for menus and toolbar items on a `wx.StatusBar` (if present);
- Transparency (alpha channel) for menu windows (for platforms supporting it);
- First attempt in adding controls to FlatToolbar;
- Added a MiniBar (thanks to Vladiuz);
- Added `wx.ToolBar` methods AddCheckTool/AddRadioTool (thanks to Vladiuz).
Supported Platforms
===================
FlatMenu v0.8 has been tested on the following platforms:
* Windows (Windows XP);
* Linux Ubuntu (Dapper 6.06)
v0.9.* has been tested on
* Windows (Windows XP, Vista);
Window Styles
=============
This class supports the following window styles:
========================= =========== ==================================================
Window Styles Hex Value Description
========================= =========== ==================================================
``FM_OPT_IS_LCD`` 0x1 Use this style if your computer uses a LCD screen.
``FM_OPT_MINIBAR`` 0x2 Use this if you plan to use the toolbar only.
``FM_OPT_SHOW_CUSTOMIZE`` 0x4 Show "customize link" in the `More` menu; you will need to write your own handler. See demo.
``FM_OPT_SHOW_TOOLBAR`` 0x8 Set this option if you are planning to use the toolbar.
========================= =========== ==================================================
Events Processing
=================
This class processes the following events:
================================= ==================================================
Event Name Description
================================= ==================================================
``EVT_FLAT_MENU_DISMISSED`` Used internally.
``EVT_FLAT_MENU_ITEM_MOUSE_OUT`` Fires an event when the mouse leaves a `FlatMenuItem`.
``EVT_FLAT_MENU_ITEM_MOUSE_OVER`` Fires an event when the mouse enters a `FlatMenuItem`.
``EVT_FLAT_MENU_SELECTED`` Fires the `wx.EVT_MENU` event for `FlatMenu`.
================================= ==================================================
License And Version
===================
FlatMenu is distributed under the wxPython license.
Latest Revision: Andrea Gavana @ 26 Feb 2010, 21.00 GMT
Version 0.9.5
"""
__docformat__ = "epytext"
__version__ = "0.9.5"
import wx
import math
from fmcustomizedlg import FMCustomizeDlg
from artmanager import ArtManager, DCSaver
from fmresources import *
# FlatMenu styles
FM_OPT_IS_LCD = 1
""" Use this style if your computer uses a LCD screen. """
FM_OPT_MINIBAR = 2
""" Use this if you plan to use the toolbar only. """
FM_OPT_SHOW_CUSTOMIZE = 4
""" Show "customize link" in the `More` menu, you will need to write your own handler. See demo. """
FM_OPT_SHOW_TOOLBAR = 8
""" Set this option is you are planning to use the toolbar. """
# Some checking to see if we can draw shadows behind the popup menus
# at least on Windows. *REQUIRES* Mark Hammond's win32all extensions
# and ctypes, on Windows obviously. Mac and GTK have no shadows under
# the menus, and it has been reported that shadows don't work well
# on Windows 2000 and previous.
_libimported = None
_DELAY = 5000
if wx.Platform == "__WXMSW__":
osVersion = wx.GetOsVersion()
# Shadows behind menus are supported only in XP
if osVersion[1] == 5 and osVersion[2] == 1:
try:
import win32api
import win32gui
_libimported = "MH"
except:
try:
import ctypes
_libimported = "ctypes"
except:
pass
else:
_libimported = None
# Simple hack, but I don't know how to make it work on Mac
# I don't have Mac ;-)
#if wx.Platform == "__WXMAC__":
# try:
# import ctypes
# _carbon_dll = ctypes.cdll.LoadLibrary(r'/System/Frameworks/Carbon.framework/Carbon')
# except:
# _carbon_dll = None
# FIXME: No way to get shadows on Windows with the original code...
# May anyone share some suggestion on how to make it work??
# Right now I am using win32api to create shadows behind wx.PopupWindow,
# but this will result in *all* the popup windows in an application
# to have shadows behind them, even the user defined wx.PopupWindow
# that do not derive from FlatMenu.
import wx.aui as AUI
AuiPaneInfo = AUI.AuiPaneInfo
try:
import aui as PyAUI
PyAuiPaneInfo = PyAUI.AuiPaneInfo
except ImportError:
pass
# Check for the new method in 2.7 (not present in 2.6.3.3)
if wx.VERSION_STRING < "2.7":
wx.Rect.Contains = lambda self, point: wx.Rect.Inside(self, point)
wxEVT_FLAT_MENU_DISMISSED = wx.NewEventType()
wxEVT_FLAT_MENU_SELECTED = wx.wxEVT_COMMAND_MENU_SELECTED
wxEVT_FLAT_MENU_ITEM_MOUSE_OVER = wx.NewEventType()
wxEVT_FLAT_MENU_ITEM_MOUSE_OUT = wx.NewEventType()
EVT_FLAT_MENU_DISMISSED = wx.PyEventBinder(wxEVT_FLAT_MENU_DISMISSED, 1)
""" Used internally. """
EVT_FLAT_MENU_SELECTED = wx.PyEventBinder(wxEVT_FLAT_MENU_SELECTED, 2)
""" Fires the wx.EVT_MENU event for `FlatMenu`. """
EVT_FLAT_MENU_ITEM_MOUSE_OUT = wx.PyEventBinder(wxEVT_FLAT_MENU_ITEM_MOUSE_OUT, 1)
""" Fires an event when the mouse leaves a `FlatMenuItem`. """
EVT_FLAT_MENU_ITEM_MOUSE_OVER = wx.PyEventBinder(wxEVT_FLAT_MENU_ITEM_MOUSE_OVER, 1)
""" Fires an event when the mouse enters a `FlatMenuItem`. """
def ConvertToMonochrome(bmp):
"""
Converts a bitmap to monochrome colour.
:param `bmp`: a valid `wx.Bitmap` object.
"""
mem_dc = wx.MemoryDC()
shadow = wx.EmptyBitmap(bmp.GetWidth(), bmp.GetHeight())
mem_dc.SelectObject(shadow)
mem_dc.DrawBitmap(bmp, 0, 0, True)
mem_dc.SelectObject(wx.NullBitmap)
img = shadow.ConvertToImage()
img = img.ConvertToMono(0, 0, 0)
# we now have black where the original bmp was drawn,
# white elsewhere
shadow = wx.BitmapFromImage(img)
shadow.SetMask(wx.Mask(shadow, wx.BLACK))
# Convert the black to grey
tmp = wx.EmptyBitmap(bmp.GetWidth(), bmp.GetHeight())
col = wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW)
mem_dc.SelectObject(tmp)
mem_dc.SetPen(wx.Pen(col))
mem_dc.SetBrush(wx.Brush(col))
mem_dc.DrawRectangle(0, 0, bmp.GetWidth(), bmp.GetHeight())
mem_dc.DrawBitmap(shadow, 0, 0, True) # now contains a bitmap with grey where the image was, white elsewhere
mem_dc.SelectObject(wx.NullBitmap)
shadow = tmp
shadow.SetMask(wx.Mask(shadow, wx.WHITE))
return shadow
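# Usage sketch (assumption): given any valid bitmap, a greyed "disabled" variant
# can be produced with, e.g.:
#   grey_bmp = ConvertToMonochrome(wx.Bitmap('icon.png', wx.BITMAP_TYPE_PNG))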
# ---------------------------------------------------------------------------- #
# Class FlatMenuEvent
# ---------------------------------------------------------------------------- #
class FlatMenuEvent(wx.PyCommandEvent):
"""
Event class that supports the FlatMenu-compatible event called
``EVT_FLAT_MENU_SELECTED``.
"""
def __init__(self, eventType, eventId=1, nSel=-1, nOldSel=-1):
"""
Default class constructor.
:param `eventType`: the event type;
:param `eventId`: the event identifier;
:param `nSel`: the current selection;
:param `nOldSel`: the old selection.
"""
wx.PyCommandEvent.__init__(self, eventType, eventId)
self._eventType = eventType
# ---------------------------------------------------------------------------- #
# Class MenuEntryInfo
# ---------------------------------------------------------------------------- #
class MenuEntryInfo(object):
"""
Internal class which holds information about a menu.
"""
def __init__(self, titleOrMenu="", menu=None, state=ControlNormal, cmd=wx.ID_ANY):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `titleOrMenu`: if it is a string, it represents the new menu label,
otherwise it is another instance of L{MenuEntryInfo} from which the attributes
are copied;
:param `menu`: the associated L{FlatMenu} object;
:param `state`: the menu item state. This can be one of the following:
==================== ======= ==========================
Item State Value Description
==================== ======= ==========================
``ControlPressed`` 0 The item is pressed
``ControlFocus`` 1 The item is focused
``ControlDisabled`` 2 The item is disabled
``ControlNormal`` 3 Normal state
==================== ======= ==========================
:param `cmd`: the menu accelerator identifier.
"""
if isinstance(titleOrMenu, basestring):
self._title = titleOrMenu
self._menu = menu
self._rect = wx.Rect()
self._state = state
if cmd == wx.ID_ANY:
cmd = wx.NewId()
self._cmd = cmd # the menu itself accelerator id
else:
self._title = titleOrMenu._title
self._menu = titleOrMenu._menu
self._rect = titleOrMenu._rect
self._state = titleOrMenu._state
self._cmd = titleOrMenu._cmd
self._textBmp = wx.NullBitmap
def GetTitle(self):
""" Returns the associated menu title. """
return self._title
def GetMenu(self):
""" Returns the associated menu. """
return self._menu
def SetRect(self, rect):
"""
Sets the associated menu client rectangle.
:param `rect`: an instance of `wx.Rect`, representing the menu client rectangle.
"""
self._rect = rect
def GetRect(self):
""" Returns the associated menu client rectangle. """
return self._rect
def SetState(self, state):
"""
Sets the associated menu state.
:param `state`: the menu item state. This can be one of the following:
==================== ======= ==========================
Item State Value Description
==================== ======= ==========================
``ControlPressed`` 0 The item is pressed
``ControlFocus`` 1 The item is focused
``ControlDisabled`` 2 The item is disabled
``ControlNormal`` 3 Normal state
==================== ======= ==========================
"""
self._state = state
def GetState(self):
"""
Returns the associated menu state.
:see: L{SetState} for a list of valid menu states.
"""
return self._state
def SetTextBitmap(self, bmp):
"""
Sets the associated menu bitmap.
:param `bmp`: a valid `wx.Bitmap` object.
"""
self._textBmp = bmp
def GetTextBitmap(self):
""" Returns the associated menu bitmap. """
return self._textBmp
def GetCmdId(self):
""" Returns the associated menu accelerator identifier. """
return self._cmd
# ---------------------------------------------------------------------------- #
# Class StatusBarTimer
# ---------------------------------------------------------------------------- #
class StatusBarTimer(wx.Timer):
""" Timer used for deleting `wx.StatusBar` long help after ``_DELAY`` seconds. """
def __init__(self, owner):
"""
Default class constructor.
For internal use: do not call it in your code!
:param `owner`: the `wx.Timer` owner (L{FlatMenuBar}).
"""
wx.Timer.__init__(self)
self._owner = owner
def Notify(self):
""" The timer has expired. """
self._owner.OnStatusBarTimer()
# ---------------------------------------------------------------------------- #
# Class FlatMenuBar
# ---------------------------------------------------------------------------- #
class FlatMenuBar(wx.Panel):
"""
Implements the generic owner-drawn menu bar for L{FlatMenu}.
"""
def __init__(self, parent, id=wx.ID_ANY, iconSize=SmallIcons,
spacer=SPACER, options=FM_OPT_SHOW_CUSTOMIZE|FM_OPT_IS_LCD):
"""
Default class constructor.
:param `parent`: the menu bar parent
:param `id`: the window identifier. If ``wx.ID_ANY``, will automatically create an identifier;
:param `iconSize`: size of the icons in the toolbar. This can be one of the
following values (in pixels):
==================== ======= =============================
`iconSize` Bit Value Description
==================== ======= =============================
``LargeIcons`` 32 Use large 32x32 icons
``SmallIcons`` 16 Use standard 16x16 icons
==================== ======= =============================
:param `spacer`: the space between the menu bar text and the menu bar border;
:param `options`: a combination of the following bits:
========================= ========= =============================
`options` Bit Hex Value Description
========================= ========= =============================
        ``FM_OPT_IS_LCD``         0x1       Use this style if your computer uses an LCD screen
        ``FM_OPT_MINIBAR``        0x2       Use this if you plan to use the toolbar only
        ``FM_OPT_SHOW_CUSTOMIZE`` 0x4       Show a "customize" link in the "more" menu; you will need to write your own handler. See the demo.
        ``FM_OPT_SHOW_TOOLBAR``   0x8       Set this option if you are planning to use the toolbar
========================= ========= =============================
"""
self._parent = parent
self._curretHiliteItem = -1
self._items = []
self._dropDownButtonArea = wx.Rect()
self._tbIconSize = iconSize
self._tbButtons = []
self._interval = 20 # 20 milliseconds
self._showTooltip = -1
self._haveTip = False
self._statusTimer = None
self._spacer = spacer
self._showToolbar = options & FM_OPT_SHOW_TOOLBAR
self._showCustomize = options & FM_OPT_SHOW_CUSTOMIZE
self._isLCD = options & FM_OPT_IS_LCD
self._isMinibar = options & FM_OPT_MINIBAR
self._options = options
self._dropDownButtonState = ControlNormal
self._moreMenu = None
self._dlg = None
self._tbMenu = None
self._moreMenuBgBmp = None
self._lastRadioGroup = 0
self._mgr = None
self.SetBarHeight()
wx.Panel.__init__(self, parent, id, size=(-1, self._barHeight), style=wx.WANTS_CHARS)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(EVT_FLAT_MENU_DISMISSED, self.OnMenuDismissed)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeaveMenuBar)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
self.Bind(wx.EVT_LEFT_DCLICK, self.OnLeftDown)
self.Bind(wx.EVT_LEFT_UP, self.OnLeftUp)
self.Bind(wx.EVT_IDLE, self.OnIdle)
if "__WXGTK__" in wx.Platform:
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeaveWindow)
self.SetFocus()
# start the stop watch
self._watch = wx.StopWatch()
self._watch.Start()
def Append(self, menu, title):
"""
Adds the item to the end of the menu bar.
:param `menu`: the menu to which we are appending a new item;
:param `title`: the menu item label.
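
        A minimal usage sketch; `frame` is a hypothetical parent window, and the
        L{FlatMenu} `Append` signature is assumed to mirror `wx.Menu`::

            menuBar = FlatMenuBar(frame)
            fileMenu = FlatMenu()
            fileMenu.Append(wx.ID_EXIT, "E&xit")
            menuBar.Append(fileMenu, "&File")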
"""
menu._menuBarFullTitle = title
position, label = ArtManager.Get().GetAccelIndex(title)
menu._menuBarLabelOnly = label
return self.Insert(len(self._items), menu, title)
def OnIdle(self, event):
"""
Handles the ``wx.EVT_IDLE`` event for L{FlatMenuBar}.
:param `event`: a `wx.IdleEvent` event to be processed.
"""
refresh = False
if self._watch.Time() > self._interval:
# it is time to process UpdateUIEvents
for but in self._tbButtons:
event = wx.UpdateUIEvent(but._tbItem.GetId())
event.Enable(but._tbItem.IsEnabled())
event.SetText(but._tbItem.GetLabel())
event.SetEventObject(self)
self.GetEventHandler().ProcessEvent(event)
if but._tbItem.GetLabel() != event.GetText() or but._tbItem.IsEnabled() != event.GetEnabled():
refresh = True
but._tbItem.SetLabel(event.GetText())
but._tbItem.Enable(event.GetEnabled())
self._watch.Start() # Reset the timer
# we need to update the menu bar
if refresh:
self.Refresh()
def SetBarHeight(self):
""" Recalculates the L{FlatMenuBar} height when its settings change. """
mem_dc = wx.MemoryDC()
mem_dc.SelectObject(wx.EmptyBitmap(1, 1))
dummy, self._barHeight = mem_dc.GetTextExtent("Tp")
mem_dc.SelectObject(wx.NullBitmap)
if not self._isMinibar:
self._barHeight += 4*self._spacer
else:
self._barHeight = self._spacer
if self._showToolbar :
# add the toolbar height to the menubar height
self._barHeight += self._tbIconSize + self._spacer
if self._mgr is None:
return
pn = self._mgr.GetPane("flat_menu_bar")
pn.MinSize(wx.Size(-1, self._barHeight))
self._mgr.Update()
self.Refresh()
def SetOptions(self, options):
"""
Sets the L{FlatMenuBar} options, whether to show a toolbar, to use LCD screen settings etc...
:param `options`: a combination of the following bits:
========================= ========= =============================
`options` Bit Hex Value Description
========================= ========= =============================
        ``FM_OPT_IS_LCD``         0x1       Use this style if your computer uses an LCD screen
        ``FM_OPT_MINIBAR``        0x2       Use this if you plan to use the toolbar only
        ``FM_OPT_SHOW_CUSTOMIZE`` 0x4       Show a "customize" link in the "more" menu; you will need to write your own handler. See the demo.
        ``FM_OPT_SHOW_TOOLBAR``   0x8       Set this option if you are planning to use the toolbar
========================= ========= =============================
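
        A minimal usage sketch (the `menuBar` name is hypothetical)::

            # enable the toolbar and the "customize" link on an existing bar
            menuBar.SetOptions(FM_OPT_SHOW_TOOLBAR | FM_OPT_SHOW_CUSTOMIZE)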
"""
self._options = options
self._showToolbar = options & FM_OPT_SHOW_TOOLBAR
self._showCustomize = options & FM_OPT_SHOW_CUSTOMIZE
self._isLCD = options & FM_OPT_IS_LCD
self._isMinibar = options & FM_OPT_MINIBAR
self.SetBarHeight()
self.Refresh()
self.Update()
def GetOptions(self):
"""
Returns the L{FlatMenuBar} options, whether to show a toolbar, to use LCD screen settings etc...
"""
return self._options
def UpdateItem(self, item):
"""
An item was modified. This function is called by L{FlatMenu} in case
an item was modified directly and not via a `wx.UpdateUIEvent` event.
        :param `item`: an instance of L{FlatMenuItem}.
"""
if not self._showToolbar:
return
        # search for a toolbar button with the same id
refresh = False
for but in self._tbButtons:
if but._tbItem.GetId() == item.GetId():
if but._tbItem.IsEnabled() != item.IsEnabled():
refresh = True
but._tbItem.Enable(item.IsEnabled())
break
if refresh:
self.Refresh()
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` event for L{FlatMenuBar}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
        # on GTK, don't use the bitmap for drawing,
# draw directly on the DC
if "__WXGTK__" in wx.Platform and not self._isLCD:
self.ClearBitmaps(0)
dc = wx.BufferedPaintDC(self)
self.DrawAll(dc)
def DrawAll(self, dc):
"""
Draws everything for L{FlatMenuBar}.
:param `dc`: an instance of `wx.DC`.
"""
artMgr = ArtManager.Get()
fnt = artMgr.GetFont()
textColour = artMgr.GetTextColourEnable()
theme = artMgr.GetMenuTheme()
dc.SetFont(fnt)
dc.SetTextForeground(textColour)
clientRect = self.GetClientRect()
artMgr.DrawMenuBarBg(dc, clientRect)
padding, dummy = dc.GetTextExtent("W")
posx = self._spacer
posy = self._spacer * 2
# ---------------------------------------------------------------------------
        # Draw as many items as we can; if the screen is not wide enough, add all
# missing items to a drop down menu
# ---------------------------------------------------------------------------
menuBarRect = self.GetClientRect()
        # mark all items as non-visible at first
for item in self._items:
item.SetRect(wx.Rect())
dc.SetTextForeground(textColour)
for item in self._items:
# Handle accelerator ('&')
title = item.GetTitle()
fixedText = title
location, labelOnly = artMgr.GetAccelIndex(fixedText)
# Get the menu item length, add some padding to it
textWidth, textHeight = dc.GetTextExtent(fixedText)
rect = wx.Rect(posx, posy, textWidth + self._spacer + padding, textHeight)
# Can we draw more??
# the +DROP_DOWN_ARROW_WIDTH is the width of the drop down arrow
if posx + rect.width + DROP_DOWN_ARROW_WIDTH >= menuBarRect.width:
break
# Keep the item rectangle, will be used later in functions such
# as 'OnLeftDown', 'OnMouseMove'
copy = wx.Rect(*rect)
copy.Inflate(0, self._spacer)
item.SetRect(copy)
if item.GetState() == ControlFocus:
artMgr.SetMS2007ButtonSunken(True)
artMgr.DrawButton(dc, item.GetRect(), theme, ControlFocus, False)
ww, hh = dc.GetTextExtent(labelOnly)
textOffset = (rect.width - ww) / 2
if not self._isLCD and item.GetTextBitmap().Ok():
dc.DrawBitmap(item.GetTextBitmap(), rect.x, rect.y, True)
else:
if not self._isLCD:
# Draw the text on a bitmap using memory dc,
# so on following calls we will use this bitmap instead
# of calculating everything from scratch
bmp = wx.EmptyBitmap(rect.width, rect.height)
memDc = wx.MemoryDC()
memDc.SelectObject(bmp)
                    # Fill the bitmap with the masking colour
memDc.SetPen(wx.Pen(wx.Colour(0, 128, 128)) )
memDc.SetBrush(wx.Brush(wx.Colour(0, 128, 128)) )
memDc.DrawRectangle(0, 0, rect.width, rect.height)
memDc.SetFont(fnt)
if location == wx.NOT_FOUND or location >= len(fixedText):
# draw the text
if not self._isLCD:
memDc.DrawText(title, textOffset, 0)
dc.DrawText(title, rect.x + textOffset, rect.y)
else:
# underline the first '&'
before = labelOnly[0:location]
underlineLetter = labelOnly[location]
after = labelOnly[location+1:]
# before
if not self._isLCD:
memDc.DrawText(before, textOffset, 0)
dc.DrawText(before, rect.x + textOffset, rect.y)
# underlineLetter
if "__WXGTK__" not in wx.Platform:
w1, h = dc.GetTextExtent(before)
fnt.SetUnderlined(True)
dc.SetFont(fnt)
dc.DrawText(underlineLetter, rect.x + w1 + textOffset, rect.y)
if not self._isLCD:
memDc.SetFont(fnt)
memDc.DrawText(underlineLetter, textOffset + w1, 0)
else:
w1, h = dc.GetTextExtent(before)
dc.DrawText(underlineLetter, rect.x + w1 + textOffset, rect.y)
if not self._isLCD:
memDc.DrawText(underlineLetter, textOffset + w1, 0)
                        # Draw the underline ourselves, since using the underlined
                        # font in GTK places the line too close to the letter
                        underlineLetterW, underlineLetterH = dc.GetTextExtent(underlineLetter)
                        dc.DrawLine(rect.x + w1 + textOffset, rect.y + underlineLetterH - 2,
                                    rect.x + w1 + textOffset + underlineLetterW, rect.y + underlineLetterH - 2)
# after
w2, h = dc.GetTextExtent(underlineLetter)
fnt.SetUnderlined(False)
dc.SetFont(fnt)
dc.DrawText(after, rect.x + w1 + w2 + textOffset, rect.y)
if not self._isLCD:
memDc.SetFont(fnt)
memDc.DrawText(after, w1 + w2 + textOffset, 0)
if not self._isLCD:
memDc.SelectObject(wx.NullBitmap)
# Set masking colour to the bitmap
bmp.SetMask(wx.Mask(bmp, wx.Colour(0, 128, 128)))
item.SetTextBitmap(bmp)
posx += rect.width
        # Get a background image of the 'more' menu button
moreMenubtnBgBmpRect = wx.Rect(*self.GetMoreMenuButtonRect())
if not self._moreMenuBgBmp:
self._moreMenuBgBmp = wx.EmptyBitmap(moreMenubtnBgBmpRect.width, moreMenubtnBgBmpRect.height)
if self._showToolbar and len(self._tbButtons) > 0:
rectX = self._spacer
rectWidth = clientRect.width - moreMenubtnBgBmpRect.width - 3*self._spacer
if len(self._items) == 0:
rectHeight = clientRect.height - posy - 2*self._spacer
rectY = posy
else:
rectHeight = clientRect.height - 2*self._spacer - self._items[0].GetRect().height
rectY = self._items[0].GetRect().y + self._items[0].GetRect().height
rr = wx.Rect(rectX, rectY, rectWidth, rectHeight)
artMgr.DrawToolBarBg(dc, rr)
self.DrawToolbar(dc, rr)
if self._showCustomize or self.GetInvisibleMenuItemCount() > 0 or self.GetInvisibleToolbarItemCount() > 0:
memDc = wx.MemoryDC()
memDc.SelectObject(self._moreMenuBgBmp)
try:
memDc.Blit(0, 0, self._moreMenuBgBmp.GetWidth(), self._moreMenuBgBmp.GetHeight(), dc,
moreMenubtnBgBmpRect.x, moreMenubtnBgBmpRect.y)
except:
pass
memDc.SelectObject(wx.NullBitmap)
# Draw the drop down arrow button
self.DrawMoreButton(dc, 0, self._dropDownButtonState)
# Set the button rect
self._dropDownButtonArea = moreMenubtnBgBmpRect
def DrawToolbar(self, dc, rect):
"""
Draws the toolbar (if present).
:param `dc`: an instance of `wx.DC`;
:param `rect`: the toolbar client rectangle.
"""
width = self._tbIconSize + self._spacer
height = self._tbIconSize + self._spacer
xx = rect.x
yy = rect.y + (rect.height - height)/2
artMgr = ArtManager.Get()
# by default set all toolbar items as invisible
for but in self._tbButtons:
but._visible = False
counter = 0
# Get all the toolbar items
for i in xrange(len(self._tbButtons)):
tbItem = self._tbButtons[i]._tbItem
# the button width depends on its type
if tbItem.IsSeparator():
width = SEPARATOR_WIDTH
elif tbItem.IsCustomControl():
control = tbItem.GetCustomControl()
width = control.GetSize().x + self._spacer
else:
width = self._tbIconSize + self._spacer # normal bitmap's width
# can we keep drawing?
if xx + width >= rect.width:
break
counter += 1
# mark this item as visible
self._tbButtons[i]._visible = True
bmp = wx.NullBitmap
#------------------------------------------
# special handling for separator
#------------------------------------------
if tbItem.IsSeparator():
# Place a separator bitmap
bmp = wx.EmptyBitmap(12, rect.height - 2)
mem_dc = wx.MemoryDC()
mem_dc.SelectObject(bmp)
mem_dc.SetPen(wx.BLACK_PEN)
mem_dc.SetBrush(wx.BLACK_BRUSH)
mem_dc.DrawRectangle(0, 0, bmp.GetWidth(), bmp.GetHeight())
col = artMgr.GetMenuBarFaceColour()
col1 = artMgr.LightColour(col, 40)
col2 = artMgr.LightColour(col, 70)
mem_dc.SetPen(wx.Pen(col2))
mem_dc.DrawLine(5, 0, 5, bmp.GetHeight())
mem_dc.SetPen(wx.Pen(col1))
mem_dc.DrawLine(6, 0, 6, bmp.GetHeight())
mem_dc.SelectObject(wx.NullBitmap)
bmp.SetMask(wx.Mask(bmp, wx.BLACK))
# draw the separator
buttonRect = wx.Rect(xx, rect.y + 1, bmp.GetWidth(), bmp.GetHeight())
dc.DrawBitmap(bmp, xx, rect.y + 1, True)
xx += buttonRect.width
self._tbButtons[i]._rect = buttonRect
continue
elif tbItem.IsCustomControl():
control = tbItem.GetCustomControl()
ctrlSize = control.GetSize()
ctrlPos = wx.Point(xx, yy + (rect.height - ctrlSize.y)/2)
if control.GetPosition() != ctrlPos:
control.SetPosition(ctrlPos)
if not control.IsShown():
control.Show()
buttonRect = wx.RectPS(ctrlPos, ctrlSize)
xx += buttonRect.width
self._tbButtons[i]._rect = buttonRect
continue
else:
if tbItem.IsEnabled():
bmp = tbItem.GetBitmap()
else:
bmp = tbItem.GetDisabledBitmap()
# Draw the toolbar image
if bmp.Ok():
x = xx
y = yy + (height - bmp.GetHeight())/2 - 1
buttonRect = wx.Rect(x, y, width, height)
if i < len(self._tbButtons) and i >= 0:
if self._tbButtons[i]._tbItem.IsSelected():
tmpState = ControlPressed
else:
tmpState = ControlFocus
if self._tbButtons[i]._state == ControlFocus or self._tbButtons[i]._tbItem.IsSelected():
artMgr.DrawButton(dc, buttonRect, artMgr.GetMenuTheme(), tmpState, False)
else:
self._tbButtons[i]._state = ControlNormal
imgx = buttonRect.x + (buttonRect.width - bmp.GetWidth())/2
imgy = buttonRect.y + (buttonRect.height - bmp.GetHeight())/2
if self._tbButtons[i]._state == ControlFocus and not self._tbButtons[i]._tbItem.IsSelected():
                    # in case the button is in focus, shift it
                    # one pixel up and to the left
# place a dark image under the original image to provide it
# with some shadow
# shadow = ConvertToMonochrome(bmp)
# dc.DrawBitmap(shadow, imgx, imgy, True)
imgx -= 1
imgy -= 1
dc.DrawBitmap(bmp, imgx, imgy, True)
xx += buttonRect.width
self._tbButtons[i]._rect = buttonRect
#Edited by P.Kort
if self._showTooltip == -1:
self.RemoveHelp()
else:
try:
self.DoGiveHelp(self._tbButtons[self._showTooltip]._tbItem)
except:
if _debug:
print "FlatMenu.py; fn : DrawToolbar; Can't create Tooltip "
pass
for j in xrange(counter, len(self._tbButtons)):
if self._tbButtons[j]._tbItem.IsCustomControl():
control = self._tbButtons[j]._tbItem.GetCustomControl()
control.Hide()
def GetMoreMenuButtonRect(self):
""" Returns a rectangle surrounding the menu button. """
clientRect = self.GetClientRect()
rect = wx.Rect(*clientRect)
rect.SetWidth(DROP_DOWN_ARROW_WIDTH)
rect.SetX(clientRect.GetWidth() + rect.GetX() - DROP_DOWN_ARROW_WIDTH - 3)
rect.SetY(2)
rect.SetHeight(rect.GetHeight() - self._spacer)
return rect
def DrawMoreButton(self, dc, fr, state):
"""
Draws 'more' button to the right side of the menu bar.
:param `dc`: an instance of `wx.DC`;
:param `fr`: unused at present;
:param `state`: the 'more' button state.
:see: L{MenuEntryInfo.SetState} for a list of valid menu states.
"""
if (not self._showCustomize) and self.GetInvisibleMenuItemCount() < 1 and self.GetInvisibleToolbarItemCount() < 1:
return
        # Draw a drop down menu at the right edge of the menu bar.
        # We use a 16x16 XPM bitmap, plus another 4 pixels taken as spacing
        # from the right side of the frame; this reserves a zone of
        # DROP_DOWN_ARROW_WIDTH pixels on the right side
rect = self.GetMoreMenuButtonRect()
artMgr = ArtManager.Get()
# Draw the bitmap
if state != ControlNormal:
# Draw background according to state
artMgr.SetMS2007ButtonSunken(True)
artMgr.DrawButton(dc, rect, artMgr.GetMenuTheme(), state, False)
else:
# Delete current image
if self._moreMenuBgBmp.Ok():
dc.DrawBitmap(self._moreMenuBgBmp, rect.x, rect.y, True)
dropArrowBmp = artMgr.GetStockBitmap("arrow_down")
# Calc the image coordinates
xx = rect.x + (DROP_DOWN_ARROW_WIDTH - dropArrowBmp.GetWidth())/2
yy = rect.y + (rect.height - dropArrowBmp.GetHeight())/2
dc.DrawBitmap(dropArrowBmp, xx, yy + self._spacer, True)
self._dropDownButtonState = state
def HitTest(self, pt):
"""
HitTest method for L{FlatMenuBar}.
:param `pt`: an instance of `wx.Point`, specifying the hit test position.
"""
if self._dropDownButtonArea.Contains(pt):
return -1, DropDownArrowButton
for ii, item in enumerate(self._items):
if item.GetRect().Contains(pt):
return ii, MenuItem
# check for tool bar items
if self._showToolbar:
for ii, but in enumerate(self._tbButtons):
if but._rect.Contains(pt):
                    # locate the corresponding menu item
enabled = but._tbItem.IsEnabled()
separator = but._tbItem.IsSeparator()
visible = but._visible
if enabled and not separator and visible:
self._showTooltip = ii
return ii, ToolbarItem
self._showTooltip = -1
return -1, NoWhere
def FindMenuItem(self, id):
"""
Returns a L{FlatMenuItem} according to its id.
:param `id`: the identifier for the sought L{FlatMenuItem}.
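
        A minimal usage sketch, assuming an item with this id exists somewhere
        in one of the menus::

            item = menuBar.FindMenuItem(wx.ID_SAVE)
            if item:
                item.Enable(False)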
"""
for item in self._items:
mi = item.GetMenu().FindItem(id)
if mi:
return mi
return None
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` event for L{FlatMenuBar}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.ClearBitmaps(0)
self.Refresh()
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` event for L{FlatMenuBar}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This method is intentionally empty to reduce flicker.
"""
pass
def ShowCustomize(self, show=True):
"""
Shows/hides the drop-down arrow which allows customization of L{FlatMenu}.
:param `show`: ``True`` to show the customize menu, ``False`` to hide it.
"""
if self._showCustomize == show:
return
self._showCustomize = show
self.Refresh()
def SetLCDMonitor(self, lcd=True):
"""
Sets whether the PC monitor is an LCD or not.
:param `lcd`: ``True`` to use the settings appropriate for a LCD monitor,
``False`` otherwise.
"""
if self._isLCD == lcd:
return
self._isLCD = lcd
self.Refresh()
def ProcessMouseMoveFromMenu(self, pt):
"""
        This function is called from child menus; it allows a child menu to
        pass the mouse movement event to the menu bar.
:param `pt`: an instance of `wx.Point`.
"""
idx, where = self.HitTest(pt)
if where == MenuItem:
self.ActivateMenu(self._items[idx])
def DoMouseMove(self, pt, leftIsDown):
"""
Handles mouse move event.
:param `pt`: an instance of `wx.Point`;
        :param `leftIsDown`: ``True`` if the left mouse button is down, ``False`` otherwise.
"""
# Reset items state
for item in self._items:
item.SetState(ControlNormal)
idx, where = self.HitTest(pt)
if where == DropDownArrowButton:
self.RemoveHelp()
if self._dropDownButtonState != ControlFocus and not leftIsDown:
dc = wx.ClientDC(self)
self.DrawMoreButton(dc, -1, ControlFocus)
elif where == MenuItem:
self._dropDownButtonState = ControlNormal
# On Item
self._items[idx].SetState(ControlFocus)
            # If this item is already selected, don't draw it again
if self._curretHiliteItem == idx:
return
self._curretHiliteItem = idx
if self._showToolbar:
# mark all toolbar items as non-hilited
for but in self._tbButtons:
but._state = ControlNormal
self.Refresh()
elif where == ToolbarItem:
if self._showToolbar:
if idx < len(self._tbButtons) and idx >= 0:
if self._tbButtons[idx]._state == ControlFocus:
return
# we need to refresh the toolbar
active = self.GetActiveToolbarItem()
if active != wx.NOT_FOUND:
self._tbButtons[active]._state = ControlNormal
for but in self._tbButtons:
but._state = ControlNormal
self._tbButtons[idx]._state = ControlFocus
self.DoGiveHelp(self._tbButtons[idx]._tbItem)
self.Refresh()
elif where == NoWhere:
refresh = False
self.RemoveHelp()
if self._dropDownButtonState != ControlNormal:
refresh = True
self._dropDownButtonState = ControlNormal
if self._showToolbar:
tbActiveItem = self.GetActiveToolbarItem()
if tbActiveItem != wx.NOT_FOUND:
self._tbButtons[tbActiveItem]._state = ControlNormal
refresh = True
if self._curretHiliteItem != -1:
self._items[self._curretHiliteItem].SetState(ControlNormal)
self._curretHiliteItem = -1
self.Refresh()
if refresh:
self.Refresh()
def OnMouseMove(self, event):
"""
Handles the ``wx.EVT_MOTION`` event for L{FlatMenuBar}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
pt = event.GetPosition()
self.DoMouseMove(pt, event.LeftIsDown())
def OnLeaveMenuBar(self, event):
"""
Handles the ``wx.EVT_LEAVE_WINDOW`` event for L{FlatMenuBar}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method is for MSW only.
"""
pt = event.GetPosition()
self.DoMouseMove(pt, event.LeftIsDown())
def ResetToolbarItems(self):
""" Used internally. """
for but in self._tbButtons:
but._state = ControlNormal
def GetActiveToolbarItem(self):
""" Returns the active toolbar item. """
for but in self._tbButtons:
if but._state == ControlFocus or but._state == ControlPressed:
return self._tbButtons.index(but)
return wx.NOT_FOUND
def OnLeaveWindow(self, event):
"""
Handles the ``wx.EVT_LEAVE_WINDOW`` event for L{FlatMenuBar}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method is for GTK only.
"""
self._curretHiliteItem = -1
self._dropDownButtonState = ControlNormal
# Reset items state
for item in self._items:
item.SetState(ControlNormal)
for but in self._tbButtons:
but._state = ControlNormal
self.Refresh()
def OnMenuDismissed(self, event):
"""
Handles the ``EVT_FLAT_MENU_DISMISSED`` event for L{FlatMenuBar}.
:param `event`: a L{FlatMenuEvent} event to be processed.
"""
pt = wx.GetMousePosition()
pt = self.ScreenToClient(pt)
idx, where = self.HitTest(pt)
self.RemoveHelp()
if where not in [MenuItem, DropDownArrowButton]:
self._dropDownButtonState = ControlNormal
self._curretHiliteItem = -1
for item in self._items:
item.SetState(ControlNormal)
self.Refresh()
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` event for L{FlatMenuBar}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
pt = event.GetPosition()
idx, where = self.HitTest(pt)
if where == DropDownArrowButton:
dc = wx.ClientDC(self)
self.DrawMoreButton(dc, -1, ControlPressed)
self.PopupMoreMenu()
elif where == MenuItem:
            # Position the menu: GetPosition() returns the coords of the
            # button relative to its parent, so we need to translate them
            # into screen coords
self.ActivateMenu(self._items[idx])
elif where == ToolbarItem:
redrawAll = False
item = self._tbButtons[idx]._tbItem
            # toggle the tool if it's a check item
item.Toggle()
            # switch it on if it's an unselected radio item
if not item.IsSelected() and item.IsRadioItem():
group = item.GetGroup()
for i in xrange(len(self._tbButtons)):
if self._tbButtons[i]._tbItem.GetGroup() == group and \
i != idx and self._tbButtons[i]._tbItem.IsSelected():
self._tbButtons[i]._state = ControlNormal
self._tbButtons[i]._tbItem.Select(False)
redrawAll = True
item.Select(True)
# Over a toolbar item
if redrawAll:
self.Refresh()
if "__WXMSW__" in wx.Platform:
dc = wx.BufferedDC(wx.ClientDC(self))
else:
dc = wx.ClientDC(self)
else:
dc = wx.ClientDC(self)
self.DrawToolbarItem(dc, idx, ControlPressed)
# TODO:: Do the action specified in this button
self.DoToolbarAction(idx)
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` event for L{FlatMenuBar}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
pt = event.GetPosition()
idx, where = self.HitTest(pt)
if where == ToolbarItem:
# Over a toolbar item
dc = wx.ClientDC(self)
self.DrawToolbarItem(dc, idx, ControlFocus)
def DrawToolbarItem(self, dc, idx, state):
"""
Draws a toolbar item button.
:param `dc`: an instance of `wx.DC`;
:param `idx`: the tool index in the toolbar;
:param `state`: the button state.
:see: L{MenuEntryInfo.SetState} for a list of valid menu states.
"""
if idx >= len(self._tbButtons) or idx < 0:
return
if self._tbButtons[idx]._tbItem.IsSelected():
state = ControlPressed
rect = self._tbButtons[idx]._rect
ArtManager.Get().DrawButton(dc, rect, ArtManager.Get().GetMenuTheme(), state, False)
# draw the bitmap over the highlight
buttonRect = wx.Rect(*rect)
x = rect.x + (buttonRect.width - self._tbButtons[idx]._tbItem.GetBitmap().GetWidth())/2
y = rect.y + (buttonRect.height - self._tbButtons[idx]._tbItem.GetBitmap().GetHeight())/2
if state == ControlFocus:
# place a dark image under the original image to provide it
# with some shadow
# shadow = ConvertToMonochrome(self._tbButtons[idx]._tbItem.GetBitmap())
# dc.DrawBitmap(shadow, x, y, True)
            # in case the button is in focus, shift it
            # one pixel up and to the left
x -= 1
y -= 1
dc.DrawBitmap(self._tbButtons[idx]._tbItem.GetBitmap(), x, y, True)
def ActivateMenu(self, menuInfo):
"""
Activates a menu.
:param `menuInfo`: an instance of L{MenuEntryInfo}.
"""
        # first make sure all other menus are not popped up
if menuInfo.GetMenu().IsShown():
return
idx = wx.NOT_FOUND
for item in self._items:
item.GetMenu().Dismiss(False, True)
if item.GetMenu() == menuInfo.GetMenu():
idx = self._items.index(item)
# Remove the popup menu as well
if self._moreMenu and self._moreMenu.IsShown():
self._moreMenu.Dismiss(False, True)
        # make sure that the menu item button is highlighted
if idx != wx.NOT_FOUND:
self._dropDownButtonState = ControlNormal
self._curretHiliteItem = idx
for item in self._items:
item.SetState(ControlNormal)
self._items[idx].SetState(ControlFocus)
self.Refresh()
rect = menuInfo.GetRect()
menuPt = self.ClientToScreen(wx.Point(rect.x, rect.y))
menuInfo.GetMenu().SetOwnerHeight(rect.height)
menuInfo.GetMenu().Popup(wx.Point(menuPt.x, menuPt.y), self)
def DoToolbarAction(self, idx):
"""
Performs a toolbar button pressed action.
:param `idx`: the tool index in the toolbar.
"""
# we handle only button clicks
if self._tbButtons[idx]._tbItem.IsRegularItem() or \
self._tbButtons[idx]._tbItem.IsCheckItem():
# Create the event
event = wx.CommandEvent(wxEVT_FLAT_MENU_SELECTED, self._tbButtons[idx]._tbItem.GetId())
event.SetEventObject(self)
# all events are handled by this control and its parents
self.GetEventHandler().ProcessEvent(event)
def FindMenu(self, title):
"""
Returns the index of the menu with the given title or ``wx.NOT_FOUND`` if
no such menu exists in this menubar.
        :param `title`: may specify either the menu title (with accelerator characters,
         i.e. "&File") or just the menu label ("File").
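
        A minimal usage sketch (the `menuBar` name is hypothetical)::

            idx = menuBar.FindMenu("File")       # plain label
            idx = menuBar.FindMenu("&File")      # full title with mnemonic
            if idx != wx.NOT_FOUND:
                menu = menuBar.GetMenu(idx)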
"""
for ii, item in enumerate(self._items):
accelIdx, labelOnly = ArtManager.Get().GetAccelIndex(item.GetTitle())
if labelOnly == title or item.GetTitle() == title:
return ii
return wx.NOT_FOUND
def GetMenu(self, menuIdx):
"""
Returns the menu at the specified index (zero-based).
:param `menuIdx`: the index of the sought menu.
"""
if menuIdx >= len(self._items) or menuIdx < 0:
return None
return self._items[menuIdx].GetMenu()
def GetMenuCount(self):
""" Returns the number of menus in the menubar. """
return len(self._items)
def Insert(self, pos, menu, title):
"""
Inserts the menu at the given position into the menu bar.
:param `pos`: the position of the new menu in the menu bar;
:param `menu`: the menu to add. L{FlatMenuBar} owns the menu and will free it;
:param `title`: the title of the menu.
        :note: Inserting a menu at position 0 will insert it at the very beginning of the
         menu bar, while inserting at position L{GetMenuCount} is the same as calling L{Append}.
"""
menu.SetMenuBar(self)
self._items.insert(pos, MenuEntryInfo(title, menu))
self.UpdateAcceleratorTable()
self.ClearBitmaps(pos)
self.Refresh()
return True
def Remove(self, pos):
"""
Removes the menu from the menu bar and returns the menu object - the
caller is responsible for deleting it.
:param `pos`: the position of the menu in the menu bar.
:note: This function may be used together with L{Insert} to change the menubar
dynamically.
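
        A minimal sketch of temporarily removing the first menu and re-inserting
        it (names are hypothetical)::

            menu = menuBar.Remove(0)
            if menu:
                menuBar.Insert(0, menu, "&File")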
"""
if pos >= len(self._items):
return None
menu = self._items[pos].GetMenu()
self._items.pop(pos)
self.UpdateAcceleratorTable()
# Since we use bitmaps to optimize our drawings, we need
# to reset all bitmaps from pos and until end of vector
# to force size/position changes to the menu bar
self.ClearBitmaps(pos)
self.Refresh()
# remove the connection to this menubar
menu.SetMenuBar(None)
return menu
def UpdateAcceleratorTable(self):
""" Updates the parent accelerator table. """
        # first, collect the accelerators from every menu
updatedTable = []
parent = self.GetParent()
for item in self._items:
updatedTable = item.GetMenu().GetAccelArray() + updatedTable
            # create an accelerator for every menu (if one exists)
title = item.GetTitle()
mnemonic, labelOnly = ArtManager.Get().GetAccelIndex(title)
if mnemonic != wx.NOT_FOUND:
                # Get the accelerator character
accelChar = labelOnly[mnemonic]
accelString = "\tAlt+" + accelChar
title += accelString
accel = wx.GetAccelFromString(title)
itemId = item.GetCmdId()
if accel:
# connect an event to this cmd
parent.Connect(itemId, -1, wxEVT_FLAT_MENU_SELECTED, self.OnAccelCmd)
accel.Set(accel.GetFlags(), accel.GetKeyCode(), itemId)
updatedTable.append(accel)
        # Build the accelerator table from the collected entries
        entries = list(updatedTable)
table = wx.AcceleratorTable(entries)
del entries
parent.SetAcceleratorTable(table)
def ClearBitmaps(self, start):
"""
        Resets the cached menu title bitmaps to `wx.NullBitmap`.
:param `start`: the index at which to start resetting the bitmaps.
"""
if self._isLCD:
return
for item in self._items[start:]:
item.SetTextBitmap(wx.NullBitmap)
def OnAccelCmd(self, event):
"""
Single function to handle any accelerator key used inside the menubar.
:param `event`: a L{FlatMenuEvent} event to be processed.
"""
for item in self._items:
if item.GetCmdId() == event.GetId():
self.ActivateMenu(item)
def ActivateNextMenu(self):
""" Activates next menu and make sure all others are non-active. """
last_item = self.GetLastVisibleMenu()
# find the current active menu
for i in xrange(last_item+1):
if self._items[i].GetMenu().IsShown():
nextMenu = i + 1
if nextMenu >= last_item:
nextMenu = 0
self.ActivateMenu(self._items[nextMenu])
return
def GetLastVisibleMenu(self):
""" Returns the index of the last visible menu on the menu bar. """
last_item = 0
# find the last visible item
rect = wx.Rect()
for item in self._items:
if item.GetRect() == rect:
break
last_item += 1
return last_item
def ActivatePreviousMenu(self):
""" Activates previous menu and make sure all others are non-active. """
# find the current active menu
last_item = self.GetLastVisibleMenu()
for i in xrange(last_item):
if self._items[i].GetMenu().IsShown():
prevMenu = i - 1
if prevMenu < 0:
prevMenu = last_item - 1
if prevMenu < 0:
return
self.ActivateMenu(self._items[prevMenu])
return
def CreateMoreMenu(self):
""" Creates the drop down menu and populate it. """
if not self._moreMenu:
# first time
self._moreMenu = FlatMenu(self)
self._popupDlgCmdId = wx.NewId()
# Connect an event handler for this event
self.Connect(self._popupDlgCmdId, -1, wxEVT_FLAT_MENU_SELECTED, self.OnCustomizeDlg)
# Remove all items from the popup menu
self._moreMenu.Clear()
invM = self.GetInvisibleMenuItemCount()
for i in xrange(len(self._items) - invM, len(self._items)):
item = FlatMenuItem(self._moreMenu, wx.ID_ANY, self._items[i].GetTitle(),
"", wx.ITEM_NORMAL, self._items[i].GetMenu())
self._moreMenu.AppendItem(item)
# Add invisible toolbar items
invT = self.GetInvisibleToolbarItemCount()
if self._showToolbar and invT > 0:
if self.GetInvisibleMenuItemCount() > 0:
self._moreMenu.AppendSeparator()
for i in xrange(len(self._tbButtons) - invT, len(self._tbButtons)):
if self._tbButtons[i]._tbItem.IsSeparator():
self._moreMenu.AppendSeparator()
elif not self._tbButtons[i]._tbItem.IsCustomControl():
tbitem = self._tbButtons[i]._tbItem
item = FlatMenuItem(self._tbMenu, tbitem.GetId(), tbitem.GetLabel(), "", wx.ITEM_NORMAL, None, tbitem.GetBitmap(), tbitem.GetDisabledBitmap())
item.Enable(tbitem.IsEnabled())
self._moreMenu.AppendItem(item)
if self._showCustomize:
if invT + invM > 0:
self._moreMenu.AppendSeparator()
item = FlatMenuItem(self._moreMenu, self._popupDlgCmdId, "Customize ...")
self._moreMenu.AppendItem(item)
def GetInvisibleMenuItemCount(self):
"""
Returns the number of invisible menu items.
:note: Valid only after the `wx.PaintEvent` has been processed after a resize.
"""
return len(self._items) - self.GetLastVisibleMenu()
def GetInvisibleToolbarItemCount(self):
"""
Returns the number of invisible toolbar items.
:note: Valid only after the `wx.PaintEvent` has been processed after a resize.
"""
count = 0
for i in xrange(len(self._tbButtons)):
            if not self._tbButtons[i]._visible:
break
count = i
return len(self._tbButtons) - count - 1
def PopupMoreMenu(self):
""" Popups the 'more' menu. """
if (not self._showCustomize) and self.GetInvisibleMenuItemCount() + self.GetInvisibleToolbarItemCount() < 1:
return
self.CreateMoreMenu()
pt = self._dropDownButtonArea.GetTopLeft()
pt = self.ClientToScreen(pt)
pt.y += self._dropDownButtonArea.GetHeight()
self._moreMenu.Popup(pt, self)
def OnCustomizeDlg(self, event):
"""
Handles the customize dialog here.
:param `event`: a L{FlatMenuEvent} event to be processed.
"""
if not self._dlg:
self._dlg = FMCustomizeDlg(self)
else:
            # initialize the dialog
self._dlg.Initialise()
if self._dlg.ShowModal() == wx.ID_OK:
# Handle customize requests here
pass
if "__WXGTK__" in wx.Platform:
# Reset the more button
dc = wx.ClientDC(self)
self.DrawMoreButton(dc, -1, ControlNormal)
def AppendToolbarItem(self, item):
"""
Appends a tool to the L{FlatMenuBar}.
:warning: This method is now deprecated.
:see: L{AddTool}
"""
newItem = ToolBarItem(item, wx.Rect(), ControlNormal)
self._tbButtons.append(newItem)
def AddTool(self, toolId, label="", bitmap1=wx.NullBitmap, bitmap2=wx.NullBitmap,
kind=wx.ITEM_NORMAL, shortHelp="", longHelp=""):
"""
Adds a tool to the toolbar.
:param `toolId`: an integer by which the tool may be identified in subsequent
operations;
:param `kind`: may be ``wx.ITEM_NORMAL`` for a normal button (default),
``wx.ITEM_CHECK`` for a checkable tool (such tool stays pressed after it had been
toggled) or ``wx.ITEM_RADIO`` for a checkable tool which makes part of a radio
group of tools each of which is automatically unchecked whenever another button
in the group is checked;
:param `bitmap1`: the primary tool bitmap;
:param `bitmap2`: the bitmap used when the tool is disabled. If it is equal to
         `wx.NullBitmap`, the disabled bitmap is automatically generated by greying out
the normal one;
:param `shortHelp`: a string used for the tools tooltip;
:param `longHelp`: this string is shown in the `wx.StatusBar` (if any) of the
parent frame when the mouse pointer is inside the tool.
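
        A minimal usage sketch; the bitmap path is hypothetical, and the bar is
        assumed to have been created with the ``FM_OPT_SHOW_TOOLBAR`` option::

            newBmp = wx.Bitmap("new.png", wx.BITMAP_TYPE_PNG)
            menuBar.AddTool(wx.ID_NEW, "New", newBmp,
                            shortHelp="New file", longHelp="Creates a new file")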
"""
self._tbButtons.append(ToolBarItem(FlatToolbarItem(bitmap1, toolId, label, bitmap2, kind, shortHelp, longHelp), wx.Rect(), ControlNormal))
def AddSeparator(self):
""" Adds a separator for spacing groups of tools in toolbar. """
if len(self._tbButtons) > 0 and not self._tbButtons[len(self._tbButtons)-1]._tbItem.IsSeparator():
self._tbButtons.append(ToolBarItem(FlatToolbarItem(), wx.Rect(), ControlNormal))
def AddControl(self, control):
"""
Adds any control to the toolbar, typically e.g. a combobox.
:param `control`: the control to be added.
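
        A minimal usage sketch (a hypothetical zoom combobox, parented to the
        bar itself so the toolbar code can position and show it)::

            zoomCombo = wx.ComboBox(menuBar, wx.ID_ANY, choices=["50%", "100%", "200%"])
            menuBar.AddControl(zoomCombo)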
"""
self._tbButtons.append(ToolBarItem(FlatToolbarItem(control), wx.Rect(), ControlNormal))
def AddCheckTool(self, toolId, label="", bitmap1=wx.NullBitmap, bitmap2=wx.NullBitmap, shortHelp="", longHelp=""):
"""
Adds a new check (or toggle) tool to the toolbar.
:see: L{AddTool} for parameter descriptions.
"""
self.AddTool(toolId, label, bitmap1, bitmap2, kind=wx.ITEM_CHECK, shortHelp=shortHelp, longHelp=longHelp)
def AddRadioTool(self, toolId, label= "", bitmap1=wx.NullBitmap, bitmap2=wx.NullBitmap, shortHelp="", longHelp=""):
"""
Adds a new radio tool to the toolbar. Consecutive radio tools form a radio group
such that exactly one button in the group is pressed at any moment, in other
words whenever a button in the group is pressed the previously pressed button
is automatically released.
        You should avoid having radio groups of only one element, as it would be
        impossible for the user to use such a button.
By default, the first button in the radio group is initially pressed, the others are not.
:see: L{AddTool} for parameter descriptions.
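
        A minimal sketch of a two-tool radio group; the ids and bitmaps are
        hypothetical::

            menuBar.AddRadioTool(ID_ALIGN_LEFT, "Left", leftBmp)
            menuBar.AddRadioTool(ID_ALIGN_RIGHT, "Right", rightBmp)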
"""
self.AddTool(toolId, label, bitmap1, bitmap2, kind=wx.ITEM_RADIO, shortHelp=shortHelp, longHelp=longHelp)
        # if this is the first tool, or the previous tool is not a radio item,
        # this tool starts a new radio group and is selected by default
        if len(self._tbButtons) < 2 or not self._tbButtons[len(self._tbButtons)-2]._tbItem.IsRadioItem():
self._tbButtons[len(self._tbButtons)-1]._tbItem.Select(True)
self._lastRadioGroup += 1
self._tbButtons[len(self._tbButtons)-1]._tbItem.SetGroup(self._lastRadioGroup)
def SetUpdateInterval(self, interval):
"""
Sets the updateUI interval for toolbar items. All UpdateUI events are
sent from within L{OnIdle} handler, the default is 20 milliseconds.
:param `interval`: the updateUI interval in milliseconds.
"""
self._interval = interval
def PositionAUI(self, mgr, fixToolbar=True):
"""
Positions the control inside a wxAUI/PyAUI frame manager.
:param `mgr`: an instance of `wx.aui.AuiManager` or L{AuiManager};
        :param `fixToolbar`: ``True`` if L{FlatMenuBar} cannot be floated.
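
        A minimal usage sketch, assuming `wx.aui` is available and `frame` is
        your main frame::

            mgr = wx.aui.AuiManager(frame)
            menuBar.PositionAUI(mgr)
            mgr.Update()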
"""
if isinstance(mgr, wx.aui.AuiManager):
pn = AuiPaneInfo()
else:
pn = PyAuiPaneInfo()
xx = wx.SystemSettings_GetMetric(wx.SYS_SCREEN_X)
# We add our menu bar as a toolbar, with the following settings
pn.Name("flat_menu_bar")
pn.Caption("Menu Bar")
pn.Top()
pn.MinSize(wx.Size(xx/2, self._barHeight))
pn.LeftDockable(False)
pn.RightDockable(False)
pn.ToolbarPane()
if not fixToolbar:
# We add our menu bar as a toolbar, with the following settings
pn.BestSize(wx.Size(xx, self._barHeight))
pn.FloatingSize(wx.Size(300, self._barHeight))
pn.Floatable(True)
pn.MaxSize(wx.Size(xx, self._barHeight))
pn.Gripper(True)
else:
pn.BestSize(wx.Size(xx, self._barHeight))
pn.Gripper(False)
pn.Resizable(False)
pn.PaneBorder(False)
mgr.AddPane(self, pn)
self._mgr = mgr
def DoGiveHelp(self, hit):
"""
Gives tooltips and help in `wx.StatusBar`.
:param `hit`: the toolbar tool currently hovered by the mouse.
"""
shortHelp = hit.GetShortHelp()
if shortHelp:
self.SetToolTipString(shortHelp)
self._haveTip = True
longHelp = hit.GetLongHelp()
if not longHelp:
return
topLevel = wx.GetTopLevelParent(self)
if isinstance(topLevel, wx.Frame) and topLevel.GetStatusBar():
statusBar = topLevel.GetStatusBar()
if self._statusTimer and self._statusTimer.IsRunning():
self._statusTimer.Stop()
statusBar.PopStatusText(0)
statusBar.PushStatusText(longHelp, 0)
self._statusTimer = StatusBarTimer(self)
self._statusTimer.Start(_DELAY, wx.TIMER_ONE_SHOT)
def RemoveHelp(self):
""" Removes the tooltips and statusbar help (if any) for a button. """
if self._haveTip:
self.SetToolTipString("")
self._haveTip = False
if self._statusTimer and self._statusTimer.IsRunning():
topLevel = wx.GetTopLevelParent(self)
statusBar = topLevel.GetStatusBar()
self._statusTimer.Stop()
statusBar.PopStatusText(0)
self._statusTimer = None
def OnStatusBarTimer(self):
""" Handles the timer expiring to delete the `longHelp` string in the `wx.StatusBar`. """
topLevel = wx.GetTopLevelParent(self)
statusBar = topLevel.GetStatusBar()
statusBar.PopStatusText(0)
class mcPopupWindow(wx.MiniFrame):
""" Since Max OS does not support `wx.PopupWindow`, this is an alternative."""
def __init__(self, parent):
"""
Default class constructor.
:param `parent`: the L{mcPopupWindow} parent window.
"""
wx.MiniFrame.__init__(self, parent, style = wx.POPUP_WINDOW)
self.SetExtraStyle(wx.WS_EX_TRANSIENT)
self._parent = parent
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeaveWindow)
def OnLeaveWindow(self, event):
"""
Handles the ``wx.EVT_LEAVE_WINDOW`` event for L{mcPopupWindow}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
event.Skip()
havePopupWindow = 1
if wx.Platform == '__WXMAC__':
havePopupWindow = 0
wx.PopupWindow = mcPopupWindow
# ---------------------------------------------------------------------------- #
# Class ShadowPopupWindow
# ---------------------------------------------------------------------------- #
class ShadowPopupWindow(wx.PopupWindow):
""" Base class for generic L{FlatMenu} derived from `wx.PopupWindow`. """
def __init__(self, parent=None):
"""
Default class constructor.
        :param `parent`: the L{ShadowPopupWindow} parent (typically your main frame).
"""
if not parent:
parent = wx.GetApp().GetTopWindow()
if not parent:
raise Exception("Can't create menu without parent!")
wx.PopupWindow.__init__(self, parent)
if "__WXMSW__" in wx.Platform and _libimported == "MH":
GCL_STYLE= -26
cstyle= win32gui.GetClassLong(self.GetHandle(), GCL_STYLE)
if cstyle & CS_DROPSHADOW == 0:
win32api.SetClassLong(self.GetHandle(),
GCL_STYLE, cstyle | CS_DROPSHADOW)
# popup windows are created hidden by default
self.Hide()
#--------------------------------------------------------
# Class FlatMenuButton
#--------------------------------------------------------
class FlatMenuButton(object):
"""
    A small class that functions like `wx.BitmapButton`; the reason for not
    using `wx.BitmapButton` is that on Linux it has some extra margins that
    apparently cannot be removed.
"""
def __init__(self, menu, up, normalBmp, disabledBmp=wx.NullBitmap):
"""
Default class constructor.
:param `menu`: the parent menu associated with this button;
:param `up`: ``True`` for up arrow or ``False`` for down arrow;
:param `normalBmp`: normal state bitmap;
:param `disabledBmp`: disabled state bitmap.
"""
self._normalBmp = normalBmp
self._up = up
self._parent = menu
self._pos = wx.Point()
self._size = wx.Size()
self._timerID = wx.NewId()
if not disabledBmp.Ok():
self._disabledBmp = ArtManager.Get().CreateGreyBitmap(self._normalBmp)
else:
self._disabledBmp = disabledBmp
self._state = ControlNormal
self._timer = wx.Timer(self._parent, self._timerID)
self._timer.Stop()
def __del__(self):
""" Used internally. """
if self._timer:
if self._timer.IsRunning():
self._timer.Stop()
del self._timer
def Contains(self, pt):
""" Used internally. """
        rect = wx.RectPS(self._pos, self._size)
        return rect.Contains(pt)
def Draw(self, dc):
"""
        Draws the button at its current rectangle on the given `dc`.
:param `dc`: an instance of `wx.DC`.
"""
rect = wx.RectPS(self._pos, self._size)
xx = rect.x + (rect.width - self._normalBmp.GetWidth())/2
yy = rect.y + (rect.height - self._normalBmp.GetHeight())/2
ArtManager.Get().DrawButton(dc, rect, Style2007, self._state, wx.BLACK)
dc.DrawBitmap(self._normalBmp, xx, yy, True)
def ProcessLeftDown(self, pt):
"""
Handles left down mouse events.
:param `pt`: an instance of `wx.Point` where the left mouse button was pressed.
"""
if not self.Contains(pt):
return False
self._state = ControlPressed
self._parent.Refresh()
if self._up:
self._parent.ScrollUp()
else:
self._parent.ScrollDown()
self._timer.Start(100)
return True
def ProcessLeftUp(self, pt):
"""
Handles left up mouse events.
:param `pt`: an instance of `wx.Point` where the left mouse button was released.
"""
# always stop the timer
self._timer.Stop()
if not self.Contains(pt):
return False
self._state = ControlFocus
self._parent.Refresh()
return True
def ProcessMouseMove(self, pt):
"""
Handles mouse motion events.
:param `pt`: an instance of `wx.Point` where the mouse pointer was moved.
"""
        # pt is in parent coordinates, convert it to ours
if not self.Contains(pt):
self._timer.Stop()
if self._state != ControlNormal:
self._state = ControlNormal
self._parent.Refresh()
return False
# Process mouse move event
if self._state != ControlFocus:
if self._state != ControlPressed:
self._state = ControlFocus
self._parent.Refresh()
return True
def GetTimerId(self):
""" Returns the timer object Ientifier. """
return self._timerID
def GetTimer(self):
""" Returns the timer object. """
return self._timer
def Move(self, input1, input2=None):
""" Moves L{FlatMenuButton} to the specified position. """
if type(input) == type(1):
self._pos = wx.Point(input1, input2)
else:
self._pos = input1
def SetSize(self, input1, input2=None):
"""
Sets the size for L{FlatMenuButton}.
:param `input1`: if it is an instance of `wx.Size`, it represents the L{FlatMenuButton}
size and the `input2` parameter is not used. Otherwise it is an integer representing
the button width;
:param `input2`: if not ``None``, it is an integer representing the button height.
"""
        if isinstance(input1, int):
self._size = wx.Size(input1, input2)
else:
self._size = input1
def GetClientRect(self):
""" Returns the client rectangle for L{FlatMenuButton}. """
return wx.RectPS(self._pos, self._size)
#--------------------------------------------------------
# Class FlatMenuItemGroup
#--------------------------------------------------------
class FlatMenuItemGroup(object):
"""
A class that manages a group of radio menu items.
"""
def __init__(self):
""" Default class constructor. """
self._items = []
def GetSelectedItem(self):
""" Returns the selected item. """
for item in self._items:
if item.IsChecked():
return item
return None
def Add(self, item):
"""
Adds a new item to the group.
        :param `item`: an instance of L{FlatMenuItem}.
"""
if item.IsChecked():
# uncheck all other items
for exitem in self._items:
exitem._bIsChecked = False
self._items.append(item)
def Exist(self, item):
"""
Checks if an item is in the group.
        :param `item`: an instance of L{FlatMenuItem}.
"""
if item in self._items:
return True
return False
def SetSelection(self, item):
"""
Selects a particular item.
        :param `item`: an instance of L{FlatMenuItem}.
"""
        # make sure this item exists in our group
if not self.Exist(item):
return
# uncheck all other items
for exitem in self._items:
exitem._bIsChecked = False
item._bIsChecked = True
def Remove(self, item):
"""
Removes a particular item.
        :param `item`: an instance of L{FlatMenuItem}.
"""
if item not in self._items:
return
self._items.remove(item)
if item.IsChecked() and len(self._items) > 0:
            # if the removed item was the selected one,
            # select the first one in the group
self._items[0]._bIsChecked = True
#--------------------------------------------------------
# Class FlatMenuBase
#--------------------------------------------------------
class FlatMenuBase(ShadowPopupWindow):
"""
Base class for generic flat menu derived from `wx.PopupWindow`.
"""
def __init__(self, parent=None):
"""
Default class constructor.
:param `parent`: the L{ShadowPopupWindow} parent window.
"""
self._parentMenu = parent
self._openedSubMenu = None
self._owner = None
self._popupPtOffset = 0
self._showScrollButtons = False
self._upButton = None
self._downButton = None
self._is_dismiss = False
ShadowPopupWindow.__init__(self, parent)
def OnDismiss(self):
""" Fires an event ``EVT_FLAT_MENU_DISMISSED`` and handle menu dismiss. """
# Release mouse capture if needed
if self.HasCapture():
self.ReleaseMouse()
self._is_dismiss = True
# send an event about our dismissal to the parent (unless we are a sub menu)
if self.IsShown() and not self._parentMenu:
event = FlatMenuEvent(wxEVT_FLAT_MENU_DISMISSED, self.GetId())
event.SetEventObject(self)
# Send it
if self.GetMenuOwner():
self.GetMenuOwner().GetEventHandler().ProcessEvent(event)
else:
self.GetEventHandler().ProcessEvent(event)
def Popup(self, pt, parent):
"""
Popups menu at the specified point.
:param `pt`: an instance of `wx.Point`, assumed to be in screen coordinates. However,
if `parent` is not ``None``, `pt` is translated into the screen coordinates using
`parent.ClientToScreen()`;
:param `parent`: if not ``None``, an instance of `wx.Window`.
"""
# some controls update themselves from OnIdle() call - let them do it
wx.GetApp().ProcessIdle()
# The mouse was pressed in the parent coordinates,
# e.g. pressing on the left top of a text ctrl
        # will result in (1, 1); these coordinates need
# to be converted into screen coords
self._parentMenu = parent
        # If we are the topmost menu, we use the given pt as-is; otherwise we
        # translate it into screen coordinates using the logical parent
        # (the second argument provided to this function)
if self._parentMenu:
pos = self._parentMenu.ClientToScreen(pt)
else:
pos = pt
# Fit the menu into screen
pos = self.AdjustPosition(pos)
if self._showScrollButtons:
sz = self.GetSize()
# Get the screen height
scrHeight = wx.SystemSettings_GetMetric(wx.SYS_SCREEN_Y)
if not self._upButton:
self._upButton = FlatMenuButton(self, True, ArtManager.Get().GetStockBitmap("arrow_up"))
if not self._downButton:
self._downButton = FlatMenuButton(self, False, ArtManager.Get().GetStockBitmap("arrow_down"))
            # position the scroll buttons
self._upButton.SetSize((SCROLL_BTN_HEIGHT, SCROLL_BTN_HEIGHT))
self._downButton.SetSize((SCROLL_BTN_HEIGHT, SCROLL_BTN_HEIGHT))
self._upButton.Move((sz.x - SCROLL_BTN_HEIGHT - 4, 4))
self._downButton.Move((sz.x - SCROLL_BTN_HEIGHT - 4, scrHeight - pos.y - 2 - SCROLL_BTN_HEIGHT))
self.Move(pos)
self.Show()
# Capture mouse event and direct them to us
self.CaptureMouse()
self._is_dismiss = False
def AdjustPosition(self, pos):
"""
Adjusts position so the menu will be fully visible on screen.
:param `pos`: an instance of `wx.Point` specifying the menu position.
"""
# Check that the menu can fully appear in the screen
scrWidth = wx.SystemSettings_GetMetric(wx.SYS_SCREEN_X)
scrHeight = wx.SystemSettings_GetMetric(wx.SYS_SCREEN_Y)
size = self.GetSize()
        # assume no scroll buttons are needed until proven otherwise
self._showScrollButtons = False
pos.y += self._popupPtOffset
if size.y + pos.y > scrHeight:
# the menu will be truncated
if self._parentMenu is None:
# try to flip the menu
flippedPosy = pos.y - size.y
flippedPosy -= self._popupPtOffset
if flippedPosy >= 0 and flippedPosy + size.y < scrHeight:
pos.y = flippedPosy
return pos
else:
# We need to popup scrollbuttons!
self._showScrollButtons = True
else:
# we are a submenu
# try to decrease the y value of the menu position
newy = pos.y
newy -= (size.y + pos.y) - scrHeight
if newy + size.y > scrHeight:
# probably the menu size is too high to fit
# the screen, we need scrollbuttons
self._showScrollButtons = True
else:
pos.y = newy
menuMaxX = pos.x + size.x
if menuMaxX > scrWidth and pos.x < scrWidth:
if self._parentMenu:
# We are submenu
self._shiftePos = (size.x + self._parentMenu.GetSize().x)
pos.x -= self._shiftePos
pos.x += 10
else:
self._shiftePos = ((size.x + pos.x) - scrWidth)
pos.x -= self._shiftePos
else:
if self._parentMenu:
pos.x += 5
return pos
def Dismiss(self, dismissParent, resetOwner):
"""
Dismisses the popup window.
:param `dismissParent`: whether to dismiss the parent menu or not;
:param `resetOwner`: ``True`` to delete the link between this menu and the
owner menu, ``False`` otherwise.
"""
        # Check if a child menu is popped up; if so, dismiss it
if self._openedSubMenu:
self._openedSubMenu.Dismiss(False, resetOwner)
self.OnDismiss()
# Reset menu owner
if resetOwner:
self._owner = None
self.Show(False)
if self._parentMenu and dismissParent:
self._parentMenu.OnChildDismiss()
self._parentMenu.Dismiss(dismissParent, resetOwner)
self._parentMenu = None
def OnChildDismiss(self):
""" Handles children dismiss. """
self._openedSubMenu = None
def GetRootMenu(self):
""" Gets the top level menu. """
root = self
while root._parentMenu:
root = root._parentMenu
return root
def SetOwnerHeight(self, height):
"""
        Sets the menu owner height; this will be used to position the menu below
or above the owner.
:param `height`: an integer representing the menu owner height.
"""
self._popupPtOffset = height
# by default do nothing
def ScrollDown(self):
"""
Scroll one unit down.
By default this function is empty, let derived class do something.
"""
pass
# by default do nothing
def ScrollUp(self):
"""
Scroll one unit up.
By default this function is empty, let derived class do something.
"""
pass
def GetMenuOwner(self):
"""
        Returns the menu logical owner; the owner is not necessarily the menu
        parent, it can also be the window that popped it up.
"""
return self._owner
#--------------------------------------------------------
# Class ToolBarItem
#--------------------------------------------------------
class ToolBarItem(object):
"""
A simple class that holds information about a toolbar item.
"""
def __init__(self, tbItem, rect, state):
"""
Default class constructor.
:param `tbItem`: an instance of L{FlatToolbarItem};
:param `rect`: the client rectangle for the toolbar item;
:param `state`: the toolbar item state.
:see: L{MenuEntryInfo.SetState} for a list of valid item states.
"""
self._tbItem = tbItem
self._rect = rect
self._state = state
self._visible = True
#--------------------------------------------------------
# Class FlatToolbarItem
#--------------------------------------------------------
class FlatToolbarItem(object):
"""
This class represents a toolbar item.
"""
def __init__(self, controlType=None, id=wx.ID_ANY, label="", disabledBmp=wx.NullBitmap, kind=wx.ITEM_NORMAL,
shortHelp="", longHelp=""):
"""
Default class constructor.
:param `controlType`: can be ``None`` for a toolbar separator, an instance
of `wx.Window` for a control or an instance of `wx.Bitmap` for a standard
toolbar tool;
:param `id`: the toolbar tool id. If set to ``wx.ID_ANY``, a new id is
automatically assigned;
:param `label`: the toolbar tool label;
:param `disabledBmp`: the bitmap used when the tool is disabled. If the tool
is a standard one (i.e., not a control or a separator), and `disabledBmp`
is equal to `wx.NullBitmap`, the disabled bitmap is automatically generated
         by greying out the normal one;
:param `kind`: may be ``wx.ITEM_NORMAL`` for a normal button (default),
``wx.ITEM_CHECK`` for a checkable tool (such tool stays pressed after it had been
toggled) or ``wx.ITEM_RADIO`` for a checkable tool which makes part of a radio
group of tools each of which is automatically unchecked whenever another button
in the group is checked;
:param `shortHelp`: a string used for the tool's tooltip;
:param `longHelp`: this string is shown in the `wx.StatusBar` (if any) of the
parent frame when the mouse pointer is inside the tool.
"""
if id == wx.ID_ANY:
id = wx.NewId()
if controlType is None: # Is a separator
self._normalBmp = wx.NullBitmap
self._id = wx.NewId()
self._label = ""
self._disabledImg = wx.NullBitmap
self._customCtrl = None
kind = wx.ITEM_SEPARATOR
elif isinstance(controlType, wx.Window): # is a wxControl
self._normalBmp = wx.NullBitmap
self._id = id
self._label = ""
self._disabledImg = wx.NullBitmap
self._customCtrl = controlType
kind = FTB_ITEM_CUSTOM
elif isinstance(controlType, wx.Bitmap): # Bitmap construction, simple tool
self._normalBmp = controlType
self._id = id
self._label = label
self._disabledImg = disabledBmp
self._customCtrl = None
if not self._disabledImg.Ok():
# Create a grey bitmap from the normal bitmap
self._disabledImg = ArtManager.Get().CreateGreyBitmap(self._normalBmp)
self._kind = kind
self._enabled = True
self._selected = False
self._group = -1 # group id for radio items
if not shortHelp:
shortHelp = label
self._shortHelp = shortHelp
self._longHelp = longHelp
def GetLabel(self):
""" Returns the tool label. """
return self._label
def SetLabel(self, label):
"""
Sets the tool label.
:param `label`: the new tool string.
"""
self._label = label
def GetBitmap(self):
""" Returns the tool bitmap. """
return self._normalBmp
def SetBitmap(self, bmp):
"""
Sets the tool bitmap.
:param `bmp`: the new tool bitmap, a valid `wx.Bitmap` object.
"""
self._normalBmp = bmp
def GetDisabledBitmap(self):
""" Returns the tool disabled bitmap. """
return self._disabledImg
def SetDisabledBitmap(self, bmp):
"""
Sets the tool disabled bitmap.
:param `bmp`: the new tool disabled bitmap, a valid `wx.Bitmap` object.
"""
self._disabledImg = bmp
def GetId(self):
""" Gets the tool id. """
return self._id
def IsSeparator(self):
""" Returns whether the tool is a separator or not. """
return self._kind == wx.ITEM_SEPARATOR
def IsRadioItem(self):
""" Returns True if the item is a radio item. """
return self._kind == wx.ITEM_RADIO
def IsCheckItem(self):
""" Returns True if the item is a radio item. """
return self._kind == wx.ITEM_CHECK
def IsCustomControl(self):
""" Returns whether the tool is a custom control or not. """
return self._kind == FTB_ITEM_CUSTOM
def IsRegularItem(self):
""" Returns whether the tool is a standard tool or not. """
return self._kind == wx.ITEM_NORMAL
def GetCustomControl(self):
""" Returns the associated custom control. """
return self._customCtrl
def IsSelected(self):
""" Returns whether the tool is selected or checked."""
return self._selected
def IsChecked(self):
""" Same as L{IsSelected}. More intuitive for check items though. """
return self._selected
def Select(self, select=True):
"""
Selects or checks a radio or check item.
:param `select`: ``True`` to select or check a tool, ``False`` to unselect
or uncheck it.
"""
self._selected = select
def Toggle(self):
""" Toggles a check item. """
if self.IsCheckItem():
self._selected = not self._selected
def SetGroup(self, group):
"""
Sets group id for a radio item, for other items does nothing.
:param `group`: an instance of L{FlatMenuItemGroup}.
"""
if self.IsRadioItem():
self._group = group
def GetGroup(self):
""" Returns group id for radio item, or -1 for other item types. """
return self._group
def IsEnabled(self):
""" Returns whether the tool is enabled or not. """
return self._enabled
def Enable(self, enable=True):
"""
Enables or disables the tool.
:param `enable`: ``True`` to enable the tool, ``False`` to disable it.
"""
self._enabled = enable
def GetShortHelp(self):
""" Returns the tool short help string (displayed in the tool's tooltip). """
if self._kind == wx.ITEM_NORMAL:
return self._shortHelp
return ""
def SetShortHelp(self, help):
"""
Sets the tool short help string (displayed in the tool's tooltip).
:param `help`: the new tool short help string.
"""
if self._kind == wx.ITEM_NORMAL:
self._shortHelp = help
def SetLongHelp(self, help):
"""
Sets the tool long help string (displayed in the parent frame `wx.StatusBar`).
:param `help`: the new tool long help string.
"""
if self._kind == wx.ITEM_NORMAL:
self._longHelp = help
def GetLongHelp(self):
""" Returns the tool long help string (displayed in the parent frame `wx.StatusBar`). """
if self._kind == wx.ITEM_NORMAL:
return self._longHelp
return ""
#--------------------------------------------------------
# Class FlatMenuItem
#--------------------------------------------------------
class FlatMenuItem(object):
"""
A class that represents an item in a menu.
"""
def __init__(self, parent, id=wx.ID_SEPARATOR, text="", helpString="",
kind=wx.ITEM_NORMAL, subMenu=None, normalBmp=wx.NullBitmap,
disabledBmp=wx.NullBitmap,
hotBmp=wx.NullBitmap):
"""
Default class constructor.
:param `parent`: menu that the menu item belongs to;
:param `id`: the menu item identifier;
:param `text`: text for the menu item, as shown on the menu. An accelerator
key can be specified using the ampersand '&' character. In order to embed
an ampersand character in the menu item text, the ampersand must be doubled;
:param `kind`: may be ``wx.ITEM_SEPARATOR``, ``wx.ITEM_NORMAL``, ``wx.ITEM_CHECK``
or ``wx.ITEM_RADIO``;
:param `helpString`: optional help string that will be shown on the status bar;
:param `subMenu`: an optional L{FlatMenu} instance to attach as this item's sub-menu;
:param `normalBmp`: normal bitmap to draw to the side of the text, this bitmap
is used when the menu is enabled;
:param `disabledBmp`: 'greyed' bitmap to draw to the side of the text; this
bitmap is used when the menu item is disabled. If none is supplied, the normal
bitmap is used;
:param `hotBmp`: hot bitmap to draw to the side of the text; this bitmap is
used when the menu item is hovered. If none is supplied, the normal bitmap is used.
"""
self._text = text
self._kind = kind
self._helpString = helpString
if id == wx.ID_ANY:
id = wx.NewId()
self._id = id
self._parentMenu = parent
self._subMenu = subMenu
self._normalBmp = normalBmp
self._disabledBmp = disabledBmp
self._hotBmp = hotBmp
self._bIsChecked = False
self._bIsEnabled = True
self._mnemonicIdx = wx.NOT_FOUND
self._isAttachedToMenu = False
self._accelStr = ""
self._rect = wx.Rect()
self._groupPtr = None
self._visible = False
self._contextMenu = None
self.SetLabel(self._text)
self.SetMenuBar()
self._checkMarkBmp = wx.BitmapFromXPMData(check_mark_xpm)
self._checkMarkBmp.SetMask(wx.Mask(self._checkMarkBmp, wx.WHITE))
self._radioMarkBmp = wx.BitmapFromXPMData(radio_item_xpm)
self._radioMarkBmp.SetMask(wx.Mask(self._radioMarkBmp, wx.WHITE))
def SetLongHelp(self, help):
"""
Sets the item long help string (displayed in the parent frame `wx.StatusBar`).
:param `help`: the new item long help string.
"""
self._helpString = help
def GetLongHelp(self):
""" Returns the item long help string (displayed in the parent frame `wx.StatusBar`). """
return self._helpString
def GetShortHelp(self):
""" Returns the item short help string (displayed in the tool's tooltip). """
return ""
def Enable(self, enable=True):
"""
Enables or disables a menu item.
:param `enable`: ``True`` to enable the menu item, ``False`` to disable it.
"""
self._bIsEnabled = enable
if self._parentMenu:
self._parentMenu.UpdateItem(self)
def GetBitmap(self):
"""
Returns the normal bitmap associated to the menu item or `wx.NullBitmap` if
none has been supplied.
"""
return self._normalBmp
def GetDisabledBitmap(self):
"""
Returns the disabled bitmap associated to the menu item or `wx.NullBitmap`
if none has been supplied.
"""
return self._disabledBmp
def GetHotBitmap(self):
"""
Returns the hot bitmap associated to the menu item or `wx.NullBitmap` if
none has been supplied.
"""
return self._hotBmp
def GetHelp(self):
""" Returns the item help string. """
return self._helpString
def GetId(self):
""" Returns the item id. """
return self._id
def GetKind(self):
"""
Returns the menu item kind, can be one of ``wx.ITEM_SEPARATOR``, ``wx.ITEM_NORMAL``,
``wx.ITEM_CHECK`` or ``wx.ITEM_RADIO``.
"""
return self._kind
def GetLabel(self):
""" Returns the menu item label (without the accelerator if it is part of the string). """
return self._label
def GetMenu(self):
""" Returns the parent menu. """
return self._parentMenu
def GetContextMenu(self):
""" Returns the context menu associated with this item (if any). """
return self._contextMenu
def SetContextMenu(self, context_menu):
"""
Assigns a context menu to this item.
:param `context_menu`: an instance of L{FlatMenu}.
"""
self._contextMenu = context_menu
def GetText(self):
""" Returns the text associated with the menu item including the accelerator. """
return self._text
def GetSubMenu(self):
""" Returns the sub-menu of this menu item (if any). """
return self._subMenu
def IsCheckable(self):
""" Returns ``True`` if this item is of type ``wx.ITEM_CHECK``, ``False`` otherwise. """
return self._kind == wx.ITEM_CHECK
def IsChecked(self):
"""
Returns whether an item is checked or not.
:note: This method is meaningful only for items of kind ``wx.ITEM_CHECK`` or
``wx.ITEM_RADIO``.
"""
return self._bIsChecked
def IsRadioItem(self):
""" Returns ``True`` if this item is of type ``wx.ITEM_RADIO``, ``False`` otherwise. """
return self._kind == wx.ITEM_RADIO
def IsEnabled(self):
""" Returns whether an item is enabled or not. """
return self._bIsEnabled
def IsSeparator(self):
""" Returns ``True`` if this item is of type ``wx.ITEM_SEPARATOR``, ``False`` otherwise. """
return self._id == wx.ID_SEPARATOR
def IsSubMenu(self):
""" Returns whether an item is a sub-menu or not. """
return self._subMenu is not None
def SetNormalBitmap(self, bmp):
"""
Sets the menu item normal bitmap.
:param `bmp`: an instance of `wx.Bitmap`.
"""
self._normalBmp = bmp
def SetDisabledBitmap(self, bmp):
"""
Sets the menu item disabled bitmap.
:param `bmp`: an instance of `wx.Bitmap`.
"""
self._disabledBmp = bmp
def SetHotBitmap(self, bmp):
"""
Sets the menu item hot bitmap.
:param `bmp`: an instance of `wx.Bitmap`.
"""
self._hotBmp = bmp
def SetHelp(self, helpString):
"""
Sets the menu item help string.
:param `helpString`: the new menu item help string.
"""
self._helpString = helpString
def SetMenu(self, menu):
"""
Sets the menu item parent menu.
:param `menu`: an instance of L{FlatMenu}.
"""
self._parentMenu = menu
def SetSubMenu(self, menu):
"""
Sets the menu item sub-menu.
:param `menu`: an instance of L{FlatMenu}.
"""
self._subMenu = menu
# Fix toolbar update
self.SetMenuBar()
def GetAccelString(self):
""" Returns the accelerator string. """
return self._accelStr
def SetRect(self, rect):
"""
Sets the menu item client rectangle.
:param `rect`: the menu item client rectangle.
"""
self._rect = rect
def GetRect(self):
""" Returns the menu item client rectangle. """
return self._rect
def IsShown(self):
""" Returns whether an item is shown or not. """
return self._visible
def Show(self, show=True):
"""
Actually shows/hides the menu item.
:param `show`: ``True`` to show the menu item, ``False`` to hide it.
"""
self._visible = show
def DrawSelf(self, dc, xCoord, yCoord, imageMarginX, markerMarginX, textX, rightMarginX, selected=False):
"""
Draws the menu item.
:param `dc`: an instance of `wx.DC`;
:param `xCoord`: the current x position where to draw the menu;
:param `yCoord`: the current y position where to draw the menu;
:param `imageMarginX`: the spacing between the image and the menu border;
:param `markerMarginX`: the spacing between the checkbox/radio marker and
the menu border;
:param `textX`: the menu item label x position;
:param `rightMarginX`: the right margin between the text and the menu border;
:param `selected`: ``True`` if this menu item is currently hovered by the mouse,
``False`` otherwise.
"""
borderXSize = self._parentMenu.GetBorderXWidth()
itemHeight = self._parentMenu.GetItemHeight()
menuWidth = self._parentMenu.GetMenuWidth()
artMgr = ArtManager.Get()
theme = artMgr.GetMenuTheme()
# Define the item actual rectangle area
itemRect = wx.Rect(xCoord, yCoord, menuWidth, itemHeight)
# Define the drawing area
rect = wx.Rect(xCoord+2, yCoord, menuWidth - 4, itemHeight)
# Draw the background
backColour = artMgr.GetMenuFaceColour()
penColour = backColour
backBrush = wx.Brush(backColour)
lightColour = wx.NamedColour("LIGHT GREY")
leftMarginWidth = self._parentMenu.GetLeftMarginWidth()
pen = wx.Pen(penColour)
dc.SetPen(pen)
dc.SetBrush(backBrush)
dc.DrawRectangleRect(rect)
# Draw the left margin gradient
self._parentMenu.DrawLeftMargin(dc, itemRect)
# check if separator
if self.IsSeparator():
# A separator is a small grey line drawn between
# menu items. The separator height is 3 pixels.
sepWidth = xCoord + menuWidth - textX - 1
sepRect1 = wx.Rect(xCoord + textX, yCoord + 1, sepWidth/2, 1)
sepRect2 = wx.Rect(xCoord + textX + sepWidth/2, yCoord + 1, sepWidth/2-1, 1)
artMgr.PaintStraightGradientBox(dc, sepRect1, backColour, lightColour, False)
artMgr.PaintStraightGradientBox(dc, sepRect2, lightColour, backColour, False)
return
# Keep the item rect
self._rect = itemRect
# Get the bitmap base on the item state (disabled, selected ..)
bmp = self.GetSuitableBitmap(selected)
# First we draw the selection rectangle
if selected:
artMgr.SetMS2007ButtonSunken(False)
artMgr.DrawButton(dc, rect, theme, ControlFocus, False)
if bmp.Ok():
# Calculate the position to place the image
imgHeight = bmp.GetHeight()
imgWidth = bmp.GetWidth()
if imageMarginX == 0:
xx = rect.x + (leftMarginWidth - imgWidth)/2
else:
xx = rect.x + ((leftMarginWidth - rect.height) - imgWidth)/2 + rect.height
yy = rect.y + (rect.height - imgHeight)/2
dc.DrawBitmap(bmp, xx, yy, True)
if self.GetKind() == wx.ITEM_CHECK:
# Checkable item
if self.IsChecked():
# Draw surrounding rectangle around the selection box
xx = rect.x + 1
yy = rect.y + 1
rr = wx.Rect(xx, yy, rect.height-2, rect.height-2)
if not selected:
artMgr.SetMS2007ButtonSunken(False)
artMgr.DrawButton(dc, rr, theme, ControlFocus, False)
dc.DrawBitmap(self._checkMarkBmp, rr.x + (rr.width - 16)/2, rr.y + (rr.height - 16)/2, True)
if self.GetKind() == wx.ITEM_RADIO:
# Checkable item
if self.IsChecked():
# Draw surrounding rectangle around the selection box
xx = rect.x + 1
yy = rect.y + 1
rr = wx.Rect(xx, yy, rect.height-2, rect.height-2)
if not selected:
artMgr.SetMS2007ButtonSunken(False)
artMgr.DrawButton(dc, rr, theme, ControlFocus, False)
dc.DrawBitmap(self._radioMarkBmp, rr.x + (rr.width - 16)/2, rr.y + (rr.height - 16)/2, True)
# Draw text - without accelerators
text = self.GetLabel()
if text:
font = artMgr.GetFont()
enabledTxtColour = artMgr.GetTextColourEnable()
disabledTxtColour = artMgr.GetTextColourDisable()
textColour = (self.IsEnabled() and [enabledTxtColour] or [disabledTxtColour])[0]
dc.SetFont(font)
w, h = dc.GetTextExtent(text)
dc.SetTextForeground(textColour)
if self._mnemonicIdx != wx.NOT_FOUND:
# We divide the drawing to 3 parts
text1 = text[0:self._mnemonicIdx]
text2 = text[self._mnemonicIdx]
text3 = text[self._mnemonicIdx+1:]
w1, dummy = dc.GetTextExtent(text1)
w2, dummy = dc.GetTextExtent(text2)
w3, dummy = dc.GetTextExtent(text3)
posx = xCoord + textX + borderXSize
posy = (itemHeight - h)/2 + yCoord
# Draw first part
dc.DrawText(text1, posx, posy)
# mnemonic
if "__WXGTK__" not in wx.Platform:
font.SetUnderlined(True)
dc.SetFont(font)
posx += w1
dc.DrawText(text2, posx, posy)
# last part
font.SetUnderlined(False)
dc.SetFont(font)
posx += w2
dc.DrawText(text3, posx, posy)
else:
w, h = dc.GetTextExtent(text)
dc.DrawText(text, xCoord + textX + borderXSize, (itemHeight - h)/2 + yCoord)
# Now draw accelerator
# Accelerators are aligned to the right
if self.GetAccelString():
accelWidth, accelHeight = dc.GetTextExtent(self.GetAccelString())
dc.DrawText(self.GetAccelString(), xCoord + rightMarginX - accelWidth, (itemHeight - accelHeight)/2 + yCoord)
# Check if this item has sub-menu - if it does, draw
# right arrow on the right margin
if self.GetSubMenu():
# Draw arrow
rightArrowBmp = wx.BitmapFromXPMData(menu_right_arrow_xpm)
rightArrowBmp.SetMask(wx.Mask(rightArrowBmp, wx.WHITE))
xx = xCoord + rightMarginX + borderXSize
rr = wx.Rect(xx, rect.y + 1, rect.height-2, rect.height-2)
dc.DrawBitmap(rightArrowBmp, rr.x + 4, rr.y +(rr.height-16)/2, True)
def GetHeight(self):
""" Returns the menu item height. """
if self.IsSeparator():
return 3
else:
return self._parentMenu._itemHeight
def GetSuitableBitmap(self, selected):
"""
Gets the bitmap that should be used based on the item state.
:param `selected`: ``True`` if this menu item is currently hovered by the mouse,
``False`` otherwise.
"""
normalBmp = self._normalBmp
gBmp = (self._disabledBmp.Ok() and [self._disabledBmp] or [self._normalBmp])[0]
hotBmp = (self._hotBmp.Ok() and [self._hotBmp] or [self._normalBmp])[0]
if not self.IsEnabled():
return gBmp
elif selected:
return hotBmp
else:
return normalBmp
def SetLabel(self, text):
"""
Sets the label text for this item from the text (excluding the accelerator).
:param `text`: the new item label (excluding the accelerator).
"""
if text:
indx = text.find("\t")
if indx >= 0:
self._accelStr = text[indx+1:]
label = text[0:indx]
else:
self._accelStr = ""
label = text
self._mnemonicIdx, self._label = ArtManager.Get().GetAccelIndex(label)
else:
self._mnemonicIdx = wx.NOT_FOUND
self._label = ""
if self._parentMenu:
self._parentMenu.UpdateItem(self)
def SetText(self, text):
"""
Sets the text for this menu item (including accelerators).
:param `text`: the new item label (including the accelerator).
"""
self._text = text
self.SetLabel(self._text)
def SetMenuBar(self):
""" Links the current menu item with the main L{FlatMenuBar}. """
# Fix toolbar update
if self._subMenu and self._parentMenu:
self._subMenu.SetSubMenuBar(self._parentMenu.GetMenuBarForSubMenu())
def GetAcceleratorEntry(self):
""" Returns the accelerator entry associated to this menu item. """
return wx.GetAccelFromString(self.GetText())
def GetMnemonicChar(self):
""" Returns the shortcut char for this menu item. """
if self._mnemonicIdx == wx.NOT_FOUND:
return 0
mnemonic = self._label[self._mnemonicIdx]
return mnemonic.lower()
def Check(self, check=True):
"""
Checks or unchecks the menu item.
:param `check`: ``True`` to check the menu item, ``False`` to uncheck it.
:note: This method is meaningful only for menu items of ``wx.ITEM_CHECK``
or ``wx.ITEM_RADIO`` kind.
"""
if self.IsRadioItem() and not self._isAttachedToMenu:
# radio items can be checked only after they are attached to menu
return
self._bIsChecked = check
# update group
if self.IsRadioItem() and check:
self._groupPtr.SetSelection(self)
# Our parent menu might want to do something with this change
if self._parentMenu:
self._parentMenu.UpdateItem(self)
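#--------------------------------------------------------
# Editor's note: usage sketch, not part of the original module
#--------------------------------------------------------
def _example_menu_item_labels(menu):
    """
    A hedged sketch of how SetText()/SetLabel() above split an item label:
    the part after a tab becomes the accelerator string and '&' marks the
    mnemonic. `menu` is a hypothetical FlatMenu instance.
    """
    item = FlatMenuItem(menu, wx.ID_OPEN, "&Open\tCtrl+O", "Open a file")
    assert item.GetText() == "&Open\tCtrl+O"   # full text, accelerator included
    assert item.GetLabel() == "Open"           # no '&', no accelerator
    assert item.GetAccelString() == "Ctrl+O"
    assert item.GetMnemonicChar() == "o"
    return item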
#--------------------------------------------------------
# Class FlatMenu
#--------------------------------------------------------
class FlatMenu(FlatMenuBase):
"""
A Flat popup menu generic implementation.
"""
def __init__(self, parent=None):
"""
Default class constructor.
:param `parent`: the L{FlatMenu} parent window (used to initialize the
underlying L{ShadowPopupWindow}).
"""
self._menuWidth = 2*26
self._leftMarginWidth = 26
self._rightMarginWidth = 30
self._borderXWidth = 1
self._borderYWidth = 2
self._activeWin = None
self._focusWin = None
self._imgMarginX = 0
self._markerMarginX = 0
self._textX = 26
self._rightMarginPosX = -1
self._itemHeight = 20
self._selectedItem = -1
self._clearCurrentSelection = True
self._textPadding = 8
self._marginHeight = 20
self._marginWidth = 26
self._accelWidth = 0
self._mb = None
self._itemsArr = []
self._accelArray = []
self._ptLast = wx.Point()
self._resizeMenu = True
self._shiftePos = 0
self._first = 0
self._mb_submenu = 0
self._is_dismiss = False
self._numCols = 1
FlatMenuBase.__init__(self, parent)
self.SetSize(wx.Size(self._menuWidth, self._itemHeight+4))
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(wx.EVT_ENTER_WINDOW, self.OnMouseEnterWindow)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnMouseLeaveWindow)
self.Bind(wx.EVT_LEFT_DOWN, self.OnMouseLeftDown)
self.Bind(wx.EVT_LEFT_UP, self.OnMouseLeftUp)
self.Bind(wx.EVT_LEFT_DCLICK, self.OnMouseLeftDown)
self.Bind(wx.EVT_RIGHT_DOWN, self.OnMouseRightDown)
self.Bind(wx.EVT_KILL_FOCUS, self.OnKillFocus)
self.Bind(wx.EVT_TIMER, self.OnTimer)
def SetMenuBar(self, mb):
"""
Attaches this menu to a menubar.
:param `mb`: an instance of L{FlatMenuBar}.
"""
self._mb = mb
def SetSubMenuBar(self, mb):
"""
Attaches this menu to a menubar.
:param `mb`: an instance of L{FlatMenuBar}.
"""
self._mb_submenu = mb
def GetMenuBar(self):
""" Returns the menubar associated with this menu item. """
if self._mb_submenu:
return self._mb_submenu
return self._mb
def GetMenuBarForSubMenu(self):
""" Returns the menubar associated with this menu item. """
return self._mb
def Popup(self, pt, owner=None, parent=None):
"""
Pops up the menu.
:param `pt`: the point at which the menu should be popped up (an instance
of `wx.Point`);
:param `owner`: the owner of the menu. The owner does not necessarily mean the
menu parent; it can also be the window that popped it up;
:param `parent`: the menu parent window.
"""
if "__WXMSW__" in wx.Platform:
self._mousePtAtStartup = wx.GetMousePosition()
# each time we popup, need to reset the starting index
self._first = 0
# Loop over this menu's items and send an update UI event for
# every item in the menu
numEvents = len(self._itemsArr)
cc = 0
self._shiftePos = 0
# Set the owner of the menu. All events will be directed to it.
# If owner is None, the Default GetParent() is used as the owner
self._owner = owner
for cc in xrange(numEvents):
self.SendUIEvent(cc)
# Adjust menu position and show it
FlatMenuBase.Popup(self, pt, parent)
artMgr = ArtManager.Get()
artMgr.MakeWindowTransparent(self, artMgr.GetTransparency())
# Replace the event handler of the active window to direct
# all keyboard events to us and the focused window to direct char events to us
self._activeWin = wx.GetActiveWindow()
if self._activeWin:
oldHandler = self._activeWin.GetEventHandler()
newEvtHandler = MenuKbdRedirector(self, oldHandler)
self._activeWin.PushEventHandler(newEvtHandler)
if "__WXMSW__" in wx.Platform:
self._focusWin = wx.Window.FindFocus()
elif "__WXGTK__" in wx.Platform:
self._focusWin = self
else:
self._focusWin = None
if self._focusWin:
newEvtHandler = FocusHandler(self)
self._focusWin.PushEventHandler(newEvtHandler)
def Append(self, id, item, helpString="", kind=wx.ITEM_NORMAL):
"""
Appends an item to this menu.
:param `id`: the menu item identifier;
:param `item`: the string to appear on the menu item;
:param `helpString`: an optional help string associated with the item. By default,
the handler for the ``EVT_FLAT_MENU_ITEM_MOUSE_OVER`` event displays this string
in the status line;
:param `kind`: may be ``wx.ITEM_NORMAL`` for a normal button (default),
``wx.ITEM_CHECK`` for a checkable tool (such tool stays pressed after it had been
toggled) or ``wx.ITEM_RADIO`` for a checkable tool which makes part of a radio
group of tools each of which is automatically unchecked whenever another button
in the group is checked;
"""
newItem = FlatMenuItem(self, id, item, helpString, kind)
return self.AppendItem(newItem)
def AppendSubMenu(self, subMenu, item, helpString=""):
"""
Adds a pull-right submenu to the end of the menu. See L{AppendMenu}.
This method exists to mirror the API of `wx.Menu`.
"""
return self.AppendMenu(wx.ID_ANY, item, subMenu, helpString)
def AppendMenu(self, id, item, subMenu, helpString=""):
"""
Adds a pull-right submenu to the end of the menu.
:param `id`: the menu item identifier;
:param `item`: the string to appear on the menu item;
:param `subMenu`: an instance of L{FlatMenu}, the submenu to append;
:param `helpString`: an optional help string associated with the item. By default,
the handler for the ``EVT_FLAT_MENU_ITEM_MOUSE_OVER`` event displays this string
in the status line.
"""
newItem = FlatMenuItem(self, id, item, helpString, wx.ITEM_NORMAL, subMenu)
return self.AppendItem(newItem)
# The main Append function
def AppendItem(self, menuItem):
"""
Appends an item to this menu.
:param `menuItem`: an instance of L{FlatMenuItem}.
"""
if not menuItem:
raise Exception("Adding None item?")
# Reparent to us
menuItem.SetMenu(self)
self._itemsArr.append(menuItem)
menuItem._isAttachedToMenu = True
# Update the menu width if necessary
menuItemWidth = self.GetMenuItemWidth(menuItem)
self._menuWidth = (self._menuWidth > menuItemWidth + self._accelWidth and \
[self._menuWidth] or [menuItemWidth + self._accelWidth])[0]
menuHeight = 0
switch = 1e6
if self._numCols > 1:
nItems = len(self._itemsArr)
switch = int(math.ceil((nItems - self._first)/float(self._numCols)))
for indx, item in enumerate(self._itemsArr):
if indx >= switch:
break
if item.IsSeparator():
menuHeight += 3
else:
menuHeight += self._itemHeight
self.SetSize(wx.Size(self._menuWidth*self._numCols, menuHeight+4))
# Add accelerator entry to the menu if needed
accel = menuItem.GetAcceleratorEntry()
if accel:
accel.Set(accel.GetFlags(), accel.GetKeyCode(), menuItem.GetId())
self._accelArray.append(accel)
self.UpdateRadioGroup(menuItem)
return menuItem
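# Editor's note (usage sketch, not part of the original source): the
# Append* helpers in this class all funnel through AppendItem() above,
# which also grows the menu geometry and the accelerator array. Typical
# population of a menu, with `frame` as a hypothetical parent window:
#
#     menu = FlatMenu(frame)
#     menu.Append(wx.ID_NEW, "&New\tCtrl+N", "Create a new document")
#     menu.AppendSeparator()
#     menu.AppendCheckItem(wx.ID_ANY, "Show &Toolbar")
#     menu.AppendRadioItem(wx.ID_ANY, "View as &List")
#     menu.AppendRadioItem(wx.ID_ANY, "View as &Icons")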
def GetMenuItems(self):
""" Returns the list of menu items in the menu. """
return self._itemsArr
def GetMenuItemWidth(self, menuItem):
"""
Returns the width of a particular item.
:param `menuItem`: an instance of L{FlatMenuItem}.
"""
menuItemWidth = 0
text = menuItem.GetLabel() # Without accelerator
accel = menuItem.GetAccelString()
dc = wx.ClientDC(self)
font = ArtManager.Get().GetFont()
dc.SetFont(font)
accelFiller = "XXXX" # 4 spaces between text and accel column
# Calc text length/height
dummy, itemHeight = dc.GetTextExtent("Tp")
width, height = dc.GetTextExtent(text)
accelWidth, accelHeight = dc.GetTextExtent(accel)
filler, dummy = dc.GetTextExtent(accelFiller)
bmpHeight = bmpWidth = 0
if menuItem.GetBitmap().Ok():
bmpHeight = menuItem.GetBitmap().GetHeight()
bmpWidth = menuItem.GetBitmap().GetWidth()
if itemHeight < self._marginHeight:
itemHeight = self._marginHeight
itemHeight = (bmpHeight > self._itemHeight and [bmpHeight] or [itemHeight])[0]
itemHeight += 2*self._borderYWidth
# Update the global menu item height if needed
self._itemHeight = (self._itemHeight > itemHeight and [self._itemHeight] or [itemHeight])[0]
self._marginWidth = (self._marginWidth > bmpWidth and [self._marginWidth] or [bmpWidth])[0]
# Update the accel width
accelWidth += filler
if accel:
self._accelWidth = (self._accelWidth > accelWidth and [self._accelWidth] or [accelWidth])[0]
# In case the item has image & is type radio or check, we need double size
# left margin
factor = (((menuItem.GetBitmap() != wx.NullBitmap) and \
(menuItem.IsCheckable() or (menuItem.GetKind() == wx.ITEM_RADIO))) and [2] or [1])[0]
if factor == 2:
self._imgMarginX = self._marginWidth + 2*self._borderXWidth
self._leftMarginWidth = 2 * self._marginWidth + 2*self._borderXWidth
else:
self._leftMarginWidth = ((self._leftMarginWidth > self._marginWidth + 2*self._borderXWidth) and \
[self._leftMarginWidth] or [self._marginWidth + 2*self._borderXWidth])[0]
menuItemWidth = self.GetLeftMarginWidth() + 2*self.GetBorderXWidth() + width + self.GetRightMarginWidth()
self._textX = self._imgMarginX + self._marginWidth + self._textPadding
# update the rightMargin X position
self._rightMarginPosX = ((self._textX + width + self._accelWidth> self._rightMarginPosX) and \
[self._textX + width + self._accelWidth] or [self._rightMarginPosX])[0]
return menuItemWidth
def GetMenuWidth(self):
""" Returns the menu width. """
return self._menuWidth
def GetLeftMarginWidth(self):
""" Returns the menu left margin width. """
return self._leftMarginWidth
def GetRightMarginWidth(self):
""" Returns the menu right margin width. """
return self._rightMarginWidth
def GetBorderXWidth(self):
""" Returns the menu border x-width. """
return self._borderXWidth
def GetBorderYWidth(self):
""" Returns the menu border y-width. """
return self._borderYWidth
def GetItemHeight(self):
""" Returns the height of a particular item. """
return self._itemHeight
def AppendCheckItem(self, id, item, helpString=""):
"""
Adds a checkable item to the end of the menu.
:see: L{Append} for the explanation of the input parameters.
"""
newItem = FlatMenuItem(self, id, item, helpString, wx.ITEM_CHECK)
return self.AppendItem(newItem)
def AppendRadioItem(self, id, item, helpString=""):
"""
Adds a radio item to the end of the menu.
All consequent radio items form a group and when an item in the group is
checked, all the others are automatically unchecked.
:see: L{Append} for the explanation of the input parameters.
"""
newItem = FlatMenuItem(self, id, item, helpString, wx.ITEM_RADIO)
return self.AppendItem(newItem)
def AppendSeparator(self):
""" Appends a separator item to teh end of this menu. """
newItem = FlatMenuItem(self)
return self.AppendItem(newItem)
def InsertSeparator(self, pos):
"""
Inserts a separator at the given position.
:param `pos`: the index at which we want to insert the separator.
"""
newItem = FlatMenuItem(self)
return self.Insert(pos, newItem)
def Dismiss(self, dismissParent, resetOwner):
"""
Dismisses the popup window.
:param `dismissParent`: whether to dismiss the parent menu or not;
:param `resetOwner`: ``True`` to delete the link between this menu and the
owner menu, ``False`` otherwise.
"""
if self._activeWin:
self._activeWin.PopEventHandler(True)
self._activeWin = None
if self._focusWin:
self._focusWin.PopEventHandler(True)
self._focusWin = None
self._selectedItem = -1
if self._mb:
self._mb.RemoveHelp()
FlatMenuBase.Dismiss(self, dismissParent, resetOwner)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` event for L{FlatMenu}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
dc = wx.PaintDC(self)
self.DoDrawMenu(dc)
# We need to redraw all our child menus
self.RefreshChilds()
def UpdateItem(self, item):
"""
Updates an item.
:param `item`: an instance of L{FlatMenuItem}.
"""
# notify menu bar that an item was modified directly
if item and self._mb:
self._mb.UpdateItem(item)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` event for L{FlatMenu}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This method is intentionally empty to avoid flicker.
"""
pass
def DoDrawMenu(self, dc):
"""
Actually draws the menu.
:param `dc`: an instance of `wx.DC`.
"""
menuRect = self.GetMenuRect()
menuBmp = wx.EmptyBitmap(menuRect.width, menuRect.height)
mem_dc = wx.MemoryDC()
mem_dc.SelectObject(menuBmp)
# colour the menu face with background colour
backColour = ArtManager.Get().GetMenuFaceColour()
penColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW)
backBrush = wx.Brush(backColour)
pen = wx.Pen(penColour)
mem_dc.SetPen(pen)
mem_dc.SetBrush(backBrush)
mem_dc.DrawRectangleRect(menuRect)
# draw items
posy = 2
nItems = len(self._itemsArr)
# make all items as non-visible first
for item in self._itemsArr:
item.Show(False)
visibleItems = 0
screenHeight = wx.SystemSettings_GetMetric(wx.SYS_SCREEN_Y)
numCols = self.GetNumberColumns()
switch, posx, index = 1e6, 0, 0
if numCols > 1:
switch = int(math.ceil((nItems - self._first)/float(numCols)))
for nCount in xrange(self._first, nItems):
visibleItems += 1
item = self._itemsArr[nCount]
item.DrawSelf(mem_dc,
posx,
posy,
self._imgMarginX,
self._markerMarginX,
self._textX,
self._rightMarginPosX,
nCount == self._selectedItem
)
posy += item.GetHeight()
item.Show()
if visibleItems >= switch:
posy = 2
index += 1
posx = self._menuWidth*index
visibleItems = 0
# make sure we draw only visible items
pp = self.ClientToScreen(wx.Point(0, posy))
if pp.y > screenHeight:
break
if self._showScrollButtons:
if self._upButton:
self._upButton.Draw(mem_dc)
if self._downButton:
self._downButton.Draw(mem_dc)
dc.Blit(0, 0, menuBmp.GetWidth(), menuBmp.GetHeight(), mem_dc, 0, 0)
def DrawSelection(self, dc, oldSelection=-1):
"""
Redraws the menu.
:param `dc`: an instance of `wx.DC`;
:param `oldSelection`: if >= 0, the index representing the previous selected
menu item.
"""
self.Refresh()
def RefreshChilds(self):
"""
In some cases, we need to perform a recursive refresh of all opened submenus
starting from this menu.
"""
# Draw all child menus of this menu as well
child = self._openedSubMenu
while child:
dc = wx.ClientDC(child)
child.DoDrawMenu(dc)
child = child._openedSubMenu
def DrawLeftMargin(self, dc, menuRect):
"""
Draws the menu left margin.
:param `dc`: an instance of `wx.DC`;
:param `menuRect`: the menu client rectangle.
"""
# Construct the margin rectangle
marginRect = wx.Rect(menuRect.x+1, menuRect.y, self.GetLeftMarginWidth(), menuRect.height)
# Set the gradient colours
artMgr = ArtManager.Get()
faceColour = artMgr.GetMenuFaceColour()
if Style2007 == artMgr.GetMenuTheme():
dcsaver = DCSaver(dc)
marginColour = artMgr.DarkColour(faceColour, 5)
dc.SetPen(wx.Pen(marginColour))
dc.SetBrush(wx.Brush(marginColour))
dc.DrawRectangleRect(marginRect)
dc.SetPen(wx.WHITE_PEN)
dc.DrawLine(marginRect.x + marginRect.width, marginRect.y, marginRect.x + marginRect.width, marginRect.y + marginRect.height)
borderColour = artMgr.DarkColour(faceColour, 10)
dc.SetPen(wx.Pen(borderColour))
dc.DrawLine(marginRect.x + marginRect.width-1, marginRect.y, marginRect.x + marginRect.width-1, marginRect.y + marginRect.height)
else:
startColour = artMgr.DarkColour(faceColour, 20)
endColour = faceColour
artMgr.PaintStraightGradientBox(dc, marginRect, startColour, endColour, False)
def GetMenuRect(self):
""" Returns the menu client rectangle. """
clientRect = self.GetClientRect()
return wx.Rect(clientRect.x, clientRect.y, clientRect.width, clientRect.height)
def OnKeyDown(self, event):
"""
Handles the ``wx.EVT_KEY_DOWN`` event for L{FlatMenu}.
:param `event`: a `wx.KeyEvent` event to be processed.
"""
self.OnChar(event.GetKeyCode())
def OnChar(self, key):
"""
Handles key events for L{FlatMenu}.
:param `key`: the keyboard key integer code.
"""
processed = True
if key == wx.WXK_ESCAPE:
if self._parentMenu:
self._parentMenu.CloseSubMenu(-1)
else:
self.Dismiss(True, True)
elif key == wx.WXK_LEFT:
if self._parentMenu:
# We are a submenu, dismiss us.
self._parentMenu.CloseSubMenu(-1)
else:
# try to find our root menu, if we are attached to menubar,
# let it try and open the previous menu
root = self.GetRootMenu()
if root:
if root._mb:
root._mb.ActivatePreviousMenu()
elif key == wx.WXK_RIGHT:
if not self.TryOpenSubMenu(self._selectedItem, True):
# try to find our root menu, if we are attached to menubar,
# let it try and open the next menu
root = self.GetRootMenu()
if root:
if root._mb:
root._mb.ActivateNextMenu()
elif key == wx.WXK_UP:
self.AdvanceSelection(False)
elif key == wx.WXK_DOWN:
self.AdvanceSelection()
elif key in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]:
self.DoAction(self._selectedItem)
elif key == wx.WXK_HOME:
# Select first item of the menu
if self._selectedItem != 0:
oldSel = self._selectedItem
self._selectedItem = 0
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSel)
elif key == wx.WXK_END:
# Select last item of the menu
if self._selectedItem != len(self._itemsArr)-1:
oldSel = self._selectedItem
self._selectedItem = len(self._itemsArr)-1
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSel)
elif key in [wx.WXK_CONTROL, wx.WXK_ALT]:
# Ctrl or Alt was pressed
root = self.GetRootMenu()
root.Dismiss(False, True)
else:
try:
chrkey = chr(key)
except:
return processed
if chrkey.isalnum():
ch = chrkey.lower()
# Iterate over all the menu items
itemIdx = -1
occur = 0
for i in xrange(len(self._itemsArr)):
item = self._itemsArr[i]
mnemonic = item.GetMnemonicChar()
if mnemonic == ch:
if itemIdx == -1:
itemIdx = i
# We keep the index of only
# the first occurrence
occur += 1
# Keep on looping until there are no more items in this menu
if itemIdx != -1:
if occur > 1:
# We select the first item
if self._selectedItem == itemIdx:
return processed
oldSel = self._selectedItem
self._selectedItem = itemIdx
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSel)
elif occur == 1:
# Activate the item; if this is a submenu item we first select it
item = self._itemsArr[itemIdx]
if item.IsSubMenu() and self._selectedItem != itemIdx:
oldSel = self._selectedItem
self._selectedItem = itemIdx
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSel)
self.DoAction(itemIdx)
else:
processed = False
return processed
def AdvanceSelection(self, down=True):
"""
Advance forward or backward the current selection.
:param `down`: ``True`` to advance the selection forward, ``False`` otherwise.
"""
# make sure we have at least two items in the menu (which are not
# separators)
num=0
singleItemIdx = -1
for i in xrange(len(self._itemsArr)):
item = self._itemsArr[i]
if item.IsSeparator():
continue
num += 1
singleItemIdx = i
if num < 1:
return
if num == 1:
# Select the current one
self._selectedItem = singleItemIdx
dc = wx.ClientDC(self)
self.DrawSelection(dc, -1)
return
oldSelection = self._selectedItem
if not down:
# find the next valid item
while 1:
self._selectedItem -= 1
if self._selectedItem < 0:
self._selectedItem = len(self._itemsArr)-1
if not self._itemsArr[self._selectedItem].IsSeparator():
break
else:
# find the next valid item
while 1:
self._selectedItem += 1
if self._selectedItem > len(self._itemsArr)-1:
self._selectedItem = 0
if not self._itemsArr[self._selectedItem].IsSeparator():
break
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSelection)
def HitTest(self, pos):
"""
HitTest method for L{FlatMenu}.
:param `pos`: an instance of `wx.Point`, a point to test for hits.
"""
if self._showScrollButtons:
if self._upButton and self._upButton.GetClientRect().Contains(pos):
return MENU_HT_SCROLL_UP, -1
if self._downButton and self._downButton.GetClientRect().Contains(pos):
return MENU_HT_SCROLL_DOWN, -1
for ii, item in enumerate(self._itemsArr):
if item.GetRect().Contains(pos) and item.IsEnabled() and item.IsShown():
return MENU_HT_ITEM, ii
return MENU_HT_NONE, -1
def OnMouseMove(self, event):
"""
Handles the ``wx.EVT_MOTION`` event for L{FlatMenu}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if "__WXMSW__" in wx.Platform:
# Ignore dummy mouse move events
pt = wx.GetMousePosition()
if self._mousePtAtStartup == pt:
return
pos = event.GetPosition()
# we need to ignore extra mouse events: an example of when this happens is
# when the mouse is on the menu and we open a submenu from the keyboard -
# Windows then sends us a dummy mouse move event, we (correctly) determine
# that it happens in the parent menu and would immediately close the just
# opened submenu.
if "__WXMSW__" in wx.Platform:
ptCur = self.ClientToScreen(pos)
if ptCur == self._ptLast:
return
self._ptLast = ptCur
# first let the scrollbar handle it
self.TryScrollButtons(event)
self.ProcessMouseMove(pos)
def OnMouseLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` event for L{FlatMenu}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.TryScrollButtons(event):
return
pos = event.GetPosition()
self.ProcessMouseLClick(pos)
def OnMouseLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` event for L{FlatMenu}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.TryScrollButtons(event):
return
pos = event.GetPosition()
rect = self.GetClientRect()
if not rect.Contains(pos):
# The event is not in our coords,
# so we try our parent
win = self._parentMenu
while win:
# we need to translate our client coords to the client coords of the
# window we forward this event to
ptScreen = self.ClientToScreen(pos)
p = win.ScreenToClient(ptScreen)
if win.GetClientRect().Contains(p):
event.m_x = p.x
event.m_y = p.y
win.OnMouseLeftUp(event)
return
else:
# try the grandparent
win = win._parentMenu
else:
self.ProcessMouseLClickEnd(pos)
if self._showScrollButtons:
if self._upButton:
self._upButton.ProcessLeftUp(pos)
if self._downButton:
self._downButton.ProcessLeftUp(pos)
def OnMouseRightDown(self, event):
"""
Handles the ``wx.EVT_RIGHT_DOWN`` event for L{FlatMenu}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.TryScrollButtons(event):
return
pos = event.GetPosition()
self.ProcessMouseRClick(pos)
def ProcessMouseRClick(self, pos):
"""
Processes mouse right clicks.
:param `pos`: the position at which the mouse right button was pressed.
"""
rect = self.GetClientRect()
if not rect.Contains(pos):
# The event is not in our coords,
# so we try our parent
win = self._parentMenu
while win:
# we need to translate our client coords to the client coords of the
# window we forward this event to
ptScreen = self.ClientToScreen(pos)
p = win.ScreenToClient(ptScreen)
if win.GetClientRect().Contains(p):
win.ProcessMouseRClick(p)
return
else:
# try the grandparent
win = win._parentMenu
# At this point we can assume that the event was not
# processed, so we dismiss the menu and its children
self.Dismiss(True, True)
return
# test if we are on a menu item
res, itemIdx = self.HitTest(pos)
if res == MENU_HT_ITEM:
self.OpenItemContextMenu(itemIdx)
def OpenItemContextMenu(self, itemIdx):
"""
Open an item's context menu (if any).
:param `itemIdx`: the index of the item for which we want to open the context menu.
"""
item = self._itemsArr[itemIdx]
context_menu = item.GetContextMenu()
# If we have a context menu, close any opened submenu
if context_menu:
self.CloseSubMenu(itemIdx, True)
if context_menu and not context_menu.IsShown():
# Popup child menu
pos = wx.Point()
pos.x = item.GetRect().GetWidth() + item.GetRect().GetX() - 5
pos.y = item.GetRect().GetY()
self._clearCurrentSelection = False
self._openedSubMenu = context_menu
context_menu.Popup(self.ScreenToClient(wx.GetMousePosition()), self._owner, self)
return True
return False
def ProcessMouseLClick(self, pos):
"""
Processes mouse left clicks.
:param `pos`: the position at which the mouse left button was pressed.
"""
rect = self.GetClientRect()
if not rect.Contains(pos):
# The event is not in our coords,
# so we try our parent
win = self._parentMenu
while win:
# we need to translate our client coords to the client coords of the
# window we forward this event to
ptScreen = self.ClientToScreen(pos)
p = win.ScreenToClient(ptScreen)
if win.GetClientRect().Contains(p):
win.ProcessMouseLClick(p)
return
else:
# try the grandparent
win = win._parentMenu
# At this point we can assume that the event was not
# processed, so we dismiss the menu and its children
self.Dismiss(True, True)
return
def ProcessMouseLClickEnd(self, pos):
"""
Finishes processing of a mouse left click (on button release).
:param `pos`: the position at which the mouse left button was released.
"""
self.ProcessMouseLClick(pos)
# test if we are on a menu item
res, itemIdx = self.HitTest(pos)
if res == MENU_HT_ITEM:
self.DoAction(itemIdx)
elif res == MENU_HT_SCROLL_UP:
if self._upButton:
self._upButton.ProcessLeftDown(pos)
elif res == MENU_HT_SCROLL_DOWN:
if self._downButton:
self._downButton.ProcessLeftDown(pos)
else:
self._selectedItem = -1
def ProcessMouseMove(self, pos):
"""
Processes mouse movements.
:param `pos`: the position at which the mouse was moved.
"""
rect = self.GetClientRect()
if not rect.Contains(pos):
# The event is not in our coords,
# so we try our parent
win = self._parentMenu
while win:
# we need to translate our client coords to the client coords of the
# window we forward this event to
ptScreen = self.ClientToScreen(pos)
p = win.ScreenToClient(ptScreen)
if win.GetClientRect().Contains(p):
win.ProcessMouseMove(p)
return
else:
# try the grandparent
win = win._parentMenu
# If we are attached to a menu bar,
# let him process the event as well
if self._mb:
ptScreen = self.ClientToScreen(pos)
p = self._mb.ScreenToClient(ptScreen)
if self._mb.GetClientRect().Contains(p):
# let the menu bar process it
self._mb.ProcessMouseMoveFromMenu(p)
return
if self._mb_submenu:
ptScreen = self.ClientToScreen(pos)
p = self._mb_submenu.ScreenToClient(ptScreen)
if self._mb_submenu.GetClientRect().Contains(p):
# let the menu bar process it
self._mb_submenu.ProcessMouseMoveFromMenu(p)
return
return
# test if we are on a menu item
res, itemIdx = self.HitTest(pos)
if res == MENU_HT_SCROLL_DOWN:
if self._downButton:
self._downButton.ProcessMouseMove(pos)
elif res == MENU_HT_SCROLL_UP:
if self._upButton:
self._upButton.ProcessMouseMove(pos)
elif res == MENU_HT_ITEM:
if self._downButton:
self._downButton.ProcessMouseMove(pos)
if self._upButton:
self._upButton.ProcessMouseMove(pos)
if self._selectedItem == itemIdx:
return
# Message to send when out of last selected item
if self._selectedItem != -1:
self.SendOverItem(self._selectedItem, False)
self.SendOverItem(itemIdx, True) # Message to send when over an item
oldSelection = self._selectedItem
self._selectedItem = itemIdx
self.CloseSubMenu(self._selectedItem)
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSelection)
self.TryOpenSubMenu(self._selectedItem)
if self._mb:
self._mb.RemoveHelp()
if itemIdx >= 0:
self._mb.DoGiveHelp(self._itemsArr[itemIdx])
else:
# Message to send when out of last selected item
if self._selectedItem != -1:
item = self._itemsArr[self._selectedItem]
if item.IsSubMenu() and item.GetSubMenu().IsShown():
return
# Message to send when out of last selected item
if self._selectedItem != -1:
self.SendOverItem(self._selectedItem, False)
oldSelection = self._selectedItem
self._selectedItem = -1
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSelection)
def OnMouseLeaveWindow(self, event):
"""
Handles the ``wx.EVT_LEAVE_WINDOW`` event for L{FlatMenu}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self._mb:
self._mb.RemoveHelp()
if self._clearCurrentSelection:
# Message to send when out of last selected item
if self._selectedItem != -1:
item = self._itemsArr[self._selectedItem]
if item.IsSubMenu() and item.GetSubMenu().IsShown():
return
# Message to send when out of last selected item
if self._selectedItem != -1:
self.SendOverItem(self._selectedItem, False)
oldSelection = self._selectedItem
self._selectedItem = -1
dc = wx.ClientDC(self)
self.DrawSelection(dc, oldSelection)
self._clearCurrentSelection = True
if "__WXMSW__" in wx.Platform:
self.SetCursor(self._oldCur)
def OnMouseEnterWindow(self, event):
"""
Handles the ``wx.EVT_ENTER_WINDOW`` event for L{FlatMenu}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if "__WXMSW__" in wx.Platform:
self._oldCur = self.GetCursor()
self.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
event.Skip()
def OnKillFocus(self, event):
"""
Handles the ``wx.EVT_KILL_FOCUS`` event for L{FlatMenu}.
:param `event`: a `wx.FocusEvent` event to be processed.
"""
self.Dismiss(True, True)
def CloseSubMenu(self, itemIdx, alwaysClose=False):
"""
Closes a child sub-menu.
:param `itemIdx`: the index of the item for which we want to close the submenu;
:param `alwaysClose`: if ``True``, always close the submenu irrespectively of
other conditions.
"""
item = None
subMenu = None
if itemIdx >= 0 and itemIdx < len(self._itemsArr):
item = self._itemsArr[itemIdx]
# Close sub-menu first
if item:
subMenu = item.GetSubMenu()
if self._openedSubMenu:
if self._openedSubMenu != subMenu or alwaysClose:
# We have another sub-menu open, close it
self._openedSubMenu.Dismiss(False, True)
self._openedSubMenu = None
def DoAction(self, itemIdx):
"""
Performs an action based on user selection.
:param `itemIdx`: the index of the item for which we want to perform the action.
"""
if itemIdx < 0 or itemIdx >= len(self._itemsArr):
raise Exception("Invalid menu item")
item = self._itemsArr[itemIdx]
if not item.IsEnabled() or item.IsSeparator():
return
# Close sub-menu if needed
self.CloseSubMenu(itemIdx)
if item.IsSubMenu() and not item.GetSubMenu().IsShown():
# Popup child menu
self.TryOpenSubMenu(itemIdx)
return
if item.IsRadioItem():
# if the radio item is already checked,
# just send command event. Else, check it, uncheck the current
# checked item in the radio item group, and send command event
if not item.IsChecked():
item._groupPtr.SetSelection(item)
elif item.IsCheckable():
item.Check(not item.IsChecked())
dc = wx.ClientDC(self)
self.DrawSelection(dc)
if not item.IsSubMenu():
self.Dismiss(True, False)
# Send command event
self.SendCmdEvent(itemIdx)
def TryOpenSubMenu(self, itemIdx, selectFirst=False):
"""
If `itemIdx` is an item with submenu, open it.
:param `itemIdx`: the index of the item for which we want to open the submenu;
:param `selectFirst`: if ``True``, the first item of the submenu will be shown
as selected.
"""
if itemIdx < 0 or itemIdx >= len(self._itemsArr):
return False
item = self._itemsArr[itemIdx]
if item.IsSubMenu() and not item.GetSubMenu().IsShown():
pos = wx.Point()
# Popup child menu
pos.x = item.GetRect().GetWidth()+ item.GetRect().GetX()-5
pos.y = item.GetRect().GetY()
self._clearCurrentSelection = False
self._openedSubMenu = item.GetSubMenu()
item.GetSubMenu().Popup(pos, self._owner, self)
# Select the first child
if selectFirst:
dc = wx.ClientDC(item.GetSubMenu())
item.GetSubMenu()._selectedItem = 0
item.GetSubMenu().DrawSelection(dc)
return True
return False
def _RemoveById(self, id):
""" Used internally. """
# First we search for the menu item (recursively)
menuParent = None
item = None
idx = wx.NOT_FOUND
idx, menuParent = self.FindMenuItemPos(id)
if idx != wx.NOT_FOUND:
# Remove the menu item
item = menuParent._itemsArr[idx]
menuParent._itemsArr.pop(idx)
# update group
if item._groupPtr and item.IsRadioItem():
item._groupPtr.Remove(item)
# Resize the menu
menuParent.ResizeMenu()
return item
def Remove(self, item):
"""
Removes the menu item from the menu but doesn't delete the associated menu
object. This allows reusing the same item later by adding it back to the
menu (especially useful with submenus).
:param `item`: can be either a menu item identifier or a plain L{FlatMenuItem}.
"""
if type(item) != type(1):
item = item.GetId()
return self._RemoveById(item)
def _DestroyById(self, id):
""" Used internally. """
item = None
item = self.Remove(id)
if item:
del item
def Destroy(self, item):
"""
Deletes the menu item from the menu. If the item is a submenu, it will be
deleted. Use L{Remove} if you want to keep the submenu (for example, to reuse
it later).
:param `item`: can be either a menu item identifier or a plain L{FlatMenuItem}.
"""
if type(item) != type(1):
item = item.GetId()
self._DestroyById(item)
def Insert(self, pos, id, item, helpString="", kind=wx.ITEM_NORMAL):
"""
Inserts the given `item` before the position `pos`.
:param `pos`: the position at which to insert the new menu item;
:param `id`: the menu item identifier;
:param `item`: the string to appear on the menu item;
:param `helpString`: an optional help string associated with the item. By default,
the handler for the ``EVT_FLAT_MENU_ITEM_MOUSE_OVER`` event displays this string
in the status line;
:param `kind`: may be ``wx.ITEM_NORMAL`` for a normal button (default),
``wx.ITEM_CHECK`` for a checkable tool (such tool stays pressed after it had been
toggled) or ``wx.ITEM_RADIO`` for a checkable tool which makes part of a radio
group of tools each of which is automatically unchecked whenever another button
in the group is checked;
"""
newitem = FlatMenuItem(self, id, item, helpString, kind)
return self.InsertItem(pos, newitem)
def InsertItem(self, pos, item):
"""
Inserts an item into the menu.
:param `pos`: the position at which to insert the new menu item;
:param `item`: an instance of L{FlatMenuItem}.
"""
if pos == len(self._itemsArr):
# Append it
return self.AppendItem(item)
# Insert the menu item
self._itemsArr.insert(pos, item)
item._isAttachedToMenu = True
# Recalculate the menu geometry
self.ResizeMenu()
# Update radio groups
self.UpdateRadioGroup(item)
return item
def UpdateRadioGroup(self, item):
"""
Updates a group of radio items.
:param `item`: an instance of L{FlatMenuItem}.
"""
if item.IsRadioItem():
# Update radio groups in case this item is a radio item
sibling = self.GetSiblingGroupItem(item)
if sibling:
item._groupPtr = sibling._groupPtr
item._groupPtr.Add(item)
if item.IsChecked():
item._groupPtr.SetSelection(item)
else:
# first item in group
item._groupPtr = FlatMenuItemGroup()
item._groupPtr.Add(item)
item._groupPtr.SetSelection(item)
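# Editor's note (behaviour sketch, not part of the original source):
# UpdateRadioGroup() above links consecutive radio items into a single
# FlatMenuItemGroup, so checking one member unchecks the others; any
# non-radio item (a separator, say) starts a new group:
#
#     a = menu.AppendRadioItem(wx.ID_ANY, "Small")
#     b = menu.AppendRadioItem(wx.ID_ANY, "Large")   # same group as `a`
#     menu.AppendSeparator()
#     c = menu.AppendRadioItem(wx.ID_ANY, "Other")   # starts a new group
#     b.Check(True)   # checks `b` and unchecks `a`; `c` is unaffected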
def ResizeMenu(self):
""" Resizes the menu to the correct size. """
# can we do the resize?
if not self._resizeMenu:
return
items = self._itemsArr
self._itemsArr = []
# Clear accelerator table
self._accelArray = []
# Reset parameters and menu size
self._menuWidth = 2*self._marginWidth
self._imgMarginX = 0
self._markerMarginX = 0
self._textX = self._marginWidth
self._rightMarginPosX = -1
self._itemHeight = self._marginHeight
self.SetSize(wx.Size(self._menuWidth*self._numCols, self._itemHeight+4))
# Now we simply add the items
for item in items:
self.AppendItem(item)
def SetNumberColumns(self, numCols):
"""
Sets the number of columns for a menu window.
:param `numCols`: the number of columns for this L{FlatMenu} window.
"""
if self._numCols == numCols:
return
self._numCols = numCols
self.ResizeMenu()
self.Refresh()
def GetNumberColumns(self):
""" Returns the number of columns for a menu window. """
return self._numCols
def FindItem(self, itemId, menu=None):
"""
Finds the menu item object associated with the given menu item identifier and,
optionally, the (sub)menu it belongs to.
:param `itemId`: menu item identifier;
:param `menu`: if not ``None``, it will be filled with the item's parent menu
(if the item was found).
"""
idx = wx.NOT_FOUND
if menu:
idx, menu = self.FindMenuItemPos(itemId, menu)
if idx != wx.NOT_FOUND:
return menu._itemsArr[idx]
else:
return None
else:
idx, parentMenu = self.FindMenuItemPos(itemId, None)
if idx != wx.NOT_FOUND:
return parentMenu._itemsArr[idx]
else:
return None
def FindMenuItemPos(self, itemId, menu=None):
"""
Finds an item and its position inside the menu based on its id.
:param `itemId`: menu item identifier;
:param `menu`: if not ``None``, it will be filled with the item's parent menu
(if the item was found).
"""
menu = None
item = None
idx = wx.NOT_FOUND
for i in xrange(len(self._itemsArr)):
item = self._itemsArr[i]
if item.GetId() == itemId:
menu = self
idx = i
break
elif item.IsSubMenu():
idx, menu = item.GetSubMenu().FindMenuItemPos(itemId, menu)
if idx != wx.NOT_FOUND:
break
else:
item = None
return idx, menu
def GetAccelTable(self):
""" Returns the menu accelerator table. """
n = len(self._accelArray)
if n == 0:
return wx.NullAcceleratorTable
entries = [wx.AcceleratorEntry() for ii in xrange(n)]
for counter in xrange(len(entries)):
entries[counter] = self._accelArray[counter]
table = wx.AcceleratorTable(entries)
del entries
return table
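# Editor's note (usage sketch, not part of the original source): the
# accelerator entries collected by AppendItem() can be installed on the
# window that owns the menu so shortcuts fire even while the menu is closed:
#
#     frame.SetAcceleratorTable(menu.GetAccelTable())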
def GetAccelArray(self):
""" Returns an array filled with the accelerator entries for the menu. """
return self._accelArray
# events
def SendCmdEvent(self, itemIdx):
"""
Actually sends menu command events.
:param `itemIdx`: the menu item index for which we want to send a command event.
"""
if itemIdx < 0 or itemIdx >= len(self._itemsArr):
raise Exception("Invalid menu item")
item = self._itemsArr[itemIdx]
# Create the event
event = wx.CommandEvent(wxEVT_FLAT_MENU_SELECTED, item.GetId())
# For checkable item, set the IsChecked() value
if item.IsCheckable():
event.SetInt((item.IsChecked() and [1] or [0])[0])
event.SetEventObject(self)
if self._owner:
self._owner.GetEventHandler().ProcessEvent(event)
else:
self.GetEventHandler().ProcessEvent(event)
def SendOverItem(self, itemIdx, over):
"""
Sends the ``EVT_FLAT_MENU_ITEM_MOUSE_OVER`` and ``EVT_FLAT_MENU_ITEM_MOUSE_OUT``
events.
:param `itemIdx`: the menu item index for which we want to send an event;
:param `over`: ``True`` to send a ``EVT_FLAT_MENU_ITEM_MOUSE_OVER`` event, ``False`` to
send a ``EVT_FLAT_MENU_ITEM_MOUSE_OUT`` event.
"""
item = self._itemsArr[itemIdx]
# Create the event
event = FlatMenuEvent((over and [wxEVT_FLAT_MENU_ITEM_MOUSE_OVER] or [wxEVT_FLAT_MENU_ITEM_MOUSE_OUT])[0], item.GetId())
# For checkable item, set the IsChecked() value
if item.IsCheckable():
event.SetInt((item.IsChecked() and [1] or [0])[0])
event.SetEventObject(self)
if self._owner:
self._owner.GetEventHandler().ProcessEvent(event)
else:
self.GetEventHandler().ProcessEvent(event)
def SendUIEvent(self, itemIdx):
"""
Actually sends menu UI events.
:param `itemIdx`: the menu item index for which we want to send a UI event.
"""
if itemIdx < 0 or itemIdx >= len(self._itemsArr):
raise Exception("Invalid menu item")
item = self._itemsArr[itemIdx]
event = wx.UpdateUIEvent(item.GetId())
event.Check(item.IsChecked())
event.Enable(item.IsEnabled())
event.SetText(item.GetText())
event.SetEventObject(self)
if self._owner:
self._owner.GetEventHandler().ProcessEvent(event)
else:
self.GetEventHandler().ProcessEvent(event)
item.Check(event.GetChecked())
item.SetLabel(event.GetText())
item.Enable(event.GetEnabled())
def Clear(self):
""" Clears the menu items. """
# since Destroy() calls ResizeMenu(), we turn this flag off
# to avoid resizing the menu for every item removed
self._resizeMenu = False
lenItems = len(self._itemsArr)
for ii in xrange(lenItems):
self.Destroy(self._itemsArr[0].GetId())
# Now we can resize the menu
self._resizeMenu = True
self.ResizeMenu()
def FindMenuItemPosSimple(self, item):
"""
Finds an item's position inside the menu by comparing directly against the given item instance.
:param `item`: an instance of L{FlatMenuItem}.
"""
if item is None or not self._itemsArr:
return wx.NOT_FOUND
for i in xrange(len(self._itemsArr)):
if self._itemsArr[i] == item:
return i
return wx.NOT_FOUND
def GetAllItems(self, menu=None, items=[]):
"""
Internal function to help recurse through all the menu items.
:param `menu`: the menu from which we start accumulating items;
:param `items`: the array which is recursively filled with menu items.
"""
# first copy the current menu items
newitems = [item for item in items]
if not menu:
return newitems
# if any item in this menu has sub-menu, copy them as well
for i in xrange(len(menu._itemsArr)):
if menu._itemsArr[i].IsSubMenu():
newitems = self.GetAllItems(menu._itemsArr[i].GetSubMenu(), newitems)
return newitems
def GetSiblingGroupItem(self, item):
"""
Used internally.
:param `item`: an instance of L{FlatMenuItem}.
"""
pos = self.FindMenuItemPosSimple(item)
if pos in [wx.NOT_FOUND, 0]:
return None
if self._itemsArr[pos-1].IsRadioItem():
return self._itemsArr[pos-1]
return None
def ScrollDown(self):
""" Scrolls the menu down (for very tall menus). """
# increase the self._first index
if not self._itemsArr[-1].IsShown():
self._first += 1
self.Refresh()
return True
else:
if self._downButton:
self._downButton.GetTimer().Stop()
return False
def ScrollUp(self):
""" Scrolls the menu up (for very tall menus). """
if self._first == 0:
if self._upButton:
self._upButton.GetTimer().Stop()
return False
else:
self._first -= 1
self.Refresh()
return True
# Not used anymore
def TryScrollButtons(self, event):
""" Used internally. """
return False
def OnTimer(self, event):
"""
Handles the ``wx.EVT_TIMER`` event for L{FlatMenu}.
:param `event`: a `wx.TimerEvent` event to be processed.
"""
if self._upButton and self._upButton.GetTimerId() == event.GetId():
self.ScrollUp()
elif self._downButton and self._downButton.GetTimerId() == event.GetId():
self.ScrollDown()
else:
event.Skip()
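#--------------------------------------------------------
# Editor's note: usage sketch, not part of the original module
#--------------------------------------------------------
def _example_context_menu(frame, menu):
    """
    A hedged sketch of using a FlatMenu as a context menu. It assumes the
    EVT_FLAT_MENU_SELECTED binder that this module defines elsewhere for
    wxEVT_FLAT_MENU_SELECTED (the event sent by SendCmdEvent above);
    `frame` and `menu` are hypothetical.
    """
    def on_selected(event):
        print "menu item selected:", event.GetId()
    def on_right_down(event):
        # Popup() takes a screen position plus an optional event owner
        menu.Popup(wx.GetMousePosition(), frame)
    frame.Bind(EVT_FLAT_MENU_SELECTED, on_selected)
    frame.Bind(wx.EVT_RIGHT_DOWN, on_right_down)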
#--------------------------------------------------------
# Class MenuKbdRedirector
#--------------------------------------------------------
class MenuKbdRedirector(wx.EvtHandler):
""" A keyboard event handler. """
def __init__(self, menu, oldHandler):
"""
Default class constructor.
:param `menu`: an instance of L{FlatMenu} for which we want to redirect
keyboard inputs;
:param `oldHandler`: a previous (if any) `wx.EvtHandler` associated with
the menu.
"""
self._oldHandler = oldHandler
self.SetMenu(menu)
wx.EvtHandler.__init__(self)
def SetMenu(self, menu):
"""
Sets the listener menu.
:param `menu`: an instance of L{FlatMenu}.
"""
self._menu = menu
def ProcessEvent(self, event):
"""
Processes the input event.
:param `event`: any kind of keyboard-generated event.
"""
if event.GetEventType() in [wx.EVT_KEY_DOWN, wx.EVT_CHAR, wx.EVT_CHAR_HOOK]:
return self._menu.OnChar(event.GetKeyCode())
else:
return self._oldHandler.ProcessEvent(event)
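#--------------------------------------------------------
# Editor's note (not part of the original module): FlatMenu.Popup() above
# installs this redirector on the active window so key presses reach the
# open menu, and FlatMenu.Dismiss() removes it again:
#
#     old_handler = active_win.GetEventHandler()
#     active_win.PushEventHandler(MenuKbdRedirector(menu, old_handler))
#     ...
#     active_win.PopEventHandler(True)
#--------------------------------------------------------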
#--------------------------------------------------------
# Class FocusHandler
#--------------------------------------------------------
class FocusHandler(wx.EvtHandler):
""" A focus event handler. """
def __init__(self, menu):
"""
Default class constructor.
:param `menu`: an instance of L{FlatMenu} for which we want to redirect
focus inputs.
"""
wx.EvtHandler.__init__(self)
self.SetMenu(menu)
self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
self.Bind(wx.EVT_KILL_FOCUS, self.OnKillFocus)
def SetMenu(self, menu):
"""
Sets the listener menu.
:param `menu`: an instance of L{FlatMenu}.
"""
self._menu = menu
def OnKeyDown(self, event):
"""
Handles the ``wx.EVT_KEY_DOWN`` event for L{FocusHandler}.
:param `event`: a `wx.KeyEvent` event to be processed.
"""
# Let parent process it
self._menu.OnKeyDown(event)
def OnKillFocus(self, event):
"""
Handles the ``wx.EVT_KILL_FOCUS`` event for L{FocusHandler}.
:param `event`: a `wx.FocusEvent` event to be processed.
"""
wx.PostEvent(self._menu, event)
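#--------------------------------------------------------
# Editor's note: hedged usage sketch
#--------------------------------------------------------
# A minimal sketch, not part of the original module, showing how a
# FlatMenu is typically built and popped up; it assumes this module's
# public API (FlatMenu, FlatMenuItem, Popup) and a running wx app.
def _flatmenu_usage_sketch(owner):
    """
    Builds a two-item menu and pops it up at the mouse position.
    :param `owner`: any `wx.Window` that should own the menu.
    """
    menu = FlatMenu()
    menu.AppendItem(FlatMenuItem(menu, wx.ID_ANY, "First item"))
    menu.AppendItem(FlatMenuItem(menu, wx.ID_ANY, "Second item"))
    # Popup() installs MenuKbdRedirector and FocusHandler (defined
    # above) so keyboard and focus events are routed to the menu.
    menu.Popup(wx.GetMousePosition(), owner)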
|
gpl-3.0
|
ychfan/tensorflow
|
tensorflow/python/training/tensorboard_logging_test.py
|
132
|
4456
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.tensorboard_logging."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import shutil
import tempfile
import time
from tensorflow.core.util import event_pb2
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary_iterator
from tensorflow.python.summary.writer import writer
from tensorflow.python.training import tensorboard_logging
class EventLoggingTest(test.TestCase):
def setUp(self):
self._work_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
self._sw = writer.FileWriter(self._work_dir)
tensorboard_logging.set_summary_writer(self._sw)
self.addCleanup(shutil.rmtree, self._work_dir)
# Stop the clock to avoid test flakiness.
now = time.time()
time._real_time = time.time
time.time = lambda: now
# Mock out logging calls so we can verify that the right number of messages
# get logged.
self.logged_message_count = 0
self._actual_log = logging.log
def mockLog(*args, **kwargs):
self.logged_message_count += 1
self._actual_log(*args, **kwargs)
logging.log = mockLog
def tearDown(self):
time.time = time._real_time
logging.log = self._actual_log
def assertLoggedMessagesAre(self, expected_messages):
self._sw.close()
event_paths = glob.glob(os.path.join(self._work_dir, "event*"))
    # If the test runs multiple times in the same directory we can have
# more than one matching event file. We only want to read the last one.
self.assertTrue(event_paths)
event_reader = summary_iterator.summary_iterator(event_paths[-1])
# Skip over the version event.
next(event_reader)
for level, message in expected_messages:
event = next(event_reader)
self.assertEqual(event.wall_time, time.time())
self.assertEqual(event.log_message.level, level)
self.assertEqual(event.log_message.message, message)
def testBasic(self):
tensorboard_logging.set_summary_writer(self._sw)
tensorboard_logging.error("oh no!")
tensorboard_logging.error("for%s", "mat")
self.assertLoggedMessagesAre([(event_pb2.LogMessage.ERROR, "oh no!"),
(event_pb2.LogMessage.ERROR, "format")])
self.assertEqual(2, self.logged_message_count)
def testVerbosity(self):
tensorboard_logging.set_summary_writer(self._sw)
tensorboard_logging.set_verbosity(tensorboard_logging.ERROR)
tensorboard_logging.warn("warn")
tensorboard_logging.error("error")
tensorboard_logging.set_verbosity(tensorboard_logging.DEBUG)
tensorboard_logging.debug("debug")
self.assertLoggedMessagesAre([(event_pb2.LogMessage.ERROR, "error"),
(event_pb2.LogMessage.DEBUGGING, "debug")])
    # All messages should be logged because tensorboard_logging verbosity
    # doesn't affect logging verbosity.
self.assertEqual(3, self.logged_message_count)
def testBadVerbosity(self):
with self.assertRaises(ValueError):
tensorboard_logging.set_verbosity("failure")
with self.assertRaises(ValueError):
tensorboard_logging.log("bad", "dead")
def testNoSummaryWriter(self):
"""Test that logging without a SummaryWriter succeeds."""
tensorboard_logging.set_summary_writer(None)
tensorboard_logging.warn("this should work")
self.assertEqual(1, self.logged_message_count)
def testSummaryWriterFailsAfterClear(self):
tensorboard_logging._clear_summary_writer()
with self.assertRaises(RuntimeError):
tensorboard_logging.log(tensorboard_logging.ERROR, "failure")
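# Editor's note: hedged usage sketch, not part of the original test
# module. It shows the tensorboard_logging API exercised above; the
# log directory is a placeholder.
def _tensorboard_logging_usage_sketch(logdir):
  sw = writer.FileWriter(logdir)
  tensorboard_logging.set_summary_writer(sw)
  tensorboard_logging.set_verbosity(tensorboard_logging.WARN)
  # Emitted both through tf_logging and as a LogMessage event.
  tensorboard_logging.warn("disk usage at %d%%", 90)
  # Dropped: DEBUG is below the WARN verbosity set above.
  tensorboard_logging.debug("ignored")
  sw.close()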
if __name__ == "__main__":
test.main()
|
apache-2.0
|
gqwest-erp/server
|
openerp/addons/warning/__init__.py
|
446
|
1071
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import warning
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
takeshineshiro/nova
|
nova/tests/unit/scheduler/filters/test_availability_zone_filters.py
|
57
|
2170
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.scheduler.filters import availability_zone_filter
from nova import test
from nova.tests.unit.scheduler import fakes
@mock.patch('nova.scheduler.filters.utils.aggregate_metadata_get_by_host')
class TestAvailabilityZoneFilter(test.NoDBTestCase):
def setUp(self):
super(TestAvailabilityZoneFilter, self).setUp()
self.filt_cls = availability_zone_filter.AvailabilityZoneFilter()
@staticmethod
def _make_zone_request(zone):
return {
'context': mock.sentinel.ctx,
'request_spec': {
'instance_properties': {
'availability_zone': zone
}
}
}
def test_availability_zone_filter_same(self, agg_mock):
agg_mock.return_value = {'availability_zone': 'nova'}
request = self._make_zone_request('nova')
host = fakes.FakeHostState('host1', 'node1', {})
self.assertTrue(self.filt_cls.host_passes(host, request))
def test_availability_zone_filter_same_comma(self, agg_mock):
agg_mock.return_value = {'availability_zone': 'nova,nova2'}
request = self._make_zone_request('nova')
host = fakes.FakeHostState('host1', 'node1', {})
self.assertTrue(self.filt_cls.host_passes(host, request))
def test_availability_zone_filter_different(self, agg_mock):
agg_mock.return_value = {'availability_zone': 'nova'}
request = self._make_zone_request('bad')
host = fakes.FakeHostState('host1', 'node1', {})
self.assertFalse(self.filt_cls.host_passes(host, request))
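    # Editor's note: hedged sketch, not part of the original file;
    # it assumes AvailabilityZoneFilter passes every host when the
    # request does not ask for a specific zone.
    def test_availability_zone_filter_no_zone_requested(self, agg_mock):
        agg_mock.return_value = {}
        request = self._make_zone_request(None)
        host = fakes.FakeHostState('host1', 'node1', {})
        self.assertTrue(self.filt_cls.host_passes(host, request))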
|
apache-2.0
|
tesidroni/mp
|
Lib/contextlib.py
|
261
|
4424
|
"""Utilities for with-statement contexts. See PEP 343."""
import sys
from functools import wraps
from warnings import warn
__all__ = ["contextmanager", "nested", "closing"]
class GeneratorContextManager(object):
"""Helper for @contextmanager decorator."""
def __init__(self, gen):
self.gen = gen
def __enter__(self):
try:
return self.gen.next()
except StopIteration:
raise RuntimeError("generator didn't yield")
def __exit__(self, type, value, traceback):
if type is None:
try:
self.gen.next()
except StopIteration:
return
else:
raise RuntimeError("generator didn't stop")
else:
if value is None:
# Need to force instantiation so we can reliably
# tell if we get the same exception back
value = type()
try:
self.gen.throw(type, value, traceback)
raise RuntimeError("generator didn't stop after throw()")
except StopIteration, exc:
# Suppress the exception *unless* it's the same exception that
# was passed to throw(). This prevents a StopIteration
# raised inside the "with" statement from being suppressed
return exc is not value
except:
# only re-raise if it's *not* the exception that was
# passed to throw(), because __exit__() must not raise
# an exception unless __exit__() itself failed. But throw()
# has to raise the exception to signal propagation, so this
# fixes the impedance mismatch between the throw() protocol
# and the __exit__() protocol.
#
if sys.exc_info()[1] is not value:
raise
def contextmanager(func):
"""@contextmanager decorator.
Typical usage:
@contextmanager
def some_generator(<arguments>):
<setup>
try:
yield <value>
finally:
<cleanup>
This makes this:
with some_generator(<arguments>) as <variable>:
<body>
equivalent to this:
<setup>
try:
<variable> = <value>
<body>
finally:
<cleanup>
"""
@wraps(func)
def helper(*args, **kwds):
return GeneratorContextManager(func(*args, **kwds))
return helper
@contextmanager
def nested(*managers):
"""Combine multiple context managers into a single nested context manager.
This function has been deprecated in favour of the multiple manager form
of the with statement.
The one advantage of this function over the multiple manager form of the
with statement is that argument unpacking allows it to be
used with a variable number of context managers as follows:
with nested(*managers):
do_something()
"""
warn("With-statements now directly support multiple context managers",
DeprecationWarning, 3)
exits = []
vars = []
exc = (None, None, None)
try:
for mgr in managers:
exit = mgr.__exit__
enter = mgr.__enter__
vars.append(enter())
exits.append(exit)
yield vars
except:
exc = sys.exc_info()
finally:
while exits:
exit = exits.pop()
try:
if exit(*exc):
exc = (None, None, None)
except:
exc = sys.exc_info()
if exc != (None, None, None):
# Don't rely on sys.exc_info() still containing
# the right information. Another exception may
# have been raised and caught by an exit method
raise exc[0], exc[1], exc[2]
class closing(object):
"""Context to automatically close something at the end of a block.
Code like this:
with closing(<module>.open(<arguments>)) as f:
<block>
is equivalent to this:
f = <module>.open(<arguments>)
try:
<block>
finally:
f.close()
"""
def __init__(self, thing):
self.thing = thing
def __enter__(self):
return self.thing
def __exit__(self, *exc_info):
self.thing.close()
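# Editor's note: hedged usage sketch, not part of the stdlib module.
# It exercises the @contextmanager decorator and the closing() helper
# defined above.
@contextmanager
def _tag(name):
    # <setup>
    markup = "<%s>" % name
    try:
        yield markup
    finally:
        # <cleanup> runs even if the with-block raises
        pass

def _contextlib_usage_sketch():
    with _tag("b") as markup:
        assert markup == "<b>"

    class _Closeable(object):
        closed = False
        def close(self):
            self.closed = True

    thing = _Closeable()
    with closing(thing):
        assert not thing.closed
    assert thing.closed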
|
gpl-3.0
|
csachs/openmicroscopy
|
components/tools/OmeroPy/test/integration/test_exporter.py
|
9
|
2346
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2014 Glencoe Software, Inc. All Rights Reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Tests for the stateful Exporter service.
"""
import omero
import library as lib
import pytest
class TestExporter(lib.ITest):
def bigimage(self):
pix = self.pix(x=4000, y=4000, z=1, t=1, c=1)
rps = self.client.sf.createRawPixelsStore()
try:
rps.setPixelsId(pix.id.val, True)
self.write(pix, rps)
return pix
finally:
rps.close()
def testBasic(self):
"""
Runs a simple export through to completion
as a smoke test.
"""
pix_ids = self.import_image()
image_id = self.client.sf.getQueryService().projection(
"select i.id from Image i join i.pixels p where p.id = :id",
omero.sys.ParametersI().addId(pix_ids[0]))[0][0].val
exporter = self.client.sf.createExporter()
exporter.addImage(image_id)
length = exporter.generateTiff()
offset = 0
while True:
rv = exporter.read(offset, 1000 * 1000)
if not rv:
break
rv = rv[:min(1000 * 1000, length - offset)]
offset += len(rv)
def test6713(self):
"""
Tests that a big image will not be exportable.
"""
pix = self.bigimage()
exporter = self.client.sf.createExporter()
exporter.addImage(pix.getImage().id.val)
with pytest.raises(omero.ApiUsageException):
exporter.generateTiff()
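    # Editor's note: hedged helper sketch, not part of the original
    # file; it factors out the chunked read loop used in testBasic.
    def _read_tiff(self, exporter, length, chunk=1000 * 1000):
        data = []
        offset = 0
        while offset < length:
            rv = exporter.read(offset, chunk)
            if not rv:
                break
            data.append(rv[:min(chunk, length - offset)])
            offset += len(data[-1])
        return "".join(data)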
|
gpl-2.0
|
t-tran/libcloud
|
libcloud/compute/drivers/openstack.py
|
1
|
87667
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OpenStack driver
"""
from libcloud.common.exceptions import BaseHTTPError
from libcloud.utils.iso8601 import parse_date
try:
import simplejson as json
except ImportError:
import json
import warnings
import base64
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import b
from libcloud.utils.py3 import next
from libcloud.utils.py3 import urlparse
from libcloud.common.openstack import OpenStackBaseConnection
from libcloud.common.openstack import OpenStackDriverMixin
from libcloud.common.openstack import OpenStackException
from libcloud.common.openstack import OpenStackResponse
from libcloud.utils.networking import is_public_subnet
from libcloud.compute.base import NodeSize, NodeImage
from libcloud.compute.base import (NodeDriver, Node, NodeLocation,
StorageVolume, VolumeSnapshot)
from libcloud.compute.base import KeyPair
from libcloud.compute.types import NodeState, StorageVolumeState, Provider, \
VolumeSnapshotState
from libcloud.pricing import get_size_price
from libcloud.utils.xml import findall
from libcloud.utils.py3 import ET
__all__ = [
'OpenStack_1_0_Response',
'OpenStack_1_0_Connection',
'OpenStack_1_0_NodeDriver',
'OpenStack_1_0_SharedIpGroup',
'OpenStack_1_0_NodeIpAddresses',
'OpenStack_1_1_Response',
'OpenStack_1_1_Connection',
'OpenStack_1_1_NodeDriver',
'OpenStack_1_1_FloatingIpPool',
'OpenStack_1_1_FloatingIpAddress',
'OpenStackNodeDriver'
]
ATOM_NAMESPACE = "http://www.w3.org/2005/Atom"
DEFAULT_API_VERSION = '1.1'
class OpenStackComputeConnection(OpenStackBaseConnection):
# default config for http://devstack.org/
service_type = 'compute'
service_name = 'nova'
service_region = 'RegionOne'
class OpenStackNodeDriver(NodeDriver, OpenStackDriverMixin):
"""
Base OpenStack node driver. Should not be used directly.
"""
api_name = 'openstack'
name = 'OpenStack'
website = 'http://openstack.org/'
NODE_STATE_MAP = {
'BUILD': NodeState.PENDING,
'REBUILD': NodeState.PENDING,
'ACTIVE': NodeState.RUNNING,
'SUSPENDED': NodeState.STOPPED,
'SHUTOFF': NodeState.STOPPED,
'DELETED': NodeState.TERMINATED,
'QUEUE_RESIZE': NodeState.PENDING,
'PREP_RESIZE': NodeState.PENDING,
'VERIFY_RESIZE': NodeState.RUNNING,
'PASSWORD': NodeState.PENDING,
'RESCUE': NodeState.PENDING,
'REBOOT': NodeState.REBOOTING,
'HARD_REBOOT': NodeState.REBOOTING,
'SHARE_IP': NodeState.PENDING,
'SHARE_IP_NO_CONFIG': NodeState.PENDING,
'DELETE_IP': NodeState.PENDING,
'ERROR': NodeState.ERROR,
'UNKNOWN': NodeState.UNKNOWN
}
# http://developer.openstack.org/api-ref-blockstorage-v2.html#volumes-v2
VOLUME_STATE_MAP = {
'creating': StorageVolumeState.CREATING,
'available': StorageVolumeState.AVAILABLE,
'attaching': StorageVolumeState.ATTACHING,
'in-use': StorageVolumeState.INUSE,
'deleting': StorageVolumeState.DELETING,
'error': StorageVolumeState.ERROR,
'error_deleting': StorageVolumeState.ERROR,
'backing-up': StorageVolumeState.BACKUP,
'restoring-backup': StorageVolumeState.BACKUP,
'error_restoring': StorageVolumeState.ERROR,
'error_extending': StorageVolumeState.ERROR,
}
# http://developer.openstack.org/api-ref-blockstorage-v2.html#ext-backups-v2
SNAPSHOT_STATE_MAP = {
'creating': VolumeSnapshotState.CREATING,
'available': VolumeSnapshotState.AVAILABLE,
'deleting': VolumeSnapshotState.DELETING,
'error': VolumeSnapshotState.ERROR,
'restoring': VolumeSnapshotState.RESTORING,
'error_restoring': VolumeSnapshotState.ERROR
}
def __new__(cls, key, secret=None, secure=True, host=None, port=None,
api_version=DEFAULT_API_VERSION, **kwargs):
if cls is OpenStackNodeDriver:
if api_version == '1.0':
cls = OpenStack_1_0_NodeDriver
elif api_version == '1.1':
cls = OpenStack_1_1_NodeDriver
else:
raise NotImplementedError(
"No OpenStackNodeDriver found for API version %s" %
(api_version))
return super(OpenStackNodeDriver, cls).__new__(cls)
def __init__(self, *args, **kwargs):
OpenStackDriverMixin.__init__(self, **kwargs)
super(OpenStackNodeDriver, self).__init__(*args, **kwargs)
def destroy_node(self, node):
uri = '/servers/%s' % (node.id)
resp = self.connection.request(uri, method='DELETE')
# The OpenStack and Rackspace documentation both say this API will
        # return a 204, but in fact, everyone everywhere agrees it actually
# returns a 202, so we are going to accept either, and someday,
# someone will fix either the implementation or the documentation to
# agree.
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def reboot_node(self, node):
return self._reboot_node(node, reboot_type='HARD')
def list_nodes(self, ex_all_tenants=False):
"""
List the nodes in a tenant
:param ex_all_tenants: List nodes for all the tenants. Note: Your user
must have admin privileges for this
functionality to work.
:type ex_all_tenants: ``bool``
"""
params = {}
if ex_all_tenants:
params = {'all_tenants': 1}
return self._to_nodes(
self.connection.request('/servers/detail', params=params).object)
def create_volume(self, size, name, location=None, snapshot=None,
ex_volume_type=None):
"""
Create a new volume.
:param size: Size of volume in gigabytes (required)
:type size: ``int``
:param name: Name of the volume to be created
:type name: ``str``
        :param location: Which data center to create the volume in. If
                         omitted, the provider picks a default
                         availability zone. (optional)
:type location: :class:`.NodeLocation`
:param snapshot: Snapshot from which to create the new
volume. (optional)
:type snapshot: :class:`.VolumeSnapshot`
:param ex_volume_type: What kind of volume to create.
(optional)
:type ex_volume_type: ``str``
:return: The newly created volume.
:rtype: :class:`StorageVolume`
"""
volume = {
'display_name': name,
'display_description': name,
'size': size,
'metadata': {
'contents': name,
},
}
if ex_volume_type:
volume['volume_type'] = ex_volume_type
if location:
volume['availability_zone'] = location
if snapshot:
volume['snapshot_id'] = snapshot.id
resp = self.connection.request('/os-volumes',
method='POST',
data={'volume': volume})
return self._to_volume(resp.object)
def destroy_volume(self, volume):
return self.connection.request('/os-volumes/%s' % volume.id,
method='DELETE').success()
def attach_volume(self, node, volume, device="auto"):
# when "auto" or None is provided for device, openstack will let
# the guest OS pick the next available device (fi. /dev/vdb)
return self.connection.request(
'/servers/%s/os-volume_attachments' % node.id,
method='POST',
data={
'volumeAttachment': {
'volumeId': volume.id,
'device': device,
}
}).success()
def detach_volume(self, volume, ex_node=None):
# when ex_node is not provided, volume is detached from all nodes
failed_nodes = []
for attachment in volume.extra['attachments']:
if not ex_node or ex_node.id == attachment['serverId']:
response = self.connection.request(
'/servers/%s/os-volume_attachments/%s' %
(attachment['serverId'], attachment['id']),
method='DELETE')
if not response.success():
failed_nodes.append(attachment['serverId'])
if failed_nodes:
raise OpenStackException(
'detach_volume failed for nodes with id: %s' %
', '.join(failed_nodes), 500, self
)
return True
def list_volumes(self):
return self._to_volumes(
self.connection.request('/os-volumes').object)
def ex_get_volume(self, volumeId):
return self._to_volume(
self.connection.request('/os-volumes/%s' % volumeId).object)
def list_images(self, location=None, ex_only_active=True):
"""
Lists all active images
@inherits: :class:`NodeDriver.list_images`
        :param ex_only_active: if True, return only active images
:type ex_only_active: ``bool``
"""
return self._to_images(
self.connection.request('/images/detail').object, ex_only_active)
def get_image(self, image_id):
"""
Get an image based on an image_id
@inherits: :class:`NodeDriver.get_image`
:param image_id: Image identifier
:type image_id: ``str``
:return: A NodeImage object
:rtype: :class:`NodeImage`
"""
return self._to_image(self.connection.request(
'/images/%s' % (image_id,)).object['image'])
def list_sizes(self, location=None):
return self._to_sizes(
self.connection.request('/flavors/detail').object)
def list_locations(self):
return [NodeLocation(0, '', '', self)]
def _ex_connection_class_kwargs(self):
return self.openstack_connection_kwargs()
def ex_get_node_details(self, node_id):
"""
Lists details of the specified server.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:rtype: :class:`Node`
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
uri = '/servers/%s' % (node_id)
try:
resp = self.connection.request(uri, method='GET')
except BaseHTTPError as e:
if e.code == httplib.NOT_FOUND:
return None
raise
return self._to_node_from_obj(resp.object)
def ex_soft_reboot_node(self, node):
"""
Soft reboots the specified server
:param node: node
:type node: :class:`Node`
:rtype: ``bool``
"""
return self._reboot_node(node, reboot_type='SOFT')
def ex_hard_reboot_node(self, node):
"""
Hard reboots the specified server
:param node: node
:type node: :class:`Node`
:rtype: ``bool``
"""
return self._reboot_node(node, reboot_type='HARD')
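# Editor's note: hedged usage sketch, not part of the original driver.
# The endpoint, credentials and tenant below are placeholders; the
# ex_force_* keyword arguments come from OpenStackDriverMixin.
def _openstack_driver_usage_sketch():
    from libcloud.compute.providers import get_driver
    # __new__ above dispatches to the 1.0 or 1.1 driver by api_version.
    cls = get_driver(Provider.OPENSTACK)
    driver = cls('user', 'secret',
                 ex_force_auth_url='https://keystone.example.com:5000',
                 ex_force_auth_version='2.0_password',
                 ex_tenant_name='demo')
    sizes = driver.list_sizes()
    images = driver.list_images()
    return driver.create_node(name='demo-node', size=sizes[0],
                              image=images[0])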
class OpenStackNodeSize(NodeSize):
"""
NodeSize class for the OpenStack.org driver.
    Modelled on the OpenNebula.org driver and these guidelines:
https://issues.apache.org/jira/browse/LIBCLOUD-119
"""
def __init__(self, id, name, ram, disk, bandwidth, price, driver,
vcpus=None, ephemeral_disk=None, swap=None, extra=None):
super(OpenStackNodeSize, self).__init__(id=id, name=name, ram=ram,
disk=disk,
bandwidth=bandwidth,
price=price, driver=driver)
self.vcpus = vcpus
self.ephemeral_disk = ephemeral_disk
self.swap = swap
self.extra = extra
def __repr__(self):
return (('<OpenStackNodeSize: id=%s, name=%s, ram=%s, disk=%s, '
'bandwidth=%s, price=%s, driver=%s, vcpus=%s, ...>')
% (self.id, self.name, self.ram, self.disk, self.bandwidth,
self.price, self.driver.name, self.vcpus))
class OpenStack_1_0_Response(OpenStackResponse):
def __init__(self, *args, **kwargs):
# done because of a circular reference from
# NodeDriver -> Connection -> Response
self.node_driver = OpenStack_1_0_NodeDriver
super(OpenStack_1_0_Response, self).__init__(*args, **kwargs)
class OpenStack_1_0_Connection(OpenStackComputeConnection):
responseCls = OpenStack_1_0_Response
default_content_type = 'application/xml; charset=UTF-8'
accept_format = 'application/xml'
XML_NAMESPACE = 'http://docs.rackspacecloud.com/servers/api/v1.0'
class OpenStack_1_0_NodeDriver(OpenStackNodeDriver):
"""
OpenStack node driver.
Extra node attributes:
- password: root password, available after create.
- hostId: represents the host your cloud server runs on
- imageId: id of image
- flavorId: id of flavor
"""
connectionCls = OpenStack_1_0_Connection
type = Provider.OPENSTACK
features = {'create_node': ['generates_password']}
def __init__(self, *args, **kwargs):
self._ex_force_api_version = str(kwargs.pop('ex_force_api_version',
None))
self.XML_NAMESPACE = self.connectionCls.XML_NAMESPACE
super(OpenStack_1_0_NodeDriver, self).__init__(*args, **kwargs)
def _to_images(self, object, ex_only_active):
images = []
for image in findall(object, 'image', self.XML_NAMESPACE):
if ex_only_active and image.get('status') != 'ACTIVE':
continue
images.append(self._to_image(image))
return images
def _to_image(self, element):
return NodeImage(id=element.get('id'),
name=element.get('name'),
driver=self.connection.driver,
extra={'updated': element.get('updated'),
'created': element.get('created'),
'status': element.get('status'),
'serverId': element.get('serverId'),
'progress': element.get('progress'),
'minDisk': element.get('minDisk'),
'minRam': element.get('minRam')
}
)
def _change_password_or_name(self, node, name=None, password=None):
uri = '/servers/%s' % (node.id)
if not name:
name = node.name
body = {'xmlns': self.XML_NAMESPACE,
'name': name}
if password is not None:
body['adminPass'] = password
server_elm = ET.Element('server', body)
resp = self.connection.request(
uri, method='PUT', data=ET.tostring(server_elm))
if resp.status == httplib.NO_CONTENT and password is not None:
node.extra['password'] = password
return resp.status == httplib.NO_CONTENT
def create_node(self, **kwargs):
"""
Create a new node
@inherits: :class:`NodeDriver.create_node`
:keyword ex_metadata: Key/Value metadata to associate with a node
:type ex_metadata: ``dict``
:keyword ex_files: File Path => File contents to create on
the node
:type ex_files: ``dict``
        :keyword ex_shared_ip_group_id: The ID of the shared IP group
                                        into which the server is launched
:type ex_shared_ip_group_id: ``str``
"""
name = kwargs['name']
image = kwargs['image']
size = kwargs['size']
attributes = {'xmlns': self.XML_NAMESPACE,
'name': name,
'imageId': str(image.id),
'flavorId': str(size.id)}
if 'ex_shared_ip_group' in kwargs:
# Deprecate this. Be explicit and call the variable
# ex_shared_ip_group_id since user needs to pass in the id, not the
# name.
warnings.warn('ex_shared_ip_group argument is deprecated.'
' Please use ex_shared_ip_group_id')
if 'ex_shared_ip_group_id' in kwargs:
shared_ip_group_id = kwargs['ex_shared_ip_group_id']
attributes['sharedIpGroupId'] = shared_ip_group_id
server_elm = ET.Element('server', attributes)
metadata_elm = self._metadata_to_xml(kwargs.get("ex_metadata", {}))
if metadata_elm:
server_elm.append(metadata_elm)
files_elm = self._files_to_xml(kwargs.get("ex_files", {}))
if files_elm:
server_elm.append(files_elm)
resp = self.connection.request("/servers",
method='POST',
data=ET.tostring(server_elm))
return self._to_node(resp.object)
def ex_set_password(self, node, password):
"""
Sets the Node's root password.
This will reboot the instance to complete the operation.
:class:`Node.extra['password']` will be set to the new value if the
operation was successful.
:param node: node to set password
:type node: :class:`Node`
:param password: new password.
:type password: ``str``
:rtype: ``bool``
"""
return self._change_password_or_name(node, password=password)
def ex_set_server_name(self, node, name):
"""
Sets the Node's name.
This will reboot the instance to complete the operation.
:param node: node to set name
:type node: :class:`Node`
:param name: new name
:type name: ``str``
:rtype: ``bool``
"""
return self._change_password_or_name(node, name=name)
def ex_resize(self, node, size):
"""
Change an existing server flavor / scale the server up or down.
:param node: node to resize.
:type node: :class:`Node`
:param size: new size.
:type size: :class:`NodeSize`
:rtype: ``bool``
"""
elm = ET.Element(
'resize',
{'xmlns': self.XML_NAMESPACE,
'flavorId': str(size.id)}
)
resp = self.connection.request("/servers/%s/action" % (node.id),
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.ACCEPTED
def ex_confirm_resize(self, node):
"""
Confirm a resize request which is currently in progress. If a resize
request is not explicitly confirmed or reverted it's automatically
confirmed after 24 hours.
For more info refer to the API documentation: http://goo.gl/zjFI1
:param node: node for which the resize request will be confirmed.
:type node: :class:`Node`
:rtype: ``bool``
"""
elm = ET.Element(
'confirmResize',
{'xmlns': self.XML_NAMESPACE},
)
resp = self.connection.request("/servers/%s/action" % (node.id),
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.NO_CONTENT
def ex_revert_resize(self, node):
"""
Revert a resize request which is currently in progress.
All resizes are automatically confirmed after 24 hours if they have
not already been confirmed explicitly or reverted.
For more info refer to the API documentation: http://goo.gl/AizBu
:param node: node for which the resize request will be reverted.
:type node: :class:`Node`
:rtype: ``bool``
"""
elm = ET.Element(
'revertResize',
{'xmlns': self.XML_NAMESPACE}
)
resp = self.connection.request("/servers/%s/action" % (node.id),
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.NO_CONTENT
def ex_rebuild(self, node_id, image_id):
"""
Rebuilds the specified server.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:param image_id: ID of the image which should be used
:type image_id: ``str``
:rtype: ``bool``
"""
# @TODO: Remove those ifs in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
if isinstance(image_id, NodeImage):
image_id = image_id.id
elm = ET.Element(
'rebuild',
{'xmlns': self.XML_NAMESPACE,
'imageId': image_id}
)
resp = self.connection.request("/servers/%s/action" % node_id,
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.ACCEPTED
def ex_create_ip_group(self, group_name, node_id=None):
"""
Creates a shared IP group.
:param group_name: group name which should be used
:type group_name: ``str``
:param node_id: ID of the node which should be used
:type node_id: ``str``
:rtype: ``bool``
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
group_elm = ET.Element(
'sharedIpGroup',
{'xmlns': self.XML_NAMESPACE,
'name': group_name}
)
if node_id:
ET.SubElement(
group_elm,
'server',
{'id': node_id}
)
resp = self.connection.request('/shared_ip_groups',
method='POST',
data=ET.tostring(group_elm))
return self._to_shared_ip_group(resp.object)
def ex_list_ip_groups(self, details=False):
"""
Lists IDs and names for shared IP groups.
        If ``details`` is True, full details are listed for each group.
        :param details: True if full details are required
:type details: ``bool``
:rtype: ``list`` of :class:`OpenStack_1_0_SharedIpGroup`
"""
uri = '/shared_ip_groups/detail' if details else '/shared_ip_groups'
resp = self.connection.request(uri,
method='GET')
groups = findall(resp.object, 'sharedIpGroup',
self.XML_NAMESPACE)
return [self._to_shared_ip_group(el) for el in groups]
def ex_delete_ip_group(self, group_id):
"""
Deletes the specified shared IP group.
:param group_id: group id which should be used
:type group_id: ``str``
:rtype: ``bool``
"""
uri = '/shared_ip_groups/%s' % group_id
resp = self.connection.request(uri, method='DELETE')
return resp.status == httplib.NO_CONTENT
def ex_share_ip(self, group_id, node_id, ip, configure_node=True):
"""
        Shares an IP address with the specified server.
:param group_id: group id which should be used
:type group_id: ``str``
:param node_id: ID of the node which should be used
:type node_id: ``str``
:param ip: ip which should be used
:type ip: ``str``
        :param configure_node: whether to configure the server with the shared IP
:type configure_node: ``bool``
:rtype: ``bool``
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
if configure_node:
str_configure = 'true'
else:
str_configure = 'false'
elm = ET.Element(
'shareIp',
{'xmlns': self.XML_NAMESPACE,
'sharedIpGroupId': group_id,
'configureServer': str_configure},
)
uri = '/servers/%s/ips/public/%s' % (node_id, ip)
resp = self.connection.request(uri,
method='PUT',
data=ET.tostring(elm))
return resp.status == httplib.ACCEPTED
def ex_unshare_ip(self, node_id, ip):
"""
Removes a shared IP address from the specified server.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:param ip: ip which should be used
:type ip: ``str``
:rtype: ``bool``
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
uri = '/servers/%s/ips/public/%s' % (node_id, ip)
resp = self.connection.request(uri,
method='DELETE')
return resp.status == httplib.ACCEPTED
def ex_list_ip_addresses(self, node_id):
"""
List all server addresses.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:rtype: :class:`OpenStack_1_0_NodeIpAddresses`
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
uri = '/servers/%s/ips' % node_id
resp = self.connection.request(uri,
method='GET')
return self._to_ip_addresses(resp.object)
def _metadata_to_xml(self, metadata):
if len(metadata) == 0:
return None
metadata_elm = ET.Element('metadata')
for k, v in list(metadata.items()):
meta_elm = ET.SubElement(metadata_elm, 'meta', {'key': str(k)})
meta_elm.text = str(v)
return metadata_elm
def _files_to_xml(self, files):
if len(files) == 0:
return None
personality_elm = ET.Element('personality')
for k, v in list(files.items()):
file_elm = ET.SubElement(personality_elm,
'file',
{'path': str(k)})
file_elm.text = base64.b64encode(b(v))
return personality_elm
def _reboot_node(self, node, reboot_type='SOFT'):
resp = self._node_action(node, ['reboot', ('type', reboot_type)])
return resp.status == httplib.ACCEPTED
def _node_action(self, node, body):
if isinstance(body, list):
attr = ' '.join(['%s="%s"' % (item[0], item[1])
for item in body[1:]])
body = '<%s xmlns="%s" %s/>' % (body[0], self.XML_NAMESPACE, attr)
uri = '/servers/%s/action' % (node.id)
resp = self.connection.request(uri, method='POST', data=body)
return resp
def _to_nodes(self, object):
node_elements = findall(object, 'server', self.XML_NAMESPACE)
return [self._to_node(el) for el in node_elements]
def _to_node_from_obj(self, obj):
return self._to_node(findall(obj, 'server', self.XML_NAMESPACE)[0])
def _to_node(self, el):
def get_ips(el):
return [ip.get('addr') for ip in el]
def get_meta_dict(el):
d = {}
for meta in el:
d[meta.get('key')] = meta.text
return d
public_ip = get_ips(findall(el, 'addresses/public/ip',
self.XML_NAMESPACE))
private_ip = get_ips(findall(el, 'addresses/private/ip',
self.XML_NAMESPACE))
metadata = get_meta_dict(findall(el, 'metadata/meta',
self.XML_NAMESPACE))
n = Node(id=el.get('id'),
name=el.get('name'),
state=self.NODE_STATE_MAP.get(
el.get('status'), NodeState.UNKNOWN),
public_ips=public_ip,
private_ips=private_ip,
driver=self.connection.driver,
extra={
'password': el.get('adminPass'),
'hostId': el.get('hostId'),
'imageId': el.get('imageId'),
'flavorId': el.get('flavorId'),
'uri': "https://%s%s/servers/%s" % (
self.connection.host,
self.connection.request_path, el.get('id')),
'service_name': self.connection.get_service_name(),
'metadata': metadata})
return n
def _to_sizes(self, object):
elements = findall(object, 'flavor', self.XML_NAMESPACE)
return [self._to_size(el) for el in elements]
def _to_size(self, el):
vcpus = int(el.get('vcpus')) if el.get('vcpus', None) else None
return OpenStackNodeSize(id=el.get('id'),
name=el.get('name'),
ram=int(el.get('ram')),
disk=int(el.get('disk')),
# XXX: needs hardcode
vcpus=vcpus,
bandwidth=None,
# Hardcoded
price=self._get_size_price(el.get('id')),
driver=self.connection.driver)
def ex_limits(self):
"""
Extra call to get account's limits, such as
        rates (for example, the number of POST requests per day)
and absolute limits like total amount of available
RAM to be used by servers.
:return: dict with keys 'rate' and 'absolute'
:rtype: ``dict``
"""
def _to_rate(el):
rate = {}
for item in list(el.items()):
rate[item[0]] = item[1]
return rate
def _to_absolute(el):
return {el.get('name'): el.get('value')}
limits = self.connection.request("/limits").object
rate = [_to_rate(el) for el in findall(limits, 'rate/limit',
self.XML_NAMESPACE)]
absolute = {}
for item in findall(limits, 'absolute/limit',
self.XML_NAMESPACE):
absolute.update(_to_absolute(item))
return {"rate": rate, "absolute": absolute}
def create_image(self, node, name, description=None, reboot=True):
"""Create an image for node.
@inherits: :class:`NodeDriver.create_image`
:param node: node to use as a base for image
:type node: :class:`Node`
:param name: name for new image
:type name: ``str``
:rtype: :class:`NodeImage`
"""
image_elm = ET.Element(
'image',
{'xmlns': self.XML_NAMESPACE,
'name': name,
'serverId': node.id}
)
return self._to_image(
self.connection.request("/images", method="POST",
data=ET.tostring(image_elm)).object)
def delete_image(self, image):
"""Delete an image for node.
@inherits: :class:`NodeDriver.delete_image`
:param image: the image to be deleted
:type image: :class:`NodeImage`
:rtype: ``bool``
"""
uri = '/images/%s' % image.id
resp = self.connection.request(uri, method='DELETE')
return resp.status == httplib.NO_CONTENT
def _to_shared_ip_group(self, el):
servers_el = findall(el, 'servers', self.XML_NAMESPACE)
if servers_el:
servers = [s.get('id')
for s in findall(servers_el[0], 'server',
self.XML_NAMESPACE)]
else:
servers = None
return OpenStack_1_0_SharedIpGroup(id=el.get('id'),
name=el.get('name'),
servers=servers)
def _to_ip_addresses(self, el):
public_ips = [ip.get('addr') for ip in findall(
findall(el, 'public', self.XML_NAMESPACE)[0],
'ip', self.XML_NAMESPACE)]
private_ips = [ip.get('addr') for ip in findall(
findall(el, 'private', self.XML_NAMESPACE)[0],
'ip', self.XML_NAMESPACE)]
return OpenStack_1_0_NodeIpAddresses(public_ips, private_ips)
def _get_size_price(self, size_id):
try:
return get_size_price(driver_type='compute',
driver_name=self.api_name,
size_id=size_id)
except KeyError:
return 0.0
class OpenStack_1_0_SharedIpGroup(object):
"""
Shared IP group info.
"""
def __init__(self, id, name, servers=None):
self.id = str(id)
self.name = name
self.servers = servers
class OpenStack_1_0_NodeIpAddresses(object):
"""
List of public and private IP addresses of a Node.
"""
def __init__(self, public_addresses, private_addresses):
self.public_addresses = public_addresses
self.private_addresses = private_addresses
class OpenStack_1_1_Response(OpenStackResponse):
def __init__(self, *args, **kwargs):
# done because of a circular reference from
# NodeDriver -> Connection -> Response
self.node_driver = OpenStack_1_1_NodeDriver
super(OpenStack_1_1_Response, self).__init__(*args, **kwargs)
class OpenStackNetwork(object):
"""
A Virtual Network.
"""
def __init__(self, id, name, cidr, driver, extra=None):
self.id = str(id)
self.name = name
self.cidr = cidr
self.driver = driver
self.extra = extra or {}
def __repr__(self):
return '<OpenStackNetwork id="%s" name="%s" cidr="%s">' % (self.id,
self.name,
self.cidr,)
class OpenStackSecurityGroup(object):
"""
A Security Group.
"""
def __init__(self, id, tenant_id, name, description, driver, rules=None,
extra=None):
"""
Constructor.
:keyword id: Group id.
:type id: ``str``
:keyword tenant_id: Owner of the security group.
:type tenant_id: ``str``
:keyword name: Human-readable name for the security group. Might
not be unique.
:type name: ``str``
:keyword description: Human-readable description of a security
group.
:type description: ``str``
:keyword rules: Rules associated with this group.
:type rules: ``list`` of
:class:`OpenStackSecurityGroupRule`
:keyword extra: Extra attributes associated with this group.
:type extra: ``dict``
"""
self.id = id
self.tenant_id = tenant_id
self.name = name
self.description = description
self.driver = driver
self.rules = rules or []
self.extra = extra or {}
def __repr__(self):
return ('<OpenStackSecurityGroup id=%s tenant_id=%s name=%s \
description=%s>' % (self.id, self.tenant_id, self.name,
self.description))
class OpenStackSecurityGroupRule(object):
"""
A Rule of a Security Group.
"""
def __init__(self, id, parent_group_id, ip_protocol, from_port, to_port,
driver, ip_range=None, group=None, tenant_id=None,
extra=None):
"""
Constructor.
:keyword id: Rule id.
:type id: ``str``
:keyword parent_group_id: ID of the parent security group.
:type parent_group_id: ``str``
:keyword ip_protocol: IP Protocol (icmp, tcp, udp, etc).
:type ip_protocol: ``str``
:keyword from_port: Port at start of range.
:type from_port: ``int``
:keyword to_port: Port at end of range.
:type to_port: ``int``
:keyword ip_range: CIDR for address range.
:type ip_range: ``str``
:keyword group: Name of a source security group to apply to rule.
:type group: ``str``
:keyword tenant_id: Owner of the security group.
:type tenant_id: ``str``
:keyword extra: Extra attributes associated with this rule.
:type extra: ``dict``
"""
self.id = id
self.parent_group_id = parent_group_id
self.ip_protocol = ip_protocol
self.from_port = from_port
self.to_port = to_port
self.driver = driver
self.ip_range = ''
self.group = {}
if group is None:
self.ip_range = ip_range
else:
self.group = {'name': group, 'tenant_id': tenant_id}
self.tenant_id = tenant_id
self.extra = extra or {}
def __repr__(self):
return ('<OpenStackSecurityGroupRule id=%s parent_group_id=%s \
ip_protocol=%s from_port=%s to_port=%s>' % (self.id,
self.parent_group_id, self.ip_protocol, self.from_port,
self.to_port))
class OpenStackKeyPair(object):
"""
A KeyPair.
"""
def __init__(self, name, fingerprint, public_key, driver, private_key=None,
extra=None):
"""
Constructor.
:keyword name: Name of the KeyPair.
:type name: ``str``
:keyword fingerprint: Fingerprint of the KeyPair
:type fingerprint: ``str``
:keyword public_key: Public key in OpenSSH format.
:type public_key: ``str``
:keyword private_key: Private key in PEM format.
:type private_key: ``str``
:keyword extra: Extra attributes associated with this KeyPair.
:type extra: ``dict``
"""
self.name = name
self.fingerprint = fingerprint
self.public_key = public_key
self.private_key = private_key
self.driver = driver
self.extra = extra or {}
def __repr__(self):
return ('<OpenStackKeyPair name=%s fingerprint=%s public_key=%s ...>'
% (self.name, self.fingerprint, self.public_key))
class OpenStack_1_1_Connection(OpenStackComputeConnection):
responseCls = OpenStack_1_1_Response
accept_format = 'application/json'
default_content_type = 'application/json; charset=UTF-8'
def encode_data(self, data):
return json.dumps(data)
class OpenStack_1_1_NodeDriver(OpenStackNodeDriver):
"""
OpenStack node driver.
"""
connectionCls = OpenStack_1_1_Connection
type = Provider.OPENSTACK
features = {"create_node": ["generates_password"]}
_networks_url_prefix = '/os-networks'
def __init__(self, *args, **kwargs):
self._ex_force_api_version = str(kwargs.pop('ex_force_api_version',
None))
super(OpenStack_1_1_NodeDriver, self).__init__(*args, **kwargs)
def create_node(self, **kwargs):
"""Create a new node
@inherits: :class:`NodeDriver.create_node`
:keyword ex_keyname: The name of the key pair
:type ex_keyname: ``str``
:keyword ex_userdata: String containing user data
see
https://help.ubuntu.com/community/CloudInit
:type ex_userdata: ``str``
        :keyword ex_config_drive: If True, enables metadata injection in
                                  the server through a configuration
                                  drive; see
                                  http://docs.openstack.org/grizzly/openstack-compute/admin/content/config-drive.html
        :type ex_config_drive: ``bool``
:keyword ex_security_groups: List of security groups to assign to
the node
:type ex_security_groups: ``list`` of
:class:`OpenStackSecurityGroup`
:keyword ex_metadata: Key/Value metadata to associate with a node
:type ex_metadata: ``dict``
        :keyword ex_files: File Path => File contents to create on
                           the node
:type ex_files: ``dict``
:keyword networks: The server is launched into a set of Networks.
:type networks: ``list`` of :class:`OpenStackNetwork`
:keyword ex_disk_config: Name of the disk configuration.
Can be either ``AUTO`` or ``MANUAL``.
:type ex_disk_config: ``str``
:keyword ex_admin_pass: The root password for the node
:type ex_admin_pass: ``str``
:keyword ex_availability_zone: Nova availability zone for the node
:type ex_availability_zone: ``str``
"""
server_params = self._create_args_to_params(None, **kwargs)
resp = self.connection.request("/servers",
method='POST',
data={'server': server_params})
create_response = resp.object['server']
server_resp = self.connection.request(
'/servers/%s' % create_response['id'])
server_object = server_resp.object['server']
# adminPass is not always present
# http://docs.openstack.org/essex/openstack-compute/admin/
# content/configuring-compute-API.html#d6e1833
server_object['adminPass'] = create_response.get('adminPass', None)
return self._to_node(server_object)
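    def _ex_create_node_usage_sketch(self, name, size, image, keyname):
        """
        Editor's note: hedged usage sketch, not part of the original
        driver, illustrating the extension keywords documented in
        create_node() above; ``keyname`` must name an existing key pair.
        """
        return self.create_node(name=name, size=size, image=image,
                                ex_keyname=keyname,
                                ex_metadata={'role': 'demo'},
                                ex_config_drive=True)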
def _to_images(self, obj, ex_only_active):
images = []
for image in obj['images']:
if ex_only_active and image.get('status') != 'ACTIVE':
continue
images.append(self._to_image(image))
return images
def _to_image(self, api_image):
server = api_image.get('server', {})
return NodeImage(
id=api_image['id'],
name=api_image['name'],
driver=self,
extra=dict(
updated=api_image['updated'],
created=api_image['created'],
status=api_image['status'],
progress=api_image.get('progress'),
metadata=api_image.get('metadata'),
serverId=server.get('id'),
minDisk=api_image.get('minDisk'),
minRam=api_image.get('minRam'),
)
)
def _to_nodes(self, obj):
servers = obj['servers']
return [self._to_node(server) for server in servers]
def _to_volumes(self, obj):
volumes = obj['volumes']
return [self._to_volume(volume) for volume in volumes]
def _to_snapshots(self, obj):
snapshots = obj['snapshots']
return [self._to_snapshot(snapshot) for snapshot in snapshots]
def _to_sizes(self, obj):
flavors = obj['flavors']
return [self._to_size(flavor) for flavor in flavors]
def _create_args_to_params(self, node, **kwargs):
server_params = {
'name': kwargs.get('name'),
'metadata': kwargs.get('ex_metadata', {}),
'personality': self._files_to_personality(kwargs.get("ex_files",
{}))
}
if 'ex_availability_zone' in kwargs:
server_params['availability_zone'] = kwargs['ex_availability_zone']
if 'ex_keyname' in kwargs:
server_params['key_name'] = kwargs['ex_keyname']
if 'ex_userdata' in kwargs:
server_params['user_data'] = base64.b64encode(
b(kwargs['ex_userdata'])).decode('ascii')
if 'ex_disk_config' in kwargs:
server_params['OS-DCF:diskConfig'] = kwargs['ex_disk_config']
if 'ex_config_drive' in kwargs:
server_params['config_drive'] = str(kwargs['ex_config_drive'])
if 'ex_admin_pass' in kwargs:
server_params['adminPass'] = kwargs['ex_admin_pass']
if 'networks' in kwargs:
networks = kwargs['networks']
networks = [{'uuid': network.id} for network in networks]
server_params['networks'] = networks
if 'ex_security_groups' in kwargs:
server_params['security_groups'] = []
for security_group in kwargs['ex_security_groups']:
name = security_group.name
server_params['security_groups'].append({'name': name})
if 'ex_blockdevicemappings' in kwargs:
server_params['block_device_mapping_v2'] = \
kwargs['ex_blockdevicemappings']
if 'name' in kwargs:
server_params['name'] = kwargs.get('name')
else:
server_params['name'] = node.name
if 'image' in kwargs:
server_params['imageRef'] = kwargs.get('image').id
else:
server_params['imageRef'] = node.extra.get('imageId')
if 'size' in kwargs:
server_params['flavorRef'] = kwargs.get('size').id
else:
server_params['flavorRef'] = node.extra.get('flavorId')
return server_params
def _files_to_personality(self, files):
rv = []
for k, v in list(files.items()):
rv.append({'path': k, 'contents': base64.b64encode(b(v))})
return rv
def _reboot_node(self, node, reboot_type='SOFT'):
resp = self._node_action(node, 'reboot', type=reboot_type)
return resp.status == httplib.ACCEPTED
def ex_set_password(self, node, password):
"""
Changes the administrator password for a specified server.
:param node: Node to rebuild.
:type node: :class:`Node`
:param password: The administrator password.
:type password: ``str``
:rtype: ``bool``
"""
resp = self._node_action(node, 'changePassword', adminPass=password)
node.extra['password'] = password
return resp.status == httplib.ACCEPTED
def ex_rebuild(self, node, image, **kwargs):
"""
Rebuild a Node.
:param node: Node to rebuild.
:type node: :class:`Node`
:param image: New image to use.
:type image: :class:`NodeImage`
:keyword ex_metadata: Key/Value metadata to associate with a node
:type ex_metadata: ``dict``
        :keyword ex_files: File Path => File contents to create on
                           the node
:type ex_files: ``dict``
:keyword ex_keyname: Name of existing public key to inject into
instance
:type ex_keyname: ``str``
:keyword ex_userdata: String containing user data
see
https://help.ubuntu.com/community/CloudInit
:type ex_userdata: ``str``
:keyword ex_security_groups: List of security groups to assign to
the node
:type ex_security_groups: ``list`` of
:class:`OpenStackSecurityGroup`
:keyword ex_disk_config: Name of the disk configuration.
Can be either ``AUTO`` or ``MANUAL``.
:type ex_disk_config: ``str``
:keyword ex_config_drive: If True enables metadata injection in a
server through a configuration drive.
:type ex_config_drive: ``bool``
:rtype: ``bool``
"""
server_params = self._create_args_to_params(node, image=image,
**kwargs)
resp = self._node_action(node, 'rebuild', **server_params)
return resp.status == httplib.ACCEPTED
def ex_resize(self, node, size):
"""
Change a node size.
:param node: Node to resize.
:type node: :class:`Node`
:type size: :class:`NodeSize`
:param size: New size to use.
:rtype: ``bool``
"""
server_params = self._create_args_to_params(node, size=size)
resp = self._node_action(node, 'resize', **server_params)
return resp.status == httplib.ACCEPTED
def ex_confirm_resize(self, node):
"""
Confirms a pending resize action.
:param node: Node to resize.
:type node: :class:`Node`
:rtype: ``bool``
"""
resp = self._node_action(node, 'confirmResize')
return resp.status == httplib.NO_CONTENT
def ex_revert_resize(self, node):
"""
Cancels and reverts a pending resize action.
:param node: Node to resize.
:type node: :class:`Node`
:rtype: ``bool``
"""
resp = self._node_action(node, 'revertResize')
return resp.status == httplib.ACCEPTED
def create_image(self, node, name, metadata=None):
"""
Creates a new image.
:param node: Node
:type node: :class:`Node`
:param name: The name for the new image.
:type name: ``str``
:param metadata: Key and value pairs for metadata.
:type metadata: ``dict``
:rtype: :class:`NodeImage`
"""
optional_params = {}
if metadata:
optional_params['metadata'] = metadata
resp = self._node_action(node, 'createImage', name=name,
**optional_params)
image_id = self._extract_image_id_from_url(resp.headers['location'])
return self.get_image(image_id=image_id)
def ex_set_server_name(self, node, name):
"""
Sets the Node's name.
:param node: Node
:type node: :class:`Node`
:param name: The name of the server.
:type name: ``str``
:rtype: :class:`Node`
"""
return self._update_node(node, name=name)
def ex_get_metadata(self, node):
"""
Get a Node's metadata.
:param node: Node
:type node: :class:`Node`
:return: Key/Value metadata associated with node.
:rtype: ``dict``
"""
return self.connection.request(
'/servers/%s/metadata' % (node.id,),
method='GET',).object['metadata']
def ex_set_metadata(self, node, metadata):
"""
Sets the Node's metadata.
:param node: Node
:type node: :class:`Node`
:param metadata: Key/Value metadata to associate with a node
:type metadata: ``dict``
:rtype: ``dict``
"""
return self.connection.request(
'/servers/%s/metadata' % (node.id,), method='PUT',
data={'metadata': metadata}
).object['metadata']
def ex_update_node(self, node, **node_updates):
"""
Update the Node's editable attributes. The OpenStack API currently
supports editing name and IPv4/IPv6 access addresses.
The driver currently only supports updating the node name.
:param node: Node
:type node: :class:`Node`
:keyword name: New name for the server
:type name: ``str``
:rtype: :class:`Node`
"""
potential_data = self._create_args_to_params(node, **node_updates)
updates = {'name': potential_data['name']}
return self._update_node(node, **updates)
def _to_networks(self, obj):
networks = obj['networks']
return [self._to_network(network) for network in networks]
def _to_network(self, obj):
return OpenStackNetwork(id=obj['id'],
name=obj['label'],
cidr=obj.get('cidr', None),
driver=self)
def ex_list_networks(self):
"""
Get a list of Networks that are available.
:rtype: ``list`` of :class:`OpenStackNetwork`
"""
response = self.connection.request(self._networks_url_prefix).object
return self._to_networks(response)
def ex_create_network(self, name, cidr):
"""
Create a new Network
:param name: Name of network which should be used
:type name: ``str``
:param cidr: cidr of network which should be used
:type cidr: ``str``
:rtype: :class:`OpenStackNetwork`
"""
data = {'network': {'cidr': cidr, 'label': name}}
response = self.connection.request(self._networks_url_prefix,
method='POST', data=data).object
return self._to_network(response['network'])
def ex_delete_network(self, network):
"""
        Delete the given Network.
:param network: Network which should be used
:type network: :class:`OpenStackNetwork`
:rtype: ``bool``
"""
resp = self.connection.request('%s/%s' % (self._networks_url_prefix,
network.id),
method='DELETE')
return resp.status == httplib.ACCEPTED
def ex_get_console_output(self, node, length=None):
"""
Get console output
:param node: node
:type node: :class:`Node`
:param length: Optional number of lines to fetch from the
console log
:type length: ``int``
:return: Dictionary with the output
:rtype: ``dict``
"""
data = {
"os-getConsoleOutput": {
"length": length
}
}
resp = self.connection.request('/servers/%s/action' % node.id,
method='POST', data=data).object
return resp
def ex_list_snapshots(self):
return self._to_snapshots(
self.connection.request('/os-snapshots').object)
def list_volume_snapshots(self, volume):
return [snapshot for snapshot in self.ex_list_snapshots()
if snapshot.extra['volume_id'] == volume.id]
def create_volume_snapshot(self, volume, name=None, ex_description=None,
ex_force=True):
"""
Create snapshot from volume
:param volume: Instance of `StorageVolume`
:type volume: `StorageVolume`
:param name: Name of snapshot (optional)
:type name: `str` | `NoneType`
:param ex_description: Description of the snapshot (optional)
:type ex_description: `str` | `NoneType`
        :param ex_force: If True, create the snapshot even when the
                         volume is not in the `available` state (for
                         example `in-use`). Defaults to True. (optional)
:type ex_force: `bool`
:rtype: :class:`VolumeSnapshot`
"""
data = {'snapshot': {'volume_id': volume.id, 'force': ex_force}}
if name is not None:
data['snapshot']['display_name'] = name
if ex_description is not None:
data['snapshot']['display_description'] = ex_description
return self._to_snapshot(self.connection.request('/os-snapshots',
method='POST',
data=data).object)
def destroy_volume_snapshot(self, snapshot):
resp = self.connection.request('/os-snapshots/%s' % snapshot.id,
method='DELETE')
return resp.status == httplib.NO_CONTENT
def ex_create_snapshot(self, volume, name, description=None, force=False):
"""
Create a snapshot based off of a volume.
:param volume: volume
:type volume: :class:`StorageVolume`
:keyword name: New name for the volume snapshot
:type name: ``str``
:keyword description: Description of the snapshot (optional)
:type description: ``str``
:keyword force: Whether to force creation (optional)
:type force: ``bool``
:rtype: :class:`VolumeSnapshot`
"""
warnings.warn('This method has been deprecated in favor of the '
'create_volume_snapshot method')
return self.create_volume_snapshot(volume, name,
ex_description=description,
ex_force=force)
def ex_delete_snapshot(self, snapshot):
"""
Delete a VolumeSnapshot
:param snapshot: snapshot
:type snapshot: :class:`VolumeSnapshot`
:rtype: ``bool``
"""
warnings.warn('This method has been deprecated in favor of the '
'destroy_volume_snapshot method')
return self.destroy_volume_snapshot(snapshot)
def _to_security_group_rules(self, obj):
return [self._to_security_group_rule(security_group_rule) for
security_group_rule in obj]
def _to_security_group_rule(self, obj):
ip_range = group = tenant_id = None
if obj['group'] == {}:
ip_range = obj['ip_range'].get('cidr', None)
else:
group = obj['group'].get('name', None)
tenant_id = obj['group'].get('tenant_id', None)
return OpenStackSecurityGroupRule(
id=obj['id'], parent_group_id=obj['parent_group_id'],
ip_protocol=obj['ip_protocol'], from_port=obj['from_port'],
to_port=obj['to_port'], driver=self, ip_range=ip_range,
group=group, tenant_id=tenant_id)
def _to_security_groups(self, obj):
security_groups = obj['security_groups']
return [self._to_security_group(security_group) for security_group in
security_groups]
def _to_security_group(self, obj):
rules = self._to_security_group_rules(obj.get('rules', []))
return OpenStackSecurityGroup(id=obj['id'],
tenant_id=obj['tenant_id'],
name=obj['name'],
description=obj.get('description', ''),
rules=rules,
driver=self)
def ex_list_security_groups(self):
"""
Get a list of Security Groups that are available.
:rtype: ``list`` of :class:`OpenStackSecurityGroup`
"""
return self._to_security_groups(
self.connection.request('/os-security-groups').object)
def ex_get_node_security_groups(self, node):
"""
Get Security Groups of the specified server.
:rtype: ``list`` of :class:`OpenStackSecurityGroup`
"""
return self._to_security_groups(
self.connection.request('/servers/%s/os-security-groups' %
(node.id)).object)
def ex_create_security_group(self, name, description):
"""
Create a new Security Group
:param name: Name of the new Security Group
:type name: ``str``
:param description: Description of the new Security Group
:type description: ``str``
:rtype: :class:`OpenStackSecurityGroup`
"""
return self._to_security_group(self.connection.request(
'/os-security-groups', method='POST',
data={'security_group': {'name': name, 'description': description}}
).object['security_group'])
def ex_delete_security_group(self, security_group):
"""
Delete a Security Group.
        :param security_group: Security Group which should be deleted
:type security_group: :class:`OpenStackSecurityGroup`
:rtype: ``bool``
"""
resp = self.connection.request('/os-security-groups/%s' %
(security_group.id),
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def ex_create_security_group_rule(self, security_group, ip_protocol,
from_port, to_port, cidr=None,
source_security_group=None):
"""
Create a new Rule in a Security Group
:param security_group: Security Group in which to add the rule
:type security_group: :class:`OpenStackSecurityGroup`
:param ip_protocol: Protocol to which this rule applies
Examples: tcp, udp, ...
:type ip_protocol: ``str``
:param from_port: First port of the port range
:type from_port: ``int``
:param to_port: Last port of the port range
:type to_port: ``int``
:param cidr: CIDR notation of the source IP range for this rule
:type cidr: ``str``
:param source_security_group: Existing Security Group to use as the
source (instead of CIDR)
        :type source_security_group: :class:`OpenStackSecurityGroup`
:rtype: :class:`OpenStackSecurityGroupRule`
"""
source_security_group_id = None
if type(source_security_group) == OpenStackSecurityGroup:
source_security_group_id = source_security_group.id
return self._to_security_group_rule(self.connection.request(
'/os-security-group-rules', method='POST',
data={'security_group_rule': {
'ip_protocol': ip_protocol,
'from_port': from_port,
'to_port': to_port,
'cidr': cidr,
'group_id': source_security_group_id,
'parent_group_id': security_group.id}}
).object['security_group_rule'])
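    # Hedged usage sketch (illustrative comment; `sg` is an assumed
    # OpenStackSecurityGroup): allow inbound SSH from anywhere with
    #   driver.ex_create_security_group_rule(sg, 'tcp', 22, 22,
    #                                        cidr='0.0.0.0/0')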
def ex_delete_security_group_rule(self, rule):
"""
Delete a Rule from a Security Group.
        :param rule: Rule which should be deleted
:type rule: :class:`OpenStackSecurityGroupRule`
:rtype: ``bool``
"""
resp = self.connection.request('/os-security-group-rules/%s' %
(rule.id), method='DELETE')
return resp.status == httplib.NO_CONTENT
def _to_key_pairs(self, obj):
key_pairs = obj['keypairs']
key_pairs = [self._to_key_pair(key_pair['keypair']) for key_pair in
key_pairs]
return key_pairs
def _to_key_pair(self, obj):
key_pair = KeyPair(name=obj['name'],
fingerprint=obj['fingerprint'],
public_key=obj['public_key'],
private_key=obj.get('private_key', None),
driver=self)
return key_pair
def list_key_pairs(self):
response = self.connection.request('/os-keypairs')
key_pairs = self._to_key_pairs(response.object)
return key_pairs
def get_key_pair(self, name):
self.connection.set_context({'key_pair_name': name})
response = self.connection.request('/os-keypairs/%s' % (name))
key_pair = self._to_key_pair(response.object['keypair'])
return key_pair
def create_key_pair(self, name):
data = {'keypair': {'name': name}}
response = self.connection.request('/os-keypairs', method='POST',
data=data)
key_pair = self._to_key_pair(response.object['keypair'])
return key_pair
def import_key_pair_from_string(self, name, key_material):
data = {'keypair': {'name': name, 'public_key': key_material}}
response = self.connection.request('/os-keypairs', method='POST',
data=data)
key_pair = self._to_key_pair(response.object['keypair'])
return key_pair
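    # Hedged usage sketch (illustrative comment; the key name and material
    # are hypothetical):
    #   kp = driver.import_key_pair_from_string('deploy',
    #                                           'ssh-rsa AAAA... user@host')
    #   driver.delete_key_pair(kp)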
def delete_key_pair(self, key_pair):
"""
Delete a KeyPair.
        :param key_pair: KeyPair to delete
        :type key_pair: :class:`OpenStackKeyPair`
:rtype: ``bool``
"""
response = self.connection.request('/os-keypairs/%s' % (key_pair.name),
method='DELETE')
return response.status == httplib.ACCEPTED
def ex_list_keypairs(self):
"""
Get a list of KeyPairs that are available.
:rtype: ``list`` of :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'list_key_pairs method')
return self.list_key_pairs()
def ex_create_keypair(self, name):
"""
Create a new KeyPair
:param name: Name of the new KeyPair
:type name: ``str``
:rtype: :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'create_key_pair method')
return self.create_key_pair(name=name)
def ex_import_keypair(self, name, keyfile):
"""
Import a KeyPair from a file
:param name: Name of the new KeyPair
:type name: ``str``
:param keyfile: Path to the public key file (in OpenSSH format)
:type keyfile: ``str``
:rtype: :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'import_key_pair_from_file method')
return self.import_key_pair_from_file(name=name, key_file_path=keyfile)
def ex_import_keypair_from_string(self, name, key_material):
"""
Import a KeyPair from a string
:param name: Name of the new KeyPair
:type name: ``str``
:param key_material: Public key (in OpenSSH format)
:type key_material: ``str``
:rtype: :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'import_key_pair_from_string method')
return self.import_key_pair_from_string(name=name,
key_material=key_material)
def ex_delete_keypair(self, keypair):
"""
Delete a KeyPair.
:param keypair: KeyPair to delete
:type keypair: :class:`OpenStackKeyPair`
:rtype: ``bool``
"""
warnings.warn('This method has been deprecated in favor of '
'delete_key_pair method')
return self.delete_key_pair(key_pair=keypair)
def ex_get_size(self, size_id):
"""
Get a NodeSize
:param size_id: ID of the size which should be used
:type size_id: ``str``
:rtype: :class:`NodeSize`
"""
return self._to_size(self.connection.request(
            '/flavors/%s' % (size_id,)).object['flavor'])
def get_image(self, image_id):
"""
Get a NodeImage
@inherits: :class:`NodeDriver.get_image`
:param image_id: ID of the image which should be used
:type image_id: ``str``
:rtype: :class:`NodeImage`
"""
return self._to_image(self.connection.request(
'/images/%s' % (image_id,)).object['image'])
def delete_image(self, image):
"""
Delete a NodeImage
@inherits: :class:`NodeDriver.delete_image`
        :param image: image which should be deleted
:type image: :class:`NodeImage`
:rtype: ``bool``
"""
resp = self.connection.request('/images/%s' % (image.id,),
method='DELETE')
return resp.status == httplib.NO_CONTENT
def _node_action(self, node, action, **params):
params = params or None
return self.connection.request('/servers/%s/action' % (node.id,),
method='POST', data={action: params})
def _update_node(self, node, **node_updates):
"""
Updates the editable attributes of a server, which currently include
its name and IPv4/IPv6 access addresses.
"""
return self._to_node(
self.connection.request(
'/servers/%s' % (node.id,), method='PUT',
data={'server': node_updates}
).object['server']
)
def _to_node_from_obj(self, obj):
return self._to_node(obj['server'])
def _to_node(self, api_node):
public_networks_labels = ['public', 'internet']
public_ips, private_ips = [], []
for label, values in api_node['addresses'].items():
for value in values:
ip = value['addr']
is_public_ip = False
try:
is_public_ip = is_public_subnet(ip)
except:
# IPv6
# Openstack Icehouse sets 'OS-EXT-IPS:type' to 'floating'
# for public and 'fixed' for private
explicit_ip_type = value.get('OS-EXT-IPS:type', None)
if label in public_networks_labels:
is_public_ip = True
elif explicit_ip_type == 'floating':
is_public_ip = True
elif explicit_ip_type == 'fixed':
is_public_ip = False
if is_public_ip:
public_ips.append(ip)
else:
private_ips.append(ip)
# Sometimes 'image' attribute is not present if the node is in an error
# state
image = api_node.get('image', None)
image_id = image.get('id', None) if image else None
config_drive = api_node.get("config_drive", False)
volumes_attached = api_node.get('os-extended-volumes:volumes_attached')
created = parse_date(api_node["created"])
return Node(
id=api_node['id'],
name=api_node['name'],
state=self.NODE_STATE_MAP.get(api_node['status'],
NodeState.UNKNOWN),
public_ips=public_ips,
private_ips=private_ips,
created_at=created,
driver=self,
extra=dict(
addresses=api_node['addresses'],
hostId=api_node['hostId'],
access_ip=api_node.get('accessIPv4'),
access_ipv6=api_node.get('accessIPv6', None),
                # Docs say "tenantId", but the actual key is "tenant_id".
                # *sigh* Best handle both.
tenantId=api_node.get('tenant_id') or api_node['tenantId'],
userId=api_node.get('user_id', None),
imageId=image_id,
flavorId=api_node['flavor']['id'],
uri=next(link['href'] for link in api_node['links'] if
link['rel'] == 'self'),
service_name=self.connection.get_service_name(),
metadata=api_node['metadata'],
password=api_node.get('adminPass', None),
created=api_node['created'],
updated=api_node['updated'],
key_name=api_node.get('key_name', None),
disk_config=api_node.get('OS-DCF:diskConfig', None),
config_drive=config_drive,
availability_zone=api_node.get('OS-EXT-AZ:availability_zone'),
volumes_attached=volumes_attached,
task_state=api_node.get("OS-EXT-STS:task_state", None),
vm_state=api_node.get("OS-EXT-STS:vm_state", None),
power_state=api_node.get("OS-EXT-STS:power_state", None),
progress=api_node.get("progress", None),
fault=api_node.get('fault')
),
)
def _to_volume(self, api_node):
if 'volume' in api_node:
api_node = api_node['volume']
state = self.VOLUME_STATE_MAP.get(api_node['status'],
StorageVolumeState.UNKNOWN)
return StorageVolume(
id=api_node['id'],
name=api_node['displayName'],
size=api_node['size'],
state=state,
driver=self,
extra={
'description': api_node['displayDescription'],
'attachments': [att for att in api_node['attachments'] if att],
# TODO: remove in 1.18.0
'state': api_node.get('status', None),
'snapshot_id': api_node.get('snapshotId', None),
'location': api_node.get('availabilityZone', None),
'volume_type': api_node.get('volumeType', None),
'metadata': api_node.get('metadata', None),
'created_at': api_node.get('createdAt', None)
}
)
def _to_snapshot(self, data):
if 'snapshot' in data:
data = data['snapshot']
volume_id = data.get('volume_id', data.get('volumeId', None))
display_name = data.get('display_name', data.get('displayName', None))
created_at = data.get('created_at', data.get('createdAt', None))
description = data.get('display_description',
data.get('displayDescription', None))
status = data.get('status', None)
extra = {'volume_id': volume_id,
'name': display_name,
'created': created_at,
'description': description,
'status': status}
state = self.SNAPSHOT_STATE_MAP.get(
status,
VolumeSnapshotState.UNKNOWN
)
try:
created_dt = parse_date(created_at)
except ValueError:
created_dt = None
snapshot = VolumeSnapshot(id=data['id'], driver=self,
size=data['size'], extra=extra,
created=created_dt, state=state,
name=display_name)
return snapshot
def _to_size(self, api_flavor, price=None, bandwidth=None):
        # If provider-specific subclasses can get better values for
        # price/bandwidth, they can pass them in when they call super().
if not price:
price = self._get_size_price(str(api_flavor['id']))
extra = api_flavor.get('OS-FLV-WITH-EXT-SPECS:extra_specs', {})
return OpenStackNodeSize(
id=api_flavor['id'],
name=api_flavor['name'],
ram=api_flavor['ram'],
disk=api_flavor['disk'],
vcpus=api_flavor['vcpus'],
ephemeral_disk=api_flavor.get('OS-FLV-EXT-DATA:ephemeral', None),
swap=api_flavor['swap'],
extra=extra,
bandwidth=bandwidth,
price=price,
driver=self,
)
def _get_size_price(self, size_id):
try:
return get_size_price(
driver_type='compute',
driver_name=self.api_name,
size_id=size_id,
)
except KeyError:
            return 0.0
def _extract_image_id_from_url(self, location_header):
path = urlparse.urlparse(location_header).path
image_id = path.split('/')[-1]
return image_id
def ex_rescue(self, node, password=None):
# Requires Rescue Mode extension
"""
Rescue a node
:param node: node
:type node: :class:`Node`
:param password: password
:type password: ``str``
:rtype: :class:`Node`
"""
if password:
resp = self._node_action(node, 'rescue', adminPass=password)
else:
resp = self._node_action(node, 'rescue')
password = json.loads(resp.body)['adminPass']
node.extra['password'] = password
return node
def ex_unrescue(self, node):
"""
Unrescue a node
:param node: node
:type node: :class:`Node`
:rtype: ``bool``
"""
resp = self._node_action(node, 'unrescue')
return resp.status == httplib.ACCEPTED
def _to_floating_ip_pools(self, obj):
pool_elements = obj['floating_ip_pools']
return [self._to_floating_ip_pool(pool) for pool in pool_elements]
def _to_floating_ip_pool(self, obj):
return OpenStack_1_1_FloatingIpPool(obj['name'], self.connection)
def ex_list_floating_ip_pools(self):
"""
List available floating IP pools
:rtype: ``list`` of :class:`OpenStack_1_1_FloatingIpPool`
"""
return self._to_floating_ip_pools(
self.connection.request('/os-floating-ip-pools').object)
def _to_floating_ips(self, obj):
ip_elements = obj['floating_ips']
return [self._to_floating_ip(ip) for ip in ip_elements]
def _to_floating_ip(self, obj):
return OpenStack_1_1_FloatingIpAddress(id=obj['id'],
ip_address=obj['ip'],
pool=None,
node_id=obj['instance_id'],
driver=self)
def ex_list_floating_ips(self):
"""
List floating IPs
:rtype: ``list`` of :class:`OpenStack_1_1_FloatingIpAddress`
"""
return self._to_floating_ips(
self.connection.request('/os-floating-ips').object)
def ex_get_floating_ip(self, ip):
"""
Get specified floating IP
:param ip: floating IP to get
:type ip: ``str``
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
floating_ips = self.ex_list_floating_ips()
ip_obj, = [x for x in floating_ips if x.ip_address == ip]
return ip_obj
def ex_create_floating_ip(self, ip_pool=None):
"""
        Create a new floating IP. The ip_pool argument may be omitted only if
        your infrastructure has a single IP pool available.
:param ip_pool: name of the floating IP pool
:type ip_pool: ``str``
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
data = {'pool': ip_pool} if ip_pool is not None else {}
resp = self.connection.request('/os-floating-ips',
method='POST',
data=data)
data = resp.object['floating_ip']
id = data['id']
ip_address = data['ip']
return OpenStack_1_1_FloatingIpAddress(id=id,
ip_address=ip_address,
pool=None,
node_id=None,
driver=self)
def ex_delete_floating_ip(self, ip):
"""
Delete specified floating IP
:param ip: floating IP to remove
:type ip: :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
resp = self.connection.request('/os-floating-ips/%s' % ip.id,
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def ex_attach_floating_ip_to_node(self, node, ip):
"""
Attach the floating IP to the node
:param node: node
:type node: :class:`Node`
:param ip: floating IP to attach
:type ip: ``str`` or :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
address = ip.ip_address if hasattr(ip, 'ip_address') else ip
data = {
'addFloatingIp': {'address': address}
}
resp = self.connection.request('/servers/%s/action' % node.id,
method='POST', data=data)
return resp.status == httplib.ACCEPTED
def ex_detach_floating_ip_from_node(self, node, ip):
"""
Detach the floating IP from the node
:param node: node
:type node: :class:`Node`
:param ip: floating IP to remove
:type ip: ``str`` or :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
address = ip.ip_address if hasattr(ip, 'ip_address') else ip
data = {
'removeFloatingIp': {'address': address}
}
resp = self.connection.request('/servers/%s/action' % node.id,
method='POST', data=data)
return resp.status == httplib.ACCEPTED
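    # Hedged usage sketch (illustrative comment): a common workflow is
    #   fip = driver.ex_create_floating_ip()   # from the default pool
    #   driver.ex_attach_floating_ip_to_node(node, fip)
    #   driver.ex_detach_floating_ip_from_node(node, fip)
    #   driver.ex_delete_floating_ip(fip)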
def ex_get_metadata_for_node(self, node):
"""
Return the metadata associated with the node.
:param node: Node instance
:type node: :class:`Node`
:return: A dictionary or other mapping of strings to strings,
associating tag names with tag values.
        :rtype: ``dict``
"""
return node.extra['metadata']
def ex_pause_node(self, node):
return self._post_simple_node_action(node, 'pause')
def ex_unpause_node(self, node):
return self._post_simple_node_action(node, 'unpause')
def ex_stop_node(self, node):
return self._post_simple_node_action(node, 'os-stop')
def ex_start_node(self, node):
return self._post_simple_node_action(node, 'os-start')
def ex_suspend_node(self, node):
return self._post_simple_node_action(node, 'suspend')
def ex_resume_node(self, node):
return self._post_simple_node_action(node, 'resume')
def _post_simple_node_action(self, node, action):
""" Post a simple, data-less action to the OS node action endpoint
        :param `Node` node: the node to act on
        :param str action: the action to call
:return `bool`: a boolean that indicates success
"""
uri = '/servers/{node_id}/action'.format(node_id=node.id)
resp = self.connection.request(uri, method='POST', data={action: None})
return resp.status == httplib.ACCEPTED
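    # Hedged usage note (illustrative comment): each ex_*_node helper above
    # posts a single data-less action to /servers/<id>/action, e.g.
    # ex_stop_node(node) sends {"os-stop": null} and returns True on a
    # 202 ACCEPTED response.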
class OpenStack_1_1_FloatingIpPool(object):
"""
Floating IP Pool info.
"""
def __init__(self, name, connection):
self.name = name
self.connection = connection
def list_floating_ips(self):
"""
List floating IPs in the pool
:rtype: ``list`` of :class:`OpenStack_1_1_FloatingIpAddress`
"""
return self._to_floating_ips(
self.connection.request('/os-floating-ips').object)
def _to_floating_ips(self, obj):
ip_elements = obj['floating_ips']
return [self._to_floating_ip(ip) for ip in ip_elements]
def _to_floating_ip(self, obj):
return OpenStack_1_1_FloatingIpAddress(id=obj['id'],
ip_address=obj['ip'],
pool=self,
node_id=obj['instance_id'],
driver=self.connection.driver)
def get_floating_ip(self, ip):
"""
Get specified floating IP from the pool
:param ip: floating IP to get
:type ip: ``str``
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
ip_obj, = [x for x in self.list_floating_ips() if x.ip_address == ip]
return ip_obj
def create_floating_ip(self):
"""
Create new floating IP in the pool
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
resp = self.connection.request('/os-floating-ips',
method='POST',
data={'pool': self.name})
data = resp.object['floating_ip']
id = data['id']
ip_address = data['ip']
return OpenStack_1_1_FloatingIpAddress(id=id,
ip_address=ip_address,
pool=self,
node_id=None,
driver=self.connection.driver)
def delete_floating_ip(self, ip):
"""
Delete specified floating IP from the pool
:param ip: floating IP to remove
        :type ip: :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
resp = self.connection.request('/os-floating-ips/%s' % ip.id,
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def __repr__(self):
return ('<OpenStack_1_1_FloatingIpPool: name=%s>' % self.name)
class OpenStack_1_1_FloatingIpAddress(object):
"""
Floating IP info.
"""
def __init__(self, id, ip_address, pool, node_id=None, driver=None):
self.id = str(id)
self.ip_address = ip_address
self.pool = pool
self.node_id = node_id
self.driver = driver
def delete(self):
"""
Delete this floating IP
:rtype: ``bool``
"""
if self.pool is not None:
return self.pool.delete_floating_ip(self)
elif self.driver is not None:
return self.driver.ex_delete_floating_ip(self)
def __repr__(self):
return ('<OpenStack_1_1_FloatingIpAddress: id=%s, ip_addr=%s,'
' pool=%s, driver=%s>'
% (self.id, self.ip_address, self.pool, self.driver))
|
apache-2.0
|
bsmr-Bitcraze/crazyflie-clients-python
|
src/cfclient/utils/singleton.py
|
9
|
1513
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Singleton class.
"""
__author__ = 'Bitcraze AB'
__all__ = ['Singleton']
class Singleton(type):
"""Class for making singletons"""
_instances = {}
def __call__(cls, *args, **kwargs):
"""Called when creating new class"""
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(
*args, **kwargs)
return cls._instances[cls]
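if __name__ == '__main__':
    # Hedged demo (added for illustration; not part of the original module):
    # build a throwaway class through the metaclass directly, which works on
    # both Python 2 and Python 3, and show that instantiating it twice
    # yields the same object.
    _Demo = Singleton('_Demo', (object,), {})
    assert _Demo() is _Demo()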
|
gpl-2.0
|
xingyepei/edx-platform
|
lms/djangoapps/bulk_email/tests/test_tasks.py
|
12
|
21247
|
# -*- coding: utf-8 -*-
"""
Unit tests for LMS instructor-initiated background tasks.
Runs tasks on answers to course problems to validate that code
paths actually work.
"""
import json
from uuid import uuid4
from itertools import cycle, chain, repeat
from mock import patch, Mock
from nose.plugins.attrib import attr
from smtplib import SMTPServerDisconnected, SMTPDataError, SMTPConnectError, SMTPAuthenticationError
from boto.ses.exceptions import (
SESAddressNotVerifiedError,
SESIdentityNotVerifiedError,
SESDomainNotConfirmedError,
SESAddressBlacklistedError,
SESDailyQuotaExceededError,
SESMaxSendingRateExceededError,
SESDomainEndsWithDotError,
SESLocalAddressCharacterError,
SESIllegalAddressError,
)
from boto.exception import AWSConnectionError
from celery.states import SUCCESS, FAILURE # pylint: disable=no-name-in-module, import-error
from django.conf import settings
from django.core.management import call_command
from xmodule.modulestore.tests.factories import CourseFactory
from bulk_email.models import CourseEmail, Optout, SEND_TO_ALL
from instructor_task.tasks import send_bulk_course_email
from instructor_task.subtasks import update_subtask_status, SubtaskStatus
from instructor_task.models import InstructorTask
from instructor_task.tests.test_base import InstructorTaskCourseTestCase
from instructor_task.tests.factories import InstructorTaskFactory
from opaque_keys.edx.locations import SlashSeparatedCourseKey
class TestTaskFailure(Exception):
"""Dummy exception used for unit tests."""
pass
def my_update_subtask_status(entry_id, current_task_id, new_subtask_status):
"""
    Check whether a subtask has already been updated before really updating.
    In particular, check whether a subtask which has been retried has had the
    retry already write its results here before the code that invoked the
    retry had a chance to update this status.
This is the norm in "eager" mode (used by tests) where the retry is called
and run to completion before control is returned to the code that
invoked the retry. If the retries eventually end in failure (e.g. due to
a maximum number of retries being attempted), the "eager" code will return
the error for each retry as it is popped off the stack. We want to just ignore
the later updates that are called as the result of the earlier retries.
This should not be an issue in production, where status is updated before
a task is retried, and is then updated afterwards if the retry fails.
"""
entry = InstructorTask.objects.get(pk=entry_id)
subtask_dict = json.loads(entry.subtasks)
subtask_status_info = subtask_dict['status']
current_subtask_status = SubtaskStatus.from_dict(subtask_status_info[current_task_id])
current_retry_count = current_subtask_status.get_retry_count()
new_retry_count = new_subtask_status.get_retry_count()
if current_retry_count <= new_retry_count:
update_subtask_status(entry_id, current_task_id, new_subtask_status)
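# Hedged illustration (comment only): in eager mode the innermost retry may
# record, say, retry_count=3 before the unwinding callers try to write their
# stale retry_count=2 and retry_count=1 statuses; the guard above
# (3 <= 2 is False) silently drops those later, out-of-date updates.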
@attr('shard_1')
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message'))
class TestBulkEmailInstructorTask(InstructorTaskCourseTestCase):
"""Tests instructor task that send bulk email."""
def setUp(self):
super(TestBulkEmailInstructorTask, self).setUp()
self.initialize_course()
self.instructor = self.create_instructor('instructor')
# load initial content (since we don't run migrations as part of tests):
call_command("loaddata", "course_email_template.json")
def _create_input_entry(self, course_id=None):
"""
Creates a InstructorTask entry for testing.
Overrides the base class version in that this creates CourseEmail.
"""
to_option = SEND_TO_ALL
course_id = course_id or self.course.id
course_email = CourseEmail.create(
course_id, self.instructor, to_option, "Test Subject", "<p>This is a test message</p>"
)
task_input = {'email_id': course_email.id} # pylint: disable=no-member
task_id = str(uuid4())
instructor_task = InstructorTaskFactory.create(
course_id=course_id,
requester=self.instructor,
task_input=json.dumps(task_input),
task_key='dummy value',
task_id=task_id,
)
return instructor_task
def _run_task_with_mock_celery(self, task_class, entry_id, task_id):
"""Mock was not needed for some tests, testing to see if it's needed at all."""
task_args = [entry_id, {}]
return task_class.apply(task_args, task_id=task_id).get()
def test_email_missing_current_task(self):
task_entry = self._create_input_entry()
with self.assertRaises(ValueError):
send_bulk_course_email(task_entry.id, {})
def test_email_undefined_course(self):
# Check that we fail when passing in a course that doesn't exist.
task_entry = self._create_input_entry(course_id=SlashSeparatedCourseKey("bogus", "course", "id"))
with self.assertRaises(ValueError):
self._run_task_with_mock_celery(send_bulk_course_email, task_entry.id, task_entry.task_id)
def test_bad_task_id_on_update(self):
task_entry = self._create_input_entry()
def dummy_update_subtask_status(entry_id, _current_task_id, new_subtask_status):
"""Passes a bad value for task_id to test update_subtask_status"""
bogus_task_id = "this-is-bogus"
update_subtask_status(entry_id, bogus_task_id, new_subtask_status)
with self.assertRaises(ValueError):
with patch('bulk_email.tasks.update_subtask_status', dummy_update_subtask_status):
send_bulk_course_email(task_entry.id, {})
def _create_students(self, num_students):
"""Create students for testing"""
return [self.create_student('robot%d' % i) for i in xrange(num_students)]
def _assert_single_subtask_status(self, entry, succeeded, failed=0, skipped=0, retried_nomax=0, retried_withmax=0):
"""Compare counts with 'subtasks' entry in InstructorTask table."""
subtask_info = json.loads(entry.subtasks)
# verify subtask-level counts:
self.assertEquals(subtask_info.get('total'), 1)
self.assertEquals(subtask_info.get('succeeded'), 1 if succeeded > 0 else 0)
self.assertEquals(subtask_info.get('failed'), 0 if succeeded > 0 else 1)
# verify individual subtask status:
subtask_status_info = subtask_info.get('status')
task_id_list = subtask_status_info.keys()
self.assertEquals(len(task_id_list), 1)
task_id = task_id_list[0]
subtask_status = subtask_status_info.get(task_id)
print "Testing subtask status: {}".format(subtask_status)
self.assertEquals(subtask_status.get('task_id'), task_id)
self.assertEquals(subtask_status.get('attempted'), succeeded + failed)
self.assertEquals(subtask_status.get('succeeded'), succeeded)
self.assertEquals(subtask_status.get('skipped'), skipped)
self.assertEquals(subtask_status.get('failed'), failed)
self.assertEquals(subtask_status.get('retried_nomax'), retried_nomax)
self.assertEquals(subtask_status.get('retried_withmax'), retried_withmax)
self.assertEquals(subtask_status.get('state'), SUCCESS if succeeded > 0 else FAILURE)
def _test_run_with_task(
self, task_class, action_name, total, succeeded,
failed=0, skipped=0, retried_nomax=0, retried_withmax=0):
"""Run a task and check the number of emails processed."""
task_entry = self._create_input_entry()
parent_status = self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)
# check return value
self.assertEquals(parent_status.get('total'), total)
self.assertEquals(parent_status.get('action_name'), action_name)
# compare with task_output entry in InstructorTask table:
entry = InstructorTask.objects.get(id=task_entry.id)
status = json.loads(entry.task_output)
self.assertEquals(status.get('attempted'), succeeded + failed)
self.assertEquals(status.get('succeeded'), succeeded)
self.assertEquals(status.get('skipped'), skipped)
self.assertEquals(status.get('failed'), failed)
self.assertEquals(status.get('total'), total)
self.assertEquals(status.get('action_name'), action_name)
self.assertGreater(status.get('duration_ms'), 0)
self.assertEquals(entry.task_state, SUCCESS)
self._assert_single_subtask_status(entry, succeeded, failed, skipped, retried_nomax, retried_withmax)
return entry
def test_successful(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
self._create_students(num_emails - 1)
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails, num_emails)
def test_successful_twice(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
self._create_students(num_emails - 1)
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
task_entry = self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails, num_emails)
# submit the same task a second time, and confirm that it is not run again.
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([Exception("This should not happen!")])
parent_status = self._run_task_with_mock_celery(send_bulk_course_email, task_entry.id, task_entry.task_id)
self.assertEquals(parent_status.get('total'), num_emails)
self.assertEquals(parent_status.get('succeeded'), num_emails)
self.assertEquals(parent_status.get('failed'), 0)
def test_unactivated_user(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
students = self._create_students(num_emails - 1)
# mark a student as not yet having activated their email:
student = students[0]
student.is_active = False
student.save()
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails - 1, num_emails - 1)
def test_skipped(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
students = self._create_students(num_emails - 1)
# have every fourth student optout:
expected_skipped = int((num_emails + 3) / 4.0)
expected_succeeds = num_emails - expected_skipped
for index in range(0, num_emails, 4):
Optout.objects.create(user=students[index], course_id=self.course.id)
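        # Hedged arithmetic check (comment only): with num_emails = 10 the
        # range(0, 10, 4) loop opts out indices 0, 4 and 8, matching
        # expected_skipped = int((10 + 3) / 4.0) = 3.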
# mark some students as opting out
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(
send_bulk_course_email, 'emailed', num_emails, expected_succeeds, skipped=expected_skipped
)
def _test_email_address_failures(self, exception):
"""Test that celery handles bad address errors by failing and not retrying."""
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = int((num_emails + 3) / 4.0)
expected_succeeds = num_emails - expected_fails
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# have every fourth email fail due to some address failure:
get_conn.return_value.send_messages.side_effect = cycle([exception, None, None, None])
self._test_run_with_task(
send_bulk_course_email, 'emailed', num_emails, expected_succeeds, failed=expected_fails
)
def test_smtp_blacklisted_user(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SMTPDataError(554, "Email address is blacklisted"))
def test_ses_blacklisted_user(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESAddressBlacklistedError(554, "Email address is blacklisted"))
def test_ses_illegal_address(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESIllegalAddressError(554, "Email address is illegal"))
def test_ses_local_address_character_error(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESLocalAddressCharacterError(554, "Email address contains a bad character"))
def test_ses_domain_ends_with_dot(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESDomainEndsWithDotError(554, "Email address ends with a dot"))
def _test_retry_after_limited_retry_error(self, exception):
"""Test that celery handles connection failures by retrying."""
# If we want the batch to succeed, we need to send fewer emails
# than the max retries, so that the max is not triggered.
num_emails = settings.BULK_EMAIL_MAX_RETRIES
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = 0
expected_succeeds = num_emails
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# Have every other mail attempt fail due to disconnection.
get_conn.return_value.send_messages.side_effect = cycle([exception, None])
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
retried_withmax=num_emails
)
def _test_max_retry_limit_causes_failure(self, exception):
"""Test that celery can hit a maximum number of retries."""
# Doesn't really matter how many recipients, since we expect
# to fail on the first.
num_emails = 10
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = num_emails
expected_succeeds = 0
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# always fail to connect, triggering repeated retries until limit is hit:
get_conn.return_value.send_messages.side_effect = cycle([exception])
with patch('bulk_email.tasks.update_subtask_status', my_update_subtask_status):
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
retried_withmax=(settings.BULK_EMAIL_MAX_RETRIES + 1)
)
def test_retry_after_smtp_disconnect(self):
self._test_retry_after_limited_retry_error(SMTPServerDisconnected(425, "Disconnecting"))
def test_max_retry_after_smtp_disconnect(self):
self._test_max_retry_limit_causes_failure(SMTPServerDisconnected(425, "Disconnecting"))
def test_retry_after_smtp_connect_error(self):
self._test_retry_after_limited_retry_error(SMTPConnectError(424, "Bad Connection"))
def test_max_retry_after_smtp_connect_error(self):
self._test_max_retry_limit_causes_failure(SMTPConnectError(424, "Bad Connection"))
def test_retry_after_aws_connect_error(self):
self._test_retry_after_limited_retry_error(
AWSConnectionError("Unable to provide secure connection through proxy")
)
def test_max_retry_after_aws_connect_error(self):
self._test_max_retry_limit_causes_failure(
AWSConnectionError("Unable to provide secure connection through proxy")
)
def test_retry_after_general_error(self):
self._test_retry_after_limited_retry_error(Exception("This is some random exception."))
def test_max_retry_after_general_error(self):
self._test_max_retry_limit_causes_failure(Exception("This is some random exception."))
def _test_retry_after_unlimited_retry_error(self, exception):
"""Test that celery handles throttling failures by retrying."""
num_emails = 8
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = 0
expected_succeeds = num_emails
# Note that because celery in eager mode will call retries synchronously,
# each retry will increase the stack depth. It turns out that there is a
# maximum depth at which a RuntimeError is raised ("maximum recursion depth
# exceeded"). The maximum recursion depth is 90, so
# num_emails * expected_retries < 90.
expected_retries = 10
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# Cycle through N throttling errors followed by a success.
get_conn.return_value.send_messages.side_effect = cycle(
chain(repeat(exception, expected_retries), [None])
)
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
retried_nomax=(expected_retries * num_emails)
)
def test_retry_after_smtp_throttling_error(self):
self._test_retry_after_unlimited_retry_error(SMTPDataError(455, "Throttling: Sending rate exceeded"))
def test_retry_after_ses_throttling_error(self):
self._test_retry_after_unlimited_retry_error(
SESMaxSendingRateExceededError(455, "Throttling: Sending rate exceeded")
)
def _test_immediate_failure(self, exception):
"""Test that celery can hit a maximum number of retries."""
# Doesn't really matter how many recipients, since we expect
# to fail on the first.
num_emails = 10
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = num_emails
expected_succeeds = 0
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# always fail to connect, triggering repeated retries until limit is hit:
get_conn.return_value.send_messages.side_effect = cycle([exception])
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
)
def test_failure_on_unhandled_smtp(self):
self._test_immediate_failure(SMTPAuthenticationError(403, "That password doesn't work!"))
def test_failure_on_ses_quota_exceeded(self):
self._test_immediate_failure(SESDailyQuotaExceededError(403, "You're done for the day!"))
def test_failure_on_ses_address_not_verified(self):
self._test_immediate_failure(SESAddressNotVerifiedError(403, "Who *are* you?"))
def test_failure_on_ses_identity_not_verified(self):
self._test_immediate_failure(SESIdentityNotVerifiedError(403, "May I please see an ID!"))
def test_failure_on_ses_domain_not_confirmed(self):
self._test_immediate_failure(SESDomainNotConfirmedError(403, "You're out of bounds!"))
def test_bulk_emails_with_unicode_course_image_name(self):
# Test bulk email with unicode characters in course image name
course_image = u'在淡水測試.jpg'
self.course = CourseFactory.create(course_image=course_image)
num_emails = 1
self._create_students(num_emails)
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails, num_emails)
|
agpl-3.0
|
dgquintas/grpc
|
src/python/grpcio_tests/tests/interop/server.py
|
5
|
2233
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC interoperability test server."""
import argparse
from concurrent import futures
import logging
import time
import grpc
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import methods
from tests.interop import resources
from tests.unit import test_common
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
_LOGGER = logging.getLogger(__name__)
def serve():
parser = argparse.ArgumentParser()
parser.add_argument(
'--port', type=int, required=True, help='the port on which to serve')
parser.add_argument(
'--use_tls',
default=False,
type=resources.parse_bool,
help='require a secure connection')
args = parser.parse_args()
server = test_common.test_server()
test_pb2_grpc.add_TestServiceServicer_to_server(methods.TestService(),
server)
if args.use_tls:
private_key = resources.private_key()
certificate_chain = resources.certificate_chain()
credentials = grpc.ssl_server_credentials(((private_key,
certificate_chain),))
server.add_secure_port('[::]:{}'.format(args.port), credentials)
else:
server.add_insecure_port('[::]:{}'.format(args.port))
server.start()
_LOGGER.info('Server serving.')
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except BaseException as e:
_LOGGER.info('Caught exception "%s"; stopping server...', e)
server.stop(None)
_LOGGER.info('Server stopped; exiting.')
if __name__ == '__main__':
serve()
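# Hedged usage note (comment only; the port below is hypothetical):
#   python -m tests.interop.server --port 8080
# starts an insecure server, while adding --use_tls true serves with the
# bundled test credentials instead.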
|
apache-2.0
|
adambrenecki/django
|
django/contrib/messages/tests/test_fallback.py
|
234
|
7033
|
from django.contrib.messages import constants
from django.contrib.messages.storage.fallback import (FallbackStorage,
CookieStorage)
from django.contrib.messages.tests.base import BaseTests
from django.contrib.messages.tests.test_cookie import (set_cookie_data,
stored_cookie_messages_count)
from django.contrib.messages.tests.test_session import (set_session_data,
stored_session_messages_count)
from django.test import TestCase
class FallbackTest(BaseTests, TestCase):
storage_class = FallbackStorage
def get_request(self):
self.session = {}
request = super(FallbackTest, self).get_request()
request.session = self.session
return request
def get_cookie_storage(self, storage):
return storage.storages[-2]
def get_session_storage(self, storage):
return storage.storages[-1]
def stored_cookie_messages_count(self, storage, response):
return stored_cookie_messages_count(self.get_cookie_storage(storage),
response)
def stored_session_messages_count(self, storage, response):
return stored_session_messages_count(self.get_session_storage(storage))
def stored_messages_count(self, storage, response):
"""
Return the storage totals from both cookie and session backends.
"""
total = (self.stored_cookie_messages_count(storage, response) +
self.stored_session_messages_count(storage, response))
return total
def test_get(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
# Set initial cookie data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, example_messages)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
# Test that the message actually contains what we expect.
self.assertEqual(list(storage), example_messages)
def test_get_empty(self):
request = self.get_request()
storage = self.storage_class(request)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
# Test that the message actually contains what we expect.
self.assertEqual(list(storage), [])
def test_get_fallback(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, example_messages[:4] +
[CookieStorage.not_finished])
set_session_data(session_storage, example_messages[4:])
# Test that the message actually contains what we expect.
self.assertEqual(list(storage), example_messages)
def test_get_fallback_only(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, [CookieStorage.not_finished],
encode_empty=True)
set_session_data(session_storage, example_messages)
# Test that the message actually contains what we expect.
self.assertEqual(list(storage), example_messages)
def test_flush_used_backends(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
set_cookie_data(cookie_storage, ['cookie', CookieStorage.not_finished])
set_session_data(session_storage, ['session'])
# When updating, previously used but no longer needed backends are
# flushed.
response = self.get_response()
list(storage)
storage.update(response)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_no_fallback(self):
"""
Confirms that:
(1) A short number of messages whose data size doesn't exceed what is
allowed in a cookie will all be stored in the CookieBackend.
(2) If the CookieBackend can store all messages, the SessionBackend
won't be written to at all.
"""
storage = self.get_storage()
response = self.get_response()
# Overwrite the _store method of the fallback storage to prove it isn't
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._store = None
for i in range(5):
storage.add(constants.INFO, str(i) * 100)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 5)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_session_fallback(self):
"""
Confirms that, if the data exceeds what is allowed in a cookie,
messages which did not fit are stored in the SessionBackend.
"""
storage = self.get_storage()
response = self.get_response()
        # see comment in CookieTest.test_cookie_max_length
msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
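        # Hedged arithmetic note (comment only): with the default
        # max_cookie_size of 4096 this is int((4096 - 54) / 4.5 - 37) == 861
        # characters per message.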
for i in range(5):
storage.add(constants.INFO, str(i) * msg_size)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 4)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
def test_session_fallback_only(self):
"""
Confirms that large messages, none of which fit in a cookie, are stored
in the SessionBackend (and nothing is stored in the CookieBackend).
"""
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'x' * 5000)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 0)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
|
bsd-3-clause
|
FlyCamel/enjarify
|
enjarify/dalvikformats.py
|
30
|
4154
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import util
# Code for parsing the various Dalvik opcode formats
INSTRUCTION_FORMAT = util.keysToRanges({
0x00: '10x',
0x01: '12x',
0x02: '22x',
0x03: '32x',
0x04: '12x',
0x05: '22x',
0x06: '32x',
0x07: '12x',
0x08: '22x',
0x09: '32x',
0x0a: '11x',
0x0b: '11x',
0x0c: '11x',
0x0d: '11x',
0x0e: '10x',
0x0f: '11x',
0x10: '11x',
0x11: '11x',
0x12: '11n',
0x13: '21s',
0x14: '31i',
0x15: '21h',
0x16: '21s',
0x17: '31i',
0x18: '51l',
0x19: '21h',
0x1a: '21c',
0x1b: '31c',
0x1c: '21c',
0x1d: '11x',
0x1e: '11x',
0x1f: '21c',
0x20: '22c',
0x21: '12x',
0x22: '21c',
0x23: '22c',
0x24: '35c',
0x25: '3rc',
0x26: '31t',
0x27: '11x',
0x28: '10t',
0x29: '20t',
0x2a: '30t',
0x2b: '31t',
0x2c: '31t',
0x2d: '23x',
0x32: '22t',
0x38: '21t',
0x3e: '10x',
0x44: '23x',
0x52: '22c',
0x60: '21c',
0x6e: '35c',
0x73: '10x',
0x74: '3rc',
0x79: '10x',
0x7b: '12x',
0x90: '23x',
0xb0: '12x',
0xd0: '22s',
0xd8: '22b',
0xe3: '10x',
}, 256)
# parsing funcs
def p00op(w): return []
def pBAop(w): return [(w >> 8) & 0xF, w >> 12]
def pAAop(w): return [w >> 8]
def p00opAAAA(w, w2): return [w2]
def pAAopBBBB(w, w2): return [w >> 8, w2]
def pAAopCCBB(w, w2): return [w >> 8, w2 & 0xFF, w2 >> 8]
def pBAopCCCC(w, w2): return [(w >> 8) & 0xF, w >> 12, w2]
def p00opAAAAAAAA(w, w2, w3): return [w2 ^ (w3 << 16)]
def p00opAAAABBBB(w, w2, w3): return [w2, w3]
def pAAopBBBBBBBB(w, w2, w3): return [w >> 8, w2 ^ (w3 << 16)]
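# Hedged worked example (comment only): for a 12x instruction word
# w = 0x21AB, pBAop(0x21AB) returns [(0x21AB >> 8) & 0xF, 0x21AB >> 12],
# i.e. [1, 2]: registers vA = 1 and vB = 2 packed into the high byte.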
def pAGopBBBBFEDC(w, w2, w3):
a = w >> 12
c, d, e, f = (w3) & 0xF, (w3 >> 4) & 0xF, (w3 >> 8) & 0xF, (w3 >> 12) & 0xF
g = (w >> 8) & 0xF
return [w2, [c, d, e, f, g][:a]]
def pAAopBBBBCCCC(w, w2, w3):
a = w >> 8
return [w2, range(w3, w3+a)]
def pAAopBBBBBBBBBBBBBBBB(w, w2, w3, w4, w5):
b = w2 ^ (w3 << 16) ^ (w4 << 32) ^ (w5 << 48)
return [w >> 8, b]
_FUNC = {
'10x': p00op,
'12x': pBAop,
'11n': pBAop,
'11x': pAAop,
'10t': pAAop,
'20t': p00opAAAA,
'22x': pAAopBBBB,
'21t': pAAopBBBB,
'21s': pAAopBBBB,
'21h': pAAopBBBB,
'21c': pAAopBBBB,
'23x': pAAopCCBB,
'22b': pAAopCCBB,
'22t': pBAopCCCC,
'22s': pBAopCCCC,
'22c': pBAopCCCC,
'30t': p00opAAAAAAAA,
'32x': p00opAAAABBBB,
'31i': pAAopBBBBBBBB,
'31t': pAAopBBBBBBBB,
'31c': pAAopBBBBBBBB,
'35c': pAGopBBBBFEDC,
'3rc': pAAopBBBBCCCC,
'51l': pAAopBBBBBBBBBBBBBBBB,
}
def sign(x, bits):
if x >= (1 << (bits-1)):
x -= 1 << bits
return x
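# Hedged illustration (added example; not in the original source): sign()
# reinterprets an unsigned bit pattern as a two's-complement value.
assert sign(0xF, 4) == -1  # 1111 in 4 bits is -1
assert sign(0x7, 4) == 7   # sign bit clear, value unchanged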
def decode(shorts, pos, opcode):
fmt = INSTRUCTION_FORMAT[opcode]
size = int(fmt[0])
results = _FUNC[fmt](*shorts[pos:pos+size])
# Check if we need to sign extend
if fmt[2] == 'n':
results[-1] = sign(results[-1], 4)
elif fmt[2] == 'b' or (fmt[2] == 't' and size == 1):
results[-1] = sign(results[-1], 8)
elif fmt[2] == 's' or (fmt[2] == 't' and size == 2):
results[-1] = sign(results[-1], 16)
elif fmt[2] == 't' and size == 3:
results[-1] = sign(results[-1], 32)
# Hats depend on actual size expected, so we rely on opcode as a hack
if fmt[2] == 'h':
assert(opcode == 0x15 or opcode == 0x19)
results[-1] = results[-1] << (16 if opcode == 0x15 else 48)
# Convert code offsets to actual code position
if fmt[2] == 't':
results[-1] += pos
return pos + size, results
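# Hedged worked example (comment only): decode([0xF112], 0, 0x12) looks up
# format '11n' (size 1), parses registers [1, 15] with pBAop, sign-extends
# the 4-bit literal, and returns (1, [1, -1]) -- i.e. const/4 v1, #-1.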
|
apache-2.0
|
elba7r/lite-system
|
erpnext/schools/utils.py
|
11
|
1848
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
class OverlapError(frappe.ValidationError): pass
def validate_overlap_for(doc, doctype, fieldname, value=None):
"""Checks overlap for specified feild.
:param fieldname: Checks Overlap for this feild
"""
existing = get_overlap_for(doc, doctype, fieldname, value)
if existing:
frappe.throw(_("This {0} conflicts with {1} for {2} {3}").format(doc.doctype, existing.name,
doc.meta.get_label(fieldname) if not value else fieldname , value or doc.get(fieldname)), OverlapError)
def get_overlap_for(doc, doctype, fieldname, value=None):
"""Returns overlaping document for specified feild.
:param fieldname: Checks Overlap for this feild
"""
existing = frappe.db.sql("""select name, from_time, to_time from `tab{0}`
where `{1}`=%(val)s and schedule_date = %(schedule_date)s and
(
(from_time > %(from_time)s and from_time < %(to_time)s) or
(to_time > %(from_time)s and to_time < %(to_time)s) or
(%(from_time)s > from_time and %(from_time)s < to_time) or
(%(from_time)s = from_time and %(to_time)s = to_time))
and name!=%(name)s""".format(doctype, fieldname),
{
"schedule_date": doc.schedule_date,
"val": value or doc.get(fieldname),
"from_time": doc.from_time,
"to_time": doc.to_time,
"name": doc.name or "No Name"
}, as_dict=True)
return existing[0] if existing else None
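# Hedged illustration (comment only): given an existing slot 09:00-10:00 on
# the same schedule_date, a new document spanning 09:30-10:30 matches the
# second condition above (the existing to_time 10:00 falls strictly inside
# the new window), so get_overlap_for reports it as an overlap.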
def validate_duplicate_student(students):
unique_students= []
for stud in students:
if stud.student in unique_students:
frappe.throw(_("Student {0} - {1} appears Multiple times in row {2} & {3}")
.format(stud.student, stud.student_name, unique_students.index(stud.student)+1, stud.idx))
else:
unique_students.append(stud.student)
|
gpl-3.0
|
molobrakos/home-assistant
|
tests/components/alexa/test_flash_briefings.py
|
12
|
3225
|
"""The tests for the Alexa component."""
# pylint: disable=protected-access
import asyncio
import datetime
import pytest
from homeassistant.core import callback
from homeassistant.setup import async_setup_component
from homeassistant.components import alexa
from homeassistant.components.alexa import const
SESSION_ID = "amzn1.echo-api.session.0000000-0000-0000-0000-00000000000"
APPLICATION_ID = "amzn1.echo-sdk-ams.app.000000-d0ed-0000-ad00-000000d00ebe"
REQUEST_ID = "amzn1.echo-api.request.0000000-0000-0000-0000-00000000000"
# pylint: disable=invalid-name
calls = []
NPR_NEWS_MP3_URL = "https://pd.npr.org/anon.npr-mp3/npr/news/newscast.mp3"
@pytest.fixture
def alexa_client(loop, hass, hass_client):
"""Initialize a Home Assistant server for testing this module."""
@callback
def mock_service(call):
calls.append(call)
hass.services.async_register("test", "alexa", mock_service)
assert loop.run_until_complete(async_setup_component(hass, alexa.DOMAIN, {
# Key is here to verify we allow other keys in config too
"homeassistant": {},
"alexa": {
"flash_briefings": {
"weather": [
{"title": "Weekly forecast",
"text": "This week it will be sunny."},
{"title": "Current conditions",
"text": "Currently it is 80 degrees fahrenheit."}
],
"news_audio": {
"title": "NPR",
"audio": NPR_NEWS_MP3_URL,
"display_url": "https://npr.org",
"uid": "uuid"
}
},
}
}))
return loop.run_until_complete(hass_client())
def _flash_briefing_req(client, briefing_id):
return client.get(
"/api/alexa/flash_briefings/{}".format(briefing_id))
@asyncio.coroutine
def test_flash_briefing_invalid_id(alexa_client):
"""Test an invalid Flash Briefing ID."""
req = yield from _flash_briefing_req(alexa_client, 10000)
assert req.status == 404
text = yield from req.text()
assert text == ''
@asyncio.coroutine
def test_flash_briefing_date_from_str(alexa_client):
"""Test the response has a valid date parsed from string."""
req = yield from _flash_briefing_req(alexa_client, "weather")
assert req.status == 200
data = yield from req.json()
assert isinstance(datetime.datetime.strptime(data[0].get(
const.ATTR_UPDATE_DATE), const.DATE_FORMAT), datetime.datetime)
@asyncio.coroutine
def test_flash_briefing_valid(alexa_client):
"""Test the response is valid."""
data = [{
"titleText": "NPR",
"redirectionURL": "https://npr.org",
"streamUrl": NPR_NEWS_MP3_URL,
"mainText": "",
"uid": "uuid",
"updateDate": '2016-10-10T19:51:42.0Z'
}]
req = yield from _flash_briefing_req(alexa_client, "news_audio")
assert req.status == 200
json = yield from req.json()
assert isinstance(datetime.datetime.strptime(json[0].get(
const.ATTR_UPDATE_DATE), const.DATE_FORMAT), datetime.datetime)
json[0].pop(const.ATTR_UPDATE_DATE)
data[0].pop(const.ATTR_UPDATE_DATE)
assert json == data
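# Hedged sketch of a further check (not part of the original suite): the
# "weather" briefing configured in the fixture should return both items, e.g.:
#
#   req = yield from _flash_briefing_req(alexa_client, "weather")
#   data = yield from req.json()
#   assert len(data) == 2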
|
apache-2.0
|
Eric-Zhong/odoo
|
openerp/report/render/rml2pdf/trml2pdf.py
|
256
|
46679
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
import copy
import reportlab
import re
from reportlab.pdfgen import canvas
from reportlab import platypus
import utils
import color
import os
import logging
from lxml import etree
import base64
from distutils.version import LooseVersion
from reportlab.platypus.doctemplate import ActionFlowable
from openerp.tools.safe_eval import safe_eval as eval
from reportlab.lib.units import inch,cm,mm
from openerp.tools.misc import file_open
from reportlab.pdfbase import pdfmetrics
from reportlab.lib.pagesizes import A4, letter
try:
from cStringIO import StringIO
_hush_pyflakes = [ StringIO ]
except ImportError:
from StringIO import StringIO
_logger = logging.getLogger(__name__)
encoding = 'utf-8'
def select_fontname(fontname, default_fontname):
if fontname not in pdfmetrics.getRegisteredFontNames()\
or fontname not in pdfmetrics.standardFonts:
# let reportlab attempt to find it
try:
pdfmetrics.getFont(fontname)
except Exception:
addition = ""
if " " in fontname:
addition = ". Your font contains spaces which is not valid in RML."
_logger.warning('Could not locate font %s, substituting default: %s%s',
fontname, default_fontname, addition)
fontname = default_fontname
return fontname
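# Hedged usage sketch (mirrors the call in _rml_canvas.setFont further below):
#
#   fontname = select_fontname(node.get('name'), canvas._fontname)
#   canvas.setFont(fontname, size)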
def _open_image(filename, path=None):
"""Attempt to open a binary file and return the descriptor
"""
if os.path.isfile(filename):
return open(filename, 'rb')
for p in (path or []):
if p and os.path.isabs(p):
fullpath = os.path.join(p, filename)
if os.path.isfile(fullpath):
return open(fullpath, 'rb')
try:
if p:
fullpath = os.path.join(p, filename)
else:
fullpath = filename
return file_open(fullpath)
except IOError:
pass
raise IOError("File %s cannot be found in image path" % filename)
class NumberedCanvas(canvas.Canvas):
def __init__(self, *args, **kwargs):
canvas.Canvas.__init__(self, *args, **kwargs)
self._saved_page_states = []
def showPage(self):
self._startPage()
def save(self):
"""add page info to each page (page x of y)"""
for state in self._saved_page_states:
self.__dict__.update(state)
self.draw_page_number()
canvas.Canvas.showPage(self)
canvas.Canvas.save(self)
def draw_page_number(self):
page_count = len(self._saved_page_states)
self.setFont("Helvetica", 8)
self.drawRightString((self._pagesize[0]-30), (self._pagesize[1]-40),
" %(this)i / %(total)i" % {
'this': self._pageNumber,
'total': page_count,
}
)
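# How the "page x of y" pass works (sketch, based on the classes in this file):
# TinyDocTemplate.afterPage() snapshots the canvas state into
# _saved_page_states instead of emitting each page immediately; save() then
# replays every snapshot and stamps draw_page_number() once the total is
# known, so a three-page story renders " 1 / 3", " 2 / 3", " 3 / 3".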
class PageCount(platypus.Flowable):
def __init__(self, story_count=0):
platypus.Flowable.__init__(self)
self.story_count = story_count
def draw(self):
self.canv.beginForm("pageCount%d" % self.story_count)
self.canv.setFont("Helvetica", utils.unit_get(str(8)))
self.canv.drawString(0, 0, str(self.canv.getPageNumber()))
self.canv.endForm()
class PageReset(platypus.Flowable):
def draw(self):
"""Flag to close current story page numbering and prepare for the next
should be executed after the rendering of the full story"""
self.canv._doPageReset = True
class _rml_styles(object,):
def __init__(self, nodes, localcontext):
self.localcontext = localcontext
self.styles = {}
self.styles_obj = {}
self.names = {}
self.table_styles = {}
self.default_style = reportlab.lib.styles.getSampleStyleSheet()
for node in nodes:
for style in node.findall('blockTableStyle'):
self.table_styles[style.get('id')] = self._table_style_get(style)
for style in node.findall('paraStyle'):
sname = style.get('name')
self.styles[sname] = self._para_style_update(style)
if self.default_style.has_key(sname):
for key, value in self.styles[sname].items():
setattr(self.default_style[sname], key, value)
else:
self.styles_obj[sname] = reportlab.lib.styles.ParagraphStyle(sname, self.default_style["Normal"], **self.styles[sname])
for variable in node.findall('initialize'):
for name in variable.findall('name'):
self.names[ name.get('id')] = name.get('value')
def _para_style_update(self, node):
data = {}
for attr in ['textColor', 'backColor', 'bulletColor', 'borderColor']:
if node.get(attr):
data[attr] = color.get(node.get(attr))
for attr in ['bulletFontName', 'fontName']:
if node.get(attr):
fontname= select_fontname(node.get(attr), None)
if fontname is not None:
data['fontName'] = fontname
for attr in ['bulletText']:
if node.get(attr):
data[attr] = node.get(attr)
for attr in ['fontSize', 'leftIndent', 'rightIndent', 'spaceBefore', 'spaceAfter',
'firstLineIndent', 'bulletIndent', 'bulletFontSize', 'leading',
'borderWidth','borderPadding','borderRadius']:
if node.get(attr):
data[attr] = utils.unit_get(node.get(attr))
if node.get('alignment'):
align = {
'right':reportlab.lib.enums.TA_RIGHT,
'center':reportlab.lib.enums.TA_CENTER,
'justify':reportlab.lib.enums.TA_JUSTIFY
}
data['alignment'] = align.get(node.get('alignment').lower(), reportlab.lib.enums.TA_LEFT)
data['splitLongWords'] = 0
return data
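    # Hedged sketch of an RML node consumed by _para_style_update (attribute
    # names as handled above; the style name is illustrative):
    #
    #   <paraStyle name="main_title" fontName="Helvetica" fontSize="18"
    #              alignment="center" textColor="black" spaceAfter="6"/>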
def _table_style_get(self, style_node):
styles = []
for node in style_node:
start = utils.tuple_int_get(node, 'start', (0,0) )
stop = utils.tuple_int_get(node, 'stop', (-1,-1) )
if node.tag=='blockValign':
styles.append(('VALIGN', start, stop, str(node.get('value'))))
elif node.tag=='blockFont':
styles.append(('FONT', start, stop, str(node.get('name'))))
elif node.tag=='blockTextColor':
styles.append(('TEXTCOLOR', start, stop, color.get(str(node.get('colorName')))))
elif node.tag=='blockLeading':
styles.append(('LEADING', start, stop, utils.unit_get(node.get('length'))))
elif node.tag=='blockAlignment':
styles.append(('ALIGNMENT', start, stop, str(node.get('value'))))
elif node.tag=='blockSpan':
styles.append(('SPAN', start, stop))
elif node.tag=='blockLeftPadding':
styles.append(('LEFTPADDING', start, stop, utils.unit_get(node.get('length'))))
elif node.tag=='blockRightPadding':
styles.append(('RIGHTPADDING', start, stop, utils.unit_get(node.get('length'))))
elif node.tag=='blockTopPadding':
styles.append(('TOPPADDING', start, stop, utils.unit_get(node.get('length'))))
elif node.tag=='blockBottomPadding':
styles.append(('BOTTOMPADDING', start, stop, utils.unit_get(node.get('length'))))
elif node.tag=='blockBackground':
styles.append(('BACKGROUND', start, stop, color.get(node.get('colorName'))))
if node.get('size'):
styles.append(('FONTSIZE', start, stop, utils.unit_get(node.get('size'))))
elif node.tag=='lineStyle':
kind = node.get('kind')
kind_list = [ 'GRID', 'BOX', 'OUTLINE', 'INNERGRID', 'LINEBELOW', 'LINEABOVE','LINEBEFORE', 'LINEAFTER' ]
assert kind in kind_list
thick = 1
if node.get('thickness'):
thick = float(node.get('thickness'))
styles.append((kind, start, stop, thick, color.get(node.get('colorName'))))
return platypus.tables.TableStyle(styles)
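    # Hedged sketch of an RML table style consumed by _table_style_get (tags
    # and attributes as handled above; the id is illustrative):
    #
    #   <blockTableStyle id="header_table">
    #     <blockValign value="TOP" start="0,0" stop="-1,-1"/>
    #     <lineStyle kind="LINEBELOW" colorName="black" start="0,0" stop="-1,0"/>
    #   </blockTableStyle>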
def para_style_get(self, node):
style = False
sname = node.get('style')
if sname:
if sname in self.styles_obj:
style = self.styles_obj[sname]
else:
_logger.debug('Warning: style not found, %s - setting default!', node.get('style'))
if not style:
style = self.default_style['Normal']
para_update = self._para_style_update(node)
if para_update:
            # update the style only if necessary
style = copy.deepcopy(style)
style.__dict__.update(para_update)
return style
class _rml_doc(object):
def __init__(self, node, localcontext=None, images=None, path='.', title=None):
if images is None:
images = {}
if localcontext is None:
localcontext = {}
self.localcontext = localcontext
self.etree = node
self.filename = self.etree.get('filename')
self.images = images
self.path = path
self.title = title
def docinit(self, els):
from reportlab.lib.fonts import addMapping
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
for node in els:
for font in node.findall('registerFont'):
name = font.get('fontName').encode('ascii')
fname = font.get('fontFile').encode('ascii')
if name not in pdfmetrics._fonts:
pdfmetrics.registerFont(TTFont(name, fname))
#by default, we map the fontName to each style (bold, italic, bold and italic), so that
#if there isn't any font defined for one of these style (via a font family), the system
#will fallback on the normal font.
addMapping(name, 0, 0, name) #normal
addMapping(name, 0, 1, name) #italic
addMapping(name, 1, 0, name) #bold
addMapping(name, 1, 1, name) #italic and bold
#if registerFontFamily is defined, we register the mapping of the fontName to use for each style.
for font_family in node.findall('registerFontFamily'):
family_name = font_family.get('normal').encode('ascii')
if font_family.get('italic'):
addMapping(family_name, 0, 1, font_family.get('italic').encode('ascii'))
if font_family.get('bold'):
addMapping(family_name, 1, 0, font_family.get('bold').encode('ascii'))
if font_family.get('boldItalic'):
addMapping(family_name, 1, 1, font_family.get('boldItalic').encode('ascii'))
def setTTFontMapping(self,face, fontname, filename, mode='all'):
from reportlab.lib.fonts import addMapping
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
if mode:
mode = mode.lower()
if fontname not in pdfmetrics._fonts:
pdfmetrics.registerFont(TTFont(fontname, filename))
if mode == 'all':
addMapping(face, 0, 0, fontname) #normal
addMapping(face, 0, 1, fontname) #italic
addMapping(face, 1, 0, fontname) #bold
addMapping(face, 1, 1, fontname) #italic and bold
elif mode in ['italic', 'oblique']:
addMapping(face, 0, 1, fontname) #italic
elif mode == 'bold':
addMapping(face, 1, 0, fontname) #bold
elif mode in ('bolditalic', 'bold italic','boldoblique', 'bold oblique'):
addMapping(face, 1, 1, fontname) #italic and bold
else:
addMapping(face, 0, 0, fontname) #normal
    def _textual_image(self, node):
        rc = ''
        for n in node:
            rc += (etree.tostring(n) or '') + (n.tail or '')
        # decode the base64 image payload accumulated from the child nodes
        return base64.decodestring(rc)
def _images(self, el):
result = {}
for node in el.findall('.//image'):
rc =( node.text or '')
result[node.get('name')] = base64.decodestring(rc)
return result
def render(self, out):
el = self.etree.findall('.//docinit')
if el:
self.docinit(el)
el = self.etree.findall('.//stylesheet')
self.styles = _rml_styles(el,self.localcontext)
el = self.etree.findall('.//images')
if el:
self.images.update( self._images(el[0]) )
el = self.etree.findall('.//template')
if len(el):
pt_obj = _rml_template(self.localcontext, out, el[0], self, images=self.images, path=self.path, title=self.title)
el = utils._child_get(self.etree, self, 'story')
pt_obj.render(el)
else:
self.canvas = canvas.Canvas(out)
pd = self.etree.find('pageDrawing')[0]
pd_obj = _rml_canvas(self.canvas, self.localcontext, None, self, self.images, path=self.path, title=self.title)
pd_obj.render(pd)
self.canvas.showPage()
self.canvas.save()
class _rml_canvas(object):
def __init__(self, canvas, localcontext, doc_tmpl=None, doc=None, images=None, path='.', title=None):
if images is None:
images = {}
self.localcontext = localcontext
self.canvas = canvas
self.styles = doc.styles
self.doc_tmpl = doc_tmpl
self.doc = doc
self.images = images
self.path = path
self.title = title
if self.title:
self.canvas.setTitle(self.title)
def _textual(self, node, x=0, y=0):
text = node.text and node.text.encode('utf-8') or ''
rc = utils._process_text(self, text)
for n in node:
if n.tag == 'seq':
from reportlab.lib.sequencer import getSequencer
seq = getSequencer()
rc += str(seq.next(n.get('id')))
if n.tag == 'pageCount':
if x or y:
self.canvas.translate(x,y)
self.canvas.doForm('pageCount%s' % (self.canvas._storyCount,))
if x or y:
self.canvas.translate(-x,-y)
if n.tag == 'pageNumber':
rc += str(self.canvas.getPageNumber())
rc += utils._process_text(self, n.tail)
return rc.replace('\n','')
def _drawString(self, node):
v = utils.attr_get(node, ['x','y'])
text=self._textual(node, **v)
text = utils.xml2str(text)
try:
self.canvas.drawString(text=text, **v)
except TypeError:
_logger.error("Bad RML: <drawString> tag requires attributes 'x' and 'y'!")
raise
def _drawCenteredString(self, node):
v = utils.attr_get(node, ['x','y'])
text=self._textual(node, **v)
text = utils.xml2str(text)
self.canvas.drawCentredString(text=text, **v)
def _drawRightString(self, node):
v = utils.attr_get(node, ['x','y'])
text=self._textual(node, **v)
text = utils.xml2str(text)
self.canvas.drawRightString(text=text, **v)
def _rect(self, node):
if node.get('round'):
self.canvas.roundRect(radius=utils.unit_get(node.get('round')), **utils.attr_get(node, ['x','y','width','height'], {'fill':'bool','stroke':'bool'}))
else:
self.canvas.rect(**utils.attr_get(node, ['x','y','width','height'], {'fill':'bool','stroke':'bool'}))
def _ellipse(self, node):
x1 = utils.unit_get(node.get('x'))
x2 = utils.unit_get(node.get('width'))
y1 = utils.unit_get(node.get('y'))
y2 = utils.unit_get(node.get('height'))
self.canvas.ellipse(x1,y1,x2,y2, **utils.attr_get(node, [], {'fill':'bool','stroke':'bool'}))
    def _curves(self, node):
        line_str = node.text.split()
        while len(line_str) > 7:
            self.canvas.bezier(*[utils.unit_get(l) for l in line_str[0:8]])
            line_str = line_str[8:]
def _lines(self, node):
line_str = node.text.split()
lines = []
while len(line_str)>3:
lines.append([utils.unit_get(l) for l in line_str[0:4]])
line_str = line_str[4:]
self.canvas.lines(lines)
def _grid(self, node):
xlist = [utils.unit_get(s) for s in node.get('xs').split(',')]
ylist = [utils.unit_get(s) for s in node.get('ys').split(',')]
self.canvas.grid(xlist, ylist)
def _translate(self, node):
dx = utils.unit_get(node.get('dx')) or 0
dy = utils.unit_get(node.get('dy')) or 0
self.canvas.translate(dx,dy)
def _circle(self, node):
self.canvas.circle(x_cen=utils.unit_get(node.get('x')), y_cen=utils.unit_get(node.get('y')), r=utils.unit_get(node.get('radius')), **utils.attr_get(node, [], {'fill':'bool','stroke':'bool'}))
def _place(self, node):
flows = _rml_flowable(self.doc, self.localcontext, images=self.images, path=self.path, title=self.title, canvas=self.canvas).render(node)
infos = utils.attr_get(node, ['x','y','width','height'])
infos['y']+=infos['height']
for flow in flows:
w,h = flow.wrap(infos['width'], infos['height'])
if w<=infos['width'] and h<=infos['height']:
infos['y']-=h
flow.drawOn(self.canvas,infos['x'],infos['y'])
infos['height']-=h
else:
raise ValueError("Not enough space")
def _line_mode(self, node):
ljoin = {'round':1, 'mitered':0, 'bevelled':2}
lcap = {'default':0, 'round':1, 'square':2}
if node.get('width'):
self.canvas.setLineWidth(utils.unit_get(node.get('width')))
if node.get('join'):
self.canvas.setLineJoin(ljoin[node.get('join')])
if node.get('cap'):
self.canvas.setLineCap(lcap[node.get('cap')])
        if node.get('miterLimit'):
            self.canvas.setMiterLimit(utils.unit_get(node.get('miterLimit')))
        if node.get('dash'):
            dashes = [utils.unit_get(x) for x in node.get('dash').split(',')]
            self.canvas.setDash(dashes)
def _image(self, node):
import urllib
import urlparse
from reportlab.lib.utils import ImageReader
nfile = node.get('file')
if not nfile:
if node.get('name'):
image_data = self.images[node.get('name')]
_logger.debug("Image %s used", node.get('name'))
s = StringIO(image_data)
else:
newtext = node.text
if self.localcontext:
res = utils._regex.findall(newtext)
for key in res:
newtext = eval(key, {}, self.localcontext) or ''
image_data = None
if newtext:
image_data = base64.decodestring(newtext)
if image_data:
s = StringIO(image_data)
else:
_logger.debug("No image data!")
return False
else:
if nfile in self.images:
s = StringIO(self.images[nfile])
else:
try:
up = urlparse.urlparse(str(nfile))
except ValueError:
up = False
if up and up.scheme:
# RFC: do we really want to open external URLs?
# Are we safe from cross-site scripting or attacks?
_logger.debug("Retrieve image from %s", nfile)
u = urllib.urlopen(str(nfile))
s = StringIO(u.read())
else:
_logger.debug("Open image file %s ", nfile)
s = _open_image(nfile, path=self.path)
try:
img = ImageReader(s)
(sx,sy) = img.getSize()
_logger.debug("Image is %dx%d", sx, sy)
args = { 'x': 0.0, 'y': 0.0, 'mask': 'auto'}
for tag in ('width','height','x','y'):
if node.get(tag):
args[tag] = utils.unit_get(node.get(tag))
if ('width' in args) and (not 'height' in args):
args['height'] = sy * args['width'] / sx
elif ('height' in args) and (not 'width' in args):
args['width'] = sx * args['height'] / sy
elif ('width' in args) and ('height' in args):
                if (float(args['width']) / args['height']) > (float(sx) / sy):
args['width'] = sx * args['height'] / sy
else:
args['height'] = sy * args['width'] / sx
self.canvas.drawImage(img, **args)
finally:
s.close()
# self.canvas._doc.SaveToFile(self.canvas._filename, self.canvas)
def _path(self, node):
self.path = self.canvas.beginPath()
self.path.moveTo(**utils.attr_get(node, ['x','y']))
for n in utils._child_get(node, self):
if not n.text :
if n.tag=='moveto':
vals = utils.text_get(n).split()
self.path.moveTo(utils.unit_get(vals[0]), utils.unit_get(vals[1]))
elif n.tag=='curvesto':
vals = utils.text_get(n).split()
while len(vals)>5:
pos=[]
while len(pos)<6:
pos.append(utils.unit_get(vals.pop(0)))
self.path.curveTo(*pos)
elif n.text:
data = n.text.split() # Not sure if I must merge all TEXT_NODE ?
while len(data)>1:
x = utils.unit_get(data.pop(0))
y = utils.unit_get(data.pop(0))
self.path.lineTo(x,y)
if (not node.get('close')) or utils.bool_get(node.get('close')):
self.path.close()
self.canvas.drawPath(self.path, **utils.attr_get(node, [], {'fill':'bool','stroke':'bool'}))
def setFont(self, node):
fontname = select_fontname(node.get('name'), self.canvas._fontname)
return self.canvas.setFont(fontname, utils.unit_get(node.get('size')))
def render(self, node):
tags = {
'drawCentredString': self._drawCenteredString,
'drawRightString': self._drawRightString,
'drawString': self._drawString,
'rect': self._rect,
'ellipse': self._ellipse,
'lines': self._lines,
'grid': self._grid,
'curves': self._curves,
'fill': lambda node: self.canvas.setFillColor(color.get(node.get('color'))),
'stroke': lambda node: self.canvas.setStrokeColor(color.get(node.get('color'))),
'setFont': self.setFont ,
'place': self._place,
'circle': self._circle,
'lineMode': self._line_mode,
'path': self._path,
'rotate': lambda node: self.canvas.rotate(float(node.get('degrees'))),
'translate': self._translate,
'image': self._image
}
for n in utils._child_get(node, self):
if n.tag in tags:
tags[n.tag](n)
class _rml_draw(object):
def __init__(self, localcontext, node, styles, images=None, path='.', title=None):
if images is None:
images = {}
self.localcontext = localcontext
self.node = node
self.styles = styles
self.canvas = None
self.images = images
self.path = path
self.canvas_title = title
def render(self, canvas, doc):
canvas.saveState()
cnv = _rml_canvas(canvas, self.localcontext, doc, self.styles, images=self.images, path=self.path, title=self.canvas_title)
cnv.render(self.node)
canvas.restoreState()
class _rml_Illustration(platypus.flowables.Flowable):
def __init__(self, node, localcontext, styles, self2):
self.localcontext = (localcontext or {}).copy()
self.node = node
self.styles = styles
self.width = utils.unit_get(node.get('width'))
self.height = utils.unit_get(node.get('height'))
self.self2 = self2
def wrap(self, *args):
return self.width, self.height
def draw(self):
drw = _rml_draw(self.localcontext ,self.node,self.styles, images=self.self2.images, path=self.self2.path, title=self.self2.title)
drw.render(self.canv, None)
# Workaround for issue #15: https://bitbucket.org/rptlab/reportlab/issue/15/infinite-pages-produced-when-splitting
original_pto_split = platypus.flowables.PTOContainer.split
def split(self, availWidth, availHeight):
res = original_pto_split(self, availWidth, availHeight)
if len(res) > 2 and len(self._content) > 0:
header = self._content[0]._ptoinfo.header
trailer = self._content[0]._ptoinfo.trailer
if isinstance(res[-2], platypus.flowables.UseUpSpace) and len(header + trailer) == len(res[:-2]):
return []
return res
platypus.flowables.PTOContainer.split = split
class _rml_flowable(object):
def __init__(self, doc, localcontext, images=None, path='.', title=None, canvas=None):
if images is None:
images = {}
self.localcontext = localcontext
self.doc = doc
self.styles = doc.styles
self.images = images
self.path = path
self.title = title
self.canvas = canvas
def _textual(self, node):
rc1 = utils._process_text(self, node.text or '')
for n in utils._child_get(node,self):
txt_n = copy.deepcopy(n)
for key in txt_n.attrib.keys():
if key in ('rml_except', 'rml_loop', 'rml_tag'):
del txt_n.attrib[key]
if not n.tag == 'bullet':
if n.tag == 'pageNumber':
txt_n.text = self.canvas and str(self.canvas.getPageNumber()) or ''
else:
txt_n.text = utils.xml2str(self._textual(n))
txt_n.tail = n.tail and utils.xml2str(utils._process_text(self, n.tail.replace('\n',''))) or ''
rc1 += etree.tostring(txt_n)
return rc1
def _table(self, node):
children = utils._child_get(node,self,'tr')
if not children:
return None
length = 0
colwidths = None
rowheights = None
data = []
styles = []
posy = 0
for tr in children:
paraStyle = None
if tr.get('style'):
st = copy.deepcopy(self.styles.table_styles[tr.get('style')])
for si in range(len(st._cmds)):
s = list(st._cmds[si])
s[1] = (s[1][0],posy)
s[2] = (s[2][0],posy)
st._cmds[si] = tuple(s)
styles.append(st)
if tr.get('paraStyle'):
paraStyle = self.styles.styles[tr.get('paraStyle')]
data2 = []
posx = 0
for td in utils._child_get(tr, self,'td'):
                if td.get('style'):
                    st = copy.deepcopy(self.styles.table_styles[td.get('style')])
                    for si in range(len(st._cmds)):
                        # pin each command to the current cell, as done for rows above
                        s = list(st._cmds[si])
                        s[1] = (posx, posy)
                        s[2] = (posx, posy)
                        st._cmds[si] = tuple(s)
                    styles.append(st)
if td.get('paraStyle'):
# TODO: merge styles
paraStyle = self.styles.styles[td.get('paraStyle')]
posx += 1
flow = []
for n in utils._child_get(td, self):
if n.tag == etree.Comment:
n.text = ''
continue
fl = self._flowable(n, extra_style=paraStyle)
if isinstance(fl,list):
flow += fl
else:
flow.append( fl )
if not len(flow):
flow = self._textual(td)
data2.append( flow )
if len(data2)>length:
length=len(data2)
for ab in data:
while len(ab)<length:
ab.append('')
while len(data2)<length:
data2.append('')
data.append( data2 )
posy += 1
if node.get('colWidths'):
assert length == len(node.get('colWidths').split(','))
colwidths = [utils.unit_get(f.strip()) for f in node.get('colWidths').split(',')]
if node.get('rowHeights'):
rowheights = [utils.unit_get(f.strip()) for f in node.get('rowHeights').split(',')]
if len(rowheights) == 1:
rowheights = rowheights[0]
table = platypus.LongTable(data = data, colWidths=colwidths, rowHeights=rowheights, **(utils.attr_get(node, ['splitByRow'] ,{'repeatRows':'int','repeatCols':'int'})))
if node.get('style'):
table.setStyle(self.styles.table_styles[node.get('style')])
for s in styles:
table.setStyle(s)
return table
def _illustration(self, node):
return _rml_Illustration(node, self.localcontext, self.styles, self)
def _textual_image(self, node):
return base64.decodestring(node.text)
def _pto(self, node):
sub_story = []
pto_header = None
pto_trailer = None
for node in utils._child_get(node, self):
if node.tag == etree.Comment:
node.text = ''
continue
elif node.tag=='pto_header':
pto_header = self.render(node)
elif node.tag=='pto_trailer':
pto_trailer = self.render(node)
else:
flow = self._flowable(node)
if flow:
if isinstance(flow,list):
sub_story = sub_story + flow
else:
sub_story.append(flow)
return platypus.flowables.PTOContainer(sub_story, trailer=pto_trailer, header=pto_header)
def _flowable(self, node, extra_style=None):
if node.tag=='pto':
return self._pto(node)
if node.tag=='para':
style = self.styles.para_style_get(node)
if extra_style:
style.__dict__.update(extra_style)
text_node = self._textual(node).strip().replace('\n\n', '\n').replace('\n', '<br/>')
instance = platypus.Paragraph(text_node, style, **(utils.attr_get(node, [], {'bulletText':'str'})))
result = [instance]
if LooseVersion(reportlab.Version) > LooseVersion('3.0') and not instance.getPlainText().strip() and instance.text.strip():
result.append(platypus.Paragraph(' <br/>', style, **(utils.attr_get(node, [], {'bulletText': 'str'}))))
return result
elif node.tag=='barCode':
try:
from reportlab.graphics.barcode import code128
from reportlab.graphics.barcode import code39
from reportlab.graphics.barcode import code93
from reportlab.graphics.barcode import common
from reportlab.graphics.barcode import fourstate
from reportlab.graphics.barcode import usps
from reportlab.graphics.barcode import createBarcodeDrawing
except ImportError:
_logger.warning("Cannot use barcode renderers:", exc_info=True)
return None
args = utils.attr_get(node, [], {'ratio':'float','xdim':'unit','height':'unit','checksum':'int','quiet':'int','width':'unit','stop':'bool','bearers':'int','barWidth':'float','barHeight':'float'})
codes = {
'codabar': lambda x: common.Codabar(x, **args),
'code11': lambda x: common.Code11(x, **args),
'code128': lambda x: code128.Code128(str(x), **args),
'standard39': lambda x: code39.Standard39(str(x), **args),
'standard93': lambda x: code93.Standard93(str(x), **args),
'i2of5': lambda x: common.I2of5(x, **args),
'extended39': lambda x: code39.Extended39(str(x), **args),
'extended93': lambda x: code93.Extended93(str(x), **args),
'msi': lambda x: common.MSI(x, **args),
'fim': lambda x: usps.FIM(x, **args),
'postnet': lambda x: usps.POSTNET(x, **args),
'ean13': lambda x: createBarcodeDrawing('EAN13', value=str(x), **args),
'qrcode': lambda x: createBarcodeDrawing('QR', value=x, **args),
}
code = 'code128'
if node.get('code'):
code = node.get('code').lower()
return codes[code](self._textual(node))
elif node.tag=='name':
self.styles.names[ node.get('id')] = node.get('value')
return None
elif node.tag=='xpre':
style = self.styles.para_style_get(node)
return platypus.XPreformatted(self._textual(node), style, **(utils.attr_get(node, [], {'bulletText':'str','dedent':'int','frags':'int'})))
elif node.tag=='pre':
style = self.styles.para_style_get(node)
return platypus.Preformatted(self._textual(node), style, **(utils.attr_get(node, [], {'bulletText':'str','dedent':'int'})))
elif node.tag=='illustration':
return self._illustration(node)
elif node.tag=='blockTable':
return self._table(node)
elif node.tag=='title':
styles = reportlab.lib.styles.getSampleStyleSheet()
style = styles['Title']
return platypus.Paragraph(self._textual(node), style, **(utils.attr_get(node, [], {'bulletText':'str'})))
elif re.match('^h([1-9]+[0-9]*)$', (node.tag or '')):
styles = reportlab.lib.styles.getSampleStyleSheet()
style = styles['Heading'+str(node.tag[1:])]
return platypus.Paragraph(self._textual(node), style, **(utils.attr_get(node, [], {'bulletText':'str'})))
elif node.tag=='image':
image_data = False
if not node.get('file'):
if node.get('name'):
if node.get('name') in self.doc.images:
_logger.debug("Image %s read ", node.get('name'))
image_data = self.doc.images[node.get('name')].read()
else:
_logger.warning("Image %s not defined", node.get('name'))
return False
else:
import base64
newtext = node.text
if self.localcontext:
newtext = utils._process_text(self, node.text or '')
image_data = base64.decodestring(newtext)
if not image_data:
_logger.debug("No inline image data")
return False
image = StringIO(image_data)
else:
_logger.debug("Image get from file %s", node.get('file'))
image = _open_image(node.get('file'), path=self.doc.path)
return platypus.Image(image, mask=(250,255,250,255,250,255), **(utils.attr_get(node, ['width','height'])))
elif node.tag=='spacer':
if node.get('width'):
width = utils.unit_get(node.get('width'))
else:
width = utils.unit_get('1cm')
length = utils.unit_get(node.get('length'))
return platypus.Spacer(width=width, height=length)
elif node.tag=='section':
return self.render(node)
elif node.tag == 'pageNumberReset':
return PageReset()
elif node.tag in ('pageBreak', 'nextPage'):
return platypus.PageBreak()
elif node.tag=='condPageBreak':
return platypus.CondPageBreak(**(utils.attr_get(node, ['height'])))
elif node.tag=='setNextTemplate':
return platypus.NextPageTemplate(str(node.get('name')))
elif node.tag=='nextFrame':
return platypus.CondPageBreak(1000) # TODO: change the 1000 !
elif node.tag == 'setNextFrame':
from reportlab.platypus.doctemplate import NextFrameFlowable
return NextFrameFlowable(str(node.get('name')))
elif node.tag == 'currentFrame':
from reportlab.platypus.doctemplate import CurrentFrameFlowable
return CurrentFrameFlowable(str(node.get('name')))
elif node.tag == 'frameEnd':
return EndFrameFlowable()
elif node.tag == 'hr':
width_hr=node.get('width') or '100%'
color_hr=node.get('color') or 'black'
thickness_hr=node.get('thickness') or 1
lineCap_hr=node.get('lineCap') or 'round'
return platypus.flowables.HRFlowable(width=width_hr,color=color.get(color_hr),thickness=float(thickness_hr),lineCap=str(lineCap_hr))
else:
sys.stderr.write('Warning: flowable not yet implemented: %s !\n' % (node.tag,))
return None
def render(self, node_story):
def process_story(node_story):
sub_story = []
for node in utils._child_get(node_story, self):
if node.tag == etree.Comment:
node.text = ''
continue
flow = self._flowable(node)
if flow:
if isinstance(flow,list):
sub_story = sub_story + flow
else:
sub_story.append(flow)
return sub_story
return process_story(node_story)
class EndFrameFlowable(ActionFlowable):
def __init__(self,resume=0):
ActionFlowable.__init__(self,('frameEnd',resume))
class TinyDocTemplate(platypus.BaseDocTemplate):
def beforeDocument(self):
# Store some useful value directly inside canvas, so it's available
# on flowable drawing (needed for proper PageCount handling)
self.canv._doPageReset = False
self.canv._storyCount = 0
def ___handle_pageBegin(self):
self.page += 1
self.pageTemplate.beforeDrawPage(self.canv,self)
self.pageTemplate.checkPageSize(self.canv,self)
self.pageTemplate.onPage(self.canv,self)
for f in self.pageTemplate.frames: f._reset()
self.beforePage()
self._curPageFlowableCount = 0
if hasattr(self,'_nextFrameIndex'):
del self._nextFrameIndex
for f in self.pageTemplate.frames:
if f.id == 'first':
self.frame = f
break
self.handle_frameBegin()
def afterPage(self):
if isinstance(self.canv, NumberedCanvas):
# save current page states before eventual reset
self.canv._saved_page_states.append(dict(self.canv.__dict__))
if self.canv._doPageReset:
            # Following a <pageReset/> tag:
            # - we reset the page number to 0
            # - we add a new PageCount flowable (relative to the current
            #   story number), but not for NumberedCanvas, as it handles
            #   the page count itself
            # NOTE: _rml_template's render() method adds a PageReset flowable at the
            # end of each story, so we are sure to pass here at least once per story.
if not isinstance(self.canv, NumberedCanvas):
self.handle_flowable([ PageCount(story_count=self.canv._storyCount) ])
self.canv._pageCount = self.page
self.page = 0
self.canv._flag = True
self.canv._pageNumber = 0
self.canv._doPageReset = False
self.canv._storyCount += 1
class _rml_template(object):
def __init__(self, localcontext, out, node, doc, images=None, path='.', title=None):
if images is None:
images = {}
if not localcontext:
localcontext={'internal_header':True}
self.localcontext = localcontext
self.images= images
self.path = path
self.title = title
pagesize_map = {'a4': A4,
'us_letter': letter
}
pageSize = A4
if self.localcontext.get('company'):
pageSize = pagesize_map.get(self.localcontext.get('company').rml_paper_format, A4)
if node.get('pageSize'):
ps = map(lambda x:x.strip(), node.get('pageSize').replace(')', '').replace('(', '').split(','))
pageSize = ( utils.unit_get(ps[0]),utils.unit_get(ps[1]) )
self.doc_tmpl = TinyDocTemplate(out, pagesize=pageSize, **utils.attr_get(node, ['leftMargin','rightMargin','topMargin','bottomMargin'], {'allowSplitting':'int','showBoundary':'bool','rotation':'int','title':'str','author':'str'}))
self.page_templates = []
self.styles = doc.styles
self.doc = doc
self.image=[]
pts = node.findall('pageTemplate')
for pt in pts:
frames = []
for frame_el in pt.findall('frame'):
frame = platypus.Frame( **(utils.attr_get(frame_el, ['x1','y1', 'width','height', 'leftPadding', 'rightPadding', 'bottomPadding', 'topPadding'], {'id':'str', 'showBoundary':'bool'})) )
if utils.attr_get(frame_el, ['last']):
frame.lastFrame = True
frames.append( frame )
try :
gr = pt.findall('pageGraphics')\
or pt[1].findall('pageGraphics')
except Exception: # FIXME: be even more specific, perhaps?
gr=''
if len(gr):
# self.image=[ n for n in utils._child_get(gr[0], self) if n.tag=='image' or not self.localcontext]
drw = _rml_draw(self.localcontext,gr[0], self.doc, images=images, path=self.path, title=self.title)
self.page_templates.append( platypus.PageTemplate(frames=frames, onPage=drw.render, **utils.attr_get(pt, [], {'id':'str'}) ))
else:
drw = _rml_draw(self.localcontext,node,self.doc,title=self.title)
self.page_templates.append( platypus.PageTemplate(frames=frames,onPage=drw.render, **utils.attr_get(pt, [], {'id':'str'}) ))
self.doc_tmpl.addPageTemplates(self.page_templates)
def render(self, node_stories):
        if self.localcontext and not self.localcontext.get('internal_header', False):
            # drop the flag safely even when it was never set
            self.localcontext.pop('internal_header', None)
fis = []
r = _rml_flowable(self.doc,self.localcontext, images=self.images, path=self.path, title=self.title, canvas=None)
story_cnt = 0
for node_story in node_stories:
if story_cnt > 0:
fis.append(platypus.PageBreak())
fis += r.render(node_story)
# end of story numbering computation
fis.append(PageReset())
story_cnt += 1
try:
if self.localcontext and self.localcontext.get('internal_header',False):
self.doc_tmpl.afterFlowable(fis)
self.doc_tmpl.build(fis,canvasmaker=NumberedCanvas)
else:
self.doc_tmpl.build(fis)
except platypus.doctemplate.LayoutError, e:
e.name = 'Print Error'
e.value = 'The document you are trying to print contains a table row that does not fit on one page. Please try to split it in smaller rows or contact your administrator.'
raise
def parseNode(rml, localcontext=None, fout=None, images=None, path='.', title=None):
node = etree.XML(rml)
r = _rml_doc(node, localcontext, images, path, title=title)
#try to override some font mappings
try:
from customfonts import SetCustomFonts
SetCustomFonts(r)
    except ImportError:
        # there is no custom font mapping on this system
        pass
    except Exception:
        _logger.warning('Cannot set font mapping', exc_info=True)
fp = StringIO()
r.render(fp)
return fp.getvalue()
def parseString(rml, localcontext=None, fout=None, images=None, path='.', title=None):
node = etree.XML(rml)
r = _rml_doc(node, localcontext, images, path, title=title)
#try to override some font mappings
try:
from customfonts import SetCustomFonts
SetCustomFonts(r)
except Exception:
pass
if fout:
fp = file(fout,'wb')
r.render(fp)
fp.close()
return fout
else:
fp = StringIO()
r.render(fp)
return fp.getvalue()
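# Hedged usage sketch for the two entry points above ('report.rml' and
# 'out.pdf' are illustrative paths):
#
#   pdf_bytes = parseNode(open('report.rml').read())          # returns PDF data
#   parseString(open('report.rml').read(), fout='out.pdf')    # writes a file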
def trml2pdf_help():
print 'Usage: trml2pdf input.rml >output.pdf'
print 'Render the standard input (RML) and output a PDF file'
sys.exit(0)
if __name__=="__main__":
if len(sys.argv)>1:
if sys.argv[1]=='--help':
trml2pdf_help()
print parseString(file(sys.argv[1], 'r').read()),
else:
print 'Usage: trml2pdf input.rml >output.pdf'
print 'Try \'trml2pdf --help\' for more information.'
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
vmax-feihu/hue
|
desktop/core/ext-py/requests-2.6.0/requests/packages/chardet/escsm.py
|
2930
|
7839
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
HZ_cls = (
1,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,4,0,5,2,0, # 78 - 7f
1,1,1,1,1,1,1,1, # 80 - 87
1,1,1,1,1,1,1,1, # 88 - 8f
1,1,1,1,1,1,1,1, # 90 - 97
1,1,1,1,1,1,1,1, # 98 - 9f
1,1,1,1,1,1,1,1, # a0 - a7
1,1,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,1,1,1,1,1,1, # c0 - c7
1,1,1,1,1,1,1,1, # c8 - cf
1,1,1,1,1,1,1,1, # d0 - d7
1,1,1,1,1,1,1,1, # d8 - df
1,1,1,1,1,1,1,1, # e0 - e7
1,1,1,1,1,1,1,1, # e8 - ef
1,1,1,1,1,1,1,1, # f0 - f7
1,1,1,1,1,1,1,1, # f8 - ff
)
HZ_st = (
eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17
5,eError, 6,eError, 5, 5, 4,eError,# 18-1f
4,eError, 4, 4, 4,eError, 4,eError,# 20-27
4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
)
HZCharLenTable = (0, 0, 0, 0, 0, 0)
HZSMModel = {'classTable': HZ_cls,
'classFactor': 6,
'stateTable': HZ_st,
'charLenTable': HZCharLenTable,
'name': "HZ-GB-2312"}
ISO2022CN_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,4,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022CN_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
5, 6,eError,eError,eError,eError,eError,eError,# 28-2f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
)
ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
'classFactor': 9,
'stateTable': ISO2022CN_st,
'charLenTable': ISO2022CNCharLenTable,
'name': "ISO-2022-CN"}
ISO2022JP_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,2,2, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,7,0,0,0, # 20 - 27
3,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
6,0,4,0,8,0,0,0, # 40 - 47
0,9,5,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022JP_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
eError, 5,eError,eError,eError, 4,eError,eError,# 20-27
eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
)
ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
'classFactor': 10,
'stateTable': ISO2022JP_st,
'charLenTable': ISO2022JPCharLenTable,
'name': "ISO-2022-JP"}
ISO2022KR_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,3,0,0,0, # 20 - 27
0,4,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,5,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022KR_st = (
eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17
eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f
eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
)
ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
'classFactor': 6,
'stateTable': ISO2022KR_st,
'charLenTable': ISO2022KRCharLenTable,
'name': "ISO-2022-KR"}
# flake8: noqa
|
apache-2.0
|
gangadharkadam/saloon_erp_install
|
erpnext/controllers/buying_controller.py
|
24
|
10406
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, msgprint
from frappe.utils import flt
from erpnext.setup.utils import get_company_currency
from erpnext.accounts.party import get_party_details
from erpnext.stock.get_item_details import get_conversion_factor
from erpnext.controllers.stock_controller import StockController
class BuyingController(StockController):
def __setup__(self):
if hasattr(self, "taxes"):
self.print_templates = {
"taxes": "templates/print_formats/includes/taxes.html"
}
def get_feed(self):
return _("From {0} | {1} {2}").format(self.supplier_name, self.currency,
self.grand_total)
def validate(self):
super(BuyingController, self).validate()
if getattr(self, "supplier", None) and not self.supplier_name:
self.supplier_name = frappe.db.get_value("Supplier", self.supplier, "supplier_name")
self.is_item_table_empty()
self.set_qty_as_per_stock_uom()
self.validate_stock_or_nonstock_items()
self.validate_warehouse()
def set_missing_values(self, for_validate=False):
super(BuyingController, self).set_missing_values(for_validate)
self.set_supplier_from_item_default()
self.set_price_list_currency("Buying")
# set contact and address details for supplier, if they are not mentioned
if getattr(self, "supplier", None):
self.update_if_missing(get_party_details(self.supplier, party_type="Supplier"))
self.set_missing_item_details()
def set_supplier_from_item_default(self):
if self.meta.get_field("supplier") and not self.supplier:
for d in self.get("items"):
supplier = frappe.db.get_value("Item", d.item_code, "default_supplier")
if supplier:
self.supplier = supplier
break
def validate_warehouse(self):
from erpnext.stock.utils import validate_warehouse_company
warehouses = list(set([d.warehouse for d in
self.get("items") if getattr(d, "warehouse", None)]))
for w in warehouses:
validate_warehouse_company(w, self.company)
def validate_stock_or_nonstock_items(self):
if self.meta.get_field("taxes") and not self.get_stock_items():
tax_for_valuation = [d.account_head for d in self.get("taxes")
if d.category in ["Valuation", "Valuation and Total"]]
if tax_for_valuation:
frappe.throw(_("Tax Category can not be 'Valuation' or 'Valuation and Total' as all items are non-stock items"))
def set_total_in_words(self):
from frappe.utils import money_in_words
company_currency = get_company_currency(self.company)
if self.meta.get_field("base_in_words"):
self.base_in_words = money_in_words(self.base_grand_total, company_currency)
if self.meta.get_field("in_words"):
self.in_words = money_in_words(self.grand_total, self.currency)
# update valuation rate
def update_valuation_rate(self, parentfield):
"""
item_tax_amount is the total tax amount applied on that item
stored for valuation
TODO: rename item_tax_amount to valuation_tax_amount
"""
stock_items = self.get_stock_items()
stock_items_qty, stock_items_amount = 0, 0
last_stock_item_idx = 1
for d in self.get(parentfield):
if d.item_code and d.item_code in stock_items:
stock_items_qty += flt(d.qty)
stock_items_amount += flt(d.base_net_amount)
last_stock_item_idx = d.idx
total_valuation_amount = sum([flt(d.base_tax_amount_after_discount_amount) for d in self.get("taxes")
if d.category in ["Valuation", "Valuation and Total"]])
valuation_amount_adjustment = total_valuation_amount
for i, item in enumerate(self.get(parentfield)):
if item.item_code and item.qty and item.item_code in stock_items:
item_proportion = flt(item.base_net_amount) / stock_items_amount if stock_items_amount \
else flt(item.qty) / stock_items_qty
if i == (last_stock_item_idx - 1):
item.item_tax_amount = flt(valuation_amount_adjustment,
self.precision("item_tax_amount", item))
else:
item.item_tax_amount = flt(item_proportion * total_valuation_amount,
self.precision("item_tax_amount", item))
valuation_amount_adjustment -= item.item_tax_amount
self.round_floats_in(item)
if flt(item.conversion_factor)==0:
item.conversion_factor = get_conversion_factor(item.item_code, item.uom).get("conversion_factor") or 1.0
qty_in_stock_uom = flt(item.qty * item.conversion_factor)
rm_supp_cost = flt(item.rm_supp_cost) if self.doctype=="Purchase Receipt" else 0.0
landed_cost_voucher_amount = flt(item.landed_cost_voucher_amount) \
if self.doctype == "Purchase Receipt" else 0.0
item.valuation_rate = ((item.base_net_amount + item.item_tax_amount + rm_supp_cost
+ landed_cost_voucher_amount) / qty_in_stock_uom)
else:
item.valuation_rate = 0.0
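	# Hedged worked example of the allocation above (numbers are illustrative):
	# two stock items with base_net_amount 60 and 40 and valuation taxes
	# totalling 10.0 give proportions 0.6 and 0.4, so item_tax_amount becomes
	# 6.0 for the first row, while the last stock row takes the remaining
	# adjustment 10.0 - 6.0 = 4.0; rounding differences land on the last row.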
def validate_for_subcontracting(self):
if not self.is_subcontracted and self.sub_contracted_items:
frappe.throw(_("Please enter 'Is Subcontracted' as Yes or No"))
if self.is_subcontracted == "Yes":
if self.doctype == "Purchase Receipt" and not self.supplier_warehouse:
frappe.throw(_("Supplier Warehouse mandatory for sub-contracted Purchase Receipt"))
for item in self.get("items"):
if item in self.sub_contracted_items and not item.bom:
frappe.throw(_("Please select BOM in BOM field for Item {0}").format(item.item_code))
else:
for item in self.get("items"):
if item.bom:
item.bom = None
def create_raw_materials_supplied(self, raw_material_table):
if self.is_subcontracted=="Yes":
parent_items = []
for item in self.get("items"):
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = 0.0
if item.item_code in self.sub_contracted_items:
self.update_raw_materials_supplied(item, raw_material_table)
if [item.item_code, item.name] not in parent_items:
parent_items.append([item.item_code, item.name])
self.cleanup_raw_materials_supplied(parent_items, raw_material_table)
elif self.doctype == "Purchase Receipt":
for item in self.get("items"):
item.rm_supp_cost = 0.0
def update_raw_materials_supplied(self, item, raw_material_table):
bom_items = self.get_items_from_bom(item.item_code, item.bom)
raw_materials_cost = 0
for bom_item in bom_items:
# check if exists
exists = 0
for d in self.get(raw_material_table):
if d.main_item_code == item.item_code and d.rm_item_code == bom_item.item_code \
and d.reference_name == item.name:
rm, exists = d, 1
break
if not exists:
rm = self.append(raw_material_table, {})
required_qty = flt(bom_item.qty_consumed_per_unit) * flt(item.qty) * flt(item.conversion_factor)
rm.reference_name = item.name
rm.bom_detail_no = bom_item.name
rm.main_item_code = item.item_code
rm.rm_item_code = bom_item.item_code
rm.stock_uom = bom_item.stock_uom
rm.required_qty = required_qty
rm.conversion_factor = item.conversion_factor
if self.doctype == "Purchase Receipt":
rm.consumed_qty = required_qty
rm.description = bom_item.description
if item.batch_no and not rm.batch_no:
rm.batch_no = item.batch_no
# get raw materials rate
if self.doctype == "Purchase Receipt":
from erpnext.stock.utils import get_incoming_rate
rm.rate = get_incoming_rate({
"item_code": bom_item.item_code,
"warehouse": self.supplier_warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"qty": -1 * required_qty,
"serial_no": rm.serial_no
})
if not rm.rate:
from erpnext.stock.stock_ledger import get_valuation_rate
rm.rate = get_valuation_rate(bom_item.item_code, self.supplier_warehouse)
else:
rm.rate = bom_item.rate
rm.amount = required_qty * flt(rm.rate)
raw_materials_cost += flt(rm.amount)
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = raw_materials_cost
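	# Hedged worked example for required_qty above (numbers are illustrative):
	# a BOM consuming 2 units of raw material per finished unit, with item.qty
	# of 5 and a conversion_factor of 1.0, yields required_qty = 2 * 5 * 1.0 = 10.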
def cleanup_raw_materials_supplied(self, parent_items, raw_material_table):
"""Remove all those child items which are no longer present in main item table"""
delete_list = []
for d in self.get(raw_material_table):
if [d.main_item_code, d.reference_name] not in parent_items:
# mark for deletion from doclist
delete_list.append(d)
# delete from doclist
if delete_list:
rm_supplied_details = self.get(raw_material_table)
self.set(raw_material_table, [])
for d in rm_supplied_details:
if d not in delete_list:
self.append(raw_material_table, d)
def get_items_from_bom(self, item_code, bom):
bom_items = frappe.db.sql("""select t2.item_code,
ifnull(t2.qty, 0) / ifnull(t1.quantity, 1) as qty_consumed_per_unit,
t2.rate, t2.stock_uom, t2.name, t2.description
from `tabBOM` t1, `tabBOM Item` t2, tabItem t3
where t2.parent = t1.name and t1.item = %s
and t1.docstatus = 1 and t1.is_active = 1 and t1.name = %s
and t2.item_code = t3.name and t3.is_stock_item = 1""", (item_code, bom), as_dict=1)
if not bom_items:
msgprint(_("Specified BOM {0} does not exist for Item {1}").format(bom, item_code), raise_exception=1)
return bom_items
@property
def sub_contracted_items(self):
if not hasattr(self, "_sub_contracted_items"):
self._sub_contracted_items = []
item_codes = list(set(item.item_code for item in
self.get("items")))
if item_codes:
self._sub_contracted_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_sub_contracted_item=1""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._sub_contracted_items
@property
def purchase_items(self):
if not hasattr(self, "_purchase_items"):
self._purchase_items = []
item_codes = list(set(item.item_code for item in
self.get("items")))
if item_codes:
self._purchase_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_purchase_item='Yes'""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._purchase_items
def is_item_table_empty(self):
if not len(self.get("items")):
frappe.throw(_("Item table can not be blank"))
def set_qty_as_per_stock_uom(self):
for d in self.get("items"):
if d.meta.get_field("stock_qty"):
if not d.conversion_factor:
frappe.throw(_("Row {0}: Conversion Factor is mandatory").format(d.idx))
d.stock_qty = flt(d.qty) * flt(d.conversion_factor)
|
agpl-3.0
|
CloudWareChile/OpenChile
|
openerp/addons/crm/wizard/crm_partner_to_opportunity.py
|
9
|
3111
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
from tools.translate import _
class crm_partner2opportunity(osv.osv_memory):
"""Converts Partner To Opportunity"""
_name = 'crm.partner2opportunity'
_description = 'Partner To Opportunity'
_columns = {
'name' : fields.char('Opportunity Name', size=64, required=True),
'planned_revenue': fields.float('Expected Revenue', digits=(16,2)),
'probability': fields.float('Success Probability', digits=(16,2)),
'partner_id': fields.many2one('res.partner', 'Partner'),
}
def action_cancel(self, cr, uid, ids, context=None):
"""
Closes Partner 2 Opportunity
"""
return {'type':'ir.actions.act_window_close'}
def default_get(self, cr, uid, fields, context=None):
"""
This function gets default values
"""
partner_obj = self.pool.get('res.partner')
data = context and context.get('active_ids', []) or []
res = super(crm_partner2opportunity, self).default_get(cr, uid, fields, context=context)
for partner in partner_obj.browse(cr, uid, data, []):
if 'name' in fields:
res.update({'name': partner.name})
if 'partner_id' in fields:
res.update({'partner_id': data and data[0] or False})
return res
def make_opportunity(self, cr, uid, ids, context=None):
partner_ids = context and context.get('active_ids', []) or []
partner_id = partner_ids[0] if partner_ids else None
partner = self.pool.get('res.partner')
lead = self.pool.get('crm.lead')
data = self.browse(cr, uid, ids, context=context)[0]
opportunity_ids = partner.make_opportunity(cr, uid, partner_ids,
data.name,
data.planned_revenue,
data.probability,
partner_id,
context=context,
)
opportunity_id = opportunity_ids[partner_ids[0]]
return lead.redirect_opportunity_view(cr, uid, opportunity_id, context=context)
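# Hedged usage sketch (ids and values are illustrative): the wizard expects the
# selected partners in the context, as read by default_get() and
# make_opportunity() above:
#
#   ctx = {'active_ids': [partner_id]}
#   wiz_id = wizard_pool.create(cr, uid, {'name': 'New deal'}, context=ctx)
#   wizard_pool.make_opportunity(cr, uid, [wiz_id], context=ctx)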
crm_partner2opportunity()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
fredericlepied/ansible
|
lib/ansible/modules/database/influxdb/influxdb_database.py
|
8
|
5123
|
#!/usr/bin/python
# (c) 2016, Kamil Szczygiel <kamil.szczygiel () intel.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: influxdb_database
short_description: Manage InfluxDB databases
description:
- Manage InfluxDB databases
version_added: 2.1
author: "Kamil Szczygiel (@kamsz)"
requirements:
- "python >= 2.6"
- "influxdb >= 0.9"
options:
hostname:
description:
- The hostname or IP address on which InfluxDB server is listening
required: true
username:
description:
- Username that will be used to authenticate against InfluxDB server
default: root
required: false
password:
description:
- Password that will be used to authenticate against InfluxDB server
default: root
required: false
port:
description:
- The port on which InfluxDB server is listening
default: 8086
required: false
database_name:
description:
- Name of the database that will be created/destroyed
required: true
state:
description:
- Determines if the database should be created or destroyed
choices: ['present', 'absent']
default: present
required: false
'''
EXAMPLES = '''
# Example influxdb_database command from Ansible Playbooks
- name: Create database
influxdb_database:
hostname: "{{influxdb_ip_address}}"
database_name: "{{influxdb_database_name}}"
state: present
- name: Destroy database
influxdb_database:
hostname: "{{influxdb_ip_address}}"
database_name: "{{influxdb_database_name}}"
state: absent
- name: Create database using custom credentials
influxdb_database:
hostname: "{{influxdb_ip_address}}"
username: "{{influxdb_username}}"
password: "{{influxdb_password}}"
database_name: "{{influxdb_database_name}}"
state: present
'''
RETURN = '''
#only defaults
'''
try:
import requests.exceptions
from influxdb import InfluxDBClient
from influxdb import exceptions
HAS_INFLUXDB = True
except ImportError:
HAS_INFLUXDB = False
from ansible.module_utils.basic import AnsibleModule
def influxdb_argument_spec():
return dict(
hostname=dict(required=True, type='str'),
port=dict(default=8086, type='int'),
username=dict(default='root', type='str'),
password=dict(default='root', type='str', no_log=True),
database_name=dict(required=True, type='str')
)
def connect_to_influxdb(module):
hostname = module.params['hostname']
port = module.params['port']
username = module.params['username']
password = module.params['password']
database_name = module.params['database_name']
client = InfluxDBClient(
host=hostname,
port=port,
username=username,
password=password,
database=database_name
)
return client
def find_database(module, client, database_name):
database = None
try:
databases = client.get_list_database()
for db in databases:
if db['name'] == database_name:
database = db
break
except requests.exceptions.ConnectionError as e:
module.fail_json(msg=str(e))
return database
def create_database(module, client, database_name):
if not module.check_mode:
try:
client.create_database(database_name)
except requests.exceptions.ConnectionError as e:
module.fail_json(msg=str(e))
module.exit_json(changed=True)
def drop_database(module, client, database_name):
if not module.check_mode:
try:
client.drop_database(database_name)
except exceptions.InfluxDBClientError as e:
module.fail_json(msg=e.content)
module.exit_json(changed=True)
def main():
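    # Flow: extend the shared argument spec with 'state', connect to the
    # server, look up the database, and converge to the requested state
    # (check mode is honoured inside the create/drop helpers above).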
argument_spec = influxdb_argument_spec()
argument_spec.update(
state=dict(default='present', type='str', choices=['present', 'absent'])
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True
)
if not HAS_INFLUXDB:
module.fail_json(msg='influxdb python package is required for this module')
state = module.params['state']
database_name = module.params['database_name']
client = connect_to_influxdb(module)
database = find_database(module, client, database_name)
if state == 'present':
if database:
module.exit_json(changed=False)
else:
create_database(module, client, database_name)
if state == 'absent':
if database:
drop_database(module, client, database_name)
else:
module.exit_json(changed=False)
if __name__ == '__main__':
main()
|
gpl-3.0
|
CatBakun/AutobahnPython
|
examples/wamp/authentication/client.py
|
27
|
2349
|
###############################################################################
##
## Copyright 2012 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from pprint import pprint
from twisted.python import log
from twisted.internet import reactor
from autobahn.websocket import connectWS
from autobahn.wamp import WampClientFactory, WampCraClientProtocol
class MyClientProtocol(WampCraClientProtocol):
"""
Authenticated WAMP client using WAMP-Challenge-Response-Authentication ("WAMP-CRA").
"""
def onSessionOpen(self):
## "authenticate" as anonymous
##
#d = self.authenticate()
## authenticate as "foobar" with password "secret"
##
d = self.authenticate(authKey = "foobar",
authExtra = None,
authSecret = "secret")
d.addCallbacks(self.onAuthSuccess, self.onAuthError)
def onClose(self, wasClean, code, reason):
reactor.stop()
def onAuthSuccess(self, permissions):
print "Authentication Success!", permissions
self.publish("http://example.com/topics/mytopic1", "Hello, world!")
d = self.call("http://example.com/procedures/hello", "Foobar")
d.addBoth(pprint)
d.addBoth(self.sendClose)
def onAuthError(self, e):
uri, desc, details = e.value.args
print "Authentication Error!", uri, desc, details
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'debug':
log.startLogging(sys.stdout)
debug = True
else:
debug = False
log.startLogging(sys.stdout)
factory = WampClientFactory("ws://localhost:9000", debugWamp = debug)
factory.protocol = MyClientProtocol
connectWS(factory)
reactor.run()
|
apache-2.0
|
lihui7115/ChromiumGStreamerBackend
|
build/android/pylib/instrumentation/setup.py
|
35
|
4043
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates test runner factory and tests for instrumentation tests."""
import logging
import os
from pylib import constants
from pylib import valgrind_tools
from pylib.base import base_setup
from pylib.device import device_utils
from pylib.instrumentation import test_package
from pylib.instrumentation import test_runner
DEVICE_DATA_DIR = 'chrome/test/data'
ISOLATE_FILE_PATHS = {
'AndroidWebViewTest': 'android_webview/android_webview_test_apk.isolate',
'ChromeShellTest': 'chrome/chrome_shell_test_apk.isolate',
'ContentShellTest': 'content/content_shell_test_apk.isolate',
}
DEPS_EXCLUSION_LIST = []
# TODO(mikecase): Remove this function and the constant DEVICE_DATA_DIR
# once all data deps are pushed to the same location on the device.
def _PushExtraSuiteDataDeps(device, test_apk):
"""Pushes some extra data files/dirs needed by some test suite.
Args:
test_apk: The test suite basename for which to return file paths.
"""
if test_apk in ['ChromeTest', 'ContentShellTest']:
test_files = 'net/data/ssl/certificates'
host_device_file_tuple = [
(os.path.join(constants.DIR_SOURCE_ROOT, test_files),
os.path.join(device.GetExternalStoragePath(), test_files))]
device.PushChangedFiles(host_device_file_tuple)
# TODO(mikecase): Remove this function once everything uses
# base_setup.PushDataDeps to push data deps to the device.
def _PushDataDeps(device, test_options):
valgrind_tools.PushFilesForTool(test_options.tool, device)
host_device_file_tuples = []
for dest_host_pair in test_options.test_data:
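    # Each test data entry has the form '<device-relative dest>:<host source>'.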
dst_src = dest_host_pair.split(':', 1)
dst_layer = dst_src[0]
host_src = dst_src[1]
host_test_files_path = os.path.join(constants.DIR_SOURCE_ROOT, host_src)
if os.path.exists(host_test_files_path):
host_device_file_tuples += [(
host_test_files_path,
'%s/%s/%s' % (
device.GetExternalStoragePath(),
DEVICE_DATA_DIR,
dst_layer))]
if host_device_file_tuples:
device.PushChangedFiles(host_device_file_tuples)
def Setup(test_options, devices):
"""Create and return the test runner factory and tests.
Args:
    test_options: An InstrumentationOptions object.
    devices: The devices against which tests will be run.
Returns:
A tuple of (TestRunnerFactory, tests).
"""
if (test_options.coverage_dir and not
os.path.exists(test_options.coverage_dir)):
os.makedirs(test_options.coverage_dir)
test_pkg = test_package.TestPackage(test_options.test_apk_path,
test_options.test_apk_jar_path,
test_options.test_support_apk_path)
tests = test_pkg.GetAllMatchingTests(
test_options.annotations,
test_options.exclude_annotations,
test_options.test_filter)
if not tests:
logging.error('No instrumentation tests to run with current args.')
if test_options.test_data:
device_utils.DeviceUtils.parallel(devices).pMap(
_PushDataDeps, test_options)
if test_options.isolate_file_path:
i = base_setup.GenerateDepsDirUsingIsolate(test_options.test_apk,
test_options.isolate_file_path,
ISOLATE_FILE_PATHS,
DEPS_EXCLUSION_LIST)
def push_data_deps_to_device_dir(device):
base_setup.PushDataDeps(device, device.GetExternalStoragePath(),
test_options)
device_utils.DeviceUtils.parallel(devices).pMap(
push_data_deps_to_device_dir)
if i:
i.Clear()
device_utils.DeviceUtils.parallel(devices).pMap(
_PushExtraSuiteDataDeps, test_options.test_apk)
def TestRunnerFactory(device, shard_index):
return test_runner.TestRunner(test_options, device, shard_index,
test_pkg)
return (TestRunnerFactory, tests)
|
bsd-3-clause
|
ruchee/vimrc
|
vimfiles/bundle/vim-python/submodules/pydocstyle/src/tests/test_cases/sections.py
|
3
|
12481
|
"""A valid module docstring."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
_D213 = 'D213: Multi-line docstring summary should start at the second line'
_D400 = "D400: First line should end with a period (not '!')"
@expect(_D213)
@expect("D405: Section name should be properly capitalized "
"('Returns', not 'returns')")
def not_capitalized(): # noqa: D416
"""Toggle the gizmo.
returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D406: Section name should end with a newline "
"('Returns', not 'Returns:')")
def superfluous_suffix(): # noqa: D416
"""Toggle the gizmo.
Returns:
-------
A value of some sort.
"""
@expect(_D213)
@expect("D407: Missing dashed underline after section ('Returns')")
def no_underline(): # noqa: D416
"""Toggle the gizmo.
Returns
A value of some sort.
"""
@expect(_D213)
@expect("D407: Missing dashed underline after section ('Returns')")
@expect("D414: Section has no content ('Returns')")
def no_underline_and_no_description(): # noqa: D416
"""Toggle the gizmo.
Returns
"""
@expect(_D213)
@expect("D410: Missing blank line after section ('Returns')")
@expect("D414: Section has no content ('Returns')")
@expect("D411: Missing blank line before section ('Yields')")
@expect("D414: Section has no content ('Yields')")
def consecutive_sections(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
Yields
------
Raises
------
Questions.
"""
@expect(_D213)
@expect("D408: Section underline should be in the line following the "
"section's name ('Returns')")
def blank_line_before_underline(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D409: Section underline should match the length of its name "
"(Expected 7 dashes in section 'Returns', got 2)")
def bad_underline_length(): # noqa: D416
"""Toggle the gizmo.
Returns
--
A value of some sort.
"""
@expect(_D213)
@expect("D413: Missing blank line after last section ('Returns')")
def no_blank_line_after_last_section(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D411: Missing blank line before section ('Returns')")
def no_blank_line_before_section(): # noqa: D416
"""Toggle the gizmo.
The function's description.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D214: Section is over-indented ('Returns')")
def section_overindented(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D215: Section underline is over-indented (in section 'Returns')")
def section_underline_overindented(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D215: Section underline is over-indented (in section 'Returns')")
@expect("D413: Missing blank line after last section ('Returns')")
@expect("D414: Section has no content ('Returns')")
def section_underline_overindented_and_contentless(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
"""
@expect(_D213)
def ignore_non_actual_section(): # noqa: D416
"""Toggle the gizmo.
This is the function's description, which will also specify what it
returns
"""
@expect(_D213)
@expect("D401: First line should be in imperative mood "
"(perhaps 'Return', not 'Returns')")
@expect("D400: First line should end with a period (not 's')")
@expect("D415: First line should end with a period, question "
"mark, or exclamation point (not 's')")
@expect("D205: 1 blank line required between summary line and description "
"(found 0)")
def section_name_in_first_line(): # noqa: D416
"""Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D405: Section name should be properly capitalized "
"('Short Summary', not 'Short summary')")
@expect("D412: No blank lines allowed between a section header and its "
"content ('Short Summary')")
@expect("D409: Section underline should match the length of its name "
"(Expected 7 dashes in section 'Returns', got 6)")
@expect("D410: Missing blank line after section ('Returns')")
@expect("D411: Missing blank line before section ('Raises')")
@expect("D406: Section name should end with a newline "
"('Raises', not 'Raises:')")
@expect("D407: Missing dashed underline after section ('Raises')")
def multiple_sections(): # noqa: D416
"""Toggle the gizmo.
Short summary
-------------
This is the function's description, which will also specify what it
returns.
Returns
------
Many many wonderful things.
Raises:
My attention.
"""
@expect(_D213)
def false_positive_section_prefix(): # noqa: D416
"""Toggle the gizmo.
Parameters
----------
attributes_are_fun: attributes for the function.
"""
@expect(_D213)
def section_names_as_parameter_names(): # noqa: D416
"""Toggle the gizmo.
Parameters
----------
notes : list
A list of wonderful notes.
examples: list
A list of horrible examples.
"""
@expect(_D213)
@expect("D414: Section has no content ('Returns')")
def valid_google_style_section(): # noqa: D406, D407
"""Toggle the gizmo.
Args:
note: A random string.
Returns:
Raises:
RandomError: A random error that occurs randomly.
"""
@expect(_D213)
@expect("D416: Section name should end with a colon "
"('Args:', not 'Args')")
def missing_colon_google_style_section(): # noqa: D406, D407
"""Toggle the gizmo.
Args
note: A random string.
"""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'bar' docstring)", func_name="bar")
def _test_nested_functions():
x = 1
def bar(y=2): # noqa: D207, D213, D406, D407
"""Nested function test for docstrings.
Will this work when referencing x?
Args:
x: Test something
that is broken.
"""
print(x)
@expect(_D213)
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'test_missing_google_args' docstring)")
def test_missing_google_args(x=1, y=2, _private=3): # noqa: D406, D407
"""Toggle the gizmo.
Args:
x (int): The greatest integer.
"""
class TestGoogle: # noqa: D203
"""Test class."""
def test_method(self, test, another_test, _): # noqa: D213, D407
"""Test a valid args section.
Args:
test: A parameter.
another_test: Another parameter.
"""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args' docstring)", arg_count=5)
def test_missing_args(self, test, x, y, z=3, _private_arg=3): # noqa: D213, D407
"""Test a valid args section.
Args:
x: Another parameter.
"""
@classmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args_class_method' docstring)", arg_count=5)
def test_missing_args_class_method(cls, test, x, y, _, z=3): # noqa: D213, D407
"""Test a valid args section.
Args:
x: Another parameter. The parameter below is missing description.
y:
"""
@staticmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) a, y, z are missing descriptions in "
"'test_missing_args_static_method' docstring)", arg_count=4)
def test_missing_args_static_method(a, x, y, _test, z=3): # noqa: D213, D407
"""Test a valid args section.
Args:
x: Another parameter.
"""
@staticmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) a, b are missing descriptions in "
"'test_missing_docstring' docstring)", arg_count=2)
def test_missing_docstring(a, b): # noqa: D213, D407
"""Test a valid args section.
Args:
a:
"""
@staticmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) skip, verbose are missing descriptions in "
"'test_missing_docstring_another' docstring)", arg_count=2)
def test_missing_docstring_another(skip, verbose): # noqa: D213, D407
"""Do stuff.
Args:
skip (:attr:`.Skip`):
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Etiam at tellus a tellus faucibus maximus. Curabitur tellus
mauris, semper id vehicula ac, feugiat ut tortor.
verbose (bool):
            If True, print out as much information as possible.
If False, print out concise "one-liner" information.
"""
@expect(_D213)
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'test_missing_numpy_args' docstring)")
def test_missing_numpy_args(_private_arg=0, x=1, y=2): # noqa: D406, D407
"""Toggle the gizmo.
Parameters
----------
x : int
The greatest integer in the history \
of the entire world.
"""
class TestNumpy: # noqa: D203
"""Test class."""
def test_method(self, test, another_test, z, _, x=1, y=2, _private_arg=1): # noqa: D213, D407
"""Test a valid args section.
Some long string with a \
line continuation.
Parameters
----------
test, another_test
Some parameters without type.
z : some parameter with a very long type description that requires a \
line continuation.
But no further description.
x, y : int
Some integer parameters.
"""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args' docstring)", arg_count=5)
def test_missing_args(self, test, x, y, z=3, t=1, _private=0): # noqa: D213, D407
"""Test a valid args section.
Parameters
----------
x, t : int
Some parameters.
"""
@classmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args_class_method' docstring)", arg_count=4)
def test_missing_args_class_method(cls, test, x, y, z=3): # noqa: D213, D407
"""Test a valid args section.
Parameters
----------
z
x
Another parameter. The parameters y, test below are
missing descriptions. The parameter z above is also missing
a description.
y
test
"""
@staticmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) a, z are missing descriptions in "
"'test_missing_args_static_method' docstring)", arg_count=3)
def test_missing_args_static_method(a, x, y, z=3, t=1): # noqa: D213, D407
"""Test a valid args section.
Parameters
----------
x, y
Another parameter.
t : int
Yet another parameter.
"""
@staticmethod
def test_mixing_numpy_and_google(danger): # noqa: D213
"""Repro for #388.
Parameters
----------
danger
Zoneeeeee!
"""
class TestIncorrectIndent: # noqa: D203
"""Test class."""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'test_incorrect_indent' docstring)", arg_count=3)
def test_incorrect_indent(self, x=1, y=2): # noqa: D207, D213, D407
"""Reproducing issue #437.
Testing this incorrectly indented docstring.
Args:
x: Test argument.
"""
|
mit
|
cavestruz/L500analysis
|
caps/util/sg_filter.py
|
1
|
3482
|
from math import *
from numpy import *
def resub(D, rhs):
""" solves D D^T = rhs by resubstituion.
D is lower triangle-matrix from cholesky-decomposition """
M = D.shape[0]
x1= zeros((M,),float)
x2= zeros((M,),float)
# resub step 1
for l in range(M):
sum = rhs[l]
for n in range(l):
sum -= D[l,n]*x1[n]
x1[l] = sum/D[l,l]
# resub step 2
for l in range(M-1,-1,-1):
sum = x1[l]
for n in range(l+1,M):
sum -= D[n,l]*x2[n]
x2[l] = sum/D[l,l]
return x2
def calc_coeff(num_points, pol_degree, diff_order=0):
""" calculates filter coefficients for symmetric savitzky-golay filter.
see: http://www.nrbook.com/a/bookcpdf/c14-8.pdf
num_points means that 2*num_points+1 values contribute to the
smoother.
    pol_degree is the degree of the fitting polynomial.
    diff_order is the degree of implicit differentiation:
    0 means the filter smooths the function itself,
    1 means the filter smooths the first derivative of the function,
    and so on ...
    """
"""
# setup normal matrix
A = zeros((2*num_points+1, pol_degree+1), float)
for i in range(2*num_points+1):
for j in range(pol_degree+1):
A[i,j] = pow(i-num_points, j)
# calculate diff_order-th row of inv(A^T A)
ATA = dot(A.transpose(), A)
rhs = zeros((pol_degree+1,), float)
rhs[diff_order] = 1
D = linalg.cholesky(ATA)
wvec = resub(D, rhs)
# calculate filter-coefficients
coeff = zeros((2*num_points+1,), float)
for n in range(-num_points, num_points+1):
x = 0.0
for m in range(pol_degree+1):
x += wvec[m]*pow(n, m)
coeff[n+num_points] = x
return coeff
def savgol(np, nl, nr, ld, m):
    """ Savitzky-Golay coefficients, ported from the Numerical Recipes
    savgol() routine.
    np is the number of coefficients returned (in wrap-around order),
    nl/nr are the number of points left/right of each data point, ld the
    derivative order (0 = plain smoothing), m the fitting-polynomial degree.
    """
    # set up the normal-equation matrix A^T A of the least-squares fit
    A = zeros((m+1, m+1))
    for ipj in range(min(2*m, nl+nr) + 1):
        s = 1.0 if ipj == 0 else 0.0
        s += sum([float(k)**ipj for k in range(1, nr+1)])
        s += sum([float(-k)**ipj for k in range(1, nl+1)])
        mm = min(ipj, 2*m - ipj)
        for imj in range(-mm, mm+1, 2):
            A[(ipj+imj)//2, (ipj-imj)//2] = s
    # solve A b = e_ld for the ld-th row of inv(A); this replaces the
    # ludcmp/lubksb calls of the original C version
    b = zeros((m+1,), float)
    b[ld] = 1.0
    b = linalg.solve(A, b)
    # evaluate the coefficients, stored in wrap-around order as in NR
    coeff = zeros(np)
    for k in xrange(-nl, nr+1):
        s = b[0]
        fac = 1.0
        for mm in xrange(1, m+1):
            fac *= k
            s += b[mm]*fac
        kk = (np-k) % np
        coeff[kk] = s
    return coeff
#def smooth(signal, coeff):
#
# """ applies coefficients calculated by calc_coeff()
# to signal """
#
# N = size(coeff-1)/2
# res = convolve(signal, coeff)
# return res[N:-N]
def smooth(signal,mbin,m,diff_order=0):
smoothed = zeros(len(signal))
for i in xrange(len(signal)):
nl = nr = mbin
if ( i < mbin ):
nl = i
nr = i
elif ( i > len(signal)-mbin-1 ):
nr = len(signal)-i-1
nl = nr
nc = nl+nr+1
if ( nc > 1 ):
coeff = calc_coeff(nl, m, diff_order)
# print coeff, sum(coeff)
#for k in xrange(-nl,nr):
# print k, nl, nr, nc, signal[i+k], (nc-k)%nc, coeff[(nc-k)%nc]
smoothed[i] = sum([signal[i+k]*coeff[k+nl] for k in xrange(-nl,nr+1)])
else:
if ( diff_order == 0 ):
smoothed[i] = signal[i]
else:
smoothed[i] = 0.0
return smoothed
#y = array([x**2 for x in arange(0,10,0.1)])
#yd = array([2*x for x in arange(0,10,0.1)])
#y1 = smooth(y,4,2,0)
#y2 = smooth(y,4,2,1)/0.1
#print ((y2-yd)/yd)
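# A minimal smoke test (a sketch; assumes only numpy, star-imported above):
# smooth a noisy parabola with an 11-point cubic Savitzky-Golay window.
if __name__ == '__main__':
    xs = arange(0, 10, 0.1)
    noisy = array([x**2 for x in xs]) + 0.5*random.randn(len(xs))
    print noisy[50], smooth(noisy, 5, 3, diff_order=0)[50]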
|
mit
|
tzewangdorje/SIPserv
|
Twisted-13.1.0/twisted/internet/test/test_udp_internals.py
|
35
|
5001
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the internal implementation details of L{twisted.internet.udp}.
"""
from __future__ import division, absolute_import
import socket
from twisted.trial import unittest
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import udp
from twisted.python.runtime import platformType
if platformType == 'win32':
from errno import WSAEWOULDBLOCK as EWOULDBLOCK
from errno import WSAECONNREFUSED as ECONNREFUSED
else:
from errno import EWOULDBLOCK
from errno import ECONNREFUSED
class StringUDPSocket(object):
"""
A fake UDP socket object, which returns a fixed sequence of strings and/or
socket errors. Useful for testing.
@ivar retvals: A C{list} containing either strings or C{socket.error}s.
@ivar connectedAddr: The address the socket is connected to.
"""
def __init__(self, retvals):
self.retvals = retvals
self.connectedAddr = None
def connect(self, addr):
self.connectedAddr = addr
def recvfrom(self, size):
"""
Return (or raise) the next value from C{self.retvals}.
"""
ret = self.retvals.pop(0)
if isinstance(ret, socket.error):
raise ret
return ret, None
class KeepReads(DatagramProtocol):
"""
Accumulate reads in a list.
"""
def __init__(self):
self.reads = []
def datagramReceived(self, data, addr):
self.reads.append(data)
class ErrorsTestCase(unittest.SynchronousTestCase):
"""
Error handling tests for C{udp.Port}.
"""
def test_socketReadNormal(self):
"""
        Socket reads with some good data, followed by a socket error that can
        be ignored, cause reading to stop and no log messages to be logged.
"""
# Add a fake error to the list of ignorables:
udp._sockErrReadIgnore.append(-7000)
self.addCleanup(udp._sockErrReadIgnore.remove, -7000)
protocol = KeepReads()
port = udp.Port(None, protocol)
# Normal result, no errors
port.socket = StringUDPSocket(
[b"result", b"123", socket.error(-7000), b"456",
socket.error(-7000)])
port.doRead()
# Read stops on error:
self.assertEqual(protocol.reads, [b"result", b"123"])
port.doRead()
self.assertEqual(protocol.reads, [b"result", b"123", b"456"])
def test_readImmediateError(self):
"""
If the socket is unconnected, socket reads with an immediate
connection refusal are ignored, and reading stops. The protocol's
C{connectionRefused} method is not called.
"""
# Add a fake error to the list of those that count as connection
# refused:
udp._sockErrReadRefuse.append(-6000)
self.addCleanup(udp._sockErrReadRefuse.remove, -6000)
protocol = KeepReads()
# Fail if connectionRefused is called:
protocol.connectionRefused = lambda: 1/0
port = udp.Port(None, protocol)
# Try an immediate "connection refused"
port.socket = StringUDPSocket([b"a", socket.error(-6000), b"b",
socket.error(EWOULDBLOCK)])
port.doRead()
# Read stops on error:
self.assertEqual(protocol.reads, [b"a"])
# Read again:
port.doRead()
self.assertEqual(protocol.reads, [b"a", b"b"])
def test_connectedReadImmediateError(self):
"""
        If the socket is connected, socket reads with an immediate
connection refusal are ignored, and reading stops. The protocol's
C{connectionRefused} method is called.
"""
# Add a fake error to the list of those that count as connection
# refused:
udp._sockErrReadRefuse.append(-6000)
self.addCleanup(udp._sockErrReadRefuse.remove, -6000)
protocol = KeepReads()
refused = []
protocol.connectionRefused = lambda: refused.append(True)
port = udp.Port(None, protocol)
port.socket = StringUDPSocket([b"a", socket.error(-6000), b"b",
socket.error(EWOULDBLOCK)])
port.connect("127.0.0.1", 9999)
# Read stops on error:
port.doRead()
self.assertEqual(protocol.reads, [b"a"])
self.assertEqual(refused, [True])
# Read again:
port.doRead()
self.assertEqual(protocol.reads, [b"a", b"b"])
self.assertEqual(refused, [True])
def test_readUnknownError(self):
"""
Socket reads with an unknown socket error are raised.
"""
protocol = KeepReads()
port = udp.Port(None, protocol)
# Some good data, followed by an unknown error
port.socket = StringUDPSocket([b"good", socket.error(-1337)])
self.assertRaises(socket.error, port.doRead)
self.assertEqual(protocol.reads, [b"good"])
|
gpl-3.0
|
vikas1885/test1
|
lms/djangoapps/django_comment_client/utils.py
|
43
|
28663
|
from collections import defaultdict
from datetime import datetime
import json
import logging
import pytz
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import connection
from django.http import HttpResponse
from django.utils.timezone import UTC
import pystache_custom as pystache
from opaque_keys.edx.locations import i4xEncoder
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from django_comment_common.models import Role, FORUM_ROLE_STUDENT
from django_comment_client.permissions import check_permissions_by_view, has_permission, get_team
from django_comment_client.settings import MAX_COMMENT_DEPTH
from edxmako import lookup_template
from courseware import courses
from courseware.access import has_access
from openedx.core.djangoapps.content.course_structures.models import CourseStructure
from openedx.core.djangoapps.course_groups.cohorts import (
get_course_cohort_settings, get_cohort_by_id, get_cohort_id, is_course_cohorted
)
from openedx.core.djangoapps.course_groups.models import CourseUserGroup
log = logging.getLogger(__name__)
def extract(dic, keys):
return {k: dic.get(k) for k in keys}
def strip_none(dic):
return dict([(k, v) for k, v in dic.iteritems() if v is not None])
def strip_blank(dic):
def _is_blank(v):
return isinstance(v, str) and len(v.strip()) == 0
return dict([(k, v) for k, v in dic.iteritems() if not _is_blank(v)])
# TODO should we be checking if d1 and d2 have the same keys with different values?
def merge_dict(dic1, dic2):
return dict(dic1.items() + dic2.items())
def get_role_ids(course_id):
roles = Role.objects.filter(course_id=course_id).exclude(name=FORUM_ROLE_STUDENT)
return dict([(role.name, list(role.users.values_list('id', flat=True))) for role in roles])
def has_discussion_privileges(user, course_id):
"""Returns True if the user is privileged in teams discussions for
this course. The user must be one of Discussion Admin, Moderator,
or Community TA.
Args:
user (User): The user to check privileges for.
course_id (CourseKey): A key for the course to check privileges for.
Returns:
bool
"""
# get_role_ids returns a dictionary of only admin, moderator and community TAs.
roles = get_role_ids(course_id)
for role in roles:
if user.id in roles[role]:
return True
return False
def has_forum_access(uname, course_id, rolename):
try:
role = Role.objects.get(name=rolename, course_id=course_id)
except Role.DoesNotExist:
return False
return role.users.filter(username=uname).exists()
def has_required_keys(module):
"""Returns True iff module has the proper attributes for generating metadata with get_discussion_id_map_entry()"""
for key in ('discussion_id', 'discussion_category', 'discussion_target'):
if getattr(module, key, None) is None:
log.debug("Required key '%s' not in discussion %s, leaving out of category map", key, module.location)
return False
return True
def get_accessible_discussion_modules(course, user, include_all=False): # pylint: disable=invalid-name
"""
Return a list of all valid discussion modules in this course that
are accessible to the given user.
"""
all_modules = modulestore().get_items(course.id, qualifiers={'category': 'discussion'})
return [
module for module in all_modules
if has_required_keys(module) and (include_all or has_access(user, 'load', module, course.id))
]
def get_discussion_id_map_entry(module):
"""
Returns a tuple of (discussion_id, metadata) suitable for inclusion in the results of get_discussion_id_map().
"""
return (
module.discussion_id,
{
"location": module.location,
"title": module.discussion_category.split("/")[-1].strip() + " / " + module.discussion_target
}
)
class DiscussionIdMapIsNotCached(Exception):
"""Thrown when the discussion id map is not cached for this course, but an attempt was made to access it."""
pass
def get_cached_discussion_key(course, discussion_id):
"""
Returns the usage key of the discussion module associated with discussion_id if it is cached. If the discussion id
map is cached but does not contain discussion_id, returns None. If the discussion id map is not cached for course,
raises a DiscussionIdMapIsNotCached exception.
"""
try:
cached_mapping = CourseStructure.objects.get(course_id=course.id).discussion_id_map
if not cached_mapping:
raise DiscussionIdMapIsNotCached()
return cached_mapping.get(discussion_id)
except CourseStructure.DoesNotExist:
raise DiscussionIdMapIsNotCached()
def get_cached_discussion_id_map(course, discussion_ids, user):
"""
Returns a dict mapping discussion_ids to respective discussion module metadata if it is cached and visible to the
user. If not, returns the result of get_discussion_id_map
"""
try:
entries = []
for discussion_id in discussion_ids:
key = get_cached_discussion_key(course, discussion_id)
if not key:
continue
module = modulestore().get_item(key)
if not (has_required_keys(module) and has_access(user, 'load', module, course.id)):
continue
entries.append(get_discussion_id_map_entry(module))
return dict(entries)
except DiscussionIdMapIsNotCached:
return get_discussion_id_map(course, user)
def get_discussion_id_map(course, user):
"""
Transform the list of this course's discussion modules (visible to a given user) into a dictionary of metadata keyed
by discussion_id.
"""
return dict(map(get_discussion_id_map_entry, get_accessible_discussion_modules(course, user)))
def _filter_unstarted_categories(category_map):
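    # Walk the category tree with an explicit stack, copying only entries and
    # subcategories whose start_date has already passed into the result map.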
now = datetime.now(UTC())
result_map = {}
unfiltered_queue = [category_map]
filtered_queue = [result_map]
while unfiltered_queue:
unfiltered_map = unfiltered_queue.pop()
filtered_map = filtered_queue.pop()
filtered_map["children"] = []
filtered_map["entries"] = {}
filtered_map["subcategories"] = {}
for child in unfiltered_map["children"]:
if child in unfiltered_map["entries"]:
if unfiltered_map["entries"][child]["start_date"] <= now:
filtered_map["children"].append(child)
filtered_map["entries"][child] = {}
for key in unfiltered_map["entries"][child]:
if key != "start_date":
filtered_map["entries"][child][key] = unfiltered_map["entries"][child][key]
else:
log.debug(u"Filtering out:%s with start_date: %s", child, unfiltered_map["entries"][child]["start_date"])
else:
if unfiltered_map["subcategories"][child]["start_date"] < now:
filtered_map["children"].append(child)
filtered_map["subcategories"][child] = {}
unfiltered_queue.append(unfiltered_map["subcategories"][child])
filtered_queue.append(filtered_map["subcategories"][child])
return result_map
def _sort_map_entries(category_map, sort_alpha):
things = []
for title, entry in category_map["entries"].items():
if entry["sort_key"] is None and sort_alpha:
entry["sort_key"] = title
things.append((title, entry))
for title, category in category_map["subcategories"].items():
things.append((title, category))
_sort_map_entries(category_map["subcategories"][title], sort_alpha)
category_map["children"] = [x[0] for x in sorted(things, key=lambda x: x[1]["sort_key"])]
def get_discussion_category_map(course, user, cohorted_if_in_list=False, exclude_unstarted=True):
"""
Transform the list of this course's discussion modules into a recursive dictionary structure. This is used
to render the discussion category map in the discussion tab sidebar for a given user.
Args:
course: Course for which to get the ids.
user: User to check for access.
cohorted_if_in_list (bool): If True, inline topics are marked is_cohorted only if they are
in course_cohort_settings.discussion_topics.
Example:
>>> example = {
>>> "entries": {
>>> "General": {
>>> "sort_key": "General",
>>> "is_cohorted": True,
>>> "id": "i4x-edx-eiorguegnru-course-foobarbaz"
>>> }
>>> },
>>> "children": ["General", "Getting Started"],
>>> "subcategories": {
>>> "Getting Started": {
>>> "subcategories": {},
>>> "children": [
>>> "Working with Videos",
>>> "Videos on edX"
>>> ],
>>> "entries": {
>>> "Working with Videos": {
>>> "sort_key": None,
>>> "is_cohorted": False,
>>> "id": "d9f970a42067413cbb633f81cfb12604"
>>> },
>>> "Videos on edX": {
>>> "sort_key": None,
>>> "is_cohorted": False,
>>> "id": "98d8feb5971041a085512ae22b398613"
>>> }
>>> }
>>> }
>>> }
>>> }
"""
unexpanded_category_map = defaultdict(list)
modules = get_accessible_discussion_modules(course, user)
course_cohort_settings = get_course_cohort_settings(course.id)
for module in modules:
id = module.discussion_id
title = module.discussion_target
sort_key = module.sort_key
category = " / ".join([x.strip() for x in module.discussion_category.split("/")])
# Handle case where module.start is None
entry_start_date = module.start if module.start else datetime.max.replace(tzinfo=pytz.UTC)
unexpanded_category_map[category].append({"title": title, "id": id, "sort_key": sort_key, "start_date": entry_start_date})
category_map = {"entries": defaultdict(dict), "subcategories": defaultdict(dict)}
for category_path, entries in unexpanded_category_map.items():
node = category_map["subcategories"]
path = [x.strip() for x in category_path.split("/")]
# Find the earliest start date for the entries in this category
category_start_date = None
for entry in entries:
if category_start_date is None or entry["start_date"] < category_start_date:
category_start_date = entry["start_date"]
for level in path[:-1]:
if level not in node:
node[level] = {"subcategories": defaultdict(dict),
"entries": defaultdict(dict),
"sort_key": level,
"start_date": category_start_date}
else:
if node[level]["start_date"] > category_start_date:
node[level]["start_date"] = category_start_date
node = node[level]["subcategories"]
level = path[-1]
if level not in node:
node[level] = {"subcategories": defaultdict(dict),
"entries": defaultdict(dict),
"sort_key": level,
"start_date": category_start_date}
else:
if node[level]["start_date"] > category_start_date:
node[level]["start_date"] = category_start_date
always_cohort_inline_discussions = ( # pylint: disable=invalid-name
not cohorted_if_in_list and course_cohort_settings.always_cohort_inline_discussions
)
dupe_counters = defaultdict(lambda: 0) # counts the number of times we see each title
for entry in entries:
is_entry_cohorted = (
course_cohort_settings.is_cohorted and (
always_cohort_inline_discussions or entry["id"] in course_cohort_settings.cohorted_discussions
)
)
title = entry["title"]
if node[level]["entries"][title]:
# If we've already seen this title, append an incrementing number to disambiguate
                        # the category from other categories sharing the same title in the course discussion UI.
dupe_counters[title] += 1
title = u"{title} ({counter})".format(title=title, counter=dupe_counters[title])
node[level]["entries"][title] = {"id": entry["id"],
"sort_key": entry["sort_key"],
"start_date": entry["start_date"],
"is_cohorted": is_entry_cohorted}
# TODO. BUG! : course location is not unique across multiple course runs!
# (I think Kevin already noticed this) Need to send course_id with requests, store it
# in the backend.
for topic, entry in course.discussion_topics.items():
category_map['entries'][topic] = {
"id": entry["id"],
"sort_key": entry.get("sort_key", topic),
"start_date": datetime.now(UTC()),
"is_cohorted": (course_cohort_settings.is_cohorted and
entry["id"] in course_cohort_settings.cohorted_discussions)
}
_sort_map_entries(category_map, course.discussion_sort_alpha)
return _filter_unstarted_categories(category_map) if exclude_unstarted else category_map
def discussion_category_id_access(course, user, discussion_id):
"""
Returns True iff the given discussion_id is accessible for user in course.
Assumes that the commentable identified by discussion_id has a null or 'course' context.
Uses the discussion id cache if available, falling back to
get_discussion_categories_ids if there is no cache.
"""
if discussion_id in course.top_level_discussion_topic_ids:
return True
try:
key = get_cached_discussion_key(course, discussion_id)
if not key:
return False
module = modulestore().get_item(key)
return has_required_keys(module) and has_access(user, 'load', module, course.id)
except DiscussionIdMapIsNotCached:
return discussion_id in get_discussion_categories_ids(course, user)
def get_discussion_categories_ids(course, user, include_all=False):
"""
Returns a list of available ids of categories for the course that
are accessible to the given user.
Args:
course: Course for which to get the ids.
user: User to check for access.
include_all (bool): If True, return all ids. Used by configuration views.
"""
accessible_discussion_ids = [
module.discussion_id for module in get_accessible_discussion_modules(course, user, include_all=include_all)
]
return course.top_level_discussion_topic_ids + accessible_discussion_ids
class JsonResponse(HttpResponse):
def __init__(self, data=None):
content = json.dumps(data, cls=i4xEncoder)
super(JsonResponse, self).__init__(content,
mimetype='application/json; charset=utf-8')
class JsonError(HttpResponse):
def __init__(self, error_messages=[], status=400):
if isinstance(error_messages, basestring):
error_messages = [error_messages]
content = json.dumps({'errors': error_messages}, indent=2, ensure_ascii=False)
super(JsonError, self).__init__(content,
mimetype='application/json; charset=utf-8', status=status)
class HtmlResponse(HttpResponse):
def __init__(self, html=''):
super(HtmlResponse, self).__init__(html, content_type='text/plain')
class ViewNameMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
request.view_name = view_func.__name__
class QueryCountDebugMiddleware(object):
"""
This middleware will log the number of queries run
and the total time taken for each request (with a
status code of 200). It does not currently support
multi-db setups.
"""
def process_response(self, request, response):
if response.status_code == 200:
total_time = 0
for query in connection.queries:
query_time = query.get('time')
if query_time is None:
# django-debug-toolbar monkeypatches the connection
# cursor wrapper and adds extra information in each
# item in connection.queries. The query time is stored
# under the key "duration" rather than "time" and is
# in milliseconds, not seconds.
query_time = query.get('duration', 0) / 1000
total_time += float(query_time)
log.info(u'%s queries run, total %s seconds', len(connection.queries), total_time)
return response
def get_ability(course_id, content, user):
return {
'editable': check_permissions_by_view(user, course_id, content, "update_thread" if content['type'] == 'thread' else "update_comment"),
'can_reply': check_permissions_by_view(user, course_id, content, "create_comment" if content['type'] == 'thread' else "create_sub_comment"),
'can_delete': check_permissions_by_view(user, course_id, content, "delete_thread" if content['type'] == 'thread' else "delete_comment"),
'can_openclose': check_permissions_by_view(user, course_id, content, "openclose_thread") if content['type'] == 'thread' else False,
'can_vote': check_permissions_by_view(user, course_id, content, "vote_for_thread" if content['type'] == 'thread' else "vote_for_comment"),
}
# TODO: RENAME
def get_annotated_content_info(course_id, content, user, user_info):
"""
Get metadata for an individual content (thread or comment)
"""
voted = ''
if content['id'] in user_info['upvoted_ids']:
voted = 'up'
elif content['id'] in user_info['downvoted_ids']:
voted = 'down'
return {
'voted': voted,
'subscribed': content['id'] in user_info['subscribed_thread_ids'],
'ability': get_ability(course_id, content, user),
}
# TODO: RENAME
def get_annotated_content_infos(course_id, thread, user, user_info):
"""
Get metadata for a thread and its children
"""
infos = {}
def annotate(content):
infos[str(content['id'])] = get_annotated_content_info(course_id, content, user, user_info)
for child in (
content.get('children', []) +
content.get('endorsed_responses', []) +
content.get('non_endorsed_responses', [])
):
annotate(child)
annotate(thread)
return infos
def get_metadata_for_threads(course_id, threads, user, user_info):
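    # Merge the per-thread annotation dicts into a single id -> metadata map.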
def infogetter(thread):
return get_annotated_content_infos(course_id, thread, user, user_info)
metadata = reduce(merge_dict, map(infogetter, threads), {})
return metadata
# put this method in utils.py to avoid circular import dependency between helpers and mustache_helpers
def render_mustache(template_name, dictionary, *args, **kwargs):
template = lookup_template('main', template_name).source
return pystache.render(template, dictionary)
def permalink(content):
if isinstance(content['course_id'], CourseKey):
course_id = content['course_id'].to_deprecated_string()
else:
course_id = content['course_id']
if content['type'] == 'thread':
return reverse('django_comment_client.forum.views.single_thread',
args=[course_id, content['commentable_id'], content['id']])
else:
return reverse('django_comment_client.forum.views.single_thread',
args=[course_id, content['commentable_id'], content['thread_id']]) + '#' + content['id']
def extend_content(content):
roles = {}
if content.get('user_id'):
try:
user = User.objects.get(pk=content['user_id'])
roles = dict(('name', role.name.lower()) for role in user.roles.filter(course_id=content['course_id']))
except User.DoesNotExist:
log.error(
'User ID %s in comment content %s but not in our DB.',
content.get('user_id'),
content.get('id')
)
content_info = {
'displayed_title': content.get('highlighted_title') or content.get('title', ''),
'displayed_body': content.get('highlighted_body') or content.get('body', ''),
'permalink': permalink(content),
'roles': roles,
'updated': content['created_at'] != content['updated_at'],
}
return merge_dict(content, content_info)
def add_courseware_context(content_list, course, user, id_map=None):
"""
Decorates `content_list` with courseware metadata using the discussion id map cache if available.
"""
if id_map is None:
id_map = get_cached_discussion_id_map(
course,
[content['commentable_id'] for content in content_list],
user
)
for content in content_list:
commentable_id = content['commentable_id']
if commentable_id in id_map:
location = id_map[commentable_id]["location"].to_deprecated_string()
title = id_map[commentable_id]["title"]
url = reverse('jump_to', kwargs={"course_id": course.id.to_deprecated_string(),
"location": location})
content.update({"courseware_url": url, "courseware_title": title})
def prepare_content(content, course_key, is_staff=False, course_is_cohorted=None):
"""
This function is used to pre-process thread and comment models in various
ways before adding them to the HTTP response. This includes fixing empty
attribute fields, enforcing author anonymity, and enriching metadata around
group ownership and response endorsement.
@TODO: not all response pre-processing steps are currently integrated into
this function.
Arguments:
content (dict): A thread or comment.
course_key (CourseKey): The course key of the course.
is_staff (bool): Whether the user is a staff member.
course_is_cohorted (bool): Whether the course is cohorted.
"""
fields = [
'id', 'title', 'body', 'course_id', 'anonymous', 'anonymous_to_peers',
'endorsed', 'parent_id', 'thread_id', 'votes', 'closed', 'created_at',
'updated_at', 'depth', 'type', 'commentable_id', 'comments_count',
'at_position_list', 'children', 'highlighted_title', 'highlighted_body',
'courseware_title', 'courseware_url', 'unread_comments_count',
'read', 'group_id', 'group_name', 'pinned', 'abuse_flaggers',
'stats', 'resp_skip', 'resp_limit', 'resp_total', 'thread_type',
'endorsed_responses', 'non_endorsed_responses', 'non_endorsed_resp_total',
'endorsement', 'context'
]
if (content.get('anonymous') is False) and ((content.get('anonymous_to_peers') is False) or is_staff):
fields += ['username', 'user_id']
content = strip_none(extract(content, fields))
if content.get("endorsement"):
endorsement = content["endorsement"]
endorser = None
if endorsement["user_id"]:
try:
endorser = User.objects.get(pk=endorsement["user_id"])
except User.DoesNotExist:
log.error(
"User ID %s in endorsement for comment %s but not in our DB.",
content.get('user_id'),
content.get('id')
)
# Only reveal endorser if requester can see author or if endorser is staff
if (
endorser and
("username" in fields or has_permission(endorser, "endorse_comment", course_key))
):
endorsement["username"] = endorser.username
else:
del endorsement["user_id"]
if course_is_cohorted is None:
course_is_cohorted = is_course_cohorted(course_key)
for child_content_key in ["children", "endorsed_responses", "non_endorsed_responses"]:
if child_content_key in content:
children = [
prepare_content(child, course_key, is_staff, course_is_cohorted=course_is_cohorted)
for child in content[child_content_key]
]
content[child_content_key] = children
if course_is_cohorted:
# Augment the specified thread info to include the group name if a group id is present.
if content.get('group_id') is not None:
content['group_name'] = get_cohort_by_id(course_key, content.get('group_id')).name
else:
# Remove any cohort information that might remain if the course had previously been cohorted.
content.pop('group_id', None)
return content
def get_group_id_for_comments_service(request, course_key, commentable_id=None):
"""
Given a user requesting content within a `commentable_id`, determine the
group_id which should be passed to the comments service.
Returns:
int: the group_id to pass to the comments service or None if nothing
should be passed
Raises:
ValueError if the requested group_id is invalid
"""
if commentable_id is None or is_commentable_cohorted(course_key, commentable_id):
if request.method == "GET":
requested_group_id = request.GET.get('group_id')
elif request.method == "POST":
requested_group_id = request.POST.get('group_id')
if has_permission(request.user, "see_all_cohorts", course_key):
if not requested_group_id:
return None
try:
group_id = int(requested_group_id)
get_cohort_by_id(course_key, group_id)
except CourseUserGroup.DoesNotExist:
raise ValueError
else:
# regular users always query with their own id.
group_id = get_cohort_id(request.user, course_key)
return group_id
else:
# Never pass a group_id to the comments service for a non-cohorted
# commentable
return None
def is_comment_too_deep(parent):
"""
Determine whether a comment with the given parent violates MAX_COMMENT_DEPTH
parent can be None to determine whether root comments are allowed
"""
return (
MAX_COMMENT_DEPTH is not None and (
MAX_COMMENT_DEPTH < 0 or
(parent and parent["depth"] >= MAX_COMMENT_DEPTH)
)
)
def is_commentable_cohorted(course_key, commentable_id):
"""
Args:
course_key: CourseKey
commentable_id: string
Returns:
Bool: is this commentable cohorted?
Raises:
Http404 if the course doesn't exist.
"""
course = courses.get_course_by_id(course_key)
course_cohort_settings = get_course_cohort_settings(course_key)
if not course_cohort_settings.is_cohorted or get_team(commentable_id):
# this is the easy case :)
ans = False
elif (
commentable_id in course.top_level_discussion_topic_ids or
course_cohort_settings.always_cohort_inline_discussions is False
):
# top level discussions have to be manually configured as cohorted
# (default is not).
# Same thing for inline discussions if the default is explicitly set to False in settings
ans = commentable_id in course_cohort_settings.cohorted_discussions
else:
# inline discussions are cohorted by default
ans = True
log.debug(u"is_commentable_cohorted(%s, %s) = {%s}", course_key, commentable_id, ans)
return ans
|
agpl-3.0
|
oscartorresco/financial
|
financiero/compras/views.py
|
1
|
12247
|
# Create your views here.
import json
from django.shortcuts import render, get_object_or_404, get_list_or_404, redirect, render_to_response
from django.http import HttpResponse, HttpResponseRedirect
from compras.models import Proveedor, Producto_Proveedor, Compra, Compra_Producto
from django.db import IntegrityError
from django.core.paginator import Paginator
from inventario.models import Producto
from django.core import serializers
from django.db.models import Q, F, Sum, Count
from django.utils import timezone
from django.db import connection
from django.core.urlresolvers import reverse
from django.forms.models import model_to_dict
# Pagination
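# Paging protocol used by the AJAX views: the 'pag' parameter is a page
# number prefixed with '*' (step back one page) or '^' (advance one page);
# the resulting page number is clamped to the valid range.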
def paginacionAjax(listaDatos, pag):
num = len(listaDatos)
    datosPaginacion = Paginator(listaDatos, 10)
    cont = datosPaginacion.num_pages
if pag[:1] == '*':
pag = pag.strip('*')
pag = int(pag) - 1
if(int(pag) == 0):
pag = 1
else:
if pag[:1] == '^':
pag = pag.strip('^')
pag = int(pag) + 1
if int(pag) > cont:
pag = cont
if int(pag) > 0 and int(pag) <= cont:
info = datosPaginacion.page(int(pag))
else:
info = datosPaginacion.page(1)
pag = 1
if(num == 0):
cont = 0
return {'info':info, 'cont':cont, 'pag':pag}
def buscarProductos(request):
if request.method == 'GET':
if request.is_ajax():
codigo = request.GET['proveedor']
lista = Producto.objects.filter(producto_proveedor__proveedor=codigo)
data = serializers.serialize('json', lista)
                return HttpResponse(data, mimetype='application/json')
else:
return redirect('compras.views.indexCompras')
else:
return redirect('compras.views.indexCompras')
def buscarPrecioProductoProveedor(request):
if request.method == 'GET':
if request.is_ajax():
producto = request.GET['producto']
proveedor = request.GET['proveedor']
lista = Producto_Proveedor.objects.get(Q(producto=producto) & Q(proveedor=proveedor))
data = serializers.serialize('json', [lista])
                return HttpResponse(data, mimetype='application/json')
else:
return redirect('compras.views.indexCompras')
else:
return redirect('compras.views.indexCompras')
# Purchase views
def indexCompras(request):
try:
pag=request.GET['pag']
except:
pag="*1"
listaCompras = Compra.objects.all()
if request.is_ajax():
info = paginacionAjax(listaCompras, pag)
        data = serializers.serialize('json', info['info'], use_natural_keys=True)
        return HttpResponse(data, mimetype='application/json')
else:
        # Filter out providers whose status is inactive
proveedores = Proveedor.objects.exclude(estado=False)
info = paginacionCompras(listaCompras, pag, proveedores)
return render(request, 'compras/index.html', info)
def agregarCompra(request):
if request.method == 'POST':
        # Data for creating the purchase
proveedor = request.POST['proveedor']
try:
proveedorDatos = Proveedor.objects.get(pk=proveedor)
descripcion = request.POST['descripcion']
fecha = timezone.now()
compra = Compra(descripcion=descripcion, fecha=fecha, proveedor=proveedorDatos)
compra.save()
try:
            # Information for rendering the response page
proveedores = Proveedor.objects.exclude(estado=False)
listaCompras = Compra.objects.all()
            # Data for creating the purchase-product relation
cantidadProductos = int(request.POST['cantidadproductos'])
for a in range(0, cantidadProductos + 1):
auxProducto = 'productos_' + `a`
productoPedido = request.POST[auxProducto]
if (productoPedido != "-1"):
try:
infoProducto = Producto.objects.get(pk=productoPedido)
auxPrecio = 'precio_' + `a`
productoPrecio = request.POST[auxPrecio]
auxCantidad = 'cantidad_' + `a`
productoCantidad = request.POST[auxCantidad]
                        # Create the purchase-product relation object
infoProducto.cantidad = infoProducto.cantidad + int(productoCantidad)
infoProducto.save()
relacion = Compra_Producto(cantidad = productoCantidad, precio=productoPrecio, compra=compra, producto=infoProducto)
relacion.save()
except Producto.DoesNotExist:
info = paginacionCompras(listaCompras, "*1", proveedores, "Ocurrio un problema al guardar la compra, por favor verifique la informacion del producto e intentelo mas tarde", 2)
return render(request, 'compras/index.html', info)
info = paginacionCompras(listaCompras, "*1", proveedores, "La compra se guardo satisfactoriamente", 1)
return render(request, 'compras/index.html', info)
except Proveedor.DoesNotExist:
info = paginacionCompras(listaCompras, "*1", proveedores, "Ocurrio un problema al guardar la compra, por favor verifique la informacion del proveedor e intentelo mas tarde", 2)
return render(request, 'compras/index.html', info)
except Exception as e:
info = paginacionCompras(listaCompras, "*1", proveedores, "Ocurrio un problema al guardar la compra, por favor intentelo mas tarde", 2)
return render(request, 'compras/index.html', info)
else:
return redirect('compras.views.indexCompras')
def listadoCompras(request):
if request.is_ajax():
codigoCompra = request.GET['codigoCompra']
        # Fetch the purchase info
infoCompra = Compra.objects.get(pk=codigoCompra)
        # Fetch the purchase-product relations
compraProducto = Compra_Producto.objects.filter(compra=infoCompra)
        data = serializers.serialize('json', compraProducto, use_natural_keys=True)
        return HttpResponse(data, mimetype="application/json")
else:
return redirect('compras.views.indexCompras')
def buscarCompra(request):
if request.is_ajax():
array = True
listaCompras = Compra.objects.all()
descripcion = request.GET['busquedaDescripcion']
listaCompras = listaCompras.filter(descripcion__icontains=descripcion)
proveedor = request.GET['busquedaProveedor']
listaCompras = listaCompras.filter(proveedor__nombre__icontains=proveedor).order_by('codigo')
fecha = request.GET['busquedaFecha']
if len(fecha) > 0:
listaCompras = listaCompras.filter(fecha=fecha)
codigo = request.GET['busquedaCodigo']
if len(codigo) > 0:
try:
finCodigo = int(codigo)
except:
finCodigo = 0
if finCodigo > 0:
try:
listaCompras = listaCompras.get(pk=finCodigo)
array = False
except Compra.DoesNotExist:
listaCompras = []
else:
listaCompras = []
datos = {}
if array:
try:
pag=request.GET['pag']
except:
pag="*1"
info = paginacionAjax(listaCompras, pag)
            datos['datos'] = serializers.serialize('json', info['info'], use_natural_keys=True)
datos['paginaAct'] = info['pag']
datos['cont'] = info['cont']
else:
datos['datos'] = serializers.serialize('json',[listaCompras], use_natural_keys=True)
datos['paginaAct'] = "1"
datos['cont'] = "1"
return HttpResponse(json.dumps(datos), mimetype='application/json')
else:
return redirect('compras.views.indexCompras')
def paginacionCompras(listaDatos, pag, listaProveedores=[], mensaje="", tipoMensaje=0):
num = len(listaDatos)
datosPaginacion = Paginator(listaDatos, 10)
cont = datosPaginacion.num_pages
if pag[:1] == '*':
pag = pag.strip('*')
pag = int(pag) - 1
if(int(pag) == 0):
pag = 1
else:
if pag[:1] == '^':
pag = pag.strip('^')
pag = int(pag) + 1
if int(pag) > cont:
pag = cont
if int(pag) > 0 and int(pag) <= cont:
info = datosPaginacion.page(int(pag))
else:
info = datosPaginacion.page(1)
if(num == 0):
cont = 0
return {'listaCompras': info, 'pag':pag, 'cont':cont, 'mensaje':mensaje, 'tipoMensaje':tipoMensaje, 'proveedores':listaProveedores}
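# A minimal usage sketch (not part of the original views) of the `pag` token
# that drives paginacionCompras: '*N' means "step back from page N", '^N'
# means "advance from page N", and a plain number selects that page directly.
# For example, with 25 Compra rows (3 pages of 10 items each):
#
#     info = paginacionCompras(Compra.objects.all(), "*1")  # clamps to page 1
#     info = paginacionCompras(Compra.objects.all(), "^2")  # advances to page 3
#     info['pag'], info['cont']  # current page number and total page count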
# Supplier views
def indexProveedor(request, mensaje="", tipoMensaje=0):
try:
pag=request.GET['pag']
except KeyError:
pag="*1"
listaProveedores = Proveedor.objects.all()
if request.is_ajax():
info = paginacionAjax(listaProveedores, pag)
data = serializers.serialize('json', info['info'])
return HttpResponse(data, mimetype='application/json')
else:
info = paginacionProv(listaProveedores, pag, mensaje, tipoMensaje)
return render(request, 'proveedor/index.html', info)
def agregarProveedor(request):
if request.method == 'POST':
codigo = request.POST['codigo']
nombre = request.POST['nombre']
direccion = request.POST['direccion']
telefono = request.POST['telefono']
estado = True
listaProveedores = Proveedor.objects.all()
pag="*1"
try:
aux = Proveedor.objects.get(pk=codigo)
info = paginacionProv(listaProveedores, pag, "El Proveedor no se ha podido agregar, revise el codigo", 2)
except Proveedor.DoesNotExist:
proveedor = Proveedor(codigo,nombre,direccion,telefono,estado)
try:
proveedor.save()
info = paginacionProv(listaProveedores, pag, "El proveedor se ha agregado Correctamente", 1)
except IntegrityError:
info = paginacionProv(listaProveedores, pag, "El Proveedor no se ha podido agregar, revise el telefono", 3)
return render(request, 'proveedor/index.html', info)
else:
return redirect('compras.views.indexProveedor')
def editarProveedor(request):
if request.method == 'POST':
codigo = request.POST['nuevoCodigo']
nombre = request.POST['nuevoNombre']
direccion = request.POST['nuevaDireccion']
telefono = request.POST['nuevoTelefono']
estado = request.POST['nuevoEstado']
pag="*1"
try:
proveedor = Proveedor.objects.get(pk=codigo)
try:
if(len(nombre) > 0):
proveedor.nombre = nombre
if(len(direccion) > 0):
proveedor.direccion = direccion
if(len(telefono) > 0):
proveedor.telefono = telefono
if request.POST['nuevoEstado'] == "1":
proveedor.estado = True
else:
proveedor.estado = False
proveedor.save()
listaProveedores = Proveedor.objects.all()
mensaje = 1
info = paginacionProv(listaProveedores, pag, "El proveedor se ha modificado correctamente", 1)
except IntegrityError:
listaProveedores = Proveedor.objects.all()
info = paginacionProv(listaProveedores, pag, "El Proveedor no se ha podido modificar, revise el telefono", 3)
except Proveedor.DoesNotExist:
listaProveedores = Proveedor.objects.all()
info = paginacionProv(listaProveedores, pag, "El Proveedor no se ha podido modificar, revise el codigo", 2)
return render(request, 'proveedor/index.html', info)
else:
return redirect('compras.views.indexProveedor')
# 1 - Added successfully
# 2 - Failure
def paginacionProv(listaDatos, pag, mensaje="", tipoMensaje=0):
num = len(listaDatos)
datosPaginacion = Paginator(listaDatos, 10)
cont = datosPaginacion.num_pages
if pag[:1] == '*':
pag = pag.strip('*')
pag = int(pag) - 1
if(int(pag) == 0):
pag = 1
else:
if pag[:1] == '^':
pag = pag.strip('^')
pag = int(pag) + 1
if int(pag) > cont:
pag = cont
if int(pag) > 0 and int(pag) <= cont:
info = datosPaginacion.page(int(pag))
else:
info = datosPaginacion.page(1)
if(num == 0):
cont = 0
return {'listaProveedores': info, 'pag':pag, 'cont':cont, 'mensaje':mensaje, 'tipoMensaje':tipoMensaje}
def buscarProveedor(request):
if request.is_ajax():
array = True
listaProveedores = Proveedor.objects.all()
nombre = request.GET['busquedaNombre']
listaProveedores = listaProveedores.filter(nombre__icontains=nombre)
direccion = request.GET['busquedaDireccion']
listaProveedores = listaProveedores.filter(direccion__icontains=direccion)
codigo = request.GET['busquedaCodigo']
if len(codigo) > 0:
try:
listaProveedores = listaProveedores.get(pk=codigo)
array = False
except Proveedor.DoesNotExist:
listaProveedores = []
telefono = request.GET['busquedaTelefono']
if len(telefono) > 0:
try:
listaProveedores = listaProveedores.get(telefono=telefono)
array = False
except Proveedor.DoesNotExist:
listaProveedores = []
datos = {}
if array:
try:
pag=request.GET['pag']
except KeyError:
pag="*1"
info = paginacionAjax(listaProveedores, pag)
datos['datos'] = serializers.serialize('json', info['info'], use_natural_keys=True)
datos['paginaAct'] = info['pag']
datos['cont'] = info['cont']
else:
datos['datos'] = serializers.serialize('json',[listaProveedores], use_natural_keys=True)
datos['paginaAct'] = "1"
datos['cont'] = "1"
return HttpResponse(json.dumps(datos), mimetype='application/json')
else:
return redirect('compras.views.indexProveedor')
|
gpl-2.0
|
babyliynfg/cross
|
tools/project-creator/Python2.6.6/Lib/test/lock_tests.py
|
1
|
15776
|
"""
Various tests for synchronization primitives.
"""
import sys
import time
from thread import start_new_thread, get_ident
import threading
import unittest
from test import test_support as support
def _wait():
# A crude wait/yield function not relying on synchronization primitives.
time.sleep(0.01)
class Bunch(object):
"""
A bunch of threads.
"""
def __init__(self, f, n, wait_before_exit=False):
"""
Construct a bunch of `n` threads running the same function `f`.
If `wait_before_exit` is True, the threads won't terminate until
do_finish() is called.
"""
self.f = f
self.n = n
self.started = []
self.finished = []
self._can_exit = not wait_before_exit
def task():
tid = get_ident()
self.started.append(tid)
try:
f()
finally:
self.finished.append(tid)
while not self._can_exit:
_wait()
for i in range(n):
start_new_thread(task, ())
def wait_for_started(self):
while len(self.started) < self.n:
_wait()
def wait_for_finished(self):
while len(self.finished) < self.n:
_wait()
def do_finish(self):
self._can_exit = True
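# Illustrative sketch (not part of the original test module) of how Bunch is
# used by the tests below:
#
#     counter = []
#     b = Bunch(lambda: counter.append(1), 3, wait_before_exit=True)
#     b.wait_for_started()    # all 3 threads have entered task()
#     b.do_finish()           # let the threads terminate
#     b.wait_for_finished()   # afterwards, counter == [1, 1, 1]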
class BaseTestCase(unittest.TestCase):
def setUp(self):
self._threads = support.threading_setup()
def tearDown(self):
support.threading_cleanup(*self._threads)
support.reap_children()
class BaseLockTests(BaseTestCase):
"""
Tests for both recursive and non-recursive locks.
"""
def test_constructor(self):
lock = self.locktype()
del lock
def test_acquire_destroy(self):
lock = self.locktype()
lock.acquire()
del lock
def test_acquire_release(self):
lock = self.locktype()
lock.acquire()
lock.release()
del lock
def test_try_acquire(self):
lock = self.locktype()
self.assertTrue(lock.acquire(False))
lock.release()
def test_try_acquire_contended(self):
lock = self.locktype()
lock.acquire()
result = []
def f():
result.append(lock.acquire(False))
Bunch(f, 1).wait_for_finished()
self.assertFalse(result[0])
lock.release()
def test_acquire_contended(self):
lock = self.locktype()
lock.acquire()
N = 5
def f():
lock.acquire()
lock.release()
b = Bunch(f, N)
b.wait_for_started()
_wait()
self.assertEqual(len(b.finished), 0)
lock.release()
b.wait_for_finished()
self.assertEqual(len(b.finished), N)
def test_with(self):
lock = self.locktype()
def f():
lock.acquire()
lock.release()
def _with(err=None):
with lock:
if err is not None:
raise err
_with()
# Check the lock is unacquired
Bunch(f, 1).wait_for_finished()
self.assertRaises(TypeError, _with, TypeError)
# Check the lock is unacquired
Bunch(f, 1).wait_for_finished()
def test_thread_leak(self):
# The lock shouldn't leak a Thread instance when used from a foreign
# (non-threading) thread.
lock = self.locktype()
def f():
lock.acquire()
lock.release()
n = len(threading.enumerate())
# We run many threads in the hope that existing thread ids won't
# be recycled.
Bunch(f, 15).wait_for_finished()
self.assertEqual(n, len(threading.enumerate()))
class LockTests(BaseLockTests):
"""
Tests for non-recursive, weak locks
(which can be acquired and released from different threads).
"""
def test_reacquire(self):
# Lock needs to be released before re-acquiring.
lock = self.locktype()
phase = []
def f():
lock.acquire()
phase.append(None)
lock.acquire()
phase.append(None)
start_new_thread(f, ())
while len(phase) == 0:
_wait()
_wait()
self.assertEqual(len(phase), 1)
lock.release()
while len(phase) == 1:
_wait()
self.assertEqual(len(phase), 2)
def test_different_thread(self):
# Lock can be released from a different thread.
lock = self.locktype()
lock.acquire()
def f():
lock.release()
b = Bunch(f, 1)
b.wait_for_finished()
lock.acquire()
lock.release()
class RLockTests(BaseLockTests):
"""
Tests for recursive locks.
"""
def test_reacquire(self):
lock = self.locktype()
lock.acquire()
lock.acquire()
lock.release()
lock.acquire()
lock.release()
lock.release()
def test_release_unacquired(self):
# Cannot release an unacquired lock
lock = self.locktype()
self.assertRaises(RuntimeError, lock.release)
lock.acquire()
lock.acquire()
lock.release()
lock.acquire()
lock.release()
lock.release()
self.assertRaises(RuntimeError, lock.release)
def test_different_thread(self):
# Cannot release from a different thread
lock = self.locktype()
def f():
lock.acquire()
b = Bunch(f, 1, True)
try:
self.assertRaises(RuntimeError, lock.release)
finally:
b.do_finish()
def test__is_owned(self):
lock = self.locktype()
self.assertFalse(lock._is_owned())
lock.acquire()
self.assertTrue(lock._is_owned())
lock.acquire()
self.assertTrue(lock._is_owned())
result = []
def f():
result.append(lock._is_owned())
Bunch(f, 1).wait_for_finished()
self.assertFalse(result[0])
lock.release()
self.assertTrue(lock._is_owned())
lock.release()
self.assertFalse(lock._is_owned())
class EventTests(BaseTestCase):
"""
Tests for Event objects.
"""
def test_is_set(self):
evt = self.eventtype()
self.assertFalse(evt.is_set())
evt.set()
self.assertTrue(evt.is_set())
evt.set()
self.assertTrue(evt.is_set())
evt.clear()
self.assertFalse(evt.is_set())
evt.clear()
self.assertFalse(evt.is_set())
def _check_notify(self, evt):
# All threads get notified
N = 5
results1 = []
results2 = []
def f():
evt.wait()
results1.append(evt.is_set())
evt.wait()
results2.append(evt.is_set())
b = Bunch(f, N)
b.wait_for_started()
_wait()
self.assertEqual(len(results1), 0)
evt.set()
b.wait_for_finished()
self.assertEqual(results1, [True] * N)
self.assertEqual(results2, [True] * N)
def test_notify(self):
evt = self.eventtype()
self._check_notify(evt)
# Another time, after an explicit clear()
evt.set()
evt.clear()
self._check_notify(evt)
def test_timeout(self):
evt = self.eventtype()
results1 = []
results2 = []
N = 5
def f():
evt.wait(0.0)
results1.append(evt.is_set())
t1 = time.time()
evt.wait(0.2)
r = evt.is_set()
t2 = time.time()
results2.append((r, t2 - t1))
Bunch(f, N).wait_for_finished()
self.assertEqual(results1, [False] * N)
for r, dt in results2:
self.assertFalse(r)
self.assertTrue(dt >= 0.2, dt)
# The event is set
results1 = []
results2 = []
evt.set()
Bunch(f, N).wait_for_finished()
self.assertEqual(results1, [True] * N)
for r, dt in results2:
self.assertTrue(r)
class ConditionTests(BaseTestCase):
"""
Tests for condition variables.
"""
def test_acquire(self):
cond = self.condtype()
# By default we have an RLock: the condition can be acquired multiple
# times.
cond.acquire()
cond.acquire()
cond.release()
cond.release()
lock = threading.Lock()
cond = self.condtype(lock)
cond.acquire()
self.assertFalse(lock.acquire(False))
cond.release()
self.assertTrue(lock.acquire(False))
self.assertFalse(cond.acquire(False))
lock.release()
with cond:
self.assertFalse(lock.acquire(False))
def test_unacquired_wait(self):
cond = self.condtype()
self.assertRaises(RuntimeError, cond.wait)
def test_unacquired_notify(self):
cond = self.condtype()
self.assertRaises(RuntimeError, cond.notify)
def _check_notify(self, cond):
N = 5
results1 = []
results2 = []
phase_num = 0
def f():
cond.acquire()
cond.wait()
cond.release()
results1.append(phase_num)
cond.acquire()
cond.wait()
cond.release()
results2.append(phase_num)
b = Bunch(f, N)
b.wait_for_started()
_wait()
self.assertEqual(results1, [])
# Notify 3 threads at first
cond.acquire()
cond.notify(3)
_wait()
phase_num = 1
cond.release()
while len(results1) < 3:
_wait()
self.assertEqual(results1, [1] * 3)
self.assertEqual(results2, [])
# Notify 5 threads: they might be in their first or second wait
cond.acquire()
cond.notify(5)
_wait()
phase_num = 2
cond.release()
while len(results1) + len(results2) < 8:
_wait()
self.assertEqual(results1, [1] * 3 + [2] * 2)
self.assertEqual(results2, [2] * 3)
# Notify all threads: they are all in their second wait
cond.acquire()
cond.notify_all()
_wait()
phase_num = 3
cond.release()
while len(results2) < 5:
_wait()
self.assertEqual(results1, [1] * 3 + [2] * 2)
self.assertEqual(results2, [2] * 3 + [3] * 2)
b.wait_for_finished()
def test_notify(self):
cond = self.condtype()
self._check_notify(cond)
# A second time, to check internal state is still ok.
self._check_notify(cond)
def test_timeout(self):
cond = self.condtype()
results = []
N = 5
def f():
cond.acquire()
t1 = time.time()
cond.wait(0.2)
t2 = time.time()
cond.release()
results.append(t2 - t1)
Bunch(f, N).wait_for_finished()
self.assertEqual(len(results), 5)
for dt in results:
self.assertTrue(dt >= 0.2, dt)
class BaseSemaphoreTests(BaseTestCase):
"""
Common tests for {bounded, unbounded} semaphore objects.
"""
def test_constructor(self):
self.assertRaises(ValueError, self.semtype, value = -1)
self.assertRaises(ValueError, self.semtype, value = -sys.maxint)
def test_acquire(self):
sem = self.semtype(1)
sem.acquire()
sem.release()
sem = self.semtype(2)
sem.acquire()
sem.acquire()
sem.release()
sem.release()
def test_acquire_destroy(self):
sem = self.semtype()
sem.acquire()
del sem
def test_acquire_contended(self):
sem = self.semtype(7)
sem.acquire()
N = 10
results1 = []
results2 = []
phase_num = 0
def f():
sem.acquire()
results1.append(phase_num)
sem.acquire()
results2.append(phase_num)
b = Bunch(f, 10)
b.wait_for_started()
while len(results1) + len(results2) < 6:
_wait()
self.assertEqual(results1 + results2, [0] * 6)
phase_num = 1
for i in range(7):
sem.release()
while len(results1) + len(results2) < 13:
_wait()
self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7)
phase_num = 2
for i in range(6):
sem.release()
while len(results1) + len(results2) < 19:
_wait()
self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7 + [2] * 6)
# The semaphore is still locked
self.assertFalse(sem.acquire(False))
# Final release, to let the last thread finish
sem.release()
b.wait_for_finished()
def test_try_acquire(self):
sem = self.semtype(2)
self.assertTrue(sem.acquire(False))
self.assertTrue(sem.acquire(False))
self.assertFalse(sem.acquire(False))
sem.release()
self.assertTrue(sem.acquire(False))
def test_try_acquire_contended(self):
sem = self.semtype(4)
sem.acquire()
results = []
def f():
results.append(sem.acquire(False))
results.append(sem.acquire(False))
Bunch(f, 5).wait_for_finished()
# There can be a thread switch between acquiring the semaphore and
# appending the result, therefore results will not necessarily be
# ordered.
self.assertEqual(sorted(results), [False] * 7 + [True] * 3 )
def test_default_value(self):
# The default initial value is 1.
sem = self.semtype()
sem.acquire()
def f():
sem.acquire()
sem.release()
b = Bunch(f, 1)
b.wait_for_started()
_wait()
self.assertFalse(b.finished)
sem.release()
b.wait_for_finished()
def test_with(self):
sem = self.semtype(2)
def _with(err=None):
with sem:
self.assertTrue(sem.acquire(False))
sem.release()
with sem:
self.assertFalse(sem.acquire(False))
if err:
raise err
_with()
self.assertTrue(sem.acquire(False))
sem.release()
self.assertRaises(TypeError, _with, TypeError)
self.assertTrue(sem.acquire(False))
sem.release()
class SemaphoreTests(BaseSemaphoreTests):
"""
Tests for unbounded semaphores.
"""
def test_release_unacquired(self):
# Unbounded releases are allowed and increment the semaphore's value
sem = self.semtype(1)
sem.release()
sem.acquire()
sem.acquire()
sem.release()
class BoundedSemaphoreTests(BaseSemaphoreTests):
"""
Tests for bounded semaphores.
"""
def test_release_unacquired(self):
# Cannot go past the initial value
sem = self.semtype()
self.assertRaises(ValueError, sem.release)
sem.acquire()
sem.release()
self.assertRaises(ValueError, sem.release)
|
mit
|
sonali0901/zulip
|
zerver/webhooks/gitlab/tests.py
|
21
|
14333
|
# -*- coding: utf-8 -*-
from zerver.lib.webhooks.git import COMMITS_LIMIT
from zerver.lib.test_classes import WebhookTestCase
class GitlabHookTests(WebhookTestCase):
STREAM_NAME = 'gitlab'
URL_TEMPLATE = "/api/v1/external/gitlab?&api_key={api_key}"
FIXTURE_DIR_NAME = 'gitlab'
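# Each test below follows the same pattern: send_and_test_stream_message
# (inherited from WebhookTestCase) loads the named JSON fixture from the
# gitlab fixture directory, POSTs it to URL_TEMPLATE with the given
# HTTP_X_GITLAB_EVENT header, and checks that the message delivered to
# STREAM_NAME has the expected subject and body.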
def test_push_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / tomek"
expected_message = u"Tomasz Kolek [pushed](https://gitlab.com/tomaszkolek0/my-awesome-project/compare/5fcdd5551fc3085df79bece2c32b1400802ac407...eb6ae1e591e0819dc5bf187c6bfe18ec065a80e9) to branch tomek\n\n* [66abd2d](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/66abd2da28809ffa128ed0447965cf11d7f863a7): b\n* [eb6ae1e](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/eb6ae1e591e0819dc5bf187c6bfe18ec065a80e9): c"
self.send_and_test_stream_message('push', expected_subject, expected_message, HTTP_X_GITLAB_EVENT="Push Hook")
def test_push_commits_more_than_limit_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / tomek"
commits_info = u'* [66abd2d](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/66abd2da28809ffa128ed0447965cf11d7f863a7): b\n'
expected_message = u"Tomasz Kolek [pushed](https://gitlab.com/tomaszkolek0/my-awesome-project/compare/5fcdd5551fc3085df79bece2c32b1400802ac407...eb6ae1e591e0819dc5bf187c6bfe18ec065a80e9) to branch tomek\n\n{}[and {} more commit(s)]".format(
commits_info * COMMITS_LIMIT,
50 - COMMITS_LIMIT,
)
self.send_and_test_stream_message('push_commits_more_than_limit', expected_subject, expected_message, HTTP_X_GITLAB_EVENT="Push Hook")
def test_remove_branch_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / tomek"
expected_message = u"Tomasz Kolek deleted branch tomek"
self.send_and_test_stream_message('remove_branch', expected_subject, expected_message, HTTP_X_GITLAB_EVENT="Push Hook")
def test_add_tag_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project"
expected_message = u"Tomasz Kolek pushed tag xyz"
self.send_and_test_stream_message(
'add_tag',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Tag Push Hook",
)
def test_remove_tag_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project"
expected_message = u"Tomasz Kolek removed tag xyz"
self.send_and_test_stream_message(
'remove_tag',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Tag Push Hook"
)
def test_create_issue_without_assignee_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / Issue #1 Issue title"
expected_message = u"Tomasz Kolek created [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)\n\n~~~ quote\nIssue description\n~~~"
self.send_and_test_stream_message(
'issue_created_without_assignee',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Issue Hook"
)
def test_create_issue_with_assignee_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / Issue #1 Issue title"
expected_message = u"Tomasz Kolek created [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)(assigned to Tomasz Kolek)\n\n~~~ quote\nIssue description\n~~~"
self.send_and_test_stream_message(
'issue_created_with_assignee',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Issue Hook"
)
def test_update_issue_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / Issue #1 Issue title_new"
expected_message = u"Tomasz Kolek updated [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)"
self.send_and_test_stream_message(
'issue_updated',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Issue Hook"
)
def test_close_issue_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / Issue #1 Issue title_new"
expected_message = u"Tomasz Kolek closed [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)"
self.send_and_test_stream_message(
'issue_closed',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Issue Hook"
)
def test_reopen_issue_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / Issue #1 Issue title_new"
expected_message = u"Tomasz Kolek reopened [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)"
self.send_and_test_stream_message(
'issue_reopened',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Issue Hook"
)
def test_note_commit_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project"
expected_message = u"Tomasz Kolek [commented](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/66abd2da28809ffa128ed0447965cf11d7f863a7#note_14169211) on [66abd2d](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/66abd2da28809ffa128ed0447965cf11d7f863a7)\n~~~ quote\nnice commit\n~~~"
self.send_and_test_stream_message(
'commit_note',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Note Hook"
)
def test_note_merge_request_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / MR #1 Tomek"
expected_message = u"Tomasz Kolek [commented](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/1#note_14171860) on [MR #1](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/1)\n\n~~~ quote\nNice merge request!\n~~~"
self.send_and_test_stream_message(
'merge_request_note',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Note Hook"
)
def test_note_issue_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / Issue #2 abc"
expected_message = u"Tomasz Kolek [commented](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/2#note_14172057) on [Issue #2](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/2)\n\n~~~ quote\nNice issue\n~~~"
self.send_and_test_stream_message(
'issue_note',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Note Hook"
)
def test_note_snippet_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / Snippet #2 test"
expected_message = u"Tomasz Kolek [commented](https://gitlab.com/tomaszkolek0/my-awesome-project/snippets/2#note_14172058) on [Snippet #2](https://gitlab.com/tomaszkolek0/my-awesome-project/snippets/2)\n\n~~~ quote\nNice snippet\n~~~"
self.send_and_test_stream_message(
'snippet_note',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Note Hook"
)
def test_merge_request_created_without_assignee_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / MR #2 NEW MR"
expected_message = u"Tomasz Kolek created [MR #2](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/2)\nfrom `tomek` to `master`\n\n~~~ quote\ndescription of merge request\n~~~"
self.send_and_test_stream_message(
'merge_request_created_without_assignee',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Merge Request Hook"
)
def test_merge_request_created_with_assignee_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / MR #3 New Merge Request"
expected_message = u"Tomasz Kolek created [MR #3](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/3)(assigned to Tomasz Kolek)\nfrom `tomek` to `master`\n\n~~~ quote\ndescription of merge request\n~~~"
self.send_and_test_stream_message(
'merge_request_created_with_assignee',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Merge Request Hook"
)
def test_merge_request_closed_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / MR #2 NEW MR"
expected_message = u"Tomasz Kolek closed [MR #2](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/2)"
self.send_and_test_stream_message(
'merge_request_closed',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Merge Request Hook"
)
def test_merge_request_updated_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / MR #3 New Merge Request"
expected_message = u"Tomasz Kolek updated [MR #3](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/3)(assigned to Tomasz Kolek)\nfrom `tomek` to `master`\n\n~~~ quote\nupdated desc\n~~~"
self.send_and_test_stream_message(
'merge_request_updated',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Merge Request Hook"
)
def test_merge_request_added_commit_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / MR #3 New Merge Request"
expected_message = u"Tomasz Kolek added commit(s) to [MR #3](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/3)"
self.send_and_test_stream_message(
'merge_request_added_commit',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Merge Request Hook"
)
def test_merge_request_merged_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / MR #3 New Merge Request"
expected_message = u"Tomasz Kolek merged [MR #3](https://gitlab.com/tomaszkolek0/my-awesome-project/merge_requests/3)"
self.send_and_test_stream_message(
'merge_request_merged',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Merge Request Hook"
)
def test_wiki_page_opened_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project"
expected_message = u"Tomasz Kolek created [Wiki Page \"how to\"](https://gitlab.com/tomaszkolek0/my-awesome-project/wikis/how-to)."
self.send_and_test_stream_message(
'wiki_page_opened',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Wiki Page Hook"
)
def test_wiki_page_edited_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project"
expected_message = u"Tomasz Kolek updated [Wiki Page \"how to\"](https://gitlab.com/tomaszkolek0/my-awesome-project/wikis/how-to)."
self.send_and_test_stream_message(
'wiki_page_edited',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Wiki Page Hook"
)
def test_build_created_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / master"
expected_message = u"Build job_name from test stage was created."
self.send_and_test_stream_message(
'build_created',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Build Hook"
)
def test_build_started_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / master"
expected_message = u"Build job_name from test stage started."
self.send_and_test_stream_message(
'build_started',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Build Hook"
)
def test_build_succeeded_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / master"
expected_message = u"Build job_name from test stage changed status to success."
self.send_and_test_stream_message(
'build_succeeded',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Build Hook"
)
def test_pipeline_succeeded_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / master"
expected_message = u"Pipeline changed status to success with build(s):\n* job_name2 - success\n* job_name - success."
self.send_and_test_stream_message(
'pipeline_succeeded',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Pipeline Hook"
)
def test_pipeline_started_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / master"
expected_message = u"Pipeline started with build(s):\n* job_name - running\n* job_name2 - pending."
self.send_and_test_stream_message(
'pipeline_started',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Pipeline Hook"
)
def test_pipeline_pending_event_message(self):
# type: () -> None
expected_subject = u"my-awesome-project / master"
expected_message = u"Pipeline was created with build(s):\n* job_name2 - pending\n* job_name - created."
self.send_and_test_stream_message(
'pipeline_pending',
expected_subject,
expected_message,
HTTP_X_GITLAB_EVENT="Pipeline Hook"
)
|
apache-2.0
|
yangjae/grpc
|
src/python/src/grpc/framework/face/_calls.py
|
12
|
15160
|
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utility functions for invoking RPCs."""
import sys
import threading
from grpc.framework.base import interfaces as base_interfaces
from grpc.framework.base import util as base_util
from grpc.framework.face import _control
from grpc.framework.face import interfaces
from grpc.framework.foundation import callable_util
from grpc.framework.foundation import future
_ITERATOR_EXCEPTION_LOG_MESSAGE = 'Exception iterating over requests!'
_DONE_CALLBACK_LOG_MESSAGE = 'Exception calling Future "done" callback!'
class _RendezvousServicedIngestor(base_interfaces.ServicedIngestor):
def __init__(self, rendezvous):
self._rendezvous = rendezvous
def consumer(self, operation_context):
return self._rendezvous
class _EventServicedIngestor(base_interfaces.ServicedIngestor):
def __init__(self, result_consumer, abortion_callback):
self._result_consumer = result_consumer
self._abortion_callback = abortion_callback
def consumer(self, operation_context):
operation_context.add_termination_callback(
_control.as_operation_termination_callback(self._abortion_callback))
return self._result_consumer
def _rendezvous_subscription(rendezvous):
return base_util.full_serviced_subscription(
_RendezvousServicedIngestor(rendezvous))
def _unary_event_subscription(completion_callback, abortion_callback):
return base_util.full_serviced_subscription(
_EventServicedIngestor(
_control.UnaryConsumer(completion_callback), abortion_callback))
def _stream_event_subscription(result_consumer, abortion_callback):
return base_util.full_serviced_subscription(
_EventServicedIngestor(result_consumer, abortion_callback))
# NOTE(nathaniel): This class has some extremely special semantics around
# cancellation that allow it to be used by both "blocking" APIs and "futures"
# APIs.
#
# Since futures.Future defines its own exception for cancellation, we want these
# objects, when returned by methods of a returning-Futures-from-other-methods
# object, to raise the same exception for cancellation. But that's weird in a
# blocking API - why should this object, also returned by methods of blocking
# APIs, raise exceptions from the "future" module? Should we do something like
# have this class be parameterized by the type of exception that it raises in
# cancellation circumstances?
#
# We don't have to take such a dramatic step: since blocking APIs define no
# cancellation semantics whatsoever, there is no supported way for
# blocking-API-users of these objects to cancel RPCs, and thus no supported way
# for them to see an exception the type of which would be weird to them.
#
# Bonus: in both blocking and futures APIs, this object still properly raises
# exceptions.CancellationError for any *server-side cancellation* of an RPC.
class _OperationCancellableIterator(interfaces.CancellableIterator):
"""An interfaces.CancellableIterator for response-streaming operations."""
def __init__(self, rendezvous, operation):
self._lock = threading.Lock()
self._rendezvous = rendezvous
self._operation = operation
self._cancelled = False
def __iter__(self):
return self
def next(self):
with self._lock:
if self._cancelled:
raise future.CancelledError()
return next(self._rendezvous)
def cancel(self):
with self._lock:
self._cancelled = True
self._operation.cancel()
self._rendezvous.set_outcome(base_interfaces.Outcome.CANCELLED)
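# Illustrative only (not from the original module): per the NOTE above, once
# cancel() is called the iterator raises the "futures" flavor of cancellation
# on the next call. `front`, `name`, etc. are placeholders here.
#
#     it = inline_value_in_stream_out(front, name, payload, timeout, trace_id)
#     it.cancel()
#     next(it)  # raises future.CancelledError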
class _OperationFuture(future.Future):
"""A future.Future interface to an operation."""
def __init__(self, rendezvous, operation):
self._condition = threading.Condition()
self._rendezvous = rendezvous
self._operation = operation
self._cancelled = False
self._computed = False
self._payload = None
self._exception = None
self._traceback = None
self._callbacks = []
def cancel(self):
"""See future.Future.cancel for specification."""
with self._condition:
if not self._cancelled and not self._computed:
self._operation.cancel()
self._cancelled = True
self._condition.notify_all()
return False
def cancelled(self):
"""See future.Future.cancelled for specification."""
with self._condition:
return self._cancelled
def running(self):
"""See future.Future.running for specification."""
with self._condition:
return not self._cancelled and not self._computed
def done(self):
"""See future.Future.done for specification."""
with self._condition:
return self._cancelled or self._computed
def result(self, timeout=None):
"""See future.Future.result for specification."""
with self._condition:
if self._cancelled:
raise future.CancelledError()
if self._computed:
if self._payload is None:
raise self._exception # pylint: disable=raising-bad-type
else:
return self._payload
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._callbacks.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._condition:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
if self._payload is None:
raise self._exception # pylint: disable=raising-bad-type
else:
return self._payload
else:
raise future.TimeoutError()
def exception(self, timeout=None):
"""See future.Future.exception for specification."""
with self._condition:
if self._cancelled:
raise future.CancelledError()
if self._computed:
return self._exception
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._callbacks.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._condition:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._exception
else:
raise future.TimeoutError()
def traceback(self, timeout=None):
"""See future.Future.traceback for specification."""
with self._condition:
if self._cancelled:
raise future.CancelledError()
if self._computed:
return self._traceback
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._callbacks.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._condition:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._traceback
else:
raise future.TimeoutError()
def add_done_callback(self, fn):
"""See future.Future.add_done_callback for specification."""
with self._condition:
if self._callbacks is not None:
self._callbacks.append(fn)
return
callable_util.call_logging_exceptions(fn, _DONE_CALLBACK_LOG_MESSAGE, self)
def on_operation_termination(self, operation_outcome):
"""Indicates to this object that the operation has terminated.
Args:
operation_outcome: A base_interfaces.Outcome value indicating the
outcome of the operation.
"""
with self._condition:
cancelled = self._cancelled
if cancelled:
callbacks = list(self._callbacks)
self._callbacks = None
else:
rendezvous = self._rendezvous
if not cancelled:
payload = None
exception = None
traceback = None
if operation_outcome == base_interfaces.Outcome.COMPLETED:
try:
payload = next(rendezvous)
except Exception as e: # pylint: disable=broad-except
exception = e
traceback = sys.exc_info()[2]
else:
try:
# We raise and then immediately catch in order to create a traceback.
raise _control.abortion_outcome_to_exception(operation_outcome)
except Exception as e: # pylint: disable=broad-except
exception = e
traceback = sys.exc_info()[2]
with self._condition:
if not self._cancelled:
self._computed = True
self._payload = payload
self._exception = exception
self._traceback = traceback
callbacks = list(self._callbacks)
self._callbacks = None
for callback in callbacks:
callable_util.call_logging_exceptions(
callback, _DONE_CALLBACK_LOG_MESSAGE, self)
class _Call(interfaces.Call):
def __init__(self, operation):
self._operation = operation
self.context = _control.RpcContext(operation.context)
def cancel(self):
self._operation.cancel()
def blocking_value_in_value_out(front, name, payload, timeout, trace_id):
"""Services in a blocking fashion a value-in value-out servicer method."""
rendezvous = _control.Rendezvous()
subscription = _rendezvous_subscription(rendezvous)
operation = front.operate(
name, payload, True, timeout, subscription, trace_id)
operation.context.add_termination_callback(rendezvous.set_outcome)
return next(rendezvous)
def future_value_in_value_out(front, name, payload, timeout, trace_id):
"""Services a value-in value-out servicer method by returning a Future."""
rendezvous = _control.Rendezvous()
subscription = _rendezvous_subscription(rendezvous)
operation = front.operate(
name, payload, True, timeout, subscription, trace_id)
operation.context.add_termination_callback(rendezvous.set_outcome)
operation_future = _OperationFuture(rendezvous, operation)
operation.context.add_termination_callback(
operation_future.on_operation_termination)
return operation_future
def inline_value_in_stream_out(front, name, payload, timeout, trace_id):
"""Services a value-in stream-out servicer method."""
rendezvous = _control.Rendezvous()
subscription = _rendezvous_subscription(rendezvous)
operation = front.operate(
name, payload, True, timeout, subscription, trace_id)
operation.context.add_termination_callback(rendezvous.set_outcome)
return _OperationCancellableIterator(rendezvous, operation)
def blocking_stream_in_value_out(
front, name, payload_iterator, timeout, trace_id):
"""Services in a blocking fashion a stream-in value-out servicer method."""
rendezvous = _control.Rendezvous()
subscription = _rendezvous_subscription(rendezvous)
operation = front.operate(name, None, False, timeout, subscription, trace_id)
operation.context.add_termination_callback(rendezvous.set_outcome)
for payload in payload_iterator:
operation.consumer.consume(payload)
operation.consumer.terminate()
return next(rendezvous)
def future_stream_in_value_out(
front, name, payload_iterator, timeout, trace_id, pool):
"""Services a stream-in value-out servicer method by returning a Future."""
rendezvous = _control.Rendezvous()
subscription = _rendezvous_subscription(rendezvous)
operation = front.operate(name, None, False, timeout, subscription, trace_id)
operation.context.add_termination_callback(rendezvous.set_outcome)
pool.submit(
callable_util.with_exceptions_logged(
_control.pipe_iterator_to_consumer, _ITERATOR_EXCEPTION_LOG_MESSAGE),
payload_iterator, operation.consumer, lambda: True, True)
operation_future = _OperationFuture(rendezvous, operation)
operation.context.add_termination_callback(
operation_future.on_operation_termination)
return operation_future
def inline_stream_in_stream_out(
front, name, payload_iterator, timeout, trace_id, pool):
"""Services a stream-in stream-out servicer method."""
rendezvous = _control.Rendezvous()
subscription = _rendezvous_subscription(rendezvous)
operation = front.operate(name, None, False, timeout, subscription, trace_id)
operation.context.add_termination_callback(rendezvous.set_outcome)
pool.submit(
callable_util.with_exceptions_logged(
_control.pipe_iterator_to_consumer, _ITERATOR_EXCEPTION_LOG_MESSAGE),
payload_iterator, operation.consumer, lambda: True, True)
return _OperationCancellableIterator(rendezvous, operation)
def event_value_in_value_out(
front, name, payload, completion_callback, abortion_callback, timeout,
trace_id):
subscription = _unary_event_subscription(
completion_callback, abortion_callback)
operation = front.operate(
name, payload, True, timeout, subscription, trace_id)
return _Call(operation)
def event_value_in_stream_out(
front, name, payload, result_payload_consumer, abortion_callback, timeout,
trace_id):
subscription = _stream_event_subscription(
result_payload_consumer, abortion_callback)
operation = front.operate(
name, payload, True, timeout, subscription, trace_id)
return _Call(operation)
def event_stream_in_value_out(
front, name, completion_callback, abortion_callback, timeout, trace_id):
subscription = _unary_event_subscription(
completion_callback, abortion_callback)
operation = front.operate(name, None, False, timeout, subscription, trace_id)
return _Call(operation), operation.consumer
def event_stream_in_stream_out(
front, name, result_payload_consumer, abortion_callback, timeout, trace_id):
subscription = _stream_event_subscription(
result_payload_consumer, abortion_callback)
operation = front.operate(name, None, False, timeout, subscription, trace_id)
return _Call(operation), operation.consumer
|
bsd-3-clause
|
shybovycha/buck
|
scripts/top_down_stress_tester.py
|
25
|
6514
|
import argparse
import itertools
import json
import logging
import os
import subprocess
import sys
import tempfile
import zipfile
CACHE_DIR = 'buck-cache'
class CacheEntry(object):
pass
def get_cache_entry(path):
with zipfile.ZipFile(path) as f:
entry_map = {os.path.basename(n): n for n in f.namelist()}
entry = CacheEntry()
entry.target = f.read(entry_map['TARGET']).strip()
entry.rule_key = f.read(entry_map['RULE_KEY']).strip()
entry.deps = json.loads(f.read(entry_map['DEPS']))
entry.path = path
return entry
def get_cache_inventory():
inventory = {}
for item in os.listdir(CACHE_DIR):
entry = get_cache_entry(os.path.join(CACHE_DIR, item))
inventory[entry.target] = entry
return inventory
def get_missing_cache_entries(inventory):
"""
Find and return all entries missing in the cache.
"""
missing_entries = {}
for entry in inventory.itervalues():
if not os.path.exists(entry.path):
missing_entries[entry.target] = entry
return missing_entries
def clear_cache():
subprocess.check_call(['rm', '-rf', CACHE_DIR])
def clear_output():
subprocess.check_call(['rm', '-rf', 'buck-out'])
def run_buck(buck, *args):
logging.info('Running {} {}'.format(buck, ' '.join(args)))
# Always create a temp file, in case we need to serialize the
# arguments to it.
with tempfile.NamedTemporaryFile() as f:
# `args` arrives as a tuple, so copy it into a list before mutating it.
args = list(args)
# Point cache to a known location.
args.append('--config')
args.append('cache.dir=' + CACHE_DIR)
# If the command would be too long, put the args into a file and
# execute that.
if len(args) > 30:
for arg in args:
f.write(arg)
f.write(os.linesep)
f.flush()
args = ['@' + f.name]
return subprocess.check_output([buck] + list(args))
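# Note on the temp-file dance above (behavior inferred from this script, not
# from buck documentation): when more than 30 arguments are passed, they are
# written one per line to the temp file and replaced with a single '@<file>'
# argument, which buck expands back into the original argument list.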
def preorder_traversal(roots, deps, callback):
"""
Execute the given callback during a preorder traversal of the graph.
"""
# Keep track of all the nodes processed.
seen = set()
def traverse(node, callback, chain):
# Make sure we only visit nodes once.
if node in seen:
return
seen.add(node)
# Run the callback with the current node and the chain of parent nodes we
# traversed to find it.
callback(node, chain)
# Recurse on dependencies, making sure to update the visitor chain.
for dep in deps[node]:
traverse(dep, callback, chain=chain + [node])
# Traverse starting from all the roots.
for root in roots:
traverse(root, callback, [])
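# A tiny sketch (not in the original script) of preorder_traversal: the
# callback sees each node exactly once, along with the chain of parents
# walked to reach it.
#
#     visited = []
#     deps = {'a': ['b', 'c'], 'b': ['c'], 'c': []}
#     preorder_traversal(['a'], deps,
#                        lambda node, chain: visited.append((node, chain)))
#     # visited == [('a', []), ('b', ['a']), ('c', ['a', 'b'])]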
def build(buck, targets):
"""
Verify that each of the actions that run when building the given targets
runs correctly using a top-down build.
"""
# Now run a build to populate the cache.
logging.info('Running a build to populate the cache')
run_buck(buck, 'build', *targets)
# Find all targets reachable via the UI.
out = run_buck(buck, 'audit', 'dependencies', '--transitive', *targets)
ui_targets = set(out.splitlines())
ui_targets.update(targets)
# Grab an inventory of the cache and use it to form a dependency map.
cache_inventory = get_cache_inventory()
dependencies = {n.target: n.deps for n in cache_inventory.itervalues()}
# Keep track of all the processed nodes so we can print progress info.
processed = set()
# The callback to run for each build rule.
def handle(current, chain):
logging.info(
'Processing {} ({}/{})'
.format(current, len(processed), len(dependencies.keys())))
processed.add(current)
# Empty the previous build's output.
logging.info('Removing output from previous build')
clear_output()
# Remove the cache entry for this target.
entry = cache_inventory[current]
os.remove(entry.path)
logging.info(' removed {} => {}'.format(current, entry.path))
# Now run the build using the closest UI visible ancestor target.
logging.info('Running the build to check ' + current)
for node in itertools.chain([current], reversed(chain)):
if node in ui_targets:
run_buck(buck, 'build', '--just-build', current, node)
break
else:
assert False, 'couldn\'t find target in UI: ' + node
# We should *always* end with a full cache.
logging.info('Verifying cache...')
missing = get_missing_cache_entries(cache_inventory)
assert len(missing) == 0, '\n'.join(sorted(missing.keys()))
preorder_traversal(targets, dependencies, handle)
def test(buck, targets):
"""
Test that we can run tests when pulling from the cache.
"""
# Find all test targets.
test_targets = set()
out = run_buck(buck, 'targets', '--json', *targets)
for info in json.loads(out):
if info['buck.type'].endswith('_test'):
test_targets.add(
'//' + info['buck.base_path'] + ':' + info['name'])
if not test_targets:
raise Exception('no test targets')
# Now run a build to populate the cache.
logging.info('Running a build to populate the cache')
run_buck(buck, 'build', *test_targets)
# Empty the build output.
logging.info('Removing output from build')
clear_output()
# Now run the test
run_buck(buck, 'test', *test_targets)
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument('--buck', default='buck')
parser.add_argument('command', choices=('build', 'test'))
parser.add_argument('targets', metavar='target', nargs='+')
args = parser.parse_args(argv[1:])
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p')
# Resolve any aliases in the top-level targets.
out = run_buck(args.buck, 'targets', *args.targets)
targets = set(out.splitlines())
# Clear the cache and output directories to start with a clean slate.
logging.info('Clearing output and cache')
run_buck(args.buck, 'clean')
clear_output()
clear_cache()
# Run the subcommand
if args.command == 'build':
build(args.buck, targets)
elif args.command == 'test':
test(args.buck, targets)
else:
raise Exception('unknown command: ' + args.command)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
apache-2.0
|
yinquan529/platform-external-chromium_org
|
third_party/markupsafe/_native.py
|
1243
|
1187
|
# -*- coding: utf-8 -*-
"""
markupsafe._native
~~~~~~~~~~~~~~~~~~
Native Python implementation used when the C module is not compiled.
:copyright: (c) 2010 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from markupsafe import Markup
from markupsafe._compat import text_type
def escape(s):
"""Convert the characters &, <, >, ' and " in string s to HTML-safe
sequences. Use this if you need to display text that might contain
such characters in HTML. Marks return value as markup string.
"""
if hasattr(s, '__html__'):
return s.__html__()
return Markup(text_type(s)
.replace('&', '&')
.replace('>', '>')
.replace('<', '<')
.replace("'", ''')
.replace('"', '"')
)
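# Doctest-style sketch, added for illustration (not in the upstream module):
#
#     >>> escape(u'<em>"x" & y</em>')
#     Markup(u'&lt;em&gt;&#34;x&#34; &amp; y&lt;/em&gt;')
#     >>> escape(Markup(u'<em>already safe</em>'))  # __html__ short-circuits
#     Markup(u'<em>already safe</em>')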
def escape_silent(s):
"""Like :func:`escape` but converts `None` into an empty
markup string.
"""
if s is None:
return Markup()
return escape(s)
def soft_unicode(s):
"""Make a string unicode if it isn't already. That way a markup
string is not converted back to unicode.
"""
if not isinstance(s, text_type):
s = text_type(s)
return s
|
bsd-3-clause
|
codenote/chromium-test
|
chrome/test/functional/secure_shell.py
|
68
|
4066
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import logging
import os
import time
import pyauto_functional # must be imported before pyauto
import pyauto
class SecureShellTest(pyauto.PyUITest):
"""Tests for Secure Shell app.
Uses app from chrome/test/data/extensions/secure_shell/.
The test uses stable app by default.
Set the env var SECURE_SHELL_USE_DEV=1 to make it use the dev one.
"""
assert pyauto.PyUITest.IsChromeOS(), 'Works on ChromeOS only'
def setUp(self):
"""Install secure shell app at startup."""
pyauto.PyUITest.setUp(self)
# Pick app from data dir.
app_dir = os.path.join(os.path.abspath(
self.DataDir()), 'extensions', 'secure_shell')
channel = 'dev' if os.getenv('SECURE_SHELL_USE_DEV') else 'stable'
files = glob.glob(os.path.join(app_dir, 'SecureShell-%s-*.crx' % channel))
assert files, 'Secure Shell %s app missing in %s' % (channel, app_dir)
app_path = files[0]
# Install app.
logging.debug('Using Secure shell app %s' % app_path)
self._app_id = self.InstallExtension(app_path, from_webstore=True)
def testInstall(self):
"""Install Secure Shell."""
# Installation already done in setUp. Just verify.
self.assertTrue(self._app_id)
ssh_info = [x for x in self.GetExtensionsInfo()
if x['id'] == self._app_id][0]
self.assertTrue(ssh_info)
# Uninstall.
self.UninstallExtensionById(id=self._app_id)
self.assertFalse([x for x in self.GetExtensionsInfo()
if x['id'] == self._app_id],
msg='Could not uninstall.')
def testLaunch(self):
"""Launch Secure Shell and verify basic connect/exit flow.
This basic flow also verifies that NaCl works since secure shell is based
on it.
"""
self.assertEqual(1, self.GetTabCount())
then = time.time()
self.LaunchApp(self._app_id)
login_ui_frame = (
'/descendant::iframe[contains(@src, "nassh_connect_dialog.html")]')
# Wait for connection dialog iframe to load.
self.WaitForDomNode(login_ui_frame, tab_index=1,
msg='Secure shell login dialog did not show up')
self.WaitForDomNode('id("field-description")', tab_index=1,
attribute='placeholder',
expected_value='username@hostname', # partial match
frame_xpath=login_ui_frame,
msg='Did not find secure shell username dialog')
now = time.time()
self.assertEqual(2, self.GetTabCount(), msg='Did not launch')
logging.info('Launched Secure Shell in %.2f secs' % (now - then))
# Fill in chronos@localhost using webdriver.
driver = self.NewWebDriver()
driver.switch_to_window(driver.window_handles[-1]) # last tab
driver.switch_to_frame(1)
user = 'chronos@localhost'
driver.find_element_by_id('field-description').send_keys(user + '\n')
# Verify yes/no prompt
self.WaitForHtermText('continue connecting \(yes/no\)\?', tab_index=1,
msg='Did not get the yes/no prompt')
welcome_text = self.GetHtermRowsText(0, 8, tab_index=1)
self.assertTrue('Welcome to Secure Shell' in welcome_text,
msg='Did not get correct welcome message')
# Type 'yes' and enter password
self.SendKeysToHterm('yes\\n', tab_index=1)
self.WaitForHtermText('Password:', tab_index=1,
msg='Did not get password prompt')
self.SendKeysToHterm('test0000\\n', tab_index=1)
self.WaitForHtermText('chronos@localhost $', tab_index=1,
msg='Did not get shell login prompt')
# Type 'exit' and close the tab
self.SendKeysToHterm('exit\\n', tab_index=1)
# Check for only 'code 0' since that is what indicates that we exited
# successfully. Checking for more of the string causes flakes since the
# exit message changes at times.
self.WaitForHtermText('code 0', tab_index=1,
msg='Did not get correct exit message')
if __name__ == '__main__':
pyauto_functional.Main()
|
bsd-3-clause
|
lxybox1/MissionPlanner
|
Lib/encodings/mac_roman.py
|
93
|
14043
|
""" Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-roman',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
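# Usage sketch (illustrative; assumes the codec has been registered under
# the name 'mac-roman' via the encodings package):
#
#     >>> u'caf\xe9'.encode('mac-roman')
#     'caf\x8e'
#     >>> '\x8e'.decode('mac-roman')
#     u'\xe9'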
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\xc6' # 0xAE -> LATIN CAPITAL LETTER AE
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\xe6' # 0xBE -> LATIN SMALL LETTER AE
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\ufb01' # 0xDE -> LATIN SMALL LIGATURE FI
u'\ufb02' # 0xDF -> LATIN SMALL LIGATURE FL
u'\u2021' # 0xE0 -> DOUBLE DAGGER
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\uf8ff' # 0xF0 -> Apple logo
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u02d8' # 0xF9 -> BREVE
u'\u02d9' # 0xFA -> DOT ABOVE
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT
u'\u02db' # 0xFE -> OGONEK
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
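# Usage sketch (illustrative comment, not part of the generated module): once the
# encodings package registers this codec, it behaves like any other charmap codec:
#   u'\u2014'.encode('mac-roman') == '\xd1'   # EM DASH -> 0xD1 per the table above
#   '\x80'.decode('mac-roman') == u'\xc4'     # 0x80 -> LATIN CAPITAL A WITH DIAERESIS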
|
gpl-3.0
|
gunan/tensorflow
|
tensorflow/python/debug/wrappers/grpc_wrapper.py
|
25
|
8662
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debugger wrapper session that sends debug data to file:// URLs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import signal
import sys
import traceback
import six
# Google-internal import(s).
from tensorflow.python.debug.lib import common
from tensorflow.python.debug.wrappers import framework
def publish_traceback(debug_server_urls,
graph,
feed_dict,
fetches,
old_graph_version):
"""Publish traceback and source code if graph version is new.
`graph.version` is compared with `old_graph_version`. If the former is higher
  (i.e., newer), the graph traceback and the associated source code are sent to
the debug server at the specified gRPC URLs.
Args:
debug_server_urls: A single gRPC debug server URL as a `str` or a `list` of
debug server URLs.
graph: A Python `tf.Graph` object.
feed_dict: Feed dictionary given to the `Session.run()` call.
fetches: Fetches from the `Session.run()` call.
old_graph_version: Old graph version to compare to.
Returns:
If `graph.version > old_graph_version`, the new graph version as an `int`.
Else, the `old_graph_version` is returned.
"""
# TODO(cais): Consider moving this back to the top, after grpc becomes a
# pip dependency of tensorflow or tf_debug.
# pylint:disable=g-import-not-at-top
from tensorflow.python.debug.lib import source_remote
# pylint:enable=g-import-not-at-top
if graph.version > old_graph_version:
run_key = common.get_run_key(feed_dict, fetches)
source_remote.send_graph_tracebacks(
debug_server_urls, run_key, traceback.extract_stack(), graph,
send_source=True)
return graph.version
else:
return old_graph_version
class GrpcDebugWrapperSession(framework.NonInteractiveDebugWrapperSession):
"""Debug Session wrapper that send debug data to gRPC stream(s)."""
def __init__(self,
sess,
grpc_debug_server_addresses,
watch_fn=None,
thread_name_filter=None,
log_usage=True):
"""Constructor of DumpingDebugWrapperSession.
Args:
sess: The TensorFlow `Session` object being wrapped.
grpc_debug_server_addresses: (`str` or `list` of `str`) Single or a list
of the gRPC debug server addresses, in the format of
<host:port>, with or without the "grpc://" prefix. For example:
"localhost:7000",
["localhost:7000", "192.168.0.2:8000"]
watch_fn: (`Callable`) A Callable that can be used to define per-run
debug ops and watched tensors. See the doc of
`NonInteractiveDebugWrapperSession.__init__()` for details.
thread_name_filter: Regular-expression white list for threads on which the
wrapper session will be active. See doc of `BaseDebugWrapperSession` for
more details.
log_usage: (`bool`) whether the usage of this class is to be logged.
Raises:
TypeError: If `grpc_debug_server_addresses` is not a `str` or a `list`
of `str`.
"""
if log_usage:
pass # No logging for open-source.
framework.NonInteractiveDebugWrapperSession.__init__(
self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter)
if isinstance(grpc_debug_server_addresses, str):
self._grpc_debug_server_urls = [
self._normalize_grpc_url(grpc_debug_server_addresses)]
elif isinstance(grpc_debug_server_addresses, list):
self._grpc_debug_server_urls = []
for address in grpc_debug_server_addresses:
if not isinstance(address, str):
raise TypeError(
"Expected type str in list grpc_debug_server_addresses, "
"received type %s" % type(address))
self._grpc_debug_server_urls.append(self._normalize_grpc_url(address))
else:
raise TypeError(
"Expected type str or list in grpc_debug_server_addresses, "
"received type %s" % type(grpc_debug_server_addresses))
def prepare_run_debug_urls(self, fetches, feed_dict):
"""Implementation of abstract method in superclass.
See doc of `NonInteractiveDebugWrapperSession.prepare_run_debug_urls()`
for details.
Args:
fetches: Same as the `fetches` argument to `Session.run()`
feed_dict: Same as the `feed_dict` argument to `Session.run()`
Returns:
      debug_urls: (`str` or `list` of `str`) grpc:// debug URLs to be used in
        this `Session.run()` call.
"""
return self._grpc_debug_server_urls
def _normalize_grpc_url(self, address):
return (common.GRPC_URL_PREFIX + address
if not address.startswith(common.GRPC_URL_PREFIX) else address)
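  # Illustrative behavior of _normalize_grpc_url (comment added for clarity,
  # not part of the original docs):
  #   "localhost:7000"        -> "grpc://localhost:7000"
  #   "grpc://localhost:7000" -> returned unchanged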
def _signal_handler(unused_signal, unused_frame):
while True:
response = six.moves.input(
"\nSIGINT received. Quit program? (Y/n): ").strip()
if response in ("", "Y", "y"):
sys.exit(0)
elif response in ("N", "n"):
break
def register_signal_handler():
try:
signal.signal(signal.SIGINT, _signal_handler)
except ValueError:
# This can happen if we are not in the MainThread.
pass
class TensorBoardDebugWrapperSession(GrpcDebugWrapperSession):
"""A tfdbg Session wrapper that can be used with TensorBoard Debugger Plugin.
This wrapper is the same as `GrpcDebugWrapperSession`, except that it uses a
predefined `watch_fn` that
1) uses `DebugIdentity` debug ops with the `gated_grpc` attribute set to
`True` to allow the interactive enabling and disabling of tensor
breakpoints.
2) watches all tensors in the graph.
This saves the need for the user to define a `watch_fn`.
"""
def __init__(self,
sess,
grpc_debug_server_addresses,
thread_name_filter=None,
send_traceback_and_source_code=True,
log_usage=True):
"""Constructor of TensorBoardDebugWrapperSession.
Args:
sess: The `tf.compat.v1.Session` instance to be wrapped.
grpc_debug_server_addresses: gRPC address(es) of debug server(s), as a
`str` or a `list` of `str`s. E.g., "localhost:2333",
"grpc://localhost:2333", ["192.168.0.7:2333", "192.168.0.8:2333"].
thread_name_filter: Optional filter for thread names.
send_traceback_and_source_code: Whether traceback of graph elements and
the source code are to be sent to the debug server(s).
log_usage: Whether the usage of this class is to be logged (if
applicable).
"""
def _gated_grpc_watch_fn(fetches, feeds):
del fetches, feeds # Unused.
return framework.WatchOptions(
debug_ops=["DebugIdentity(gated_grpc=true)"])
super(TensorBoardDebugWrapperSession, self).__init__(
sess,
grpc_debug_server_addresses,
watch_fn=_gated_grpc_watch_fn,
thread_name_filter=thread_name_filter,
log_usage=log_usage)
self._send_traceback_and_source_code = send_traceback_and_source_code
# Keeps track of the latest version of Python graph object that has been
# sent to the debug servers.
self._sent_graph_version = -1
register_signal_handler()
def run(self,
fetches,
feed_dict=None,
options=None,
run_metadata=None,
callable_runner=None,
callable_runner_args=None,
callable_options=None):
if self._send_traceback_and_source_code:
self._sent_graph_version = publish_traceback(
self._grpc_debug_server_urls, self.graph, feed_dict, fetches,
self._sent_graph_version)
return super(TensorBoardDebugWrapperSession, self).run(
fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata,
callable_runner=callable_runner,
callable_runner_args=callable_runner_args,
callable_options=callable_options)
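# Minimal usage sketch (illustrative, not part of the original module; assumes a
# debug server such as the TensorBoard Debugger Plugin is listening at the given
# address and that `sess` is a tf.compat.v1.Session):
#   sess = TensorBoardDebugWrapperSession(sess, "localhost:6064")
#   sess.run(fetches)  # debug data is streamed to the server on each run() call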
|
apache-2.0
|
ZLLab-Mooc/edx-platform
|
common/test/acceptance/pages/lms/certificate_page.py
|
110
|
1582
|
# -*- coding: utf-8 -*-
"""
Module for Certificates pages.
"""
from bok_choy.page_object import PageObject
from . import BASE_URL
class CertificatePage(PageObject):
"""
Certificate web view page.
"""
url_path = "certificates"
def __init__(self, browser, user_id, course_id):
"""Initialize the page.
Arguments:
browser (Browser): The browser instance.
            user_id: id of the user to whom the certificate is awarded
            course_id: course key of the course in which the certificate is awarded
"""
super(CertificatePage, self).__init__(browser)
self.user_id = user_id
self.course_id = course_id
def is_browser_on_page(self):
""" Checks if certificate web view page is being viewed """
return self.q(css='section.about-accomplishments').present
@property
def url(self):
"""
Construct a URL to the page
"""
return BASE_URL + "/" + self.url_path + "/user/" + self.user_id + "/course/" + self.course_id
@property
def accomplishment_banner(self):
"""
returns accomplishment banner.
"""
return self.q(css='section.banner-user')
@property
def add_to_linkedin_profile_button(self):
"""
returns add to LinkedIn profile button
"""
return self.q(css='button.action-linkedin-profile')
@property
def add_to_facebook_profile_button(self):
"""
returns Facebook share button
"""
return self.q(css='button.action-share-facebook')
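# Example (illustrative, using the `url` property above): for user_id "123" and
# course_id "course-v1:edX+Demo+2015",
#   CertificatePage(browser, "123", "course-v1:edX+Demo+2015").url
# resolves to BASE_URL + "/certificates/user/123/course/course-v1:edX+Demo+2015".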
|
agpl-3.0
|
mgrosvenor/fastnet
|
fast_net_scale.py
|
1
|
2979
|
#! /usr/bin/python
import math
import sys
import os
import subprocess
#PTYPES = [ "eth_ip_udp_head_t", "ip_udp_head_t", "eth_32ip_udp_head_t", "eth_64ip_udp_head_t", "eth64_64ip64_64udp_head_t", "eth6464_64ip64_64udp_head_t" ]
#PTYPES = [ "eth_ip_udp_head_t", "eth_32ip_udp_head_t", "eth_64ip_udp_head_t", "eth64_64ip64_64udp_head_t", "eth6464_64ip64_64udp_head_t" ]
#PTYPE = "volatile eth_ip_udp_head_t"
PTYPE = "volatile eth6464_64ip64_64udp_head_t"
USE_ETHER = "1"
USE_NBO = "0"
DO_COPY = "0"
READ_DATA = "1"
DTYPE = "i64"
PSIZE = "8096"
DO_PREFETCH = 1
packet_size = 8096
samples = 1 * 1000 * 1000
def log_out(out):
print(out[:-1])
log.write(out)
def run_proc(p, wait):
if not wait:
pid = os.fork()
if pid != 0:
return
proc = subprocess.Popen(p, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
proc.wait()
log_out("STDERR -- %s\n" % p)
for line in proc.stderr:
log_out(line)
log_out("STDOUT -- %s\n" % p)
for line in proc.stdout:
log_out(line)
if not wait:
sys.exit(0)
def run_fast_net(outdir, core, wait, test_id):
test_id = test_id + "C%02i" % core
log_out("Running test...\n")
cmd = "../bin/fast_net --packet-count=%i --packet-size=%i --iterations=%i" % (packet_count, packet_size, iterations)
os.chdir("pin") # HACK!!
pin_cmd = "./pin %i \"%s\" > ../%s/%s.stats" % (core, cmd, outdir, test_id)
run_proc(pin_cmd, wait)
os.chdir("../")
def run_test(outdir, test_id, extra):
cmd = "./build.sh %s" % " ".join( [ "\"" + str(x) + "\"" for x in [PTYPE,USE_ETHER,USE_NBO,DO_COPY,READ_DATA,DTYPE,PSIZE, DO_PREFETCH]] )
log_out("Running Compiler \"%s\"\n" % cmd)
run_proc(cmd, True)
for i in range( 6, 4 + extra - 1, 2):
run_fast_net(outdir, i, False, test_id)
run_fast_net(outdir, 4, True, test_id)
#log_out("Calculating Stats\n")
#run_proc("./do_hist.py %s/%s.stats" % (outdir,test_id), True)
outdir = "experiments/layout_hbo_8k_64_scale"
try:
    os.makedirs(outdir)
except OSError:
    pass  # the output directory may already exist
for i in range(4096,4096 * 2 + 1, 4096):
#[1,10] + \
# range(100,1000,200) + \
# range(1000,10 *1000, 2000) + \
# range(10 * 1000,100 * 1000, 20 * 1000) + \
# range(100 * 1000, 1000 * 1000, 200 * 1000) + \
# range(1000 * 1000, 5 * 1000 * 1000, 2000 * 1000):
for scale in range(0,6):
packet_count = i
#packet_count = int(math.pow(2,int(math.log(packet_count,2))))
iterations = max(2,samples / packet_count)
print "Running packet_count=%i for %i iterations ptype=%s" % (packet_count, iterations, PTYPE)
test_id = "S%02i-%010i" % (scale,i)
log = open("%s/%s.log" % (outdir, test_id), "w")
run_test(outdir, test_id, scale)
log_out("Resting for a bit\n")
run_proc("sleep 4", True)
|
bsd-3-clause
|
zenhacklab/OpenBazaar
|
obelisk/numbertheory.py
|
21
|
2813
|
def inverse_mod( a, m ):
"""Inverse of a mod m."""
if a < 0 or m <= a: a = a % m
# From Ferguson and Schneier, roughly:
c, d = a, m
uc, vc, ud, vd = 1, 0, 0, 1
while c != 0:
q, c, d = divmod( d, c ) + ( c, )
uc, vc, ud, vd = ud - q*uc, vd - q*vc, uc, vc
# At this point, d is the GCD, and ud*a+vd*m = d.
    # If d == 1, this means that ud is an inverse.
assert d == 1
if ud > 0: return ud
else: return ud + m
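# Quick sanity check (illustrative comment, not part of the original module):
#   inverse_mod(3, 11) == 4, since (3 * 4) % 11 == 1.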
# from http://eli.thegreenplace.net/2009/03/07/computing-modular-square-roots-in-python/
def modular_sqrt(a, p):
""" Find a quadratic residue (mod p) of 'a'. p
must be an odd prime.
    Solves the congruence of the form:
        x^2 = a (mod p)
    and returns x. Note that p - x is also a root.
    0 is returned if no square root exists for
    these a and p.
The Tonelli-Shanks algorithm is used (except
for some simple cases in which the solution
is known from an identity). This algorithm
runs in polynomial time (unless the
generalized Riemann hypothesis is false).
"""
# Simple cases
#
if legendre_symbol(a, p) != 1:
return 0
elif a == 0:
return 0
elif p == 2:
return p
elif p % 4 == 3:
return pow(a, (p + 1) // 4, p)
# Partition p-1 to s * 2^e for an odd s (i.e.
# reduce all the powers of 2 from p-1)
#
s = p - 1
e = 0
while s % 2 == 0:
s /= 2
e += 1
# Find some 'n' with a legendre symbol n|p = -1.
# Shouldn't take long.
#
n = 2
while legendre_symbol(n, p) != -1:
n += 1
# Here be dragons!
# Read the paper "Square roots from 1; 24, 51,
# 10 to Dan Shanks" by Ezra Brown for more
# information
#
# x is a guess of the square root that gets better
# with each iteration.
# b is the "fudge factor" - by how much we're off
# with the guess. The invariant x^2 = ab (mod p)
# is maintained throughout the loop.
# g is used for successive powers of n to update
# both a and b
# r is the exponent - decreases with each update
#
x = pow(a, (s + 1) // 2, p)
b = pow(a, s, p)
g = pow(n, s, p)
r = e
while True:
t = b
m = 0
for m in xrange(r):
if t == 1:
break
t = pow(t, 2, p)
if m == 0:
return x
gs = pow(g, 2 ** (r - m - 1), p)
g = (gs * gs) % p
x = (x * gs) % p
b = (b * g) % p
r = m
def legendre_symbol(a, p):
""" Compute the Legendre symbol a|p using
Euler's criterion. p is a prime, a is
relatively prime to p (if p divides
a, then a|p = 0)
Returns 1 if a has a square root modulo
p, -1 otherwise.
"""
ls = pow(a, (p - 1) // 2, p)
return -1 if ls == p - 1 else ls
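# Worked example (illustrative, not part of the original module): 7**2 % 13 == 10,
# so 10 is a quadratic residue mod 13 and modular_sqrt returns one of the two roots:
#   assert legendre_symbol(10, 13) == 1
#   assert modular_sqrt(10, 13) in (7, 13 - 7)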
|
agpl-3.0
|
zivaharoni/gradual-learning-rnn
|
tensorflow_impl/model.py
|
1
|
19610
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
import rnn_cell_additions as dr
import optimizers
import logging
logger = logging.getLogger("logger")
class PTBModel(object):
"""class for handling the ptb model"""
def __init__(self,
data,
config,
is_training):
"""the constructor builds the tensorflow_impl graph"""
self._config = config
self._is_training = is_training
self._seed = config.seed
self._gpu_devices = config.gpu_devices
self._cpu_device = "/cpu:" + config.cpu_device
self._debug_ops = list()
self._stat_ops = list()
self._activations = list()
self._data = data
self._init_scale = config.init_scale
self._batch_size = config.batch_size
self._layers = config.layers
self._hid_size = config.hid_size
self._bptt = config.bptt
self._vocab_size = config.vocab_size
self._embedding_size = config.embedding_size
self._drop_e = config.drop_e
self._drop_i = config.drop_i
self._drop_h = config.drop_h
self._drop_s = config.drop_s
self._mos = config.mos
if self._mos:
self._mos_drop = config.mos_drop
self._mos_experts_num = config.mos_experts_num
with tf.name_scope("aux_variables"):
with tf.name_scope("global_step"):
self._global_step = tf.Variable(0, name='global_step', trainable=False)
with tf.name_scope("epoch_counter"):
self._epoch_count = tf.Variable(0, name='epoch', trainable=False)
self._epoch_inc = tf.assign(self._epoch_count, tf.add(self._epoch_count, tf.constant(1)))
self._epoch_reset = tf.assign(self._epoch_count, tf.constant(0))
# construct the embedding layer on cpu device
self._activations.append(self._data.input_data)
self._build_embedding()
self._build_rnn()
self._build_loss()
if self._is_training:
# set learning rate as variable in order to anneal it throughout training
with tf.name_scope("learning_rate"):
self._lr = tf.Variable(config.lr, trainable=False, dtype=tf.float32)
# a placeholder to assign a new learning rate
self._new_lr = tf.placeholder(
tf.float32, shape=[], name="new_learning_rate")
# function to update learning rate
self._lr_update = tf.assign(self._lr, self._new_lr)
# get trainable vars
tvars = tf.trainable_variables()
# define an optimizer with the averaged gradients
with tf.name_scope("optimizer"):
self._optimizer = []
if config.opt == "sgd":
logger.debug("using SGD optimizer")
self._optimizer = optimizers.SGD(self, self._grads, tvars, self._config)
self._train_op = self._optimizer.train_op
elif config.opt == "asgd":
logger.debug("using ASGD optimizer")
self._optimizer = optimizers.ASGD(self, self._grads, tvars, self._config)
self._train_op = self._optimizer.train_op
# elif config.opt == "masgd":
# logger.info("using MASGD optimizer")
# opt = optimizers.ASGD(self, self._grads, tvars)
# self._optimizer = optimizers.MASGD(self, opt.updates, tvars)
# self._train_op = self._optimizer.train_op
# elif config.opt == "rms":
# logger.info("using RMS optimizer")
# self._optimizer = optimizers.RMSprop(self, self._grads, tvars)
# self._train_op = self._optimizer.train_op
# elif config.opt == "arms":
# logger.info("using ARMS optimizer")
# opt = optimizers.RMSprop(self, grads, tvars, use_opt=False)
# self._optimizer = optimizers.ASGD(self, opt.updates, tvars)
# self._train_op = self._optimizer.train_op
# elif config.opt == "marms":
# logger.info("using MARMS optimizer")
# opt = optimizers.RMSprop(self, grads, tvars, use_opt=False)
# self._optimizer = optimizers.ASGD(self, opt.updates, tvars)
# self._train_op = self._optimizer.train_op
else:
raise ValueError( config.opt + " is not a valid optimizer")
def _build_embedding(self):
init_scale = self._config.embed_init_scale
init = tf.random_uniform(shape=[self._vocab_size, self._embedding_size],
minval=-init_scale,
maxval=init_scale,
seed=self._seed,
dtype=tf.float32)
with tf.variable_scope("embedding"), tf.device(self._cpu_device):
# the embedding matrix is allocated in the cpu to save valuable gpu memory for the model.
logger.debug("adding embedding matrix with dims [{:d}, {:d}]".format(self._vocab_size, self._embedding_size))
self._embedding_map = tf.get_variable(name="embedding", dtype=tf.float32, initializer=init)
embedding_vec = tf.nn.embedding_lookup(self._embedding_map, self._activations[-1])
if self._is_training and (self._drop_e > 0 or self._drop_i > 0):
with tf.name_scope("embedding_mask"):
# non variational wrapper for the embedding
logger.debug("adding embedding mask with dims [{:d}, {:d}, {:d}]"
.format(self._batch_size, self._bptt, self._embedding_size))
self._emb_mask = tf.placeholder(dtype=tf.float32,
shape=[self._batch_size, self._bptt, self._embedding_size],
name="embedding_mask")
if self._drop_e > 0:
if self._config.drop_embed_var:
logger.debug("using variational embedding dropout")
random_tensor = ops.convert_to_tensor(1-self._drop_e)
random_tensor += \
random_ops.random_uniform([self._batch_size, 1, self._embedding_size],
seed=self._seed)
random_tensor = tf.tile(random_tensor, [1, self._bptt, 1])
self._gen_emb_mask = math_ops.floor(random_tensor)
else:
logger.debug("using naive embedding dropout")
random_tensor = ops.convert_to_tensor(1-self._drop_e)
random_tensor += \
random_ops.random_uniform([self._batch_size, self._bptt, self._embedding_size],
seed=self._seed)
self._gen_emb_mask = math_ops.floor(random_tensor)
else:
self._gen_emb_mask = tf.ones([self._batch_size, self._bptt, self._embedding_size])
embedding_vec = math_ops.div(embedding_vec, (1-self._drop_e)*(1-self._drop_i)) * self._emb_mask
self._activations.append(embedding_vec)
def _build_rnn(self):
self._cell = list()
self._initial_state = list()
self._state = list()
# define the lstm cell
lstm_cell = self._build_lstm_cell()
outputs = tf.unstack(self._activations[-1], num=self._bptt, axis=1)
self._final_state = list()
for i in range(self._layers):
with tf.variable_scope("layer_%d" % (i+1)):
self._cell.append(lstm_cell(self._hid_size[i]))
self._initial_state.append(self._cell[-1].zero_state(self._batch_size, dtype=tf.float32))
outputs, state = tf.nn.static_rnn(self._cell[-1], outputs, initial_state=self._initial_state[-1])
self._final_state.append(state)
output = tf.reshape(tf.concat(outputs, 1), [-1, self._hid_size[i]])
self._activations.append(output)
def _build_lstm_cell(self):
def cell(lstm_size):
if self._config.DC:
logger.debug("using weight-dropped LSTM cell")
return dr.WeightDroppedLSTMCell(num_units=lstm_size,
is_training=self._is_training,
state_is_tuple=True)
else:
logger.debug("using LSTM cell")
return tf.contrib.rnn.LSTMBlockCell(num_units=lstm_size)
final_cell = cell
# if dropout is needed add a dropout wrapper
if self._is_training and (self._drop_h[0] > 0 or self._drop_h[1] > 0 or
self._drop_s[0] > 0 or self._drop_s[1] > 0):
def final_cell(lstm_size):
if self._config.variational is not None:
if self._config.DC:
logger.debug("using weight-dropped variational dropout")
return dr.WeightDroppedVariationalDropoutWrapper(cell(lstm_size),
self._batch_size,
lstm_size)
else:
logger.debug("using variational dropout")
return dr.VariationalDropoutWrapper(cell(lstm_size),
self._batch_size,
lstm_size)
else:
raise ValueError("non variational dropout is deprecated")
return final_cell
def _get_prev_h(self, outputs):
_, initial_h = self._initial_state[-1]
state = list()
state.append(initial_h)
state.extend(outputs[:-1])
state = tf.stack(state, axis=1)
state = tf.reshape(state, [-1, self._hid_size[-1]])
return state
def _build_loss(self):
if self._embedding_size == self._hid_size[-1] or self._mos:
# outer softmax matrix is tied with embedding matrix
logger.debug("tied embedding")
w_out = tf.transpose(self._embedding_map)
else:
logger.debug("untied embedding")
w_out = tf.get_variable(name="w_embed_out", shape=[self._hid_size[-1], self._vocab_size], dtype=tf.float32)
b_out = tf.get_variable(name="b_out",
dtype=tf.float32,initializer=tf.zeros([self._vocab_size], dtype=tf.float32))
with tf.name_scope("loss"):
with tf.name_scope("data_loss"):
if self._mos:
logger.debug("adding mos with %d contexts" % self._mos_experts_num)
logits = self._build_mos(w_out, b_out)
else:
logger.debug("adding softmax layer")
logits = tf.matmul(self._activations[-1], w_out) + b_out
if self._is_training:
random_tensor = ops.convert_to_tensor(1-self._config.drop_label)
random_tensor += random_ops.random_uniform([self._batch_size * self._bptt], seed=self._seed)
mask = math_ops.floor(random_tensor)
else:
mask = tf.ones([self._batch_size * self._bptt], dtype=tf.float32)
losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([logits],
[tf.reshape(self._data.targets, [-1])],
[mask])
loss = tf.reduce_mean(losses)
self._loss = loss
if self._config.AR and self._is_training:
logger.debug("using activation regularization")
with tf.name_scope("AR"):
loss += self._config.AR * tf.reduce_mean(tf.square(tf.reshape(self._activations[-1], [-1, 1])))
if self._config.TAR and self._is_training:
logger.debug("using temporal activation regularization")
with tf.name_scope("TAR"):
outputs_reshaped = tf.reshape(self._activations[-1], [self._batch_size, self._bptt, -1])
diff = outputs_reshaped[:, :-1, :] - outputs_reshaped[:, 1:, :]
loss += self._config.TAR * tf.reduce_mean(tf.square(tf.reshape(diff, [-1, 1])))
if self._config.wdecay and self._is_training:
logger.debug("using L2 regularization")
for tvar in tf.trainable_variables():
loss += self._config.wdecay * tf.reduce_sum(tf.square(tf.reshape(tvar, [-1, 1])))
with tf.name_scope("compute_grads"):
self._grads = None
if self._is_training:
self._grads = tf.gradients(loss, tf.trainable_variables())
def _build_mos(self, w_out, b_out):
with tf.name_scope("mos"):
# pi
prior = tf.get_variable(name="mos_pi",
shape=[self._hid_size[-1], self._mos_experts_num],
dtype=tf.float32)
# context vectors
w_h = tf.get_variable(name="mos_w_h",
shape=[self._hid_size[-1], self._mos_experts_num * self._embedding_size],
dtype=tf.float32)
b_h = tf.get_variable(name="mos_b_h",
shape=[self._mos_experts_num * self._embedding_size],
dtype=tf.float32)
prior = tf.matmul(self._activations[-1], prior)
pi = tf.nn.softmax(prior, name="mos_prior")
h = tf.reshape(tf.tanh(tf.matmul(self._activations[-1], w_h) + b_h), [-1, self._embedding_size])
if self._is_training:
self._mos_mask = tf.placeholder(dtype=tf.float32,
shape=[self._batch_size * self._bptt * self._mos_experts_num,
self._embedding_size],
name="mos_mask")
if self._config.variational is not None:
with tf.name_scope("mos_mask_gen"):
random_tensor = ops.convert_to_tensor(1-self._mos_drop)
random_tensor += random_ops.random_uniform(
[self._batch_size, 1, self._mos_experts_num * self._embedding_size], seed=self._seed)
random_tensor = tf.tile(random_tensor, [1, self._bptt, 1])
self._gen_mos_mask = tf.reshape(math_ops.floor(random_tensor),
[self._batch_size * self._bptt * self._mos_experts_num,
self._embedding_size])
else:
with tf.name_scope("mos_mask_gen"):
random_tensor = ops.convert_to_tensor(1-self._mos_drop)
random_tensor += random_ops.random_uniform(
[self._batch_size * self._mos_experts_num * self._bptt, self._embedding_size],
seed=self._seed)
self._gen_mos_mask = math_ops.floor(random_tensor)
h = math_ops.div(h, 1-self._mos_drop) * self._mos_mask
a = tf.matmul(h, w_out) + b_out
# mos
a_mos = tf.reshape(tf.nn.softmax(a), [-1, self._mos_experts_num, self._vocab_size])
pi = tf.reshape(pi, [-1, self._mos_experts_num, 1])
weighted_softmax = tf.multiply(a_mos, pi)
prob = tf.reduce_sum(weighted_softmax, axis=1)
log_prob = tf.log(prob+1e-8)
return log_prob
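    # Shape sketch for _build_mos (comment added for clarity): with B = batch*bptt
    # rows, K experts and vocab size V, `a_mos` is [B, K, V] and `pi` is [B, K, 1];
    # their broadcast product summed over axis=1 gives the mixture distribution
    # `prob` of shape [B, V], returned as log-probabilities.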
@property
def batch_size(self):
return self._batch_size
@property
def bptt(self):
return self._bptt
@property
def hid_size(self):
return self._hid_size
@property
def init_scale(self):
return self._init_scale
@property
def initial_state(self):
return self._initial_state
@property
def final_state(self):
return self._final_state
@property
def loss(self):
return self._loss
@property
def train_op(self):
return self._train_op
@property
def optimizer(self):
return self._optimizer
@property
def data(self):
return self._data
@property
def lr(self):
return self._lr
@property
def global_step(self):
return self._global_step
@property
def epoch(self):
return self._epoch_count
@property
def config(self):
return self._config
@property
def emb_mask(self):
return self._emb_mask
@property
def stat_ops(self):
return self._stat_ops
def assign_lr(self, session, lr_value):
session.run(self._lr_update, feed_dict={self._new_lr: lr_value})
def epoch_inc(self, session):
return session.run(self._epoch_inc)
def epoch_reset(self, session):
return session.run(self._epoch_reset)
def gen_masks(self, session):
feed_dict = {}
if (self._drop_h[0] > 0 or self._drop_h[1] > 0 or
self._drop_s[0] > 0 or self._drop_s[1] > 0):
for i in range(self._layers):
feed_dict.update(self._cell[i].gen_masks(session))
if self._config.mos:
feed_dict.update({self._mos_mask: session.run(self._gen_mos_mask)})
return feed_dict
def gen_emb_mask(self, session):
return {self._emb_mask: session.run(self._gen_emb_mask)}
def gen_wdrop_mask(self, session):
masks = {}
if self._config.drop_s[0] > 0 or self._config.drop_s[1] > 0:
for cell in self._cell:
masks.update(cell.cell.gen_masks(session))
return masks
def update_drop_params(self, session, output_drop, state_drop):
if (self._drop_h[0] > 0 or self._drop_h[1] > 0 or
self._drop_s[0] > 0 or self._drop_s[1] > 0):
for i in range(self._layers):
if i < self._layers-1:
logger.info("layer %d: out %.2f, state %.2f" % (i+1, output_drop[0], state_drop[0]))
self._cell[i].update_drop_params(session,
1 - output_drop[0],
1 - state_drop[0])
else:
logger.info("layer %d: out %.2f, state %.2f" % (i + 1, output_drop[1], state_drop[1]))
self._cell[i].update_drop_params(session,
1 - output_drop[1],
1 - state_drop[1])
|
mit
|
DiptoDas8/Biponi
|
lib/python2.7/site-packages/django/contrib/gis/geos/point.py
|
103
|
4401
|
from ctypes import c_uint
from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.utils import six
from django.utils.six.moves import range
class Point(GEOSGeometry):
_minlength = 2
_maxlength = 3
def __init__(self, x, y=None, z=None, srid=None):
"""
The Point object may be initialized with either a tuple, or individual
parameters.
For Example:
>>> p = Point((5, 23)) # 2D point, passed in as a tuple
>>> p = Point(5, 23, 8) # 3D point, passed in with individual parameters
"""
if isinstance(x, (tuple, list)):
# Here a tuple or list was passed in under the `x` parameter.
ndim = len(x)
coords = x
elif isinstance(x, six.integer_types + (float,)) and isinstance(y, six.integer_types + (float,)):
# Here X, Y, and (optionally) Z were passed in individually, as parameters.
if isinstance(z, six.integer_types + (float,)):
ndim = 3
coords = [x, y, z]
else:
ndim = 2
coords = [x, y]
else:
raise TypeError('Invalid parameters given for Point initialization.')
point = self._create_point(ndim, coords)
# Initializing using the address returned from the GEOS
# createPoint factory.
super(Point, self).__init__(point, srid=srid)
def _create_point(self, ndim, coords):
"""
Create a coordinate sequence, set X, Y, [Z], and create point
"""
if ndim < 2 or ndim > 3:
raise TypeError('Invalid point dimension: %s' % str(ndim))
cs = capi.create_cs(c_uint(1), c_uint(ndim))
i = iter(coords)
capi.cs_setx(cs, 0, next(i))
capi.cs_sety(cs, 0, next(i))
if ndim == 3:
capi.cs_setz(cs, 0, next(i))
return capi.create_point(cs)
def _set_list(self, length, items):
ptr = self._create_point(length, items)
if ptr:
capi.destroy_geom(self.ptr)
self._ptr = ptr
self._set_cs()
else:
# can this happen?
raise GEOSException('Geometry resulting from slice deletion was invalid.')
def _set_single(self, index, value):
self._cs.setOrdinate(index, 0, value)
def __iter__(self):
"Allows iteration over coordinates of this Point."
for i in range(len(self)):
yield self[i]
def __len__(self):
"Returns the number of dimensions for this Point (either 0, 2 or 3)."
if self.empty:
return 0
if self.hasz:
return 3
else:
return 2
def _get_single_external(self, index):
if index == 0:
return self.x
elif index == 1:
return self.y
elif index == 2:
return self.z
_get_single_internal = _get_single_external
def get_x(self):
"Returns the X component of the Point."
return self._cs.getOrdinate(0, 0)
def set_x(self, value):
"Sets the X component of the Point."
self._cs.setOrdinate(0, 0, value)
def get_y(self):
"Returns the Y component of the Point."
return self._cs.getOrdinate(1, 0)
def set_y(self, value):
"Sets the Y component of the Point."
self._cs.setOrdinate(1, 0, value)
def get_z(self):
"Returns the Z component of the Point."
if self.hasz:
return self._cs.getOrdinate(2, 0)
else:
return None
def set_z(self, value):
"Sets the Z component of the Point."
if self.hasz:
self._cs.setOrdinate(2, 0, value)
else:
raise GEOSException('Cannot set Z on 2D Point.')
# X, Y, Z properties
x = property(get_x, set_x)
y = property(get_y, set_y)
z = property(get_z, set_z)
# ### Tuple setting and retrieval routines. ###
def get_coords(self):
"Returns a tuple of the point."
return self._cs.tuple
def set_coords(self, tup):
"Sets the coordinates of the point with the given tuple."
self._cs[0] = tup
# The tuple and coords properties
tuple = property(get_coords, set_coords)
coords = tuple
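    # Illustrative round trip (comment added, not original): Point(5, 23).coords
    # returns (5.0, 23.0) via the `coords`/`tuple` property above, and
    # len(Point(5, 23, 8)) == 3 per __len__.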
|
mit
|
beagles/neutron_hacking
|
neutron/db/migration/alembic_migrations/versions/1c33fa3cd1a1_extra_route_config.py
|
3
|
2588
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Support routing table configuration on Router
Revision ID: 1c33fa3cd1a1
Revises: 45680af419f9
Create Date: 2013-01-17 14:35:09.386975
"""
# revision identifiers, used by Alembic.
revision = '1c33fa3cd1a1'
down_revision = '45680af419f9'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.openvswitch.ovs_neutron_plugin.OVSNeutronPluginV2',
'neutron.plugins.linuxbridge.lb_neutron_plugin.LinuxBridgePluginV2',
'neutron.plugins.metaplugin.meta_neutron_plugin.MetaPluginV2',
'neutron.plugins.nec.nec_plugin.NECPluginV2',
'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2',
'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin',
'neutron.plugins.ryu.ryu_neutron_plugin.RyuNeutronPluginV2',
'neutron.plugins.vmware.plugin.NsxPlugin',
'neutron.plugins.vmware.plugin.NsxServicePlugin',
'neutron.plugins.oneconvergence.plugin.OneConvergencePluginV2',
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.rename_table(
'routes',
'subnetroutes',
)
op.create_table(
'routerroutes',
sa.Column('destination', sa.String(length=64), nullable=False),
sa.Column(
'nexthop', sa.String(length=64), nullable=False),
sa.Column('router_id', sa.String(length=36), nullable=False),
sa.ForeignKeyConstraint(
['router_id'], ['routers.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('destination', 'nexthop', 'router_id')
)
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.rename_table(
'subnetroutes',
'routes',
)
op.drop_table('routerroutes')
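# Net effect (summary comment, added for clarity): upgrade() renames `routes` to
# `subnetroutes` and creates `routerroutes`, keyed by (destination, nexthop,
# router_id) with a cascading foreign key to routers.id; downgrade() reverses
# both steps.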
|
apache-2.0
|
gladsonvm/haystackdemo
|
lib/python2.7/site-packages/django/utils/html.py
|
78
|
9474
|
"""HTML utilities suitable for global use."""
import re
import string
import urllib
import urlparse
from django.utils.safestring import SafeData, mark_safe
from django.utils.encoding import smart_str, force_unicode
from django.utils.functional import allow_lazy
from django.utils.text import normalize_newlines
# Configuration for urlize() function.
TRAILING_PUNCTUATION = ['.', ',', ':', ';']
WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('&lt;', '&gt;')]
# List of possible strings used for bullets in bulleted lists.
DOTS = [u'&middot;', u'*', u'\u2022', u'&#149;', u'&bull;', u'&#8226;']
unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)')
unquoted_percents_re = re.compile(r'%(?![0-9A-Fa-f]{2})')
word_split_re = re.compile(r'(\s+)')
simple_url_re = re.compile(r'^https?://\w')
simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)$')
simple_email_re = re.compile(r'^\S+@\S+\.\S+$')
link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+')
html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE)
hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join([re.escape(x) for x in DOTS]), re.DOTALL)
trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z')
del x # Temporary variable
def escape(html):
"""
Returns the given HTML with ampersands, quotes and angle brackets encoded.
"""
    return mark_safe(force_unicode(html).replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;'))
escape = allow_lazy(escape, unicode)
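# Example (illustrative comment, not original): escape(u'<b>&</b>') returns
# u'&lt;b&gt;&amp;&lt;/b&gt;' as a SafeData string.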
_base_js_escapes = (
('\\', r'\u005C'),
('\'', r'\u0027'),
('"', r'\u0022'),
('>', r'\u003E'),
('<', r'\u003C'),
('&', r'\u0026'),
('=', r'\u003D'),
('-', r'\u002D'),
(';', r'\u003B'),
(u'\u2028', r'\u2028'),
(u'\u2029', r'\u2029')
)
# Escape every ASCII character with a value less than 32.
_js_escapes = (_base_js_escapes +
tuple([('%c' % z, '\\u%04X' % z) for z in range(32)]))
def escapejs(value):
"""Hex encodes characters for use in JavaScript strings."""
for bad, good in _js_escapes:
value = mark_safe(force_unicode(value).replace(bad, good))
return value
escapejs = allow_lazy(escapejs, unicode)
def conditional_escape(html):
"""
Similar to escape(), except that it doesn't operate on pre-escaped strings.
"""
if isinstance(html, SafeData):
return html
else:
return escape(html)
def linebreaks(value, autoescape=False):
"""Converts newlines into <p> and <br />s."""
value = normalize_newlines(value)
paras = re.split('\n{2,}', value)
if autoescape:
paras = [u'<p>%s</p>' % escape(p).replace('\n', '<br />') for p in paras]
else:
paras = [u'<p>%s</p>' % p.replace('\n', '<br />') for p in paras]
return u'\n\n'.join(paras)
linebreaks = allow_lazy(linebreaks, unicode)
def strip_tags(value):
"""Returns the given HTML with all tags stripped."""
return re.sub(r'<[^>]*?>', '', force_unicode(value))
strip_tags = allow_lazy(strip_tags)
def strip_spaces_between_tags(value):
"""Returns the given HTML with spaces between tags removed."""
return re.sub(r'>\s+<', '><', force_unicode(value))
strip_spaces_between_tags = allow_lazy(strip_spaces_between_tags, unicode)
def strip_entities(value):
"""Returns the given HTML with all entities (&something;) stripped."""
return re.sub(r'&(?:\w+|#\d+);', '', force_unicode(value))
strip_entities = allow_lazy(strip_entities, unicode)
def fix_ampersands(value):
"""Returns the given HTML with all unencoded ampersands encoded correctly."""
    return unencoded_ampersands_re.sub('&amp;', force_unicode(value))
fix_ampersands = allow_lazy(fix_ampersands, unicode)
def smart_urlquote(url):
"Quotes a URL if it isn't already quoted."
# Handle IDN before quoting.
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
try:
netloc = netloc.encode('idna') # IDN -> ACE
except UnicodeError: # invalid domain part
pass
else:
url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))
    # A URL is considered unquoted if it contains no % characters or
# contains a % not followed by two hexadecimal digits. See #9655.
if '%' not in url or unquoted_percents_re.search(url):
# See http://bugs.python.org/issue2637
url = urllib.quote(smart_str(url), safe='!*\'();:@&=+$,/?#[]~')
return force_unicode(url)
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
"""
Converts any URLs in text into clickable links.
Works on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
leading punctuation (opening parens) and it'll still do the right thing.
    If trim_url_limit is not None, URLs in the link text longer than this limit
    will be truncated to trim_url_limit-3 characters, with an ellipsis appended.
If nofollow is True, the URLs in link text will get a rel="nofollow"
attribute.
If autoescape is True, the link text and URLs will get autoescaped.
"""
trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
safe_input = isinstance(text, SafeData)
words = word_split_re.split(force_unicode(text))
for i, word in enumerate(words):
match = None
if '.' in word or '@' in word or ':' in word:
# Deal with punctuation.
lead, middle, trail = '', word, ''
for punctuation in TRAILING_PUNCTUATION:
if middle.endswith(punctuation):
middle = middle[:-len(punctuation)]
trail = punctuation + trail
for opening, closing in WRAPPING_PUNCTUATION:
if middle.startswith(opening):
middle = middle[len(opening):]
lead = lead + opening
# Keep parentheses at the end only if they're balanced.
if (middle.endswith(closing)
and middle.count(closing) == middle.count(opening) + 1):
middle = middle[:-len(closing)]
trail = closing + trail
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ''
if simple_url_re.match(middle):
url = smart_urlquote(middle)
elif simple_url_2_re.match(middle):
url = smart_urlquote('http://%s' % middle)
elif not ':' in middle and simple_email_re.match(middle):
local, domain = middle.rsplit('@', 1)
try:
domain = domain.encode('idna')
except UnicodeError:
continue
url = 'mailto:%s@%s' % (local, domain)
nofollow_attr = ''
# Make link.
if url:
trimmed = trim_url(middle)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
url, trimmed = escape(url), escape(trimmed)
middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
else:
if safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
elif safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
return u''.join(words)
urlize = allow_lazy(urlize, unicode)
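# Example (illustrative comment, assuming default arguments):
#   urlize(u'Visit www.example.com.') returns
#   u'Visit <a href="http://www.example.com">www.example.com</a>.'
# (the trailing full stop is treated as trailing punctuation and kept outside
# the link, per TRAILING_PUNCTUATION above)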
def clean_html(text):
"""
Clean the given HTML. Specifically, do the following:
* Convert <b> and <i> to <strong> and <em>.
* Encode all ampersands correctly.
* Remove all "target" attributes from <a> tags.
* Remove extraneous HTML, such as presentational tags that open and
immediately close and <br clear="all">.
* Convert hard-coded bullets into HTML unordered lists.
* Remove stuff like "<p> </p>", but only if it's at the
bottom of the text.
"""
from django.utils.text import normalize_newlines
text = normalize_newlines(force_unicode(text))
text = re.sub(r'<(/?)\s*b\s*>', '<\\1strong>', text)
text = re.sub(r'<(/?)\s*i\s*>', '<\\1em>', text)
text = fix_ampersands(text)
# Remove all target="" attributes from <a> tags.
text = link_target_attribute_re.sub('\\1', text)
# Trim stupid HTML such as <br clear="all">.
text = html_gunk_re.sub('', text)
# Convert hard-coded bullets into HTML unordered lists.
def replace_p_tags(match):
s = match.group().replace(u'</p>', u'</li>')
for d in DOTS:
s = s.replace(u'<p>%s' % d, u'<li>')
return u'<ul>\n%s\n</ul>' % s
text = hard_coded_bullets_re.sub(replace_p_tags, text)
# Remove stuff like "<p> </p>", but only if it's at the bottom
# of the text.
text = trailing_empty_content_re.sub(u'', text)
return text
clean_html = allow_lazy(clean_html, unicode)
|
mit
|
nitinitprof/odoo
|
addons/web/controllers/main.py
|
46
|
66013
|
# -*- coding: utf-8 -*-
import ast
import base64
import csv
import functools
import glob
import itertools
import jinja2
import logging
import operator
import datetime
import hashlib
import os
import re
import simplejson
import sys
import time
import urllib2
import zlib
from xml.etree import ElementTree
from cStringIO import StringIO
import babel.messages.pofile
import werkzeug.utils
import werkzeug.wrappers
try:
import xlwt
except ImportError:
xlwt = None
import openerp
import openerp.modules.registry
from openerp.addons.base.ir.ir_qweb import AssetsBundle, QWebTemplateNotFound
from openerp.modules import get_module_resource
from openerp.service import model as service_model
from openerp.tools import topological_sort
from openerp.tools.translate import _
from openerp.tools import ustr
from openerp import http
from openerp.http import request, serialize_exception as _serialize_exception
_logger = logging.getLogger(__name__)
if hasattr(sys, 'frozen'):
# When running on compiled windows binary, we don't have access to package loader.
path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'views'))
loader = jinja2.FileSystemLoader(path)
else:
loader = jinja2.PackageLoader('openerp.addons.web', "views")
env = jinja2.Environment(loader=loader, autoescape=True)
env.filters["json"] = simplejson.dumps
# 1 week cache for asset bundles as advised by Google Page Speed
BUNDLE_MAXAGE = 60 * 60 * 24 * 7
#----------------------------------------------------------
# OpenERP Web helpers
#----------------------------------------------------------
db_list = http.db_list
db_monodb = http.db_monodb
def serialize_exception(f):
@functools.wraps(f)
def wrap(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception, e:
_logger.exception("An exception occured during an http request")
se = _serialize_exception(e)
error = {
'code': 200,
'message': "Odoo Server Error",
'data': se
}
return werkzeug.exceptions.InternalServerError(simplejson.dumps(error))
return wrap
def redirect_with_hash(*args, **kw):
"""
.. deprecated:: 8.0
Use the ``http.redirect_with_hash()`` function instead.
"""
return http.redirect_with_hash(*args, **kw)
def abort_and_redirect(url):
r = request.httprequest
response = werkzeug.utils.redirect(url, 302)
response = r.app.get_response(r, response, explicit_session=False)
werkzeug.exceptions.abort(response)
def ensure_db(redirect='/web/database/selector'):
# This helper should be used in web client auth="none" routes
# if those routes needs a db to work with.
# If the heuristics does not find any database, then the users will be
# redirected to db selector or any url specified by `redirect` argument.
# If the db is taken out of a query parameter, it will be checked against
# `http.db_filter()` in order to ensure it's legit and thus avoid db
# forgering that could lead to xss attacks.
db = request.params.get('db')
# Ensure db is legit
if db and db not in http.db_filter([db]):
db = None
if db and not request.session.db:
        # The user asked for a specific database on a new session.
        # That means the nodb router was used to find the route.
        # Depending on the modules installed in the database, the rendering of
        # the page may depend on data injected by the database route dispatcher.
        # Thus, we redirect the user to the same page but with the session
        # cookie set, which forces use of the database route dispatcher...
r = request.httprequest
url_redirect = r.base_url
if r.query_string:
# Can't use werkzeug.wrappers.BaseRequest.url with encoded hashes:
# https://github.com/amigrave/werkzeug/commit/b4a62433f2f7678c234cdcac6247a869f90a7eb7
url_redirect += '?' + r.query_string
response = werkzeug.utils.redirect(url_redirect, 302)
request.session.db = db
abort_and_redirect(url_redirect)
# if db not provided, use the session one
if not db and request.session.db and http.db_filter([request.session.db]):
db = request.session.db
# if no database provided and no database in session, use monodb
if not db:
db = db_monodb(request.httprequest)
# if no db can be found til here, send to the database selector
# the database selector will redirect to database manager if needed
if not db:
werkzeug.exceptions.abort(werkzeug.utils.redirect(redirect, 303))
# always switch the session to the computed db
if db != request.session.db:
request.session.logout()
abort_and_redirect(request.httprequest.url)
request.session.db = db
def module_installed():
    # Candidate modules: the current heuristic is the presence of a /static dir
loadable = http.addons_manifest.keys()
modules = {}
# Retrieve database installed modules
# TODO The following code should move to ir.module.module.list_installed_modules()
Modules = request.session.model('ir.module.module')
domain = [('state','=','installed'), ('name','in', loadable)]
for module in Modules.search_read(domain, ['name', 'dependencies_id']):
modules[module['name']] = []
deps = module.get('dependencies_id')
if deps:
deps_read = request.session.model('ir.module.module.dependency').read(deps, ['name'])
dependencies = [i['name'] for i in deps_read]
modules[module['name']] = dependencies
sorted_modules = topological_sort(modules)
return sorted_modules
def module_installed_bypass_session(dbname):
loadable = http.addons_manifest.keys()
modules = {}
try:
registry = openerp.modules.registry.RegistryManager.get(dbname)
with registry.cursor() as cr:
m = registry.get('ir.module.module')
# TODO The following code should move to ir.module.module.list_installed_modules()
domain = [('state','=','installed'), ('name','in', loadable)]
ids = m.search(cr, 1, [('state','=','installed'), ('name','in', loadable)])
for module in m.read(cr, 1, ids, ['name', 'dependencies_id']):
modules[module['name']] = []
deps = module.get('dependencies_id')
if deps:
deps_read = registry.get('ir.module.module.dependency').read(cr, 1, deps, ['name'])
dependencies = [i['name'] for i in deps_read]
modules[module['name']] = dependencies
except Exception,e:
pass
sorted_modules = topological_sort(modules)
return sorted_modules
def module_boot(db=None):
server_wide_modules = openerp.conf.server_wide_modules or ['web']
serverside = []
dbside = []
for i in server_wide_modules:
if i in http.addons_manifest:
serverside.append(i)
monodb = db or db_monodb()
if monodb:
dbside = module_installed_bypass_session(monodb)
dbside = [i for i in dbside if i not in serverside]
addons = serverside + dbside
return addons
def concat_xml(file_list):
"""Concatenate xml files
:param list(str) file_list: list of files to check
:returns: (concatenation_result, checksum)
:rtype: (str, str)
"""
checksum = hashlib.new('sha1')
if not file_list:
return '', checksum.hexdigest()
root = None
for fname in file_list:
with open(fname, 'rb') as fp:
contents = fp.read()
checksum.update(contents)
fp.seek(0)
xml = ElementTree.parse(fp).getroot()
if root is None:
root = ElementTree.Element(xml.tag)
#elif root.tag != xml.tag:
# raise ValueError("Root tags missmatch: %r != %r" % (root.tag, xml.tag))
for child in xml.getchildren():
root.append(child)
return ElementTree.tostring(root, 'utf-8'), checksum.hexdigest()
def fs2web(path):
"""convert FS path into web path"""
return '/'.join(path.split(os.path.sep))
def manifest_glob(extension, addons=None, db=None, include_remotes=False):
if addons is None:
addons = module_boot(db=db)
else:
addons = addons.split(',')
r = []
for addon in addons:
manifest = http.addons_manifest.get(addon, None)
if not manifest:
continue
        # ensure the path does not end with /
addons_path = os.path.join(manifest['addons_path'], '')[:-1]
globlist = manifest.get(extension, [])
for pattern in globlist:
if pattern.startswith(('http://', 'https://', '//')):
if include_remotes:
r.append((None, pattern))
else:
for path in glob.glob(os.path.normpath(os.path.join(addons_path, addon, pattern))):
r.append((path, fs2web(path[len(addons_path):])))
return r
def manifest_list(extension, mods=None, db=None, debug=None):
""" list ressources to load specifying either:
mods: a comma separated string listing modules
db: a database name (return all installed modules in that database)
"""
if debug is not None:
_logger.warning("openerp.addons.web.main.manifest_list(): debug parameter is deprecated")
files = manifest_glob(extension, addons=mods, db=db, include_remotes=True)
return [wp for _fp, wp in files]
def get_last_modified(files):
""" Returns the modification time of the most recently modified
file provided
:param list(str) files: names of files to check
:return: most recent modification time amongst the fileset
:rtype: datetime.datetime
"""
files = list(files)
if files:
return max(datetime.datetime.fromtimestamp(os.path.getmtime(f))
for f in files)
return datetime.datetime(1970, 1, 1)
def make_conditional(response, last_modified=None, etag=None, max_age=0):
""" Makes the provided response conditional based upon the request,
and mandates revalidation from clients
Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
setting ``last_modified`` and ``etag`` correctly on the response object
:param response: Werkzeug response
:type response: werkzeug.wrappers.Response
:param datetime.datetime last_modified: last modification date of the response content
:param str etag: some sort of checksum of the content (deep etag)
:return: the response object provided
:rtype: werkzeug.wrappers.Response
"""
response.cache_control.must_revalidate = True
response.cache_control.max_age = max_age
if last_modified:
response.last_modified = last_modified
if etag:
response.set_etag(etag)
return response.make_conditional(request.httprequest)
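# Standalone sketch of the caching pattern applied by make_conditional(),
# using plain werkzeug objects and sample values, independent of the Odoo
# request object.
def _demo_conditional_response():
    import datetime
    import werkzeug.test
    import werkzeug.wrappers
    env = werkzeug.test.EnvironBuilder(path='/web/css/web.assets').get_environ()
    resp = werkzeug.wrappers.Response('body')
    resp.cache_control.must_revalidate = True
    resp.cache_control.max_age = 0
    resp.last_modified = datetime.datetime(2014, 1, 1)
    resp.set_etag('deadbeef')
    # yields a 304 response when the request carries matching validators
    return resp.make_conditional(env)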
def login_and_redirect(db, login, key, redirect_url='/web'):
request.session.authenticate(db, login, key)
return set_cookie_and_redirect(redirect_url)
def set_cookie_and_redirect(redirect_url):
redirect = werkzeug.utils.redirect(redirect_url, 303)
redirect.autocorrect_location_header = False
return redirect
def login_redirect():
url = '/web/login?'
    # build the redirect url, keeping all the query parameters of the url
redirect_url = '%s?%s' % (request.httprequest.base_url, werkzeug.urls.url_encode(request.params))
return """<html><head><script>
window.location = '%sredirect=' + encodeURIComponent("%s" + location.hash);
</script></head></html>
""" % (url, redirect_url)
def load_actions_from_ir_values(key, key2, models, meta):
Values = request.session.model('ir.values')
actions = Values.get(key, key2, models, meta, request.context)
return [(id, name, clean_action(action))
for id, name, action in actions]
def clean_action(action):
action.setdefault('flags', {})
action_type = action.setdefault('type', 'ir.actions.act_window_close')
if action_type == 'ir.actions.act_window':
return fix_view_modes(action)
return action
# I think generate_views, fix_view_modes should go into js ActionManager
def generate_views(action):
"""
    While the server generates a sequence called "views" resolving view ids
    and view modes for actions coming directly from the database
    (the ``ir.actions.act_window`` model), it's also possible for e.g. buttons
    to return custom view dictionaries generated on the fly.
In that case, there is no ``views`` key available on the action.
Since the web client relies on ``action['views']``, generate it here from
``view_mode`` and ``view_id``.
Currently handles two different cases:
* no view_id, multiple view_mode
* single view_id, single view_mode
:param dict action: action descriptor dictionary to generate a views key for
"""
view_id = action.get('view_id') or False
if isinstance(view_id, (list, tuple)):
view_id = view_id[0]
# providing at least one view mode is a requirement, not an option
view_modes = action['view_mode'].split(',')
if len(view_modes) > 1:
if view_id:
raise ValueError('Non-db action dictionaries should provide '
'either multiple view modes or a single view '
'mode and an optional view id.\n\n Got view '
'modes %r and view id %r for action %r' % (
view_modes, view_id, action))
action['views'] = [(False, mode) for mode in view_modes]
return
action['views'] = [(view_id, view_modes[0])]
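# Illustrative before/after of generate_views() on hand-built action
# dictionaries (sample values only).
def _demo_generate_views():
    multi = {'view_mode': 'list,form'}
    generate_views(multi)
    assert multi['views'] == [(False, 'list'), (False, 'form')]
    single = {'view_mode': 'form', 'view_id': (42, 'partner.form')}
    generate_views(single)
    assert single['views'] == [(42, 'form')]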
def fix_view_modes(action):
""" For historical reasons, OpenERP has weird dealings in relation to
view_mode and the view_type attribute (on window actions):
* one of the view modes is ``tree``, which stands for both list views
and tree views
* the choice is made by checking ``view_type``, which is either
``form`` for a list view or ``tree`` for an actual tree view
    This method simply folds the view_type into view_mode by adding a
new view mode ``list`` which is the result of the ``tree`` view_mode
in conjunction with the ``form`` view_type.
TODO: this should go into the doc, some kind of "peculiarities" section
:param dict action: an action descriptor
:returns: nothing, the action is modified in place
"""
if not action.get('views'):
generate_views(action)
if action.pop('view_type', 'form') != 'form':
return action
if 'view_mode' in action:
action['view_mode'] = ','.join(
mode if mode != 'tree' else 'list'
for mode in action['view_mode'].split(','))
action['views'] = [
[id, mode if mode != 'tree' else 'list']
for id, mode in action['views']
]
return action
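# Illustrative sketch of the view_type folding performed by fix_view_modes()
# (sample action dictionary only).
def _demo_fix_view_modes():
    action = {'view_type': 'form',
              'view_mode': 'tree,form',
              'views': [[False, 'tree'], [False, 'form']]}
    fix_view_modes(action)
    # the ambiguous 'tree' mode has been renamed to 'list'
    assert action['view_mode'] == 'list,form'
    assert action['views'] == [[False, 'list'], [False, 'form']]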
def _local_web_translations(trans_file):
messages = []
try:
with open(trans_file) as t_file:
po = babel.messages.pofile.read_po(t_file)
except Exception:
return
for x in po:
if x.id and x.string and "openerp-web" in x.auto_comments:
messages.append({'id': x.id, 'string': x.string})
return messages
def xml2json_from_elementtree(el, preserve_whitespaces=False):
""" xml2json-direct
Simple and straightforward XML-to-JSON converter in Python
New BSD Licensed
http://code.google.com/p/xml2json-direct/
"""
res = {}
if el.tag[0] == "{":
ns, name = el.tag.rsplit("}", 1)
res["tag"] = name
res["namespace"] = ns[1:]
else:
res["tag"] = el.tag
res["attrs"] = {}
for k, v in el.items():
res["attrs"][k] = v
kids = []
if el.text and (preserve_whitespaces or el.text.strip() != ''):
kids.append(el.text)
for kid in el:
kids.append(xml2json_from_elementtree(kid, preserve_whitespaces))
if kid.tail and (preserve_whitespaces or kid.tail.strip() != ''):
kids.append(kid.tail)
res["children"] = kids
return res
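# Quick standalone demonstration of the converter on a tiny document
# (sample XML only).
def _demo_xml2json():
    from xml.etree import ElementTree as ET
    el = ET.fromstring('<form string="Partner"><field name="name"/></form>')
    res = xml2json_from_elementtree(el)
    # res == {'tag': 'form', 'attrs': {'string': 'Partner'}, 'children': [
    #     {'tag': 'field', 'attrs': {'name': 'name'}, 'children': []}]}
    return res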
def content_disposition(filename):
filename = ustr(filename)
escaped = urllib2.quote(filename.encode('utf8'))
browser = request.httprequest.user_agent.browser
version = int((request.httprequest.user_agent.version or '0').split('.')[0])
if browser == 'msie' and version < 9:
return "attachment; filename=%s" % escaped
elif browser == 'safari':
return u"attachment; filename=%s" % filename
else:
return "attachment; filename*=UTF-8''%s" % escaped
#----------------------------------------------------------
# OpenERP Web web Controllers
#----------------------------------------------------------
class Home(http.Controller):
@http.route('/', type='http', auth="none")
def index(self, s_action=None, db=None, **kw):
return http.local_redirect('/web', query=request.params, keep_hash=True)
@http.route('/web', type='http', auth="none")
def web_client(self, s_action=None, **kw):
ensure_db()
if request.session.uid:
if kw.get('redirect'):
return werkzeug.utils.redirect(kw.get('redirect'), 303)
if not request.uid:
request.uid = request.session.uid
menu_data = request.registry['ir.ui.menu'].load_menus(request.cr, request.uid, context=request.context)
return request.render('web.webclient_bootstrap', qcontext={'menu_data': menu_data})
else:
return login_redirect()
@http.route('/web/dbredirect', type='http', auth="none")
def web_db_redirect(self, redirect='/', **kw):
ensure_db()
return werkzeug.utils.redirect(redirect, 303)
@http.route('/web/login', type='http', auth="none")
def web_login(self, redirect=None, **kw):
ensure_db()
if request.httprequest.method == 'GET' and redirect and request.session.uid:
return http.redirect_with_hash(redirect)
if not request.uid:
request.uid = openerp.SUPERUSER_ID
values = request.params.copy()
if not redirect:
redirect = '/web?' + request.httprequest.query_string
values['redirect'] = redirect
try:
values['databases'] = http.db_list()
except openerp.exceptions.AccessDenied:
values['databases'] = None
if request.httprequest.method == 'POST':
old_uid = request.uid
uid = request.session.authenticate(request.session.db, request.params['login'], request.params['password'])
if uid is not False:
return http.redirect_with_hash(redirect)
request.uid = old_uid
values['error'] = "Wrong login/password"
if request.env.ref('web.login', False):
return request.render('web.login', values)
else:
# probably not an odoo compatible database
error = 'Unable to login on database %s' % request.session.db
return werkzeug.utils.redirect('/web/database/selector?error=%s' % error, 303)
@http.route('/login', type='http', auth="none")
def login(self, db, login, key, redirect="/web", **kw):
if not http.db_filter([db]):
return werkzeug.utils.redirect('/', 303)
return login_and_redirect(db, login, key, redirect_url=redirect)
@http.route([
'/web/js/<xmlid>',
'/web/js/<xmlid>/<version>',
], type='http', auth='public')
def js_bundle(self, xmlid, version=None, **kw):
try:
bundle = AssetsBundle(xmlid)
except QWebTemplateNotFound:
return request.not_found()
response = request.make_response(bundle.js(), [('Content-Type', 'application/javascript')])
return make_conditional(response, bundle.last_modified, max_age=BUNDLE_MAXAGE)
@http.route([
'/web/css/<xmlid>',
'/web/css/<xmlid>/<version>',
'/web/css.<int:page>/<xmlid>/<version>',
], type='http', auth='public')
def css_bundle(self, xmlid, version=None, page=None, **kw):
try:
bundle = AssetsBundle(xmlid)
except QWebTemplateNotFound:
return request.not_found()
response = request.make_response(bundle.css(page), [('Content-Type', 'text/css')])
return make_conditional(response, bundle.last_modified, max_age=BUNDLE_MAXAGE)
class WebClient(http.Controller):
@http.route('/web/webclient/csslist', type='json', auth="none")
def csslist(self, mods=None):
return manifest_list('css', mods=mods)
@http.route('/web/webclient/jslist', type='json', auth="none")
def jslist(self, mods=None):
return manifest_list('js', mods=mods)
@http.route('/web/webclient/qweb', type='http', auth="none")
def qweb(self, mods=None, db=None):
files = [f[0] for f in manifest_glob('qweb', addons=mods, db=db)]
last_modified = get_last_modified(files)
if request.httprequest.if_modified_since and request.httprequest.if_modified_since >= last_modified:
return werkzeug.wrappers.Response(status=304)
content, checksum = concat_xml(files)
return make_conditional(
request.make_response(content, [('Content-Type', 'text/xml')]),
last_modified, checksum)
@http.route('/web/webclient/bootstrap_translations', type='json', auth="none")
def bootstrap_translations(self, mods):
""" Load local translations from *.po files, as a temporary solution
until we have established a valid session. This is meant only
for translating the login page and db management chrome, using
the browser's language. """
# For performance reasons we only load a single translation, so for
# sub-languages (that should only be partially translated) we load the
# main language PO instead - that should be enough for the login screen.
lang = request.lang.split('_')[0]
translations_per_module = {}
for addon_name in mods:
if http.addons_manifest[addon_name].get('bootstrap'):
addons_path = http.addons_manifest[addon_name]['addons_path']
f_name = os.path.join(addons_path, addon_name, "i18n", lang + ".po")
if not os.path.exists(f_name):
continue
translations_per_module[addon_name] = {'messages': _local_web_translations(f_name)}
return {"modules": translations_per_module,
"lang_parameters": None}
@http.route('/web/webclient/translations', type='json', auth="none")
def translations(self, mods=None, lang=None):
request.disable_db = False
uid = openerp.SUPERUSER_ID
if mods is None:
m = request.registry.get('ir.module.module')
mods = [x['name'] for x in m.search_read(request.cr, uid,
[('state','=','installed')], ['name'])]
if lang is None:
lang = request.context["lang"]
res_lang = request.registry.get('res.lang')
ids = res_lang.search(request.cr, uid, [("code", "=", lang)])
lang_params = None
if ids:
lang_params = res_lang.read(request.cr, uid, ids[0], ["direction", "date_format", "time_format",
"grouping", "decimal_point", "thousands_sep"])
# Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
# done server-side when the language is loaded, so we only need to load the user's lang.
ir_translation = request.registry.get('ir.translation')
translations_per_module = {}
messages = ir_translation.search_read(request.cr, uid, [('module','in',mods),('lang','=',lang),
('comments','like','openerp-web'),('value','!=',False),
('value','!=','')],
['module','src','value','lang'], order='module')
for mod, msg_group in itertools.groupby(messages, key=operator.itemgetter('module')):
translations_per_module.setdefault(mod,{'messages':[]})
translations_per_module[mod]['messages'].extend({'id': m['src'],
'string': m['value']} \
for m in msg_group)
return {"modules": translations_per_module,
"lang_parameters": lang_params}
@http.route('/web/webclient/version_info', type='json', auth="none")
def version_info(self):
return openerp.service.common.exp_version()
@http.route('/web/tests', type='http', auth="none")
def index(self, mod=None, **kwargs):
return request.render('web.qunit_suite')
class Proxy(http.Controller):
@http.route('/web/proxy/load', type='json', auth="none")
def load(self, path):
""" Proxies an HTTP request through a JSON request.
It is strongly recommended to not request binary files through this,
as the result will be a binary data blob as well.
:param path: actual request path
:return: file content
"""
from werkzeug.test import Client
from werkzeug.wrappers import BaseResponse
base_url = request.httprequest.base_url
return Client(request.httprequest.app, BaseResponse).get(path, base_url=base_url).data
class Database(http.Controller):
@http.route('/web/database/selector', type='http', auth="none")
def selector(self, **kw):
try:
dbs = http.db_list()
if not dbs:
return http.local_redirect('/web/database/manager')
except openerp.exceptions.AccessDenied:
dbs = False
return env.get_template("database_selector.html").render({
'databases': dbs,
'debug': request.debug,
'error': kw.get('error')
})
@http.route('/web/database/manager', type='http', auth="none")
def manager(self, **kw):
# TODO: migrate the webclient's database manager to server side views
request.session.logout()
return env.get_template("database_manager.html").render({
'modules': simplejson.dumps(module_boot()),
})
@http.route('/web/database/get_list', type='json', auth="none")
def get_list(self):
# TODO change js to avoid calling this method if in monodb mode
try:
return http.db_list()
except openerp.exceptions.AccessDenied:
monodb = db_monodb()
if monodb:
return [monodb]
raise
@http.route('/web/database/create', type='json', auth="none")
def create(self, fields):
params = dict(map(operator.itemgetter('name', 'value'), fields))
db_created = request.session.proxy("db").create_database(
params['super_admin_pwd'],
params['db_name'],
bool(params.get('demo_data')),
params['db_lang'],
params['create_admin_pwd'])
if db_created:
request.session.authenticate(params['db_name'], 'admin', params['create_admin_pwd'])
return db_created
@http.route('/web/database/duplicate', type='json', auth="none")
def duplicate(self, fields):
params = dict(map(operator.itemgetter('name', 'value'), fields))
duplicate_attrs = (
params['super_admin_pwd'],
params['db_original_name'],
params['db_name'],
)
return request.session.proxy("db").duplicate_database(*duplicate_attrs)
@http.route('/web/database/drop', type='json', auth="none")
def drop(self, fields):
password, db = operator.itemgetter(
'drop_pwd', 'drop_db')(
dict(map(operator.itemgetter('name', 'value'), fields)))
try:
if request.session.proxy("db").drop(password, db):
return True
else:
return False
except openerp.exceptions.AccessDenied:
return {'error': 'AccessDenied', 'title': 'Drop Database'}
except Exception:
return {'error': _('Could not drop database !'), 'title': _('Drop Database')}
@http.route('/web/database/backup', type='http', auth="none")
def backup(self, backup_db, backup_pwd, token, backup_format='zip'):
try:
openerp.service.security.check_super(backup_pwd)
ts = datetime.datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S")
filename = "%s_%s.%s" % (backup_db, ts, backup_format)
headers = [
('Content-Type', 'application/octet-stream; charset=binary'),
('Content-Disposition', content_disposition(filename)),
]
dump_stream = openerp.service.db.dump_db(backup_db, None, backup_format)
response = werkzeug.wrappers.Response(dump_stream, headers=headers, direct_passthrough=True)
response.set_cookie('fileToken', token)
return response
except Exception, e:
_logger.exception('Database.backup')
return simplejson.dumps([[],[{'error': openerp.tools.ustr(e), 'title': _('Backup Database')}]])
@http.route('/web/database/restore', type='http', auth="none")
def restore(self, db_file, restore_pwd, new_db, mode):
try:
copy = mode == 'copy'
data = base64.b64encode(db_file.read())
request.session.proxy("db").restore(restore_pwd, new_db, data, copy)
return ''
        except openerp.exceptions.AccessDenied:
raise Exception("AccessDenied")
@http.route('/web/database/change_password', type='json', auth="none")
def change_password(self, fields):
old_password, new_password = operator.itemgetter(
'old_pwd', 'new_pwd')(
dict(map(operator.itemgetter('name', 'value'), fields)))
try:
return request.session.proxy("db").change_admin_password(old_password, new_password)
except openerp.exceptions.AccessDenied:
return {'error': 'AccessDenied', 'title': _('Change Password')}
except Exception:
return {'error': _('Error, password not changed !'), 'title': _('Change Password')}
class Session(http.Controller):
def session_info(self):
request.session.ensure_valid()
return {
"session_id": request.session_id,
"uid": request.session.uid,
"user_context": request.session.get_context() if request.session.uid else {},
"db": request.session.db,
"username": request.session.login,
"company_id": request.env.user.company_id.id if request.session.uid else None,
}
@http.route('/web/session/get_session_info', type='json', auth="none")
def get_session_info(self):
request.uid = request.session.uid
request.disable_db = False
return self.session_info()
@http.route('/web/session/authenticate', type='json', auth="none")
def authenticate(self, db, login, password, base_location=None):
request.session.authenticate(db, login, password)
return self.session_info()
@http.route('/web/session/change_password', type='json', auth="user")
def change_password(self, fields):
old_password, new_password,confirm_password = operator.itemgetter('old_pwd', 'new_password','confirm_pwd')(
dict(map(operator.itemgetter('name', 'value'), fields)))
if not (old_password.strip() and new_password.strip() and confirm_password.strip()):
return {'error':_('You cannot leave any password empty.'),'title': _('Change Password')}
if new_password != confirm_password:
return {'error': _('The new password and its confirmation must be identical.'),'title': _('Change Password')}
try:
if request.session.model('res.users').change_password(
old_password, new_password):
return {'new_password':new_password}
except Exception:
return {'error': _('The old password you provided is incorrect, your password was not changed.'), 'title': _('Change Password')}
return {'error': _('Error, password not changed !'), 'title': _('Change Password')}
@http.route('/web/session/get_lang_list', type='json', auth="none")
def get_lang_list(self):
try:
return request.session.proxy("db").list_lang() or []
except Exception, e:
return {"error": e, "title": _("Languages")}
@http.route('/web/session/modules', type='json', auth="user")
def modules(self):
# return all installed modules. Web client is smart enough to not load a module twice
return module_installed()
@http.route('/web/session/save_session_action', type='json', auth="user")
def save_session_action(self, the_action):
"""
        This method stores an action object in the session and returns an integer
identifying that action. The method get_session_action() can be used to get
back the action.
:param the_action: The action to save in the session.
:type the_action: anything
:return: A key identifying the saved action.
:rtype: integer
"""
return request.httpsession.save_action(the_action)
@http.route('/web/session/get_session_action', type='json', auth="user")
def get_session_action(self, key):
"""
        Gets back a previously saved action. This method can return None if the action
        was saved too long ago (callers should handle that case gracefully).
:param key: The key given by save_session_action()
:type key: integer
:return: The saved action or None.
:rtype: anything
"""
return request.httpsession.get_action(key)
@http.route('/web/session/check', type='json', auth="user")
def check(self):
request.session.assert_valid()
return None
@http.route('/web/session/destroy', type='json', auth="user")
def destroy(self):
request.session.logout()
@http.route('/web/session/logout', type='http', auth="none")
def logout(self, redirect='/web'):
request.session.logout(keep_db=True)
return werkzeug.utils.redirect(redirect, 303)
class Menu(http.Controller):
@http.route('/web/menu/load_needaction', type='json', auth="user")
def load_needaction(self, menu_ids):
""" Loads needaction counters for specific menu ids.
:return: needaction data
:rtype: dict(menu_id: {'needaction_enabled': boolean, 'needaction_counter': int})
"""
return request.session.model('ir.ui.menu').get_needaction_data(menu_ids, request.context)
class DataSet(http.Controller):
@http.route('/web/dataset/search_read', type='json', auth="user")
def search_read(self, model, fields=False, offset=0, limit=False, domain=None, sort=None):
return self.do_search_read(model, fields, offset, limit, domain, sort)
def do_search_read(self, model, fields=False, offset=0, limit=False, domain=None
, sort=None):
""" Performs a search() followed by a read() (if needed) using the
provided search criteria
:param str model: the name of the model to search on
:param fields: a list of the fields to return in the result records
:type fields: [str]
:param int offset: from which index should the results start being returned
:param int limit: the maximum number of records to return
:param list domain: the search domain for the query
:param list sort: sorting directives
        :returns: A structure (dict) with two keys: ``length`` (the total number
                  of records matching the domain) and ``records`` (the paginated
                  records matching the fields selection set)
        :rtype: dict
"""
Model = request.session.model(model)
records = Model.search_read(domain, fields, offset or 0, limit or False, sort or False,
request.context)
if not records:
return {
'length': 0,
'records': []
}
if limit and len(records) == limit:
length = Model.search_count(domain, request.context)
else:
length = len(records) + (offset or 0)
return {
'length': length,
'records': records
}
@http.route('/web/dataset/load', type='json', auth="user")
def load(self, model, id, fields):
m = request.session.model(model)
value = {}
r = m.read([id], False, request.context)
if r:
value = r[0]
return {'value': value}
def call_common(self, model, method, args, domain_id=None, context_id=None):
return self._call_kw(model, method, args, {})
def _call_kw(self, model, method, args, kwargs):
if method.startswith('_'):
raise Exception("Access Denied: Underscore prefixed methods cannot be remotely called")
@service_model.check
def checked_call(__dbname, *args, **kwargs):
return getattr(request.registry.get(model), method)(request.cr, request.uid, *args, **kwargs)
return checked_call(request.db, *args, **kwargs)
@http.route('/web/dataset/call', type='json', auth="user")
def call(self, model, method, args, domain_id=None, context_id=None):
return self._call_kw(model, method, args, {})
@http.route(['/web/dataset/call_kw', '/web/dataset/call_kw/<path:path>'], type='json', auth="user")
def call_kw(self, model, method, args, kwargs, path=None):
return self._call_kw(model, method, args, kwargs)
@http.route('/web/dataset/call_button', type='json', auth="user")
def call_button(self, model, method, args, domain_id=None, context_id=None):
action = self._call_kw(model, method, args, {})
if isinstance(action, dict) and action.get('type') != '':
return clean_action(action)
return False
@http.route('/web/dataset/exec_workflow', type='json', auth="user")
def exec_workflow(self, model, id, signal):
return request.session.exec_workflow(model, id, signal)
@http.route('/web/dataset/resequence', type='json', auth="user")
def resequence(self, model, ids, field='sequence', offset=0):
""" Re-sequences a number of records in the model, by their ids
        The re-sequencing starts at the first record of ``ids``, the sequence
number is incremented by one after each record and starts at ``offset``
:param ids: identifiers of the records to resequence, in the new sequence order
:type ids: list(id)
:param str field: field used for sequence specification, defaults to
"sequence"
:param int offset: sequence number for first record in ``ids``, allows
starting the resequencing from an arbitrary number,
defaults to ``0``
"""
m = request.session.model(model)
if not m.fields_get([field]):
return False
# python 2.6 has no start parameter
for i, id in enumerate(ids):
m.write(id, { field: i + offset })
return True
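# Standalone sketch (illustrative helper, not used by the controllers) of the
# pagination arithmetic in do_search_read(): a full count is only issued when
# the page came back completely full, otherwise the total is derived locally.
def _demo_search_read_length(offset, limit, page, count_all):
    if limit and len(page) == limit:
        return count_all()  # full page: the total may be larger
    return len(page) + (offset or 0)  # short page: we reached the end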
class View(http.Controller):
@http.route('/web/view/add_custom', type='json', auth="user")
def add_custom(self, view_id, arch):
CustomView = request.session.model('ir.ui.view.custom')
CustomView.create({
'user_id': request.session.uid,
'ref_id': view_id,
'arch': arch
}, request.context)
return {'result': True}
@http.route('/web/view/undo_custom', type='json', auth="user")
def undo_custom(self, view_id, reset=False):
CustomView = request.session.model('ir.ui.view.custom')
vcustom = CustomView.search([('user_id', '=', request.session.uid), ('ref_id' ,'=', view_id)],
0, False, False, request.context)
if vcustom:
if reset:
CustomView.unlink(vcustom, request.context)
else:
CustomView.unlink([vcustom[0]], request.context)
return {'result': True}
return {'result': False}
class TreeView(View):
@http.route('/web/treeview/action', type='json', auth="user")
def action(self, model, id):
return load_actions_from_ir_values(
'action', 'tree_but_open',[(model, id)],
False)
class Binary(http.Controller):
@http.route('/web/binary/image', type='http', auth="public")
def image(self, model, id, field, **kw):
last_update = '__last_update'
Model = request.registry[model]
cr, uid, context = request.cr, request.uid, request.context
headers = [('Content-Type', 'image/png')]
etag = request.httprequest.headers.get('If-None-Match')
hashed_session = hashlib.md5(request.session_id).hexdigest()
retag = hashed_session
id = None if not id else simplejson.loads(id)
if type(id) is list:
id = id[0] # m2o
try:
if etag:
if not id and hashed_session == etag:
return werkzeug.wrappers.Response(status=304)
else:
date = Model.read(cr, uid, [id], [last_update], context)[0].get(last_update)
if hashlib.md5(date).hexdigest() == etag:
return werkzeug.wrappers.Response(status=304)
if not id:
res = Model.default_get(cr, uid, [field], context).get(field)
image_base64 = res
else:
res = Model.read(cr, uid, [id], [last_update, field], context)[0]
retag = hashlib.md5(res.get(last_update)).hexdigest()
image_base64 = res.get(field)
if kw.get('resize'):
resize = kw.get('resize').split(',')
if len(resize) == 2 and int(resize[0]) and int(resize[1]):
width = int(resize[0])
height = int(resize[1])
# resize maximum 500*500
if width > 500: width = 500
if height > 500: height = 500
image_base64 = openerp.tools.image_resize_image(base64_source=image_base64, size=(width, height), encoding='base64', filetype='PNG')
image_data = base64.b64decode(image_base64)
except Exception:
image_data = self.placeholder()
headers.append(('ETag', retag))
headers.append(('Content-Length', len(image_data)))
try:
ncache = int(kw.get('cache'))
headers.append(('Cache-Control', 'no-cache' if ncache == 0 else 'max-age=%s' % (ncache)))
        except (TypeError, ValueError):
pass
return request.make_response(image_data, headers)
def placeholder(self, image='placeholder.png'):
addons_path = http.addons_manifest['web']['addons_path']
return open(os.path.join(addons_path, 'web', 'static', 'src', 'img', image), 'rb').read()
@http.route('/web/binary/saveas', type='http', auth="public")
@serialize_exception
def saveas(self, model, field, id=None, filename_field=None, **kw):
""" Download link for files stored as binary fields.
If the ``id`` parameter is omitted, fetches the default value for the
binary field (via ``default_get``), otherwise fetches the field for
that precise record.
:param str model: name of the model to fetch the binary from
:param str field: binary field
:param str id: id of the record from which to fetch the binary
:param str filename_field: field holding the file's name, if any
:returns: :class:`werkzeug.wrappers.Response`
"""
Model = request.registry[model]
cr, uid, context = request.cr, request.uid, request.context
fields = [field]
if filename_field:
fields.append(filename_field)
if id:
res = Model.read(cr, uid, [int(id)], fields, context)[0]
else:
res = Model.default_get(cr, uid, fields, context)
filecontent = base64.b64decode(res.get(field) or '')
if not filecontent:
return request.not_found()
else:
filename = '%s_%s' % (model.replace('.', '_'), id)
if filename_field:
filename = res.get(filename_field, '') or filename
return request.make_response(filecontent,
[('Content-Type', 'application/octet-stream'),
('Content-Disposition', content_disposition(filename))])
@http.route('/web/binary/saveas_ajax', type='http', auth="public")
@serialize_exception
def saveas_ajax(self, data, token):
jdata = simplejson.loads(data)
model = jdata['model']
field = jdata['field']
data = jdata['data']
id = jdata.get('id', None)
filename_field = jdata.get('filename_field', None)
context = jdata.get('context', {})
Model = request.session.model(model)
fields = [field]
if filename_field:
fields.append(filename_field)
if data:
res = {field: data, filename_field: jdata.get('filename', None)}
elif id:
res = Model.read([int(id)], fields, context)[0]
else:
res = Model.default_get(fields, context)
filecontent = base64.b64decode(res.get(field) or '')
if not filecontent:
raise ValueError(_("No content found for field '%s' on '%s:%s'") %
(field, model, id))
else:
filename = '%s_%s' % (model.replace('.', '_'), id)
if filename_field:
filename = res.get(filename_field, '') or filename
return request.make_response(filecontent,
headers=[('Content-Type', 'application/octet-stream'),
('Content-Disposition', content_disposition(filename))],
cookies={'fileToken': token})
@http.route('/web/binary/upload', type='http', auth="user")
@serialize_exception
def upload(self, callback, ufile):
# TODO: might be useful to have a configuration flag for max-length file uploads
out = """<script language="javascript" type="text/javascript">
var win = window.top.window;
win.jQuery(win).trigger(%s, %s);
</script>"""
try:
data = ufile.read()
args = [len(data), ufile.filename,
ufile.content_type, base64.b64encode(data)]
except Exception, e:
args = [False, e.message]
return out % (simplejson.dumps(callback), simplejson.dumps(args))
@http.route('/web/binary/upload_attachment', type='http', auth="user")
@serialize_exception
def upload_attachment(self, callback, model, id, ufile):
Model = request.session.model('ir.attachment')
out = """<script language="javascript" type="text/javascript">
var win = window.top.window;
win.jQuery(win).trigger(%s, %s);
</script>"""
try:
attachment_id = Model.create({
'name': ufile.filename,
'datas': base64.encodestring(ufile.read()),
'datas_fname': ufile.filename,
'res_model': model,
'res_id': int(id)
}, request.context)
args = {
'filename': ufile.filename,
'id': attachment_id
}
except Exception:
args = {'error': "Something horrible happened"}
_logger.exception("Fail to upload attachment %s" % ufile.filename)
return out % (simplejson.dumps(callback), simplejson.dumps(args))
@http.route([
'/web/binary/company_logo',
'/logo',
'/logo.png',
], type='http', auth="none", cors="*")
def company_logo(self, dbname=None, **kw):
imgname = 'logo.png'
placeholder = functools.partial(get_module_resource, 'web', 'static', 'src', 'img')
uid = None
if request.session.db:
dbname = request.session.db
uid = request.session.uid
elif dbname is None:
dbname = db_monodb()
if not uid:
uid = openerp.SUPERUSER_ID
if not dbname:
response = http.send_file(placeholder(imgname))
else:
try:
# create an empty registry
registry = openerp.modules.registry.Registry(dbname)
with registry.cursor() as cr:
cr.execute("""SELECT c.logo_web, c.write_date
FROM res_users u
LEFT JOIN res_company c
ON c.id = u.company_id
WHERE u.id = %s
""", (uid,))
row = cr.fetchone()
if row and row[0]:
image_data = StringIO(str(row[0]).decode('base64'))
response = http.send_file(image_data, filename=imgname, mtime=row[1])
else:
response = http.send_file(placeholder('nologo.png'))
except Exception:
response = http.send_file(placeholder(imgname))
return response
class Action(http.Controller):
@http.route('/web/action/load', type='json', auth="user")
def load(self, action_id, do_not_eval=False, additional_context=None):
Actions = request.session.model('ir.actions.actions')
value = False
try:
action_id = int(action_id)
except ValueError:
try:
module, xmlid = action_id.split('.', 1)
model, action_id = request.session.model('ir.model.data').get_object_reference(module, xmlid)
assert model.startswith('ir.actions.')
except Exception:
action_id = 0 # force failed read
base_action = Actions.read([action_id], ['type'], request.context)
if base_action:
ctx = request.context
action_type = base_action[0]['type']
if action_type == 'ir.actions.report.xml':
ctx.update({'bin_size': True})
if additional_context:
ctx.update(additional_context)
action = request.session.model(action_type).read([action_id], False, ctx)
if action:
value = clean_action(action[0])
return value
@http.route('/web/action/run', type='json', auth="user")
def run(self, action_id):
return_action = request.session.model('ir.actions.server').run(
[action_id], request.context)
if return_action:
return clean_action(return_action)
else:
return False
class Export(http.Controller):
@http.route('/web/export/formats', type='json', auth="user")
def formats(self):
""" Returns all valid export formats
        :returns: for each export format, a dict with its identifier (``tag``),
                  printable name (``label``) and an optional ``error`` message
        :rtype: [dict]
"""
return [
{'tag': 'csv', 'label': 'CSV'},
{'tag': 'xls', 'label': 'Excel', 'error': None if xlwt else "XLWT required"},
]
def fields_get(self, model):
Model = request.session.model(model)
fields = Model.fields_get(False, request.context)
return fields
@http.route('/web/export/get_fields', type='json', auth="user")
def get_fields(self, model, prefix='', parent_name= '',
import_compat=True, parent_field_type=None,
exclude=None):
if import_compat and parent_field_type == "many2one":
fields = {}
else:
fields = self.fields_get(model)
if import_compat:
fields.pop('id', None)
else:
fields['.id'] = fields.pop('id', {'string': 'ID'})
fields_sequence = sorted(fields.iteritems(),
key=lambda field: openerp.tools.ustr(field[1].get('string', '')))
records = []
for field_name, field in fields_sequence:
if import_compat:
if exclude and field_name in exclude:
continue
if field.get('readonly'):
# If none of the field's states unsets readonly, skip the field
if all(dict(attrs).get('readonly', True)
for attrs in field.get('states', {}).values()):
continue
if not field.get('exportable', True):
continue
            id = prefix + (prefix and '/' or '') + field_name
name = parent_name + (parent_name and '/' or '') + field['string']
record = {'id': id, 'string': name,
'value': id, 'children': False,
'field_type': field.get('type'),
'required': field.get('required'),
'relation_field': field.get('relation_field')}
records.append(record)
if len(name.split('/')) < 3 and 'relation' in field:
ref = field.pop('relation')
record['value'] += '/id'
record['params'] = {'model': ref, 'prefix': id, 'name': name}
if not import_compat or field['type'] == 'one2many':
# m2m field in import_compat is childless
record['children'] = True
return records
@http.route('/web/export/namelist', type='json', auth="user")
def namelist(self, model, export_id):
# TODO: namelist really has no reason to be in Python (although itertools.groupby helps)
export = request.session.model("ir.exports").read([export_id])[0]
export_fields_list = request.session.model("ir.exports.line").read(
export['export_fields'])
fields_data = self.fields_info(
model, map(operator.itemgetter('name'), export_fields_list))
return [
{'name': field['name'], 'label': fields_data[field['name']]}
for field in export_fields_list
]
def fields_info(self, model, export_fields):
info = {}
fields = self.fields_get(model)
if ".id" in export_fields:
fields['.id'] = fields.pop('id', {'string': 'ID'})
# To make fields retrieval more efficient, fetch all sub-fields of a
# given field at the same time. Because the order in the export list is
# arbitrary, this requires ordering all sub-fields of a given field
# together so they can be fetched at the same time
#
# Works the following way:
# * sort the list of fields to export, the default sorting order will
# put the field itself (if present, for xmlid) and all of its
# sub-fields right after it
        # * then, group on: the first segment of the path (which is the same
        #   for a field and for its sub-fields) and the length of the split on
        #   the first '/', which basically means grouping the field on one side
        #   and all of the sub-fields on the other. This way, we have the field
        #   (for the xmlid) with length 1, and all of the sub-fields with the
        #   same base but a length "flag" of 2
# * if we have a normal field (length 1), just add it to the info
# mapping (with its string) as-is
# * otherwise, recursively call fields_info via graft_subfields.
# all graft_subfields does is take the result of fields_info (on the
# field's model) and prepend the current base (current field), which
# rebuilds the whole sub-tree for the field
#
# result: because we're not fetching the fields_get for half the
# database models, fetching a namelist with a dozen fields (including
# relational data) falls from ~6s to ~300ms (on the leads model).
# export lists with no sub-fields (e.g. import_compatible lists with
# no o2m) are even more efficient (from the same 6s to ~170ms, as
# there's a single fields_get to execute)
for (base, length), subfields in itertools.groupby(
sorted(export_fields),
lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))):
subfields = list(subfields)
if length == 2:
# subfields is a seq of $base/*rest, and not loaded yet
info.update(self.graft_subfields(
fields[base]['relation'], base, fields[base]['string'],
subfields
))
elif base in fields:
info[base] = fields[base]['string']
return info
def graft_subfields(self, model, prefix, prefix_string, fields):
export_fields = [field.split('/', 1)[1] for field in fields]
return (
(prefix + '/' + k, prefix_string + '/' + v)
for k, v in self.fields_info(model, export_fields).iteritems())
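# Standalone sketch of the sort+groupby trick fields_info() relies on:
# sorting puts a field right before its sub-fields, then grouping on
# (first path segment, number of path segments) separates the field itself
# from its 'base/child' entries (sample field names only).
def _demo_group_export_fields(export_fields):
    import itertools
    key = lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))
    return [(k, list(group))
            for k, group in itertools.groupby(sorted(export_fields), key)]
# _demo_group_export_fields(['name', 'partner_id/id', 'partner_id/name'])
# -> [(('name', 1), ['name']),
#     (('partner_id', 2), ['partner_id/id', 'partner_id/name'])]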
class ExportFormat(object):
raw_data = False
@property
def content_type(self):
""" Provides the format's content type """
raise NotImplementedError()
def filename(self, base):
""" Creates a valid filename for the format (with extension) from the
        provided base name (extension-less)
"""
raise NotImplementedError()
def from_data(self, fields, rows):
""" Conversion method from OpenERP's export data to whatever the
current export class outputs
        :param list fields: a list of fields to export
        :param list rows: a list of records to export
        :returns: the serialized export payload
        :rtype: bytes
"""
raise NotImplementedError()
def base(self, data, token):
params = simplejson.loads(data)
model, fields, ids, domain, import_compat = \
operator.itemgetter('model', 'fields', 'ids', 'domain',
'import_compat')(
params)
Model = request.session.model(model)
context = dict(request.context or {}, **params.get('context', {}))
ids = ids or Model.search(domain, 0, False, False, context)
if not request.env[model]._is_an_ordinary_table():
fields = [field for field in fields if field['name'] != 'id']
field_names = map(operator.itemgetter('name'), fields)
import_data = Model.export_data(ids, field_names, self.raw_data, context=context).get('datas',[])
if import_compat:
columns_headers = field_names
else:
columns_headers = [val['label'].strip() for val in fields]
return request.make_response(self.from_data(columns_headers, import_data),
headers=[('Content-Disposition',
content_disposition(self.filename(model))),
('Content-Type', self.content_type)],
cookies={'fileToken': token})
class CSVExport(ExportFormat, http.Controller):
@http.route('/web/export/csv', type='http', auth="user")
@serialize_exception
def index(self, data, token):
return self.base(data, token)
@property
def content_type(self):
return 'text/csv;charset=utf8'
def filename(self, base):
return base + '.csv'
def from_data(self, fields, rows):
fp = StringIO()
writer = csv.writer(fp, quoting=csv.QUOTE_ALL)
writer.writerow([name.encode('utf-8') for name in fields])
for data in rows:
row = []
for d in data:
if isinstance(d, basestring):
d = d.replace('\n',' ').replace('\t',' ')
try:
d = d.encode('utf-8')
except UnicodeError:
pass
if d is False: d = None
row.append(d)
writer.writerow(row)
fp.seek(0)
data = fp.read()
fp.close()
return data
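# Minimal standalone sketch of the CSV serialization above: QUOTE_ALL
# quoting with utf-8 encoded header cells (sample data only).
def _demo_csv_from_data():
    import csv
    from cStringIO import StringIO
    fp = StringIO()
    writer = csv.writer(fp, quoting=csv.QUOTE_ALL)
    writer.writerow([name.encode('utf-8') for name in [u'name', u'note']])
    writer.writerow(['Agrolait', 'line one  line two'])
    return fp.getvalue()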
class ExcelExport(ExportFormat, http.Controller):
# Excel needs raw data to correctly handle numbers and date values
raw_data = True
@http.route('/web/export/xls', type='http', auth="user")
@serialize_exception
def index(self, data, token):
return self.base(data, token)
@property
def content_type(self):
return 'application/vnd.ms-excel'
def filename(self, base):
return base + '.xls'
def from_data(self, fields, rows):
workbook = xlwt.Workbook()
worksheet = workbook.add_sheet('Sheet 1')
for i, fieldname in enumerate(fields):
worksheet.write(0, i, fieldname)
worksheet.col(i).width = 8000 # around 220 pixels
base_style = xlwt.easyxf('align: wrap yes')
date_style = xlwt.easyxf('align: wrap yes', num_format_str='YYYY-MM-DD')
datetime_style = xlwt.easyxf('align: wrap yes', num_format_str='YYYY-MM-DD HH:mm:SS')
for row_index, row in enumerate(rows):
for cell_index, cell_value in enumerate(row):
cell_style = base_style
if isinstance(cell_value, basestring):
cell_value = re.sub("\r", " ", cell_value)
elif isinstance(cell_value, datetime.datetime):
cell_style = datetime_style
elif isinstance(cell_value, datetime.date):
cell_style = date_style
worksheet.write(row_index + 1, cell_index, cell_value, cell_style)
fp = StringIO()
workbook.save(fp)
fp.seek(0)
data = fp.read()
fp.close()
return data
class Reports(http.Controller):
POLLING_DELAY = 0.25
TYPES_MAPPING = {
'doc': 'application/vnd.ms-word',
'html': 'text/html',
'odt': 'application/vnd.oasis.opendocument.text',
'pdf': 'application/pdf',
'sxw': 'application/vnd.sun.xml.writer',
'xls': 'application/vnd.ms-excel',
}
@http.route('/web/report', type='http', auth="user")
@serialize_exception
def index(self, action, token):
action = simplejson.loads(action)
report_srv = request.session.proxy("report")
context = dict(request.context)
context.update(action["context"])
report_data = {}
report_ids = context.get("active_ids", None)
if 'report_type' in action:
report_data['report_type'] = action['report_type']
if 'datas' in action:
if 'ids' in action['datas']:
report_ids = action['datas'].pop('ids')
report_data.update(action['datas'])
report_id = report_srv.report(
request.session.db, request.session.uid, request.session.password,
action["report_name"], report_ids,
report_data, context)
report_struct = None
while True:
report_struct = report_srv.report_get(
request.session.db, request.session.uid, request.session.password, report_id)
if report_struct["state"]:
break
time.sleep(self.POLLING_DELAY)
report = base64.b64decode(report_struct['result'])
if report_struct.get('code') == 'zlib':
report = zlib.decompress(report)
report_mimetype = self.TYPES_MAPPING.get(
report_struct['format'], 'octet-stream')
file_name = action.get('name', 'report')
if 'name' not in action:
reports = request.session.model('ir.actions.report.xml')
res_id = reports.search([('report_name', '=', action['report_name']),],
0, False, False, context)
if len(res_id) > 0:
file_name = reports.read(res_id[0], ['name'], context)['name']
else:
file_name = action['report_name']
file_name = '%s.%s' % (file_name, report_struct['format'])
return request.make_response(report,
headers=[
('Content-Disposition', content_disposition(file_name)),
('Content-Type', report_mimetype),
('Content-Length', len(report))],
cookies={'fileToken': token})
class Apps(http.Controller):
@http.route('/apps/<app>', auth='user')
def get_app_url(self, req, app):
act_window_obj = request.session.model('ir.actions.act_window')
ir_model_data = request.session.model('ir.model.data')
try:
action_id = ir_model_data.get_object_reference('base', 'open_module_tree')[1]
action = act_window_obj.read(action_id, ['name', 'type', 'res_model', 'view_mode', 'view_type', 'context', 'views', 'domain'])
action['target'] = 'current'
except ValueError:
action = False
try:
app_id = ir_model_data.get_object_reference('base', 'module_%s' % app)[1]
except ValueError:
app_id = False
if action and app_id:
action['res_id'] = app_id
action['view_mode'] = 'form'
action['views'] = [(False, u'form')]
sakey = Session().save_session_action(action)
debug = '?debug' if req.debug else ''
return werkzeug.utils.redirect('/web{0}#sa={1}'.format(debug, sakey))
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ProfessionalIT/maxigenios-website
|
sdk/google_appengine/lib/django-1.5/django/contrib/gis/db/backends/base.py
|
104
|
11139
|
"""
Base/mixin classes for the spatial backend database operations and the
`SpatialRefSys` model of the backend.
"""
import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class BaseSpatialOperations(object):
"""
    Base class for the spatial operations of a database backend; each
    spatial backend instantiates a subclass advertising the features
    it supports.
"""
distance_functions = {}
geometry_functions = {}
geometry_operators = {}
geography_operators = {}
geography_functions = {}
gis_terms = {}
truncate_params = {}
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = None
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
area = False
centroid = False
difference = False
distance = False
distance_sphere = False
distance_spheroid = False
envelope = False
force_rhr = False
mem_size = False
bounding_circle = False
num_geom = False
num_points = False
perimeter = False
perimeter3d = False
point_on_surface = False
polygonize = False
reverse = False
scale = False
snap_to_grid = False
sym_difference = False
transform = False
translate = False
union = False
# Aggregates
collect = False
extent = False
extent3d = False
make_line = False
unionagg = False
# Serialization
geohash = False
geojson = False
gml = False
kml = False
svg = False
# Constructors
from_text = False
from_wkb = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
def convert_geom(self, geom_val, geom_field):
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Returns the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError
def get_distance(self, f, value, lookup_type):
"""
Returns the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value):
"""
Returns the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
raise NotImplementedError
def get_expression_column(self, evaluator):
"""
Helper method to return the quoted column string from the evaluator
for its expression.
"""
for expr, col_tup in evaluator.cols:
if expr is evaluator.expression:
return '%s.%s' % tuple(map(self.quote_name, col_tup))
raise Exception("Could not find the column for the expression.")
# Spatial SQL Construction
def spatial_aggregate_sql(self, agg):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
def spatial_lookup_sql(self, lvalue, lookup_type, value, field):
raise NotImplementedError
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError
def spatial_ref_sys(self):
raise NotImplementedError
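# Minimal sketch (hypothetical backend, not part of Django) of how a spatial
# backend layers its capabilities on BaseSpatialOperations: flip the feature
# flags it supports and implement the corresponding hooks.
class _DemoSpatialOperations(BaseSpatialOperations):
    distance = True

    def get_distance(self, f, value, lookup_type):
        # a real backend returns the parameter list for its SQL template
        return [value]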
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
    SpatialRefSys objects to reduce redundant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$')
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as msg:
pass
try:
self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as msg:
pass
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m: return (float(m.group('major')), float(m.group('flattening')))
else: return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
        retrieve the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
        except Exception:
return six.text_type(self.wkt)
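# Quick illustration (sample WKT string) of the regex fallback used when GDAL
# is unavailable: pulling the unit size and name out of a GEOGCS definition.
def _demo_units_regex():
    wkt = ('GEOGCS["WGS 84",DATUM["WGS_1984",'
           'SPHEROID["WGS 84",6378137,298.257223563]],'
           'UNIT["degree",0.01745329251994328]]')
    m = SpatialRefSysMixin.units_regex.match(wkt)
    # -> ('0.01745329251994328', 'degree')
    return m.group('unit'), m.group('unit_name')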
|
mit
|
crate/crate-python
|
src/crate/client/sqlalchemy/tests/bulk_test.py
|
1
|
2714
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
from unittest import TestCase
from unittest.mock import patch, MagicMock
import sqlalchemy as sa
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base
from crate.client.cursor import Cursor
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
class SqlAlchemyBulkTest(TestCase):
def setUp(self):
self.engine = sa.create_engine('crate://')
Base = declarative_base(bind=self.engine)
class Character(Base):
__tablename__ = 'characters'
name = sa.Column(sa.String, primary_key=True)
age = sa.Column(sa.Integer)
self.character = Character
self.session = Session()
@patch('crate.client.connection.Cursor', FakeCursor)
def test_bulk_save(self):
chars = [
self.character(name='Arthur', age=35),
self.character(name='Banshee', age=26),
self.character(name='Callisto', age=37),
]
fake_cursor.description = ()
fake_cursor.rowcount = len(chars)
fake_cursor.executemany.return_value = [
{'rowcount': 1},
{'rowcount': 1},
{'rowcount': 1},
]
self.session.bulk_save_objects(chars)
(stmt, bulk_args), _kwargs = fake_cursor.executemany.call_args
expected_stmt = "INSERT INTO characters (name, age) VALUES (?, ?)"
self.assertEqual(expected_stmt, stmt)
expected_bulk_args = (
('Arthur', 35),
('Banshee', 26),
('Callisto', 37)
)
self.assertEqual(expected_bulk_args, bulk_args)
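# --- Illustrative sketch (not part of the original test): the assertion
# above relies on the DBAPI executemany contract, i.e. bulk_save_objects
# collapses many rows into one parameterized statement plus a sequence of
# parameter tuples. A minimal stand-in cursor makes that explicit:
if __name__ == '__main__':
    class _EchoCursor:
        def executemany(self, sql, seq_of_params):
            # A real driver would send a single bulk request to the server.
            print(sql)
            for params in seq_of_params:
                print(params)

    _EchoCursor().executemany(
        "INSERT INTO characters (name, age) VALUES (?, ?)",
        [('Arthur', 35), ('Banshee', 26), ('Callisto', 37)])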
|
apache-2.0
|
bdh1011/wau
|
venv/lib/python2.7/encodings/euc_jisx0213.py
|
816
|
1051
|
#
# euc_jisx0213.py: Python Unicode Codec for EUC_JISX0213
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('euc_jisx0213')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='euc_jisx0213',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
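# --- Illustrative sketch (not part of the original codec module): these
# classes are normally reached indirectly through the codecs registry, so
# the module is rarely imported by hand. For example:
if __name__ == '__main__':
    import codecs as _codecs
    _info = _codecs.lookup('euc_jisx0213')  # resolved via getregentry()
    print(_info.name)
    print(_info.encode(u'abc'))  # -> ('abc', 3): encoded bytes and length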
|
mit
|
FedoraScientific/salome-paravis
|
test/VisuPrs/MeshPresentation/K9.py
|
1
|
2459
|
# Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
#This case corresponds to: /visu/MeshPresentation/K9 case
# Create mesh presentation for nodes and cells of the given MED file
import sys
from paravistest import datadir, pictureext, get_picture_dir
from presentations import *
from pvsimple import *
import pvserver as paravis
# Create presentations
myParavis = paravis.myParavis
picturedir = get_picture_dir("MeshPresentation/K9")
theFileName = datadir + "SimpleIncludingTetra.med"
print " --------------------------------- "
print "file ", theFileName
print " --------------------------------- "
result = OpenDataFile(theFileName)
aProxy = GetActiveSource()
if aProxy is None:
raise RuntimeError, "Error: can't import file."
else: print "OK"
aView = GetRenderView()
#%Creation of the mesh presentation%
mesh_name = "SimpleIncludingTetra"
#^Presentation on "onNodes" and "onCells" family^
entity_types = [EntityType.NODE,EntityType.CELL]
for entity_type in entity_types:
entity_name = EntityType.get_name(entity_type)
mesh = MeshOnEntity(aProxy, mesh_name ,entity_type)
if mesh is None:
msg = "ERROR!!!Presentation of mesh on '"+entity_name+"' family wasn't created..."
raise RuntimeError, msg
mesh.Visibility=1
reset_view(aView)
Render()
# Add path separator to the end of picture path if necessary
if not picturedir.endswith(os.sep):
picturedir += os.sep
entity_name = EntityType.get_name(entity_type)
# Construct image file name
pic_name = picturedir + mesh_name + "_" + entity_name + "." + pictureext
process_prs_for_test(mesh, aView, pic_name)
|
lgpl-2.1
|
leeon/annotated-django
|
django/conf/locale/hu/formats.py
|
82
|
1123
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y. F j.'
TIME_FORMAT = 'G.i.s'
DATETIME_FORMAT = 'Y. F j. G.i.s'
YEAR_MONTH_FORMAT = 'Y. F'
MONTH_DAY_FORMAT = 'F j.'
SHORT_DATE_FORMAT = 'Y.m.d.'
SHORT_DATETIME_FORMAT = 'Y.m.d. G.i.s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%Y.%m.%d.', # '2006.10.25.'
)
TIME_INPUT_FORMATS = (
'%H.%M.%S', # '14.30.59'
'%H.%M', # '14.30'
)
DATETIME_INPUT_FORMATS = (
'%Y.%m.%d. %H.%M.%S', # '2006.10.25. 14.30.59'
'%Y.%m.%d. %H.%M.%S.%f', # '2006.10.25. 14.30.59.000200'
'%Y.%m.%d. %H.%M', # '2006.10.25. 14.30'
'%Y.%m.%d.', # '2006.10.25.'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' ' # Non-breaking space
NUMBER_GROUPING = 3
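# --- Illustrative sketch (not part of the original settings module):
# Django tries each *_INPUT_FORMATS pattern in order with strptime until
# one parses the input. Demonstrated for DATE_INPUT_FORMATS:
if __name__ == '__main__':
    from datetime import datetime
    for fmt in DATE_INPUT_FORMATS:
        try:
            print(datetime.strptime('2006.10.25.', fmt).date())
            break
        except ValueError:
            continue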
|
bsd-3-clause
|
magvugr/AT
|
EntVirtual/lib/python2.7/site-packages/django/contrib/flatpages/forms.py
|
108
|
2162
|
from django import forms
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.utils.translation import ugettext, ugettext_lazy as _
class FlatpageForm(forms.ModelForm):
url = forms.RegexField(
label=_("URL"),
max_length=100,
regex=r'^[-\w/\.~]+$',
help_text=_("Example: '/about/contact/'. Make sure to have leading and trailing slashes."),
error_messages={
"invalid": _(
"This value must contain only letters, numbers, dots, "
"underscores, dashes, slashes or tildes."
),
},
)
class Meta:
model = FlatPage
fields = '__all__'
def clean_url(self):
url = self.cleaned_data['url']
if not url.startswith('/'):
raise forms.ValidationError(
ugettext("URL is missing a leading slash."),
code='missing_leading_slash',
)
if (settings.APPEND_SLASH and (
(settings.MIDDLEWARE and 'django.middleware.common.CommonMiddleware' in settings.MIDDLEWARE) or
'django.middleware.common.CommonMiddleware' in settings.MIDDLEWARE_CLASSES) and
not url.endswith('/')):
raise forms.ValidationError(
ugettext("URL is missing a trailing slash."),
code='missing_trailing_slash',
)
return url
def clean(self):
url = self.cleaned_data.get('url')
sites = self.cleaned_data.get('sites')
same_url = FlatPage.objects.filter(url=url)
if self.instance.pk:
same_url = same_url.exclude(pk=self.instance.pk)
if sites and same_url.filter(sites__in=sites).exists():
for site in sites:
if same_url.filter(sites=site).exists():
raise forms.ValidationError(
_('Flatpage with url %(url)s already exists for site %(site)s'),
code='duplicate_url',
params={'url': url, 'site': site},
)
return super(FlatpageForm, self).clean()
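# --- Illustrative sketch (not part of the original module): the slash
# checks in clean_url above are independent of the form machinery, so a
# plain-function version makes the rules easy to see:
if __name__ == '__main__':
    def _validate_url(url, append_slash=True):
        if not url.startswith('/'):
            raise ValueError("URL is missing a leading slash.")
        if append_slash and not url.endswith('/'):
            raise ValueError("URL is missing a trailing slash.")
        return url

    print(_validate_url('/about/contact/'))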
|
gpl-3.0
|
skinny121/MCEdit-TallWorlds
|
pymclevel/test/templevel.py
|
13
|
1394
|
import atexit
import os
from os.path import join
import shutil
import tempfile
from pymclevel import mclevel
__author__ = 'Rio'
tempdir = os.path.join(tempfile.gettempdir(), "pymclevel_test")
if not os.path.exists(tempdir):
os.mkdir(tempdir)
def mktemp(suffix):
td = tempfile.mkdtemp(suffix, dir=tempdir)
os.rmdir(td)
return td
class TempLevel(object):
def __init__(self, filename, createFunc=None):
if not os.path.exists(filename):
filename = join("testfiles", filename)
tmpname = mktemp(os.path.basename(filename))
if os.path.exists(filename):
if os.path.isdir(filename):
shutil.copytree(filename, tmpname)
else:
shutil.copy(filename, tmpname)
elif createFunc:
createFunc(tmpname)
else:
raise IOError("File %s not found." % filename)
self.tmpname = tmpname
self.level = mclevel.fromFile(tmpname)
atexit.register(self.removeTemp)
def __del__(self):
if hasattr(self, 'level'):
self.level.close()
del self.level
self.removeTemp()
def removeTemp(self):
if hasattr(self, 'tmpname'):
filename = self.tmpname
if os.path.isdir(filename):
shutil.rmtree(filename)
else:
os.unlink(filename)
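# --- Illustrative sketch (not part of the original module): TempLevel
# copies a fixture into a scratch location and registers cleanup with
# atexit; the same pattern in isolation, reusing this module's imports:
if __name__ == '__main__':
    def _scratch_dir(suffix):
        path = tempfile.mkdtemp(suffix, dir=tempdir)
        atexit.register(shutil.rmtree, path, True)  # ignore_errors=True
        return path

    print _scratch_dir("_demo")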
|
isc
|
askomics/askomics
|
askomics/libaskomics/LdapAuth.py
|
2
|
2823
|
import logging
import ldap
from askomics.libaskomics.ParamManager import ParamManager
class LdapAuth(ParamManager):
"""[summary]
[description]
"""
def __init__(self, settings, session):
ParamManager.__init__(self, settings, session)
self.log = logging.getLogger(__name__)
self.ldap_server = self.settings['askomics.ldap_host']
self.ldap_port = self.settings['askomics.ldap_port']
# self.ldap_bind_dn = self.settings['askomics.ldap_bind_dn']
# self.ldap_bind_passwd = self.settings['askomics.ldap_bind_passwd']
self.ldap_user_search_base = self.settings['askomics.ldap_user_search_base']
self.ldap_user_filter = self.settings['askomics.ldap_user_filter']
self.ldap_username_attr = self.settings['askomics.ldap_username_attr']
self.ldap_email_attr = self.settings['askomics.ldap_email_attr']
self.username = None
self.password = None
self.email = None
def get_user(self, login):
try:
connect = ldap.initialize('ldap://' + self.ldap_server + ':' + self.ldap_port)
connect.set_option(ldap.OPT_REFERRALS, 0)
# connect.simple_bind_s(self.ldap_bind_dn , self.ldap_bind_passwd)
search_filter=self.ldap_user_filter.replace('%s', login)
ldap_user = connect.search_s(self.ldap_user_search_base, ldap.SCOPE_SUBTREE, search_filter, [self.ldap_username_attr, self.ldap_email_attr])
except ldap.INVALID_CREDENTIALS as e:
self.log.debug('Invalid ldap bind credentials')
raise e
except ldap.SERVER_DOWN as e:
raise e
if not ldap_user:
return None
return {
'dn': ldap_user[0][0],
'mail': ldap_user[0][1]['mail'][0].decode(),
'username': ldap_user[0][1]['uid'][0].decode()
}
def authenticate_user(self, username, password):
try:
ldap_client = ldap.initialize('ldap://' + self.ldap_server + ':' + self.ldap_port)
ldap_client.set_option(ldap.OPT_REFERRALS,0)
ldap_user = self.get_user(username)
if not ldap_user:
self.log.debug("No user registered in ldap with " + username)
return None
user_dn = ldap_user['dn']
ldap_client.simple_bind_s(user_dn, password)
except ldap.INVALID_CREDENTIALS:
self.log.debug('Wrong password for ldap user ' + username)
ldap_client.unbind()
return None
except ldap.SERVER_DOWN as e:
raise e
ldap_client.unbind()
return ldap_user
def check_password(self, username, password):
if self.authenticate_user(username, password):
return True
return False
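# --- Illustrative sketch (not part of the original module):
# authenticate_user above is the classic LDAP search-then-bind flow:
# resolve the user's DN, then attempt a simple bind as that DN with the
# supplied password. Condensed, with hypothetical URI/DN values:
if __name__ == '__main__':
    def _bind_as_user(server_uri, user_dn, password):
        client = ldap.initialize(server_uri)
        client.set_option(ldap.OPT_REFERRALS, 0)
        try:
            client.simple_bind_s(user_dn, password)
            return True
        except ldap.INVALID_CREDENTIALS:
            return False
        finally:
            client.unbind()

    # Needs a reachable directory, so the call below is illustrative only:
    # _bind_as_user('ldap://localhost:389', 'uid=jdoe,dc=example,dc=org', 'pw')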
|
agpl-3.0
|
google/material-design-icons
|
update/venv/lib/python3.9/site-packages/fontTools/colorLib/table_builder.py
|
5
|
7714
|
"""
colorLib.table_builder: Generic helper for filling in BaseTable derivatives from tuples and maps and such.
"""
import collections
import enum
from fontTools.ttLib.tables.otBase import (
BaseTable,
FormatSwitchingBaseTable,
UInt8FormatSwitchingBaseTable,
)
from fontTools.ttLib.tables.otConverters import (
ComputedInt,
SimpleValue,
Struct,
Short,
UInt8,
UShort,
VarInt16,
VarUInt16,
IntValue,
FloatValue,
)
from fontTools.misc.roundTools import otRound
class BuildCallback(enum.Enum):
"""Keyed on (BEFORE_BUILD, class[, Format if available]).
Receives (dest, source).
Should return (dest, source), which can be new objects.
"""
BEFORE_BUILD = enum.auto()
"""Keyed on (AFTER_BUILD, class[, Format if available]).
Receives (dest).
Should return dest, which can be a new object.
"""
AFTER_BUILD = enum.auto()
"""Keyed on (CREATE_DEFAULT, class).
Receives no arguments.
Should return a new instance of class.
"""
CREATE_DEFAULT = enum.auto()
def _assignable(convertersByName):
return {k: v for k, v in convertersByName.items() if not isinstance(v, ComputedInt)}
def convertTupleClass(tupleClass, value):
if isinstance(value, tupleClass):
return value
if isinstance(value, tuple):
return tupleClass(*value)
return tupleClass(value)
def _isNonStrSequence(value):
return isinstance(value, collections.abc.Sequence) and not isinstance(value, str)
def _set_format(dest, source):
if _isNonStrSequence(source):
assert len(source) > 0, f"{type(dest)} needs at least format from {source}"
dest.Format = source[0]
source = source[1:]
elif isinstance(source, collections.abc.Mapping):
assert "Format" in source, f"{type(dest)} needs at least Format from {source}"
dest.Format = source["Format"]
else:
raise ValueError(f"Not sure how to populate {type(dest)} from {source}")
assert isinstance(
dest.Format, collections.abc.Hashable
), f"{type(dest)} Format is not hashable: {dest.Format}"
assert (
dest.Format in dest.convertersByName
), f"{dest.Format} invalid Format of {cls}"
return source
class TableBuilder:
"""
Helps to populate things derived from BaseTable from maps, tuples, etc.
A table of lifecycle callbacks may be provided to add logic beyond what is possible
based on otData info for the target class. See BuildCallbacks.
"""
def __init__(self, callbackTable=None):
if callbackTable is None:
callbackTable = {}
self._callbackTable = callbackTable
def _convert(self, dest, field, converter, value):
tupleClass = getattr(converter, "tupleClass", None)
enumClass = getattr(converter, "enumClass", None)
if tupleClass:
value = convertTupleClass(tupleClass, value)
elif enumClass:
if isinstance(value, enumClass):
pass
elif isinstance(value, str):
try:
value = getattr(enumClass, value.upper())
except AttributeError:
raise ValueError(f"{value} is not a valid {enumClass}")
else:
value = enumClass(value)
elif isinstance(converter, IntValue):
value = otRound(value)
elif isinstance(converter, FloatValue):
value = float(value)
elif isinstance(converter, Struct):
if converter.repeat:
if _isNonStrSequence(value):
value = [self.build(converter.tableClass, v) for v in value]
else:
value = [self.build(converter.tableClass, value)]
setattr(dest, converter.repeat, len(value))
else:
value = self.build(converter.tableClass, value)
elif callable(converter):
value = converter(value)
setattr(dest, field, value)
def build(self, cls, source):
assert issubclass(cls, BaseTable)
if isinstance(source, cls):
return source
callbackKey = (cls,)
dest = self._callbackTable.get(
(BuildCallback.CREATE_DEFAULT,) + callbackKey, lambda: cls()
)()
assert isinstance(dest, cls)
convByName = _assignable(cls.convertersByName)
skippedFields = set()
# For format switchers we need to resolve converters based on format
if issubclass(cls, FormatSwitchingBaseTable):
source = _set_format(dest, source)
convByName = _assignable(convByName[dest.Format])
skippedFields.add("Format")
callbackKey = (cls, dest.Format)
# Convert sequence => mapping so before thunk only has to handle one format
if _isNonStrSequence(source):
# Sequence (typically list or tuple) assumed to match fields in declaration order
assert len(source) <= len(
convByName
), f"Sequence of {len(source)} too long for {cls}; expected <= {len(convByName)} values"
source = dict(zip(convByName.keys(), source))
dest, source = self._callbackTable.get(
(BuildCallback.BEFORE_BUILD,) + callbackKey, lambda d, s: (d, s)
)(dest, source)
if isinstance(source, collections.abc.Mapping):
for field, value in source.items():
if field in skippedFields:
continue
converter = convByName.get(field, None)
if not converter:
raise ValueError(
f"Unrecognized field {field} for {cls}; expected one of {sorted(convByName.keys())}"
)
self._convert(dest, field, converter, value)
else:
# let's try as a 1-tuple
dest = self.build(cls, (source,))
dest = self._callbackTable.get(
(BuildCallback.AFTER_BUILD,) + callbackKey, lambda d: d
)(dest)
return dest
class TableUnbuilder:
def __init__(self, callbackTable=None):
if callbackTable is None:
callbackTable = {}
self._callbackTable = callbackTable
def unbuild(self, table):
assert isinstance(table, BaseTable)
source = {}
callbackKey = (type(table),)
if isinstance(table, FormatSwitchingBaseTable):
source["Format"] = int(table.Format)
callbackKey += (table.Format,)
for converter in table.getConverters():
if isinstance(converter, ComputedInt):
continue
value = getattr(table, converter.name)
tupleClass = getattr(converter, "tupleClass", None)
enumClass = getattr(converter, "enumClass", None)
if tupleClass:
source[converter.name] = tuple(value)
elif enumClass:
source[converter.name] = value.name.lower()
elif isinstance(converter, Struct):
if converter.repeat:
source[converter.name] = [self.unbuild(v) for v in value]
else:
source[converter.name] = self.unbuild(value)
elif isinstance(converter, SimpleValue):
# "simple" values (e.g. int, float, str) need no further un-building
source[converter.name] = value
else:
raise NotImplementedError(
"Don't know how unbuild {value!r} with {converter!r}"
)
source = self._callbackTable.get(callbackKey, lambda s: s)(source)
return source
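# --- Illustrative sketch (not part of the original module): both classes
# above dispatch through a callback table keyed on (event, class[, Format])
# and fall back to identity functions when no callback is registered. The
# lookup pattern in isolation:
if __name__ == '__main__':
    _callbacks = {
        ("BEFORE_BUILD", "Demo"): lambda dest, src: (dest, {**src, "x": 1}),
    }

    def _build(name, source):
        dest = {}
        dest, source = _callbacks.get(
            ("BEFORE_BUILD", name), lambda d, s: (d, s))(dest, source)
        dest.update(source)
        return _callbacks.get(("AFTER_BUILD", name), lambda d: d)(dest)

    print(_build("Demo", {"y": 2}))  # -> {'y': 2, 'x': 1}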
|
apache-2.0
|
Silmathoron/NNGT
|
doc/examples/attributes.py
|
1
|
6916
|
#-*- coding:utf-8 -*-
#
# This file is part of the NNGT project to generate and analyze
# neuronal networks and their activity.
# Copyright (C) 2015-2019 Tanguy Fardet
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
''' Node and edge attributes '''
import numpy as np
import nngt
import nngt.generation as ng
''' -------------- #
# Generate a graph #
# -------------- '''
num_nodes = 1000
avg_deg = 25
graph = ng.erdos_renyi(nodes=num_nodes, avg_deg=avg_deg)
''' ----------------- #
# Add node attributes #
# ----------------- '''
# Let's make a network of animals where nodes represent either cats or dogs.
# (no discrimination against cats or dogs was intended, no animals were harmed
# while writing or running this code)
animals = ["cat" for _ in range(600)] # 600 cats
animals += ["dog" for _ in range(400)] # and 400 dogs
np.random.shuffle(animals) # which we assign randomly to the nodes
graph.new_node_attribute("animal", value_type="string", values=animals)
# Let's check the type of the first six animals
print(graph.get_node_attributes([0, 1, 2, 3, 4, 5], "animal"))
# Nodes can have attributes of multiple types; let's add a size to our animals
catsizes = np.random.normal(50, 5, 600) # cats around 50 cm
dogsizes = np.random.normal(80, 10, 400) # dogs around 80 cm
# We first create the attribute without values (for "double", the default is NaN)
graph.new_node_attribute("size", value_type="double")
# We now have two attributes: one containing strings, the other numbers (double)
print(graph.node_attributes)
# get the cats and set their sizes
cats = graph.get_nodes(attribute="animal", value="cat")
graph.set_node_attribute("size", values=catsizes, nodes=cats)
# We set 600 values so there are 400 NaNs left
assert np.sum(np.isnan(graph.get_node_attributes(name="size"))) == 400, \
"There were not 400 NaNs as predicted."
# None of the NaN values belongs to a cat
assert not np.any(np.isnan(graph.get_node_attributes(cats, name="size"))), \
"Got some cats with NaN size! :'("
# get the dogs and set their sizes
dogs = graph.get_nodes(attribute="animal", value="dog")
graph.set_node_attribute("size", values=dogsizes, nodes=dogs)
# Some of the animals are part of human households; they therefore have
# "owners", which will be represented here through a Human class.
# Animals without an owner will have an empty list as attribute.
class Human:
def __init__(self, name):
self.name = name
def __repr__(self):
return "Human<{}>".format(self.name)
# John owns all animals between 8 and 48
John = Human("John")
animals = [i for i in range(8, 49)]
graph.new_node_attribute("owners", value_type="object", val=[])
graph.set_node_attribute("owners", val=[John], nodes=animals)
# Now suppose another human, Julie, owns all animals between 0 and 40
Julie = Human("Julie")
animals = [i for i in range(0, 41)]
# to update the values, we need to get them to add Julie to the list
owners = graph.get_node_attributes(name="owners", nodes=animals)
for interactions in owners:
interactions.append(Julie)
graph.set_node_attribute("owners", values=owners, nodes=animals)
# now some of the initial owners should have had their attributes updated
new_owners = graph.get_node_attributes(name="owners")
print("There are animals owned only by", new_owners[0], "others owned only by",
new_owners[48], "and some more owned by both", new_owners[40])
''' ---------- #
# Edge weights #
# ---------- '''
# Same as for node attributes, one can give attributes to the edges
# Let's give weights to the edges depending on how often the animals interact!
# cats interact a lot among themselves, so we'll give them high weights
cat_edges = graph.get_edges(source_node=cats, target_node=cats)
# check that these are indeed only between cats
cat_set = set(cats)
node_set = set(np.unique(cat_edges))
assert cat_set == node_set, "Damned, something wrong happened to the cats!"
# uniform distribution of weights between 30 and 50
graph.set_weights(elist=cat_edges, distribution="uniform",
parameters={"lower": 30, "upper": 50})
# dogs have less occasions to interact except some which spend a lot of time
# together, so we use a lognormal distribution
dog_edges = graph.get_edges(source_node=dogs, target_node=dogs)
graph.set_weights(elist=dog_edges, distribution="lognormal",
parameters={"position": 2.2, "scale": 0.5})
# Cats do not like dogs, so we set their weights to -5
# Dogs like chasing cats but do not like them much either, so we keep the
# default value of 1
cd_edges = graph.get_edges(source_node=cats, target_node=dogs)
graph.set_weights(elist=cd_edges, distribution="constant",
parameters={"value": -5})
# Let's check the distribution (you should clearly see 4 separate shapes)
if nngt.get_config("with_plot"):
nngt.plot.edge_attributes_distribution(graph, "weight")
''' ------------------- #
# Other edge attributes #
# ------------------- '''
# non-default edge attributes can be created in the same way as node
# attributes; let's reuse the Human class from above and store, for each
# edge, the humans with whom both animals have interacted (the default
# will be an empty list if they did not)
# Alice interacted with all animals between 8 and 48
Alice = Human("Alice")
animals = [i for i in range(8, 49)]
edges = graph.get_edges(source_node=animals, target_node=animals)
graph.new_edge_attribute("common_interaction", value_type="object", val=[])
graph.set_edge_attribute("common_interaction", val=[Alice], edges=edges)
# Now suppose another human, Bob, interacted with all animals between 0 and 40
Bob = Human("Bob")
animals = [i for i in range(0, 41)]
edges2 = graph.get_edges(source_node=animals, target_node=animals)
# to update the values, we need to get them to add Bob to the list
ci = graph.get_edge_attributes(name="common_interaction", edges=edges2)
for interactions in ci:
interactions.append(Bob)
graph.set_edge_attribute("common_interaction", values=ci, edges=edges2)
# now some of the initial `edges` should have had their attributes updated
new_ci = graph.get_edge_attributes(name="common_interaction", edges=edges)
print(np.sum([0 if len(interaction) < 2 else 1 for interaction in new_ci]),
"interactions have been updated among the", len(edges), "from Alice.")
|
gpl-3.0
|
lutianming/leetcode
|
longest_palindromic_substring.py
|
1
|
1460
|
class Solution:
# @param {string} s
# @return {string}
def longestPalindrome(self, s):
max_length = 0
max_left = 0
max_right = 0
n = len(s)
for i, c in enumerate(s):
left = None
right = None
if (i-1) >= 0 and c == s[i-1]:
left = i-1
right = i
while left >= 0 and right < n:
if s[left] == s[right]:
left -= 1
right += 1
else:
break
length = right - left - 1  # left/right each overshoot by one in the loop
if max_length < length:
max_length = length
max_left = left+1
max_right = right-1
if (i-2) >= 0 and c == s[i-2]:
left = i-2
right = i
while left >= 0 and right < n:
if s[left] == s[right]:
left -= 1
right += 1
else:
break
length = right - left - 1  # left/right each overshoot by one in the loop
if max_length < length:
max_length = length
max_left = left+1
max_right = right-1
return s[max_left:max_right+1]
s = Solution()
# print(s.longestPalindrome("a"))
print(s.longestPalindrome("aaaa"))
# print(s.longestPalindrome("abcb"))
|
mit
|
hollabaq86/haikuna-matata
|
env/lib/python2.7/site-packages/psycopg2/__init__.py
|
61
|
5838
|
"""A Python driver for PostgreSQL
psycopg is a PostgreSQL_ database adapter for the Python_ programming
language. This is version 2, a complete rewrite of the original code to
provide new-style classes for connection and cursor objects and other sweet
candies. Like the original, psycopg 2 was written with the aim of being very
small and fast, and stable as a rock.
Homepage: http://initd.org/projects/psycopg2
.. _PostgreSQL: http://www.postgresql.org/
.. _Python: http://www.python.org/
:Groups:
* `Connections creation`: connect
* `Value objects constructors`: Binary, Date, DateFromTicks, Time,
TimeFromTicks, Timestamp, TimestampFromTicks
"""
# psycopg/__init__.py - initialization of the psycopg module
#
# Copyright (C) 2003-2010 Federico Di Gregorio <[email protected]>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
# Import modules needed by _psycopg to allow tools like py2exe to do
# their work without bothering about the module dependencies.
# Note: the first internal import should be _psycopg, otherwise the real cause
# of a failed loading of the C module may get hidden, see
# http://archives.postgresql.org/psycopg/2011-02/msg00044.php
# Import the DBAPI-2.0 stuff into top-level module.
from psycopg2._psycopg import BINARY, NUMBER, STRING, DATETIME, ROWID
from psycopg2._psycopg import Binary, Date, Time, Timestamp
from psycopg2._psycopg import DateFromTicks, TimeFromTicks, TimestampFromTicks
from psycopg2._psycopg import Error, Warning, DataError, DatabaseError, ProgrammingError
from psycopg2._psycopg import IntegrityError, InterfaceError, InternalError
from psycopg2._psycopg import NotSupportedError, OperationalError
from psycopg2._psycopg import _connect, apilevel, threadsafety, paramstyle
from psycopg2._psycopg import __version__
from psycopg2 import tz
# Register default adapters.
import psycopg2.extensions as _ext
_ext.register_adapter(tuple, _ext.SQL_IN)
_ext.register_adapter(type(None), _ext.NoneAdapter)
# Register the Decimal adapter here instead of in the C layer.
# This way a new class is registered for each sub-interpreter.
# See ticket #52
try:
from decimal import Decimal
except ImportError:
pass
else:
from psycopg2._psycopg import Decimal as Adapter
_ext.register_adapter(Decimal, Adapter)
del Decimal, Adapter
import re
def _param_escape(s,
re_escape=re.compile(r"([\\'])"),
re_space=re.compile(r'\s')):
"""
Apply the escaping rule required by PQconnectdb
"""
if not s: return "''"
s = re_escape.sub(r'\\\1', s)
if re_space.search(s):
s = "'" + s + "'"
return s
del re
def connect(dsn=None,
database=None, user=None, password=None, host=None, port=None,
connection_factory=None, cursor_factory=None, async=False, **kwargs):
"""
Create a new database connection.
The connection parameters can be specified either as a string:
conn = psycopg2.connect("dbname=test user=postgres password=secret")
or using a set of keyword arguments:
conn = psycopg2.connect(database="test", user="postgres", password="secret")
The basic connection parameters are:
- *dbname*: the database name (only in dsn string)
- *database*: the database name (only as keyword argument)
- *user*: user name used to authenticate
- *password*: password used to authenticate
- *host*: database host address (defaults to UNIX socket if not provided)
- *port*: connection port number (defaults to 5432 if not provided)
Using the *connection_factory* parameter a different class or connections
factory can be specified. It should be a callable object taking a dsn
argument.
Using the *cursor_factory* parameter, a new default cursor factory will be
used by cursor().
Using *async*=True an asynchronous connection will be created.
Any other keyword parameter will be passed to the underlying client
library: the list of supported parameters depends on the library version.
"""
items = []
if database is not None:
items.append(('dbname', database))
if user is not None:
items.append(('user', user))
if password is not None:
items.append(('password', password))
if host is not None:
items.append(('host', host))
if port is not None:
items.append(('port', port))
items.extend([(k, v) for (k, v) in kwargs.iteritems() if v is not None])
if dsn is not None and items:
raise TypeError(
"'%s' is an invalid keyword argument when the dsn is specified"
% items[0][0])
if dsn is None:
if not items:
raise TypeError('missing dsn and no parameters')
else:
dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
for (k, v) in items])
conn = _connect(dsn, connection_factory=connection_factory, async=async)
if cursor_factory is not None:
conn.cursor_factory = cursor_factory
return conn
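# --- Illustrative sketch (not part of the original module): connect()
# above assembles keyword arguments into a libpq DSN string. The assembly
# rule in isolation, with escaping delegated to _param_escape:
if __name__ == '__main__':
    _items = [('dbname', 'test'), ('user', 'postgres'),
              ('password', 'secret word')]
    print " ".join(["%s=%s" % (k, _param_escape(str(v)))
                    for (k, v) in _items])
    # -> dbname=test user=postgres password='secret word'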
|
mit
|
saintpai/sos
|
sos/plugins/openssl.py
|
5
|
1873
|
# Copyright (C) 2007 Sadique Puthen <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class OpenSSL(Plugin):
"""OpenSSL configuration
"""
plugin_name = "openssl"
profiles = ('network', 'security')
packages = ('openssl',)
def postproc(self):
protect_keys = [
"input_password",
"output_password",
"challengePassword"
]
regexp = r"(?m)^(\s*#?\s*(%s).*=)(.*)" % "|".join(protect_keys)
self.do_file_sub(
'/etc/ssl/openssl.cnf',
regexp,
r"\1 ******"
)
class RedHatOpenSSL(OpenSSL, RedHatPlugin):
"""openssl related information
"""
files = ('/etc/pki/tls/openssl.cnf',)
def setup(self):
super(RedHatOpenSSL, self).setup()
self.add_copy_spec("/etc/pki/tls/openssl.cnf")
class DebianOpenSSL(OpenSSL, DebianPlugin, UbuntuPlugin):
"""openssl related information for Debian distributions
"""
files = ('/etc/ssl/openssl.cnf',)
def setup(self):
super(DebianOpenSSL, self).setup()
self.add_copy_spec("/etc/ssl/openssl.cnf")
# vim: et ts=4 sw=4
|
gpl-2.0
|
rosmo/ansible
|
lib/ansible/modules/cloud/rackspace/rax_mon_entity.py
|
77
|
5795
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rax_mon_entity
short_description: Create or delete a Rackspace Cloud Monitoring entity
description:
- Create or delete a Rackspace Cloud Monitoring entity, which represents a device
to monitor. Entities associate checks and alarms with a target system and
provide a convenient, centralized place to store IP addresses. Rackspace
monitoring module flow | *rax_mon_entity* -> rax_mon_check ->
rax_mon_notification -> rax_mon_notification_plan -> rax_mon_alarm
version_added: "2.0"
options:
label:
description:
- Defines a name for this entity. Must be a non-empty string between 1 and
255 characters long.
required: true
state:
description:
- Ensure that an entity with this C(name) exists or does not exist.
choices: ["present", "absent"]
agent_id:
description:
- Rackspace monitoring agent on the target device to which this entity is
bound. Necessary to collect C(agent.) rax_mon_checks against this entity.
named_ip_addresses:
description:
- Hash of IP addresses that may be referenced by name by rax_mon_checks
added to this entity. Must be a dictionary of with keys that are names
between 1 and 64 characters long, and values that are valid IPv4 or IPv6
addresses.
metadata:
description:
- Hash of arbitrary C(name), C(value) pairs that are passed to associated
rax_mon_alarms. Names and values must all be between 1 and 255 characters
long.
author: Ash Wilson (@smashwilson)
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Entity example
gather_facts: False
hosts: local
connection: local
tasks:
- name: Ensure an entity exists
rax_mon_entity:
credentials: ~/.rax_pub
state: present
label: my_entity
named_ip_addresses:
web_box: 192.0.2.4
db_box: 192.0.2.5
meta:
hurf: durf
register: the_entity
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.rax import rax_argument_spec, rax_required_together, setup_rax_module
def cloud_monitoring(module, state, label, agent_id, named_ip_addresses,
metadata):
if len(label) < 1 or len(label) > 255:
module.fail_json(msg='label must be between 1 and 255 characters long')
changed = False
cm = pyrax.cloud_monitoring
if not cm:
module.fail_json(msg='Failed to instantiate client. This typically '
'indicates an invalid region or an incorrectly '
'capitalized region name.')
existing = []
for entity in cm.list_entities():
if label == entity.label:
existing.append(entity)
entity = None
if existing:
entity = existing[0]
if state == 'present':
should_update = False
should_delete = False
should_create = False
if len(existing) > 1:
module.fail_json(msg='%s existing entities have the label %s.' %
(len(existing), label))
if entity:
if named_ip_addresses and named_ip_addresses != entity.ip_addresses:
should_delete = should_create = True
# Change an existing Entity, unless there's nothing to do.
should_update = agent_id and agent_id != entity.agent_id or \
(metadata and metadata != entity.metadata)
if should_update and not should_delete:
entity.update(agent_id, metadata)
changed = True
if should_delete:
entity.delete()
else:
should_create = True
if should_create:
# Create a new Entity.
entity = cm.create_entity(label=label, agent=agent_id,
ip_addresses=named_ip_addresses,
metadata=metadata)
changed = True
else:
# Delete the existing Entities.
for e in existing:
e.delete()
changed = True
if entity:
entity_dict = {
"id": entity.id,
"name": entity.name,
"agent_id": entity.agent_id,
}
module.exit_json(changed=changed, entity=entity_dict)
else:
module.exit_json(changed=changed)
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
state=dict(default='present', choices=['present', 'absent']),
label=dict(required=True),
agent_id=dict(),
named_ip_addresses=dict(type='dict', default={}),
metadata=dict(type='dict', default={})
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together()
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
state = module.params.get('state')
label = module.params.get('label')
agent_id = module.params.get('agent_id')
named_ip_addresses = module.params.get('named_ip_addresses')
metadata = module.params.get('metadata')
setup_rax_module(module, pyrax)
cloud_monitoring(module, state, label, agent_id, named_ip_addresses, metadata)
if __name__ == '__main__':
main()
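# --- Illustrative sketch (not part of the original module):
# cloud_monitoring() above reduces to a small idempotent state machine:
# create when 'present' finds no entity, delete when 'absent' finds one,
# otherwise update or no-op. In isolation:
if __name__ == '__main__':
    def _reconcile(state, existing):
        if state == 'present':
            return 'update-or-noop' if existing else 'create'
        return 'delete' if existing else 'noop'

    assert _reconcile('present', []) == 'create'
    assert _reconcile('absent', ['entity']) == 'delete'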
|
gpl-3.0
|
bankonme/OpenBazaar-Server
|
keyutils/keys.py
|
4
|
1674
|
__author__ = 'chris'
import bitcoin
import nacl.signing
import nacl.encoding
from db.datastore import KeyStore
from keyutils.guid import GUID
from nacl.public import PrivateKey
class KeyChain(object):
def __init__(self):
self.db = KeyStore()
guid_keys = self.db.get_key("guid")
if guid_keys is None:
self.create_keychain()
else:
g = GUID.from_privkey(guid_keys[0])
self.guid = g.guid
self.guid_privkey = g.privkey
self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
self.guid_signed_pubkey = g.signed_pubkey
# pylint: disable=W0633
self.bitcoin_master_privkey, self.bitcoin_master_pubkey = self.db.get_key("bitcoin")
self.encryption_key = PrivateKey(self.guid_privkey)
self.encryption_pubkey = self.encryption_key.public_key.encode()
def create_keychain(self):
print "Generating GUID, stand by..."
g = GUID()
self.guid = g.guid
self.guid_privkey = g.privkey
self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
self.guid_signed_pubkey = g.signed_pubkey
self.db.set_key("guid", self.guid_privkey, self.guid_signed_pubkey)
self.bitcoin_master_privkey = bitcoin.bip32_master_key(bitcoin.sha256(self.guid_privkey))
self.bitcoin_master_pubkey = bitcoin.bip32_privtopub(self.bitcoin_master_privkey)
self.db.set_key("bitcoin", self.bitcoin_master_privkey, self.bitcoin_master_pubkey)
self.encryption_key = PrivateKey(self.guid_privkey)
self.encryption_pubkey = self.encryption_key.public_key.encode()
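# --- Illustrative sketch (not part of the original module): KeyChain
# derives every key (signing, encryption, bitcoin master) from the single
# GUID private key. The derive-from-one-secret idea, with hashlib standing
# in for the nacl/bitcoin primitives used above:
if __name__ == '__main__':
    import binascii
    import hashlib
    _seed = 'example guid private key'
    print binascii.hexlify(hashlib.sha256('sign' + _seed).digest())
    print binascii.hexlify(hashlib.sha256('encrypt' + _seed).digest())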
|
mit
|
jesseengel/magenta
|
magenta/music/musicnet_io.py
|
1
|
3870
|
# Copyright 2019 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Import NoteSequences from MusicNet."""
from magenta.protobuf import music_pb2
import numpy as np
from six import BytesIO
import tensorflow as tf
MUSICNET_SAMPLE_RATE = 44100
MUSICNET_NOTE_VELOCITY = 100
def note_interval_tree_to_sequence_proto(note_interval_tree, sample_rate):
"""Convert MusicNet note interval tree to a NoteSequence proto.
Args:
note_interval_tree: An intervaltree.IntervalTree containing note intervals
and data as found in the MusicNet archive. The interval begin and end
values are audio sample numbers.
sample_rate: The sample rate for which the note intervals are defined.
Returns:
A NoteSequence proto containing the notes in the interval tree.
"""
sequence = music_pb2.NoteSequence()
# Sort note intervals by onset time.
note_intervals = sorted(note_interval_tree,
key=lambda note_interval: note_interval.begin)
# MusicNet represents "instruments" as MIDI program numbers. Here we map each
# program to a separate MIDI instrument.
instruments = {}
for note_interval in note_intervals:
note_data = note_interval.data
note = sequence.notes.add()
note.pitch = note_data[1]
note.velocity = MUSICNET_NOTE_VELOCITY
note.start_time = float(note_interval.begin) / sample_rate
note.end_time = float(note_interval.end) / sample_rate
# MusicNet "instrument" numbers use 1-based indexing, so we subtract 1 here.
note.program = note_data[0] - 1
note.is_drum = False
if note.program not in instruments:
instruments[note.program] = len(instruments)
note.instrument = instruments[note.program]
if note.end_time > sequence.total_time:
sequence.total_time = note.end_time
return sequence
def musicnet_iterator(musicnet_file):
"""An iterator over the MusicNet archive that yields audio and NoteSequences.
The MusicNet archive (in .npz format) can be downloaded from:
https://homes.cs.washington.edu/~thickstn/media/musicnet.npz
Args:
musicnet_file: The path to the MusicNet NumPy archive (.npz) containing
audio and transcriptions for 330 classical recordings.
Yields:
Tuples where the first element is a NumPy array of sampled audio (at 44.1
kHz) and the second element is a NoteSequence proto containing the
transcription.
"""
with tf.gfile.GFile(musicnet_file, 'rb') as f:
# Unfortunately the gfile seek function breaks the reading of NumPy
# archives, so we read the archive first then load as BytesIO.
musicnet_bytes = f.read()
musicnet_bytesio = BytesIO(musicnet_bytes)
musicnet = np.load(musicnet_bytesio, encoding='latin1')
for file_id in musicnet.files:
audio, note_interval_tree = musicnet[file_id]
sequence = note_interval_tree_to_sequence_proto(
note_interval_tree, MUSICNET_SAMPLE_RATE)
sequence.filename = file_id
sequence.collection_name = 'MusicNet'
sequence.id = '/id/musicnet/%s' % file_id
sequence.source_info.source_type = (
music_pb2.NoteSequence.SourceInfo.PERFORMANCE_BASED)
sequence.source_info.encoding_type = (
music_pb2.NoteSequence.SourceInfo.MUSICNET)
sequence.source_info.parser = (
music_pb2.NoteSequence.SourceInfo.MAGENTA_MUSICNET)
yield audio, sequence
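# --- Illustrative sketch (not part of the original module): the core unit
# conversion above turns interval sample numbers into seconds at the
# archive's fixed 44.1 kHz rate. For a note interval (begin, end):
if __name__ == '__main__':
    begin, end = 22050, 44100
    print('%.2fs - %.2fs' % (begin / float(MUSICNET_SAMPLE_RATE),
                             end / float(MUSICNET_SAMPLE_RATE)))
    # -> 0.50s - 1.00s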
|
apache-2.0
|
DReckoning/AcousticPositioning
|
Client/PulseCorrection.py
|
1
|
1744
|
# This will correct missing or false pulses.
def process(rocks,syncs,nn):
# checker
for n in range(nn):
rockavg = 0
for s in rocks[n]:
rockavg+= float(s)/float(len(rocks[n]))
lastsamp = 0
i = 0
print 'avg ',rockavg
for s in rocks[n]:
i = i+1
if (lastsamp == 0):
lastsamp = s
else:
if (((s - lastsamp) - rockavg)>0.25):
print '\nRock Error on node ',n,' sample ',i
lastsamp = s
# same check, applied to the sync pulses
for n in range(nn):
rockavg = 0
for s in syncs[n]:
rockavg+= float(s)/float(len(syncs[n]))
lastsamp = 0
i = 0
for s in syncs[n]:
i = i+1
if (lastsamp == 0):
lastsamp = s
else:
if (((s - lastsamp) - rockavg)>0.25):
print '\nSync Error on node ',n,' sample ',i
lastsamp = s
minrocks = 999
minsyncs = 999
newsyncs = []
newrocks = []
for n in range(nn):
if (minrocks > len(rocks[n])):
minrocks = len(rocks[n])
if (minsyncs > len(syncs[n])):
minsyncs = len(syncs[n])
print '\nHACKING OFF'
for n in range(nn):
thisnodesrocks = []
thisnodessyncs = []
for i in range(minrocks):
thisnodesrocks.append(rocks[n][i])
for i in range(minsyncs):
thisnodessyncs.append(syncs[n][i])
newsyncs.append(thisnodessyncs)
newrocks.append(thisnodesrocks)
print len(rocks[n]) - len(thisnodesrocks)
print len(syncs[n]) - len(thisnodessyncs)
return [newrocks,newsyncs]
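# --- Illustrative sketch (not part of the original module): the checker
# loops above flag a pulse when its gap from the previous pulse deviates
# from an average by more than 0.25 (note the original averages the raw
# sample values, not the gaps). The gap test against a mean gap:
if __name__ == '__main__':
    samples = [0.0, 1.0, 2.0, 3.6, 4.0]
    gaps = [b - a for a, b in zip(samples, samples[1:])]
    mean_gap = sum(gaps) / float(len(gaps))
    for i, gap in enumerate(gaps, start=2):
        if gap - mean_gap > 0.25:
            print 'pulse error at sample %d' % i  # fires for the 1.6 gap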
|
gpl-2.0
|
ghisvail/scikit-fftw
|
skfftw/backend/_cffi.py
|
1
|
2359
|
# coding: utf8
# Copyright (c) 2014, 2015 Ghislain Antony Vaillant.
#
# This file is distributed under the new BSD License, see the LICENSE file or
# checkout the license terms at http://opensource.org/licenses/BSD-2-Clause).
from __future__ import absolute_import, division, print_function
import binascii
import os
import sys
import threading
from cffi import FFI
from cffi.verifier import Verifier
__all__ = ('ffi', 'lib')
class LazyLibrary(object):
def __init__(self, ffi):
self._ffi = ffi
self._lib = None
self._lock = threading.Lock()
def __getattr__(self, name):
if self._lib is None:
with self._lock:
if self._lib is None:
self._lib = self._ffi.verifier.load_library()
return getattr(self._lib, name)
def _create_modulename(cdef_sources, source, sys_version):
"""
This is the same as CFFI's create modulename except we don't include the
CFFI version.
"""
key = '\x00'.join([sys_version[:3], source, cdef_sources])
key = key.encode('utf-8')
k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
k1 = k1.lstrip('0x').rstrip('L')
k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
k2 = k2.lstrip('0').rstrip('L')
return '_{0}_cffi_{1}{2}'.format("skfftw", k1, k2)
def _compile_module(*args, **kwargs):
raise RuntimeError(
"Attempted implicit compile of a cffi module. All cffi modules should "
"be pre-compiled at installation time."
)
# Load the cffi definitions
here = os.path.dirname(__file__)
with open(os.path.join(here, '_cdefs.h')) as f:
_cdefs_source = f.read()
with open(os.path.join(here, '_verify.c')) as f:
_verify_source = f.read()
# Make the ffi instance
ffi = FFI()
ffi.cdef(_cdefs_source)
ffi.verifier = Verifier(
ffi,
_verify_source,
modulename=_create_modulename(_cdefs_source, _verify_source, sys.version),
ext_package='skfftw',
libraries=['fftw3', 'fftw3_threads', 'fftw3f', 'fftw3f_threads',
'fftw3l', 'fftw3l_threads'],
include_dirs=[],
library_dirs=[],
runtime_library_dirs=[],
)
# Patch the Verifier() instance to prevent CFFI from compiling the module
ffi.verifier.compile_module = _compile_module
ffi.verifier._compile_module = _compile_module
# Export the library object
lib = LazyLibrary(ffi)
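# --- Illustrative sketch (not part of the original module): LazyLibrary
# above defers the expensive verifier.load_library() call and guards it
# with double-checked locking. The same pattern generically:
if __name__ == '__main__':
    class _Lazy(object):
        def __init__(self, factory):
            self._factory = factory
            self._obj = None
            self._lock = threading.Lock()

        def get(self):
            if self._obj is None:
                with self._lock:
                    if self._obj is None:  # re-check under the lock
                        self._obj = self._factory()
            return self._obj

    print(_Lazy(dict).get())  # -> {}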
|
bsd-3-clause
|
OpenSourcePolicyCenter/multi-country
|
Python/Archive/Stage2/Unincorporated Demographics/StepbyStepv1.py
|
2
|
35778
|
from __future__ import division
import sys
import numpy as np
import scipy as sp
import scipy.optimize as opt
import time as time
from matplotlib import pyplot as plt
#DEMOGRAPHICS FUNCTIONS
def getDemographics(params, PrintAges, DiffDemog):
"""
Description:
-Imports data from csv files for initial populations, fertility rates, mortality rates, and net migrants.
-Stores these data sets in their respective matrices, and calculates population distributions through year T.
-NOTE: FOR NOW THIS FUNCTION ONLY USES DATA FOR THE USA. NEEDS TO EVENTUALLY ADD MORE COUNTRIES
Inputs:
-None, but uses the global variables T, T_1, StartFertilityAge, EndFertilityAge, StartDyingAge, and MaxImmigrantAge
Objects in Function:
-USAPopdata: (S+1) vector that has the initial population of the U.S straight from the csv
-USAFertdata: (T_1,EndFertilityAge+2-StartFertilityAge) vector that has U.S. fertility straight from the csv
-USAMortdata: (T_1,S+1-StartDyingAge) vector that has U.S. mortality straight from the csv
-USAMigdata: (MaxImmigrantAge) vector that contains the number of net U.S. migrants straight from the csv
-g_N: (T) vector that contains the exogenous population growth rates
-g_A: Constant that represents the technical growth rate
-l_endowment: (T) vector labor endowment per household
-f_bar: (I) vector that represents the fertility rate after period T_1
-p_bar: (I) vector that represents the mortality rate after period T_1
-m_bar: (I) vector that represents the immigration rate after period T_1
Output:
-FertilityRates: Numpy array that contains fertilty rates for all countries, ages, and years
-MortalityRates: Numpy array that contains mortality rates for all countries, ages, and years
-Migrants: Numpy array that contains net migration for all countries and ages
-N_matrix: Numpy array that contains population numbers for all countries, ages, and years
-Nhat_matrix: Numpy array that contains the population percentage for all countries, ages, and years
"""
I, S, T, T_1, StartFertilityAge, EndFertilityAge, StartDyingAge, MaxImmigrantAge, g_A = params
#Initializes demographics matrices
N_matrix = np.zeros((I, S+1, T+S+1))
Nhat_matrix = np.zeros((I, S+1, T+S+1))
#N_temp = np.zeros((I, S+1, T+S+1))
FertilityRates = np.zeros((I, S+1, T+S+1))
MortalityRates = np.zeros((I, S+1, T+S+1))
Migrants = np.zeros((I, S+1, T+S+1))
g_N = np.zeros(T+S+1)
if PrintAges:
print "T =", T
print "T_1", T_1
print "StartFertilityAge", StartFertilityAge
print "EndFertilityAge", EndFertilityAge
print "StartDyingAge", StartDyingAge
print "MaxImmigrantAge", MaxImmigrantAge
if DiffDemog:
if I > 7:
sys.exit("ERROR!!! We can't have more than 7 Countries without unique data. Change either parameter I so it is less than 8 or change DiffDemog to False")
countrynames = ["usa", "eu", "japan", "china", "india", "russia", "korea"]
for i in range(I):
#print "Got demographics for", countrynames[i]
N_matrix[i,:,0] = np.loadtxt(("Data_Files/population.csv"),delimiter=',',skiprows=1, usecols=[i+1])[:S+1]*1000
FertilityRates[i,StartFertilityAge:EndFertilityAge+1,:T_1] = np.transpose(np.loadtxt(str("Data_Files/" + countrynames[i] + "_fertility.csv"),delimiter=',',skiprows=1, usecols=range(1,EndFertilityAge+2-StartFertilityAge))[48:48+T_1,:])
MortalityRates[i,StartDyingAge:-1,:T_1] = np.transpose(np.loadtxt(str("Data_Files/" + countrynames[i] + "_mortality.csv"),delimiter=',',skiprows=1, usecols=range(1,S+1-StartDyingAge))[:T_1,:])
Migrants[i,:MaxImmigrantAge,:T_1] = np.einsum("s,t->st",np.loadtxt(("Data_Files/net_migration.csv"),delimiter=',',skiprows=1, usecols=[i+1])[:MaxImmigrantAge]*100, np.ones(T_1))
else:
#Imports and scales data for the USA. Imports a certain number of generations according to the value of S
USAPopdata = np.loadtxt(("Data_Files/population.csv"),delimiter=',',skiprows=1, usecols=[1])[:S+1]*1000
USAFertdata = np.loadtxt(("Data_Files/usa_fertility.csv"),delimiter=',',skiprows=1, usecols=range(1,EndFertilityAge+2-StartFertilityAge))[48:48+T_1,:]
USAMortdata = np.loadtxt(("Data_Files/usa_mortality.csv"),delimiter=',',skiprows=1, usecols=range(1,S+1-StartDyingAge))[:T_1,:]
USAMigdata = np.loadtxt(("Data_Files/net_migration.csv"),delimiter=',',skiprows=1, usecols=[1])[:MaxImmigrantAge]*100
#NOTE: For now we set fertility, mortality, number of migrants, and initial population the same for all countries.
#Sets initial total population
N_matrix[:,:,0] = np.tile(USAPopdata, (I, 1))
#Fertility Will be equal to 0 for all ages that don't bear children
FertilityRates[:,StartFertilityAge:EndFertilityAge+1,:T_1] = np.einsum("ts,i->ist", USAFertdata, np.ones(I))
#Mortality be equal to 0 for all young people who aren't old enough to die
MortalityRates[:,StartDyingAge:-1,:T_1] = np.einsum("ts,it->ist", USAMortdata, np.ones((I,T_1)))
#The number of migrants is the same for each year
Migrants[:,:MaxImmigrantAge,:T_1] = np.einsum("s,it->ist", USAMigdata, np.ones((I,T_1)))
Nhat_matrix[:,:,0] = N_matrix[:,:,0]/np.sum(N_matrix[:,:,0])
N_temp = np.ones((I, S+1))/(I*S)
#The last generation dies with probability 1
MortalityRates[:,-1,:] = np.ones((I, T+S+1))
#Gets steady-state values
f_bar = FertilityRates[:,:,T_1-1]
p_bar = MortalityRates[:,:,T_1-1]
m_bar = Migrants[:,:,T_1-1]
#Set to the steady state for every year beyond year T_1
FertilityRates[:,:,T_1:] = np.tile(np.expand_dims(f_bar, axis=2), (1,1,T-T_1+S+1))
MortalityRates[:,:,T_1:] = np.tile(np.expand_dims(p_bar, axis=2), (1,1,T-T_1+S+1))
Migrants[:,:,T_1:] = np.tile(np.expand_dims(m_bar, axis=2), (1,1,T-T_1+S+1))
#Gets the initial immigration rate
ImmigrationRate = Migrants[:,:,0]/N_matrix[:,:,0]
#Gets initial world population growth rate
g_N[0] = 0.
#Calculates population numbers for each country
for t in range(1,T+S+1):
#Gets the total number of children and the percentage of children, storing them in generation 0 of their respective matrices
#See equations 2.1 and 2.10
N_matrix[:,0,t] = np.sum((N_matrix[:,:,t-1]*FertilityRates[:,:,t-1]),axis=1)
N_temp[:,0] = np.sum((Nhat_matrix[:,:,t-1]*FertilityRates[:,:,t-1]),axis=1)
#Finds the immigration rate for each year
ImmigrationRate = Migrants[:,:,t-1]/N_matrix[:,:,t-1]
#Gets the population distribution and percentage of population distribution for the next year, taking into account immigration and mortality
#See equations 2.2 and 2.11
N_matrix[:,1:,t] = N_matrix[:,:-1,t-1]*(1+ImmigrationRate[:,:-1]-MortalityRates[:,:-1,t-1])
Nhat_matrix[:,:,t] = N_matrix[:,:,t]/np.sum(N_matrix[:,:,t])
N_temp[:,1:] = Nhat_matrix[:,:-1,t-1]*(1+ImmigrationRate[:,:-1]-MortalityRates[:,:-1,t-1])
#Gets the growth rate for the next year
g_N[t] = np.sum(N_temp[:,:])-1
#Gets labor endowment per household. For now it grows at a constant rate g_A
l_endowment = np.cumsum(np.ones(T)*g_A)
return FertilityRates, MortalityRates, Migrants, N_matrix, Nhat_matrix
def plotDemographics(params, index, years, name, N_matrix):
"""
Description:
Plots the population distribution of a given country for any number of specified years
Inputs:
index: Integer that indicates which country to plot
years: List that contains each year to plot
name: String of the country's name. Used in the legend of the plot
Outputs:
None
"""
S, T = params
for y in range(len(years)):
yeartograph = years[y]
#Checks to make sure we haven't requested to plot a year past the max year
if yeartograph <= T:
plt.plot(range(S+1), N_matrix[index,:,yeartograph])
else:
print "\nERROR: WE HAVE ONLY SIMULATED UP TO THE YEAR", T
time.sleep(15)
plt.title(str(name + " Population Distribution"))
plt.legend(years)
plt.show()
plt.clf()
def getBequests(params, assets_old):
"""
Description:
-Gets the value of the bequests given to each generation
Inputs:
-assets: Assets for each generation in a given year
-current_t: Integer that indicates the current year. Used to pull information from demographics global matrices like FertilityRates
Objects in Function:
-BQ: T
-num_bequest_receivers:
-bq_Distribution:
Output:
-bq: Numpy array that contains the number of bequests for each generation in each country.
"""
I, S, T, StartFertilityAge, StartDyingAge, pop_old, pop_working, current_mort = params
#Initializes bequests
bq = np.zeros((I, S+1))
#Gets the total assets of the people who died this year
BQ = np.sum(assets_old*current_mort*pop_old, axis=1)
#Distributes the total assets equally among the eligible population for each country
#NOTE: This will likely change as we get a more complex function for distributing the bequests
num_bequest_receivers = np.sum(pop_working, axis=1)
bq_Distribution = BQ/num_bequest_receivers
bq[:,StartFertilityAge:StartDyingAge+1] = np.einsum("i,s->is", bq_Distribution, np.ones(StartDyingAge+1-StartFertilityAge))
return bq
def hatvariables(Kpathreal, kfpathreal, Nhat_matrix):
#THIS FUNCTION HAS EQUATIONS 2.13-2.16 AND 2.19-2.20, BUT STILL NEEDS TO BE INCORPORATED INTO THE REST OF THE MODEL TO COMPLETELY TEST
#We are only using up until T periods rather than T+S+1 since Nhat only goes out to T
Kpath = Kpathreal[:,:T]
kfpath = kfpathreal[:,:T]
temp_e = np.ones((I, S+1, T))#THIS SHOULD ONLY BE UNTIL WE GET S GENERATIONS RATHER THAN S-1
n = np.sum(temp_e[:,:,:T]*Nhat_matrix, axis=1)
Ypath = (Kpath**alpha) * (np.einsum("i,it->it", A, n)**(1-alpha))
rpath = alpha * Ypath / Kpath
wpath = (1-alpha) * Ypath / n
"""
#NOTE:This goes in the get_householdchoices_path function
c_path = np.zeros((I, S))
asset_path = np.zeros((I, S+1))
c_path[:,0] = c_1
asset_path[:,0] = starting_assets
for s in range(1,S):
c_path[:,s] = ((beta * (1 + rpath_chunk[:,s] - delta))**(1/sigma) * c_path[:,s-1])/np.exp(g_A)
asset_path[:,s] = (wpath_chunk[:,s]*e[:,0,s-1] + (1 + rpath_chunk[:,s-1] - delta)*asset_path[:,s-1] + bq_chunk - c_path[:,s-1])/np.exp(g_A)
asset_path[:,s+1] = wpath_chunk[:,s]*e_chunk[:,s] + (1 + rpath_chunk[:,s] - delta)*asset_path[:,s] - c_path[:,s]
"""
#STEADY STATE FUNCTIONS
def get_kd(assets, kf):
"""
Description: Calculates the amount of domestic capital that remains in the domestic country
Inputs:
-assets[I,S,T+S+1]: Matrix of assets
-kf[I,T+S+1]: Domestic capital held by foreigners.
Objects in Function:
NONE
Outputs:
-kd[I,T+S+1]: Capital that is used in the domestic country
"""
kd = np.sum(assets[:,1:-1], axis=1) - kf
return kd
def get_n(e):
"""
Description: Calculates the total labor productivity for each country
Inputs:
-e[I,S,T]:Matrix of labor productivities
Objects in Function:
-NONE
Outputs:
-n[I,S+T+1]: Total labor productivity
"""
n = np.sum(e, axis=1)
return n
def get_Y(params, kd, n):
"""
Description: Calculates the output timepath
Inputs:
-params (2) tuple: Contains the necessary parameters used
-kd[I,T+S+1]: Domestic held capital stock
-n[I,S+T+1]: Summed labor productivity
Objects in Function:
-A[I]: Technology for each country
-alpha: Production share of capital
Outputs:
-Y[I,S+T+1]: Timepath of output
"""
alpha, A = params
if kd.ndim == 1:
Y = (kd**alpha) * ((A*n)**(1-alpha))
elif kd.ndim == 2:
Y = (kd**alpha) * (np.einsum("i,is->is", A, n)**(1-alpha))
return Y
def get_r(alpha, Y, kd):
"""
Description: Calculates the rental rates.
Inputs:
-alpha (scalar): Production share of capital
-Y[I,T+S+1]: Timepath of output
-kd[I,T+S+1]: Timepath of domestically owned capital
Objects in Function:
-NONE
Outputs:
-r[I,R+S+1]:Timepath of rental rates
"""
r = alpha * Y / kd
return r
def get_w(alpha, Y, n):
"""
Description: Calculates the wage timepath.
Inputs:
-alpha (scalar): Production share of output
-Y[I,T+S+1]: Output timepath
-n[I,T+S+1]: Total labor productivity timepath
Objects in Function:
-NONE
Outputs:
-w[I,T+S+1]: Wage timepath
"""
w = (1-alpha) * Y / n
return w
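#--- Illustrative sketch (not part of the original script): a quick numeric
#check of the Cobb-Douglas relations implemented by get_Y, get_r and get_w
#above. With Y = kd**alpha * (A*n)**(1-alpha), r = alpha*Y/kd and
#w = (1-alpha)*Y/n, factor payments exhaust output: r*kd + w*n == Y.
_kd, _n, _A, _alpha = 4.0, 2.0, 1.0, 0.3
_Y = (_kd ** _alpha) * ((_A * _n) ** (1 - _alpha))
assert abs((_alpha * _Y / _kd) * _kd + ((1 - _alpha) * _Y / _n) * _n - _Y) < 1e-12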
def get_cvecss(params, w, r, assets):
"""
Description: Calculates the consumption vector
Inputs:
-params (tuple 2): Tuple that contains the necessary parameters
-w[I,T+S+1]: Wage timepath
-r[I,T+S+1]: Rental Rate timepath
-assets[I,S,T+S+1]: Assets timepath
Objects in Function:
-e[I,S,T+S+1]: Matrix of labor productivities
-delta (parameter): Depreciation rate
Outputs:
-c_vec[I,T+S+1]:Vector of consumption.
"""
e, delta = params
c_vec = np.einsum("i, is -> is", w, e[:,:,0])\
+ np.einsum("i, is -> is",(1 + r - delta) , assets[:,:-1])\
- assets[:,1:]
return c_vec
def check_feasible(K, Y, w, r, c):
"""
Description:Checks the feasibility of the inputs.
Inputs:
-K[I,T+S+1]: Capital stock timepath
-y[I,T+S+1]: Output timepath
-w[I,T+S+1]: Wage timepath
-r[I,T+S+1]: Rental rate timepath
-c[I,T+S+1]: consumption timepath
Objects in Function:
NONE
Outputs:
-Feasible (Boolean): Whether or not the inputs are feasible.
"""
Feasible = True
    if np.any(K<0):
        Feasible=False
        print "WARNING! INFEASIBLE VALUE ENCOUNTERED IN K!"
        print "The following coordinates have infeasible values:"
        print np.argwhere(K<0)
    if np.any(Y<0):
        Feasible=False
        print "WARNING! INFEASIBLE VALUE ENCOUNTERED IN Y!"
        print "The following coordinates have infeasible values:"
        print np.argwhere(Y<0)
    if np.any(r<0):
        Feasible=False
        print "WARNING! INFEASIBLE VALUE ENCOUNTERED IN r!"
        print "The following coordinates have infeasible values:"
        print np.argwhere(r<0)
    if np.any(w<0):
        Feasible=False
        print "WARNING! INFEASIBLE VALUE ENCOUNTERED IN w!"
        print "The following coordinates have infeasible values:"
        print np.argwhere(w<0)
    if np.any(c<0):
        Feasible=False
        print "WARNING! INFEASIBLE VALUE ENCOUNTERED IN c_vec!"
        print "The following coordinates have infeasible values:"
        print np.argwhere(c<0)
return Feasible
def SteadyStateSolution(guess, I, S, beta, sigma, delta, alpha, e, A):
"""
Description:
-This is the function that will be optimized by fsolve.
Inputs:
        -guess[I,S]: vector pieced together from assets and kf.
Objects in Function:
-kf[I,]:Foreign capital held by foreigners in each country
-assets[I,S]: Asset path for each country
-k[I,]:Capital for each country
-n[I,]:Labor for each country
-Y[I,]:Output for each country
-r[I,]:Rental Rate for each country
-w[I,]:Wage for each country
-c_vec[I, S]: Consumption by cohort in each country
-Euler_c[I, S-1]: Corresponds to (1.16)
-Euler_r[I,]: Corresponds to (1.17)
-Euler_kf(Scalar): Corresponds to (1.18)
Output:
        -all_Euler[I*S,]: Similar to guess, a vector that stacks the asset, rental-rate, and kf Euler conditions.
"""
#Takes a 1D guess of length I*S and reshapes it to match what the original input into the fsolve looked like since fsolve flattens numpy arrays
guess = np.reshape(guess[:,np.newaxis], (I, S))
#Appends a I-length vector of zeros on ends of assets to represent no assets when born and no assets when dead
assets = np.column_stack((np.zeros(I), guess[:,:-1], np.zeros(I)))
#Sets kf as the last element of the guess vector for each country
kf = guess[:,-1]
#Getting the other variables
kd = get_kd(assets, kf)
n = get_n(e[:,:,0])
Yparams = (alpha, A)
Y = get_Y(Yparams, kd, n)
r = get_r(alpha, Y, kd)
w = get_w(alpha, Y, n)
cparams = (e, delta)
c_vec = get_cvecss(cparams, w, r, assets)
K = kd+kf
Feasible = check_feasible(K, Y, w, r, c_vec)
    if np.any(c_vec<0): #Penalizes negative consumption values so fsolve avoids infeasible guesses
        all_Euler=np.ones((I*S))*9999.
else:
#Gets Euler equations
Euler_c = c_vec[:,:-1] ** (-sigma) - beta * c_vec[:,1:] ** (-sigma) * (1 + r[0] - delta)
Euler_r = r[1:] - r[0]
Euler_kf = np.sum(kf)
#Makes a new 1D vector of length I*S that contains all the Euler equations
all_Euler = np.append(np.append(np.ravel(Euler_c), np.ravel(Euler_r)), Euler_kf)
return all_Euler
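#Illustrative sketch (an assumption, not from the original code): fsolve
#flattens its guess to 1D before calling the objective, so the reshape at the
#top of SteadyStateSolution recovers the (I, S) layout of the stacked guess.
def _demo_guess_reshape(I=2, S=3):
    import numpy as np
    guess2d = np.arange(I*S, dtype=float).reshape(I, S)
    flat = np.ravel(guess2d)                 #what fsolve hands the objective
    recovered = np.reshape(flat[:,np.newaxis], (I, S))
    assert np.array_equal(recovered, guess2d)
    return recovered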
def getSteadyState(params, assets_init, kf_init):
"""
Description:
This takes the initial guess for assets and kf. Since the function
returns a matrix, this unpacks the individual parts.
Inputs:
        -assets_init[I,S-1]: Initial guess for asset path
-kf_init[I]:Initial guess on foreigner held capital
Objects in Function:
-guess[I,S]: A combined matrix that has both assets_init and kf_init
-ss[S*I,]: The result from optimization.
Outputs:
-assets_ss[I,S-1]:Calculated assets steady state
-kf_ss[I,]:Calculated domestic capital owned by foreigners steady state
-k_ss[I]: Calculated total capital stock steady state
-n_ss[I]: Summed labor productivities steady state
-y_ss[I]: Calculated output steady state
-r_ss[I]: calculated steady state rental rate
-w_ss[I]: calculated steady state wage rate
        -c_vec_ss[I, S]: Calculated steady state consumption
"""
I, S, beta, sigma, delta, alpha, e, A = params
#Merges the assets and kf together into one matrix that can be inputted into the fsolve function
guess = np.column_stack((assets_init, kf_init))
#Solves for the steady state
solver_params = (I, S, beta, sigma, delta, alpha, e, A)
ss = opt.fsolve(SteadyStateSolution, guess, args=solver_params)
#Reshapes the ss code
ss = np.array(np.split(ss, I))
#Breaks down the steady state matrix into the two separate assets and kf matrices.
assets_ss = np.column_stack((np.zeros(I), ss[:,:-1], np.zeros(I)))
kf_ss = ss[:,-1]
#Gets the other steady-state values using assets and kf
kd_ss = get_kd(assets_ss, kf_ss)
n_ss = get_n(e[:,:,0])
Yparams = (alpha, A)
Y_ss = get_Y(Yparams, kd_ss, n_ss)
r_ss = get_r(alpha, Y_ss, kd_ss)
w_ss = get_w(alpha, Y_ss, n_ss)
cparams = (e, delta)
c_vec_ss = get_cvecss(cparams, w_ss, r_ss, assets_ss)
print "\nSteady State Found!\n"
return assets_ss, kf_ss, kd_ss, n_ss, Y_ss, r_ss[0], w_ss, c_vec_ss
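#Hypothetical usage sketch for getSteadyState; the parameter values below are
#illustrative assumptions, not calibrated values from the model, and the toy
#problem is only meant to show how the inputs are shaped and stacked.
def _demo_getSteadyState():
    import numpy as np
    I, S = 2, 10
    beta, sigma, delta, alpha = 0.95, 3.0, 0.1, 0.3
    e = np.ones((I, S, 1))                   #flat productivity profile
    A = np.ones(I)
    params = (I, S, beta, sigma, delta, alpha, e, A)
    assets_init = np.ones((I, S-1))*.1       #small positive asset guess
    kf_init = np.zeros(I)
    return getSteadyState(params, assets_init, kf_init)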
#TIMEPATH FUNCTIONS
def get_initialguesses(params, assets_ss, kf_ss, w_ss, r_ss):
"""
Description:
With the parameters and steady state values, this function creates
initial guesses in a linear path.
Inputs:
-Params (Tuple): Tuple of parameters from Main.py
-Assets_ss[I,S,T+S+1]: Steady state assets value
-kf_ss[I,]: Steady State value of foreign owned domestic capital
-w_ss[I,]: Steady state value of wages
-r_ss[I,]: Steady state value of rental rate
Objects in Function:
-othervariable_params (Tuple): A tuple specifically made for GetOtherVariables
Outputs:
-assets_init[I,]: Initial Asset path
-kf_init[I,]: New initial foreign held capital
-w_initguess[I,T+S+1]: Initial guess wage timepath
-r_initguess[I,T+S+1]: Initial guess rental rate timepath
-k_init[I,]: total capital stock initial guess
-n_init[I,]: total labor initial guess
        -y_init[I,]: output initial guess
-c_init[I,]: consumption initial guess
"""
I, S, T, delta, alpha, e, A = params
#Sets initial assets and kf, start with something close to the steady state
assets_init = assets_ss*.9
kf_init = kf_ss*0
w_initguess = np.zeros((I, T+S+1))
r_initguess = np.ones((T+S+1))*.5
#Gets initial kd, n, y, r, w, and K
kd_init = get_kd(assets_init, kf_init)
n_init = get_n(e[:,:,0])
Yparams = (alpha, A)
Y_init = get_Y(Yparams, kd_init, n_init)
r_init = get_r(alpha, Y_init, kd_init)
w_init = get_w(alpha, Y_init, n_init)
cparams = (e, delta)
c_init = get_cvecss(cparams, w_init, r_init, assets_init)
#Gets initial guess for rental rate path. This is set up to be linear.
r_initguess[:T+1] = np.linspace(r_init[0], r_ss, T+1)
r_initguess[T+1:] = r_initguess[T]
#Gets initial guess for wage path. This is set up to be linear.
for i in range(I):
w_initguess[i, :T+1] = np.linspace(w_init[i], w_ss[i], T+1)
w_initguess[i,T+1:] = w_initguess[i,T]
return assets_init, kf_init, w_initguess, r_initguess, kd_init, n_init, Y_init, c_init
def get_foreignK_path(params, Kpath, rpath, kf_ss):
"""
Description:
        This calculates the timepath of the foreign capital stock. This is based on equations (1.12) and (1.13).
Inputs:
        Kpath: Aggregate capital path, from our calculations
        rpath: Rental rate path, also from our calculations
Objects in Function:
kdpath[I,S+T+1]: Path of domestic owned capital
n[I,S+T+1]: Path of total labor
kf_ss[I,]: Calculated from the steady state.
A[I,]: Parameters from above
Outputs:
kfPath[I,S+T+1]: Path of domestic capital held by foreigners.
"""
I, S, T, alpha, e, A = params
#Sums the labor productivities across cohorts
n = np.sum(e, axis=1)
#Declares the array that will later be used.
kfPath=np.zeros((I,S+T+1))
kdPath=np.zeros((I,S+T+1))
#Gets the domestic-owned capital stock for each country except for the first country
kdPath[1:,:]=(rpath[:]/alpha)**(1/(alpha-1))*np.einsum("i,is->is", A[1:], n[1:,:])
    #This is using equation 1.13 solved for the foreign capital stock to calculate the foreign capital stock
#For everyone except the first country
kfPath[1:,:]=Kpath[1:,:]-kdPath[1:,:]
    #To satisfy 1.18, the first country's foreign capital holdings are the negative of the sum of all the other countries'
kfPath[0,:]= -np.sum(kfPath[1:,:],axis=0)
#Making every year beyond t equal to the steady-state
kfPath[:,T:] = np.einsum("i,s->is", kf_ss, np.ones(S+1))
return kfPath
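#Illustrative check (toy numbers assumed) of the rental-rate inversion used in
#get_foreignK_path: kd = (r/alpha)**(1/(alpha-1)) * A*n inverts
#r = alpha*Y/kd with Y = kd**alpha * (A*n)**(1-alpha).
def _demo_kd_from_r():
    import numpy as np
    alpha, A, n, kd = 0.3, 2.0, 5.0, 4.0
    Y = kd**alpha * (A*n)**(1-alpha)
    r = alpha * Y / kd
    kd_recovered = (r/alpha)**(1/(alpha-1)) * A * n
    assert np.isclose(kd_recovered, kd)
    return kd_recovered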
def get_lifetime_decisions(params, c_1, wpath_chunk, rpath_chunk, e_chunk, starting_assets, current_s):
"""
Description:
This solves for equations 1.15 and 1.16 in the StepbyStep pdf for a certain generation
Inputs:
-c_1: Initial consumption (not necessarily for the year they were born)
        -wpath_chunk: Wages over an agent's lifetime, a section of the timepath
        -rpath_chunk: Rental rates over an agent's lifetime, a section of the timepath
        -e_chunk: Worker productivities over an agent's lifetime, a section of the global matrix
-starting_assets: Initial assets of the agent. Will be 0s if we are beginning in the year the agent was born
-current_s: Current age of the agent
Objects in Function:
-NONE
Outputs:
-c_path[I, S]: Path of consumption until the agent dies
-asset_path[I, S+1]: Path of assets until the agent dies
"""
I, S, beta, sigma, delta = params
#Initializes the cpath and asset path vectors
c_path = np.zeros((I, S))
asset_path = np.zeros((I, S+1))
#For each country, the cpath and asset path vectors' are the initial values provided.
c_path[:,0] = c_1
asset_path[:,0] = starting_assets
#Based on the individual chunks, these are the households choices
for s in range(1,S):
c_path[:,s] = (beta * (1 + rpath_chunk[s] - delta))**(1/sigma) * c_path[:,s-1]
asset_path[:,s] = wpath_chunk[:,s]*e_chunk[:,s-1] + (1 + rpath_chunk[s-1] - delta)*asset_path[:,s-1] - c_path[:,s-1]
asset_path[:,s+1] = wpath_chunk[:,s]*e_chunk[:,s] + (1 + rpath_chunk[s] - delta)*asset_path[:,s] - c_path[:,s]
#Returns the relevant part of c_path and asset_path for all countries
return c_path[:,0:S-current_s], asset_path[:,0:S+1-current_s]
def find_optimal_starting_consumptions(c_1, wpath_chunk, rpath_chunk, epath_chunk, starting_assets, current_s, params):
"""
Description:
        Takes the assets path from the get_lifetime_decisions function and creates Euler errors
Inputs:
Dimension varies
-c_1: Initial consumption (not necessarily for the year they were born)
-wpath_chunk: Wages of an agents lifetime, a part of the timepath
-rpath_chunk: Rental rate of an agents lifetime, another part of the timepath.
-epath_chunk: Worker productivities of an agents lifetime, another part.
-starting_assets: Initial assets of the agent. It's 0 at the beginning of life.
-current_s: Current age of the agent
Objects in Function:
-cpath: Path of consumption based on chunk given.
-assets_path: Path of assets based on the chunks given
Outputs:
-Euler:A flattened version of the assets_path matrix
"""
    #Executes the get_lifetime_decisions function. See above.
c_path, assets_path = get_lifetime_decisions(params, c_1, wpath_chunk, rpath_chunk, epath_chunk, starting_assets, current_s)
    if np.any(c_path<0):
        #Penalizes negative consumption so fsolve moves away from infeasible guesses
        Euler = np.ones(c_path.shape[0])*9999.
    else:
        Euler = np.ravel(assets_path[:,-1])
    return Euler
def get_cons_assets_matrix(params, wpath, rpath, starting_assets):
I, S, T, T_1, beta, sigma, delta, e, StartFertilityAge, StartDyingAge, N_matrix, MortalityRates = params
#Initializes timepath variables
c_timepath = np.zeros((I,S,S+T+1))
a_timepath = np.zeros((I, S+1, S+T+1)) #I,S+1,S+T+1
a_timepath[:,:,0]=starting_assets
bq_timepath = np.zeros((I, S+1, S+T+1)) #Is this too big?
c_timepath[:,S-1,0] = wpath[:,0]*e[:,S-1,0] + (1 + rpath[0] - delta)*a_timepath[:,S-1,0]
#Fills the upper triangle
for s in range(S-2,-1, -1):
agent_assets = starting_assets[:,s]
#We are only doing this for all generations alive in time t=0
t = 0
#We are iterating through each generation in time t=0
current_s = s
#Uses the previous generation's consumption at age s to get the value for our guess
c_guess = c_timepath[:,s+1,t]/((beta*(1+rpath[t]-delta))**(1/sigma))
#Gets optimal initial consumption beginning in the current age of the agent using chunks of w and r that span the lifetime of the given generation
household_params = (I, S, beta, sigma, delta)
opt_consump = opt.fsolve(find_optimal_starting_consumptions, c_guess, args = \
(wpath[:,t:t+S], rpath[t:t+S], e[:,0,t:t+S],agent_assets, current_s, household_params))
#Gets optimal timepaths beginning initial consumption and starting assets
cpath_indiv, apath_indiv = get_lifetime_decisions\
(household_params, opt_consump, wpath[:,t:t+S], rpath[t:t+S], e[:,0,t:t+S], agent_assets, current_s)
for i in xrange(I):
np.fill_diagonal(c_timepath[i,s:,:], cpath_indiv[i,:])
np.fill_diagonal(a_timepath[i,s:,:], apath_indiv[i,:])
bq_params = (I, S, T, StartFertilityAge, StartDyingAge, N_matrix[:,StartDyingAge:,s], N_matrix[:,StartFertilityAge:StartDyingAge+1,s], MortalityRates[:,StartDyingAge:,s])
bq_timepath[:,:,S-s-2] = getBequests(bq_params, a_timepath[:,StartDyingAge:,S-s-2])
#print np.round(cpath_indiv[0,:], decimals=3), opt_consump[0]
#print np.round(np.transpose(c_timepath[0,:,:T_1-s+3]), decimals=3)
#print np.round(starting_assets[0,:], decimals=3)
#print np.round(assetpath_indiv[0,:], decimals=3), agent_assets[0]
#print np.round(np.transpose(a_timepath[0,:,:T_1]), decimals=3)
#Fills everything except for the upper triangle
for t in xrange(1,T):
        current_s = 0 #This is always zero because this section deals with people who haven't been born yet at time t=0
agent_assets = np.zeros((I))
        #Uses the consumption of the youngest existing cohort (s is 0 after the loop above) to seed the guess
c_guess = c_timepath[:,s+1,t]/((beta*(1+rpath[t+1]-delta))**(1/sigma))
optimalconsumption = opt.fsolve(find_optimal_starting_consumptions, c_guess, args = \
(wpath[:,t:t+S], rpath[t:t+S], e[:,0,t:t+S], agent_assets, current_s, household_params))
cpath_indiv, assetpath_indiv = get_lifetime_decisions\
(household_params, optimalconsumption, wpath[:,t:t+S], rpath[t:t+S], e[:,0,t:t+S], agent_assets, current_s)
for i in range(I):
np.fill_diagonal(c_timepath[i,:,t:], cpath_indiv[i,:])
np.fill_diagonal(a_timepath[i,:,t:], assetpath_indiv[i,:])
if t >= T_1:
temp_t = T_1
else:
temp_t = t
bq_params = (I, S, T, StartFertilityAge, StartDyingAge, N_matrix[:,StartDyingAge:,temp_t+S-2], N_matrix[:,StartFertilityAge:StartDyingAge+1,temp_t+S-2], MortalityRates[:,StartDyingAge:,temp_t+S-2])
bq_timepath[:,:,t+S-2] = getBequests(bq_params, a_timepath[:,StartDyingAge:,temp_t+S-2])
#bq_timepath[:,:,t+S-2] = getBequests(a_timepath[:,:,t+S-2], t+S-2)
return c_timepath, a_timepath
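#Illustrative sketch (toy sizes assumed): np.fill_diagonal on a sliced
#sub-array writes a cohort's lifetime path along the (age, time) diagonal,
#which is how the loops above place each cohort into the timepath matrices.
def _demo_fill_diagonal(S=4, T=6):
    import numpy as np
    c_timepath = np.zeros((S, S+T))
    lifetime = np.arange(1., S)              #hypothetical cohort path of length S-1
    np.fill_diagonal(c_timepath[1:,:], lifetime)
    #The cohort is age 1 at time 0, age 2 at time 1, and so on
    assert c_timepath[1,0] == 1. and c_timepath[2,1] == 2.
    return c_timepath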
def get_wpathnew_rpathnew(params, wpath, rpath, starting_assets, kd_ss, kf_ss, w_ss, r_ss):
"""
Description:
        Takes initial paths of wages and rental rates, and gives the consumption path and the wage and rental paths that are implied by that consumption path.
Inputs:
-w_path0[I, S+T+1]: initial w path
-r_path0[I, S+T+1]: initial r path
Objects in Function:
Note that these vary in dimension depending on the loop.
-current_s: The age of the cohort at time 0
-opt_consump: Solved for consumption
-starting_assets: Initial assets for the cohorts.
-cpath_indiv: The small chunk of cpath.
-assetpath_indiv: The small chunk of assetpath_indiv
-optimalconsumption: Solved from the chunks
-c_timepath: Overall consumption path
-a_timepath: Overall assets timepath
-kfpath: Foreign held domestic capital
        -agent_assets: Assets held by individual cohorts.
Outputs:
-w_path1[I,S+T+1]: calculated w path
-r_path1[I,S+T+1]: calculated r path
-CPath[I,S+T+1]: Calculated aggregate consumption path for each country
-Kpath[I,S+T+1]: Calculated capital stock path.
        -Ypath1[I, S+T+1]: timepath of output implied by the initial guess
"""
I, S, T, T_1, beta, sigma, delta, alpha, e, A, StartFertilityAge, StartDyingAge, N_matrix, MortalityRates = params
ca_params = (I, S, T, T_1, beta, sigma, delta, e, StartFertilityAge, StartDyingAge, N_matrix, MortalityRates)
c_timepath, a_timepath = get_cons_assets_matrix(ca_params, wpath, rpath, starting_assets)
#Calculates the total amount of capital in each country
Kpath=np.sum(a_timepath,axis=1)
#Calculates Aggregate Consumption
Cpath=np.sum(c_timepath,axis=1)
#After time period T, the total capital stock and total consumption is forced to be the steady state
Kpath[:,T:] = np.einsum("i,t->it", kd_ss+kf_ss, np.ones(S+1))
Cpath[:,T:] = np.einsum("i,t->it", Cpath[:,T-1], np.ones(S+1))
    #Gets the foreign-owned capital
kf_params = (I, S, T, alpha, e, A)
kfpath = get_foreignK_path(kf_params, Kpath, rpath, kf_ss)
#Based on the overall capital path and the foreign owned capital path, we get new w and r paths.
kdpath = Kpath - kfpath
npath = get_n(e)
Yparams = (alpha, A)
Ypath = get_Y(Yparams, kdpath, npath)
rpath_new = get_r(alpha, Ypath[0], kdpath[0])
wpath_new = get_w(alpha, Ypath, npath)
#Checks to see if any of the timepaths have negative values
check_feasible(Kpath, Ypath, wpath, rpath, c_timepath)
return wpath_new, rpath_new, Cpath, Kpath, Ypath, a_timepath
def get_Timepath(params, wstart, rstart, assets_init, kd_ss, kf_ss, w_ss, r_ss):
I, S, T, T_1, beta, sigma, delta, alpha, e, A, StartFertilityAge, StartDyingAge, N_matrix, MortalityRates, distance, diff, xi, MaxIters = params
Iter=1 #Serves as the iteration counter
wr_params = (I, S, T, T_1, beta, sigma, delta, alpha, e, A, StartFertilityAge, StartDyingAge, N_matrix, MortalityRates)
while distance>diff and Iter<MaxIters: #The timepath iteration runs until the distance gets below a threshold or the iterations hit the maximum
wpath_new, rpath_new, Cpath_new, Kpath_new, Ypath_new, apath_new = \
get_wpathnew_rpathnew(wr_params, wstart, rstart, assets_init, kd_ss, kf_ss, w_ss, r_ss)
dist1=sp.linalg.norm(wstart-wpath_new,2) #Norm of the wage path
        dist2=sp.linalg.norm(rstart-rpath_new,2) #Norm of the interest rate path
distance=max([dist1,dist2]) #We take the maximum of the two norms to get the distance
print "Iteration:",Iter,", Norm Distance: ", distance#, "Euler Error, ", EError
Iter+=1 #Updates the iteration counter
if distance<diff or Iter==MaxIters: #When the distance gets below the tolerance or the maximum of iterations is hit, then the TPI finishes.
wend=wpath_new
rend=rpath_new
Cend=Cpath_new
Kend=Kpath_new
Yend=Ypath_new
aend=apath_new
#if Iter==MaxIters: #In case it never gets below the tolerance, it will throw this warning and give the last timepath.
#print "Doesn't converge within the maximum number of iterations"
#print "Providing the last iteration"
        wstart=wstart*xi+(1-xi)*wpath_new #Convex combination (damping) of the old and new wage paths
        rstart=rstart*xi+(1-xi)*rpath_new #Convex combination (damping) of the old and new interest rate paths
return wend, rend, Cend, Kend, Yend, aend
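#Generic sketch of the damped fixed-point update used in get_Timepath: each
#iteration blends the old guess with the newly implied value, with xi
#weighting the old guess. The mapping f below is a stand-in assumption, not
#the model's actual wage/interest-rate operator.
def _demo_damped_iteration(xi=.8, diff=1e-8, MaxIters=1000):
    x = 10.0
    f = lambda y: .5*y + 1.                  #toy contraction with fixed point 2
    for Iter in range(MaxIters):
        x_new = f(x)
        if abs(x - x_new) < diff:
            break
        x = x*xi + (1-xi)*x_new              #same damping rule as above
    return x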
def CountryLabel(Country): #Activated by line 28
'''
Description:
Converts the generic country label given for the graphs and converts it to a proper name
Inputs:
-Country (String): This is simply the generic country label
Objects in Function:
-NONE
Outputs:
        -Name (String): The proper name of the country. Make sure the number of country names lines
        up with the number of countries; otherwise the function will fail.
'''
#Each country is given a number
if Country=="Country 0":
Name="United States"
if Country=="Country 1":
Name="Europe"
if Country=="Country 2":
Name="Japan"
if Country=="Country 3":
Name="China"
if Country=="Country 4":
Name="India"
if Country=="Country 5":
Name="Russia"
if Country=="Country 6":
Name="Korea"
#Add More Country labels here
return Name
def plotTimepaths(I, S, T, wpath, rpath, cpath, kpath, Ypath, CountryNamesON):
for i in xrange(I): #Wages
label1='Country '+str(i)
if CountryNamesON==True:
label1=CountryLabel(label1)
plt.plot(np.arange(0,T),wpath[i,:T], label=label1)
plt.title("Time path for Wages")
plt.ylabel("Wages")
plt.xlabel("Time Period")
plt.legend(loc="upper right")
plt.show()
#Rental Rates
label1='Global Interest Rate'
plt.plot(np.arange(0,T),rpath[:T], label=label1)
plt.title("Time path for Rental Rates")
plt.ylabel("Rental Rates")
plt.xlabel("Time Period")
plt.legend(loc="upper right")
plt.show()
for i in xrange(I): #Aggregate Consumption
label1='Country '+str(i)
if CountryNamesON==True:
label1=CountryLabel(label1)
plt.plot(np.arange(0,S+T+1),cpath[i,:],label=label1)
plt.title("Time Path for Aggregate Consumption")
plt.ylabel("Consumption Level")
plt.xlabel("Time Period")
plt.legend(loc="upper right")
plt.show()
for i in xrange(I): #Aggregate Capital Stock
label1='Country '+str(i)
if CountryNamesON==True:
label1=CountryLabel(label1)
plt.plot(np.arange(0,T),kpath[i,:T],label=label1)
plt.title("Time path for Capital Path")
plt.ylabel("Capital Stock level")
plt.xlabel("Time Period")
plt.legend(loc="upper right")
plt.show()
for i in xrange(I):
label1='Country '+str(i)
if CountryNamesON==True:
label1=CountryLabel(label1)
plt.plot(np.arange(0,T),Ypath[i,:T],label=label1)
plt.title("Time path for Output")
plt.ylabel("Output Stock level")
plt.xlabel("Time Period")
plt.legend(loc="upper right")
plt.show()
|
mit
|
JonathanStein/odoo
|
openerp/tools/graph.py
|
441
|
26118
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import operator
import math
class graph(object):
def __init__(self, nodes, transitions, no_ancester=None):
        Initialize the graph object
@param nodes list of ids of nodes in the graph
@param transitions list of edges in the graph in the form (source_node, destination_node)
@param no_ancester list of nodes with no incoming edges
"""
self.nodes = nodes or []
self.edges = transitions or []
self.no_ancester = no_ancester or {}
trans = {}
for t in transitions:
trans.setdefault(t[0], [])
trans[t[0]].append(t[1])
self.transitions = trans
self.result = {}
def init_rank(self):
"""Computes rank of the nodes of the graph by finding initial feasible tree
"""
self.edge_wt = {}
for link in self.links:
self.edge_wt[link] = self.result[link[1]]['x'] - self.result[link[0]]['x']
tot_node = len(self.partial_order)
#do until all the nodes in the component are searched
while self.tight_tree()<tot_node:
list_node = []
list_edge = []
for node in self.nodes:
if node not in self.reachable_nodes:
list_node.append(node)
for edge in self.edge_wt:
if edge not in self.tree_edges:
list_edge.append(edge)
slack = 100
for edge in list_edge:
if ((edge[0] in self.reachable_nodes and edge[1] not in self.reachable_nodes) or
(edge[1] in self.reachable_nodes and edge[0] not in self.reachable_nodes)):
if slack > self.edge_wt[edge]-1:
slack = self.edge_wt[edge]-1
new_edge = edge
if new_edge[0] not in self.reachable_nodes:
delta = -(self.edge_wt[new_edge]-1)
else:
delta = self.edge_wt[new_edge]-1
for node in self.result:
if node in self.reachable_nodes:
self.result[node]['x'] += delta
for edge in self.edge_wt:
self.edge_wt[edge] = self.result[edge[1]]['x'] - self.result[edge[0]]['x']
self.init_cutvalues()
def tight_tree(self):
self.reachable_nodes = []
self.tree_edges = []
self.reachable_node(self.start)
return len(self.reachable_nodes)
def reachable_node(self, node):
"""Find the nodes of the graph which are only 1 rank apart from each other
"""
if node not in self.reachable_nodes:
self.reachable_nodes.append(node)
for edge in self.edge_wt:
if edge[0]==node:
if self.edge_wt[edge]==1:
self.tree_edges.append(edge)
if edge[1] not in self.reachable_nodes:
self.reachable_nodes.append(edge[1])
self.reachable_node(edge[1])
def init_cutvalues(self):
        Initialize cut values of edges of the feasible tree.
Edges with negative cut-values are removed from the tree to optimize rank assignment
"""
self.cut_edges = {}
self.head_nodes = []
i=0
for edge in self.tree_edges:
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
del rest_edges[i]
self.head_component(self.start, rest_edges)
i+=1
positive = 0
negative = 0
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
negative+=1
else:
for dest_node in self.transitions[source_node]:
if dest_node in self.head_nodes:
positive+=1
self.cut_edges[edge] = positive - negative
def head_component(self, node, rest_edges):
"""Find nodes which are reachable from the starting node, after removing an edge
"""
if node not in self.head_nodes:
self.head_nodes.append(node)
for edge in rest_edges:
if edge[0]==node:
self.head_component(edge[1],rest_edges)
def process_ranking(self, node, level=0):
"""Computes initial feasible ranking after making graph acyclic with depth-first search
"""
if node not in self.result:
self.result[node] = {'y': None, 'x':level, 'mark':0}
else:
if level > self.result[node]['x']:
self.result[node]['x'] = level
if self.result[node]['mark']==0:
self.result[node]['mark'] = 1
for sec_end in self.transitions.get(node, []):
self.process_ranking(sec_end, level+1)
def make_acyclic(self, parent, node, level, tree):
"""Computes Partial-order of the nodes with depth-first search
"""
if node not in self.partial_order:
self.partial_order[node] = {'level':level, 'mark':0}
if parent:
tree.append((parent, node))
if self.partial_order[node]['mark']==0:
self.partial_order[node]['mark'] = 1
for sec_end in self.transitions.get(node, []):
self.links.append((node, sec_end))
self.make_acyclic(node, sec_end, level+1, tree)
return tree
def rev_edges(self, tree):
        Reverse the direction of the edges whose source node's partial_order is greater
        than the destination node's partial_order, to make the graph acyclic
"""
Is_Cyclic = False
i=0
for link in self.links:
src = link[0]
des = link[1]
edge_len = self.partial_order[des]['level'] - self.partial_order[src]['level']
if edge_len < 0:
del self.links[i]
self.links.insert(i, (des, src))
self.transitions[src].remove(des)
self.transitions.setdefault(des, []).append(src)
Is_Cyclic = True
elif math.fabs(edge_len) > 1:
Is_Cyclic = True
i += 1
return Is_Cyclic
def exchange(self, e, f):
"""Exchange edges to make feasible-tree optimized
:param e: edge with negative cut-value
:param f: new edge with minimum slack-value
"""
del self.tree_edges[self.tree_edges.index(e)]
self.tree_edges.append(f)
self.init_cutvalues()
def enter_edge(self, edge):
"""Finds a new_edge with minimum slack value to replace an edge with negative cut-value
@param edge edge with negative cut-value
"""
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
del rest_edges[rest_edges.index(edge)]
self.head_component(self.start, rest_edges)
if edge[1] in self.head_nodes:
l = []
for node in self.result:
if node not in self.head_nodes:
l.append(node)
self.head_nodes = l
slack = 100
new_edge = edge
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
if slack>(self.edge_wt[edge]-1):
slack = self.edge_wt[edge]-1
new_edge = (source_node, dest_node)
return new_edge
def leave_edge(self):
"""Returns the edge with negative cut_value(if exists)
"""
if self.critical_edges:
for edge in self.critical_edges:
self.cut_edges[edge] = 0
for edge in self.cut_edges:
if self.cut_edges[edge]<0:
return edge
return None
def finalize_rank(self, node, level):
self.result[node]['x'] = level
for destination in self.optimal_edges.get(node, []):
self.finalize_rank(destination, level+1)
def normalize(self):
"""The ranks are normalized by setting the least rank to zero.
"""
least_rank = min(map(lambda x: x['x'], self.result.values()))
if least_rank!=0:
for node in self.result:
self.result[node]['x']-=least_rank
def make_chain(self):
"""Edges between nodes more than one rank apart are replaced by chains of unit
length edges between temporary nodes.
"""
for edge in self.edge_wt:
if self.edge_wt[edge]>1:
self.transitions[edge[0]].remove(edge[1])
start = self.result[edge[0]]['x']
end = self.result[edge[1]]['x']
for rank in range(start+1, end):
if not self.result.get((rank, 'temp'), False):
self.result[(rank, 'temp')] = {'y': None, 'x': rank, 'mark': 0}
for rank in range(start, end):
if start==rank:
self.transitions[edge[0]].append((rank+1, 'temp'))
elif rank==end-1:
self.transitions.setdefault((rank, 'temp'), []).append(edge[1])
else:
self.transitions.setdefault((rank, 'temp'), []).append((rank+1, 'temp'))
def init_order(self, node, level):
        Initializes the order of the nodes in each rank with depth-first search
"""
if not self.result[node]['y']:
self.result[node]['y'] = self.order[level]
self.order[level] += 1
for sec_end in self.transitions.get(node, []):
if node!=sec_end:
self.init_order(sec_end, self.result[sec_end]['x'])
def order_heuristic(self):
for i in range(12):
self.wmedian()
def wmedian(self):
        Applies the median heuristic to find an optimized order of the nodes within their ranks
"""
for level in self.levels:
node_median = []
nodes = self.levels[level]
for node in nodes:
node_median.append((node, self.median_value(node, level-1)))
sort_list = sorted(node_median, key=operator.itemgetter(1))
new_list = [tuple[0] for tuple in sort_list]
self.levels[level] = new_list
order = 0
for node in nodes:
self.result[node]['y'] = order
order +=1
def median_value(self, node, adj_rank):
        Returns the median value of a vertex, defined as the median position of the adjacent vertices
@param node node to process
@param adj_rank rank 1 less than the node's rank
"""
adj_nodes = self.adj_position(node, adj_rank)
l = len(adj_nodes)
m = l/2
if l==0:
return -1.0
elif l%2 == 1:
            return adj_nodes[m] #the median is the middle element
elif l==2:
return (adj_nodes[0]+adj_nodes[1])/2
else:
left = adj_nodes[m-1] - adj_nodes[0]
right = adj_nodes[l-1] - adj_nodes[m]
return ((adj_nodes[m-1]*right) + (adj_nodes[m]*left))/(left+right)
def adj_position(self, node, adj_rank):
"""Returns list of the present positions of the nodes adjacent to node in the given adjacent rank.
@param node node to process
@param adj_rank rank 1 less than the node's rank
"""
pre_level_nodes = self.levels.get(adj_rank, [])
adj_nodes = []
if pre_level_nodes:
for src in pre_level_nodes:
if self.transitions.get(src) and node in self.transitions[src]:
adj_nodes.append(self.result[src]['y'])
return adj_nodes
def preprocess_order(self):
levels = {}
for r in self.partial_order:
l = self.result[r]['x']
levels.setdefault(l,[])
levels[l].append(r)
self.levels = levels
def graph_order(self):
        Finds the actual order of the nodes with respect to the maximum number of nodes in a rank in the component
"""
mid_pos = 0.0
max_level = max(map(lambda x: len(x), self.levels.values()))
for level in self.levels:
if level:
no = len(self.levels[level])
factor = (max_level - no) * 0.10
list = self.levels[level]
list.reverse()
if no%2==0:
first_half = list[no/2:]
factor = -factor
else:
first_half = list[no/2+1:]
                if max_level==1: #for the case of a purely horizontal graph
self.result[list[no/2]]['y'] = mid_pos + (self.result[list[no/2]]['x']%2 * 0.5)
else:
self.result[list[no/2]]['y'] = mid_pos + factor
last_half = list[:no/2]
i=1
for node in first_half:
self.result[node]['y'] = mid_pos - (i + factor)
i += 1
i=1
for node in last_half:
self.result[node]['y'] = mid_pos + (i + factor)
i += 1
else:
self.max_order += max_level+1
mid_pos = self.result[self.start]['y']
def tree_order(self, node, last=0):
mid_pos = self.result[node]['y']
l = self.transitions.get(node, [])
l.reverse()
no = len(l)
rest = no%2
first_half = l[no/2+rest:]
last_half = l[:no/2]
for i, child in enumerate(first_half):
self.result[child]['y'] = mid_pos - (i+1 - (0 if rest else 0.5))
if self.transitions.get(child, False):
if last:
self.result[child]['y'] = last + len(self.transitions[child])/2 + 1
last = self.tree_order(child, last)
if rest:
mid_node = l[no/2]
self.result[mid_node]['y'] = mid_pos
if self.transitions.get(mid_node, False):
if last:
self.result[mid_node]['y'] = last + len(self.transitions[mid_node])/2 + 1
if node!=mid_node:
last = self.tree_order(mid_node)
else:
if last:
self.result[mid_node]['y'] = last + 1
self.result[node]['y'] = self.result[mid_node]['y']
mid_pos = self.result[node]['y']
i=1
last_child = None
for child in last_half:
self.result[child]['y'] = mid_pos + (i - (0 if rest else 0.5))
last_child = child
i += 1
if self.transitions.get(child, False):
if last:
self.result[child]['y'] = last + len(self.transitions[child])/2 + 1
if node!=child:
last = self.tree_order(child, last)
if last_child:
last = self.result[last_child]['y']
return last
def process_order(self):
        Finds the actual order of the nodes with respect to the maximum number of nodes in a rank in the component
"""
if self.Is_Cyclic:
max_level = max(map(lambda x: len(x), self.levels.values()))
if max_level%2:
self.result[self.start]['y'] = (max_level+1)/2 + self.max_order + (self.max_order and 1)
else:
self.result[self.start]['y'] = max_level /2 + self.max_order + (self.max_order and 1)
self.graph_order()
else:
self.result[self.start]['y'] = 0
self.tree_order(self.start, 0)
min_order = math.fabs(min(map(lambda x: x['y'], self.result.values())))
index = self.start_nodes.index(self.start)
same = False
roots = []
if index>0:
for start in self.start_nodes[:index]:
same = True
for edge in self.tree_list[start][1:]:
if edge in self.tree_list[self.start]:
continue
else:
same = False
break
if same:
roots.append(start)
if roots:
min_order += self.max_order
else:
min_order += self.max_order + 1
for level in self.levels:
for node in self.levels[level]:
self.result[node]['y'] += min_order
if roots:
roots.append(self.start)
one_level_el = self.tree_list[self.start][0][1]
base = self.result[one_level_el]['y']# * 2 / (index + 2)
no = len(roots)
first_half = roots[:no/2]
if no%2==0:
last_half = roots[no/2:]
else:
last_half = roots[no/2+1:]
factor = -math.floor(no/2)
for start in first_half:
self.result[start]['y'] = base + factor
factor += 1
if no%2:
self.result[roots[no/2]]['y'] = base + factor
factor +=1
for start in last_half:
self.result[start]['y'] = base + factor
factor += 1
self.max_order = max(map(lambda x: x['y'], self.result.values()))
def find_starts(self):
        Finds other start nodes of the graph in the case when the graph is disconnected
"""
rem_nodes = []
for node in self.nodes:
if not self.partial_order.get(node):
rem_nodes.append(node)
cnt = 0
while True:
if len(rem_nodes)==1:
self.start_nodes.append(rem_nodes[0])
break
else:
count = 0
new_start = rem_nodes[0]
largest_tree = []
for node in rem_nodes:
self.partial_order = {}
tree = self.make_acyclic(None, node, 0, [])
if len(tree)+1 > count:
count = len(tree) + 1
new_start = node
largest_tree = tree
else:
if not largest_tree:
new_start = rem_nodes[0]
rem_nodes.remove(new_start)
self.start_nodes.append(new_start)
for edge in largest_tree:
if edge[0] in rem_nodes:
rem_nodes.remove(edge[0])
if edge[1] in rem_nodes:
rem_nodes.remove(edge[1])
if not rem_nodes:
break
def rank(self):
"""Finds the optimized rank of the nodes using Network-simplex algorithm
"""
self.levels = {}
self.critical_edges = []
self.partial_order = {}
self.links = []
self.Is_Cyclic = False
self.tree_list[self.start] = self.make_acyclic(None, self.start, 0, [])
self.Is_Cyclic = self.rev_edges(self.tree_list[self.start])
self.process_ranking(self.start)
self.init_rank()
#make cut values of all tree edges to 0 to optimize feasible tree
e = self.leave_edge()
while e :
f = self.enter_edge(e)
if e==f:
self.critical_edges.append(e)
else:
self.exchange(e,f)
e = self.leave_edge()
#finalize rank using optimum feasible tree
# self.optimal_edges = {}
# for edge in self.tree_edges:
# source = self.optimal_edges.setdefault(edge[0], [])
# source.append(edge[1])
# self.finalize_rank(self.start, 0)
#normalization
self.normalize()
for edge in self.edge_wt:
self.edge_wt[edge] = self.result[edge[1]]['x'] - self.result[edge[0]]['x']
def order_in_rank(self):
"""Finds optimized order of the nodes within their ranks using median heuristic
"""
self.make_chain()
self.preprocess_order()
self.order = {}
max_rank = max(map(lambda x: x, self.levels.keys()))
for i in range(max_rank+1):
self.order[i] = 0
self.init_order(self.start, self.result[self.start]['x'])
for level in self.levels:
self.levels[level].sort(lambda x, y: cmp(self.result[x]['y'], self.result[y]['y']))
self.order_heuristic()
self.process_order()
def process(self, starting_node):
"""Process the graph to find ranks and order of the nodes
@param starting_node node from where to start the graph search
"""
self.start_nodes = starting_node or []
self.partial_order = {}
self.links = []
self.tree_list = {}
if self.nodes:
if self.start_nodes:
                #add dummy edges to the nodes which do not have any incoming edges
tree = self.make_acyclic(None, self.start_nodes[0], 0, [])
for node in self.no_ancester:
for sec_node in self.transitions.get(node, []):
if sec_node in self.partial_order.keys():
self.transitions[self.start_nodes[0]].append(node)
break
self.partial_order = {}
tree = self.make_acyclic(None, self.start_nodes[0], 0, [])
            # If the graph is disconnected or no start node is given,
            # find a starting node for each component of the graph
if len(self.nodes) > len(self.partial_order):
self.find_starts()
self.max_order = 0
#for each component of the graph find ranks and order of the nodes
for s in self.start_nodes:
self.start = s
                self.rank() # First step: network simplex algorithm
self.order_in_rank() #Second step: ordering nodes within ranks
def __str__(self):
result = ''
for l in self.levels:
result += 'PosY: ' + str(l) + '\n'
for node in self.levels[l]:
result += '\tPosX: '+ str(self.result[node]['y']) + ' - Node:' + str(node) + "\n"
return result
def scale(self, maxx, maxy, nwidth=0, nheight=0, margin=20):
        Computes the actual co-ordinates of the nodes
"""
        #for flat edges, i.e. source and destination nodes are on the same rank
for src in self.transitions:
for des in self.transitions[src]:
if self.result[des]['x'] - self.result[src]['x'] == 0:
self.result[src]['x'] += 0.08
self.result[des]['x'] -= 0.08
factorX = maxx + nheight
factorY = maxy + nwidth
for node in self.result:
self.result[node]['y'] = (self.result[node]['y']) * factorX + margin
self.result[node]['x'] = (self.result[node]['x']) * factorY + margin
def result_get(self):
return self.result
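#Illustrative sketch (toy positions assumed) of the weighted median used by
#graph.median_value for an even number of adjacent positions: it interpolates
#between the two middle values, weighted toward the side with the smaller
#spread of positions.
def _demo_weighted_median(adj_nodes=(0, 2, 3, 9)):
    l = len(adj_nodes)
    m = l // 2
    left = adj_nodes[m-1] - adj_nodes[0]     #spread on the left side
    right = adj_nodes[l-1] - adj_nodes[m]    #spread on the right side
    #(2*6 + 3*2)/(2 + 6) = 2.25 for the default positions
    return float(adj_nodes[m-1]*right + adj_nodes[m]*left) / (left + right)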
if __name__=='__main__':
starting_node = ['profile'] # put here nodes with flow_start=True
nodes = ['project','account','hr','base','product','mrp','test','profile']
transitions = [
('profile','mrp'),
('mrp','project'),
('project','product'),
('mrp','hr'),
('mrp','test'),
('project','account'),
('project','hr'),
('product','base'),
('account','product'),
('account','test'),
('account','base'),
('hr','base'),
('test','base')
]
radius = 20
g = graph(nodes, transitions)
g.process(starting_node)
g.scale(radius*3,radius*3, radius, radius)
from PIL import Image
from PIL import ImageDraw
img = Image.new("RGB", (800, 600), "#ffffff")
draw = ImageDraw.Draw(img)
result = g.result_get()
node_res = {}
for node in nodes:
node_res[node] = result[node]
for name,node in node_res.items():
draw.arc( (int(node['y']-radius), int(node['x']-radius),int(node['y']+radius), int(node['x']+radius) ), 0, 360, (128,128,128))
draw.text( (int(node['y']), int(node['x'])), str(name), (128,128,128))
for t in transitions:
draw.line( (int(node_res[t[0]]['y']), int(node_res[t[0]]['x']),int(node_res[t[1]]['y']),int(node_res[t[1]]['x'])),(128,128,128) )
img.save("graph.png", "PNG")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
tntC4stl3/scrapy
|
tests/test_http_headers.py
|
161
|
6348
|
import unittest
import copy
from scrapy.http import Headers
class HeadersTest(unittest.TestCase):
def assertSortedEqual(self, first, second, msg=None):
return self.assertEqual(sorted(first), sorted(second), msg)
def test_basics(self):
h = Headers({'Content-Type': 'text/html', 'Content-Length': 1234})
assert h['Content-Type']
assert h['Content-Length']
self.assertRaises(KeyError, h.__getitem__, 'Accept')
self.assertEqual(h.get('Accept'), None)
self.assertEqual(h.getlist('Accept'), [])
self.assertEqual(h.get('Accept', '*/*'), b'*/*')
self.assertEqual(h.getlist('Accept', '*/*'), [b'*/*'])
self.assertEqual(h.getlist('Accept', ['text/html', 'images/jpeg']),
[b'text/html', b'images/jpeg'])
def test_single_value(self):
h = Headers()
h['Content-Type'] = 'text/html'
self.assertEqual(h['Content-Type'], b'text/html')
self.assertEqual(h.get('Content-Type'), b'text/html')
self.assertEqual(h.getlist('Content-Type'), [b'text/html'])
def test_multivalue(self):
h = Headers()
h['X-Forwarded-For'] = hlist = ['ip1', 'ip2']
self.assertEqual(h['X-Forwarded-For'], b'ip2')
self.assertEqual(h.get('X-Forwarded-For'), b'ip2')
self.assertEqual(h.getlist('X-Forwarded-For'), [b'ip1', b'ip2'])
assert h.getlist('X-Forwarded-For') is not hlist
def test_encode_utf8(self):
h = Headers({u'key': u'\xa3'}, encoding='utf-8')
key, val = dict(h).popitem()
assert isinstance(key, bytes), key
assert isinstance(val[0], bytes), val[0]
self.assertEqual(val[0], b'\xc2\xa3')
def test_encode_latin1(self):
h = Headers({u'key': u'\xa3'}, encoding='latin1')
key, val = dict(h).popitem()
self.assertEqual(val[0], b'\xa3')
def test_encode_multiple(self):
h = Headers({u'key': [u'\xa3']}, encoding='utf-8')
key, val = dict(h).popitem()
self.assertEqual(val[0], b'\xc2\xa3')
def test_delete_and_contains(self):
h = Headers()
h['Content-Type'] = 'text/html'
assert 'Content-Type' in h
del h['Content-Type']
assert 'Content-Type' not in h
def test_setdefault(self):
h = Headers()
hlist = ['ip1', 'ip2']
olist = h.setdefault('X-Forwarded-For', hlist)
assert h.getlist('X-Forwarded-For') is not hlist
assert h.getlist('X-Forwarded-For') is olist
h = Headers()
olist = h.setdefault('X-Forwarded-For', 'ip1')
self.assertEqual(h.getlist('X-Forwarded-For'), [b'ip1'])
assert h.getlist('X-Forwarded-For') is olist
def test_iterables(self):
idict = {'Content-Type': 'text/html', 'X-Forwarded-For': ['ip1', 'ip2']}
h = Headers(idict)
self.assertDictEqual(dict(h),
{b'Content-Type': [b'text/html'],
b'X-Forwarded-For': [b'ip1', b'ip2']})
self.assertSortedEqual(h.keys(),
[b'X-Forwarded-For', b'Content-Type'])
self.assertSortedEqual(h.items(),
[(b'X-Forwarded-For', [b'ip1', b'ip2']),
(b'Content-Type', [b'text/html'])])
self.assertSortedEqual(h.iteritems(),
[(b'X-Forwarded-For', [b'ip1', b'ip2']),
(b'Content-Type', [b'text/html'])])
self.assertSortedEqual(h.values(), [b'ip2', b'text/html'])
def test_update(self):
h = Headers()
h.update({'Content-Type': 'text/html',
'X-Forwarded-For': ['ip1', 'ip2']})
self.assertEqual(h.getlist('Content-Type'), [b'text/html'])
self.assertEqual(h.getlist('X-Forwarded-For'), [b'ip1', b'ip2'])
def test_copy(self):
h1 = Headers({'header1': ['value1', 'value2']})
h2 = copy.copy(h1)
self.assertEqual(h1, h2)
self.assertEqual(h1.getlist('header1'), h2.getlist('header1'))
assert h1.getlist('header1') is not h2.getlist('header1')
assert isinstance(h2, Headers)
def test_appendlist(self):
h1 = Headers({'header1': 'value1'})
h1.appendlist('header1', 'value3')
self.assertEqual(h1.getlist('header1'), [b'value1', b'value3'])
h1 = Headers()
h1.appendlist('header1', 'value1')
h1.appendlist('header1', 'value3')
self.assertEqual(h1.getlist('header1'), [b'value1', b'value3'])
def test_setlist(self):
h1 = Headers({'header1': 'value1'})
self.assertEqual(h1.getlist('header1'), [b'value1'])
h1.setlist('header1', [b'value2', b'value3'])
self.assertEqual(h1.getlist('header1'), [b'value2', b'value3'])
def test_setlistdefault(self):
h1 = Headers({'header1': 'value1'})
h1.setlistdefault('header1', ['value2', 'value3'])
h1.setlistdefault('header2', ['value2', 'value3'])
self.assertEqual(h1.getlist('header1'), [b'value1'])
self.assertEqual(h1.getlist('header2'), [b'value2', b'value3'])
def test_none_value(self):
h1 = Headers()
h1['foo'] = 'bar'
h1['foo'] = None
h1.setdefault('foo', 'bar')
self.assertEqual(h1.get('foo'), None)
self.assertEqual(h1.getlist('foo'), [])
def test_int_value(self):
h1 = Headers({'hey': 5})
h1['foo'] = 1
h1.setdefault('bar', 2)
h1.setlist('buz', [1, 'dos', 3])
self.assertEqual(h1.getlist('foo'), [b'1'])
self.assertEqual(h1.getlist('bar'), [b'2'])
self.assertEqual(h1.getlist('buz'), [b'1', b'dos', b'3'])
self.assertEqual(h1.getlist('hey'), [b'5'])
def test_invalid_value(self):
self.assertRaisesRegexp(TypeError, 'Unsupported value type',
Headers, {'foo': object()})
self.assertRaisesRegexp(TypeError, 'Unsupported value type',
Headers().__setitem__, 'foo', object())
self.assertRaisesRegexp(TypeError, 'Unsupported value type',
Headers().setdefault, 'foo', object())
self.assertRaisesRegexp(TypeError, 'Unsupported value type',
Headers().setlist, 'foo', [object()])
|
bsd-3-clause
|
samdowd/drumm-farm
|
drumm_env/lib/python2.7/site-packages/django/views/decorators/http.py
|
58
|
4411
|
"""
Decorators for views based on HTTP headers.
"""
import logging
from calendar import timegm
from functools import wraps
from django.http import HttpResponseNotAllowed
from django.middleware.http import ConditionalGetMiddleware
from django.utils.cache import get_conditional_response
from django.utils.decorators import available_attrs, decorator_from_middleware
from django.utils.http import http_date, quote_etag
conditional_page = decorator_from_middleware(ConditionalGetMiddleware)
logger = logging.getLogger('django.request')
def require_http_methods(request_method_list):
"""
Decorator to make a view only accept particular request methods. Usage::
@require_http_methods(["GET", "POST"])
def my_view(request):
# I can assume now that only GET or POST requests make it this far
# ...
Note that request methods should be in uppercase.
"""
def decorator(func):
@wraps(func, assigned=available_attrs(func))
def inner(request, *args, **kwargs):
if request.method not in request_method_list:
logger.warning('Method Not Allowed (%s): %s', request.method, request.path,
extra={
'status_code': 405,
'request': request
}
)
return HttpResponseNotAllowed(request_method_list)
return func(request, *args, **kwargs)
return inner
return decorator
require_GET = require_http_methods(["GET"])
require_GET.__doc__ = "Decorator to require that a view only accepts the GET method."
require_POST = require_http_methods(["POST"])
require_POST.__doc__ = "Decorator to require that a view only accepts the POST method."
require_safe = require_http_methods(["GET", "HEAD"])
require_safe.__doc__ = "Decorator to require that a view only accepts safe methods: GET and HEAD."
def condition(etag_func=None, last_modified_func=None):
"""
Decorator to support conditional retrieval (or change) for a view
function.
The parameters are callables to compute the ETag and last modified time for
the requested resource, respectively. The callables are passed the same
    parameters as the view itself. The ETag function should return a string (or
None if the resource doesn't exist), while the last_modified function
should return a datetime object (or None if the resource doesn't exist).
If both parameters are provided, all the preconditions must be met before
the view is processed.
This decorator will either pass control to the wrapped view function or
return an HTTP 304 response (unmodified) or 412 response (preconditions
failed), depending upon the request method.
Any behavior marked as "undefined" in the HTTP spec (e.g. If-none-match
plus If-modified-since headers) will result in the view function being
called.
"""
def decorator(func):
@wraps(func, assigned=available_attrs(func))
def inner(request, *args, **kwargs):
# Compute values (if any) for the requested resource.
def get_last_modified():
if last_modified_func:
dt = last_modified_func(request, *args, **kwargs)
if dt:
return timegm(dt.utctimetuple())
res_etag = etag_func(request, *args, **kwargs) if etag_func else None
res_last_modified = get_last_modified()
response = get_conditional_response(
request,
etag=res_etag,
last_modified=res_last_modified,
)
if response is None:
response = func(request, *args, **kwargs)
# Set relevant headers on the response if they don't already exist.
if res_last_modified and not response.has_header('Last-Modified'):
response['Last-Modified'] = http_date(res_last_modified)
if res_etag and not response.has_header('ETag'):
response['ETag'] = quote_etag(res_etag)
return response
return inner
return decorator
# Shortcut decorators for common cases based on ETag or Last-Modified only
def etag(etag_func):
return condition(etag_func=etag_func)
def last_modified(last_modified_func):
return condition(last_modified_func=last_modified_func)
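# Hypothetical usage sketch (the helper and view below are illustrative
# assumptions, not part of this module): with an ETag function attached, the
# decorator can answer with a 304/412 response before the view body runs.
def _demo_etag_func(request, *args, **kwargs):
    # A stand-in validator source; real views would derive this from model state.
    return request.GET.get('v') or None

@condition(etag_func=_demo_etag_func)
def _demo_article_view(request):
    from django.http import HttpResponse
    return HttpResponse('article body')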
|
mit
|
artemalive/DigitalWhip
|
framework/colorama/initialise.py
|
86
|
1831
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import atexit
import contextlib
import sys
from .ansitowin32 import AnsiToWin32
orig_stdout = None
orig_stderr = None
wrapped_stdout = None
wrapped_stderr = None
atexit_done = False
def reset_all():
AnsiToWin32(orig_stdout).reset_all()
def init(autoreset=False, convert=None, strip=None, wrap=True):
if not wrap and any([autoreset, convert, strip]):
raise ValueError('wrap=False conflicts with any other arg=True')
global wrapped_stdout, wrapped_stderr
global orig_stdout, orig_stderr
orig_stdout = sys.stdout
orig_stderr = sys.stderr
if sys.stdout is None:
wrapped_stdout = None
else:
sys.stdout = wrapped_stdout = \
wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
if sys.stderr is None:
wrapped_stderr = None
else:
sys.stderr = wrapped_stderr = \
wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
global atexit_done
if not atexit_done:
atexit.register(reset_all)
atexit_done = True
def deinit():
if orig_stdout is not None:
sys.stdout = orig_stdout
if orig_stderr is not None:
sys.stderr = orig_stderr
@contextlib.contextmanager
def colorama_text(*args, **kwargs):
init(*args, **kwargs)
try:
yield
finally:
deinit()
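# Illustrative usage sketch (the printed strings are assumptions): the
# colorama_text context manager wraps init()/deinit(), so stdout is only
# patched inside the with-block.
def _demo_colorama_text():
    from colorama import Fore
    with colorama_text(autoreset=True):
        print(Fore.GREEN + 'wrapped: ANSI codes converted/stripped as configured')
    print('unwrapped: the original stdout is restored')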
def reinit():
if wrapped_stdout is not None:
sys.stdout = wrapped_stdout
if wrapped_stderr is not None:
sys.stderr = wrapped_stderr
def wrap_stream(stream, convert, strip, autoreset, wrap):
if wrap:
wrapper = AnsiToWin32(stream,
convert=convert, strip=strip, autoreset=autoreset)
if wrapper.should_wrap():
stream = wrapper.stream
return stream
|
mit
|
HSAnet/glimpse_client
|
3rdparty/breakpad/src/tools/gyp/tools/pretty_sln.py
|
806
|
5092
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the information in a sln file in a diffable way.
It first outputs each projects in alphabetical order with their
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import re
import sys
import pretty_vcproj
def BuildProject(project, built, projects, deps):
# if all dependencies are done, we can build it, otherwise we try to build the
# dependency.
# This is not infinite-recursion proof.
for dep in deps[project]:
if dep not in built:
BuildProject(dep, built, projects, deps)
print project
built.append(project)
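# Illustrative sketch (toy project names assumed): BuildProject emits a valid
# build order because each project is appended only after its dependencies.
def _demo_build_order():
  built = []
  deps = {'app': ['lib'], 'lib': ['base'], 'base': []}
  for project in sorted(deps):
    if project not in built:
      BuildProject(project, built, {}, deps)
  return built                               # ['base', 'lib', 'app']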
def ParseSolution(solution_file):
# All projects, their clsid and paths.
projects = dict()
# A list of dependencies associated with a project.
dependencies = dict()
# Regular expressions that matches the SLN format.
# The first line of a project definition.
begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
'}"\) = "(.*)", "(.*)", "(.*)"$'))
# The last line of a project definition.
end_project = re.compile('^EndProject$')
# The first line of a dependency list.
begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
# The last line of a dependency list.
end_dep = re.compile('EndProjectSection$')
# A line describing a dependency.
dep_line = re.compile(' *({.*}) = ({.*})$')
in_deps = False
solution = open(solution_file)
for line in solution:
results = begin_project.search(line)
if results:
# Hack to remove icu because the diff is too different.
if results.group(1).find('icu') != -1:
continue
# We remove "_gyp" from the names because it helps to diff them.
current_project = results.group(1).replace('_gyp', '')
projects[current_project] = [results.group(2).replace('_gyp', ''),
results.group(3),
results.group(2)]
dependencies[current_project] = []
continue
results = end_project.search(line)
if results:
current_project = None
continue
results = begin_dep.search(line)
if results:
in_deps = True
continue
results = end_dep.search(line)
if results:
in_deps = False
continue
results = dep_line.search(line)
if results and in_deps and current_project:
dependencies[current_project].append(results.group(1))
continue
# Change all dependencies clsid to name instead.
for project in dependencies:
    # For each dependency in this project
new_dep_array = []
for dep in dependencies[project]:
      # Look for the project name matching this clsid
for project_info in projects:
if projects[project_info][1] == dep:
new_dep_array.append(project_info)
dependencies[project] = sorted(new_dep_array)
return (projects, dependencies)
def PrintDependencies(projects, deps):
print "---------------------------------------"
print "Dependencies for all projects"
print "---------------------------------------"
print "-- --"
for (project, dep_list) in sorted(deps.items()):
print "Project : %s" % project
print "Path : %s" % projects[project][0]
if dep_list:
for dep in dep_list:
print " - %s" % dep
print ""
print "-- --"
def PrintBuildOrder(projects, deps):
print "---------------------------------------"
print "Build order "
print "---------------------------------------"
print "-- --"
built = []
for (project, _) in sorted(deps.items()):
if project not in built:
BuildProject(project, built, projects, deps)
print "-- --"
def PrintVCProj(projects):
for project in projects:
print "-------------------------------------"
print "-------------------------------------"
print project
print project
print project
print "-------------------------------------"
print "-------------------------------------"
project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
projects[project][2]))
pretty = pretty_vcproj
argv = [ '',
project_path,
'$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
]
argv.extend(sys.argv[3:])
pretty.main(argv)
def main():
# check if we have exactly 1 parameter.
if len(sys.argv) < 2:
print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
return 1
(projects, deps) = ParseSolution(sys.argv[1])
PrintDependencies(projects, deps)
PrintBuildOrder(projects, deps)
if '--recursive' in sys.argv:
PrintVCProj(projects)
return 0
if __name__ == '__main__':
sys.exit(main())
|
bsd-3-clause
|
mw46d/ardupilot
|
Tools/ardupilotwaf/cmake.py
|
42
|
13322
|
#!/usr/bin/env python
# encoding: utf-8
# Copyright (C) 2015-2016 Intel Corporation. All rights reserved.
#
# This file is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Waf tool for external builds with cmake. This tool defines the feature
'cmake_build', for building through the cmake interface.
You can use CMAKE_MIN_VERSION environment variable before loading this tool in
the configuration to set a minimum version required for cmake. Example::
def configure(cfg):
cfg.CMAKE_MIN_VERSION = '3.5.2'
cfg.load('cmake')
Usage example::
def build(bld):
# cmake configuration
foo = bld.cmake(
name='foo',
cmake_src='path/to/foosrc', # where is the source tree
cmake_bld='path/to/foobld', # where to generate the build system
cmake_vars=dict(
CMAKE_BUILD_TYPE='Release',
...
),
)
# cmake build for external target 'bar'
bld(
features='cmake_build',
cmake_config='foo', # this build depends on the cmake generation above defined
cmake_target='bar', # what to pass to option --target of cmake
)
# cmake build for target 'baz' (syntactic sugar)
foo.build('baz')
The keys of cmake_vars are sorted so that unnecessary execution is avoided. If
you want to ensure an order in which the variables are passed to cmake, use an
OrderedDict. Example::
def build(bld):
foo_vars = OrderedDict()
foo_vars['CMAKE_BUILD_TYPE'] = 'Release'
foo_vars['FOO'] = 'value_of_foo'
foo_vars['BAR'] = 'value_of_bar'
# cmake configuration
foo = bld.cmake(
cmake_vars=foo_vars,
...
)
There may be cases where you want to establish a dependency between other tasks
and the external build system's products (headers and libraries, for example).
In that case, you can list the specific files in the 'target' option of your
cmake_build task generator. Example::
def build(bld):
...
# declaring on target only what I'm interested in
foo.build('baz', target='path/to/foobld/include/baz.h')
# myprogram.c includes baz.h, so the dependency is (implicitly)
# established
bld.program(target='myprogram', source='myprogram.c')
# another example
foo.build('another', target='another.txt')
bld(
rule='${CP} ${SRC} ${TGT}',
source=bld.bldnode.find_or_declare('another.txt'),
target='another_copied.txt',
)
You can also establish the dependency directly on a task object::
@feature('myfeature')
def process_myfeature(self):
baz_taskgen = self.bld.get_tgen_by_name('baz')
baz_taskgen.post()
# every cmake_build taskgen stores its task in cmake_build_task
baz_task = baz_taskgen.cmake_build_task
tsk = self.create_task('mytask')
tsk.set_run_after(baz_task)
# tsk is run whenever baz_task changes its outputs, namely,
# path/to/foobld/include/baz.h
tsk.dep_nodes.extend(baz_task.outputs)
If your cmake build creates several files (that may be dependencies for several
tasks), you can use the parameter cmake_output_patterns. It receives a pattern
or a list of patterns relative to the cmake build directory. After the build
task runs, the files that match those patterns are set as outputs of the cmake
build task, so that they get a signature. Example::
def build(bld):
...
foo.build('baz', cmake_output_patterns='include/*.h')
...
"""
from waflib import Context, Node, Task, Utils
from waflib.Configure import conf
from waflib.TaskGen import feature, taskgen_method
from collections import OrderedDict
import os
import re
import sys
class cmake_configure_task(Task.Task):
vars = ['CMAKE_BLD_DIR']
run_str = '${CMAKE} ${CMAKE_FLAGS} ${CMAKE_SRC_DIR} ${CMAKE_VARS} ${CMAKE_GENERATOR_OPTION}'
color = 'BLUE'
def exec_command(self, cmd, **kw):
kw['stdout'] = sys.stdout
return super(cmake_configure_task, self).exec_command(cmd, **kw)
def uid(self):
if not hasattr(self, 'uid_'):
m = Utils.md5()
def u(s):
m.update(s.encode('utf-8'))
u(self.__class__.__name__)
u(self.env.get_flat('CMAKE_SRC_DIR'))
u(self.env.get_flat('CMAKE_BLD_DIR'))
u(self.env.get_flat('CMAKE_VARS'))
u(self.env.get_flat('CMAKE_FLAGS'))
self.uid_ = m.digest()
return self.uid_
def __str__(self):
return self.cmake.name
def keyword(self):
return 'CMake Configure'
# Force a clean cmake configuration: remove CMakeCache.txt before each
# (re)configure so that stale cache values cannot leak into the new run.
cmake_configure_task._original_run = cmake_configure_task.run
def _cmake_configure_task_run(self):
cmakecache_path = self.outputs[0].abspath()
if os.path.exists(cmakecache_path):
os.remove(cmakecache_path)
self._original_run()
cmake_configure_task.run = _cmake_configure_task_run
class cmake_build_task(Task.Task):
run_str = '${CMAKE} --build ${CMAKE_BLD_DIR} --target ${CMAKE_TARGET}'
color = 'BLUE'
    # the cmake-generated build system is responsible for managing its own
    # dependencies
always_run = True
def exec_command(self, cmd, **kw):
kw['stdout'] = sys.stdout
return super(cmake_build_task, self).exec_command(cmd, **kw)
def uid(self):
if not hasattr(self, 'uid_'):
m = Utils.md5()
def u(s):
m.update(s.encode('utf-8'))
u(self.__class__.__name__)
u(self.env.get_flat('CMAKE_BLD_DIR'))
u(self.env.get_flat('CMAKE_TARGET'))
self.uid_ = m.digest()
return self.uid_
def __str__(self):
return '%s %s' % (self.cmake.name, self.cmake_target)
def keyword(self):
return 'CMake Build'
# allow tasks to depend on possible headers or other resources if the user
# declares outputs for the cmake build
cmake_build_task = Task.update_outputs(cmake_build_task)
cmake_build_task.original_post_run = cmake_build_task.post_run
def _cmake_build_task_post_run(self):
self.output_patterns = Utils.to_list(self.output_patterns)
if not self.output_patterns:
return self.original_post_run()
bldnode = self.cmake.bldnode
for node in bldnode.ant_glob(self.output_patterns, remove=False):
self.set_outputs(node)
return self.original_post_run()
cmake_build_task.post_run = _cmake_build_task_post_run
class CMakeConfig(object):
'''
CMake configuration. This object shouldn't be instantiated directly. Use
bld.cmake().
'''
def __init__(self, bld, name, srcnode, bldnode, cmake_vars, cmake_flags):
self.bld = bld
self.name = name
self.srcnode = srcnode
self.bldnode = bldnode
self.vars = cmake_vars
self.flags = cmake_flags
self._config_task = None
self.last_build_task = None
def vars_keys(self):
keys = list(self.vars.keys())
if not isinstance(self.vars, OrderedDict):
keys.sort()
return keys
def config_sig(self):
m = Utils.md5()
def u(s):
m.update(s.encode('utf-8'))
u(self.srcnode.abspath())
u(self.bldnode.abspath())
for v in self.flags:
u(v)
keys = self.vars_keys()
for k in keys:
u(k)
u(self.vars[k])
return m.digest()
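    # Note: config_task() below caches the configure task keyed by this
    # signature, so taskgens sharing an unchanged configuration reuse a
    # single configure step.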
def config_task(self, taskgen):
sig = self.config_sig()
if self._config_task and self._config_task.cmake_config_sig == sig:
return self._config_task
self._config_task = taskgen.create_task('cmake_configure_task')
self._config_task.cwd = self.bldnode
self._config_task.cmake = self
self._config_task.cmake_config_sig = sig
env = self._config_task.env
env.CMAKE_BLD_DIR = self.bldnode.abspath()
env.CMAKE_SRC_DIR = self.srcnode.abspath()
keys = self.vars_keys()
env.CMAKE_VARS = ["-D%s='%s'" % (k, self.vars[k]) for k in keys]
env.CMAKE_FLAGS = self.flags
self._config_task.set_outputs(
self.bldnode.find_or_declare('CMakeCache.txt'),
)
if self.last_build_task:
self._config_task.set_run_after(self.last_build_task)
self.bldnode.mkdir()
return self._config_task
def build(self, cmake_target, **kw):
return self.bld.cmake_build(self.name, cmake_target, **kw)
_cmake_instances = {}
def get_cmake(name):
if name not in _cmake_instances:
raise Exception('cmake: configuration named "%s" not found' % name)
return _cmake_instances[name]
@conf
def cmake(bld, name, cmake_src=None, cmake_bld=None, cmake_vars={}, cmake_flags=''):
'''
This function has two signatures:
- bld.cmake(name, cmake_src, cmake_bld, cmake_vars):
Create a cmake configuration.
- bld.cmake(name):
Get the cmake configuration with name.
'''
if not cmake_src and not cmake_bld and not cmake_vars:
return get_cmake(name)
if name in _cmake_instances:
bld.fatal('cmake: configuration named "%s" already exists' % name)
if not isinstance(cmake_src, Node.Node):
cmake_src = bld.path.find_dir(cmake_src)
if not cmake_bld:
cmake_bld = cmake_src.get_bld()
elif not isinstance(cmake_bld, Node.Node):
cmake_bld = bld.bldnode.make_node(cmake_bld)
c = CMakeConfig(bld, name, cmake_src, cmake_bld, cmake_vars, cmake_flags)
_cmake_instances[name] = c
return c
@feature('cmake_build')
def process_cmake_build(self):
if not hasattr(self, 'cmake_target'):
self.bld.fatal('cmake_build: taskgen is missing cmake_target')
if not hasattr(self, 'cmake_config'):
self.bld.fatal('cmake_build: taskgen is missing cmake_config')
tsk = self.create_cmake_build_task(self.cmake_config, self.cmake_target)
self.cmake_build_task = tsk
outputs = Utils.to_list(getattr(self, 'target', ''))
if not isinstance(outputs, list):
outputs = [outputs]
for o in outputs:
if not isinstance(o, Node.Node):
o = self.path.find_or_declare(o)
tsk.set_outputs(o)
tsk.output_patterns = getattr(self, 'cmake_output_patterns', [])
@conf
def cmake_build(bld, cmake_config, cmake_target, **kw):
kw['cmake_config'] = cmake_config
kw['cmake_target'] = cmake_target
kw['features'] = Utils.to_list(kw.get('features', [])) + ['cmake_build']
if 'name' not in kw:
kw['name'] = '%s_%s' % (cmake_config, cmake_target)
return bld(**kw)
@taskgen_method
def create_cmake_build_task(self, cmake_config, cmake_target):
cmake = get_cmake(cmake_config)
tsk = self.create_task('cmake_build_task')
tsk.cmake = cmake
tsk.cmake_target = cmake_target
tsk.output_patterns = []
tsk.env.CMAKE_BLD_DIR = cmake.bldnode.abspath()
tsk.env.CMAKE_TARGET = cmake_target
self.cmake_config_task = cmake.config_task(self)
tsk.set_run_after(self.cmake_config_task)
if cmake.last_build_task:
tsk.set_run_after(cmake.last_build_task)
cmake.last_build_task = tsk
return tsk
def _check_min_version(cfg):
cfg.start_msg('Checking cmake version')
    cmd = [cfg.env.get_flat('CMAKE'), '--version']
out = cfg.cmd_and_log(cmd, quiet=Context.BOTH)
m = re.search(r'\d+\.\d+(\.\d+(\.\d+)?)?', out)
if not m:
cfg.end_msg(
'unable to parse version, build is not guaranteed to succeed',
color='YELLOW',
)
else:
version = Utils.num2ver(m.group(0))
minver_str = cfg.env.get_flat('CMAKE_MIN_VERSION')
minver = Utils.num2ver(minver_str)
if version < minver:
cfg.fatal('cmake must be at least at version %s' % minver_str)
cfg.end_msg(m.group(0))
generators = dict(
default=[
(['ninja', 'ninja-build'], 'Ninja'),
(['make'], 'Unix Makefiles'),
],
win32=[
(['ninja', 'ninja-build'], 'Ninja'),
(['nmake'], 'NMake Makefiles'),
],
)
def configure(cfg):
cfg.find_program('cmake')
if cfg.env.CMAKE_MIN_VERSION:
_check_min_version(cfg)
l = generators.get(Utils.unversioned_sys_platform(), generators['default'])
for names, generator in l:
if cfg.find_program(names, mandatory=False):
cfg.env.CMAKE_GENERATOR_OPTION = '-G%s' % generator
break
else:
cfg.fatal("cmake: couldn't find a suitable CMake generator. " +
"The ones supported by this Waf tool for this platform are: %s" % ', '.join(g for _, g in l))
|
gpl-3.0
|
holmboe/hyde
|
hydeengine/path_util.py
|
14
|
2092
|
import os
class PathUtil:
@staticmethod
def filter_hidden_inplace(item_list):
        if not item_list:
            return
wanted = filter(
lambda item:
not ((item.startswith('.') and item != ".htaccess") or item.endswith('~')), item_list)
count = len(item_list)
good_item_count = len(wanted)
        if count == good_item_count:
            return
item_list[:good_item_count] = wanted
for i in range(good_item_count, count):
item_list.pop()
@staticmethod
def get_path_fragment(root_dir, a_dir):
current_dir = a_dir
current_fragment = ''
while not current_dir == root_dir:
(current_dir, current_fragment_part) = os.path.split(current_dir)
current_fragment = os.path.join(
current_fragment_part, current_fragment)
return current_fragment
@staticmethod
def get_mirror_dir(directory,
source_root, mirror_root, ignore_root = False):
current_fragment = PathUtil.get_path_fragment(
source_root, directory)
if not current_fragment:
return mirror_root
mirror_directory = mirror_root
if not ignore_root:
mirror_directory = os.path.join(
mirror_root,
os.path.basename(source_root))
mirror_directory = os.path.join(
mirror_directory, current_fragment)
return mirror_directory
@staticmethod
def mirror_dir_tree(directory,
source_root, mirror_root, ignore_root = False):
mirror_directory = PathUtil.get_mirror_dir(
directory, source_root,
mirror_root, ignore_root)
        try:
            os.makedirs(mirror_directory)
        except OSError:
            # the mirror directory may already exist
            pass
return mirror_directory
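# A short usage sketch (hypothetical paths, POSIX separators):
#
#   PathUtil.get_path_fragment('/site', '/site/blog/2010')
#   # -> 'blog/2010/' (the join loop leaves a trailing separator)
#   PathUtil.get_mirror_dir('/site/blog/2010', '/site', '/deploy',
#                           ignore_root=True)
#   # -> '/deploy/blog/2010/'
#
# mirror_dir_tree() returns the same path as get_mirror_dir() and also
# creates it on disk.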
|
mit
|
cloudbase/lis-tempest
|
tempest/tests/test_rest_client.py
|
4
|
23314
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import httplib2
from oslotest import mockpatch
from tempest.common import rest_client
from tempest.common import xml_utils as xml
from tempest import config
from tempest import exceptions
from tempest.tests import base
from tempest.tests import fake_auth_provider
from tempest.tests import fake_config
from tempest.tests import fake_http
class BaseRestClientTestClass(base.TestCase):
url = 'fake_endpoint'
def _get_region(self):
return 'fake region'
def setUp(self):
super(BaseRestClientTestClass, self).setUp()
self.useFixture(fake_config.ConfigFixture())
self.stubs.Set(config, 'TempestConfigPrivate', fake_config.FakePrivate)
self.rest_client = rest_client.RestClient(
fake_auth_provider.FakeAuthProvider())
self.stubs.Set(httplib2.Http, 'request', self.fake_http.request)
        self.useFixture(mockpatch.PatchObject(self.rest_client, '_get_region',
                                              side_effect=self._get_region))
self.useFixture(mockpatch.PatchObject(self.rest_client,
'_log_request'))
class TestRestClientHTTPMethods(BaseRestClientTestClass):
def setUp(self):
self.fake_http = fake_http.fake_httplib2()
super(TestRestClientHTTPMethods, self).setUp()
self.useFixture(mockpatch.PatchObject(self.rest_client,
'_error_checker'))
def test_post(self):
__, return_dict = self.rest_client.post(self.url, {}, {})
self.assertEqual('POST', return_dict['method'])
def test_get(self):
__, return_dict = self.rest_client.get(self.url)
self.assertEqual('GET', return_dict['method'])
def test_delete(self):
__, return_dict = self.rest_client.delete(self.url)
self.assertEqual('DELETE', return_dict['method'])
def test_patch(self):
__, return_dict = self.rest_client.patch(self.url, {}, {})
self.assertEqual('PATCH', return_dict['method'])
def test_put(self):
__, return_dict = self.rest_client.put(self.url, {}, {})
self.assertEqual('PUT', return_dict['method'])
def test_head(self):
self.useFixture(mockpatch.PatchObject(self.rest_client,
'response_checker'))
__, return_dict = self.rest_client.head(self.url)
self.assertEqual('HEAD', return_dict['method'])
def test_copy(self):
__, return_dict = self.rest_client.copy(self.url)
self.assertEqual('COPY', return_dict['method'])
class TestRestClientNotFoundHandling(BaseRestClientTestClass):
def setUp(self):
self.fake_http = fake_http.fake_httplib2(404)
super(TestRestClientNotFoundHandling, self).setUp()
def test_post(self):
self.assertRaises(exceptions.NotFound, self.rest_client.post,
self.url, {}, {})
class TestRestClientHeadersJSON(TestRestClientHTTPMethods):
TYPE = "json"
def _verify_headers(self, resp):
self.assertEqual(self.rest_client._get_type(), self.TYPE)
resp = dict((k.lower(), v) for k, v in resp.iteritems())
self.assertEqual(self.header_value, resp['accept'])
self.assertEqual(self.header_value, resp['content-type'])
def setUp(self):
super(TestRestClientHeadersJSON, self).setUp()
self.rest_client.TYPE = self.TYPE
self.header_value = 'application/%s' % self.rest_client._get_type()
def test_post(self):
resp, __ = self.rest_client.post(self.url, {})
self._verify_headers(resp)
def test_get(self):
resp, __ = self.rest_client.get(self.url)
self._verify_headers(resp)
def test_delete(self):
resp, __ = self.rest_client.delete(self.url)
self._verify_headers(resp)
def test_patch(self):
resp, __ = self.rest_client.patch(self.url, {})
self._verify_headers(resp)
def test_put(self):
resp, __ = self.rest_client.put(self.url, {})
self._verify_headers(resp)
def test_head(self):
self.useFixture(mockpatch.PatchObject(self.rest_client,
'response_checker'))
resp, __ = self.rest_client.head(self.url)
self._verify_headers(resp)
def test_copy(self):
resp, __ = self.rest_client.copy(self.url)
self._verify_headers(resp)
class TestRestClientUpdateHeaders(BaseRestClientTestClass):
def setUp(self):
self.fake_http = fake_http.fake_httplib2()
super(TestRestClientUpdateHeaders, self).setUp()
self.useFixture(mockpatch.PatchObject(self.rest_client,
'_error_checker'))
self.headers = {'X-Configuration-Session': 'session_id'}
def test_post_update_headers(self):
__, return_dict = self.rest_client.post(self.url, {},
extra_headers=True,
headers=self.headers)
self.assertDictContainsSubset(
{'X-Configuration-Session': 'session_id',
'Content-Type': 'application/json',
'Accept': 'application/json'},
return_dict['headers']
)
def test_get_update_headers(self):
__, return_dict = self.rest_client.get(self.url,
extra_headers=True,
headers=self.headers)
self.assertDictContainsSubset(
{'X-Configuration-Session': 'session_id',
'Content-Type': 'application/json',
'Accept': 'application/json'},
return_dict['headers']
)
def test_delete_update_headers(self):
__, return_dict = self.rest_client.delete(self.url,
extra_headers=True,
headers=self.headers)
self.assertDictContainsSubset(
{'X-Configuration-Session': 'session_id',
'Content-Type': 'application/json',
'Accept': 'application/json'},
return_dict['headers']
)
def test_patch_update_headers(self):
__, return_dict = self.rest_client.patch(self.url, {},
extra_headers=True,
headers=self.headers)
self.assertDictContainsSubset(
{'X-Configuration-Session': 'session_id',
'Content-Type': 'application/json',
'Accept': 'application/json'},
return_dict['headers']
)
def test_put_update_headers(self):
__, return_dict = self.rest_client.put(self.url, {},
extra_headers=True,
headers=self.headers)
self.assertDictContainsSubset(
{'X-Configuration-Session': 'session_id',
'Content-Type': 'application/json',
'Accept': 'application/json'},
return_dict['headers']
)
def test_head_update_headers(self):
self.useFixture(mockpatch.PatchObject(self.rest_client,
'response_checker'))
__, return_dict = self.rest_client.head(self.url,
extra_headers=True,
headers=self.headers)
self.assertDictContainsSubset(
{'X-Configuration-Session': 'session_id',
'Content-Type': 'application/json',
'Accept': 'application/json'},
return_dict['headers']
)
def test_copy_update_headers(self):
__, return_dict = self.rest_client.copy(self.url,
extra_headers=True,
headers=self.headers)
self.assertDictContainsSubset(
{'X-Configuration-Session': 'session_id',
'Content-Type': 'application/json',
'Accept': 'application/json'},
return_dict['headers']
)
class TestRestClientHeadersXML(TestRestClientHeadersJSON):
TYPE = "xml"
    # These two tests only need to run in one subclass
def test_send_json_accept_xml(self):
resp, __ = self.rest_client.get(self.url,
self.rest_client.get_headers("xml",
"json"))
resp = dict((k.lower(), v) for k, v in resp.iteritems())
self.assertEqual("application/json", resp["content-type"])
self.assertEqual("application/xml", resp["accept"])
def test_send_xml_accept_json(self):
resp, __ = self.rest_client.get(self.url,
self.rest_client.get_headers("json",
"xml"))
resp = dict((k.lower(), v) for k, v in resp.iteritems())
self.assertEqual("application/json", resp["accept"])
self.assertEqual("application/xml", resp["content-type"])
class TestRestClientParseRespXML(BaseRestClientTestClass):
TYPE = "xml"
keys = ["fake_key1", "fake_key2"]
values = ["fake_value1", "fake_value2"]
item_expected = dict((key, value) for (key, value) in zip(keys, values))
list_expected = {"body_list": [
{keys[0]: values[0]},
{keys[1]: values[1]},
]}
dict_expected = {"body_dict": {
keys[0]: values[0],
keys[1]: values[1],
}}
def setUp(self):
self.fake_http = fake_http.fake_httplib2()
super(TestRestClientParseRespXML, self).setUp()
self.rest_client.TYPE = self.TYPE
def test_parse_resp_body_item(self):
body_item = xml.Element("item", **self.item_expected)
body = self.rest_client._parse_resp(str(xml.Document(body_item)))
self.assertEqual(self.item_expected, body)
def test_parse_resp_body_list(self):
self.rest_client.list_tags = ["fake_list", ]
body_list = xml.Element(self.rest_client.list_tags[0])
for i in range(2):
body_list.append(xml.Element("fake_item",
**self.list_expected["body_list"][i]))
body = self.rest_client._parse_resp(str(xml.Document(body_list)))
self.assertEqual(self.list_expected["body_list"], body)
def test_parse_resp_body_dict(self):
self.rest_client.dict_tags = ["fake_dict", ]
body_dict = xml.Element(self.rest_client.dict_tags[0])
for i in range(2):
body_dict.append(xml.Element("fake_item", xml.Text(self.values[i]),
key=self.keys[i]))
body = self.rest_client._parse_resp(str(xml.Document(body_dict)))
self.assertEqual(self.dict_expected["body_dict"], body)
class TestRestClientParseRespJSON(TestRestClientParseRespXML):
TYPE = "json"
def test_parse_resp_body_item(self):
body = self.rest_client._parse_resp(json.dumps(self.item_expected))
self.assertEqual(self.item_expected, body)
def test_parse_resp_body_list(self):
body = self.rest_client._parse_resp(json.dumps(self.list_expected))
self.assertEqual(self.list_expected["body_list"], body)
def test_parse_resp_body_dict(self):
body = self.rest_client._parse_resp(json.dumps(self.dict_expected))
self.assertEqual(self.dict_expected["body_dict"], body)
def test_parse_resp_two_top_keys(self):
dict_two_keys = self.dict_expected.copy()
dict_two_keys.update({"second_key": ""})
body = self.rest_client._parse_resp(json.dumps(dict_two_keys))
self.assertEqual(dict_two_keys, body)
def test_parse_resp_one_top_key_without_list_or_dict(self):
data = {"one_top_key": "not_list_or_dict_value"}
body = self.rest_client._parse_resp(json.dumps(data))
self.assertEqual(data, body)
class TestRestClientErrorCheckerJSON(base.TestCase):
c_type = "application/json"
def set_data(self, r_code, enc=None, r_body=None):
if enc is None:
enc = self.c_type
resp_dict = {'status': r_code, 'content-type': enc}
resp = httplib2.Response(resp_dict)
data = {
"method": "fake_method",
"url": "fake_url",
"headers": "fake_headers",
"body": "fake_body",
"resp": resp,
"resp_body": '{"resp_body": "fake_resp_body"}',
}
if r_body is not None:
data.update({"resp_body": r_body})
return data
def setUp(self):
super(TestRestClientErrorCheckerJSON, self).setUp()
self.useFixture(fake_config.ConfigFixture())
self.stubs.Set(config, 'TempestConfigPrivate', fake_config.FakePrivate)
self.rest_client = rest_client.RestClient(
fake_auth_provider.FakeAuthProvider())
def test_response_less_than_400(self):
self.rest_client._error_checker(**self.set_data("399"))
def test_response_400(self):
self.assertRaises(exceptions.BadRequest,
self.rest_client._error_checker,
**self.set_data("400"))
def test_response_401(self):
self.assertRaises(exceptions.Unauthorized,
self.rest_client._error_checker,
**self.set_data("401"))
def test_response_403(self):
self.assertRaises(exceptions.Unauthorized,
self.rest_client._error_checker,
**self.set_data("403"))
def test_response_404(self):
self.assertRaises(exceptions.NotFound,
self.rest_client._error_checker,
**self.set_data("404"))
def test_response_409(self):
self.assertRaises(exceptions.Conflict,
self.rest_client._error_checker,
**self.set_data("409"))
def test_response_413(self):
self.assertRaises(exceptions.OverLimit,
self.rest_client._error_checker,
**self.set_data("413"))
def test_response_422(self):
self.assertRaises(exceptions.UnprocessableEntity,
self.rest_client._error_checker,
**self.set_data("422"))
def test_response_500_with_text(self):
# _parse_resp is expected to return 'str'
self.assertRaises(exceptions.ServerFault,
self.rest_client._error_checker,
**self.set_data("500"))
def test_response_501_with_text(self):
self.assertRaises(exceptions.ServerFault,
self.rest_client._error_checker,
**self.set_data("501"))
def test_response_500_with_dict(self):
r_body = '{"resp_body": {"err": "fake_resp_body"}}'
self.assertRaises(exceptions.ServerFault,
self.rest_client._error_checker,
**self.set_data("500", r_body=r_body))
def test_response_501_with_dict(self):
r_body = '{"resp_body": {"err": "fake_resp_body"}}'
self.assertRaises(exceptions.ServerFault,
self.rest_client._error_checker,
**self.set_data("501", r_body=r_body))
def test_response_bigger_than_400(self):
        # Any response code greater than 400 that is not in
        # (401, 403, 404, 409, 413, 422, 500, 501) is unexpected
self.assertRaises(exceptions.UnexpectedResponseCode,
self.rest_client._error_checker,
**self.set_data("402"))
class TestRestClientErrorCheckerXML(TestRestClientErrorCheckerJSON):
c_type = "application/xml"
class TestRestClientErrorCheckerTEXT(TestRestClientErrorCheckerJSON):
c_type = "text/plain"
def test_fake_content_type(self):
        # This test only needs to run in one subclass.
        # An unrecognized content-type must raise InvalidContentType,
        # regardless of the response code.
self.assertRaises(exceptions.InvalidContentType,
self.rest_client._error_checker,
**self.set_data("405", enc="fake_enc"))
class TestRestClientUtils(BaseRestClientTestClass):
def _is_resource_deleted(self, resource_id):
if not isinstance(self.retry_pass, int):
return False
if self.retry_count >= self.retry_pass:
return True
self.retry_count = self.retry_count + 1
return False
def setUp(self):
self.fake_http = fake_http.fake_httplib2()
super(TestRestClientUtils, self).setUp()
self.retry_count = 0
self.retry_pass = None
self.original_deleted_method = self.rest_client.is_resource_deleted
self.rest_client.is_resource_deleted = self._is_resource_deleted
def test_wait_for_resource_deletion(self):
self.retry_pass = 2
# Ensure timeout long enough for loop execution to hit retry count
self.rest_client.build_timeout = 500
sleep_mock = self.patch('time.sleep')
self.rest_client.wait_for_resource_deletion('1234')
self.assertEqual(len(sleep_mock.mock_calls), 2)
def test_wait_for_resource_deletion_not_deleted(self):
self.patch('time.sleep')
# Set timeout to be very quick to force exception faster
self.rest_client.build_timeout = 1
self.assertRaises(exceptions.TimeoutException,
self.rest_client.wait_for_resource_deletion,
'1234')
def test_wait_for_deletion_with_unimplemented_deleted_method(self):
self.rest_client.is_resource_deleted = self.original_deleted_method
self.assertRaises(NotImplementedError,
self.rest_client.wait_for_resource_deletion,
'1234')
class TestNegativeRestClient(BaseRestClientTestClass):
def setUp(self):
self.fake_http = fake_http.fake_httplib2()
super(TestNegativeRestClient, self).setUp()
self.negative_rest_client = rest_client.NegativeRestClient(
fake_auth_provider.FakeAuthProvider())
self.useFixture(mockpatch.PatchObject(self.negative_rest_client,
'_log_request'))
def test_post(self):
__, return_dict = self.negative_rest_client.send_request('POST',
self.url,
[], {})
self.assertEqual('POST', return_dict['method'])
def test_get(self):
__, return_dict = self.negative_rest_client.send_request('GET',
self.url,
[])
self.assertEqual('GET', return_dict['method'])
def test_delete(self):
__, return_dict = self.negative_rest_client.send_request('DELETE',
self.url,
[])
self.assertEqual('DELETE', return_dict['method'])
def test_patch(self):
__, return_dict = self.negative_rest_client.send_request('PATCH',
self.url,
[], {})
self.assertEqual('PATCH', return_dict['method'])
def test_put(self):
__, return_dict = self.negative_rest_client.send_request('PUT',
self.url,
[], {})
self.assertEqual('PUT', return_dict['method'])
def test_head(self):
self.useFixture(mockpatch.PatchObject(self.negative_rest_client,
'response_checker'))
__, return_dict = self.negative_rest_client.send_request('HEAD',
self.url,
[])
self.assertEqual('HEAD', return_dict['method'])
def test_copy(self):
__, return_dict = self.negative_rest_client.send_request('COPY',
self.url,
[])
self.assertEqual('COPY', return_dict['method'])
def test_other(self):
self.assertRaises(AssertionError,
self.negative_rest_client.send_request,
'OTHER', self.url, [])
class TestExpectedSuccess(BaseRestClientTestClass):
def setUp(self):
self.fake_http = fake_http.fake_httplib2()
super(TestExpectedSuccess, self).setUp()
    def test_expected_success_int_match(self):
expected_code = 202
read_code = 202
resp = self.rest_client.expected_success(expected_code, read_code)
# Assert None resp on success
self.assertFalse(resp)
    def test_expected_success_int_no_match(self):
expected_code = 204
read_code = 202
self.assertRaises(exceptions.InvalidHttpSuccessCode,
self.rest_client.expected_success,
expected_code, read_code)
    def test_expected_success_list_match(self):
expected_code = [202, 204]
read_code = 202
resp = self.rest_client.expected_success(expected_code, read_code)
# Assert None resp on success
self.assertFalse(resp)
    def test_expected_success_list_no_match(self):
expected_code = [202, 204]
read_code = 200
self.assertRaises(exceptions.InvalidHttpSuccessCode,
self.rest_client.expected_success,
expected_code, read_code)
def test_non_success_expected_int(self):
expected_code = 404
read_code = 202
self.assertRaises(AssertionError, self.rest_client.expected_success,
expected_code, read_code)
def test_non_success_expected_list(self):
expected_code = [404, 202]
read_code = 202
self.assertRaises(AssertionError, self.rest_client.expected_success,
expected_code, read_code)
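# A minimal sketch of how these fixtures compose (following the pattern used
# throughout this module; 'TestMyClient' is hypothetical):
#
#   class TestMyClient(BaseRestClientTestClass):
#       def setUp(self):
#           self.fake_http = fake_http.fake_httplib2()  # canned 200 responses
#           super(TestMyClient, self).setUp()
#
#       def test_get(self):
#           __, return_dict = self.rest_client.get(self.url)
#           self.assertEqual('GET', return_dict['method'])
#
# Each subclass chooses the fake HTTP behaviour (e.g. fake_httplib2(404))
# before calling the base setUp, which stubs it into httplib2.Http.request.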
|
apache-2.0
|