repo_name | path | copies | size | content | license
---|---|---|---|---|---|
21winner/namebench | nb_third_party/jinja2/compiler.py | 199 | 61044 | # -*- coding: utf-8 -*-
"""
jinja2.compiler
~~~~~~~~~~~~~~~
Compiles nodes into python code.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from cStringIO import StringIO
from itertools import chain
from copy import deepcopy
from jinja2 import nodes
from jinja2.nodes import EvalContext
from jinja2.visitor import NodeVisitor, NodeTransformer
from jinja2.exceptions import TemplateAssertionError
from jinja2.utils import Markup, concat, escape, is_python_keyword, next
operators = {
'eq': '==',
'ne': '!=',
'gt': '>',
'gteq': '>=',
'lt': '<',
'lteq': '<=',
'in': 'in',
'notin': 'not in'
}
try:
exec '(0 if 0 else 0)'
except SyntaxError:
have_condexpr = False
else:
have_condexpr = True
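# The exec probe above feature-detects conditional expressions (added
# in Python 2.5).  A sketch of the two forms visit_CondExpr() below
# generates for ``{{ a if b else c }}``:
#   with condexpr:    (a if b else c)
#   without condexpr: ((b) and (a,) or (c,))[0]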
# which method do we want to use for dict iteration in generated code?
# on 2.x let's go with iteritems, on 3.x with items
if hasattr(dict, 'iteritems'):
dict_item_iter = 'iteritems'
else:
dict_item_iter = 'items'
# does if 0: dummy(x) get us x into the scope?
def unoptimize_before_dead_code():
x = 42
def f():
if 0: dummy(x)
return f
unoptimize_before_dead_code = bool(unoptimize_before_dead_code().func_closure)
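# ``func_closure`` is non-empty when CPython kept ``x`` alive for the
# dead ``if 0:`` branch, i.e. locals are resolved before dead code is
# removed; unoptimize_scope() below relies on this flag.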
def generate(node, environment, name, filename, stream=None,
defer_init=False):
"""Generate the python source for a node tree."""
if not isinstance(node, nodes.Template):
raise TypeError('Can\'t compile non template nodes')
generator = CodeGenerator(environment, name, filename, stream, defer_init)
generator.visit(node)
if stream is None:
return generator.stream.getvalue()
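# A minimal usage sketch (the environment and template source below are
# assumptions for illustration, not part of this module):
#   from jinja2 import Environment
#   env = Environment()
#   source = generate(env.parse(u'Hello {{ name }}!'),
#                     env, 'hello', 'hello.html')
#   # ``source`` now holds the generated python module source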
def has_safe_repr(value):
"""Does the node have a safe representation?"""
if value is None or value is NotImplemented or value is Ellipsis:
return True
if isinstance(value, (bool, int, long, float, complex, basestring,
xrange, Markup)):
return True
if isinstance(value, (tuple, list, set, frozenset)):
for item in value:
if not has_safe_repr(item):
return False
return True
elif isinstance(value, dict):
for key, value in value.iteritems():
if not has_safe_repr(key):
return False
if not has_safe_repr(value):
return False
return True
return False
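# Examples of what the checks above accept and reject (a sketch):
#   has_safe_repr(42) -> True
#   has_safe_repr((1, u'a', None)) -> True
#   has_safe_repr({'key': object()}) -> False   # unsafe value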
def find_undeclared(nodes, names):
"""Check if the names passed are accessed undeclared. The return value
is a set of all the undeclared names from the sequence of names found.
"""
visitor = UndeclaredNameVisitor(names)
try:
for node in nodes:
visitor.visit(node)
except VisitorExit:
pass
return visitor.undeclared
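# E.g. visit_Template() below calls find_undeclared(node.body,
# ('self',)) and only emits ``l_self = TemplateReference(context)``
# when 'self' comes back in the returned set.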
class Identifiers(object):
"""Tracks the status of identifiers in frames."""
def __init__(self):
# variables that are known to be declared (probably from outer
# frames or because they are special for the frame)
self.declared = set()
# undeclared variables from outer scopes
self.outer_undeclared = set()
# names that are accessed without being explicitly declared by
# this one or any of the outer scopes. Names can appear both in
# declared and undeclared.
self.undeclared = set()
# names that are declared locally
self.declared_locally = set()
# names that are declared by parameters
self.declared_parameter = set()
def add_special(self, name):
"""Register a special name like `loop`."""
self.undeclared.discard(name)
self.declared.add(name)
def is_declared(self, name, local_only=False):
"""Check if a name is declared in this or an outer scope."""
if name in self.declared_locally or name in self.declared_parameter:
return True
if local_only:
return False
return name in self.declared
def copy(self):
return deepcopy(self)
class Frame(object):
"""Holds compile time information for us."""
def __init__(self, eval_ctx, parent=None):
self.eval_ctx = eval_ctx
self.identifiers = Identifiers()
# a toplevel frame is the root + soft frames such as if conditions.
self.toplevel = False
# the root frame is basically just the outermost frame, so no if
# conditions. This information is used to optimize inheritance
# situations.
self.rootlevel = False
# in some dynamic inheritance situations the compiler needs to add
# write tests around output statements.
self.require_output_check = parent and parent.require_output_check
# inside some tags we are using a buffer rather than yield statements.
# this for example affects {% filter %} or {% macro %}. If a frame
# is buffered this variable points to the name of the list used as
# buffer.
self.buffer = None
# the name of the block we're in, otherwise None.
self.block = parent and parent.block or None
# a set of actually assigned names
self.assigned_names = set()
# the parent of this frame
self.parent = parent
if parent is not None:
self.identifiers.declared.update(
parent.identifiers.declared |
parent.identifiers.declared_parameter |
parent.assigned_names
)
self.identifiers.outer_undeclared.update(
parent.identifiers.undeclared -
self.identifiers.declared
)
self.buffer = parent.buffer
def copy(self):
"""Create a copy of the current one."""
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.identifiers = object.__new__(self.identifiers.__class__)
rv.identifiers.__dict__.update(self.identifiers.__dict__)
return rv
def inspect(self, nodes, hard_scope=False):
"""Walk the node and check for identifiers. If the scope is hard (eg:
        enforced on a python level) overrides from outer scopes are tracked
differently.
"""
visitor = FrameIdentifierVisitor(self.identifiers, hard_scope)
for node in nodes:
visitor.visit(node)
def find_shadowed(self, extra=()):
"""Find all the shadowed names. extra is an iterable of variables
        that may be defined with `add_special` and which may occur scoped.
"""
i = self.identifiers
return (i.declared | i.outer_undeclared) & \
(i.declared_locally | i.declared_parameter) | \
set(x for x in extra if i.is_declared(x))
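    # Example: if an outer frame declared ``item`` and this frame also
    # binds ``item`` locally, find_shadowed() reports ``item`` so that
    # push_scope() below can emit a ``t_N = l_item`` alias for it.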
def inner(self):
"""Return an inner frame."""
return Frame(self.eval_ctx, self)
def soft(self):
"""Return a soft frame. A soft frame may not be modified as
standalone thing as it shares the resources with the frame it
was created of, but it's not a rootlevel frame any longer.
"""
rv = self.copy()
rv.rootlevel = False
return rv
__copy__ = copy
class VisitorExit(RuntimeError):
"""Exception used by the `UndeclaredNameVisitor` to signal a stop."""
class DependencyFinderVisitor(NodeVisitor):
"""A visitor that collects filter and test calls."""
def __init__(self):
self.filters = set()
self.tests = set()
def visit_Filter(self, node):
self.generic_visit(node)
self.filters.add(node.name)
def visit_Test(self, node):
self.generic_visit(node)
self.tests.add(node.name)
def visit_Block(self, node):
"""Stop visiting at blocks."""
class UndeclaredNameVisitor(NodeVisitor):
"""A visitor that checks if a name is accessed without being
declared. This is different from the frame visitor as it will
not stop at closure frames.
"""
def __init__(self, names):
self.names = set(names)
self.undeclared = set()
def visit_Name(self, node):
if node.ctx == 'load' and node.name in self.names:
self.undeclared.add(node.name)
if self.undeclared == self.names:
raise VisitorExit()
else:
self.names.discard(node.name)
def visit_Block(self, node):
"""Stop visiting a blocks."""
class FrameIdentifierVisitor(NodeVisitor):
"""A visitor for `Frame.inspect`."""
def __init__(self, identifiers, hard_scope):
self.identifiers = identifiers
self.hard_scope = hard_scope
def visit_Name(self, node):
"""All assignments to names go through this function."""
if node.ctx == 'store':
self.identifiers.declared_locally.add(node.name)
elif node.ctx == 'param':
self.identifiers.declared_parameter.add(node.name)
elif node.ctx == 'load' and not \
self.identifiers.is_declared(node.name, self.hard_scope):
self.identifiers.undeclared.add(node.name)
def visit_If(self, node):
self.visit(node.test)
real_identifiers = self.identifiers
old_names = real_identifiers.declared_locally | \
real_identifiers.declared_parameter
def inner_visit(nodes):
if not nodes:
return set()
self.identifiers = real_identifiers.copy()
for subnode in nodes:
self.visit(subnode)
rv = self.identifiers.declared_locally - old_names
# we have to remember the undeclared variables of this branch
# because we will have to pull them.
real_identifiers.undeclared.update(self.identifiers.undeclared)
self.identifiers = real_identifiers
return rv
body = inner_visit(node.body)
else_ = inner_visit(node.else_ or ())
# the differences between the two branches are also pulled as
# undeclared variables
real_identifiers.undeclared.update(body.symmetric_difference(else_) -
real_identifiers.declared)
# remember those that are declared.
real_identifiers.declared_locally.update(body | else_)
def visit_Macro(self, node):
self.identifiers.declared_locally.add(node.name)
def visit_Import(self, node):
self.generic_visit(node)
self.identifiers.declared_locally.add(node.target)
def visit_FromImport(self, node):
self.generic_visit(node)
for name in node.names:
if isinstance(name, tuple):
self.identifiers.declared_locally.add(name[1])
else:
self.identifiers.declared_locally.add(name)
def visit_Assign(self, node):
"""Visit assignments in the correct order."""
self.visit(node.node)
self.visit(node.target)
def visit_For(self, node):
"""Visiting stops at for blocks. However the block sequence
is visited as part of the outer scope.
"""
self.visit(node.iter)
def visit_CallBlock(self, node):
self.visit(node.call)
def visit_FilterBlock(self, node):
self.visit(node.filter)
def visit_Scope(self, node):
"""Stop visiting at scopes."""
def visit_Block(self, node):
"""Stop visiting at blocks."""
class CompilerExit(Exception):
"""Raised if the compiler encountered a situation where it just
doesn't make sense to further process the code. Any block that
raises such an exception is not further processed.
"""
class CodeGenerator(NodeVisitor):
def __init__(self, environment, name, filename, stream=None,
defer_init=False):
if stream is None:
stream = StringIO()
self.environment = environment
self.name = name
self.filename = filename
self.stream = stream
self.created_block_context = False
self.defer_init = defer_init
# aliases for imports
self.import_aliases = {}
# a registry for all blocks. Because blocks are moved out
# into the global python scope they are registered here
self.blocks = {}
# the number of extends statements so far
self.extends_so_far = 0
# some templates have a rootlevel extends. In this case we
# can safely assume that we're a child template and do some
# more optimizations.
self.has_known_extends = False
# the current line number
self.code_lineno = 1
# registry of all filters and tests (global, not block local)
self.tests = {}
self.filters = {}
# the debug information
self.debug_info = []
self._write_debug_info = None
# the number of new lines before the next write()
self._new_lines = 0
# the line number of the last written statement
self._last_line = 0
# true if nothing was written so far.
self._first_write = True
# used by the `temporary_identifier` method to get new
# unique, temporary identifier
self._last_identifier = 0
# the current indentation
self._indentation = 0
# -- Various compilation helpers
def fail(self, msg, lineno):
"""Fail with a :exc:`TemplateAssertionError`."""
raise TemplateAssertionError(msg, lineno, self.name, self.filename)
def temporary_identifier(self):
"""Get a new unique identifier."""
self._last_identifier += 1
return 't_%d' % self._last_identifier
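    # successive calls return 't_1', 't_2', ... so generated helper
    # names never collide within one generator run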
def buffer(self, frame):
"""Enable buffering for the frame from that point onwards."""
frame.buffer = self.temporary_identifier()
self.writeline('%s = []' % frame.buffer)
def return_buffer_contents(self, frame):
"""Return the buffer contents of the frame."""
if frame.eval_ctx.volatile:
self.writeline('if context.eval_ctx.autoescape:')
self.indent()
self.writeline('return Markup(concat(%s))' % frame.buffer)
self.outdent()
self.writeline('else:')
self.indent()
self.writeline('return concat(%s)' % frame.buffer)
self.outdent()
elif frame.eval_ctx.autoescape:
self.writeline('return Markup(concat(%s))' % frame.buffer)
else:
self.writeline('return concat(%s)' % frame.buffer)
def indent(self):
"""Indent by one."""
self._indentation += 1
def outdent(self, step=1):
"""Outdent by step."""
self._indentation -= step
def start_write(self, frame, node=None):
"""Yield or write into the frame buffer."""
if frame.buffer is None:
self.writeline('yield ', node)
else:
self.writeline('%s.append(' % frame.buffer, node)
def end_write(self, frame):
"""End the writing process started by `start_write`."""
if frame.buffer is not None:
self.write(')')
def simple_write(self, s, frame, node=None):
"""Simple shortcut for start_write + write + end_write."""
self.start_write(frame, node)
self.write(s)
self.end_write(frame)
def blockvisit(self, nodes, frame):
"""Visit a list of nodes as block in a frame. If the current frame
is no buffer a dummy ``if 0: yield None`` is written automatically
unless the force_generator parameter is set to False.
"""
if frame.buffer is None:
self.writeline('if 0: yield None')
else:
self.writeline('pass')
try:
for node in nodes:
self.visit(node, frame)
except CompilerExit:
pass
def write(self, x):
"""Write a string into the output stream."""
if self._new_lines:
if not self._first_write:
self.stream.write('\n' * self._new_lines)
self.code_lineno += self._new_lines
if self._write_debug_info is not None:
self.debug_info.append((self._write_debug_info,
self.code_lineno))
self._write_debug_info = None
self._first_write = False
self.stream.write(' ' * self._indentation)
self._new_lines = 0
self.stream.write(x)
def writeline(self, x, node=None, extra=0):
"""Combination of newline and write."""
self.newline(node, extra)
self.write(x)
def newline(self, node=None, extra=0):
"""Add one or more newlines before the next write."""
self._new_lines = max(self._new_lines, 1 + extra)
if node is not None and node.lineno != self._last_line:
self._write_debug_info = node.lineno
self._last_line = node.lineno
def signature(self, node, frame, extra_kwargs=None):
"""Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
        arguments may not include python keywords, otherwise a syntax
        error could occur.  The extra keyword arguments should be given
        as a python dict.
"""
# if any of the given keyword arguments is a python keyword
# we have to make sure that no invalid call is created.
kwarg_workaround = False
for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
if is_python_keyword(kwarg):
kwarg_workaround = True
break
for arg in node.args:
self.write(', ')
self.visit(arg, frame)
if not kwarg_workaround:
for kwarg in node.kwargs:
self.write(', ')
self.visit(kwarg, frame)
if extra_kwargs is not None:
for key, value in extra_kwargs.iteritems():
self.write(', %s=%s' % (key, value))
if node.dyn_args:
self.write(', *')
self.visit(node.dyn_args, frame)
if kwarg_workaround:
if node.dyn_kwargs is not None:
self.write(', **dict({')
else:
self.write(', **{')
for kwarg in node.kwargs:
self.write('%r: ' % kwarg.key)
self.visit(kwarg.value, frame)
self.write(', ')
if extra_kwargs is not None:
for key, value in extra_kwargs.iteritems():
self.write('%r: %s, ' % (key, value))
if node.dyn_kwargs is not None:
self.write('}, **')
self.visit(node.dyn_kwargs, frame)
self.write(')')
else:
self.write('}')
elif node.dyn_kwargs is not None:
self.write(', **')
self.visit(node.dyn_kwargs, frame)
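    # For a template call like ``{{ f(1, x=2) }}`` signature() emits
    # roughly ``, 1, x=2``; when a keyword collides with a python
    # keyword (e.g. ``class``) it falls back to the ``**{'class': ...}``
    # form built above.  (A sketch, not verbatim output.)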
def pull_locals(self, frame):
"""Pull all the references identifiers into the local scope."""
for name in frame.identifiers.undeclared:
self.writeline('l_%s = context.resolve(%r)' % (name, name))
def pull_dependencies(self, nodes):
"""Pull all the dependencies."""
visitor = DependencyFinderVisitor()
for node in nodes:
visitor.visit(node)
for dependency in 'filters', 'tests':
mapping = getattr(self, dependency)
for name in getattr(visitor, dependency):
if name not in mapping:
mapping[name] = self.temporary_identifier()
self.writeline('%s = environment.%s[%r]' %
(mapping[name], dependency, name))
def unoptimize_scope(self, frame):
"""Disable Python optimizations for the frame."""
# XXX: this is not that nice but it has no real overhead. It
# mainly works because python finds the locals before dead code
# is removed. If that breaks we have to add a dummy function
# that just accepts the arguments and does nothing.
if frame.identifiers.declared:
self.writeline('%sdummy(%s)' % (
unoptimize_before_dead_code and 'if 0: ' or '',
', '.join('l_' + name for name in frame.identifiers.declared)
))
def push_scope(self, frame, extra_vars=()):
"""This function returns all the shadowed variables in a dict
in the form name: alias and will write the required assignments
into the current scope. No indentation takes place.
This also predefines locally declared variables from the loop
body because under some circumstances it may be the case that
`extra_vars` is passed to `Frame.find_shadowed`.
"""
aliases = {}
for name in frame.find_shadowed(extra_vars):
aliases[name] = ident = self.temporary_identifier()
self.writeline('%s = l_%s' % (ident, name))
to_declare = set()
for name in frame.identifiers.declared_locally:
if name not in aliases:
to_declare.add('l_' + name)
if to_declare:
self.writeline(' = '.join(to_declare) + ' = missing')
return aliases
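    # A sketch of what push_scope() writes for one shadowed name
    # ``item`` and one purely local name ``tmp``:
    #   t_1 = l_item        # alias the outer value
    #   l_tmp = missing     # predeclare the local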
def pop_scope(self, aliases, frame):
"""Restore all aliases and delete unused variables."""
for name, alias in aliases.iteritems():
self.writeline('l_%s = %s' % (name, alias))
to_delete = set()
for name in frame.identifiers.declared_locally:
if name not in aliases:
to_delete.add('l_' + name)
if to_delete:
# we cannot use the del statement here because enclosed
# scopes can trigger a SyntaxError:
# a = 42; b = lambda: a; del a
self.writeline(' = '.join(to_delete) + ' = missing')
def function_scoping(self, node, frame, children=None,
find_special=True):
"""In Jinja a few statements require the help of anonymous
functions. Those are currently macros and call blocks and in
        the future also recursive loops.  As there is currently a
        technical limitation that doesn't allow reading and writing a
        variable in a scope where the initial value is coming from an
        outer scope, this function tries to fall back with a common
        error message.  Additionally the frame passed is modified so
        that the arguments are collected and callers are looked up.
This will return the modified frame.
"""
# we have to iterate twice over it, make sure that works
if children is None:
children = node.iter_child_nodes()
children = list(children)
func_frame = frame.inner()
func_frame.inspect(children, hard_scope=True)
# variables that are undeclared (accessed before declaration) and
# declared locally *and* part of an outside scope raise a template
# assertion error. Reason: we can't generate reasonable code from
# it without aliasing all the variables.
# this could be fixed in Python 3 where we have the nonlocal
# keyword or if we switch to bytecode generation
        overridden_closure_vars = (
            func_frame.identifiers.undeclared &
            func_frame.identifiers.declared &
            (func_frame.identifiers.declared_locally |
             func_frame.identifiers.declared_parameter)
        )
        if overridden_closure_vars:
            self.fail('It\'s not possible to set and access variables '
                      'derived from an outer scope! (affects: %s)' %
                      ', '.join(sorted(overridden_closure_vars)), node.lineno)
# remove variables from a closure from the frame's undeclared
# identifiers.
func_frame.identifiers.undeclared -= (
func_frame.identifiers.undeclared &
func_frame.identifiers.declared
)
# no special variables for this scope, abort early
if not find_special:
return func_frame
func_frame.accesses_kwargs = False
func_frame.accesses_varargs = False
func_frame.accesses_caller = False
func_frame.arguments = args = ['l_' + x.name for x in node.args]
undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))
if 'caller' in undeclared:
func_frame.accesses_caller = True
func_frame.identifiers.add_special('caller')
args.append('l_caller')
if 'kwargs' in undeclared:
func_frame.accesses_kwargs = True
func_frame.identifiers.add_special('kwargs')
args.append('l_kwargs')
if 'varargs' in undeclared:
func_frame.accesses_varargs = True
func_frame.identifiers.add_special('varargs')
args.append('l_varargs')
return func_frame
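    # After function_scoping() on ``{% macro m(x) %}`` whose body uses
    # ``caller``, ``func_frame.arguments`` ends up as ['l_x', 'l_caller']
    # (a sketch of the bookkeeping above).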
def macro_body(self, node, frame, children=None):
"""Dump the function def of a macro or call block."""
frame = self.function_scoping(node, frame, children)
# macros are delayed, they never require output checks
frame.require_output_check = False
args = frame.arguments
# XXX: this is an ugly fix for the loop nesting bug
# (tests.test_old_bugs.test_loop_call_bug). This works around
        # an identifier nesting problem we have in general. It's just more
# likely to happen in loops which is why we work around it. The
# real solution would be "nonlocal" all the identifiers that are
# leaking into a new python frame and might be used both unassigned
# and assigned.
if 'loop' in frame.identifiers.declared:
args = args + ['l_loop=l_loop']
self.writeline('def macro(%s):' % ', '.join(args), node)
self.indent()
self.buffer(frame)
self.pull_locals(frame)
self.blockvisit(node.body, frame)
self.return_buffer_contents(frame)
self.outdent()
return frame
def macro_def(self, node, frame):
"""Dump the macro definition for the def created by macro_body."""
arg_tuple = ', '.join(repr(x.name) for x in node.args)
name = getattr(node, 'name', None)
if len(node.args) == 1:
arg_tuple += ','
self.write('Macro(environment, macro, %r, (%s), (' %
(name, arg_tuple))
for arg in node.defaults:
self.visit(arg, frame)
self.write(', ')
self.write('), %r, %r, %r)' % (
bool(frame.accesses_kwargs),
bool(frame.accesses_varargs),
bool(frame.accesses_caller)
))
def position(self, node):
"""Return a human readable position for the node."""
rv = 'line %d' % node.lineno
if self.name is not None:
rv += ' in ' + repr(self.name)
return rv
# -- Statement Visitors
def visit_Template(self, node, frame=None):
assert frame is None, 'no root frame allowed'
eval_ctx = EvalContext(self.environment, self.name)
from jinja2.runtime import __all__ as exported
self.writeline('from __future__ import division')
self.writeline('from jinja2.runtime import ' + ', '.join(exported))
if not unoptimize_before_dead_code:
self.writeline('dummy = lambda *x: None')
# if we want a deferred initialization we cannot move the
# environment into a local name
envenv = not self.defer_init and ', environment=environment' or ''
# do we have an extends tag at all? If not, we can save some
# overhead by just not processing any inheritance code.
have_extends = node.find(nodes.Extends) is not None
# find all blocks
for block in node.find_all(nodes.Block):
if block.name in self.blocks:
self.fail('block %r defined twice' % block.name, block.lineno)
self.blocks[block.name] = block
# find all imports and import them
for import_ in node.find_all(nodes.ImportedName):
if import_.importname not in self.import_aliases:
imp = import_.importname
self.import_aliases[imp] = alias = self.temporary_identifier()
if '.' in imp:
module, obj = imp.rsplit('.', 1)
self.writeline('from %s import %s as %s' %
(module, obj, alias))
else:
self.writeline('import %s as %s' % (imp, alias))
# add the load name
self.writeline('name = %r' % self.name)
# generate the root render function.
self.writeline('def root(context%s):' % envenv, extra=1)
# process the root
frame = Frame(eval_ctx)
frame.inspect(node.body)
frame.toplevel = frame.rootlevel = True
frame.require_output_check = have_extends and not self.has_known_extends
self.indent()
if have_extends:
self.writeline('parent_template = None')
if 'self' in find_undeclared(node.body, ('self',)):
frame.identifiers.add_special('self')
self.writeline('l_self = TemplateReference(context)')
self.pull_locals(frame)
self.pull_dependencies(node.body)
self.blockvisit(node.body, frame)
self.outdent()
# make sure that the parent root is called.
if have_extends:
if not self.has_known_extends:
self.indent()
self.writeline('if parent_template is not None:')
self.indent()
self.writeline('for event in parent_template.'
'root_render_func(context):')
self.indent()
self.writeline('yield event')
self.outdent(2 + (not self.has_known_extends))
# at this point we now have the blocks collected and can visit them too.
for name, block in self.blocks.iteritems():
block_frame = Frame(eval_ctx)
block_frame.inspect(block.body)
block_frame.block = name
self.writeline('def block_%s(context%s):' % (name, envenv),
block, 1)
self.indent()
undeclared = find_undeclared(block.body, ('self', 'super'))
if 'self' in undeclared:
block_frame.identifiers.add_special('self')
self.writeline('l_self = TemplateReference(context)')
if 'super' in undeclared:
block_frame.identifiers.add_special('super')
self.writeline('l_super = context.super(%r, '
'block_%s)' % (name, name))
self.pull_locals(block_frame)
self.pull_dependencies(block.body)
self.blockvisit(block.body, block_frame)
self.outdent()
self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
for x in self.blocks),
extra=1)
# add a function that returns the debug info
self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
in self.debug_info))
def visit_Block(self, node, frame):
"""Call a block and register it for the template."""
level = 1
if frame.toplevel:
# if we know that we are a child template, there is no need to
# check if we are one
if self.has_known_extends:
return
if self.extends_so_far > 0:
self.writeline('if parent_template is None:')
self.indent()
level += 1
context = node.scoped and 'context.derived(locals())' or 'context'
self.writeline('for event in context.blocks[%r][0](%s):' % (
node.name, context), node)
self.indent()
self.simple_write('event', frame)
self.outdent(level)
def visit_Extends(self, node, frame):
"""Calls the extender."""
if not frame.toplevel:
self.fail('cannot use extend from a non top-level scope',
node.lineno)
# if the number of extends statements in general is zero so
# far, we don't have to add a check if something extended
# the template before this one.
if self.extends_so_far > 0:
# if we have a known extends we just add a template runtime
# error into the generated code. We could catch that at compile
        # time too, but we prefer not to confuse users by throwing the
# same error at different times just "because we can".
if not self.has_known_extends:
self.writeline('if parent_template is not None:')
self.indent()
self.writeline('raise TemplateRuntimeError(%r)' %
'extended multiple times')
self.outdent()
# if we have a known extends already we don't need that code here
# as we know that the template execution will end here.
if self.has_known_extends:
raise CompilerExit()
self.writeline('parent_template = environment.get_template(', node)
self.visit(node.template, frame)
self.write(', %r)' % self.name)
self.writeline('for name, parent_block in parent_template.'
'blocks.%s():' % dict_item_iter)
self.indent()
self.writeline('context.blocks.setdefault(name, []).'
'append(parent_block)')
self.outdent()
# if this extends statement was in the root level we can take
# advantage of that information and simplify the generated code
# in the top level from this point onwards
if frame.rootlevel:
self.has_known_extends = True
# and now we have one more
self.extends_so_far += 1
def visit_Include(self, node, frame):
"""Handles includes."""
if node.with_context:
self.unoptimize_scope(frame)
if node.ignore_missing:
self.writeline('try:')
self.indent()
func_name = 'get_or_select_template'
if isinstance(node.template, nodes.Const):
if isinstance(node.template.value, basestring):
func_name = 'get_template'
elif isinstance(node.template.value, (tuple, list)):
func_name = 'select_template'
elif isinstance(node.template, (nodes.Tuple, nodes.List)):
func_name = 'select_template'
self.writeline('template = environment.%s(' % func_name, node)
self.visit(node.template, frame)
self.write(', %r)' % self.name)
if node.ignore_missing:
self.outdent()
self.writeline('except TemplateNotFound:')
self.indent()
self.writeline('pass')
self.outdent()
self.writeline('else:')
self.indent()
if node.with_context:
self.writeline('for event in template.root_render_func('
'template.new_context(context.parent, True, '
'locals())):')
else:
self.writeline('for event in template.module._body_stream:')
self.indent()
self.simple_write('event', frame)
self.outdent()
if node.ignore_missing:
self.outdent()
def visit_Import(self, node, frame):
"""Visit regular imports."""
if node.with_context:
self.unoptimize_scope(frame)
self.writeline('l_%s = ' % node.target, node)
if frame.toplevel:
self.write('context.vars[%r] = ' % node.target)
self.write('environment.get_template(')
self.visit(node.template, frame)
self.write(', %r).' % self.name)
if node.with_context:
self.write('make_module(context.parent, True, locals())')
else:
self.write('module')
if frame.toplevel and not node.target.startswith('_'):
self.writeline('context.exported_vars.discard(%r)' % node.target)
frame.assigned_names.add(node.target)
def visit_FromImport(self, node, frame):
"""Visit named imports."""
self.newline(node)
self.write('included_template = environment.get_template(')
self.visit(node.template, frame)
self.write(', %r).' % self.name)
if node.with_context:
self.write('make_module(context.parent, True)')
else:
self.write('module')
var_names = []
discarded_names = []
for name in node.names:
if isinstance(name, tuple):
name, alias = name
else:
alias = name
self.writeline('l_%s = getattr(included_template, '
'%r, missing)' % (alias, name))
self.writeline('if l_%s is missing:' % alias)
self.indent()
self.writeline('l_%s = environment.undefined(%r %% '
'included_template.__name__, '
'name=%r)' %
(alias, 'the template %%r (imported on %s) does '
'not export the requested name %s' % (
self.position(node),
repr(name)
), name))
self.outdent()
if frame.toplevel:
var_names.append(alias)
if not alias.startswith('_'):
discarded_names.append(alias)
frame.assigned_names.add(alias)
if var_names:
if len(var_names) == 1:
name = var_names[0]
self.writeline('context.vars[%r] = l_%s' % (name, name))
else:
self.writeline('context.vars.update({%s})' % ', '.join(
'%r: l_%s' % (name, name) for name in var_names
))
if discarded_names:
if len(discarded_names) == 1:
self.writeline('context.exported_vars.discard(%r)' %
discarded_names[0])
else:
self.writeline('context.exported_vars.difference_'
'update((%s))' % ', '.join(map(repr, discarded_names)))
def visit_For(self, node, frame):
# when calculating the nodes for the inner frame we have to exclude
# the iterator contents from it
children = node.iter_child_nodes(exclude=('iter',))
if node.recursive:
loop_frame = self.function_scoping(node, frame, children,
find_special=False)
else:
loop_frame = frame.inner()
loop_frame.inspect(children)
# try to figure out if we have an extended loop. An extended loop
        # is necessary if the loop is in recursive mode or if the special loop
# variable is accessed in the body.
extended_loop = node.recursive or 'loop' in \
find_undeclared(node.iter_child_nodes(
only=('body',)), ('loop',))
        # if we don't have a recursive loop we have to find the shadowed
# variables at that point. Because loops can be nested but the loop
# variable is a special one we have to enforce aliasing for it.
if not node.recursive:
aliases = self.push_scope(loop_frame, ('loop',))
# otherwise we set up a buffer and add a function def
else:
self.writeline('def loop(reciter, loop_render_func):', node)
self.indent()
self.buffer(loop_frame)
aliases = {}
# make sure the loop variable is a special one and raise a template
# assertion error if a loop tries to write to loop
if extended_loop:
loop_frame.identifiers.add_special('loop')
for name in node.find_all(nodes.Name):
if name.ctx == 'store' and name.name == 'loop':
self.fail('Can\'t assign to special loop variable '
'in for-loop target', name.lineno)
self.pull_locals(loop_frame)
if node.else_:
iteration_indicator = self.temporary_identifier()
self.writeline('%s = 1' % iteration_indicator)
# Create a fake parent loop if the else or test section of a
# loop is accessing the special loop variable and no parent loop
# exists.
if 'loop' not in aliases and 'loop' in find_undeclared(
node.iter_child_nodes(only=('else_', 'test')), ('loop',)):
self.writeline("l_loop = environment.undefined(%r, name='loop')" %
("'loop' is undefined. the filter section of a loop as well "
"as the else block doesn't have access to the special 'loop'"
" variable of the current loop. Because there is no parent "
"loop it's undefined. Happened in loop on %s" %
self.position(node)))
self.writeline('for ', node)
self.visit(node.target, loop_frame)
self.write(extended_loop and ', l_loop in LoopContext(' or ' in ')
        # if we have an extended loop and a node test, we filter in the
# "outer frame".
if extended_loop and node.test is not None:
self.write('(')
self.visit(node.target, loop_frame)
self.write(' for ')
self.visit(node.target, loop_frame)
self.write(' in ')
if node.recursive:
self.write('reciter')
else:
self.visit(node.iter, loop_frame)
self.write(' if (')
test_frame = loop_frame.copy()
self.visit(node.test, test_frame)
self.write('))')
elif node.recursive:
self.write('reciter')
else:
self.visit(node.iter, loop_frame)
if node.recursive:
self.write(', recurse=loop_render_func):')
else:
self.write(extended_loop and '):' or ':')
        # tests in non-extended loops become a continue
if not extended_loop and node.test is not None:
self.indent()
self.writeline('if not ')
self.visit(node.test, loop_frame)
self.write(':')
self.indent()
self.writeline('continue')
self.outdent(2)
self.indent()
self.blockvisit(node.body, loop_frame)
if node.else_:
self.writeline('%s = 0' % iteration_indicator)
self.outdent()
if node.else_:
self.writeline('if %s:' % iteration_indicator)
self.indent()
self.blockvisit(node.else_, loop_frame)
self.outdent()
# reset the aliases if there are any.
if not node.recursive:
self.pop_scope(aliases, loop_frame)
# if the node was recursive we have to return the buffer contents
# and start the iteration code
if node.recursive:
self.return_buffer_contents(loop_frame)
self.outdent()
self.start_write(frame, node)
self.write('loop(')
self.visit(node.iter, frame)
self.write(', loop)')
self.end_write(frame)
def visit_If(self, node, frame):
if_frame = frame.soft()
self.writeline('if ', node)
self.visit(node.test, if_frame)
self.write(':')
self.indent()
self.blockvisit(node.body, if_frame)
self.outdent()
if node.else_:
self.writeline('else:')
self.indent()
self.blockvisit(node.else_, if_frame)
self.outdent()
def visit_Macro(self, node, frame):
macro_frame = self.macro_body(node, frame)
self.newline()
if frame.toplevel:
if not node.name.startswith('_'):
self.write('context.exported_vars.add(%r)' % node.name)
self.writeline('context.vars[%r] = ' % node.name)
self.write('l_%s = ' % node.name)
self.macro_def(node, macro_frame)
frame.assigned_names.add(node.name)
def visit_CallBlock(self, node, frame):
children = node.iter_child_nodes(exclude=('call',))
call_frame = self.macro_body(node, frame, children)
self.writeline('caller = ')
self.macro_def(node, call_frame)
self.start_write(frame, node)
self.visit_Call(node.call, call_frame, forward_caller=True)
self.end_write(frame)
def visit_FilterBlock(self, node, frame):
filter_frame = frame.inner()
filter_frame.inspect(node.iter_child_nodes())
aliases = self.push_scope(filter_frame)
self.pull_locals(filter_frame)
self.buffer(filter_frame)
self.blockvisit(node.body, filter_frame)
self.start_write(frame, node)
self.visit_Filter(node.filter, filter_frame)
self.end_write(frame)
self.pop_scope(aliases, filter_frame)
def visit_ExprStmt(self, node, frame):
self.newline(node)
self.visit(node.node, frame)
def visit_Output(self, node, frame):
# if we have a known extends statement, we don't output anything
# if we are in a require_output_check section
if self.has_known_extends and frame.require_output_check:
return
if self.environment.finalize:
finalize = lambda x: unicode(self.environment.finalize(x))
else:
finalize = unicode
self.newline(node)
# if we are inside a frame that requires output checking, we do so
outdent_later = False
if frame.require_output_check:
self.writeline('if parent_template is None:')
self.indent()
outdent_later = True
# try to evaluate as many chunks as possible into a static
# string at compile time.
body = []
for child in node.nodes:
try:
const = child.as_const(frame.eval_ctx)
except nodes.Impossible:
body.append(child)
continue
            # the frame can't be volatile here, because otherwise the
# as_const() function would raise an Impossible exception
# at that point.
try:
if frame.eval_ctx.autoescape:
if hasattr(const, '__html__'):
const = const.__html__()
else:
const = escape(const)
const = finalize(const)
except:
# if something goes wrong here we evaluate the node
# at runtime for easier debugging
body.append(child)
continue
if body and isinstance(body[-1], list):
body[-1].append(const)
else:
body.append([const])
# if we have less than 3 nodes or a buffer we yield or extend/append
if len(body) < 3 or frame.buffer is not None:
if frame.buffer is not None:
# for one item we append, for more we extend
if len(body) == 1:
self.writeline('%s.append(' % frame.buffer)
else:
self.writeline('%s.extend((' % frame.buffer)
self.indent()
for item in body:
if isinstance(item, list):
val = repr(concat(item))
if frame.buffer is None:
self.writeline('yield ' + val)
else:
self.writeline(val + ', ')
else:
if frame.buffer is None:
self.writeline('yield ', item)
else:
self.newline(item)
close = 1
if frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' escape or to_string)(')
elif frame.eval_ctx.autoescape:
self.write('escape(')
else:
self.write('to_string(')
if self.environment.finalize is not None:
self.write('environment.finalize(')
close += 1
self.visit(item, frame)
self.write(')' * close)
if frame.buffer is not None:
self.write(', ')
if frame.buffer is not None:
# close the open parentheses
self.outdent()
self.writeline(len(body) == 1 and ')' or '))')
# otherwise we create a format string as this is faster in that case
else:
format = []
arguments = []
for item in body:
if isinstance(item, list):
format.append(concat(item).replace('%', '%%'))
else:
format.append('%s')
arguments.append(item)
self.writeline('yield ')
self.write(repr(concat(format)) + ' % (')
idx = -1
self.indent()
for argument in arguments:
self.newline(argument)
close = 0
if frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' escape or to_string)(')
close += 1
elif frame.eval_ctx.autoescape:
self.write('escape(')
close += 1
if self.environment.finalize is not None:
self.write('environment.finalize(')
close += 1
self.visit(argument, frame)
self.write(')' * close + ', ')
self.outdent()
self.writeline(')')
if outdent_later:
self.outdent()
def visit_Assign(self, node, frame):
self.newline(node)
# toplevel assignments however go into the local namespace and
# the current template's context. We create a copy of the frame
# here and add a set so that the Name visitor can add the assigned
# names here.
if frame.toplevel:
assignment_frame = frame.copy()
assignment_frame.toplevel_assignments = set()
else:
assignment_frame = frame
self.visit(node.target, assignment_frame)
self.write(' = ')
self.visit(node.node, frame)
# make sure toplevel assignments are added to the context.
if frame.toplevel:
public_names = [x for x in assignment_frame.toplevel_assignments
if not x.startswith('_')]
if len(assignment_frame.toplevel_assignments) == 1:
name = next(iter(assignment_frame.toplevel_assignments))
self.writeline('context.vars[%r] = l_%s' % (name, name))
else:
self.writeline('context.vars.update({')
for idx, name in enumerate(assignment_frame.toplevel_assignments):
if idx:
self.write(', ')
self.write('%r: l_%s' % (name, name))
self.write('})')
if public_names:
if len(public_names) == 1:
self.writeline('context.exported_vars.add(%r)' %
public_names[0])
else:
self.writeline('context.exported_vars.update((%s))' %
', '.join(map(repr, public_names)))
# -- Expression Visitors
def visit_Name(self, node, frame):
if node.ctx == 'store' and frame.toplevel:
frame.toplevel_assignments.add(node.name)
self.write('l_' + node.name)
frame.assigned_names.add(node.name)
def visit_Const(self, node, frame):
val = node.value
if isinstance(val, float):
self.write(str(val))
else:
self.write(repr(val))
def visit_TemplateData(self, node, frame):
self.write(repr(node.as_const(frame.eval_ctx)))
def visit_Tuple(self, node, frame):
self.write('(')
idx = -1
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item, frame)
self.write(idx == 0 and ',)' or ')')
def visit_List(self, node, frame):
self.write('[')
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item, frame)
self.write(']')
def visit_Dict(self, node, frame):
self.write('{')
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item.key, frame)
self.write(': ')
self.visit(item.value, frame)
self.write('}')
def binop(operator):
def visitor(self, node, frame):
self.write('(')
self.visit(node.left, frame)
self.write(' %s ' % operator)
self.visit(node.right, frame)
self.write(')')
return visitor
def uaop(operator):
def visitor(self, node, frame):
self.write('(' + operator)
self.visit(node.node, frame)
self.write(')')
return visitor
visit_Add = binop('+')
visit_Sub = binop('-')
visit_Mul = binop('*')
visit_Div = binop('/')
visit_FloorDiv = binop('//')
visit_Pow = binop('**')
visit_Mod = binop('%')
visit_And = binop('and')
visit_Or = binop('or')
visit_Pos = uaop('+')
visit_Neg = uaop('-')
visit_Not = uaop('not ')
del binop, uaop
def visit_Concat(self, node, frame):
if frame.eval_ctx.volatile:
func_name = '(context.eval_ctx.volatile and' \
' markup_join or unicode_join)'
elif frame.eval_ctx.autoescape:
func_name = 'markup_join'
else:
func_name = 'unicode_join'
self.write('%s((' % func_name)
for arg in node.nodes:
self.visit(arg, frame)
self.write(', ')
self.write('))')
def visit_Compare(self, node, frame):
self.visit(node.expr, frame)
for op in node.ops:
self.visit(op, frame)
def visit_Operand(self, node, frame):
self.write(' %s ' % operators[node.op])
self.visit(node.expr, frame)
def visit_Getattr(self, node, frame):
self.write('environment.getattr(')
self.visit(node.node, frame)
self.write(', %r)' % node.attr)
def visit_Getitem(self, node, frame):
# slices bypass the environment getitem method.
if isinstance(node.arg, nodes.Slice):
self.visit(node.node, frame)
self.write('[')
self.visit(node.arg, frame)
self.write(']')
else:
self.write('environment.getitem(')
self.visit(node.node, frame)
self.write(', ')
self.visit(node.arg, frame)
self.write(')')
def visit_Slice(self, node, frame):
if node.start is not None:
self.visit(node.start, frame)
self.write(':')
if node.stop is not None:
self.visit(node.stop, frame)
if node.step is not None:
self.write(':')
self.visit(node.step, frame)
def visit_Filter(self, node, frame):
self.write(self.filters[node.name] + '(')
func = self.environment.filters.get(node.name)
if func is None:
self.fail('no filter named %r' % node.name, node.lineno)
if getattr(func, 'contextfilter', False):
self.write('context, ')
elif getattr(func, 'evalcontextfilter', False):
self.write('context.eval_ctx, ')
elif getattr(func, 'environmentfilter', False):
self.write('environment, ')
# if the filter node is None we are inside a filter block
# and want to write to the current buffer
if node.node is not None:
self.visit(node.node, frame)
elif frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' Markup(concat(%s)) or concat(%s))' %
(frame.buffer, frame.buffer))
elif frame.eval_ctx.autoescape:
self.write('Markup(concat(%s))' % frame.buffer)
else:
self.write('concat(%s)' % frame.buffer)
self.signature(node, frame)
self.write(')')
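    # For ``{{ name|upper }}`` this emits roughly ``t_1(l_name)`` where
    # pull_dependencies() bound ``t_1 = environment.filters['upper']``
    # earlier (a sketch; the temporary name is arbitrary).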
def visit_Test(self, node, frame):
self.write(self.tests[node.name] + '(')
if node.name not in self.environment.tests:
self.fail('no test named %r' % node.name, node.lineno)
self.visit(node.node, frame)
self.signature(node, frame)
self.write(')')
def visit_CondExpr(self, node, frame):
def write_expr2():
if node.expr2 is not None:
return self.visit(node.expr2, frame)
self.write('environment.undefined(%r)' % ('the inline if-'
'expression on %s evaluated to false and '
'no else section was defined.' % self.position(node)))
if not have_condexpr:
self.write('((')
self.visit(node.test, frame)
self.write(') and (')
self.visit(node.expr1, frame)
self.write(',) or (')
write_expr2()
self.write(',))[0]')
else:
self.write('(')
self.visit(node.expr1, frame)
self.write(' if ')
self.visit(node.test, frame)
self.write(' else ')
write_expr2()
self.write(')')
def visit_Call(self, node, frame, forward_caller=False):
if self.environment.sandboxed:
self.write('environment.call(context, ')
else:
self.write('context.call(')
self.visit(node.node, frame)
extra_kwargs = forward_caller and {'caller': 'caller'} or None
self.signature(node, frame, extra_kwargs)
self.write(')')
def visit_Keyword(self, node, frame):
self.write(node.key + '=')
self.visit(node.value, frame)
# -- Unused nodes for extensions
def visit_MarkSafe(self, node, frame):
self.write('Markup(')
self.visit(node.expr, frame)
self.write(')')
def visit_EnvironmentAttribute(self, node, frame):
self.write('environment.' + node.name)
def visit_ExtensionAttribute(self, node, frame):
self.write('environment.extensions[%r].%s' % (node.identifier, node.name))
def visit_ImportedName(self, node, frame):
self.write(self.import_aliases[node.importname])
def visit_InternalName(self, node, frame):
self.write(node.name)
def visit_ContextReference(self, node, frame):
self.write('context')
def visit_Continue(self, node, frame):
self.writeline('continue', node)
def visit_Break(self, node, frame):
self.writeline('break', node)
def visit_Scope(self, node, frame):
scope_frame = frame.inner()
scope_frame.inspect(node.iter_child_nodes())
aliases = self.push_scope(scope_frame)
self.pull_locals(scope_frame)
self.blockvisit(node.body, scope_frame)
self.pop_scope(aliases, scope_frame)
def visit_EvalContextModifier(self, node, frame):
for keyword in node.options:
self.writeline('context.eval_ctx.%s = ' % keyword.key)
self.visit(keyword.value, frame)
try:
val = keyword.value.as_const(frame.eval_ctx)
except nodes.Impossible:
frame.eval_ctx.volatile = True
else:
setattr(frame.eval_ctx, keyword.key, val)
def visit_ScopedEvalContextModifier(self, node, frame):
old_ctx_name = self.temporary_identifier()
safed_ctx = frame.eval_ctx.save()
self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
self.visit_EvalContextModifier(node, frame)
for child in node.body:
self.visit(child, frame)
frame.eval_ctx.revert(safed_ctx)
self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
| apache-2.0 |
tysonclugg/django | tests/check_framework/test_templates.py | 102 | 3666 | from copy import copy, deepcopy
from django.core.checks.templates import E001, E002
from django.test import SimpleTestCase
from django.test.utils import override_settings
class CheckTemplateSettingsAppDirsTest(SimpleTestCase):
TEMPLATES_APP_DIRS_AND_LOADERS = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'loaders': ['django.template.loaders.filesystem.Loader'],
},
},
]
@property
def func(self):
from django.core.checks.templates import check_setting_app_dirs_loaders
return check_setting_app_dirs_loaders
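    # The deferred import presumably keeps the check function from being
    # bound at class-definition time; each test then simply calls
    # self.func(None) and asserts on the returned error list.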
@override_settings(TEMPLATES=TEMPLATES_APP_DIRS_AND_LOADERS)
def test_app_dirs_and_loaders(self):
"""
Error if template loaders are specified and APP_DIRS is True.
"""
self.assertEqual(self.func(None), [E001])
def test_app_dirs_removed(self):
TEMPLATES = deepcopy(self.TEMPLATES_APP_DIRS_AND_LOADERS)
del TEMPLATES[0]['APP_DIRS']
with self.settings(TEMPLATES=TEMPLATES):
self.assertEqual(self.func(None), [])
def test_loaders_removed(self):
TEMPLATES = deepcopy(self.TEMPLATES_APP_DIRS_AND_LOADERS)
del TEMPLATES[0]['OPTIONS']['loaders']
with self.settings(TEMPLATES=TEMPLATES):
self.assertEqual(self.func(None), [])
class CheckTemplateStringIfInvalidTest(SimpleTestCase):
TEMPLATES_STRING_IF_INVALID = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'string_if_invalid': False,
},
},
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'string_if_invalid': 42,
},
},
]
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.error1 = copy(E002)
cls.error2 = copy(E002)
string_if_invalid1 = cls.TEMPLATES_STRING_IF_INVALID[0]['OPTIONS']['string_if_invalid']
string_if_invalid2 = cls.TEMPLATES_STRING_IF_INVALID[1]['OPTIONS']['string_if_invalid']
cls.error1.msg = cls.error1.msg.format(string_if_invalid1, type(string_if_invalid1).__name__)
cls.error2.msg = cls.error2.msg.format(string_if_invalid2, type(string_if_invalid2).__name__)
@property
def func(self):
from django.core.checks.templates import check_string_if_invalid_is_string
return check_string_if_invalid_is_string
@override_settings(TEMPLATES=TEMPLATES_STRING_IF_INVALID)
def test_string_if_invalid_not_string(self):
self.assertEqual(self.func(None), [self.error1, self.error2])
def test_string_if_invalid_first_is_string(self):
TEMPLATES = deepcopy(self.TEMPLATES_STRING_IF_INVALID)
TEMPLATES[0]['OPTIONS']['string_if_invalid'] = 'test'
with self.settings(TEMPLATES=TEMPLATES):
self.assertEqual(self.func(None), [self.error2])
def test_string_if_invalid_both_are_strings(self):
TEMPLATES = deepcopy(self.TEMPLATES_STRING_IF_INVALID)
TEMPLATES[0]['OPTIONS']['string_if_invalid'] = 'test'
TEMPLATES[1]['OPTIONS']['string_if_invalid'] = 'test'
with self.settings(TEMPLATES=TEMPLATES):
self.assertEqual(self.func(None), [])
def test_string_if_invalid_not_specified(self):
TEMPLATES = deepcopy(self.TEMPLATES_STRING_IF_INVALID)
del TEMPLATES[1]['OPTIONS']['string_if_invalid']
with self.settings(TEMPLATES=TEMPLATES):
self.assertEqual(self.func(None), [self.error1])
| bsd-3-clause |
hslee16/ansible-modules-extras | cloud/cloudstack/cs_pod.py | 27 | 8054 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: cs_pod
short_description: Manages pods on Apache CloudStack based clouds.
description:
- Create, update, delete pods.
version_added: "2.1"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the pod.
required: true
id:
description:
      - UUID of the existing pod.
default: null
required: false
start_ip:
description:
- Starting IP address for the Pod.
- Required on C(state=present)
default: null
required: false
end_ip:
description:
- Ending IP address for the Pod.
default: null
required: false
netmask:
description:
- Netmask for the Pod.
- Required on C(state=present)
default: null
required: false
gateway:
description:
- Gateway for the Pod.
- Required on C(state=present)
default: null
required: false
zone:
description:
- Name of the zone in which the pod belongs to.
- If not set, default zone is used.
required: false
default: null
state:
description:
- State of the pod.
required: false
default: 'present'
choices: [ 'present', 'enabled', 'disabled', 'absent' ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Ensure a pod is present
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
start_ip: 10.100.10.101
gateway: 10.100.10.1
netmask: 255.255.255.0
# Ensure a pod is disabled
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: disabled
# Ensure a pod is enabled
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: enabled
# Ensure a pod is absent
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: absent
'''
RETURN = '''
---
id:
description: UUID of the pod.
returned: success
type: string
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the pod.
returned: success
type: string
sample: pod01
start_ip:
description: Starting IP of the pod.
returned: success
type: string
sample: 10.100.1.101
end_ip:
description: Ending IP of the pod.
returned: success
type: string
sample: 10.100.1.254
netmask:
description: Netmask of the pod.
returned: success
type: string
sample: 255.255.255.0
gateway:
description: Gateway of the pod.
returned: success
type: string
sample: 10.100.1.1
allocation_state:
description: State of the pod.
returned: success
type: string
sample: Enabled
zone:
description: Name of zone the pod is in.
returned: success
type: string
sample: ch-gva-2
'''
try:
from cs import CloudStack, CloudStackException, read_config
has_lib_cs = True
except ImportError:
has_lib_cs = False
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackPod(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackPod, self).__init__(module)
self.returns = {
'endip': 'end_ip',
'startip': 'start_ip',
'gateway': 'gateway',
'netmask': 'netmask',
'allocationstate': 'allocation_state',
}
self.pod = None
def _get_common_pod_args(self):
args = {}
args['name'] = self.module.params.get('name')
args['zoneid'] = self.get_zone(key='id')
args['startip'] = self.module.params.get('start_ip')
args['endip'] = self.module.params.get('end_ip')
args['netmask'] = self.module.params.get('netmask')
args['gateway'] = self.module.params.get('gateway')
state = self.module.params.get('state')
if state in [ 'enabled', 'disabled']:
args['allocationstate'] = state.capitalize()
return args
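    # A sketch of the resulting args for state=disabled (values are
    # placeholders, not real output):
    #   {'name': 'pod1', 'zoneid': '<zone uuid>', 'startip': '10.100.10.101',
    #    'endip': None, 'netmask': '255.255.255.0', 'gateway': '10.100.10.1',
    #    'allocationstate': 'Disabled'}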
def get_pod(self):
if not self.pod:
args = {}
uuid = self.module.params.get('id')
if uuid:
args['id'] = uuid
args['zoneid'] = self.get_zone(key='id')
pods = self.cs.listPods(**args)
if pods:
self.pod = pods['pod'][0]
return self.pod
args['name'] = self.module.params.get('name')
args['zoneid'] = self.get_zone(key='id')
pods = self.cs.listPods(**args)
if pods:
self.pod = pods['pod'][0]
return self.pod
def present_pod(self):
pod = self.get_pod()
if pod:
pod = self._update_pod()
else:
pod = self._create_pod()
return pod
def _create_pod(self):
required_params = [
'start_ip',
'netmask',
'gateway',
]
self.module.fail_on_missing_params(required_params=required_params)
pod = None
self.result['changed'] = True
args = self._get_common_pod_args()
if not self.module.check_mode:
res = self.cs.createPod(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
pod = res['pod']
return pod
def _update_pod(self):
pod = self.get_pod()
args = self._get_common_pod_args()
args['id'] = pod['id']
if self.has_changed(args, pod):
self.result['changed'] = True
if not self.module.check_mode:
res = self.cs.updatePod(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
pod = res['pod']
return pod
def absent_pod(self):
pod = self.get_pod()
if pod:
self.result['changed'] = True
args = {}
args['id'] = pod['id']
if not self.module.check_mode:
res = self.cs.deletePod(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
return pod
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
id = dict(default=None),
name = dict(required=True),
gateway = dict(default=None),
netmask = dict(default=None),
start_ip = dict(default=None),
end_ip = dict(default=None),
zone = dict(default=None),
state = dict(choices=['present', 'enabled', 'disabled', 'absent'], default='present'),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
if not has_lib_cs:
module.fail_json(msg="python library cs required: pip install cs")
try:
acs_pod = AnsibleCloudStackPod(module)
state = module.params.get('state')
if state in ['absent']:
pod = acs_pod.absent_pod()
else:
pod = acs_pod.present_pod()
result = acs_pod.get_result(pod)
except CloudStackException as e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
tonnrueter/pymca_devel | PyMca/PyMcaPlugins/XASBatchPlugin.py | 1 | 2033 | try:
from PyMca import Plugin1DBase
except ImportError:
from . import Plugin1DBase
try:
from PyMca import XASBatch
except ImportError:
print("XMCDWindow importing from somewhere else")
import XASBatch
class XASBatchPlugin(Plugin1DBase.Plugin1DBase):
def __init__(self, plotWindow, **kw):
Plugin1DBase.Plugin1DBase.__init__(self, plotWindow, **kw)
self.methodDict = {
'showDialog': [self.showDialog, 'Displays the XASBatchDialog', None]
}
self.widget = None
#Methods to be implemented by the plugin
def getMethods(self, plottype=None):
"""
A list with the NAMES associated to the callable methods
that are applicable to the specified plot.
Plot type can be "SCAN", "MCA", None, ...
"""
names = list(self.methodDict.keys())
names.sort()
return names
def getMethodToolTip(self, name):
"""
Returns the help associated to the particular method name or None.
"""
return self.methodDict[name][1]
def getMethodPixmap(self, name):
"""
Returns the pixmap associated to the particular method name or None.
"""
return self.methodDict[name][2]
def applyMethod(self, name):
"""
The plugin is asked to apply the method associated to name.
"""
self.methodDict[name][0]()
return
def showDialog(self):
if self.widget is None:
self.widget = XASBatch.XASBatchDialog(None)
self.widget.show()
self.widget.raise_()
MENU_TEXT = "XAS Something"
def getPlugin1DInstance(plotWindow, **kw):
ob = XASBatchPlugin(plotWindow)
return ob
def main():
from PyMca import PyMcaQt as qt
from PyMca import Plot1D
app = qt.QApplication([])
plot = Plot1D.Plot1D()
plugin = getPlugin1DInstance(plot)
plugin.applyMethod(plugin.getMethods()[0])
print app.exec_()
if __name__ == '__main__':
main()
| gpl-2.0 |
vikatory/kbengine | kbe/res/scripts/common/Lib/test/test_base64.py | 86 | 29596 | import unittest
from test import support
import base64
import binascii
import os
import sys
import subprocess
import struct
from array import array
class LegacyBase64TestCase(unittest.TestCase):
# Legacy API is not as permissive as the modern API
def check_type_errors(self, f):
self.assertRaises(TypeError, f, "")
self.assertRaises(TypeError, f, [])
multidimensional = memoryview(b"1234").cast('B', (2, 2))
self.assertRaises(TypeError, f, multidimensional)
int_data = memoryview(b"1234").cast('I')
self.assertRaises(TypeError, f, int_data)
def test_encodebytes(self):
eq = self.assertEqual
eq(base64.encodebytes(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=\n")
eq(base64.encodebytes(b"a"), b"YQ==\n")
eq(base64.encodebytes(b"ab"), b"YWI=\n")
eq(base64.encodebytes(b"abc"), b"YWJj\n")
eq(base64.encodebytes(b""), b"")
eq(base64.encodebytes(b"abcdefghijklmnopqrstuvwxyz"
b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
b"0123456789!@#0^&*();:<>,. []{}"),
b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n")
# Non-bytes
eq(base64.encodebytes(bytearray(b'abc')), b'YWJj\n')
eq(base64.encodebytes(memoryview(b'abc')), b'YWJj\n')
eq(base64.encodebytes(array('B', b'abc')), b'YWJj\n')
self.check_type_errors(base64.encodebytes)
def test_decodebytes(self):
eq = self.assertEqual
eq(base64.decodebytes(b"d3d3LnB5dGhvbi5vcmc=\n"), b"www.python.org")
eq(base64.decodebytes(b"YQ==\n"), b"a")
eq(base64.decodebytes(b"YWI=\n"), b"ab")
eq(base64.decodebytes(b"YWJj\n"), b"abc")
eq(base64.decodebytes(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"),
b"abcdefghijklmnopqrstuvwxyz"
b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
b"0123456789!@#0^&*();:<>,. []{}")
eq(base64.decodebytes(b''), b'')
# Non-bytes
eq(base64.decodebytes(bytearray(b'YWJj\n')), b'abc')
eq(base64.decodebytes(memoryview(b'YWJj\n')), b'abc')
eq(base64.decodebytes(array('B', b'YWJj\n')), b'abc')
self.check_type_errors(base64.decodebytes)
def test_encode(self):
eq = self.assertEqual
from io import BytesIO, StringIO
infp = BytesIO(b'abcdefghijklmnopqrstuvwxyz'
b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
b'0123456789!@#0^&*();:<>,. []{}')
outfp = BytesIO()
base64.encode(infp, outfp)
eq(outfp.getvalue(),
b'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE'
b'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT'
b'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n')
# Non-binary files
self.assertRaises(TypeError, base64.encode, StringIO('abc'), BytesIO())
self.assertRaises(TypeError, base64.encode, BytesIO(b'abc'), StringIO())
self.assertRaises(TypeError, base64.encode, StringIO('abc'), StringIO())
def test_decode(self):
from io import BytesIO, StringIO
infp = BytesIO(b'd3d3LnB5dGhvbi5vcmc=')
outfp = BytesIO()
base64.decode(infp, outfp)
self.assertEqual(outfp.getvalue(), b'www.python.org')
# Non-binary files
self.assertRaises(TypeError, base64.encode, StringIO('YWJj\n'), BytesIO())
self.assertRaises(TypeError, base64.encode, BytesIO(b'YWJj\n'), StringIO())
self.assertRaises(TypeError, base64.encode, StringIO('YWJj\n'), StringIO())
class BaseXYTestCase(unittest.TestCase):
# Modern API completely ignores exported dimension and format data and
# treats any buffer as a stream of bytes
def check_encode_type_errors(self, f):
self.assertRaises(TypeError, f, "")
self.assertRaises(TypeError, f, [])
def check_decode_type_errors(self, f):
self.assertRaises(TypeError, f, [])
def check_other_types(self, f, bytes_data, expected):
eq = self.assertEqual
b = bytearray(bytes_data)
eq(f(b), expected)
# The bytearray wasn't mutated
eq(b, bytes_data)
eq(f(memoryview(bytes_data)), expected)
eq(f(array('B', bytes_data)), expected)
# XXX why is b64encode hardcoded here?
self.check_nonbyte_element_format(base64.b64encode, bytes_data)
self.check_multidimensional(base64.b64encode, bytes_data)
def check_multidimensional(self, f, data):
padding = b"\x00" if len(data) % 2 else b""
bytes_data = data + padding # Make sure cast works
shape = (len(bytes_data) // 2, 2)
multidimensional = memoryview(bytes_data).cast('B', shape)
self.assertEqual(f(multidimensional), f(bytes_data))
def check_nonbyte_element_format(self, f, data):
padding = b"\x00" * ((4 - len(data)) % 4)
bytes_data = data + padding # Make sure cast works
int_data = memoryview(bytes_data).cast('I')
self.assertEqual(f(int_data), f(bytes_data))
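# Example of the padding arithmetic above: for 6 bytes of data,
# (4 - 6) % 4 == 2 in Python, so two NUL bytes are appended and the
# buffer becomes castable to 32-bit unsigned ints ('I').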
def test_b64encode(self):
eq = self.assertEqual
# Test default alphabet
eq(base64.b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
eq(base64.b64encode(b'\x00'), b'AA==')
eq(base64.b64encode(b"a"), b"YQ==")
eq(base64.b64encode(b"ab"), b"YWI=")
eq(base64.b64encode(b"abc"), b"YWJj")
eq(base64.b64encode(b""), b"")
eq(base64.b64encode(b"abcdefghijklmnopqrstuvwxyz"
b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
b"0123456789!@#0^&*();:<>,. []{}"),
b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
# Test with arbitrary alternative characters
eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=b'*$'), b'01a*b$cd')
eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=bytearray(b'*$')),
b'01a*b$cd')
eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=memoryview(b'*$')),
b'01a*b$cd')
eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=array('B', b'*$')),
b'01a*b$cd')
# Non-bytes
self.check_other_types(base64.b64encode, b'abcd', b'YWJjZA==')
self.check_encode_type_errors(base64.b64encode)
self.assertRaises(TypeError, base64.b64encode, b"", altchars="*$")
# Test standard alphabet
eq(base64.standard_b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
eq(base64.standard_b64encode(b"a"), b"YQ==")
eq(base64.standard_b64encode(b"ab"), b"YWI=")
eq(base64.standard_b64encode(b"abc"), b"YWJj")
eq(base64.standard_b64encode(b""), b"")
eq(base64.standard_b64encode(b"abcdefghijklmnopqrstuvwxyz"
b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
b"0123456789!@#0^&*();:<>,. []{}"),
b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
# Non-bytes
self.check_other_types(base64.standard_b64encode,
b'abcd', b'YWJjZA==')
self.check_encode_type_errors(base64.standard_b64encode)
# Test with 'URL safe' alternative characters
eq(base64.urlsafe_b64encode(b'\xd3V\xbeo\xf7\x1d'), b'01a-b_cd')
# Non-bytes
self.check_other_types(base64.urlsafe_b64encode,
b'\xd3V\xbeo\xf7\x1d', b'01a-b_cd')
self.check_encode_type_errors(base64.urlsafe_b64encode)
def test_b64decode(self):
eq = self.assertEqual
tests = {b"d3d3LnB5dGhvbi5vcmc=": b"www.python.org",
b'AA==': b'\x00',
b"YQ==": b"a",
b"YWI=": b"ab",
b"YWJj": b"abc",
b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==":
b"abcdefghijklmnopqrstuvwxyz"
b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
b"0123456789!@#0^&*();:<>,. []{}",
b'': b'',
}
for data, res in tests.items():
eq(base64.b64decode(data), res)
eq(base64.b64decode(data.decode('ascii')), res)
# Non-bytes
self.check_other_types(base64.b64decode, b"YWJj", b"abc")
self.check_decode_type_errors(base64.b64decode)
# Test with arbitrary alternative characters
tests_altchars = {(b'01a*b$cd', b'*$'): b'\xd3V\xbeo\xf7\x1d',
}
for (data, altchars), res in tests_altchars.items():
data_str = data.decode('ascii')
altchars_str = altchars.decode('ascii')
eq(base64.b64decode(data, altchars=altchars), res)
eq(base64.b64decode(data_str, altchars=altchars), res)
eq(base64.b64decode(data, altchars=altchars_str), res)
eq(base64.b64decode(data_str, altchars=altchars_str), res)
# Test standard alphabet
for data, res in tests.items():
eq(base64.standard_b64decode(data), res)
eq(base64.standard_b64decode(data.decode('ascii')), res)
# Non-bytes
self.check_other_types(base64.standard_b64decode, b"YWJj", b"abc")
self.check_decode_type_errors(base64.standard_b64decode)
# Test with 'URL safe' alternative characters
tests_urlsafe = {b'01a-b_cd': b'\xd3V\xbeo\xf7\x1d',
b'': b'',
}
for data, res in tests_urlsafe.items():
eq(base64.urlsafe_b64decode(data), res)
eq(base64.urlsafe_b64decode(data.decode('ascii')), res)
# Non-bytes
self.check_other_types(base64.urlsafe_b64decode, b'01a-b_cd',
b'\xd3V\xbeo\xf7\x1d')
self.check_decode_type_errors(base64.urlsafe_b64decode)
def test_b64decode_padding_error(self):
self.assertRaises(binascii.Error, base64.b64decode, b'abc')
self.assertRaises(binascii.Error, base64.b64decode, 'abc')
def test_b64decode_invalid_chars(self):
# issue 1466065: Test some invalid characters.
tests = ((b'%3d==', b'\xdd'),
(b'$3d==', b'\xdd'),
(b'[==', b''),
(b'YW]3=', b'am'),
(b'3{d==', b'\xdd'),
(b'3d}==', b'\xdd'),
(b'@@', b''),
(b'!', b''),
(b'YWJj\nYWI=', b'abcab'))
for bstr, res in tests:
self.assertEqual(base64.b64decode(bstr), res)
self.assertEqual(base64.b64decode(bstr.decode('ascii')), res)
with self.assertRaises(binascii.Error):
base64.b64decode(bstr, validate=True)
with self.assertRaises(binascii.Error):
base64.b64decode(bstr.decode('ascii'), validate=True)
def test_b32encode(self):
eq = self.assertEqual
eq(base64.b32encode(b''), b'')
eq(base64.b32encode(b'\x00'), b'AA======')
eq(base64.b32encode(b'a'), b'ME======')
eq(base64.b32encode(b'ab'), b'MFRA====')
eq(base64.b32encode(b'abc'), b'MFRGG===')
eq(base64.b32encode(b'abcd'), b'MFRGGZA=')
eq(base64.b32encode(b'abcde'), b'MFRGGZDF')
# Non-bytes
self.check_other_types(base64.b32encode, b'abcd', b'MFRGGZA=')
self.check_encode_type_errors(base64.b32encode)
def test_b32decode(self):
eq = self.assertEqual
tests = {b'': b'',
b'AA======': b'\x00',
b'ME======': b'a',
b'MFRA====': b'ab',
b'MFRGG===': b'abc',
b'MFRGGZA=': b'abcd',
b'MFRGGZDF': b'abcde',
}
for data, res in tests.items():
eq(base64.b32decode(data), res)
eq(base64.b32decode(data.decode('ascii')), res)
# Non-bytes
self.check_other_types(base64.b32decode, b'MFRGG===', b"abc")
self.check_decode_type_errors(base64.b32decode)
def test_b32decode_casefold(self):
eq = self.assertEqual
tests = {b'': b'',
b'ME======': b'a',
b'MFRA====': b'ab',
b'MFRGG===': b'abc',
b'MFRGGZA=': b'abcd',
b'MFRGGZDF': b'abcde',
# Lower cases
b'me======': b'a',
b'mfra====': b'ab',
b'mfrgg===': b'abc',
b'mfrggza=': b'abcd',
b'mfrggzdf': b'abcde',
}
for data, res in tests.items():
eq(base64.b32decode(data, True), res)
eq(base64.b32decode(data.decode('ascii'), True), res)
self.assertRaises(binascii.Error, base64.b32decode, b'me======')
self.assertRaises(binascii.Error, base64.b32decode, 'me======')
# Mapping zero and one
eq(base64.b32decode(b'MLO23456'), b'b\xdd\xad\xf3\xbe')
eq(base64.b32decode('MLO23456'), b'b\xdd\xad\xf3\xbe')
map_tests = {(b'M1023456', b'L'): b'b\xdd\xad\xf3\xbe',
(b'M1023456', b'I'): b'b\x1d\xad\xf3\xbe',
}
for (data, map01), res in map_tests.items():
data_str = data.decode('ascii')
map01_str = map01.decode('ascii')
eq(base64.b32decode(data, map01=map01), res)
eq(base64.b32decode(data_str, map01=map01), res)
eq(base64.b32decode(data, map01=map01_str), res)
eq(base64.b32decode(data_str, map01=map01_str), res)
self.assertRaises(binascii.Error, base64.b32decode, data)
self.assertRaises(binascii.Error, base64.b32decode, data_str)
def test_b32decode_error(self):
for data in [b'abc', b'ABCDEF==', b'==ABCDEF']:
with self.assertRaises(binascii.Error):
base64.b32decode(data)
with self.assertRaises(binascii.Error):
base64.b32decode(data.decode('ascii'))
def test_b16encode(self):
eq = self.assertEqual
eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF')
eq(base64.b16encode(b'\x00'), b'00')
# Non-bytes
self.check_other_types(base64.b16encode, b'\x01\x02\xab\xcd\xef',
b'0102ABCDEF')
self.check_encode_type_errors(base64.b16encode)
def test_b16decode(self):
eq = self.assertEqual
eq(base64.b16decode(b'0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
eq(base64.b16decode('0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
eq(base64.b16decode(b'00'), b'\x00')
eq(base64.b16decode('00'), b'\x00')
# Lower case is not allowed without a flag
self.assertRaises(binascii.Error, base64.b16decode, b'0102abcdef')
self.assertRaises(binascii.Error, base64.b16decode, '0102abcdef')
# Case fold
eq(base64.b16decode(b'0102abcdef', True), b'\x01\x02\xab\xcd\xef')
eq(base64.b16decode('0102abcdef', True), b'\x01\x02\xab\xcd\xef')
# Non-bytes
self.check_other_types(base64.b16decode, b"0102ABCDEF",
b'\x01\x02\xab\xcd\xef')
self.check_decode_type_errors(base64.b16decode)
eq(base64.b16decode(bytearray(b"0102abcdef"), True),
b'\x01\x02\xab\xcd\xef')
eq(base64.b16decode(memoryview(b"0102abcdef"), True),
b'\x01\x02\xab\xcd\xef')
eq(base64.b16decode(array('B', b"0102abcdef"), True),
b'\x01\x02\xab\xcd\xef')
def test_a85encode(self):
eq = self.assertEqual
tests = {
b'': b'',
b"www.python.org": b'GB\\6`E-ZP=Df.1GEb>',
bytes(range(255)): b"""!!*-'"9eu7#RLhG$k3[W&.oNg'GVB"(`=52*$$"""
b"""(B+<_pR,UFcb-n-Vr/1iJ-0JP==1c70M3&s#]4?Ykm5X@_(6q'R884cE"""
b"""H9MJ8X:f1+h<)lt#=BSg3>[:ZC?t!MSA7]@cBPD3sCi+'.E,fo>FEMbN"""
b"""G^4U^I!pHnJ:W<)KS>/9Ll%"IN/`jYOHG]iPa.Q$R$jD4S=Q7DTV8*TU"""
b"""nsrdW2ZetXKAY/Yd(L?['d?O\\@K2_]Y2%o^qmn*`5Ta:aN;TJbg"GZd"""
b"""*^:jeCE.%f\\,!5gtgiEi8N\\UjQ5OekiqBum-X60nF?)@o_%qPq"ad`"""
b"""r;HT""",
b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
b"0123456789!@#0^&*();:<>,. []{}":
b'@:E_WAS,RgBkhF"D/O92EH6,BF`qtRH$VbC6UX@47n?3D92&&T'
b":Jand;cHat='/U/0JP==1c70M3&r-I,;<FN.OZ`-3]oSW/g+A(H[P",
b"no padding..": b'DJpY:@:Wn_DJ(RS',
b"zero compression\0\0\0\0": b'H=_,8+Cf>,E,oN2F(oQ1z',
b"zero compression\0\0\0": b'H=_,8+Cf>,E,oN2F(oQ1!!!!',
b"Boundary:\0\0\0\0": b'6>q!aA79M(3WK-[!!',
b"Space compr: ": b';fH/TAKYK$D/aMV+<VdL',
b'\xff': b'rr',
b'\xff'*2: b's8N',
b'\xff'*3: b's8W*',
b'\xff'*4: b's8W-!',
}
for data, res in tests.items():
eq(base64.a85encode(data), res, data)
eq(base64.a85encode(data, adobe=False), res, data)
eq(base64.a85encode(data, adobe=True), b'<~' + res + b'~>', data)
self.check_other_types(base64.a85encode, b"www.python.org",
b'GB\\6`E-ZP=Df.1GEb>')
self.assertRaises(TypeError, base64.a85encode, "")
eq(base64.a85encode(b"www.python.org", wrapcol=7, adobe=False),
b'GB\\6`E-\nZP=Df.1\nGEb>')
eq(base64.a85encode(b"\0\0\0\0www.python.org", wrapcol=7, adobe=False),
b'zGB\\6`E\n-ZP=Df.\n1GEb>')
eq(base64.a85encode(b"www.python.org", wrapcol=7, adobe=True),
b'<~GB\\6`\nE-ZP=Df\n.1GEb>\n~>')
eq(base64.a85encode(b' '*8, foldspaces=True, adobe=False), b'yy')
eq(base64.a85encode(b' '*7, foldspaces=True, adobe=False), b'y+<Vd')
eq(base64.a85encode(b' '*6, foldspaces=True, adobe=False), b'y+<U')
eq(base64.a85encode(b' '*5, foldspaces=True, adobe=False), b'y+9')
def test_b85encode(self):
eq = self.assertEqual
tests = {
b'': b'',
b'www.python.org': b'cXxL#aCvlSZ*DGca%T',
bytes(range(255)): b"""009C61O)~M2nh-c3=Iws5D^j+6crX17#SKH9337X"""
b"""AR!_nBqb&%C@Cr{EG;fCFflSSG&MFiI5|2yJUu=?KtV!7L`6nNNJ&ad"""
b"""OifNtP*GA-R8>}2SXo+ITwPvYU}0ioWMyV&XlZI|Y;A6DaB*^Tbai%j"""
b"""czJqze0_d@fPsR8goTEOh>41ejE#<ukdcy;l$Dm3n3<ZJoSmMZprN9p"""
b"""q@|{(sHv)}tgWuEu(7hUw6(UkxVgH!yuH4^z`?@9#Kp$P$jQpf%+1cv"""
b"""(9zP<)YaD4*xB0K+}+;a;Njxq<mKk)=;`X~?CtLF@bU8V^!4`l`1$(#"""
b"""{Qdp""",
b"""abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"""
b"""0123456789!@#0^&*();:<>,. []{}""":
b"""VPa!sWoBn+X=-b1ZEkOHadLBXb#`}nd3r%YLqtVJM@UIZOH55pPf$@("""
b"""Q&d$}S6EqEFflSSG&MFiI5{CeBQRbjDkv#CIy^osE+AW7dwl""",
b'no padding..': b'Zf_uPVPs@!Zf7no',
b'zero compression\x00\x00\x00\x00': b'dS!BNAY*TBaB^jHb7^mG00000',
b'zero compression\x00\x00\x00': b'dS!BNAY*TBaB^jHb7^mG0000',
b"""Boundary:\x00\x00\x00\x00""": b"""LT`0$WMOi7IsgCw00""",
b'Space compr: ': b'Q*dEpWgug3ZE$irARr(h',
b'\xff': b'{{',
b'\xff'*2: b'|Nj',
b'\xff'*3: b'|Ns9',
b'\xff'*4: b'|NsC0',
}
for data, res in tests.items():
eq(base64.b85encode(data), res)
self.check_other_types(base64.b85encode, b"www.python.org",
b'cXxL#aCvlSZ*DGca%T')
def test_a85decode(self):
eq = self.assertEqual
tests = {
b'': b'',
b'GB\\6`E-ZP=Df.1GEb>': b'www.python.org',
b"""! ! * -'"\n\t\t9eu\r\n7# RL\vhG$k3[W&.oNg'GVB"(`=52*$$"""
b"""(B+<_pR,UFcb-n-Vr/1iJ-0JP==1c70M3&s#]4?Ykm5X@_(6q'R884cE"""
b"""H9MJ8X:f1+h<)lt#=BSg3>[:ZC?t!MSA7]@cBPD3sCi+'.E,fo>FEMbN"""
b"""G^4U^I!pHnJ:W<)KS>/9Ll%"IN/`jYOHG]iPa.Q$R$jD4S=Q7DTV8*TU"""
b"""nsrdW2ZetXKAY/Yd(L?['d?O\\@K2_]Y2%o^qmn*`5Ta:aN;TJbg"GZd"""
b"""*^:jeCE.%f\\,!5gtgiEi8N\\UjQ5OekiqBum-X60nF?)@o_%qPq"ad`"""
b"""r;HT""": bytes(range(255)),
b"""@:E_WAS,RgBkhF"D/O92EH6,BF`qtRH$VbC6UX@47n?3D92&&T:Jand;c"""
b"""Hat='/U/0JP==1c70M3&r-I,;<FN.OZ`-3]oSW/g+A(H[P""":
b'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234'
b'56789!@#0^&*();:<>,. []{}',
b'DJpY:@:Wn_DJ(RS': b'no padding..',
b'H=_,8+Cf>,E,oN2F(oQ1z': b'zero compression\x00\x00\x00\x00',
b'H=_,8+Cf>,E,oN2F(oQ1!!!!': b'zero compression\x00\x00\x00',
b'6>q!aA79M(3WK-[!!': b"Boundary:\x00\x00\x00\x00",
b';fH/TAKYK$D/aMV+<VdL': b'Space compr: ',
b'rr': b'\xff',
b's8N': b'\xff'*2,
b's8W*': b'\xff'*3,
b's8W-!': b'\xff'*4,
}
for data, res in tests.items():
eq(base64.a85decode(data), res, data)
eq(base64.a85decode(data, adobe=False), res, data)
eq(base64.a85decode(data.decode("ascii"), adobe=False), res, data)
eq(base64.a85decode(b'<~' + data + b'~>', adobe=True), res, data)
eq(base64.a85decode('<~%s~>' % data.decode("ascii"), adobe=True),
res, data)
eq(base64.a85decode(b'yy', foldspaces=True, adobe=False), b' '*8)
eq(base64.a85decode(b'y+<Vd', foldspaces=True, adobe=False), b' '*7)
eq(base64.a85decode(b'y+<U', foldspaces=True, adobe=False), b' '*6)
eq(base64.a85decode(b'y+9', foldspaces=True, adobe=False), b' '*5)
self.check_other_types(base64.a85decode, b'GB\\6`E-ZP=Df.1GEb>',
b"www.python.org")
def test_b85decode(self):
eq = self.assertEqual
tests = {
b'': b'',
b'cXxL#aCvlSZ*DGca%T': b'www.python.org',
b"""009C61O)~M2nh-c3=Iws5D^j+6crX17#SKH9337X"""
b"""AR!_nBqb&%C@Cr{EG;fCFflSSG&MFiI5|2yJUu=?KtV!7L`6nNNJ&ad"""
b"""OifNtP*GA-R8>}2SXo+ITwPvYU}0ioWMyV&XlZI|Y;A6DaB*^Tbai%j"""
b"""czJqze0_d@fPsR8goTEOh>41ejE#<ukdcy;l$Dm3n3<ZJoSmMZprN9p"""
b"""q@|{(sHv)}tgWuEu(7hUw6(UkxVgH!yuH4^z`?@9#Kp$P$jQpf%+1cv"""
b"""(9zP<)YaD4*xB0K+}+;a;Njxq<mKk)=;`X~?CtLF@bU8V^!4`l`1$(#"""
b"""{Qdp""": bytes(range(255)),
b"""VPa!sWoBn+X=-b1ZEkOHadLBXb#`}nd3r%YLqtVJM@UIZOH55pPf$@("""
b"""Q&d$}S6EqEFflSSG&MFiI5{CeBQRbjDkv#CIy^osE+AW7dwl""":
b"""abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"""
b"""0123456789!@#0^&*();:<>,. []{}""",
b'Zf_uPVPs@!Zf7no': b'no padding..',
b'dS!BNAY*TBaB^jHb7^mG00000': b'zero compression\x00\x00\x00\x00',
b'dS!BNAY*TBaB^jHb7^mG0000': b'zero compression\x00\x00\x00',
b"""LT`0$WMOi7IsgCw00""": b"""Boundary:\x00\x00\x00\x00""",
b'Q*dEpWgug3ZE$irARr(h': b'Space compr: ',
b'{{': b'\xff',
b'|Nj': b'\xff'*2,
b'|Ns9': b'\xff'*3,
b'|NsC0': b'\xff'*4,
}
for data, res in tests.items():
eq(base64.b85decode(data), res)
eq(base64.b85decode(data.decode("ascii")), res)
self.check_other_types(base64.b85decode, b'cXxL#aCvlSZ*DGca%T',
b"www.python.org")
def test_a85_padding(self):
eq = self.assertEqual
eq(base64.a85encode(b"x", pad=True), b'GQ7^D')
eq(base64.a85encode(b"xx", pad=True), b"G^'2g")
eq(base64.a85encode(b"xxx", pad=True), b'G^+H5')
eq(base64.a85encode(b"xxxx", pad=True), b'G^+IX')
eq(base64.a85encode(b"xxxxx", pad=True), b'G^+IXGQ7^D')
eq(base64.a85decode(b'GQ7^D'), b"x\x00\x00\x00")
eq(base64.a85decode(b"G^'2g"), b"xx\x00\x00")
eq(base64.a85decode(b'G^+H5'), b"xxx\x00")
eq(base64.a85decode(b'G^+IX'), b"xxxx")
eq(base64.a85decode(b'G^+IXGQ7^D'), b"xxxxx\x00\x00\x00")
def test_b85_padding(self):
eq = self.assertEqual
eq(base64.b85encode(b"x", pad=True), b'cmMzZ')
eq(base64.b85encode(b"xx", pad=True), b'cz6H+')
eq(base64.b85encode(b"xxx", pad=True), b'czAdK')
eq(base64.b85encode(b"xxxx", pad=True), b'czAet')
eq(base64.b85encode(b"xxxxx", pad=True), b'czAetcmMzZ')
eq(base64.b85decode(b'cmMzZ'), b"x\x00\x00\x00")
eq(base64.b85decode(b'cz6H+'), b"xx\x00\x00")
eq(base64.b85decode(b'czAdK'), b"xxx\x00")
eq(base64.b85decode(b'czAet'), b"xxxx")
eq(base64.b85decode(b'czAetcmMzZ'), b"xxxxx\x00\x00\x00")
def test_a85decode_errors(self):
illegal = (set(range(32)) | set(range(118, 256))) - set(b' \t\n\r\v')
for c in illegal:
with self.assertRaises(ValueError, msg=bytes([c])):
base64.a85decode(b'!!!!' + bytes([c]))
with self.assertRaises(ValueError, msg=bytes([c])):
base64.a85decode(b'!!!!' + bytes([c]), adobe=False)
with self.assertRaises(ValueError, msg=bytes([c])):
base64.a85decode(b'<~!!!!' + bytes([c]) + b'~>', adobe=True)
self.assertRaises(ValueError, base64.a85decode,
b"malformed", adobe=True)
self.assertRaises(ValueError, base64.a85decode,
b"<~still malformed", adobe=True)
self.assertRaises(ValueError, base64.a85decode,
b"also malformed~>", adobe=True)
# With adobe=False (the default), Adobe framing markers are disallowed
self.assertRaises(ValueError, base64.a85decode,
b"<~~>")
self.assertRaises(ValueError, base64.a85decode,
b"<~~>", adobe=False)
base64.a85decode(b"<~~>", adobe=True) # sanity check
self.assertRaises(ValueError, base64.a85decode,
b"abcx", adobe=False)
self.assertRaises(ValueError, base64.a85decode,
b"abcdey", adobe=False)
self.assertRaises(ValueError, base64.a85decode,
b"a b\nc", adobe=False, ignorechars=b"")
self.assertRaises(ValueError, base64.a85decode, b's', adobe=False)
self.assertRaises(ValueError, base64.a85decode, b's8', adobe=False)
self.assertRaises(ValueError, base64.a85decode, b's8W', adobe=False)
self.assertRaises(ValueError, base64.a85decode, b's8W-', adobe=False)
self.assertRaises(ValueError, base64.a85decode, b's8W-"', adobe=False)
def test_b85decode_errors(self):
illegal = list(range(33)) + \
list(b'"\',./:[\\]') + \
list(range(128, 256))
for c in illegal:
with self.assertRaises(ValueError, msg=bytes([c])):
base64.b85decode(b'0000' + bytes([c]))
self.assertRaises(ValueError, base64.b85decode, b'|')
self.assertRaises(ValueError, base64.b85decode, b'|N')
self.assertRaises(ValueError, base64.b85decode, b'|Ns')
self.assertRaises(ValueError, base64.b85decode, b'|NsC')
self.assertRaises(ValueError, base64.b85decode, b'|NsC1')
def test_decode_nonascii_str(self):
decode_funcs = (base64.b64decode,
base64.standard_b64decode,
base64.urlsafe_b64decode,
base64.b32decode,
base64.b16decode,
base64.b85decode,
base64.a85decode)
for f in decode_funcs:
self.assertRaises(ValueError, f, 'with non-ascii \xcb')
def test_ErrorHeritage(self):
self.assertTrue(issubclass(binascii.Error, ValueError))
class TestMain(unittest.TestCase):
def tearDown(self):
if os.path.exists(support.TESTFN):
os.unlink(support.TESTFN)
def get_output(self, *args, **options):
args = (sys.executable, '-m', 'base64') + args
return subprocess.check_output(args, **options)
def test_encode_decode(self):
output = self.get_output('-t')
self.assertSequenceEqual(output.splitlines(), (
b"b'Aladdin:open sesame'",
br"b'QWxhZGRpbjpvcGVuIHNlc2FtZQ==\n'",
b"b'Aladdin:open sesame'",
))
def test_encode_file(self):
with open(support.TESTFN, 'wb') as fp:
fp.write(b'a\xffb\n')
output = self.get_output('-e', support.TESTFN)
self.assertEqual(output.rstrip(), b'Yf9iCg==')
with open(support.TESTFN, 'rb') as fp:
output = self.get_output('-e', stdin=fp)
self.assertEqual(output.rstrip(), b'Yf9iCg==')
def test_decode(self):
with open(support.TESTFN, 'wb') as fp:
fp.write(b'Yf9iCg==')
output = self.get_output('-d', support.TESTFN)
self.assertEqual(output.rstrip(), b'a\xffb')
def test_main():
support.run_unittest(__name__)
if __name__ == '__main__':
test_main()
| lgpl-3.0 |
sometallgit/AutoUploader | Python27/Lib/msilib/__init__.py | 15 | 17605 | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2005 Martin v. Löwis
# Licensed to PSF under a Contributor Agreement.
from _msi import *
import glob
import os
import re
import string
import sys
AMD64 = "AMD64" in sys.version
Itanium = "Itanium" in sys.version
Win64 = AMD64 or Itanium
# Partially taken from Wine
datasizemask= 0x00ff
type_valid= 0x0100
type_localizable= 0x0200
typemask= 0x0c00
type_long= 0x0000
type_short= 0x0400
type_string= 0x0c00
type_binary= 0x0800
type_nullable= 0x1000
type_key= 0x2000
# XXX temporary, localizable?
knownbits = datasizemask | type_valid | type_localizable | \
typemask | type_nullable | type_key
class Table:
def __init__(self, name):
self.name = name
self.fields = []
def add_field(self, index, name, type):
self.fields.append((index,name,type))
def sql(self):
fields = []
keys = []
self.fields.sort()
fields = [None]*len(self.fields)
for index, name, type in self.fields:
index -= 1
unk = type & ~knownbits
if unk:
print "%s.%s unknown bits %x" % (self.name, name, unk)
size = type & datasizemask
dtype = type & typemask
if dtype == type_string:
if size:
tname="CHAR(%d)" % size
else:
tname="CHAR"
elif dtype == type_short:
assert size==2
tname = "SHORT"
elif dtype == type_long:
assert size==4
tname="LONG"
elif dtype == type_binary:
assert size==0
tname="OBJECT"
else:
tname="unknown"
print "%s.%sunknown integer type %d" % (self.name, name, size)
if type & type_nullable:
flags = ""
else:
flags = " NOT NULL"
if type & type_localizable:
flags += " LOCALIZABLE"
fields[index] = "`%s` %s%s" % (name, tname, flags)
if type & type_key:
keys.append("`%s`" % name)
fields = ", ".join(fields)
keys = ", ".join(keys)
return "CREATE TABLE %s (%s PRIMARY KEY %s)" % (self.name, fields, keys)
def create(self, db):
v = db.OpenView(self.sql())
v.Execute(None)
v.Close()
class _Unspecified:pass
def change_sequence(seq, action, seqno=_Unspecified, cond = _Unspecified):
"Change the sequence number of an action in a sequence list"
for i in range(len(seq)):
if seq[i][0] == action:
if cond is _Unspecified:
cond = seq[i][1]
if seqno is _Unspecified:
seqno = seq[i][2]
seq[i] = (action, cond, seqno)
return
raise ValueError, "Action not found in sequence"
def add_data(db, table, values):
v = db.OpenView("SELECT * FROM `%s`" % table)
count = v.GetColumnInfo(MSICOLINFO_NAMES).GetFieldCount()
r = CreateRecord(count)
for value in values:
assert len(value) == count, value
for i in range(count):
field = value[i]
if isinstance(field, (int, long)):
r.SetInteger(i+1,field)
elif isinstance(field, basestring):
r.SetString(i+1,field)
elif field is None:
pass
elif isinstance(field, Binary):
r.SetStream(i+1, field.name)
else:
raise TypeError, "Unsupported type %s" % field.__class__.__name__
try:
v.Modify(MSIMODIFY_INSERT, r)
except Exception, e:
raise MSIError("Could not insert "+repr(values)+" into "+table)
r.ClearData()
v.Close()
def add_stream(db, name, path):
v = db.OpenView("INSERT INTO _Streams (Name, Data) VALUES ('%s', ?)" % name)
r = CreateRecord(1)
r.SetStream(1, path)
v.Execute(r)
v.Close()
def init_database(name, schema,
ProductName, ProductCode, ProductVersion,
Manufacturer):
try:
os.unlink(name)
except OSError:
pass
ProductCode = ProductCode.upper()
# Create the database
db = OpenDatabase(name, MSIDBOPEN_CREATE)
# Create the tables
for t in schema.tables:
t.create(db)
# Fill the validation table
add_data(db, "_Validation", schema._Validation_records)
# Initialize the summary information, allowing atmost 20 properties
si = db.GetSummaryInformation(20)
si.SetProperty(PID_TITLE, "Installation Database")
si.SetProperty(PID_SUBJECT, ProductName)
si.SetProperty(PID_AUTHOR, Manufacturer)
if Itanium:
si.SetProperty(PID_TEMPLATE, "Intel64;1033")
elif AMD64:
si.SetProperty(PID_TEMPLATE, "x64;1033")
else:
si.SetProperty(PID_TEMPLATE, "Intel;1033")
si.SetProperty(PID_REVNUMBER, gen_uuid())
si.SetProperty(PID_WORDCOUNT, 2) # long file names, compressed, original media
si.SetProperty(PID_PAGECOUNT, 200)
si.SetProperty(PID_APPNAME, "Python MSI Library")
# XXX more properties
si.Persist()
add_data(db, "Property", [
("ProductName", ProductName),
("ProductCode", ProductCode),
("ProductVersion", ProductVersion),
("Manufacturer", Manufacturer),
("ProductLanguage", "1033")])
db.Commit()
return db
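# Typical call (all values hypothetical): init_database("python.msi",
# schema, "Python", "{01234567-89AB-CDEF-0123-456789ABCDEF}", "2.7.10",
# "PSF") creates the file, fills the schema, validation and summary
# information, and returns an open Database handle.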
def add_tables(db, module):
for table in module.tables:
add_data(db, table, getattr(module, table))
def make_id(str):
identifier_chars = string.ascii_letters + string.digits + "._"
str = "".join([c if c in identifier_chars else "_" for c in str])
if str[0] in (string.digits + "."):
str = "_" + str
assert re.match("^[A-Za-z_][A-Za-z0-9_.]*$", str), "FILE"+str
return str
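# Example: make_id("2-dist.info") first becomes "2_dist.info" (the dash
# is not an identifier character) and then "_2_dist.info", because an
# identifier may not start with a digit or a dot.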
def gen_uuid():
return "{"+UuidCreate().upper()+"}"
class CAB:
def __init__(self, name):
self.name = name
self.files = []
self.filenames = set()
self.index = 0
def gen_id(self, file):
logical = _logical = make_id(file)
pos = 1
while logical in self.filenames:
logical = "%s.%d" % (_logical, pos)
pos += 1
self.filenames.add(logical)
return logical
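# Example: the first gen_id("foo.py") yields the logical name "foo.py";
# a second file with the same name is disambiguated to "foo.py.1".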
def append(self, full, file, logical):
if os.path.isdir(full):
return
if not logical:
logical = self.gen_id(file)
self.index += 1
self.files.append((full, logical))
return self.index, logical
def commit(self, db):
from tempfile import mktemp
filename = mktemp()
FCICreate(filename, self.files)
add_data(db, "Media",
[(1, self.index, None, "#"+self.name, None, None)])
add_stream(db, self.name, filename)
os.unlink(filename)
db.Commit()
_directories = set()
class Directory:
def __init__(self, db, cab, basedir, physical, _logical, default, componentflags=None):
"""Create a new directory in the Directory table. There is a current component
at each point in time for the directory, which is either explicitly created
through start_component, or implicitly when files are added for the first
time. Files are added into the current component, and into the cab file.
To create a directory, a base directory object needs to be specified (can be
None), the path to the physical directory, and a logical directory name.
Default specifies the DefaultDir slot in the directory table. componentflags
specifies the default flags that new components get."""
index = 1
_logical = make_id(_logical)
logical = _logical
while logical in _directories:
logical = "%s%d" % (_logical, index)
index += 1
_directories.add(logical)
self.db = db
self.cab = cab
self.basedir = basedir
self.physical = physical
self.logical = logical
self.component = None
self.short_names = set()
self.ids = set()
self.keyfiles = {}
self.componentflags = componentflags
if basedir:
self.absolute = os.path.join(basedir.absolute, physical)
blogical = basedir.logical
else:
self.absolute = physical
blogical = None
add_data(db, "Directory", [(logical, blogical, default)])
def start_component(self, component = None, feature = None, flags = None, keyfile = None, uuid=None):
"""Add an entry to the Component table, and make this component the current for this
directory. If no component name is given, the directory name is used. If no feature
is given, the current feature is used. If no flags are given, the directory's default
flags are used. If no keyfile is given, the KeyPath is left null in the Component
table."""
if flags is None:
flags = self.componentflags
if uuid is None:
uuid = gen_uuid()
else:
uuid = uuid.upper()
if component is None:
component = self.logical
self.component = component
if Win64:
flags |= 256
if keyfile:
# CAB.gen_id takes a single file name; also passing self.absolute
# raised a TypeError whenever a keyfile was supplied
keyid = self.cab.gen_id(keyfile)
self.keyfiles[keyfile] = keyid
else:
keyid = None
add_data(self.db, "Component",
[(component, uuid, self.logical, flags, None, keyid)])
if feature is None:
feature = current_feature
add_data(self.db, "FeatureComponents",
[(feature.id, component)])
def make_short(self, file):
oldfile = file
file = file.replace('+', '_')
file = ''.join(c for c in file if not c in ' "/\[]:;=,')
parts = file.split(".")
if len(parts) > 1:
prefix = "".join(parts[:-1]).upper()
suffix = parts[-1].upper()
if not prefix:
prefix = suffix
suffix = None
else:
prefix = file.upper()
suffix = None
if len(parts) < 3 and len(prefix) <= 8 and file == oldfile and (
not suffix or len(suffix) <= 3):
if suffix:
file = prefix+"."+suffix
else:
file = prefix
else:
file = None
if file is None or file in self.short_names:
prefix = prefix[:6]
if suffix:
suffix = suffix[:3]
pos = 1
while 1:
if suffix:
file = "%s~%d.%s" % (prefix, pos, suffix)
else:
file = "%s~%d" % (prefix, pos)
if file not in self.short_names: break
pos += 1
assert pos < 10000
if pos in (10, 100, 1000):
prefix = prefix[:-1]
self.short_names.add(file)
assert not re.search(r'[\?|><:/*"+,;=\[\]]', file) # restrictions on short names
return file
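# Illustration (assuming an empty short_names set): "python-2.7.amd64.msi"
# fails the 8.3 test (too many dots, prefix longer than 8), so the prefix
# is cut to "PYTHON" and the first free candidate "PYTHON~1.MSI" is
# returned; later collisions keep bumping the ~N counter.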
def add_file(self, file, src=None, version=None, language=None):
"""Add a file to the current component of the directory, starting a new one
if there is no current component. By default, the file name in the source
and the file table will be identical. If the src file is specified, it is
interpreted relative to the current directory. Optionally, a version and a
language can be specified for the entry in the File table."""
if not self.component:
self.start_component(self.logical, current_feature, 0)
if not src:
# Allow relative paths for file if src is not specified
src = file
file = os.path.basename(file)
absolute = os.path.join(self.absolute, src)
assert not re.search(r'[\?|><:/*]"', file) # restrictions on long names
if file in self.keyfiles:
logical = self.keyfiles[file]
else:
logical = None
sequence, logical = self.cab.append(absolute, file, logical)
assert logical not in self.ids
self.ids.add(logical)
short = self.make_short(file)
full = "%s|%s" % (short, file)
filesize = os.stat(absolute).st_size
# constants.msidbFileAttributesVital
# Compressed omitted, since it is the database default
# could add r/o, system, hidden
attributes = 512
add_data(self.db, "File",
[(logical, self.component, full, filesize, version,
language, attributes, sequence)])
#if not version:
# # Add hash if the file is not versioned
# filehash = FileHash(absolute, 0)
# add_data(self.db, "MsiFileHash",
# [(logical, 0, filehash.IntegerData(1),
# filehash.IntegerData(2), filehash.IntegerData(3),
# filehash.IntegerData(4))])
# Automatically remove .pyc/.pyo files on uninstall (2)
# XXX: adding so many RemoveFile entries makes installer unbelievably
# slow. So instead, we have to use wildcard remove entries
if file.endswith(".py"):
add_data(self.db, "RemoveFile",
[(logical+"c", self.component, "%sC|%sc" % (short, file),
self.logical, 2),
(logical+"o", self.component, "%sO|%so" % (short, file),
self.logical, 2)])
return logical
def glob(self, pattern, exclude = None):
"""Add a list of files to the current component as specified in the
glob pattern. Individual files can be excluded in the exclude list."""
files = glob.glob1(self.absolute, pattern)
for f in files:
if exclude and f in exclude: continue
self.add_file(f)
return files
def remove_pyc(self):
"Remove .pyc/.pyo files on uninstall"
add_data(self.db, "RemoveFile",
[(self.component+"c", self.component, "*.pyc", self.logical, 2),
(self.component+"o", self.component, "*.pyo", self.logical, 2)])
class Binary:
def __init__(self, fname):
self.name = fname
def __repr__(self):
return 'msilib.Binary(os.path.join(dirname,"%s"))' % self.name
class Feature:
def __init__(self, db, id, title, desc, display, level = 1,
parent=None, directory = None, attributes=0):
self.id = id
if parent:
parent = parent.id
add_data(db, "Feature",
[(id, parent, title, desc, display,
level, directory, attributes)])
def set_current(self):
global current_feature
current_feature = self
class Control:
def __init__(self, dlg, name):
self.dlg = dlg
self.name = name
def event(self, event, argument, condition = "1", ordering = None):
add_data(self.dlg.db, "ControlEvent",
[(self.dlg.name, self.name, event, argument,
condition, ordering)])
def mapping(self, event, attribute):
add_data(self.dlg.db, "EventMapping",
[(self.dlg.name, self.name, event, attribute)])
def condition(self, action, condition):
add_data(self.dlg.db, "ControlCondition",
[(self.dlg.name, self.name, action, condition)])
class RadioButtonGroup(Control):
def __init__(self, dlg, name, property):
self.dlg = dlg
self.name = name
self.property = property
self.index = 1
def add(self, name, x, y, w, h, text, value = None):
if value is None:
value = name
add_data(self.dlg.db, "RadioButton",
[(self.property, self.index, value,
x, y, w, h, text, None)])
self.index += 1
class Dialog:
def __init__(self, db, name, x, y, w, h, attr, title, first, default, cancel):
self.db = db
self.name = name
self.x, self.y, self.w, self.h = x,y,w,h
add_data(db, "Dialog", [(name, x,y,w,h,attr,title,first,default,cancel)])
def control(self, name, type, x, y, w, h, attr, prop, text, next, help):
add_data(self.db, "Control",
[(self.name, name, type, x, y, w, h, attr, prop, text, next, help)])
return Control(self, name)
def text(self, name, x, y, w, h, attr, text):
return self.control(name, "Text", x, y, w, h, attr, None,
text, None, None)
def bitmap(self, name, x, y, w, h, text):
return self.control(name, "Bitmap", x, y, w, h, 1, None, text, None, None)
def line(self, name, x, y, w, h):
return self.control(name, "Line", x, y, w, h, 1, None, None, None, None)
def pushbutton(self, name, x, y, w, h, attr, text, next):
return self.control(name, "PushButton", x, y, w, h, attr, None, text, next, None)
def radiogroup(self, name, x, y, w, h, attr, prop, text, next):
add_data(self.db, "Control",
[(self.name, name, "RadioButtonGroup",
x, y, w, h, attr, prop, text, next, None)])
return RadioButtonGroup(self, name, prop)
def checkbox(self, name, x, y, w, h, attr, prop, text, next):
return self.control(name, "CheckBox", x, y, w, h, attr, prop, text, next, None)
| mit |
KyleAMoore/KanjiNani | Android/.buildozer/android/platform/build/build/python-installs/KanjiNani/kivy/modules/keybinding.py | 81 | 1699 | '''Keybinding
==========
This module forces the mapping of some keys to functions:
* F11: Rotate the Window through 0, 90, 180 and 270 degrees
* Shift + F11: Switches between portrait and landscape on desktops
* F12: Take a screenshot
Note: this doesn't work if the application requests the keyboard beforehand.
Usage
-----
For normal module usage, please see the :mod:`~kivy.modules` documentation.
The Keybinding module, however, can also be imported and used just
like a normal python module. This has the added advantage of being
able to activate and deactivate the module programmatically::
from kivy.app import App
from kivy.uix.button import Button
from kivy.modules import keybinding
from kivy.core.window import Window
class Demo(App):
def build(self):
button = Button(text="Hello")
keybinding.start(Window, button)
return button
Demo().run()
To remove the Keybinding, you can do the following::
keybinding.stop(Window, button)
'''
from kivy.utils import platform
__all__ = ('start', 'stop')
def _on_keyboard_handler(instance, key, scancode, codepoint, modifiers):
if key == 293 and modifiers == []: # F12
instance.screenshot()
elif key == 292 and modifiers == []: # F11
instance.rotation += 90
elif key == 292 and modifiers == ['shift']: # Shift + F11
if platform in ('win', 'linux', 'macosx'):
instance.rotation = 0
w, h = instance.size
w, h = h, w
instance.size = (w, h)
def start(win, ctx):
win.bind(on_keyboard=_on_keyboard_handler)
def stop(win, ctx):
win.unbind(on_keyboard=_on_keyboard_handler)
| gpl-3.0 |
invitu/odoomrp-wip | mrp_operations_extension/models/mrp_workcenter.py | 4 | 2159 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields, api
from openerp.addons import decimal_precision as dp
class MrpWorkcenter(models.Model):
_inherit = 'mrp.workcenter'
@api.one
@api.depends('operators')
def _operators_number_avg_cost(self):
self.op_number = len(self.operators)
op_avg_cost = 0.0
for operator in self.operators:
op_avg_cost += operator.employee_ids[:1].product_id.standard_price
self.op_avg_cost = op_avg_cost / (self.op_number or 1)
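# Illustration (hypothetical data): two operators whose linked employee
# products cost 20.0 and 30.0 give op_number = 2 and
# op_avg_cost = (20.0 + 30.0) / 2 = 25.0; the "or 1" guard avoids a
# division by zero when no operators are assigned.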
pre_op_product = fields.Many2one('product.product',
string='Pre-operation costing product')
post_op_product = fields.Many2one('product.product',
string='Post-operation costing product')
rt_operations = fields.Many2many(
'mrp.routing.operation', 'mrp_operation_workcenter_rel', 'workcenter',
'operation', 'Routing Operations')
operators = fields.Many2many('res.users', 'mrp_wc_operator_rel',
'workcenter_id', 'operator_id', 'Operators')
op_number = fields.Integer(
string='# Operators', compute=_operators_number_avg_cost)
op_avg_cost = fields.Float(
string='Operator average hour cost',
digits=dp.get_precision('Product Price'))
| agpl-3.0 |
snlpatel001213/algorithmia | bayesian/gaussianNaiveBayes/gaussiannaiveBayes.py | 1 | 7699 | from utils import loadData
from utils import splitToTrainTest
from utils import functionalTesting
import math
# Dataset - https://archive.ics.uci.edu/ml/datasets/Breast+Cancer+Wisconsin+(Diagnostic)
# read - https://web.stanford.edu/~jurafsky/slp3/9.pdf
loadDataInstance = loadData()
functionalTestingInstance = functionalTesting()
# load data
datasetInString = loadDataInstance.loadFromcsv("dataset/breast-cancer-wisconsin.data")
# convert to float
dataset = loadDataInstance.convertDataToFloat(datasetInString)
# making dictionary
def makeDictionaryFromDataset(dataset):
"""
takes dataset as list of list
:param dataset: [[5.0, 1.0, 1.0, 1.0, 2.0, 1.0, 3.0, 1.0, 1.0, 1.0],[5.0, 6.0, 5.0, 6.0, 10.0, 1.0, 3.0, 1.0, 1.0, 0.0],[4.0, 5.0, 1.0, 6.0, 2.0, 7.0, 3.0, 1.0, 1.0, 1.0]]
:return:
"""
classDict = {}
for each in dataset:
if each[-1] in classDict:
# append the new number to the existing array at this slot
classDict[each[-1]].append(each[:-1]) # each[-1] is a class , 0.0 or 1.0 | each[:-1] = all features except class
else:
# create a new array in this slot
classDict[each[-1]] =[each[:-1]]
return classDict #{0.0:[[5.0, 6.0, 5.0, 6.0, 10.0, 1.0, 3.0, 1.0, 1.0]], 1.0:[[5.0, 1.0, 1.0, 1.0, 2.0, 1.0, 3.0, 1.0, 1.0],[4.0, 5.0, 1.0, 6.0, 2.0, 7.0, 3.0, 1.0, 1.0]]}
def getMean(array):
"""
get array and return mean
:param array: [5.0, 6.0, 5.0, 6.0, 10.0, 1.0, 3.0, 1.0, 1.0]
:return: float
"""
return sum(array)/float(len(array))
def getStandardDeviation(array):
"""
get array and return standard deviation
:param array: [5.0, 6.0, 5.0, 6.0, 10.0, 1.0, 3.0, 1.0, 1.0]
:return: float
"""
average = getMean(array)
variance = sum([math.pow(y - average, 2) for y in array]) / float(len(array) - 1)
# return the square root of the sample variance: the function promises a
# standard deviation, and returning the raw variance was a bug
return math.sqrt(variance)
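# Worked example (sample statistics, n-1 denominator): for
# [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0] the mean is 5.0 and the squared
# deviations sum to 32.0, so the sample variance is 32/7 ~= 4.571 and
# getStandardDeviation(...) returns sqrt(4.571) ~= 2.14.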
def gaussianProbabilityDensity(x,mean,stddev):
"""
calculate gaussian Probability Density
:param x: data; float
:param mean: data; float
:param stddev: data; float
:return:
"""
exponent = math.exp(-(math.pow(x-mean,2)/(2*math.pow(stddev,2))))
return (1/(math.sqrt(2*math.pi)*stddev))*exponent
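# Sanity check (values rounded): for the standard normal curve,
# gaussianProbabilityDensity(0.0, 0.0, 1.0) == 1/sqrt(2*pi) ~= 0.39894,
# and the density falls off symmetrically, e.g.
# gaussianProbabilityDensity(1.0, 0.0, 1.0) ~= 0.24197.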
# 70% of data, separating for train
train = dataset[:int(len(dataset)*0.7)]
print "Size of train dataset : ", len(train), " size of total dataset : ", len(dataset)
classDict = makeDictionaryFromDataset(train)
numberOfFeatures = len(dataset[0])-1 # number Of Features
# print numberOfFeatures # e.g. 9 here
classes = classDict.keys() # number of unique classes
# print classes # e.g. [0.0, 1.0] Here
model = {}
for eachclass in classes:
# print eachclass
model[eachclass] = {}
model[eachclass]['mean'] = []
model[eachclass]['stddev'] = []
for eachFeatureNo in range(numberOfFeatures):
tempColumn = []
for eachList in classDict[eachclass]: #[[8.0, 2.0, 4.0, 1.0, 5.0, 1.0, 5.0, 4.0, 4.0],[5.0, 2.0, 3.0, 1.0, 6.0, 10.0, 5.0, 1.0, 1.0]]
tempColumn.append(eachList[eachFeatureNo]) # tempColumn will be having any particular column
# calculating mean for each feature
model[eachclass]['mean'].append(getMean(tempColumn))
# calculating stddev for each feature
model[eachclass]['stddev'].append(getStandardDeviation(tempColumn)) #{0.0: {'stddev': [5.95045670637252, 7.381656962769089, 6.375327172693769, 10.368169435393417, 6.718337695635912, 9.712648896960653, 4.850595587842532, 10.829255915816487, 6.950296458522511], 'mean': [7.396907216494846, 6.298969072164948, 6.396907216494846, 5.304123711340206, 5.402061855670103, 7.675257731958763, 5.649484536082475, 5.84020618556701, 2.716494845360825]}, 1.0: {'stddev': [2.9417041392828223, 1.0992736077481833, 1.2235673930589215, 1.0448518390406987, 1.0773665398362717, 1.8841692609247165, 1.3593450939697855, 1.4419923901764191, 0.21692609247088446], 'mean': [2.833898305084746, 1.4067796610169492, 1.5084745762711864, 1.4067796610169492, 2.1864406779661016, 1.3864406779661016, 2.2813559322033896, 1.3864406779661016, 1.064406779661017]}}
# WHATEVER IS STORED IN model IS CALLED THE NAIVE BAYES MODEL HERE
# IT LOOKS LIKE THIS
print "MODEL : ",model
# BASED ON model, WE WILL CALCULATE GAUSSIAN PROBABILITY DENSITY THAT WILL SERVE AS ULTIMATE CLASSIFIER.
# lets do testing
# Remaining 30% of data, separating for test
test = dataset[int(len(dataset)*0.7):]
print "Size of test data-set : ", len(test), " size of total data-set : ", len(dataset)
def predict(features,model):
"""
Will do prediction on test data based on model so generated.
:param features: [8.0, 2.0, 4.0, 1.0, 5.0, 1.0, 5.0, 4.0, 4.0] only features, no class
:param model: can be considered as model
:return:
"""
combinedProbability = {} # a dictionary where probability for each feature of each class will be saved
for eachclass in classes:
combinedProbability[eachclass] = []
for eachFeatureNo in range(numberOfFeatures):
for eachclass in classes:
meanForFeature = model[eachclass]['mean'][eachFeatureNo] # get mean for that particular feature of class from model
stddevForFeature = model[eachclass]['stddev'][eachFeatureNo] # get stddev for that particular feature of class from model
gpd = gaussianProbabilityDensity(features[eachFeatureNo],meanForFeature,stddevForFeature) #calculate gaussian Probability Density for that feature for both class
combinedProbability[eachclass].append(gpd) # store gaussian predicted probability for each class for each feature
#{0.0: [6.651930570966195e-17, 9.154229062240036e-131, 1.4689405384278686e-172, 0.0, 1.6667067014825224e-58, 8.24203399075415e-279, 1.0334229828147304e-15, 0.0, 7.123285287614845e-33], 1.0: [1.9757527520696125e-20, 0.07829567060266986, 0.27210727230597875, 0.3488418781229466, 0.25218666596082123, 1.1483036351939655e-06, 0.019160052488986935, 0.23687445105815633, 1.838890565762708]}
# print combinedProbability
# class probability is equal to multiplication of each individual feature probabilities
classprobability = [0] * len(classes)
for eachClass in combinedProbability.keys():
allFeatureProbability = 1
for eachProbability in combinedProbability[eachClass]:
allFeatureProbability = allFeatureProbability * eachProbability
# allFeatureProbability multiplying all feature probabilities
classprobability[int(eachClass)] = allFeatureProbability
return classprobability # probability per class, index 0 for class 0 and index 1 for class 1
originalClass = [] # will store original class
predictedClass= [] # will store predicted class
for eachtestsample in test: #iter through test
originalClass.append(int(eachtestsample[-1])) # getting original class for each test sample
onlyFeatures = eachtestsample[:-1] # getting features for each test sample
predictedProbability = predict(onlyFeatures, model) # predicted probability per class
# converting probability to class: if the probability for class 0 is higher, predictedClass = 0, else predictedClass = 1
if predictedProbability[0] > predictedProbability[1]:
predictedClass.append(0)
else:
predictedClass.append(1)
# you may print this to see original and predicted classes value
# print originalClass
# print predictedClass
# getting accuracy measures
functionalTestingInstance.createConfusionMatrix(originalClass, predictedClass, 0.95)
# Figures reported by the original run (before the stddev fix above); exact values may shift slightly now:
# False Positive : 3 , False Negative : 1 , True Positive : 160 , True Negative : 46 , Accuracy : 0.980952380952 , F1 Score : 0.987654320988
| gpl-3.0 |
yencarnacion/jaikuengine | .google_appengine/lib/django-1.5/django/contrib/gis/geoip/tests.py | 102 | 4766 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.conf import settings
from django.contrib.gis.geos import GEOSGeometry
from django.contrib.gis.geoip import GeoIP, GeoIPException
from django.utils import unittest
from django.utils import six
# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_DATA path should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat'.
class GeoIPTest(unittest.TestCase):
def test01_init(self):
"Testing GeoIP initialization."
g1 = GeoIP() # Everything inferred from GeoIP path
path = settings.GEOIP_PATH
g2 = GeoIP(path, 0) # Passing in data path explicitly.
g3 = GeoIP.open(path, 0) # MaxMind Python API syntax.
for g in (g1, g2, g3):
self.assertEqual(True, bool(g._country))
self.assertEqual(True, bool(g._city))
# Only passing in the location of one database.
city = os.path.join(path, 'GeoLiteCity.dat')
cntry = os.path.join(path, 'GeoIP.dat')
g4 = GeoIP(city, country='')
self.assertEqual(None, g4._country)
g5 = GeoIP(cntry, city='')
self.assertEqual(None, g5._city)
# Improper parameters.
bad_params = (23, 'foo', 15.23)
for bad in bad_params:
self.assertRaises(GeoIPException, GeoIP, cache=bad)
if isinstance(bad, six.string_types):
e = GeoIPException
else:
e = TypeError
self.assertRaises(e, GeoIP, bad, 0)
def test02_bad_query(self):
"Testing GeoIP query parameter checking."
cntry_g = GeoIP(city='<foo>')
# No city database available, these calls should fail.
self.assertRaises(GeoIPException, cntry_g.city, 'google.com')
self.assertRaises(GeoIPException, cntry_g.coords, 'yahoo.com')
# Non-string query should raise TypeError
self.assertRaises(TypeError, cntry_g.country_code, 17)
self.assertRaises(TypeError, cntry_g.country_name, GeoIP)
def test03_country(self):
"Testing GeoIP country querying methods."
g = GeoIP(city='<foo>')
fqdn = 'www.google.com'
addr = '12.215.42.19'
for query in (fqdn, addr):
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
g.country(query))
def test04_city(self):
"Testing GeoIP city querying methods."
g = GeoIP(country='<foo>')
addr = '128.249.1.1'
fqdn = 'tmc.edu'
for query in (fqdn, addr):
# Country queries should still work.
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
g.country(query))
# City information dictionary.
d = g.city(query)
self.assertEqual('USA', d['country_code3'])
self.assertEqual('Houston', d['city'])
self.assertEqual('TX', d['region'])
self.assertEqual(713, d['area_code'])
geom = g.geos(query)
self.assertTrue(isinstance(geom, GEOSGeometry))
lon, lat = (-95.4010, 29.7079)
lat_lon = g.lat_lon(query)
lat_lon = (lat_lon[1], lat_lon[0])
for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
self.assertAlmostEqual(lon, tup[0], 4)
self.assertAlmostEqual(lat, tup[1], 4)
def test05_unicode_response(self):
"Testing that GeoIP strings are properly encoded, see #16553."
g = GeoIP()
d = g.city('62.224.93.23')
self.assertEqual('Schümberg', d['city'])
def test06_unicode_query(self):
"Testing that GeoIP accepts unicode string queries, see #17059."
g = GeoIP()
d = g.country('whitehouse.gov')
self.assertEqual('US', d['country_code'])
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(GeoIPTest))
return s
def run(verbosity=1):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
| apache-2.0 |
DirtyUnicorns/android_external_chromium_org | mojo/public/tools/bindings/generators/mojom_python_generator.py | 25 | 10188 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates Python source files from a mojom.Module."""
import re
from itertools import ifilter
import mojom.generate.generator as generator
import mojom.generate.module as mojom
from mojom.generate.template_expander import UseJinja
_kind_to_type = {
mojom.BOOL: '_descriptor.TYPE_BOOL',
mojom.INT8: '_descriptor.TYPE_INT8',
mojom.UINT8: '_descriptor.TYPE_UINT8',
mojom.INT16: '_descriptor.TYPE_INT16',
mojom.UINT16: '_descriptor.TYPE_UINT16',
mojom.INT32: '_descriptor.TYPE_INT32',
mojom.UINT32: '_descriptor.TYPE_UINT32',
mojom.INT64: '_descriptor.TYPE_INT64',
mojom.UINT64: '_descriptor.TYPE_UINT64',
mojom.FLOAT: '_descriptor.TYPE_FLOAT',
mojom.DOUBLE: '_descriptor.TYPE_DOUBLE',
mojom.STRING: '_descriptor.TYPE_STRING',
mojom.NULLABLE_STRING: '_descriptor.TYPE_NULLABLE_STRING',
mojom.HANDLE: '_descriptor.TYPE_HANDLE',
mojom.DCPIPE: '_descriptor.TYPE_HANDLE',
mojom.DPPIPE: '_descriptor.TYPE_HANDLE',
mojom.MSGPIPE: '_descriptor.TYPE_HANDLE',
mojom.SHAREDBUFFER: '_descriptor.TYPE_HANDLE',
mojom.NULLABLE_HANDLE: '_descriptor.TYPE_NULLABLE_HANDLE',
mojom.NULLABLE_DCPIPE: '_descriptor.TYPE_NULLABLE_HANDLE',
mojom.NULLABLE_DPPIPE: '_descriptor.TYPE_NULLABLE_HANDLE',
mojom.NULLABLE_MSGPIPE: '_descriptor.TYPE_NULLABLE_HANDLE',
mojom.NULLABLE_SHAREDBUFFER: '_descriptor.TYPE_NULLABLE_HANDLE',
}
# int64 integers are not handled by array.array. int64/uint64 array are
# supported but storage is not optimized (ie. they are plain python list, not
# array.array)
_kind_to_typecode_for_native_array = {
mojom.INT8: 'b',
mojom.UINT8: 'B',
mojom.INT16: 'h',
mojom.UINT16: 'H',
mojom.INT32: 'i',
mojom.UINT32: 'I',
mojom.FLOAT: 'f',
mojom.DOUBLE: 'd',
}
_kind_to_typecode = dict(_kind_to_typecode_for_native_array)
_kind_to_typecode.update({
mojom.INT64: 'q',
mojom.UINT64: 'Q',
mojom.HANDLE: 'i',
mojom.DCPIPE: 'i',
mojom.DPPIPE: 'i',
mojom.MSGPIPE: 'i',
mojom.SHAREDBUFFER: 'i',
mojom.NULLABLE_HANDLE: 'i',
mojom.NULLABLE_DCPIPE: 'i',
mojom.NULLABLE_DPPIPE: 'i',
mojom.NULLABLE_MSGPIPE: 'i',
mojom.NULLABLE_SHAREDBUFFER: 'i',
})
def NameToComponent(name):
# insert '_' between anything and a Title name (e.g, HTTPEntry2FooBar ->
# HTTP_Entry2_FooBar)
name = re.sub('([^_])([A-Z][^A-Z_]+)', r'\1_\2', name)
# insert '_' between non upper and start of upper blocks (e.g.,
# HTTP_Entry2_FooBar -> HTTP_Entry2_Foo_Bar)
name = re.sub('([^A-Z_])([A-Z])', r'\1_\2', name)
return [x.lower() for x in name.split('_')]
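# Example: NameToComponent('HTTPEntry2FooBar') passes through
# 'HTTP_Entry2Foo_Bar' and then 'HTTP_Entry2_Foo_Bar', giving
# ['http', 'entry2', 'foo', 'bar'].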
def UpperCamelCase(name):
return ''.join([x.capitalize() for x in NameToComponent(name)])
def CamelCase(name):
uccc = UpperCamelCase(name)
return uccc[0].lower() + uccc[1:]
def ConstantStyle(name):
components = NameToComponent(name)
if components[0] == 'k':
components = components[1:]
return '_'.join([x.upper() for x in components])
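# Examples: UpperCamelCase('network_error_code') -> 'NetworkErrorCode',
# CamelCase('network_error_code') -> 'networkErrorCode', and
# ConstantStyle('kFooBar') -> 'FOO_BAR' (the leading 'k' is dropped).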
def GetNameForElement(element):
if (mojom.IsEnumKind(element) or mojom.IsInterfaceKind(element) or
mojom.IsStructKind(element)):
return UpperCamelCase(element.name)
if isinstance(element, mojom.EnumValue):
return (GetNameForElement(element.enum) + '.' +
ConstantStyle(element.name))
if isinstance(element, (mojom.NamedValue,
mojom.Constant)):
return ConstantStyle(element.name)
raise Exception('Unexpected element: %s' % element)
def ExpressionToText(token):
if isinstance(token, (mojom.EnumValue, mojom.NamedValue)):
return str(token.computed_value)
if isinstance(token, mojom.BuiltinValue):
if token.value == 'double.INFINITY' or token.value == 'float.INFINITY':
return 'float(\'inf\')';
if (token.value == 'double.NEGATIVE_INFINITY' or
token.value == 'float.NEGATIVE_INFINITY'):
return 'float(\'-inf\')'
if token.value == 'double.NAN' or token.value == 'float.NAN':
      return 'float(\'nan\')'
if token in ['true', 'false']:
return str(token == 'true')
return token
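# Only booleans and named values are rewritten; plain literals pass through,
# e.g.:
#
#   ExpressionToText('true')  # -> 'True'
#   ExpressionToText('42')    # -> '42'
#   # a mojom.BuiltinValue with value 'double.INFINITY' -> "float('inf')"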
def GetStructClass(kind):
name = []
if kind.imported_from:
name.append(kind.imported_from['python_module'])
name.append(GetNameForElement(kind))
return '.'.join(name)
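# For a struct imported from another mojom file this yields a qualified name,
# e.g. 'foo_mojom.SomeStruct' (module and struct names hypothetical); locally
# defined structs yield just 'SomeStruct'.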
def GetFieldType(kind, field=None):
if mojom.IsAnyArrayKind(kind):
arguments = []
if kind.kind in _kind_to_typecode_for_native_array:
      arguments.append('%r' % _kind_to_typecode_for_native_array[kind.kind])
elif kind.kind != mojom.BOOL:
arguments.append(GetFieldType(kind.kind))
if mojom.IsNullableKind(kind):
arguments.append('nullable=True')
if mojom.IsFixedArrayKind(kind):
arguments.append('length=%d' % kind.length)
array_type = 'GenericArrayType'
if kind.kind == mojom.BOOL:
array_type = 'BooleanArrayType'
elif kind.kind in _kind_to_typecode_for_native_array:
array_type = 'NativeArrayType'
return '_descriptor.%s(%s)' % (array_type, ', '.join(arguments))
if mojom.IsStructKind(kind):
arguments = [ GetStructClass(kind) ]
if mojom.IsNullableKind(kind):
arguments.append('nullable=True')
return '_descriptor.StructType(%s)' % ', '.join(arguments)
if mojom.IsEnumKind(kind):
return GetFieldType(mojom.INT32)
return _kind_to_type.get(kind, '_descriptor.TYPE_NONE')
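# A few descriptor strings this resolves to (enums are encoded as int32 on
# the wire, hence the TYPE_INT32 fallback above):
#
#   GetFieldType(mojom.INT32)   # -> '_descriptor.TYPE_INT32'
#   GetFieldType(mojom.STRING)  # -> '_descriptor.TYPE_STRING'
#   # an array of mojom.UINT8   -> "_descriptor.NativeArrayType('B')"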
def GetFieldDescriptor(packed_field):
field = packed_field.field
class_name = 'SingleFieldGroup'
if field.kind == mojom.BOOL:
class_name = 'FieldDescriptor'
arguments = [ '%r' % field.name ]
arguments.append(GetFieldType(field.kind, field))
arguments.append(str(packed_field.field.ordinal))
if field.default:
if mojom.IsStructKind(field.kind):
arguments.append('default_value=True')
else:
arguments.append('default_value=%s' % ExpressionToText(field.default))
return '_descriptor.%s(%s)' % (class_name, ', '.join(arguments))
def GetFieldGroup(byte):
if len(byte.packed_fields) > 1:
descriptors = map(GetFieldDescriptor, byte.packed_fields)
return '_descriptor.BooleanGroup([%s])' % ', '.join(descriptors)
assert len(byte.packed_fields) == 1
return GetFieldDescriptor(byte.packed_fields[0])
def ComputeStaticValues(module):
in_progress = set()
computed = set()
def GetComputedValue(named_value):
if isinstance(named_value, mojom.EnumValue):
field = next(ifilter(lambda field: field.name == named_value.name,
named_value.enum.fields), None)
if not field:
raise RuntimeError(
'Unable to get computed value for field %s of enum %s' %
(named_value.name, named_value.enum.name))
if field not in computed:
ResolveEnum(named_value.enum)
return field.computed_value
elif isinstance(named_value, mojom.ConstantValue):
ResolveConstant(named_value.constant)
named_value.computed_value = named_value.constant.computed_value
return named_value.computed_value
    else:
      raise RuntimeError('Unexpected named value: %s' % named_value)
def ResolveConstant(constant):
if constant in computed:
return
if constant in in_progress:
raise RuntimeError('Circular dependency for constant: %s' % constant.name)
in_progress.add(constant)
if isinstance(constant.value, (mojom.EnumValue, mojom.ConstantValue)):
computed_value = GetComputedValue(constant.value)
else:
computed_value = ExpressionToText(constant.value)
constant.computed_value = computed_value
in_progress.remove(constant)
computed.add(constant)
def ResolveEnum(enum):
def ResolveEnumField(enum, field, default_value):
if field in computed:
return
if field in in_progress:
raise RuntimeError('Circular dependency for enum: %s' % enum.name)
in_progress.add(field)
if field.value:
if isinstance(field.value, mojom.EnumValue):
computed_value = GetComputedValue(field.value)
elif isinstance(field.value, str):
computed_value = int(field.value, 0)
else:
raise RuntimeError('Unexpected value: %s' % field.value)
else:
computed_value = default_value
field.computed_value = computed_value
in_progress.remove(field)
computed.add(field)
current_value = 0
for field in enum.fields:
ResolveEnumField(enum, field, current_value)
current_value = field.computed_value + 1
for constant in module.constants:
ResolveConstant(constant)
for enum in module.enums:
ResolveEnum(enum)
for struct in module.structs:
for constant in struct.constants:
ResolveConstant(constant)
for enum in struct.enums:
ResolveEnum(enum)
for field in struct.fields:
if isinstance(field.default, (mojom.ConstantValue, mojom.EnumValue)):
field.default.computed_value = GetComputedValue(field.default)
return module
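# Enum resolution above mirrors C-style enums: a field without an explicit
# value takes the previous field's computed value plus one, starting at zero.
# E.g. for enum { A, B = 5, C } the computed values are A=0, B=5, C=6.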
class Generator(generator.Generator):
python_filters = {
'expression_to_text': ExpressionToText,
'field_group': GetFieldGroup,
'name': GetNameForElement,
}
@UseJinja('python_templates/module.py.tmpl', filters=python_filters)
def GeneratePythonModule(self):
return {
'imports': self.GetImports(),
'enums': self.module.enums,
'module': ComputeStaticValues(self.module),
'structs': self.GetStructs(),
}
def GenerateFiles(self, args):
self.Write(self.GeneratePythonModule(),
'%s.py' % self.module.name.replace('.mojom', '_mojom'))
def GetImports(self):
for each in self.module.imports:
each['python_module'] = each['module_name'].replace('.mojom', '_mojom')
return self.module.imports
def GetJinjaParameters(self):
return {
'lstrip_blocks': True,
'trim_blocks': True,
}
| bsd-3-clause |
toshywoshy/ansible | lib/ansible/modules/network/fortios/fortios_system_automation_action.py | 7 | 15263 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_automation_action
short_description: Action for automation stitches in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system feature and automation_action category.
      Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_automation_action:
description:
- Action for automation stitches.
default: null
type: dict
suboptions:
action_type:
description:
- Action type.
type: str
choices:
- email
- ios-notification
- alert
- disable-ssid
- quarantine
- quarantine-forticlient
- ban-ip
- aws-lambda
- webhook
aws_api_id:
description:
- AWS API Gateway ID.
type: str
aws_api_key:
description:
- AWS API Gateway API key.
type: str
aws_api_path:
description:
- AWS API Gateway path.
type: str
aws_api_stage:
description:
- AWS API Gateway deployment stage name.
type: str
aws_domain:
description:
- AWS domain.
type: str
aws_region:
description:
- AWS region.
type: str
delay:
description:
- Delay before execution (in seconds).
type: int
email_subject:
description:
- Email subject.
type: str
email_to:
description:
- Email addresses.
type: list
suboptions:
name:
description:
- Email address.
required: true
type: str
headers:
description:
- Request headers.
type: list
suboptions:
header:
description:
- Request header.
required: true
type: str
http_body:
description:
- Request body (if necessary). Should be serialized json string.
type: str
method:
description:
- Request method (GET, POST or PUT).
type: str
choices:
- post
- put
- get
minimum_interval:
description:
- Limit execution to no more than once in this interval (in seconds).
type: int
name:
description:
- Name.
required: true
type: str
port:
description:
- Protocol port.
type: int
protocol:
description:
- Request protocol.
type: str
choices:
- http
- https
required:
description:
- Required in action chain.
type: str
choices:
- enable
- disable
uri:
description:
- Request API URI.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Action for automation stitches.
fortios_system_automation_action:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_automation_action:
action_type: "email"
aws_api_id: "<your_own_value>"
aws_api_key: "<your_own_value>"
aws_api_path: "<your_own_value>"
aws_api_stage: "<your_own_value>"
aws_domain: "<your_own_value>"
aws_region: "<your_own_value>"
delay: "10"
email_subject: "<your_own_value>"
email_to:
-
name: "default_name_13"
headers:
-
header: "<your_own_value>"
http_body: "<your_own_value>"
method: "post"
minimum_interval: "18"
name: "default_name_19"
port: "20"
protocol: "http"
required: "enable"
uri: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_automation_action_data(json):
option_list = ['action_type', 'aws_api_id', 'aws_api_key',
'aws_api_path', 'aws_api_stage', 'aws_domain',
'aws_region', 'delay', 'email_subject',
'email_to', 'headers', 'http_body',
'method', 'minimum_interval', 'name',
'port', 'protocol', 'required',
'uri']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
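# A minimal sketch of the filtering (hypothetical input): None values and
# keys outside option_list are dropped.
#
#   filter_system_automation_action_data(
#       {'name': 'a1', 'delay': None, 'unknown_key': 1})
#   # -> {'name': 'a1'}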
def underscore_to_hyphen(data):
if isinstance(data, list):
for i, elem in enumerate(data):
data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
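# Keys are converted recursively, values are left untouched, e.g.:
#
#   underscore_to_hyphen({'action_type': 'email',
#                         'email_to': [{'name': 'a@example.com'}]})
#   # -> {'action-type': 'email', 'email-to': [{'name': 'a@example.com'}]}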
def system_automation_action(data, fos):
vdom = data['vdom']
state = data['state']
system_automation_action_data = data['system_automation_action']
filtered_data = underscore_to_hyphen(filter_system_automation_action_data(system_automation_action_data))
if state == "present":
return fos.set('system',
'automation-action',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('system',
'automation-action',
mkey=filtered_data['name'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
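# Deleting an already-absent object counts as success, e.g.:
#
#   is_successful_status({'status': 'error',
#                         'http_method': 'DELETE', 'http_status': 404})
#   # -> True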
def fortios_system(data, fos):
if data['system_automation_action']:
resp = system_automation_action(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"system_automation_action": {
"required": False, "type": "dict", "default": None,
"options": {
"action_type": {"required": False, "type": "str",
"choices": ["email", "ios-notification", "alert",
"disable-ssid", "quarantine", "quarantine-forticlient",
"ban-ip", "aws-lambda", "webhook"]},
"aws_api_id": {"required": False, "type": "str"},
"aws_api_key": {"required": False, "type": "str"},
"aws_api_path": {"required": False, "type": "str"},
"aws_api_stage": {"required": False, "type": "str"},
"aws_domain": {"required": False, "type": "str"},
"aws_region": {"required": False, "type": "str"},
"delay": {"required": False, "type": "int"},
"email_subject": {"required": False, "type": "str"},
"email_to": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"headers": {"required": False, "type": "list",
"options": {
"header": {"required": True, "type": "str"}
}},
"http_body": {"required": False, "type": "str"},
"method": {"required": False, "type": "str",
"choices": ["post", "put", "get"]},
"minimum_interval": {"required": False, "type": "int"},
"name": {"required": True, "type": "str"},
"port": {"required": False, "type": "int"},
"protocol": {"required": False, "type": "str",
"choices": ["http", "https"]},
"required": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uri": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
chokribr/invenio | invenio/modules/access/control.py | 5 | 66182 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio Access Control Admin."""
from __future__ import print_function
import urlparse
from intbitset import intbitset
from invenio.config import CFG_SITE_ADMIN_EMAIL, CFG_SITE_LANG, CFG_SITE_RECORD
from invenio.ext import principal
from invenio.ext.sqlalchemy import db
from invenio.legacy.dbquery import ProgrammingError, run_sql
from invenio.modules.access.firerole import (
acc_firerole_check_user, compile_role_definition, deserialize,
load_role_definition, serialize
)
from invenio.modules.access.local_config import (
CFG_ACC_ACTIVITIES_URLS, CFG_ACC_EMPTY_ROLE_DEFINITION_SER,
CFG_ACC_EMPTY_ROLE_DEFINITION_SRC, DEF_AUTHS, DEF_ROLES, DEF_USERS,
DELEGATEADDUSERROLE, SUPERADMINROLE
)
from invenio.modules.access.models import AccACTION, AccAuthorization, \
UserAccROLE
from six import iteritems
CFG_SUPERADMINROLE_ID = 0
try:
id_tmp = run_sql('SELECT id FROM accROLE WHERE name=%s',
(SUPERADMINROLE, ))
if id_tmp:
CFG_SUPERADMINROLE_ID = int(id_tmp[0][0])
except Exception:
pass
# ACTIONS
def acc_add_action(name_action='', description='', optional='no',
*allowedkeywords):
"""Create new entry in accACTION for an action.
:param name_action: name of the new action, must be unique
:param allowedkeywords: a list of allowedkeywords
    :return: (True, name_action, description, allowedkeywords string,
        optional) on success, or 0 in case of failure
"""
keystr = ''
    # action with this name already exists, return 0
if db.session.query(db.exists()).filter(
AccACTION.name == name_action).scalar():
return 0
# create keyword string
for value in allowedkeywords:
if keystr:
keystr += ','
keystr += value
if not allowedkeywords:
optional = 'no'
# insert the new entry
try:
a = AccACTION(name=name_action, description=description,
allowedkeywords=keystr, optional=optional)
db.session.add(a)
db.session.commit()
return True, name_action, description, keystr, optional
except ProgrammingError:
return 0
def acc_delete_action(id_action=0, name_action=0):
"""delete action in accACTION according to id, or secondly name.
entries in accROLE_accACTION_accARGUMENT will also be removed.
    id_action - id of action to be deleted, preferred variable
name_action - this is used if id_action is not given
if the name or id is wrong, the function does nothing
"""
id_action = id_action or acc_get_action_id(name_action=name_action)
if not id_action:
return 0
# delete the action
if run_sql("""DELETE FROM accACTION WHERE id=%s""", (id_action, )):
# delete all entries related
return 1 + run_sql("""DELETE FROM accROLE_accACTION_accARGUMENT WHERE
id_accACTION=%s""", (id_action, ))
else:
return 0
def acc_verify_action(name_action='', description='', allowedkeywords='',
dummy=''):
"""check if all the values of a given action are the same as
those in accACTION in the database. self explanatory parameters.
return id if identical, 0 if not. """
id_action = acc_get_action_id(name_action=name_action)
if not id_action:
return 0
res_desc = acc_get_action_description(id_action=id_action)
res_keys = acc_get_action_keywords_string(id_action=id_action)
bool_desc = res_desc == description and 1 or 0
bool_keys = res_keys == allowedkeywords and 1 or 0
bool_opti = acc_get_action_is_optional(id_action=id_action)
return bool_desc and bool_keys and bool_opti and id_action or 0
def acc_update_action(id_action=0, name_action='', verbose=0, **update):
"""try to change the values of given action details.
if there is no change nothing is done.
some changes require to update other parts of the database.
id_action - id of the action to change
name_action - if no id_action is given try to find it using this name
    **update - dictionary containing keywords: description,
allowedkeywords and/or
optional
other keywords are ignored """
id_action = id_action or acc_get_action_id(name_action=name_action)
if not id_action:
return 0
try:
if 'description' in update:
# change the description, no other effects
if verbose:
print('desc')
run_sql("""UPDATE accACTION SET description = %s WHERE id = %s""",
(update['description'], id_action))
if 'allowedkeywords' in update:
# change allowedkeywords
if verbose:
print('keys')
# check if changing allowedkeywords or not
if run_sql("""SELECT id FROM accACTION
WHERE id = %s AND allowedkeywords != %s """,
(id_action, update['allowedkeywords'])):
# change allowedkeywords
if verbose:
print(' changing')
run_sql("""UPDATE accACTION SET allowedkeywords = %s
WHERE id = %s""", (update['allowedkeywords'], id_action))
# delete entries, but keep optional authorizations
# if there still is keywords
if verbose:
print(' deleting auths')
run_sql("""DELETE FROM accROLE_accACTION_accARGUMENT
WHERE id_accACTION = %s %s """,
(id_action, update['allowedkeywords'] and
'AND id_accARGUMENT != -1' or ''))
if 'optional' in update:
            # check if changing optional or not
if verbose:
print('optional')
if run_sql("""SELECT id FROM accACTION
WHERE id = %s AND optional != %s """,
(id_action, update['optional'])):
# change optional
if verbose:
print(' changing')
run_sql("""UPDATE accACTION SET optional = %s WHERE id = %s""",
(update['optional'], id_action))
# setting it to no, delete authorizations with
# optional arguments
if update['optional'] == 'no':
if verbose:
print(' deleting optional')
run_sql("""DELETE FROM accROLE_accACTION_accARGUMENT
WHERE id_accACTION = %s AND
id_accARGUMENT = -1 AND
argumentlistid = -1 """, (id_action, ))
except ProgrammingError:
return 0
return 1
# ROLES
def acc_add_role(name_role, description,
firerole_def_ser = CFG_ACC_EMPTY_ROLE_DEFINITION_SER,
firerole_def_src = CFG_ACC_EMPTY_ROLE_DEFINITION_SRC):
"""add a new role to accROLE in the database.
name_role - name of the role, must be unique
description - text to describe the role
firerole_def_ser - compiled firewall like role definition
firerole_def_src - firewall like role definition sources
"""
if not run_sql("""SELECT name FROM accROLE WHERE name = %s""", (name_role, )):
res = run_sql("""INSERT INTO accROLE (name, description,
firerole_def_ser, firerole_def_src)
VALUES (%s, %s, %s, %s)""",
(name_role, description, firerole_def_ser, firerole_def_src))
return res, name_role, description, firerole_def_src
return 0
def acc_is_role(name_action, **arguments):
""" check whether the role which allows action name_action on arguments
exists (different from SUPERADMINROLE)
action_name - name of the action
arguments - arguments for authorization"""
# first check if an action exists with this name
id_action = acc_get_action_id(name_action)
arole = run_sql("SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT WHERE id_accACTION=%s AND argumentlistid <= 0 LIMIT 1", (id_action, ), 1, run_on_slave=True)
if arole:
return True
other_roles_to_check = run_sql("SELECT id_accROLE, keyword, value, argumentlistid FROM accROLE_accACTION_accARGUMENT JOIN accARGUMENT ON id_accARGUMENT=id WHERE id_accACTION=%s AND argumentlistid > 0", (id_action, ), run_on_slave=True)
other_roles_to_check_dict = {}
for id_accROLE, keyword, value, argumentlistid in other_roles_to_check:
try:
other_roles_to_check_dict[(id_accROLE, argumentlistid)][keyword] = value
except KeyError:
other_roles_to_check_dict[(id_accROLE, argumentlistid)] = {keyword : value}
for ((id_accROLE, argumentlistid), stored_arguments) in iteritems(other_roles_to_check_dict):
for key, value in iteritems(stored_arguments):
if (value != arguments.get(key, '*') != '*') and value != '*':
break
else:
return True
return False
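# A hypothetical call sketch (action and argument names are illustrative):
#
#   acc_is_role('runbibedit', collection='Theses')
#   # True if some role grants 'runbibedit' either unconditionally
#   # (argumentlistid <= 0) or with stored arguments matching
#   # collection='Theses', where a stored value of '*' is a wildcard.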
def acc_delete_role(id_role=0, name_role=0):
""" delete role entry in table accROLE and all references from
other tables.
    id_role - id of role to be deleted, preferred variable
name_role - this is used if id_role is not given
note: you can't delete the SUPERADMINROLE
"""
count = 0
id_role = id_role or acc_get_role_id(name_role=name_role)
if SUPERADMINROLE == acc_get_role_name(id_role):
return 0
# try to delete
if run_sql("""DELETE FROM accROLE WHERE id = %s """, (id_role, )):
# delete everything related
# authorization entries
count += 1 + run_sql("""DELETE FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %s""",
(id_role, ))
# connected users
count += run_sql("""DELETE FROM user_accROLE WHERE id_accROLE = %s""",
(id_role, ))
# delegated rights over the role
rolenames = run_sql("""SELECT name FROM accROLE""")
# string of rolenames
roles_str = ''
for (name, ) in rolenames:
roles_str += (roles_str and ',' or '') + \
'"%s"' % (name, )
# arguments with non existing rolenames
not_valid = run_sql("""SELECT ar.id FROM accARGUMENT ar
WHERE keyword = 'role' AND value NOT IN (%s)""" % (roles_str, ))
if not_valid:
nv_str = ''
for (id_value, ) in not_valid:
nv_str += (nv_str and ',' or '') + \
'%s' % (id_value, )
# delete entries
count += run_sql("""DELETE FROM accROLE_accACTION_accARGUMENT
WHERE id_accACTION = %s AND id_accARGUMENT IN (%s) """ %
(acc_get_action_id(name_action=DELEGATEADDUSERROLE), nv_str))
# return number of deletes
return count
def acc_update_role(id_role=0, name_role='', dummy=0, description='', \
firerole_def_ser=CFG_ACC_EMPTY_ROLE_DEFINITION_SER, \
firerole_def_src=CFG_ACC_EMPTY_ROLE_DEFINITION_SRC):
"""try to change the description.
id_role - id of the role to change
name_role - use this to find id if not present
verbose - extra output
description - new description
firerole_def_ser - compiled firewall like role definition
firerole_def_src - firewall like role definition
"""
id_role = id_role or acc_get_role_id(name_role=name_role)
if not id_role:
return 0
return run_sql("""UPDATE accROLE SET description = %s,
firerole_def_ser = %s, firerole_def_src = %s
WHERE id = %s""", (description, firerole_def_ser,
firerole_def_src, id_role))
# CONNECTIONS BETWEEN USER AND ROLE
def acc_add_user_role(id_user=0, id_role=0, email='', name_role='',
expiration='9999-12-31 23:59:59'):
""" this function adds a new entry to table user_accROLE and returns it
id_user, id_role - self explanatory
email - email of the user
name_role - name of the role, to be used instead of id. """
id_user = id_user or acc_get_user_id(email=email)
id_role = id_role or acc_get_role_id(name_role=name_role)
# check if the id_role exists
if id_role and not acc_get_role_name(id_role=id_role):
return 0
# check that the user actually exist
if not acc_get_user_email(id_user=id_user):
return 0
# control if existing entry
if run_sql("""SELECT id_user FROM user_accROLE WHERE id_user = %s AND
id_accROLE = %s""", (id_user, id_role)):
run_sql("""UPDATE user_accROLE SET expiration=%s WHERE id_user=%s AND
id_accROLE=%s AND expiration<%s""",
(expiration, id_user, id_role, expiration) )
return id_user, id_role, 0
else:
run_sql("""INSERT INTO user_accROLE (id_user, id_accROLE, expiration)
VALUES (%s, %s, %s) """, (id_user, id_role, expiration))
return id_user, id_role, 1
def acc_delete_user_role(id_user, id_role=0, name_role=0):
""" function deletes entry from user_accROLE and reports the success.
id_user - user in database
    id_role - role in the database, preferred parameter
name_role - can also delete role on background of role name. """
# need to find id of the role
id_role = id_role or acc_get_role_id(name_role=name_role)
# number of deleted entries will be returned (0 or 1)
return run_sql("""DELETE FROM user_accROLE WHERE id_user = %s
AND id_accROLE = %s """, (id_user, id_role))
# ARGUMENTS
def acc_add_argument(keyword='', value=''):
""" function to insert an argument into table accARGUMENT.
if it exists the old id is returned, if it does not the entry is
created and the new id is returned.
keyword - inserted in keyword column
value - inserted in value column. """
# if one of the values are missing, return 0
if not keyword or not value:
return 0
# try to return id of existing argument
try:
return run_sql("""SELECT id from accARGUMENT where keyword = %s and
value = %s""", (keyword, value))[0][0]
# return id of newly added argument
except IndexError:
return run_sql("""INSERT INTO accARGUMENT (keyword, value)
VALUES (%s, %s) """, (keyword, value))
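# Idempotent by construction: a second call with the same pair returns the id
# of the row created by the first call, e.g. (hypothetical id):
#
#   acc_add_argument('collection', 'Theses')  # -> 42 (newly inserted)
#   acc_add_argument('collection', 'Theses')  # -> 42 (existing row)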
def acc_delete_argument(id_argument):
""" functions deletes one entry in table accARGUMENT.
the success of the operation is returned.
id_argument - id of the argument to be deleted"""
# return number of deleted entries, 1 or 0
return run_sql("""DELETE FROM accARGUMENT WHERE id = %s """,
(id_argument, ))
def acc_delete_argument_names(keyword='', value=''):
"""delete argument according to keyword and value,
send call to another function..."""
# one of the values is missing
if not keyword or not value:
return 0
# find id of the entry
try:
return run_sql("""SELECT id from accARGUMENT where keyword = %s
and value = %s""", (keyword, value))[0][0]
except IndexError:
return 0
# AUTHORIZATIONS
# ADD WITH names and keyval list
def acc_add_authorization(name_role='', name_action='', optional=0, **keyval):
""" function inserts entries in accROLE_accACTION_accARGUMENT if all
references are valid.
this function is made specially for the webaccessadmin web interface.
always inserting only one authorization.
    name_role, name_action - self explanatory
optional - if this is set to 1, check that function can have optional
arguments and add with arglistid -1 and id_argument -1
**keyval - dictionary of keyword=value pairs, used to find ids. """
inserted = []
# check that role and action exist
id_role = run_sql("""SELECT id FROM accROLE where name = %s""",
(name_role, ))
action_details = run_sql("""SELECT id,name,description,allowedkeywords,optional from accACTION where name = %s """,
(name_action, ))
if not id_role or not action_details:
return []
# get role id and action id and details
id_role, id_action = id_role[0][0], action_details[0][0]
allowedkeywords_str = action_details[0][3]
allowedkeywords_lst = acc_get_action_keywords(id_action=id_action)
optional_action = action_details[0][4] == 'yes' and 1 or 0
optional = int(optional)
# this action does not take arguments
if not optional and not keyval:
        # cannot add if the user is making a mistake
if allowedkeywords_str:
return []
# check if entry exists
if not run_sql("""SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND id_accACTION = %s AND
argumentlistid = %s AND id_accARGUMENT = %s""",
(id_role, id_action, 0, 0)):
# insert new authorization
run_sql("""INSERT INTO accROLE_accACTION_accARGUMENT (id_accROLE,
id_accACTION, id_accARGUMENT, argumentlistid)
VALUES (%s, %s, %s, %s)""", (id_role, id_action, 0, 0))
return [[id_role, id_action, 0, 0], ]
return []
# try to add authorization without the optional arguments
elif optional:
# optional not allowed for this action
if not optional_action:
return []
# check if authorization already exists
if not run_sql("""SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND
id_accACTION = %s AND
id_accARGUMENT = -1 AND
argumentlistid = -1""" % (id_role, id_action, )):
# insert new authorization
run_sql("""INSERT INTO accROLE_accACTION_accARGUMENT (id_accROLE,
id_accACTION, id_accARGUMENT, argumentlistid)
VALUES (%s, %s, -1, -1) """, (id_role, id_action))
return [[id_role, id_action, -1, -1], ]
return []
else:
# regular authorization
# get list of ids, if they don't exist, create arguments
id_arguments = []
argstr = ''
for key in keyval.keys():
if key not in allowedkeywords_lst:
return []
id_argument = (acc_get_argument_id(key, keyval[key])
or
run_sql("""INSERT INTO accARGUMENT (keyword, value) values
(%s, %s) """, (key, keyval[key])))
id_arguments.append(id_argument)
argstr += (argstr and ',' or '') + str(id_argument)
# check if equal authorization exists
for (id_trav, ) in run_sql("""SELECT DISTINCT argumentlistid FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %s AND
id_accACTION = %s """, (id_role, id_action)):
listlength = run_sql("""SELECT COUNT(*) FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %%s AND
id_accACTION = %%s AND argumentlistid = %%s AND
id_accARGUMENT IN (%s) """ % (argstr),
(id_role, id_action, id_trav))[0][0]
notlist = run_sql("""SELECT COUNT(*) FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %%s AND
id_accACTION = %%s AND argumentlistid = %%s AND
id_accARGUMENT NOT IN (%s) """ % (argstr),
(id_role, id_action, id_trav))[0][0]
# this means that a duplicate already exists
if not notlist and listlength == len(id_arguments):
return []
# find new arglistid, highest + 1
try:
arglistid = 1 + run_sql("""SELECT MAX(argumentlistid) FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %s
AND id_accACTION = %s""", (id_role, id_action))[0][0]
except (IndexError, TypeError):
arglistid = 1
if arglistid <= 0:
arglistid = 1
# insert
for id_argument in id_arguments:
run_sql("""INSERT INTO accROLE_accACTION_accARGUMENT (id_accROLE,
id_accACTION, id_accARGUMENT, argumentlistid)
VALUES (%s, %s, %s, %s) """,
(id_role, id_action, id_argument, arglistid))
inserted.append([id_role, id_action, id_argument, arglistid])
return inserted
def acc_add_role_action_arguments(id_role=0, id_action=0, arglistid=-1,
optional=0, verbose=0, id_arguments=[]):
""" function inserts entries in accROLE_accACTION_accARGUMENT if all
references are valid.
id_role, id_action - self explanatory
arglistid - argumentlistid for the inserted entries
if -1: create new group
other values: add to this group, if it exists or not
optional - if this is set to 1, check that function can have
optional arguments and add with arglistid -1 and
id_argument -1
verbose - extra output
id_arguments - list of arguments to add to group."""
inserted = []
if verbose:
print('ids: starting')
if verbose:
print('ids: checking ids')
# check that all the ids are valid and reference something...
if not run_sql("""SELECT id FROM accROLE WHERE id = %s""", (id_role, )):
return 0
if verbose:
print('ids: get allowed keywords')
# check action exist and get allowed keywords
try:
allowedkeys = acc_get_action_keywords(id_action=id_action)
# allowedkeys = run_sql("""SELECT id FROM accACTION WHERE id = %s""" %
# (id_action, ))[0][3].split(',')
except (IndexError, AttributeError):
return 0
if verbose:
print('ids: is it optional')
# action with optional arguments
if optional:
if verbose:
print('ids: yes - optional')
if not acc_get_action_is_optional(id_action=id_action):
return []
if verbose:
print('ids: run query to check if exists')
if not run_sql("""SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND
id_accACTION = %s AND
id_accARGUMENT = -1 AND
argumentlistid = -1""", (id_role, id_action, )):
if verbose:
print('ids: does not exist')
run_sql("""INSERT INTO accROLE_accACTION_accARGUMENT (id_accROLE,
id_accACTION, id_accARGUMENT, argumentlistid)
VALUES (%s, %s, -1, -1) """, (id_role, id_action))
return ((id_role, id_action, -1, -1), )
if verbose:
print('ids: exists')
return []
if verbose:
print('ids: check if not arguments')
# action without arguments
if not allowedkeys:
if verbose:
print('ids: not arguments')
if not run_sql("""SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND id_accACTION = %s AND
argumentlistid = %s AND id_accARGUMENT = %s""",
(id_role, id_action, 0, 0)):
if verbose:
print('ids: try to insert')
run_sql("""INSERT INTO accROLE_accACTION_accARGUMENT (id_accROLE,
id_accACTION, id_accARGUMENT, argumentlistid)
VALUES (%s, %s, %s, %s)""", (id_role, id_action, 0, 0))
return ((id_role, id_action, 0, 0), )
else:
if verbose:
print('ids: already existed')
return 0
else:
if verbose:
print('ids: arguments exist')
argstr = ''
# check that the argument exists, and that it is a valid key
if verbose:
print('ids: checking all the arguments')
for id_argument in id_arguments:
res_arg = run_sql("""SELECT id,keyword,value FROM accARGUMENT WHERE id = %s""",
(id_argument, ))
if not res_arg or res_arg[0][1] not in allowedkeys:
return 0
else:
if argstr:
argstr += ','
argstr += '%s' % (id_argument, )
# arglistid = -1 means that the user wants a new group
if verbose:
print('ids: find arglistid')
if arglistid < 0:
# check if such single group already exists
for (id_trav, ) in run_sql("""SELECT DISTINCT argumentlistid FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %s AND
id_accACTION = %s""", (id_role, id_action)):
listlength = run_sql("""SELECT COUNT(*) FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %%s AND
id_accACTION = %%s AND argumentlistid = %%s AND
id_accARGUMENT IN (%s)""" % (argstr),
(id_role, id_action, id_trav))[0][0]
notlist = run_sql("""SELECT COUNT(*) FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %%s AND
id_accACTION = %%s AND argumentlistid = %%s AND
id_accARGUMENT NOT IN (%s)""" % (argstr),
(id_role, id_action, id_trav))[0][0]
# this means that a duplicate already exists
if not notlist and listlength == len(id_arguments):
return 0
# find new arglistid
try:
arglistid = run_sql("""SELECT MAX(argumentlistid) FROM
accROLE_accACTION_accARGUMENT WHERE id_accROLE = %s AND
id_accACTION = %s""", (id_role, id_action))[0][0] + 1
except ProgrammingError:
return 0
except (IndexError, TypeError):
arglistid = 1
if arglistid <= 0:
arglistid = 1
if verbose:
print('ids: insert all the entries')
# all references are valid, insert: one entry in raa for each argument
for id_argument in id_arguments:
if not run_sql("""SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND id_accACTION = %s AND
id_accARGUMENT = %s AND argumentlistid = %s""",
(id_role, id_action, id_argument, arglistid)):
run_sql("""INSERT INTO accROLE_accACTION_accARGUMENT (id_accROLE,
id_accACTION, id_accARGUMENT, argumentlistid)
VALUES (%s, %s, %s, %s)""",
(id_role, id_action, id_argument, arglistid))
inserted.append((id_role, id_action, id_argument, arglistid))
# [(r, ac, ar1, aid), (r, ac, ar2, aid)]
if verbose:
print('ids: inside add function')
for r in acc_find_possible_actions(id_role=id_role,
id_action=id_action):
print('ids: ', r)
return inserted
def acc_add_role_action_arguments_names(name_role='', name_action='',
arglistid=-1, optional=0, verbose=0, **keyval):
""" this function makes it possible to pass names when creating new entries
instead of ids.
get ids for all the names,
create entries in accARGUMENT that does not exist,
pass on to id based function.
name_role, name_action - self explanatory
arglistid - add entries to or create group with arglistid, default -1
create new.
optional - create entry with optional keywords, **keyval is ignored, but
should be empty
verbose - used to print extra information
**keyval - dictionary of keyword=value pairs, used to find ids. """
if verbose:
print('names: starting')
if verbose:
print('names: checking ids')
# find id of the role, return 0 if it doesn't exist
id_role = run_sql("""SELECT id FROM accROLE where name = %s""",
(name_role, ))
if id_role:
id_role = id_role[0][0]
else:
return 0
# find id of the action, return 0 if it doesn't exist
res = run_sql("""SELECT id from accACTION where name = %s""",
(name_action, ))
if res:
id_action = res[0][0]
else:
return 0
if verbose:
print('names: checking arguments')
id_arguments = []
if not optional:
if verbose:
print('names: not optional')
# place to keep ids of arguments and list of allowed keywords
allowedkeys = acc_get_action_keywords(id_action=id_action)
# res[0][3].split(',')
        # find all the id_arguments and create those that do not exist
for key in keyval.keys():
# this key does not exist
if key not in allowedkeys:
return 0
id_argument = acc_get_argument_id(key, keyval[key])
id_argument = id_argument or \
run_sql("""INSERT INTO accARGUMENT (keyword, value)
VALUES (%s, %s) """, (key, keyval[key]))
id_arguments.append(id_argument) # append the id to the list
else:
if verbose:
print('names: optional')
# use the other function
return acc_add_role_action_arguments(id_role=id_role,
id_action=id_action,
arglistid=arglistid,
optional=optional,
verbose=verbose,
id_arguments=id_arguments)
# DELETE WITH ID OR NAMES
def acc_delete_role_action_arguments(id_role, id_action, arglistid=1,
auths=[[]]):
"""delete all entries in accROLE_accACTION_accARGUMENT that satisfy the
parameters.
return number of actual deletes.
    this function relies on the id-lists in auths to have the same order as
    the possible actions...
    id_role, id_action - self explanatory
    arglistid - group to delete from.
        if more entries than deletes, split the group before delete.
    auths - list of argument-id lists to delete."""
keepauths = [] # these will be kept
# find all possible actions
pas = acc_find_possible_actions_ids(id_role, id_action)
dummy = pas[0]
# decide which to keep or throw away
for pa in pas[1:]:
if pa[0] == arglistid and pa[1:] not in auths:
keepauths.append(pa[1:])
# delete everything
run_sql("""DELETE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND
id_accACTION = %s AND
argumentlistid = %s""", (id_role, id_action, arglistid))
# insert those to be kept
for auth in keepauths:
acc_add_role_action_arguments(id_role=id_role,
id_action=id_action,
arglistid=-1,
id_arguments=auth)
return 1
def acc_delete_role_action_arguments_names(name_role='', name_action='',
arglistid=1, **keyval):
"""utilize the function on ids by first finding all ids and redirecting the
function call.
    break off and return 0 if any of the ids can't be found.
    name_role - name of the role
name_action - name of the action
arglistid - the argumentlistid, all keyword=value pairs must be in this
same group.
**keyval - dictionary of keyword=value pairs for the arguments."""
# find ids for role and action
id_role = acc_get_role_id(name_role=name_role)
id_action = acc_get_action_id(name_action=name_action)
# create string with the ids
idstr = ''
idlist = []
for key in keyval.keys():
argument_id = acc_get_argument_id(key, keyval[key])
if not argument_id:
return 0
if idstr:
idstr += ','
idstr += '%s' % argument_id
idlist.append(argument_id)
# control that a fitting group exists
try:
count = run_sql("""SELECT COUNT(*) FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %%s AND
id_accACTION = %%s AND
argumentlistid = %%s AND
id_accARGUMENT IN (%s)""" % (idstr),
(id_role, id_action, arglistid))[0][0]
except IndexError:
return 0
if count < len(keyval):
return 0
# call id based function
return acc_delete_role_action_arguments(id_role, id_action, arglistid,
[idlist])
def acc_delete_role_action_arguments_group(id_role=0, id_action=0, arglistid=0):
"""delete entire group of arguments for connection between
role and action."""
if not id_role or not id_action:
return []
return run_sql("""DELETE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND
id_accACTION = %s AND
argumentlistid = %s """, (id_role, id_action, arglistid))
def acc_delete_possible_actions(id_role=0, id_action=0, authids=[]):
"""delete authorizations in selected rows. utilization of the
delete function.
id_role - id of role to be connected to action.
id_action - id of action to be connected to role
authids - list of row indexes to be removed. """
# find all authorizations
pas = acc_find_possible_actions(id_role=id_role, id_action=id_action)
# get the keys
keys = pas[0][1:]
# create dictionary for all the argumentlistids
ald = {}
for authid in authids:
if authid > len(pas):
return authid, len(pas)
# get info from possible action
pas_auth_id = pas[authid][0]
values = pas[authid][1:]
# create list of authids for each authorization
auth = [acc_get_argument_id(keys[0], values[0])]
for i in range(1, len(keys)):
auth.append(acc_get_argument_id(keys[i], values[i]))
# create entries in the dictionary for each argumentlistid
try:
ald[pas_auth_id].append(auth)
except KeyError:
ald[pas_auth_id] = [auth]
# do the deletes
result = 1
for key in ald.keys():
        result = result and acc_delete_role_action_arguments(id_role=id_role,
id_action=id_action,
arglistid=key,
auths=ald[key])
return result
def acc_delete_role_action(id_role=0, id_action=0):
"""delete all connections between a role and an action. """
count = run_sql("""DELETE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND id_accACTION = %s """, (id_role, id_action))
return count
# GET FUNCTIONS
# ACTION RELATED
def acc_get_action_id(name_action):
"""get id of action when name is given
name_action - name of the wanted action"""
try:
return run_sql("""SELECT id FROM accACTION WHERE name = %s""",
(name_action, ), run_on_slave=True)[0][0]
except (ProgrammingError, IndexError):
return 0
def acc_get_action_name(id_action):
"""get name of action when id is given. """
try:
return run_sql("""SELECT name FROM accACTION WHERE id = %s""",
(id_action, ))[0][0]
except (ProgrammingError, IndexError):
return ''
def acc_get_action_description(id_action):
"""get description of action when id is given. """
try:
return run_sql("""SELECT description FROM accACTION WHERE id = %s""",
(id_action, ))[0][0]
except (ProgrammingError, IndexError):
return ''
def acc_get_action_keywords(id_action=0, name_action=''):
"""get list of keywords for action when id is given.
empty list if no keywords."""
result = acc_get_action_keywords_string(id_action=id_action,
name_action=name_action)
if result:
return result.split(',')
else:
return []
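# E.g. an allowedkeywords string of 'collection,doctype' comes back as
# ['collection', 'doctype'] (keyword names hypothetical).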
def acc_get_action_keywords_string(id_action=0, name_action=''):
"""get keywordstring when id is given. """
id_action = id_action or acc_get_action_id(name_action)
try:
result = run_sql("""SELECT allowedkeywords from accACTION
where id = %s """, (id_action, ))[0][0]
except IndexError:
return ''
return result
def acc_get_action_is_optional(id_action=0):
"""get if the action arguments are optional or not.
return 1 if yes, 0 if no."""
result = acc_get_action_optional(id_action=id_action)
return result == 'yes' and 1 or 0
def acc_get_action_optional(id_action=0):
"""get if the action arguments are optional or not.
return result, but 0 if action does not exist. """
try:
result = run_sql("""SELECT optional from accACTION where id = %s""",
(id_action, ))[0][0]
except IndexError:
return 0
return result
def acc_get_action_details(id_action=0):
"""get all the fields for an action."""
try:
result = run_sql("""SELECT id,name,description,allowedkeywords,optional FROM accACTION WHERE id = %s""",
(id_action, ))[0]
except IndexError:
return []
if result:
return list(result)
else:
return []
def acc_get_all_actions():
"""returns all entries in accACTION."""
return run_sql("""SELECT id, name, description
FROM accACTION ORDER BY name""")
def acc_get_action_roles(id_action):
"""Returns all the roles connected with an action."""
return run_sql("""SELECT DISTINCT(r.id), r.name, r.description
FROM accROLE_accACTION_accARGUMENT raa, accROLE r
WHERE (raa.id_accROLE = r.id AND raa.id_accACTION = %s) OR r.name = %s
ORDER BY r.name """, (id_action, SUPERADMINROLE))
# ROLE RELATED
def acc_get_role_id(name_role):
"""get id of role, name given. """
try:
return run_sql("""SELECT id FROM accROLE WHERE name = %s""",
(name_role, ), run_on_slave=True)[0][0]
except IndexError:
return 0
def acc_get_role_name(id_role):
"""get name of role, id given. """
try:
return run_sql("""SELECT name FROM accROLE WHERE id = %s""",
(id_role, ))[0][0]
except IndexError:
return ''
def acc_get_role_definition(id_role=0):
"""get firewall like role definition object for a role."""
try:
return run_sql("""SELECT firerole_def_ser FROM accROLE
WHERE id = %s""", (id_role, ))[0][0]
except IndexError:
return ''
def acc_get_role_details(id_role=0):
"""get all the fields for a role."""
try:
result = run_sql("""SELECT id, name, description, firerole_def_src
FROM accROLE WHERE id = %s """, (id_role, ))[0]
except IndexError:
return []
if result:
return list(result)
else:
return []
def acc_get_all_roles():
"""get all entries in accROLE."""
return run_sql("""SELECT id, name, description,
firerole_def_ser, firerole_def_src
FROM accROLE ORDER BY name""")
def acc_get_role_actions(id_role):
"""get all actions connected to a role. """
if acc_get_role_name(id_role) == SUPERADMINROLE:
return run_sql("""SELECT id, name, description
FROM accACTION
ORDER BY name """)
else:
return run_sql("""SELECT DISTINCT(a.id), a.name, a.description
FROM accROLE_accACTION_accARGUMENT raa, accACTION a
WHERE raa.id_accROLE = %s and
raa.id_accACTION = a.id
ORDER BY a.name""", (id_role, ))
def acc_get_role_users(id_role):
"""get all users that have direct access to a role.
    Note this function will not consider implicit users linked by the
    FireRole definition.
"""
return run_sql("""SELECT DISTINCT(u.id), u.email, u.settings
FROM user_accROLE ur, user u
WHERE ur.id_accROLE = %s AND
ur.expiration >= NOW() AND
u.id = ur.id_user
ORDER BY u.email""", (id_role, ))
def acc_get_roles_emails(id_roles):
from invenio.modules.accounts.models import User
return set(map(lambda u: u.email.lower().strip(),
db.session.query(db.func.distinct(User.email)).join(
User.active_roles
).filter(UserAccROLE.id_accROLE.in_(id_roles)).all()))
# ARGUMENT RELATED
def acc_get_argument_id(keyword, value):
"""get id of argument, keyword=value pair given.
value = 'optional value' is replaced for id_accARGUMENT = -1."""
try:
return run_sql("""SELECT DISTINCT id FROM accARGUMENT
WHERE keyword = %s and value = %s""", (keyword, value))[0][0]
except IndexError:
if value == 'optional value':
return -1
return 0
# USER RELATED
def acc_get_user_email(id_user=0):
"""get email of user, id given."""
try:
return run_sql("""SELECT email FROM user WHERE id = %s """,
(id_user, ))[0][0].lower().strip()
except IndexError:
return ''
def acc_get_user_id(email=''):
"""get id of user, email given."""
try:
return run_sql("""SELECT id FROM user WHERE email = %s """,
(email.lower().strip(), ))[0][0]
except IndexError:
return 0
def acc_is_user_in_role(user_info, id_role):
"""Return True if the user belong implicitly or explicitly to the role."""
if run_sql("""SELECT ur.id_accROLE
FROM user_accROLE ur
WHERE ur.id_user = %s AND ur.expiration >= NOW() AND
ur.id_accROLE = %s LIMIT 1""", (user_info['uid'], id_role), 1, run_on_slave=True):
return True
return acc_firerole_check_user(user_info, load_role_definition(id_role))
def acc_is_user_in_any_role(user_info, id_roles):
if db.session.query(db.func.count(UserAccROLE.id_accROLE)).filter(db.and_(
UserAccROLE.id_user == user_info['uid'],
UserAccROLE.expiration >= db.func.now(),
UserAccROLE.id_accROLE.in_(id_roles))).scalar() > 0:
return True
for id_role in id_roles:
if acc_firerole_check_user(user_info, load_role_definition(id_role)):
return True
return False
def acc_get_user_roles_from_user_info(user_info):
"""get all roles a user is connected to."""
uid = user_info['uid']
if uid == -1:
roles = intbitset()
else:
roles = intbitset(run_sql("""SELECT ur.id_accROLE
FROM user_accROLE ur
WHERE ur.id_user = %s AND ur.expiration >= NOW()
ORDER BY ur.id_accROLE""", (uid, ), run_on_slave=True))
potential_implicit_roles = run_sql("""SELECT id, firerole_def_ser FROM accROLE
WHERE firerole_def_ser IS NOT NULL""", run_on_slave=True)
for role_id, firerole_def_ser in potential_implicit_roles:
if role_id not in roles:
if acc_firerole_check_user(user_info, deserialize(firerole_def_ser)):
roles.add(role_id)
return roles
def acc_get_user_roles(id_user):
"""get all roles a user is explicitly connected to."""
explicit_roles = run_sql("""SELECT ur.id_accROLE
FROM user_accROLE ur
WHERE ur.id_user = %s AND ur.expiration >= NOW()
ORDER BY ur.id_accROLE""", (id_user, ), run_on_slave=True)
return [id_role[0] for id_role in explicit_roles]
def acc_find_possible_activities(user_info, ln=CFG_SITE_LANG):
"""Return a dictionary with all the possible activities for which the user
    is allowed (i.e. all the administrative actions which are connected to
    a web area in Invenio) and the corresponding url.
"""
your_role_actions = acc_find_user_role_actions(user_info)
your_admin_activities = {}
for (role, action) in your_role_actions:
if action in CFG_ACC_ACTIVITIES_URLS:
your_admin_activities[action] = CFG_ACC_ACTIVITIES_URLS[action]
if role == SUPERADMINROLE:
your_admin_activities = dict(CFG_ACC_ACTIVITIES_URLS)
break
# For BibEdit and BibDocFile menu items, take into consideration
# current record whenever possible
    if ('runbibedit' in your_admin_activities or
            'runbibdocfile' in your_admin_activities) and \
            user_info['uri'].startswith('/' + CFG_SITE_RECORD + '/'):
try:
# Get record ID and try to cast it to an int
current_record_id = int(
urlparse.urlparse(user_info['uri'])[2].split('/')[2]
)
except:
pass
else:
if 'runbibedit' in your_admin_activities:
your_admin_activities['runbibedit'] = \
(your_admin_activities['runbibedit'][0] +
'&#state=edit&recid=' + str(current_record_id),
your_admin_activities['runbibedit'][1])
if 'runbibdocfile' in your_admin_activities:
your_admin_activities['runbibdocfile'] = \
(your_admin_activities['runbibdocfile'][0] +
'&recid=' + str(current_record_id),
your_admin_activities['runbibdocfile'][1])
ret = {}
for action, (name, url) in iteritems(your_admin_activities):
ret[name] = url % ln
return ret
def acc_find_user_role_actions(user_info):
"""find name of all roles and actions connected to user_info."""
uid = user_info['uid']
    # No actions for anonymous
if uid == -1:
res1 = []
else:
# Let's check if user is superadmin
id_superadmin = acc_get_role_id(SUPERADMINROLE)
if id_superadmin in acc_get_user_roles_from_user_info(user_info):
return [(SUPERADMINROLE, action[1]) \
for action in acc_get_all_actions()]
query = """SELECT DISTINCT r.name, a.name
FROM user_accROLE ur, accROLE_accACTION_accARGUMENT raa,
accACTION a, accROLE r
WHERE ur.id_user = %s AND
ur.expiration >= NOW() AND
ur.id_accROLE = raa.id_accROLE AND
raa.id_accACTION = a.id AND
raa.id_accROLE = r.id """
res1 = run_sql(query, (uid, ), run_on_slave=True)
res2 = []
for res in res1:
res2.append(res)
res2.sort()
    if isinstance(user_info, dict):
query = """SELECT DISTINCT r.name, a.name, r.firerole_def_ser
FROM accROLE_accACTION_accARGUMENT raa, accACTION a, accROLE r
WHERE raa.id_accACTION = a.id AND
raa.id_accROLE = r.id """
res3 = run_sql(query, run_on_slave=True)
res4 = []
for role_name, action_name, role_definition in res3:
if acc_firerole_check_user(user_info,
deserialize(role_definition)):
                if role_name == SUPERADMINROLE:
                    # Ok, every action. There's no need to go on :-)
                    return [(SUPERADMINROLE, action[1])
                            for action in acc_get_all_actions()]
res4.append((role_name, action_name))
return list(set(res2) | set(res4))
else:
return res2
# POSSIBLE ACTIONS / AUTHORIZATIONS
def acc_find_possible_actions_all(id_role):
"""find all the possible actions for a role.
the function utilizes acc_find_possible_actions to find
all the entries from each of the actions under the given role
id_role - role to find all actions for
returns a list with headers"""
query = """SELECT DISTINCT(aar.id_accACTION)
FROM accROLE_accACTION_accARGUMENT aar
WHERE aar.id_accROLE = %s
ORDER BY aar.id_accACTION""" % (id_role, )
res = []
for (id_action, ) in run_sql(query):
hlp = acc_find_possible_actions(id_role, id_action)
if hlp:
res.append(['role', 'action'] + hlp[0])
for row in hlp[1:]:
res.append([id_role, id_action] + row)
return res
def acc_find_possible_actions_argument_listid(id_role, id_action, arglistid):
"""find all possible actions with the given arglistid only."""
# get all, independent of argumentlistid
res1 = acc_find_possible_actions_ids(id_role, id_action)
# create list with only those with the right arglistid
res2 = []
for row in res1[1:]:
if row[0] == arglistid:
res2.append(row)
# return this list
return res2
def acc_find_possible_roles(name_action, always_add_superadmin=True,
batch_args=False, **arguments):
"""Find all the possible roles that are enabled to a given action.
:return: roles as a list of role_id
"""
query_roles_without_args = \
db.select([AccAuthorization.id_accROLE], db.and_(
AccAuthorization.argumentlistid <= 0,
AccAuthorization.id_accACTION == db.bindparam('id_action')))
query_roles_with_args = \
AccAuthorization.query.filter(db.and_(
AccAuthorization.argumentlistid > 0,
AccAuthorization.id_accACTION == db.bindparam('id_action')
)).join(AccAuthorization.argument)
id_action = db.session.query(AccACTION.id).filter(
AccACTION.name == name_action).scalar()
roles = intbitset(db.engine.execute(query_roles_without_args.params(
id_action=id_action)).fetchall())
if always_add_superadmin:
roles.add(CFG_SUPERADMINROLE_ID)
# Unpack arguments
if batch_args:
batch_arguments = [dict(zip(arguments.keys(), values))
for values in zip(*arguments.values())]
else:
batch_arguments = [arguments]
acc_authorizations = query_roles_with_args.params(
id_action=id_action
).all()
result = []
for arguments in batch_arguments:
batch_roles = roles.copy()
for auth in acc_authorizations:
if auth.id_accROLE not in batch_roles:
if not ((auth.argument.value != arguments.get(
auth.argument.keyword, '*') != '*'
) and auth.argument.value != '*'):
batch_roles.add(auth.id_accROLE)
result.append(batch_roles)
return result if batch_args else result[0]
def acc_find_possible_actions_user_from_user_info(user_info, id_action):
"""user based function to find all action combination for a given
user and action. find all the roles and utilize findPossibleActions
for all these.
user_info - user information dictionary, used to find roles
id_action - action id.
"""
res = []
for id_role in acc_get_user_roles_from_user_info(user_info):
hlp = acc_find_possible_actions(id_role, id_action)
if hlp and not res:
res.append(['role'] + hlp[0])
for row in hlp[1:]:
res.append([id_role] + row)
return res
def acc_find_possible_actions_user(id_user, id_action):
"""user based function to find all action combination for a given
user and action. find all the roles and utilize findPossibleActions
for all these.
id_user - user id, used to find roles
id_action - action id.
Note this function considers only explicit links between users and roles,
and not FireRole definitions.
"""
res = []
for id_role in acc_get_user_roles(id_user):
hlp = acc_find_possible_actions(id_role, id_action)
if hlp and not res:
res.append(['role'] + hlp[0])
for row in hlp[1:]:
res.append([id_role] + row)
return res
def acc_find_possible_actions_ids(id_role, id_action):
"""finds the ids of the possible actions.
utilization of acc_get_argument_id and acc_find_possible_actions. """
pas = acc_find_possible_actions(id_role, id_action)
if not pas:
return []
keys = pas[0]
pas_ids = [pas[0:1]]
for pa in pas[1:]:
auth = [pa[0]]
for i in range(1, len(pa)):
auth.append(acc_get_argument_id(keys[i], pa[i]))
pas_ids.append(auth)
return pas_ids
def acc_find_possible_actions(id_role, id_action):
"""Role based function to find all action combinations for a
    given role and action.
id_role - id of role in the database
id_action - id of the action in the database
returns a list with all the combinations.
first row is used for header.
    if SUPERADMINROLE, nothing is returned since an infinite number of
    combinations is possible.
"""
# query to find all entries for user and action
res1 = run_sql(""" SELECT raa.argumentlistid, ar.keyword, ar.value
FROM accROLE_accACTION_accARGUMENT raa, accARGUMENT ar
WHERE raa.id_accROLE = %s and
raa.id_accACTION = %s and
raa.id_accARGUMENT = ar.id """, (id_role, id_action))
# find needed keywords, create header
keywords = acc_get_action_keywords(id_action=id_action)
keywords.sort()
if not keywords:
# action without arguments
if run_sql("""SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND id_accACTION = %s AND id_accARGUMENT = 0
AND argumentlistid = 0""", (id_role, id_action)):
return [['#', 'argument keyword'],
['0', 'action without arguments']]
# tuples into lists
res2, arglistids = [], {}
for res in res1:
res2.append([])
for r in res:
res2[-1].append(r)
res2.sort()
# create multilevel dictionary
for res in res2:
        a, kw, value = res  # argumentlistid, keyword, value
if kw not in keywords:
continue
if a not in arglistids:
arglistids[a] = {}
# fill dictionary
if kw not in arglistids[a]:
arglistids[a][kw] = [value]
        elif value not in arglistids[a][kw]:
arglistids[a][kw] = arglistids[a][kw] + [value]
# fill list with all possible combinations
res3 = []
for a in arglistids.keys(): # argumentlistids
# fill a list with the new entries, shortcut and copying first
# keyword list
next_arglistid = []
for row in arglistids[a][keywords[0]]:
            next_arglistid.append([a, row[:]])
# run through the rest of the keywords
for kw in keywords[1:]:
if kw not in arglistids[a]:
arglistids[a][kw] = ['optional value']
new_list = arglistids[a][kw][:]
new_len = len(new_list)
# duplicate the list
temp_list = []
for row in next_arglistid:
for i in range(new_len):
temp_list.append(row[:])
# append new values
for i in range(len(temp_list)):
new_item = new_list[i % new_len][:]
            temp_list[i].append(new_item)
next_arglistid = temp_list[:]
res3.extend(next_arglistid)
res3.sort()
# if optional allowed, put on top
opt = run_sql("""SELECT id_accROLE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %s AND
id_accACTION = %s AND
id_accARGUMENT = -1 AND
argumentlistid = -1""", (id_role, id_action))
if opt:
res3.insert(0, [-1] + ['optional value'] * len(keywords))
# put header on top
if res3:
res3.insert(0, ['#'] + keywords)
return res3
def acc_split_argument_group(id_role=0, id_action=0, arglistid=0):
"""collect the arguments, find all combinations, delete original entries
and insert the new ones with different argumentlistids for each group
id_role - id of the role
id_action - id of the action
arglistid - argumentlistid to be splittetd"""
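    # Illustrative effect (values are hypothetical): one group authorizing
    # collection in ('Theses', 'Articles') is replaced by two groups, one
    # per collection value, each under its own argumentlistid.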
if not id_role or not id_action or not arglistid:
return []
    # don't split if there are zero or one possible actions
    res = acc_find_possible_actions_argument_listid(id_role, id_action,
                                                    arglistid)
    if not res or len(res) <= 1:
        return []
# delete the existing group
acc_delete_role_action_arguments_group(id_role, id_action,
arglistid)
# add all authorizations with new and different argumentlistid
addlist = []
for row in res:
argids = row[1:]
addlist.append(acc_add_role_action_arguments(id_role=id_role,
id_action=id_action,
arglistid=-1,
id_arguments=argids))
# return list of added authorizations
return addlist
def acc_merge_argument_groups(id_role=0, id_action=0, arglistids=()):
"""merge the authorizations from groups with different argumentlistids
into one single group.
this can both save entries in the database and create extra authorizations.
id_role - id of the role
    id_action - id of the action
arglistids - list of groups to be merged together into one."""
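    # Illustrative effect (values are hypothetical): merging one group with
    # collection='Theses' and another with collection='Articles' yields a
    # single group authorizing both values under one argumentlistid.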
if len(arglistids) < 2:
return []
    # build one placeholder per argumentlistid instead of interpolating values
    argstr = '(%s)' % ' or '.join(
        ['raa.argumentlistid = %s'] * len(arglistids))
# query to find all entries that will be merged
query = """ SELECT ar.keyword, ar.value, raa.id_accARGUMENT
FROM accROLE_accACTION_accARGUMENT raa, accARGUMENT ar
WHERE raa.id_accROLE = %%s and
raa.id_accACTION = %%s and
%s and
raa.id_accARGUMENT = ar.id """ % argstr
q_del = """DELETE FROM accROLE_accACTION_accARGUMENT
WHERE id_accROLE = %%s and
id_accACTION = %%s and
%s """ % (argstr.replace('raa.', ''))
    res = run_sql(query, (id_role, id_action) + tuple(arglistids))
if not res:
return []
    run_sql(q_del, (id_role, id_action) + tuple(arglistids))
# list of entire entries
old = []
# list of only the ids
ids = []
for (keyword, value, argument_id) in res:
if [keyword, value, argument_id] not in old:
old.append([keyword, value, argument_id])
ids.append(argument_id)
return acc_add_role_action_arguments(id_role=id_role,
id_action=id_action,
arglistid=-1,
id_arguments=ids)
def acc_reset_default_settings(superusers=(),
additional_def_user_roles=(),
additional_def_roles=(),
additional_def_auths=()):
"""reset to default by deleting everything and adding default.
superusers - list of superuser emails
    additional_def_user_roles - additional list of (email, rolename) pairs
                     (see DEF_DEMO_USER_ROLES in access_control_config.py)
    additional_def_roles - additional list of default roles
(see DEF_DEMO_ROLES in access_control_config.py)
additional_def_auths - additional list of default authorizations
(see DEF_DEMO_AUTHS in access_control_config.py)
"""
remove = acc_delete_all_settings()
add = acc_add_default_settings(superusers, additional_def_user_roles, additional_def_roles, additional_def_auths)
return remove, add
def acc_delete_all_settings():
"""simply remove all data affiliated with webaccess by truncating
tables accROLE, accACTION, accARGUMENT and those connected. """
from invenio.ext.sqlalchemy import db
db.session.commit()
run_sql("""TRUNCATE accROLE""")
run_sql("""TRUNCATE accACTION""")
run_sql("""TRUNCATE accARGUMENT""")
run_sql("""TRUNCATE user_accROLE""")
run_sql("""TRUNCATE accROLE_accACTION_accARGUMENT""")
return 1
def acc_add_default_settings(superusers=(),
additional_def_user_roles=(),
additional_def_roles=(),
additional_def_auths=()):
"""add the default settings if they don't exist.
superusers - list of superuser emails
    additional_def_user_roles - additional list of (email, rolename) pairs
                  (see DEF_DEMO_USER_ROLES in access_control_config.py)
    additional_def_roles - additional list of default roles
(see DEF_DEMO_ROLES in access_control_config.py)
additional_def_auths - additional list of default authorizations
(see DEF_DEMO_AUTHS in access_control_config.py)
"""
# from superusers: allow input formats ['email1', 'email2'] and
# [['email1'], ['email2']] and [['email1', id], ['email2', id]]
for user in superusers:
        if isinstance(user, str):
user = [user]
DEF_USERS.append(user[0])
if CFG_SITE_ADMIN_EMAIL not in DEF_USERS:
DEF_USERS.append(CFG_SITE_ADMIN_EMAIL)
# add data
# add roles
insroles = []
def_roles = dict([(role[0], role[1:]) for role in DEF_ROLES])
def_roles.update(dict([(role[0], role[1:]) for role in additional_def_roles]))
for name, (description, firerole_def_src) in iteritems(def_roles):
# try to add, don't care if description is different
role_id = acc_add_role(name_role=name,
description=description, firerole_def_ser=serialize(
compile_role_definition(firerole_def_src)),
firerole_def_src=firerole_def_src)
if not role_id:
role_id = acc_get_role_id(name_role=name)
acc_update_role(id_role=role_id, description=description,
firerole_def_ser=serialize(compile_role_definition(
firerole_def_src)), firerole_def_src=firerole_def_src)
insroles.append([role_id, name, description, firerole_def_src])
# add users to superadmin
insuserroles = []
for user in DEF_USERS:
insuserroles.append(acc_add_user_role(email=user,
name_role=SUPERADMINROLE))
for user, role in additional_def_user_roles:
insuserroles.append(acc_add_user_role(email=user, name_role=role))
# add actions
insactions = []
for action in principal.actions:
name = action.name
description = action.description
optional = 'yes' if action.optional else 'no'
allkeys = ','.join(action.allowedkeywords) \
if action.allowedkeywords is not None else ''
# try to add action as new
action_id = acc_add_action(name, description, optional, allkeys)
        # an action with this name already exists
if not action_id:
action_id = acc_get_action_id(name_action=action.name)
            # update the action; any necessary database updates
            # will also be performed
acc_update_action(id_action=action_id, optional=optional,
allowedkeywords=allkeys)
# keep track of inserted actions
insactions.append([action_id, name, description, allkeys])
# add authorizations
insauths = []
def_auths = list(DEF_AUTHS) + list(additional_def_auths)
for (name_role, name_action, args) in def_auths:
# add the authorization
optional = not args and acc_get_action_is_optional(acc_get_action_id(name_action))
acc_add_authorization(name_role=name_role,
name_action=name_action,
optional=optional,
**args)
# keep track of inserted authorizations
insauths.append([name_role, name_action, args])
return insroles, insactions, insuserroles, insauths
def acc_find_delegated_roles(id_role_admin=0):
"""find all the roles the admin role has delegation rights over.
    return a list with the details of each such role.
id_role_admin - id of the admin role """
id_action_delegate = acc_get_action_id(name_action=DELEGATEADDUSERROLE)
rolenames = run_sql("""SELECT DISTINCT(ar.value)
FROM accROLE_accACTION_accARGUMENT raa LEFT JOIN accARGUMENT ar
ON raa.id_accARGUMENT = ar.id
WHERE raa.id_accROLE = %s AND
raa.id_accACTION = %s""", (id_role_admin, id_action_delegate))
result = []
for (name_role, ) in rolenames:
roledetails = run_sql("""SELECT id,name,description,firerole_def_ser,firerole_def_src FROM accROLE WHERE name = %s """,
(name_role, ))
if roledetails:
result.append(roledetails)
return result
def acc_cleanup_arguments():
"""function deletes all accARGUMENTs that are not referenced by
accROLE_accACTION_accARGUMENT.
    returns how many arguments were deleted and a list of the deleted
id_arguments"""
# find unreferenced arguments
ids1 = run_sql("""SELECT DISTINCT ar.id
FROM accARGUMENT ar LEFT JOIN accROLE_accACTION_accARGUMENT raa ON
ar.id = raa.id_accARGUMENT WHERE raa.id_accARGUMENT IS NULL """)
    # nothing to clean up
    if not ids1:
        return (0, [])
    # create list and string of the ids
    ids2 = [argument_id for (argument_id, ) in ids1]
    idstr = ','.join('%s' % argument_id for argument_id in ids2)
# delete unreferenced arguments
count = run_sql("""DELETE FROM accARGUMENT
WHERE id in (%s)""" % (idstr, ))
# return count and ids of deleted arguments
return (count, ids2)
| gpl-2.0 |
w1ll1am23/home-assistant | homeassistant/components/eddystone_temperature/sensor.py | 5 | 5649 | """
Read temperature information from Eddystone beacons.
Your beacons must be configured to transmit UID (for identification) and TLM
(for temperature) frames.
"""
import logging
# pylint: disable=import-error
from beacontools import BeaconScanner, EddystoneFilter, EddystoneTLMFrame
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (
CONF_NAME,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
STATE_UNKNOWN,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_BEACONS = "beacons"
CONF_BT_DEVICE_ID = "bt_device_id"
CONF_INSTANCE = "instance"
CONF_NAMESPACE = "namespace"
BEACON_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAMESPACE): cv.string,
vol.Required(CONF_INSTANCE): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_BT_DEVICE_ID, default=0): cv.positive_int,
vol.Required(CONF_BEACONS): vol.Schema({cv.string: BEACON_SCHEMA}),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Validate configuration, create devices and start monitoring thread."""
    bt_device_id = config.get(CONF_BT_DEVICE_ID)
beacons = config.get(CONF_BEACONS)
devices = []
for dev_name, properties in beacons.items():
namespace = get_from_conf(properties, CONF_NAMESPACE, 20)
instance = get_from_conf(properties, CONF_INSTANCE, 12)
name = properties.get(CONF_NAME, dev_name)
if instance is None or namespace is None:
_LOGGER.error("Skipping %s", dev_name)
continue
devices.append(EddystoneTemp(name, namespace, instance))
if devices:
mon = Monitor(hass, devices, bt_device_id)
def monitor_stop(_service_or_event):
"""Stop the monitor thread."""
_LOGGER.info("Stopping scanner for Eddystone beacons")
mon.stop()
def monitor_start(_service_or_event):
"""Start the monitor thread."""
_LOGGER.info("Starting scanner for Eddystone beacons")
mon.start()
add_entities(devices)
mon.start()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, monitor_start)
else:
_LOGGER.warning("No devices were added")
def get_from_conf(config, config_key, length):
"""Retrieve value from config and validate length."""
string = config.get(config_key)
if len(string) != length:
_LOGGER.error(
"Error in configuration parameter %s: Must be exactly %d "
"bytes. Device will not be added",
config_key,
length / 2,
)
return None
return string
class EddystoneTemp(SensorEntity):
"""Representation of a temperature sensor."""
def __init__(self, name, namespace, instance):
"""Initialize a sensor."""
self._name = name
self.namespace = namespace
self.instance = instance
self.bt_addr = None
self.temperature = STATE_UNKNOWN
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self.temperature
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return TEMP_CELSIUS
@property
def should_poll(self):
"""Return the polling state."""
return False
class Monitor:
"""Continuously scan for BLE advertisements."""
def __init__(self, hass, devices, bt_device_id):
"""Construct interface object."""
self.hass = hass
# List of beacons to monitor
self.devices = devices
# Number of the bt device (hciX)
self.bt_device_id = bt_device_id
def callback(bt_addr, _, packet, additional_info):
"""Handle new packets."""
self.process_packet(
additional_info["namespace"],
additional_info["instance"],
packet.temperature,
)
device_filters = [EddystoneFilter(d.namespace, d.instance) for d in devices]
self.scanner = BeaconScanner(
callback, bt_device_id, device_filters, EddystoneTLMFrame
)
self.scanning = False
def start(self):
"""Continuously scan for BLE advertisements."""
if not self.scanning:
self.scanner.start()
self.scanning = True
else:
_LOGGER.debug("start() called, but scanner is already running")
def process_packet(self, namespace, instance, temperature):
"""Assign temperature to device."""
_LOGGER.debug(
"Received temperature for <%s,%s>: %d", namespace, instance, temperature
)
for dev in self.devices:
if (
dev.namespace == namespace
and dev.instance == instance
and dev.temperature != temperature
):
dev.temperature = temperature
dev.schedule_update_ha_state()
def stop(self):
"""Signal runner to stop and join thread."""
if self.scanning:
_LOGGER.debug("Stopping")
self.scanner.stop()
_LOGGER.debug("Stopped")
self.scanning = False
else:
_LOGGER.debug("stop() called but scanner was not running")
| apache-2.0 |
2013Commons/HUE-SHARK | build/env/lib/python2.7/site-packages/django_extensions-0.5-py2.7.egg/django_extensions/management/modelviz.py | 7 | 9303 | #!/usr/bin/env python
"""Django model to DOT (Graphviz) converter
by Antonio Cavedoni <[email protected]>
Make sure your DJANGO_SETTINGS_MODULE is set to your project or
place this script in the same directory of the project and call
the script like this:
$ python modelviz.py [-h] [-a] [-d] [-g] [-i <model_names>] <app_label> ... <app_label> > <filename>.dot
$ dot <filename>.dot -Tpng -o <filename>.png
options:
-h, --help
show this help message and exit.
-a, --all_applications
show models from all applications.
-d, --disable_fields
don't show the class member fields.
-g, --group_models
draw an enclosing box around models from the same app.
-i, --include_models=User,Person,Car
only include selected models in graph.
"""
__version__ = "0.9"
__svnid__ = "$Id$"
__license__ = "Python"
__author__ = "Antonio Cavedoni <http://cavedoni.com/>"
__contributors__ = [
"Stefano J. Attardi <http://attardi.org/>",
"limodou <http://www.donews.net/limodou/>",
"Carlo C8E Miron",
"Andre Campos <[email protected]>",
"Justin Findlay <[email protected]>",
"Alexander Houben <[email protected]>",
"Bas van Oostveen <[email protected]>",
]
import getopt
import sys
from django.core.management import setup_environ
try:
import settings
except ImportError:
pass
else:
setup_environ(settings)
from django.utils.safestring import mark_safe
from django.template import Template, Context
from django.db import models
from django.db.models import get_models
from django.db.models.fields.related import \
ForeignKey, OneToOneField, ManyToManyField
try:
from django.db.models.fields.generic import GenericRelation
except ImportError:
from django.contrib.contenttypes.generic import GenericRelation
head_template = """
digraph name {
fontname = "Helvetica"
fontsize = 8
node [
fontname = "Helvetica"
fontsize = 8
shape = "plaintext"
]
edge [
fontname = "Helvetica"
fontsize = 8
]
"""
body_template = """
{% if use_subgraph %}
subgraph {{ cluster_app_name }} {
label=<
<TABLE BORDER="0" CELLBORDER="0" CELLSPACING="0">
<TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER"
><FONT FACE="Helvetica Bold" COLOR="Black" POINT-SIZE="12"
>{{ app_name }}</FONT></TD></TR>
</TABLE>
>
color=olivedrab4
style="rounded"
{% endif %}
{% for model in models %}
{{ model.app_name }}_{{ model.name }} [label=<
<TABLE BGCOLOR="palegoldenrod" BORDER="0" CELLBORDER="0" CELLSPACING="0">
<TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER" BGCOLOR="olivedrab4"
><FONT FACE="Helvetica Bold" COLOR="white"
>{{ model.name }}{% if model.abstracts %}<BR/><<FONT FACE="Helvetica Italic">{{ model.abstracts|join:"," }}</FONT>>{% endif %}</FONT></TD></TR>
{% if not disable_fields %}
{% for field in model.fields %}
<TR><TD ALIGN="LEFT" BORDER="0"
><FONT {% if field.blank %}COLOR="#7B7B7B" {% endif %}FACE="Helvetica {% if field.abstract %}Italic{% else %}Bold{% endif %}">{{ field.name }}</FONT
></TD>
<TD ALIGN="LEFT"
><FONT {% if field.blank %}COLOR="#7B7B7B" {% endif %}FACE="Helvetica {% if field.abstract %}Italic{% else %}Bold{% endif %}">{{ field.type }}</FONT
></TD></TR>
{% endfor %}
{% endif %}
</TABLE>
>]
{% endfor %}
{% if use_subgraph %}
}
{% endif %}
"""
rel_template = """
{% for model in models %}
{% for relation in model.relations %}
{% if relation.needs_node %}
{{ relation.target_app }}_{{ relation.target }} [label=<
<TABLE BGCOLOR="palegoldenrod" BORDER="0" CELLBORDER="0" CELLSPACING="0">
<TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER" BGCOLOR="olivedrab4"
><FONT FACE="Helvetica Bold" COLOR="white"
>{{ relation.target }}</FONT></TD></TR>
</TABLE>
>]
{% endif %}
{{ model.app_name }}_{{ model.name }} -> {{ relation.target_app }}_{{ relation.target }}
[label="{{ relation.name }}"] {{ relation.arrows }};
{% endfor %}
{% endfor %}
"""
tail_template = """
}
"""
def generate_dot(app_labels, **kwargs):
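    """Build the DOT source for the given app labels.

    Keyword arguments mirror the command-line options; a hypothetical call:

        dot_source = generate_dot(['auth'], group_models=True)
    """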
disable_fields = kwargs.get('disable_fields', False)
include_models = kwargs.get('include_models', [])
all_applications = kwargs.get('all_applications', False)
use_subgraph = kwargs.get('group_models', False)
dot = head_template
apps = []
if all_applications:
apps = models.get_apps()
for app_label in app_labels:
app = models.get_app(app_label)
if not app in apps:
apps.append(app)
graphs = []
for app in apps:
graph = Context({
'name': '"%s"' % app.__name__,
'app_name': "%s" % '.'.join(app.__name__.split('.')[:-1]),
'cluster_app_name': "cluster_%s" % app.__name__.replace(".", "_"),
'disable_fields': disable_fields,
'use_subgraph': use_subgraph,
'models': []
})
for appmodel in get_models(app):
abstracts = [e.__name__ for e in appmodel.__bases__ if hasattr(e, '_meta') and e._meta.abstract]
abstract_fields = []
for e in appmodel.__bases__:
if hasattr(e, '_meta') and e._meta.abstract:
abstract_fields.extend(e._meta.fields)
model = {
'app_name': appmodel.__module__.replace(".", "_"),
'name': appmodel.__name__,
'abstracts': abstracts,
'fields': [],
'relations': []
}
            # should the given model name be included?
def consider(model_name):
return not include_models or model_name in include_models
if not consider(appmodel._meta.object_name):
continue
# model attributes
def add_attributes(field):
model['fields'].append({
'name': field.name,
'type': type(field).__name__,
'blank': field.blank,
'abstract': field in abstract_fields,
})
for field in appmodel._meta.fields:
add_attributes(field)
if appmodel._meta.many_to_many:
for field in appmodel._meta.many_to_many:
add_attributes(field)
# relations
def add_relation(field, extras=""):
_rel = {
'target_app': field.rel.to.__module__.replace('.','_'),
'target': field.rel.to.__name__,
'type': type(field).__name__,
'name': field.name,
'arrows': extras,
'needs_node': True
}
if _rel not in model['relations'] and consider(_rel['target']):
model['relations'].append(_rel)
for field in appmodel._meta.fields:
if isinstance(field, ForeignKey):
add_relation(field)
elif isinstance(field, OneToOneField):
add_relation(field, '[arrowhead=none arrowtail=none]')
if appmodel._meta.many_to_many:
for field in appmodel._meta.many_to_many:
if isinstance(field, ManyToManyField) and getattr(field, 'creates_table', False):
add_relation(field, '[arrowhead=normal arrowtail=normal]')
elif isinstance(field, GenericRelation):
add_relation(field, mark_safe('[style="dotted"] [arrowhead=normal arrowtail=normal]'))
graph['models'].append(model)
graphs.append(graph)
nodes = []
for graph in graphs:
nodes.extend([e['name'] for e in graph['models']])
for graph in graphs:
        # don't create duplicate nodes for relation targets already drawn
for model in graph['models']:
for relation in model['relations']:
if relation['target'] in nodes:
relation['needs_node'] = False
# render templates
t = Template(body_template)
dot += '\n' + t.render(graph)
for graph in graphs:
t = Template(rel_template)
dot += '\n' + t.render(graph)
dot += '\n' + tail_template
return dot
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "hadgi:",
["help", "all_applications", "disable_fields", "group_models", "include_models="])
except getopt.GetoptError, error:
print __doc__
sys.exit(error)
kwargs = {}
for opt, arg in opts:
if opt in ("-h", "--help"):
print __doc__
sys.exit()
if opt in ("-a", "--all_applications"):
kwargs['all_applications'] = True
if opt in ("-d", "--disable_fields"):
kwargs['disable_fields'] = True
if opt in ("-g", "--group_models"):
kwargs['group_models'] = True
if opt in ("-i", "--include_models"):
kwargs['include_models'] = arg.split(',')
if not args and not kwargs.get('all_applications', False):
print __doc__
sys.exit()
print generate_dot(args, **kwargs)
if __name__ == "__main__":
main()
| apache-2.0 |
jjscarafia/odoo | openerp/tools/safe_eval.py | 19 | 12057 | # -*- coding: utf-8 -*-
##############################################################################
# Copyright (C) 2004-2014 OpenERP s.a. (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
safe_eval module - methods intended to provide more restricted alternatives to
evaluate simple and/or untrusted code.
Methods in this module are typically used as alternatives to eval() to parse
OpenERP domain strings, conditions and expressions, relying mostly on the
locals dictionary and on condition/math builtins.
"""
# Module partially ripped from/inspired by several different sources:
# - http://code.activestate.com/recipes/286134/
# - safe_eval in lp:~xrg/openobject-server/optimize-5.0
# - safe_eval in tryton http://hg.tryton.org/hgwebdir.cgi/trytond/rev/bbb5f73319ad
from opcode import HAVE_ARGUMENT, opmap, opname
from types import CodeType
import logging
from .misc import ustr
import openerp
__all__ = ['test_expr', 'safe_eval', 'const_eval']
# The time module is usually already provided in the safe_eval environment
# but some code, e.g. datetime.datetime.now() (Windows/Python 2.5.2, bug
# lp:703841), does import time.
_ALLOWED_MODULES = ['_strptime', 'time']
_CONST_OPCODES = set(opmap[x] for x in [
'POP_TOP', 'ROT_TWO', 'ROT_THREE', 'ROT_FOUR', 'DUP_TOP', 'DUP_TOPX',
'POP_BLOCK','SETUP_LOOP', 'BUILD_LIST', 'BUILD_MAP', 'BUILD_TUPLE',
'LOAD_CONST', 'RETURN_VALUE', 'STORE_SUBSCR', 'STORE_MAP'] if x in opmap)
_EXPR_OPCODES = _CONST_OPCODES.union(set(opmap[x] for x in [
'UNARY_POSITIVE', 'UNARY_NEGATIVE', 'UNARY_NOT',
'UNARY_INVERT', 'BINARY_POWER', 'BINARY_MULTIPLY',
'BINARY_DIVIDE', 'BINARY_FLOOR_DIVIDE', 'BINARY_TRUE_DIVIDE',
'BINARY_MODULO', 'BINARY_ADD', 'BINARY_SUBTRACT', 'BINARY_SUBSCR',
'BINARY_LSHIFT', 'BINARY_RSHIFT', 'BINARY_AND', 'BINARY_XOR',
'BINARY_OR', 'INPLACE_ADD', 'INPLACE_SUBTRACT', 'INPLACE_MULTIPLY',
'INPLACE_DIVIDE', 'INPLACE_REMAINDER', 'INPLACE_POWER',
'INPLACE_LEFTSHIFT', 'INPLACE_RIGHTSHIFT', 'INPLACE_AND',
'INPLACE_XOR','INPLACE_OR'
] if x in opmap))
_SAFE_OPCODES = _EXPR_OPCODES.union(set(opmap[x] for x in [
'LOAD_NAME', 'CALL_FUNCTION', 'COMPARE_OP', 'LOAD_ATTR',
'STORE_NAME', 'GET_ITER', 'FOR_ITER', 'LIST_APPEND', 'DELETE_NAME',
'JUMP_FORWARD', 'JUMP_IF_TRUE', 'JUMP_IF_FALSE', 'JUMP_ABSOLUTE',
'MAKE_FUNCTION', 'SLICE+0', 'SLICE+1', 'SLICE+2', 'SLICE+3', 'BREAK_LOOP',
'CONTINUE_LOOP', 'RAISE_VARARGS', 'YIELD_VALUE',
# New in Python 2.7 - http://bugs.python.org/issue4715 :
'JUMP_IF_FALSE_OR_POP', 'JUMP_IF_TRUE_OR_POP', 'POP_JUMP_IF_FALSE',
'POP_JUMP_IF_TRUE', 'SETUP_EXCEPT', 'END_FINALLY',
'LOAD_FAST', 'STORE_FAST', 'DELETE_FAST', 'UNPACK_SEQUENCE',
'LOAD_GLOBAL', # Only allows access to restricted globals
] if x in opmap))
_logger = logging.getLogger(__name__)
def _get_opcodes(codeobj):
"""_get_opcodes(codeobj) -> [opcodes]
Extract the actual opcodes as a list from a code object
>>> c = compile("[1 + 2, (1,2)]", "", "eval")
>>> _get_opcodes(c)
[100, 100, 23, 100, 100, 102, 103, 83]
"""
i = 0
byte_codes = codeobj.co_code
while i < len(byte_codes):
code = ord(byte_codes[i])
yield code
if code >= HAVE_ARGUMENT:
i += 3
else:
i += 1
def assert_no_dunder_name(code_obj, expr):
""" assert_no_dunder_name(code_obj, expr) -> None
Asserts that the code object does not refer to any "dunder name"
(__$name__), so that safe_eval prevents access to any internal-ish Python
attribute or method (both are loaded via LOAD_ATTR which uses a name, not a
const or a var).
Checks that no such name exists in the provided code object (co_names).
:param code_obj: code object to name-validate
:type code_obj: CodeType
:param str expr: expression corresponding to the code object, for debugging
purposes
:raises NameError: in case a forbidden name (containing two underscores)
is found in ``code_obj``
.. note:: actually forbids every name containing 2 underscores
"""
for name in code_obj.co_names:
if "__" in name:
raise NameError('Access to forbidden name %r (%r)' % (name, expr))
def assert_valid_codeobj(allowed_codes, code_obj, expr):
""" Asserts that the provided code object validates against the bytecode
and name constraints.
Recursively validates the code objects stored in its co_consts in case
lambdas are being created/used (lambdas generate their own separated code
objects and don't live in the root one)
:param allowed_codes: list of permissible bytecode instructions
:type allowed_codes: set(int)
:param code_obj: code object to name-validate
:type code_obj: CodeType
:param str expr: expression corresponding to the code object, for debugging
purposes
:raises ValueError: in case of forbidden bytecode in ``code_obj``
:raises NameError: in case a forbidden name (containing two underscores)
is found in ``code_obj``
"""
assert_no_dunder_name(code_obj, expr)
for opcode in _get_opcodes(code_obj):
if opcode not in allowed_codes:
raise ValueError(
"opcode %s not allowed (%r)" % (opname[opcode], expr))
for const in code_obj.co_consts:
if isinstance(const, CodeType):
assert_valid_codeobj(allowed_codes, const, 'lambda')
def test_expr(expr, allowed_codes, mode="eval"):
"""test_expr(expression, allowed_codes[, mode]) -> code_object
Test that the expression contains only the allowed opcodes.
If the expression is valid and contains only allowed codes,
return the compiled code object.
Otherwise raise a ValueError, a Syntax Error or TypeError accordingly.
"""
try:
if mode == 'eval':
# eval() does not like leading/trailing whitespace
expr = expr.strip()
code_obj = compile(expr, "", mode)
except (SyntaxError, TypeError, ValueError):
raise
except Exception, e:
import sys
exc_info = sys.exc_info()
raise ValueError, '"%s" while compiling\n%r' % (ustr(e), expr), exc_info[2]
assert_valid_codeobj(allowed_codes, code_obj, expr)
return code_obj
def const_eval(expr):
"""const_eval(expression) -> value
Safe Python constant evaluation
Evaluates a string that contains an expression describing
a Python constant. Strings that are not valid Python expressions
or that contain other code besides the constant raise ValueError.
>>> const_eval("10")
10
>>> const_eval("[1,2, (3,4), {'foo':'bar'}]")
[1, 2, (3, 4), {'foo': 'bar'}]
>>> const_eval("1+2")
Traceback (most recent call last):
...
ValueError: opcode BINARY_ADD not allowed
"""
c = test_expr(expr, _CONST_OPCODES)
return eval(c)
def expr_eval(expr):
"""expr_eval(expression) -> value
Restricted Python expression evaluation
Evaluates a string that contains an expression that only
uses Python constants. This can be used to e.g. evaluate
a numerical expression from an untrusted source.
>>> expr_eval("1+2")
3
>>> expr_eval("[1,2]*2")
[1, 2, 1, 2]
>>> expr_eval("__import__('sys').modules")
Traceback (most recent call last):
...
ValueError: opcode LOAD_NAME not allowed
"""
c = test_expr(expr, _EXPR_OPCODES)
return eval(c)
def _import(name, globals=None, locals=None, fromlist=None, level=-1):
if globals is None:
globals = {}
if locals is None:
locals = {}
if fromlist is None:
fromlist = []
if name in _ALLOWED_MODULES:
        return __import__(name, globals, locals, fromlist, level)
raise ImportError(name)
def safe_eval(expr, globals_dict=None, locals_dict=None, mode="eval", nocopy=False, locals_builtins=False):
"""safe_eval(expression[, globals[, locals[, mode[, nocopy]]]]) -> result
System-restricted Python expression evaluation
Evaluates a string that contains an expression that mostly
uses Python constants, arithmetic expressions and the
objects directly provided in context.
This can be used to e.g. evaluate
an OpenERP domain expression from an untrusted source.
:throws TypeError: If the expression provided is a code object
:throws SyntaxError: If the expression provided is not valid Python
:throws NameError: If the expression provided accesses forbidden names
:throws ValueError: If the expression provided uses forbidden bytecode
"""
if isinstance(expr, CodeType):
raise TypeError("safe_eval does not allow direct evaluation of code objects.")
if globals_dict is None:
globals_dict = {}
# prevent altering the globals/locals from within the sandbox
# by taking a copy.
if not nocopy:
# isinstance() does not work below, we want *exactly* the dict class
if (globals_dict is not None and type(globals_dict) is not dict) \
or (locals_dict is not None and type(locals_dict) is not dict):
_logger.warning(
"Looks like you are trying to pass a dynamic environment, "
"you should probably pass nocopy=True to safe_eval().")
globals_dict = dict(globals_dict)
if locals_dict is not None:
locals_dict = dict(locals_dict)
globals_dict.update(
__builtins__={
'__import__': _import,
'True': True,
'False': False,
'None': None,
'str': str,
'unicode': unicode,
'globals': locals,
'locals': locals,
'bool': bool,
'int': int,
'float': float,
'long': long,
'enumerate': enumerate,
'dict': dict,
'list': list,
'tuple': tuple,
'map': map,
'abs': abs,
'min': min,
'max': max,
'sum': sum,
'reduce': reduce,
'filter': filter,
'round': round,
'len': len,
'repr': repr,
'set': set,
'all': all,
'any': any,
'ord': ord,
'chr': chr,
'cmp': cmp,
'divmod': divmod,
'isinstance': isinstance,
'range': range,
'xrange': xrange,
'zip': zip,
'Exception': Exception,
}
)
if locals_builtins:
if locals_dict is None:
locals_dict = {}
locals_dict.update(globals_dict.get('__builtins__'))
c = test_expr(expr, _SAFE_OPCODES, mode=mode)
try:
return eval(c, globals_dict, locals_dict)
except openerp.osv.orm.except_orm:
raise
except openerp.exceptions.Warning:
raise
except openerp.exceptions.RedirectWarning:
raise
except openerp.exceptions.AccessDenied:
raise
except openerp.exceptions.AccessError:
raise
except Exception, e:
import sys
exc_info = sys.exc_info()
raise ValueError, '"%s" while evaluating\n%r' % (ustr(e), expr), exc_info[2]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sarthfrey/Texty | lib/tests/test_keys.py | 2 | 2303 | from mock import patch, Mock
from twilio.rest.resources.keys import Keys, Key
from tests.tools import create_mock_json
ACCOUNT_SID = "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
AUTH = (ACCOUNT_SID, "token")
BASE_URL = "https://api.twilio.com/2010-04-01/Accounts/{0}".format(ACCOUNT_SID)
KEY_SID = "SKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
list_resource = Keys(BASE_URL, AUTH)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_get_key(mock):
resp = create_mock_json("tests/resources/keys_instance.json")
mock.return_value = resp
url = BASE_URL + "/Keys/{0}".format(KEY_SID)
list_resource.get(KEY_SID)
mock.assert_called_with("GET", url, auth=AUTH, use_json_extension=True)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_create_key(mock):
resp = create_mock_json("tests/resources/keys_instance.json")
resp.status_code = 201
mock.return_value = resp
url = BASE_URL + "/Keys"
list_resource.create(friendly_name="Fuzzy Lumpkins' SigningKey")
params = {
'FriendlyName': "Fuzzy Lumpkins' SigningKey"
}
mock.assert_called_with("POST", url, data=params, auth=AUTH, use_json_extension=True)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_update_key(mock):
resp = create_mock_json("tests/resources/keys_instance.json")
mock.return_value = resp
url = BASE_URL + "/Keys/{0}".format(KEY_SID)
list_resource.update(sid=KEY_SID, friendly_name="Fuzzy Lumpkins' SigningKey")
params = {
'FriendlyName': "Fuzzy Lumpkins' SigningKey"
}
mock.assert_called_with("POST", url, data=params, auth=AUTH, use_json_extension=True)
@patch("twilio.rest.resources.base.Resource.request")
def test_delete_key(mock):
resp = Mock()
resp.content = ""
resp.status_code = 204
mock.return_value = resp, {}
key = Key(list_resource, KEY_SID)
key.delete()
url = BASE_URL + "/Keys/{0}".format(KEY_SID)
mock.assert_called_with("DELETE", url)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_list_keys(mock):
resp = create_mock_json("tests/resources/keys_list.json")
mock.return_value = resp
url = BASE_URL + "/Keys"
list_resource.list()
mock.assert_called_with("GET", url, params={}, auth=AUTH, use_json_extension=True)
| apache-2.0 |
Dandandan/wikiprogramming | jsrepl/extern/python/unclosured/lib/python2.7/encodings/ascii.py | 858 | 1248 | """ Python 'ascii' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.ascii_encode
decode = codecs.ascii_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.ascii_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.ascii_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
class StreamConverter(StreamWriter,StreamReader):
encode = codecs.ascii_decode
decode = codecs.ascii_encode
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='ascii',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
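# Illustrative round trip (Python 2):
#   >>> import codecs
#   >>> codecs.lookup('ascii').encode(u'abc')
#   ('abc', 3)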
| mit |
thardie/Diamond | src/diamond/handler/queue.py | 21 | 1152 | # coding=utf-8
"""
This is a meta handler to act as a shim for the new threading model. Please
do not try to use it as a normal handler.
"""
from Handler import Handler
class QueueHandler(Handler):
def __init__(self, config=None, queue=None, log=None):
# Initialize Handler
Handler.__init__(self, config=config, log=log)
self.metrics = []
self.queue = queue
def __del__(self):
"""
        Ensure as many of the metrics as possible are sent to the handlers
        on shutdown.
"""
self._flush()
def process(self, metric):
return self._process(metric)
def _process(self, metric):
"""
        We skip any locking code because this is now a single process
        per collector.
"""
self.metrics.append(metric)
def flush(self):
return self._flush()
def _flush(self):
"""
        We skip any locking code because this is now a single process
        per collector.
"""
if len(self.metrics) > 0:
self.queue.put(self.metrics, block=False)
self.metrics = []
| mit |
resmo/ansible | lib/ansible/modules/cloud/azure/azure_rm_sqlfirewallrule_info.py | 20 | 6319 | #!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_sqlfirewallrule_info
version_added: "2.8"
short_description: Get Azure SQL Firewall Rule facts
description:
- Get facts of SQL Firewall Rule.
options:
resource_group:
description:
- The name of the resource group that contains the server.
required: True
server_name:
description:
- The name of the server.
required: True
name:
description:
- The name of the firewall rule.
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Get instance of SQL Firewall Rule
azure_rm_sqlfirewallrule_info:
resource_group: myResourceGroup
server_name: testserver
name: testrule
- name: List instances of SQL Firewall Rule
azure_rm_sqlfirewallrule_info:
resource_group: myResourceGroup
server_name: testserver
'''
RETURN = '''
rules:
description:
- A list of dict results containing the facts for matching SQL firewall rules.
returned: always
type: complex
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: "/subscriptions/xxxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Sql/servers/testser
ver/firewallRules/testrule"
resource_group:
description:
- Resource group name.
returned: always
type: str
sample: testgroup
server_name:
description:
- SQL server name.
returned: always
type: str
sample: testserver
name:
description:
- Firewall rule name.
returned: always
type: str
sample: testrule
start_ip_address:
description:
- The start IP address of the firewall rule.
returned: always
type: str
sample: 10.0.0.1
end_ip_address:
description:
- The end IP address of the firewall rule.
returned: always
type: str
sample: 10.0.0.5
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from azure.mgmt.sql import SqlManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMSqlFirewallRuleInfo(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
server_name=dict(
type='str',
required=True
),
name=dict(
type='str'
)
)
# store the results of the module operation
self.results = dict(
changed=False
)
self.resource_group = None
self.server_name = None
self.name = None
super(AzureRMSqlFirewallRuleInfo, self).__init__(self.module_arg_spec, supports_tags=False)
def exec_module(self, **kwargs):
is_old_facts = self.module._name == 'azure_rm_sqlfirewallrule_facts'
if is_old_facts:
self.module.deprecate("The 'azure_rm_sqlfirewallrule_facts' module has been renamed to 'azure_rm_sqlfirewallrule_info'", version='2.13')
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
if (self.name is not None):
self.results['rules'] = self.get()
else:
self.results['rules'] = self.list_by_server()
return self.results
def get(self):
'''
Gets facts of the specified SQL Firewall Rule.
:return: deserialized SQL Firewall Ruleinstance state dictionary
'''
response = None
results = []
try:
response = self.sql_client.firewall_rules.get(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for FirewallRules.')
if response is not None:
results.append(self.format_item(response))
return results
def list_by_server(self):
'''
Gets facts of the specified SQL Firewall Rule.
:return: deserialized SQL Firewall Ruleinstance state dictionary
'''
response = None
results = []
try:
response = self.sql_client.firewall_rules.list_by_server(resource_group_name=self.resource_group,
server_name=self.server_name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for FirewallRules.')
if response is not None:
for item in response:
results.append(self.format_item(item))
return results
def format_item(self, item):
d = item.as_dict()
d = {
'id': d['id'],
'resource_group': self.resource_group,
'server_name': self.server_name,
'name': d['name'],
'start_ip_address': d['start_ip_address'],
'end_ip_address': d['end_ip_address']
}
return d
def main():
AzureRMSqlFirewallRuleInfo()
if __name__ == '__main__':
main()
| gpl-3.0 |
JioCloud/nova | nova/tests/unit/test_crypto.py | 15 | 16497 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for Crypto module.
"""
import os
import StringIO
import mock
from mox3 import mox
from oslo_concurrency import processutils
import paramiko
from nova import crypto
from nova import db
from nova import exception
from nova import test
from nova import utils
class X509Test(test.TestCase):
def test_can_generate_x509(self):
with utils.tempdir() as tmpdir:
self.flags(ca_path=tmpdir)
crypto.ensure_ca_filesystem()
_key, cert_str = crypto.generate_x509_cert('fake', 'fake')
project_cert = crypto.fetch_ca(project_id='fake')
signed_cert_file = os.path.join(tmpdir, "signed")
with open(signed_cert_file, 'w') as keyfile:
keyfile.write(cert_str)
project_cert_file = os.path.join(tmpdir, "project")
with open(project_cert_file, 'w') as keyfile:
keyfile.write(project_cert)
enc, err = utils.execute('openssl', 'verify', '-CAfile',
project_cert_file, '-verbose', signed_cert_file)
self.assertFalse(err)
def test_encrypt_decrypt_x509(self):
with utils.tempdir() as tmpdir:
self.flags(ca_path=tmpdir)
project_id = "fake"
crypto.ensure_ca_filesystem()
cert = crypto.fetch_ca(project_id)
public_key = os.path.join(tmpdir, "public.pem")
with open(public_key, 'w') as keyfile:
keyfile.write(cert)
text = "some @#!%^* test text"
enc, _err = utils.execute('openssl',
'rsautl',
'-certin',
'-encrypt',
'-inkey', '%s' % public_key,
process_input=text)
dec = crypto.decrypt_text(project_id, enc)
self.assertEqual(text, dec)
@mock.patch.object(utils, 'execute',
side_effect=processutils.ProcessExecutionError)
def test_ensure_ca_filesystem_chdir(self, *args, **kargs):
with utils.tempdir() as tmpdir:
self.flags(ca_path=tmpdir)
start = os.getcwd()
self.assertRaises(processutils.ProcessExecutionError,
crypto.ensure_ca_filesystem)
self.assertEqual(start, os.getcwd())
class RevokeCertsTest(test.TestCase):
def test_revoke_certs_by_user_and_project(self):
user_id = 'test_user'
project_id = 2
file_name = 'test_file'
def mock_certificate_get_all_by_user_and_project(context,
user_id,
project_id):
return [{"user_id": user_id, "project_id": project_id,
"file_name": file_name}]
self.stubs.Set(db, 'certificate_get_all_by_user_and_project',
mock_certificate_get_all_by_user_and_project)
self.mox.StubOutWithMock(crypto, 'revoke_cert')
crypto.revoke_cert(project_id, file_name)
self.mox.ReplayAll()
crypto.revoke_certs_by_user_and_project(user_id, project_id)
def test_revoke_certs_by_user(self):
user_id = 'test_user'
project_id = 2
file_name = 'test_file'
def mock_certificate_get_all_by_user(context, user_id):
return [{"user_id": user_id, "project_id": project_id,
"file_name": file_name}]
self.stubs.Set(db, 'certificate_get_all_by_user',
mock_certificate_get_all_by_user)
self.mox.StubOutWithMock(crypto, 'revoke_cert')
crypto.revoke_cert(project_id, mox.IgnoreArg())
self.mox.ReplayAll()
crypto.revoke_certs_by_user(user_id)
def test_revoke_certs_by_project(self):
user_id = 'test_user'
project_id = 2
file_name = 'test_file'
def mock_certificate_get_all_by_project(context, project_id):
return [{"user_id": user_id, "project_id": project_id,
"file_name": file_name}]
self.stubs.Set(db, 'certificate_get_all_by_project',
mock_certificate_get_all_by_project)
self.mox.StubOutWithMock(crypto, 'revoke_cert')
crypto.revoke_cert(project_id, mox.IgnoreArg())
self.mox.ReplayAll()
crypto.revoke_certs_by_project(project_id)
@mock.patch.object(utils, 'execute',
side_effect=processutils.ProcessExecutionError)
@mock.patch.object(os, 'chdir', return_value=None)
def test_revoke_cert_process_execution_error(self, *args, **kargs):
self.assertRaises(exception.RevokeCertFailure, crypto.revoke_cert,
2, 'test_file')
@mock.patch.object(os, 'chdir', mock.Mock(side_effect=OSError))
def test_revoke_cert_project_not_found_chdir_fails(self, *args, **kargs):
self.assertRaises(exception.ProjectNotFound, crypto.revoke_cert,
2, 'test_file')
class CertExceptionTests(test.TestCase):
def test_fetch_ca_file_not_found(self):
with utils.tempdir() as tmpdir:
self.flags(ca_path=tmpdir)
self.flags(use_project_ca=True)
self.assertRaises(exception.CryptoCAFileNotFound, crypto.fetch_ca,
project_id='fake')
def test_fetch_crl_file_not_found(self):
with utils.tempdir() as tmpdir:
self.flags(ca_path=tmpdir)
self.flags(use_project_ca=True)
self.assertRaises(exception.CryptoCRLFileNotFound,
crypto.fetch_crl, project_id='fake')
class EncryptionTests(test.TestCase):
pubkey = ("ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDArtgrfBu/g2o28o+H2ng/crv"
"zgES91i/NNPPFTOutXelrJ9QiPTPTm+B8yspLsXifmbsmXztNOlBQgQXs6usxb4"
"fnJKNUZ84Vkp5esbqK/L7eyRqwPvqo7btKBMoAMVX/kUyojMpxb7Ssh6M6Y8cpi"
"goi+MSDPD7+5yRJ9z4mH9h7MCY6Ejv8KTcNYmVHvRhsFUcVhWcIISlNWUGiG7rf"
"oki060F5myQN3AXcL8gHG5/Qb1RVkQFUKZ5geQ39/wSyYA1Q65QTba/5G2QNbl2"
"0eAIBTyKZhN6g88ak+yARa6BLLDkrlP7L4WctHQMLsuXHohQsUO9AcOlVMARgrg"
"uF test@test")
prikey = """-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEAwK7YK3wbv4NqNvKPh9p4P3K784BEvdYvzTTzxUzrrV3payfU
Ij0z05vgfMrKS7F4n5m7Jl87TTpQUIEF7OrrMW+H5ySjVGfOFZKeXrG6ivy+3ska
sD76qO27SgTKADFV/5FMqIzKcW+0rIejOmPHKYoKIvjEgzw+/uckSfc+Jh/YezAm
OhI7/Ck3DWJlR70YbBVHFYVnCCEpTVlBohu636JItOtBeZskDdwF3C/IBxuf0G9U
VZEBVCmeYHkN/f8EsmANUOuUE22v+RtkDW5dtHgCAU8imYTeoPPGpPsgEWugSyw5
K5T+y+FnLR0DC7Llx6IULFDvQHDpVTAEYK4LhQIDAQABAoIBAF9ibrrgHnBpItx+
qVUMbriiGK8LUXxUmqdQTljeolDZi6KzPc2RVKWtpazBSvG7skX3+XCediHd+0JP
DNri1HlNiA6B0aUIGjoNsf6YpwsE4YwyK9cR5k5YGX4j7se3pKX2jOdngxQyw1Mh
dkmCeWZz4l67nbSFz32qeQlwrsB56THJjgHB7elDoGCXTX/9VJyjFlCbfxVCsIng
inrNgT0uMSYMNpAjTNOjguJt/DtXpwzei5eVpsERe0TRRVH23ycS0fuq/ancYwI/
MDr9KSB8r+OVGeVGj3popCxECxYLBxhqS1dAQyJjhQXKwajJdHFzidjXO09hLBBz
FiutpYUCgYEA6OFikTrPlCMGMJjSj+R9woDAOPfvCDbVZWfNo8iupiECvei88W28
RYFnvUQRjSC0pHe//mfUSmiEaE+SjkNCdnNR+vsq9q+htfrADm84jl1mfeWatg/g
zuGz2hAcZnux3kQMI7ufOwZNNpM2bf5B4yKamvG8tZRRxSkkAL1NV48CgYEA08/Z
Ty9g9XPKoLnUWStDh1zwG+c0q14l2giegxzaUAG5DOgOXbXcw0VQ++uOWD5ARELG
g9wZcbBsXxJrRpUqx+GAlv2Y1bkgiPQS1JIyhsWEUtwfAC/G+uZhCX53aI3Pbsjh
QmkPCSp5DuOuW2PybMaw+wVe+CaI/gwAWMYDAasCgYEA4Fzkvc7PVoU33XIeywr0
LoQkrb4QyPUrOvt7H6SkvuFm5thn0KJMlRpLfAksb69m2l2U1+HooZd4mZawN+eN
DNmlzgxWJDypq83dYwq8jkxmBj1DhMxfZnIE+L403nelseIVYAfPLOqxUTcbZXVk
vRQFp+nmSXqQHUe5rAy1ivkCgYEAqLu7cclchCxqDv/6mc5NTVhMLu5QlvO5U6fq
HqitgW7d69oxF5X499YQXZ+ZFdMBf19ypTiBTIAu1M3nh6LtIa4SsjXzus5vjKpj
FdQhTBus/hU83Pkymk1MoDOPDEtsI+UDDdSDldmv9pyKGWPVi7H86vusXCLWnwsQ
e6fCXWECgYEAqgpGvva5kJ1ISgNwnJbwiNw0sOT9BMOsdNZBElf0kJIIy6FMPvap
6S1ziw+XWfdQ83VIUOCL5DrwmcYzLIogS0agmnx/monfDx0Nl9+OZRxy6+AI9vkK
86A1+DXdo+IgX3grFK1l1gPhAZPRWJZ+anrEkyR4iLq6ZoPZ3BQn97U=
-----END RSA PRIVATE KEY-----"""
text = "Some text! %$*"
def _ssh_decrypt_text(self, ssh_private_key, text):
with utils.tempdir() as tmpdir:
sshkey = os.path.abspath(os.path.join(tmpdir, 'ssh.key'))
with open(sshkey, 'w') as f:
f.write(ssh_private_key)
try:
dec, _err = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', sshkey,
process_input=text)
return dec
except processutils.ProcessExecutionError as exc:
raise exception.DecryptionFailure(reason=exc.stderr)
def test_ssh_encrypt_decrypt_text(self):
enc = crypto.ssh_encrypt_text(self.pubkey, self.text)
self.assertNotEqual(enc, self.text)
result = self._ssh_decrypt_text(self.prikey, enc)
self.assertEqual(result, self.text)
def test_ssh_encrypt_failure(self):
self.assertRaises(exception.EncryptionFailure,
crypto.ssh_encrypt_text, '', self.text)
class ConversionTests(test.TestCase):
k1 = ("ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA4CqmrxfU7x4sJrubpMNxeglul+d"
"ByrsicnvQcHDEjPzdvoz+BaoAG9bjCA5mCeTBIISsVTVXz/hxNeiuBV6LH/UR/c"
"27yl53ypN+821ImoexQZcKItdnjJ3gVZlDob1f9+1qDVy63NJ1c+TstkrCTRVeo"
"9VyE7RpdSS4UCiBe8Xwk3RkedioFxePrI0Ktc2uASw2G0G2Rl7RN7KZOJbCivfF"
"LQMAOu6e+7fYvuE1gxGHHj7dxaBY/ioGOm1W4JmQ1V7AKt19zTBlZKduN8FQMSF"
"r35CDlvoWs0+OP8nwlebKNCi/5sdL8qiSLrAcPB4LqdkAf/blNSVA2Yl83/c4lQ"
"== test@test")
k2 = ("-----BEGIN PUBLIC KEY-----\n"
"MIIBIDANBgkqhkiG9w0BAQEFAAOCAQ0AMIIBCAKCAQEA4CqmrxfU7x4sJrubpMNx\n"
"eglul+dByrsicnvQcHDEjPzdvoz+BaoAG9bjCA5mCeTBIISsVTVXz/hxNeiuBV6L\n"
"H/UR/c27yl53ypN+821ImoexQZcKItdnjJ3gVZlDob1f9+1qDVy63NJ1c+TstkrC\n"
"TRVeo9VyE7RpdSS4UCiBe8Xwk3RkedioFxePrI0Ktc2uASw2G0G2Rl7RN7KZOJbC\n"
"ivfFLQMAOu6e+7fYvuE1gxGHHj7dxaBY/ioGOm1W4JmQ1V7AKt19zTBlZKduN8FQ\n"
"MSFr35CDlvoWs0+OP8nwlebKNCi/5sdL8qiSLrAcPB4LqdkAf/blNSVA2Yl83/c4\n"
"lQIBIw==\n"
"-----END PUBLIC KEY-----\n")
def test_convert_keys(self):
result = crypto.convert_from_sshrsa_to_pkcs8(self.k1)
self.assertEqual(result, self.k2)
def test_convert_failure(self):
self.assertRaises(exception.EncryptionFailure,
crypto.convert_from_sshrsa_to_pkcs8, '')
class KeyPairTest(test.TestCase):
rsa_prv = (
"-----BEGIN RSA PRIVATE KEY-----\n"
"MIIEowIBAAKCAQEA5G44D6lEgMj6cRwCPydsMl1VRN2B9DVyV5lmwssGeJClywZM\n"
"WcKlSZBaWPbwbt20/r74eMGZPlqtEi9Ro+EHj4/n5+3A2Mh11h0PGSt53PSPfWwo\n"
"ZhEg9hQ1w1ZxfBMCx7eG2YdGFQocMgR0zQasJGjjt8hruCnWRB3pNH9DhEwKhgET\n"
"H0/CFzxSh0eZWs/O4GSf4upwmRG/1Yu90vnVZq3AanwvvW5UBk6g4uWb6FTES867\n"
"kAy4b5EcH6WR3lLE09omuG/NqtH+qkgIdQconDkmkuK3xf5go6GSwEod0erM1G1v\n"
"e+C4w/MD98KZ4Zlon9hy7oE2rcqHXf58gZtOTQIDAQABAoIBAQCnkeM2Oemyv7xY\n"
"dT+ArJ7GY4lFt2i5iOuUL0ge5Wid0R6OTNR9lDhEOszMLno6GhHIPrdvfjW4dDQ5\n"
"/tRY757oRZzNmq+5V3R52V9WC3qeCBmq3EjWdwJDAphd72/YoOmNMKiPsphKntwI\n"
"JRS5wodNPlSuYSwEMUypM3f7ttAEn5CASgYgribBDapm7EqkVa2AqSvpFzNvN3/e\n"
"Sc36/XlxJin7AkKVOnRksuVOOj504VUQfXgVWZkfTeZqAROgA1FSnjUAffcubJmq\n"
"pDL/JSgOqN4S+sJkkTrb19MuM9M/IdXteloynF+GUKZx6FdVQQc8xCiXgeupeeSD\n"
"fNMAP7DRAoGBAP0JRFm3fCAavBREKVOyZm20DpeR6zMrVP7ht0SykkT/bw/kiRG+\n"
"FH1tNioj9uyixt5SiKhH3ZVAunjsKvrwET8i3uz1M2Gk+ovWdLXurBogYNNWafjQ\n"
"hRhFHpyExoZYRsn58bvYvjFXTO6JxuNS2b59DGBRkQ5mpsOhxarfbZnXAoGBAOcb\n"
"K+qoPDeDicnQZ8+ygYYHxY3fy1nvm1F19jBiWd26bAUOHeZNPPKGvTSlrGWJgEyA\n"
"FjZIlHJOY2s0dhukiytOiXzdA5iqK1NvlF+QTUI4tCeNMVejWC+n6sKR9ADZkX8D\n"
"NOHaLkDzc/ukus59aKyjxP53I6SV6y6m5NeyvDx7AoGAaUji1MXA8wbMvU4DOB0h\n"
"+4GRFMYVbEwaaJd4jzASJn12M9GuquBBXFMF15DxXFL6lmUXEZYdf83YCRqTY6hi\n"
"NLgIs+XuxDFGQssv8sdletWAFE9/dpUk3A1eiFfC1wGCKuZCDBxKPvOJQjO3uryt\n"
"d1JGxQkLZ0eVGg+E1O10iC8CgYB4w2QRfNPqllu8D6EPkVHJfeonltgmKOTajm+V\n"
"HO+kw7OKeLP7EkVU3j+kcSZC8LUQRKZWu1qG2Jtu+7zz+OmYObPygXNNpS56rQW1\n"
"Yixc/FB3knpEN2DvlilAfxAoGYjD/CL4GhCtdAoZZx0Opc262OEpr4v6hzSb7i4K\n"
"4KUoXQKBgHfbiaSilxx9guUqvSaexpHmtiUwx05a05fD6tu8Cofl6AM9wGpw3xOT\n"
"tfo4ehvS13tTz2RDE2xKuetMmkya7UgifcxUmBzqkOlgr0oOi2rp+eDKXnzUUqsH\n"
"V7E96Dj36K8q2+gZIXcNqjN7PzfkF8pA0G+E1veTi8j5dnvIsy1x\n"
"-----END RSA PRIVATE KEY-----\n"
)
rsa_pub = (
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDkbjgPqUSAyPpxHAI/J2wyXVVE"
"3YH0NXJXmWbCywZ4kKXLBkxZwqVJkFpY9vBu3bT+vvh4wZk+Wq0SL1Gj4QePj+fn"
"7cDYyHXWHQ8ZK3nc9I99bChmESD2FDXDVnF8EwLHt4bZh0YVChwyBHTNBqwkaOO3"
"yGu4KdZEHek0f0OETAqGARMfT8IXPFKHR5laz87gZJ/i6nCZEb/Vi73S+dVmrcBq"
"fC+9blQGTqDi5ZvoVMRLzruQDLhvkRwfpZHeUsTT2ia4b82q0f6qSAh1ByicOSaS"
"4rfF/mCjoZLASh3R6szUbW974LjD8wP3wpnhmWif2HLugTatyodd/nyBm05N Gen"
"erated-by-Nova"
)
rsa_fp = "e7:66:a1:2c:4f:90:6e:11:19:da:ac:c2:69:e1:ad:89"
dss_pub = (
"ssh-dss AAAAB3NzaC1kc3MAAACBAKWFW2++pDxJWObkADbSXw8KfZ4VupkRKEXF"
"SPN2kV0v+FgdnBEcrEJPExaOTMhmxIuc82ktTv76wHSEpbbsLuI7IDbB6KJJwHs2"
"y356yB28Q9rin7X0VMYKkPxvAcbIUSrEbQtyPMihlOaaQ2dGSsEQGQSpjm3f3RU6"
"OWux0w/NAAAAFQCgzWF2zxQmi/Obd11z9Im6gY02gwAAAIAHCDLjipVwMLXIqNKO"
"MktiPex+ewRQxBi80dzZ3mJzARqzLPYI9hJFUU0LiMtLuypV/djpUWN0cQpmgTQf"
"TfuZx9ipC6Mtiz66NQqjkQuoihzdk+9KlOTo03UsX5uBGwuZ09Dnf1VTF8ZsW5Hg"
"HyOk6qD71QBajkcFJAKOT3rFfgAAAIAy8trIzqEps9/n37Nli1TvNPLbFQAXl1LN"
"wUFmFDwBCGTLl8puVZv7VSu1FG8ko+mzqNebqcN4RMC26NxJqe+RRubn5KtmLoIa"
"7tRe74hvQ1HTLLuGxugwa4CewNbwzzEDEs8U79WDhGKzDkJR4nLPVimj5WLAWV70"
"RNnRX7zj5w== Generated-by-Nova"
)
dss_fp = "b9:dc:ac:57:df:2a:2b:cf:65:a8:c3:4e:9d:4a:82:3c"
ecdsa_pub = (
"ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAy"
"NTYAAABBBG1r4wzPTIjSo78POCq+u/czb8gYK0KvqlmCvcRPrnDWxgLw7y6BX51t"
"uYREz7iLRCP7BwUt8R+ZWzFZDeOLIWU= Generated-by-Nova"
)
ecdsa_fp = "16:6a:c9:ec:80:4d:17:3e:d5:3b:6f:c0:d7:15:04:40"
def test_generate_fingerprint(self):
fingerprint = crypto.generate_fingerprint(self.rsa_pub)
self.assertEqual(self.rsa_fp, fingerprint)
fingerprint = crypto.generate_fingerprint(self.dss_pub)
self.assertEqual(self.dss_fp, fingerprint)
fingerprint = crypto.generate_fingerprint(self.ecdsa_pub)
self.assertEqual(self.ecdsa_fp, fingerprint)
def test_generate_key_pair_2048_bits(self):
(private_key, public_key, fingerprint) = crypto.generate_key_pair()
raw_pub = public_key.split(' ')[1].decode('base64')
pkey = paramiko.rsakey.RSAKey(None, raw_pub)
self.assertEqual(2048, pkey.get_bits())
def test_generate_key_pair_1024_bits(self):
bits = 1024
(private_key, public_key, fingerprint) = crypto.generate_key_pair(bits)
raw_pub = public_key.split(' ')[1].decode('base64')
pkey = paramiko.rsakey.RSAKey(None, raw_pub)
self.assertEqual(bits, pkey.get_bits())
def test_generate_key_pair_mocked_private_key(self):
keyin = StringIO.StringIO()
keyin.write(self.rsa_prv)
keyin.seek(0)
key = paramiko.RSAKey.from_private_key(keyin)
with mock.patch.object(paramiko.RSAKey, 'generate') as mock_generate:
mock_generate.return_value = key
(private_key, public_key, fingerprint) = crypto.generate_key_pair()
self.assertEqual(self.rsa_pub, public_key)
self.assertEqual(self.rsa_fp, fingerprint)
| apache-2.0 |
mikewiebe-ansible/ansible | lib/ansible/modules/network/cnos/cnos_template.py | 52 | 5475 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to send CLI templates to Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_template
author: "Anil Kumar Muraleedharan (@amuraleedhar)"
short_description: Manage switch configuration using templates on devices running Lenovo CNOS
description:
- This module allows you to work with the running configuration of a switch. It provides a way
to execute a set of CNOS commands on a switch by evaluating the current running configuration
    and executing the commands only if the specific settings have not already been configured.
The configuration source can be a set of commands or a template written in the Jinja2 templating language.
This module uses SSH to manage network device configuration.
The results of the operation will be placed in a directory named 'results'
    that the user must create in the local directory from which the playbook is run.
version_added: "2.3"
extends_documentation_fragment: cnos
options:
commandfile:
description:
- This specifies the path to the CNOS command file which needs to be applied. This usually
        comes from the commands folder. Generally this file is produced by applying variables to
        a template file, so this task is usually preceded by a template module.
        Note: The command file must contain the Ansible keyword {{ inventory_hostname }} in its
filename to ensure that the command file is unique for each switch and condition.
If this is omitted, the command file will be overwritten during iteration. For example,
commandfile=./commands/clos_leaf_bgp_{{ inventory_hostname }}_commands.txt
required: true
default: Null
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_template. These are written in the main.yml file of the tasks directory.
---
- name: Replace Config CLI command template with values
template:
src: demo_template.j2
dest: "./commands/demo_template_{{ inventory_hostname }}_commands.txt"
vlanid1: 13
slot_chassis_number1: "1/2"
portchannel_interface_number1: 100
portchannel_mode1: "active"
- name: Applying CLI commands on Switches
cnos_template:
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
commandfile: "./commands/demo_template_{{ inventory_hostname }}_commands.txt"
outputfile: "./results/demo_template_command_{{ inventory_hostname }}_output.txt"
'''
RETURN = '''
msg:
description: Success or failure message
returned: always
type: str
sample: "Template Applied."
'''
import sys
import time
import socket
import array
import json
import re
import os
try:
from ansible.module_utils.network.cnos import cnos
HAS_LIB = True
except Exception:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
module = AnsibleModule(
argument_spec=dict(
commandfile=dict(required=True),
outputfile=dict(required=True),
host=dict(required=False),
deviceType=dict(required=True),
username=dict(required=False),
password=dict(required=False, no_log=True),
enablePassword=dict(required=False, no_log=True),),
supports_check_mode=False)
commandfile = module.params['commandfile']
outputfile = module.params['outputfile']
output = ''
# Send commands one by one to the device
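    # Illustrative command-file contents (assumed example, not from the
    # source): plain CLI lines, one per line; lines beginning with '#' are
    # skipped as comments by the loop below, e.g.
    #   # configure vlan
    #   vlan 13
    #   exit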
f = open(commandfile, "r")
cmd = []
for line in f:
# Omit the comment lines in template file
if not line.startswith("#"):
command = line.strip()
inner_cmd = [{'command': command, 'prompt': None, 'answer': None}]
            cmd.extend(inner_cmd)
    f.close()
# Write to memory
save_cmd = [{'command': 'save', 'prompt': None, 'answer': None}]
cmd.extend(save_cmd)
output = output + str(cnos.run_cnos_commands(module, cmd))
# Write output to file
path = outputfile.rsplit('/', 1)
# cnos.debugOutput(path[0])
if not os.path.exists(path[0]):
os.makedirs(path[0])
file = open(outputfile, "a")
file.write(output)
file.close()
# Logic to check when changes occur or not
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="Template Applied")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
| gpl-3.0 |
tolo137/IHSS | programs/scoreSheet.py | 1 | 32460 | from Tkinter import *
import ttk
import datetime
import teamClass
import playerClass
class Application(Frame):
def __init__(self, master=None):
Frame.__init__(self, master)
self.teamList = []
self.pack()
self.maxTeamPlayers = 2
self.loadData()
self.gameNumber = int([line for line in open("gameNumber.txt", "rU")][0])
self.teamNames = []
for team in self.teamList:
self.teamNames.append(team.teamName)
self.team1 = self.teamList[0]
self.team2 = self.teamList[1]
self.team1score = 0
self.team2score = 0
self.actions = []
nullPlayer = playerClass.Player(0, "0", "0", 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
for player in self.team1.players:
if player.isGoalie:
self.team1currentGoalie = player
break
else:
self.team1currentGoalie = nullPlayer
for player in self.team2.players:
if player.isGoalie:
self.team2currentGoalie = player
break
else:
self.team2currentGoalie = nullPlayer
self.createWidgets()
def addTeam(self, team):
self.teamList.append(team)
def loadData(self):
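        # teamData.txt format, as inferred from the parser below:
        #   TeamName
        #   -playerID;firstName;lastName;number;<stats...>;isGoalie;<stats...>
        # A row with a leading '-' is a player (15 semicolon-separated fields,
        # isGoalie flag at index 9) belonging to the preceding team.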
dataFile = open("teamData.txt", "rU")
dataArray = [line for line in dataFile]
dataFile.close()
teams = []
currentTeam = -1
players = 0
for item in dataArray:
if str(item)[0] != '-':
currentTeam += 1
newTeam = teamClass.Team(str(item).strip())
teams.append(newTeam)
if players > self.maxTeamPlayers:
self.maxTeamPlayers = players
players = 0
else:
players += 1
                # Player() takes the 15 semicolon-separated fields parsed below
playerData = [part for part in item.strip().split(';')]
# Remove minus sign
playerData[0] = playerData[0][1:]
# Set isGoalie flag
if int(playerData[9]) == 0:
playerData[9] = False
else:
playerData[9] = True
newPlayer = playerClass.Player(playerData[0], playerData[1], playerData[2], int(playerData[3]), int(playerData[4]), int(playerData[5]), int(playerData[6]), int(playerData[7]), int(playerData[8]), int(playerData[9]), int(playerData[10]), int(playerData[11]), int(playerData[12]), int(playerData[13]), int(playerData[14]))
# Assign to team
                teams[currentTeam].addPlayer(newPlayer)
        # Account for the final team, which has no following header line
        if players > self.maxTeamPlayers:
            self.maxTeamPlayers = players
for team in teams:
self.addTeam(team)
def saveData(self):
## Save individual game stats
gameFileName = "savedGames/game" + str(self.gameNumber) + ".txt"
gameFile = open(gameFileName, "w")
gameFile.write(str(self.gameNumber))
gameFile.write("\n")
gameFile.write(self.dateVar.get())
gameFile.write("\n")
gameFile.write(self.timeVar.get())
gameFile.write("\n")
gameFile.write(self.ref1Var.get())
gameFile.write("\n")
gameFile.write(self.ref2Var.get())
gameFile.write("\n")
gameFile.write(self.scorerVar.get())
gameFile.write("\n")
gameFile.write(self.timekeeperVar.get())
gameFile.write("\n")
gameFile.write("\n")
gameFile.write(str(self.team1score)+"v"+str(self.team2score))
gameFile.write("\n")
gameFile.write("\n")
## Team 1 information
gameFile.write(self.team1.teamName)
for player in self.team1.players:
gameFile.write("\n")
gameFile.write(player.saveData(True))
gameFile.write("\n")
## Team 2 Information
gameFile.write(self.team2.teamName)
for player in self.team2.players:
gameFile.write("\n")
gameFile.write(player.saveData(True))
gameFile.write("\n\n")
## Action summary
for e, item in enumerate(self.actions):
if item == "-":
del self.actions[e]
del self.actions[e-1]
for item in self.actions:
gameFile.write(item + "\n")
gameFile.close()
## Update game number for next game
self.gameNumber += 1
gameNFile = open("gameNumber.txt", "w")
gameNFile.write(str(self.gameNumber))
gameNFile.close()
## Save overall player stats
teamData = []
for team in self.teamList:
teamData.append(team.teamName)
for player in team.players:
teamData.append(player.saveData(False))
self.quit()
dataFile = open("teamData.txt", "w")
for item in teamData:
dataFile.write(item + "\n")
dataFile.close()
self.saveHTMLTable("htmlTables/table")
self.saveHTMLTable("/Users/eganm/Dropbox/testing/table")
def saveHTMLTable(self, loc):
htmlFileName = loc + str(self.gameNumber) + ".html"
htmlFile = open(htmlFileName, "w")
## Write html constants for table including headers
htmlFile.write("""<table border="1">""")
htmlFile.write("""<tr>
<th>Name</th>
<th>Team</th>
<th>#</th>
<th>G</th>
<th>A</th>
<th>P</th>
</tr>""")
## Sort players by points
allPlayers = []
for team in self.teamList:
for player in team.players:
allPlayers.append((player.points, player, team.teamName))
allPlayers.sort()
allPlayers.reverse()
## Write each players stats
for tup in allPlayers:
player = tup[1]
teamName = tup[2]
stringToWrite = "<tr><td>" + player.getFullName() + "</td><td>" + teamName + "</td><td>" + str(player.number) + "</td><td>" + str(player.goals) + "</td><td>" + str(player.assists) + "</td><td>" + str(player.points) + "</td></tr>"
htmlFile.write(stringToWrite)
htmlFile.close()
def saveAllTimeData(self):
## Save overall player stats
teamData = []
for team in self.teamList:
teamData.append(team.teamName)
for player in team.players:
teamData.append(player.saveData(False))
dataFile = open("teamData.txt", "w")
for item in teamData:
dataFile.write(item + "\n")
dataFile.close()
def createWidgets(self):
WIDTH = 10
## Date, time and officals area
admin = Frame(self)
admin.grid(row=0, column=0)
game = Label(admin)
game["text"] = "Game: " + str(self.gameNumber)
game.grid(row=0, column=0, sticky=N+S+E+W)
timeDate = Frame(admin)
timeDate.grid(row=1, column=0)
self.dateLbl = Label(timeDate, text="Date: ").grid(row=0, column=0)
self.dateVar = StringVar()
self.dateField = Entry(timeDate, textvariable=self.dateVar).grid(row=0, column=1)
currentDate = str(datetime.date.today().day) + "/" + str(datetime.date.today().month) + "/" + str(datetime.date.today().year)
self.dateVar.set(currentDate)
self.timeLbl = Label(timeDate, text="Time: ").grid(row=1, column=0)
self.timeVar = StringVar()
self.timeEntry = Entry(timeDate, textvariable=self.timeVar).grid(row=1, column=1)
self.gameStart = datetime.datetime.now().time()
self.timeVar.set(str(self.gameStart).split()[0][:-10])
self.resultLbl = Label(timeDate)
self.resultLbl["text"] = "Result: "+str(self.team1score)+"v"+str(self.team2score)
self.resultLbl.grid(row=0, column=2)
self.shootoutLbl = Label(timeDate, text="Shootout: v").grid(row=1, column=2)
self.finalLbl = Label(timeDate, text="Final: v").grid(row=2, column=2)
self.ref1Lbl = Label(timeDate, text="Ref1").grid(row=0, column=4)
self.ref1Var = StringVar()
self.ref1 = Entry(timeDate, textvariable=self.ref1Var).grid(row=0, column=5)
self.ref1Var.set("Referee 1")
self.ref2Lbl = Label(timeDate, text="Ref2").grid(row=1, column=4)
self.ref2Var = StringVar()
self.ref2 = Entry(timeDate, textvariable=self.ref2Var).grid(row=1, column=5)
self.ref2Var.set("Referee 2")
self.scorerLbl = Label(timeDate, text="Scorer").grid(row=0, column=6)
self.scorerVar = StringVar()
self.scorerFld = Entry(timeDate, textvariable=self.scorerVar).grid(row=0, column=7)
self.scorerVar.set("Scorer")
self.timekeeperLbl = Label(timeDate, text="Timekeeper").grid(row=1, column=6)
self.timekeeperVar = StringVar()
self.timekeeperFld = Entry(timeDate, textvariable=self.timekeeperVar).grid(row=1, column=7)
self.timekeeperVar.set("Timekeeper")
## Scoring Area
scorer = Frame(self)
scorer.grid(row=2, column=0)
## Team 1 Stuff ##
self.team1Lab = Label(scorer)
self.team1Lab["text"] = "Team1"
self.team1Lab.grid(row=1, column=0)
## Create combobox
self.combo = ttk.Combobox(scorer)
self.combo.bind("<<ComboboxSelected>>", self._updatecb1)
self.combo["values"] = (self.teamNames)
self.combo.current(0)
self.combo.grid(row=1, column=1)
## Goal Add
self.team1goaladd = Button(scorer, width=WIDTH)
self.team1goaladd["text"] = "+G"
self.team1goaladd["command"] = self.team1goalUp
self.team1goaladd.grid(row=2, column=2, sticky=S)
## Goal Minus
self.team1goalmin = Button(scorer, width=WIDTH)
self.team1goalmin["text"] = "-G"
self.team1goalmin["command"] = self.team1goalDown
self.team1goalmin.grid(row=2, column=3, sticky=S)
## Assist Add
self.team1assadd = Button(scorer, width=WIDTH)
self.team1assadd["text"] = "+A"
self.team1assadd["command"] = self.team1assistUp
self.team1assadd.grid(row=3, column=2, sticky=N)
## Assist Minus
self.team1assmin = Button(scorer, width=WIDTH)
self.team1assmin["text"] = "-A"
self.team1assmin["command"] = self.team1assistDown
self.team1assmin.grid(row=3, column=3, sticky=N)
## Saves Add
self.team1saveadd = Button(scorer, width=WIDTH)
self.team1saveadd["text"] = "+S"
self.team1saveadd["command"] = self.team1ShotsOnUp
self.team1saveadd.grid(row=4, column=2, sticky=S)
## Saves Minus
self.team1savemin = Button(scorer, width=WIDTH)
self.team1savemin["text"] = "-S"
self.team1savemin["command"] = self.team1ShotsOnDown
self.team1savemin.grid(row=4, column=3, sticky=S)
## Miss Add
#self.team1missadd = Button(scorer, width=WIDTH)
#self.team1missadd["text"] = "+M"
#self.team1missadd["command"] = self.team1missesUp
#self.team1missadd.grid(row=5, column=2, sticky=N)
## Miss Minus
#self.team1missmin = Button(scorer, width=WIDTH)
#self.team1missmin["text"] = "-M"
#self.team1missmin["command"] = self.team1missesDown
#self.team1missmin.grid(row=5, column=3, sticky=N)
## Add Minor
self.team1addMinor = Button(scorer, width=WIDTH, text="+Minor")
self.team1addMinor["command"] = self.team1MinorUp
self.team1addMinor.grid(row=6, column=2)
## Sub Minor
self.team1subMinor = Button(scorer, width=WIDTH, text="-Minor")
self.team1subMinor["command"] = self.team1MinorDown
self.team1subMinor.grid(row=6, column=3)
## Add Major
self.team1addMajor = Button(scorer, width=WIDTH, text="+Major")
self.team1addMajor["command"] = self.team1MajorUp
self.team1addMajor.grid(row=7, column=2)
## Sub Major
self.team1subMajor = Button(scorer, width=WIDTH, text="-Major")
self.team1subMajor["command"] = self.team1MajorDown
self.team1subMajor.grid(row=7, column=3)
## Add Misconduct
self.team1addMiscon = Button(scorer, width=WIDTH, text="+Miscon")
self.team1addMiscon["command"] = self.team1MisconUp
self.team1addMiscon.grid(row=8, column=2)
## Sub Misconduct
self.team1subMiscon = Button(scorer, width=WIDTH, text="-Miscon")
self.team1subMiscon["command"] = self.team1MisconDown
self.team1subMiscon.grid(row=8, column=3)
## Add Match
self.team1addMatch = Button(scorer, width=WIDTH, text="+Match")
self.team1addMatch["command"] = self.team1MatchUp
self.team1addMatch.grid(row=9, column=2)
## Sub Match
self.team1subMatch = Button(scorer, width=WIDTH, text="-Match")
self.team1subMatch["command"] = self.team1MajorDown
self.team1subMatch.grid(row=9, column=3)
## Add GameMisconduct
self.team1addGameMis = Button(scorer, width=WIDTH, text="+GameMis")
self.team1addGameMis["command"] = self.team1GameMisUp
self.team1addGameMis.grid(row=10, column=2)
## Sub GameMisconduct
self.team1subGameMis = Button(scorer, width=WIDTH, text="-GameMis")
self.team1subGameMis["command"] = self.team1GameMisDown
self.team1subGameMis.grid(row=10, column=3)
## Set goalie button
self.team1setGoalie = Button(scorer, width=WIDTH, text="Set Goalie")
self.team1setGoalie["command"] = self.team1setActGoalie
self.team1setGoalie.grid(row=12, column=2)
## Create player list
self.team1list = Listbox(scorer, height=self.maxTeamPlayers)
for i in (player.getFullName() for player in self.team1.players):
self.team1list.insert(END, i)
self.team1list.grid(row=2, column=1, rowspan=9, sticky=W+E+N+S)
## Create goalie list
self.team1goalieLbl = Label(scorer)
self.team1goalieLbl["text"] = str("Goalie: " + self.team1currentGoalie.getFullName())
self.team1goalieLbl.grid(row=11, column=1)
self.team1goalielist = Listbox(scorer)
for i in self.team1.players:
if i.isGoalie:
self.team1goalielist.insert(END, i.getFullName())
self.team1goalielist.grid(row=12, column=1)
## Team 2 Stuff ##
self.team2Lab = Label(scorer)
self.team2Lab["text"] = "Team2"
self.team2Lab.grid(row=1, column=3, sticky=E)
## Create combobox
self.combo2 = ttk.Combobox(scorer)
self.combo2.bind("<<ComboboxSelected>>", self._updatecb2)
self.combo2["values"] = (self.teamNames)
self.combo2.current(1)
self.combo2.grid(row=1, column=4)
## Goal Add
self.team2goaladd = Button(scorer, width=WIDTH)
self.team2goaladd["text"] = "+G"
self.team2goaladd["command"] = self.team2goalUp
self.team2goaladd.grid(row=2, column=5, sticky=S)
## Goal Minus
self.team2goalmin = Button(scorer, width=WIDTH)
self.team2goalmin["text"] = "-G"
self.team2goalmin["command"] = self.team2goalDown
self.team2goalmin.grid(row=2, column=6, sticky=S)
## Assist Add
self.team2assadd = Button(scorer, width=WIDTH)
self.team2assadd["text"] = "+A"
self.team2assadd["command"] = self.team2assistUp
self.team2assadd.grid(row=3, column=5, sticky=N)
## Assist Minus
self.team2assmin = Button(scorer, width=WIDTH)
self.team2assmin["text"] = "-A"
self.team2assmin["command"] = self.team2assistDown
self.team2assmin.grid(row=3, column=6, sticky=N)
## Saves Add
self.team2saveadd = Button(scorer, width=WIDTH)
self.team2saveadd["text"] = "+S"
self.team2saveadd["command"] = self.team2ShotsOnUp
self.team2saveadd.grid(row=4, column=5, sticky=S)
## Saves Minus
self.team2savemin = Button(scorer, width=WIDTH)
self.team2savemin["text"] = "-S"
self.team2savemin["command"] = self.team2ShotsOnDown
self.team2savemin.grid(row=4, column=6, sticky=S)
## Miss Add
#self.team2missadd = Button(scorer, width=WIDTH)
#self.team2missadd["text"] = "+M"
#self.team2missadd["command"] = self.team2missesUp
#self.team2missadd.grid(row=5, column=5, sticky=N)
## Miss Minus
#self.team2missmin = Button(scorer, width=WIDTH)
#self.team2missmin["text"] = "-M"
#self.team2missmin["command"] = self.team2missesDown
#self.team2missmin.grid(row=5, column=6, sticky=N)
## Add Minor
self.team2addMinor = Button(scorer, width=WIDTH, text="+Minor")
self.team2addMinor["command"] = self.team2MinorUp
self.team2addMinor.grid(row=6, column=5)
## Sub Minor
self.team2subMinor = Button(scorer, width=WIDTH, text="-Minor")
self.team2subMinor["command"] = self.team2MinorDown
self.team2subMinor.grid(row=6, column=6)
## Add Major
self.team2addMajor = Button(scorer, width=WIDTH, text="+Major")
self.team2addMajor["command"] = self.team2MajorUp
self.team2addMajor.grid(row=7, column=5)
## Sub Major
self.team2subMajor = Button(scorer, width=WIDTH, text="-Major")
self.team2subMajor["command"] = self.team2MajorDown
self.team2subMajor.grid(row=7, column=6)
## Add Misconduct
self.team2addMiscon = Button(scorer, width=WIDTH, text="+Miscon")
self.team2addMiscon["command"] = self.team2MisconUp
self.team2addMiscon.grid(row=8, column=5)
## Sub Misconduct
self.team2subMiscon = Button(scorer, width=WIDTH, text="-Miscon")
self.team2subMiscon["command"] = self.team2MisconDown
self.team2subMiscon.grid(row=8, column=6)
## Add Match
self.team2addMatch = Button(scorer, width=WIDTH, text="+Match")
self.team2addMatch["command"] = self.team2MatchUp
self.team2addMatch.grid(row=9, column=5)
## Sub Match
self.team2subMatch = Button(scorer, width=WIDTH, text="-Match")
self.team2subMatch["command"] = self.team2MatchDown
self.team2subMatch.grid(row=9, column=6)
## Add GameMisconduct
self.team2addGameMis = Button(scorer, width=WIDTH, text="+GameMis")
self.team2addGameMis["command"] = self.team2GameMisUp
self.team2addGameMis.grid(row=10, column=5)
## Sub GameMisconduct
self.team2subGameMis = Button(scorer, width=WIDTH, text="-GameMis")
self.team2subGameMis["command"] = self.team2GameMisDown
self.team2subGameMis.grid(row=10, column=6)
## Set goalie button
self.team2setGoalie = Button(scorer, width=WIDTH, text="Set Goalie")
self.team2setGoalie["command"] = self.team2setActGoalie
self.team2setGoalie.grid(row=12, column=5)
## Create player list
self.team2list = Listbox(scorer, height=self.maxTeamPlayers)
for i in (player.getFullName() for player in self.team2.players):
self.team2list.insert(END, i)
self.team2list.grid(row=2, column=4, rowspan=9, sticky=W+E+N+S)
## Create goalie list
self.team2goalieLbl = Label(scorer)
self.team2goalieLbl["text"] = str("Goalie: " + self.team2currentGoalie.getFullName())
self.team2goalieLbl.grid(row=11, column=4)
self.team2goalielist = Listbox(scorer)
for i in self.team2.players:
if i.isGoalie:
self.team2goalielist.insert(END, i.getFullName())
self.team2goalielist.grid(row=12, column=4)
## End Game Button
self.endGame = Button(self)
self.endGame["text"] = "End Game"
self.endGame["command"] = self.endGameProc
self.endGame.grid(row=3, column=0, sticky=N+S+E+W)
def team1setActGoalie(self):
playerIndex = self.team1goalielist.curselection()[0]
self.team1currentGoalie = self.team1.players[int(playerIndex)]
self.actions.append("-" + self.team1currentGoalie.getFullName() + " is the active goalie")
self.team1goalieLbl["text"]=str("Goalie: " + self.team1currentGoalie.getFullName())
def team2setActGoalie(self):
playerIndex = self.team2goalielist.curselection()[0]
self.team2currentGoalie = self.team2.players[int(playerIndex)]
self.actions.append("-" + self.team2currentGoalie.getFullName() + " is the active goalie")
self.team2goalieLbl["text"]=str("Goalie: " + self.team2currentGoalie.getFullName())
def _updatecb1(self, evt):
changedTo = evt.widget.get()
for team in self.teamList:
if team.teamName == changedTo:
self.team1 = team
self.team1list.delete(0, END)
for i in [player.getFullName() for player in self.team1.players]:
self.team1list.insert(END, i)
self.team1goalielist.delete(0, END)
for i in self.team1.players:
if i.isGoalie:
self.team1goalielist.insert(END, i.getFullName())
def _updatecb2(self, evt):
changedTo = evt.widget.get()
for team in self.teamList:
if team.teamName == changedTo:
self.team2 = team
self.team2list.delete(0, END)
for i in [player.getFullName() for player in self.team2.players]:
self.team2list.insert(END, i)
self.team2goalielist.delete(0, END)
for i in self.team2.players:
if i.isGoalie:
self.team2goalielist.insert(END, i.getFullName())
def team1goalUp(self):
self.team1score += 1
self.resultLbl["text"] = str("Result: "+str(self.team1score)+"v"+str(self.team2score))
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].addGoal()
self.team2currentGoalie.addShotsOn()
self.team2currentGoalie.addMiss()
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " scored:" + str(self.team1score) + "v" + str(self.team2score))
self.saveAllTimeData()
def team1goalDown(self):
self.team1score -= 1
self.resultLbl["text"] = str("Result: "+str(self.team1score)+"v"+str(self.team2score))
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].subGoal()
self.team2currentGoalie.subShotsOn()
self.team2currentGoalie.subMiss()
self.actions.append("-")
self.saveAllTimeData()
def team2goalUp(self):
self.team2score += 1
self.resultLbl["text"] = str("Result: "+str(self.team1score)+"v"+str(self.team2score))
playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].addGoal()
self.team1currentGoalie.addShotsOn()
self.team1currentGoalie.addMiss()
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " scored:" + str(self.team1score) + "v" + str(self.team2score))
self.saveAllTimeData()
def team2goalDown(self):
self.team2score -= 1
self.resultLbl["text"] = str("Result: "+str(self.team1score)+"v"+str(self.team2score))
playerIndex = self.team2list.curselection()[0]
self.team1currentGoalie.subShotsOn()
self.team1currentGoalie.subMiss()
self.team2.players[int(playerIndex)].subGoal()
self.actions.append("-")
self.saveAllTimeData()
def team1assistUp(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].addAssist()
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " gained an assist")
self.saveAllTimeData()
def team1assistDown(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].subAssist()
self.actions.append("-")
self.saveAllTimeData()
def team2assistUp(self):
playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].addAssist()
self.actions.append("-" + self.team2.players[int(playerIndex)].getFullName() + " gained an assist")
self.saveAllTimeData()
def team2assistDown(self):
playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].subAssist()
self.actions.append("-")
self.saveAllTimeData()
def team1ShotsOnUp(self):
self.team1currentGoalie.addShotsOn()
self.actions.append("-" + self.team1currentGoalie.getFullName() + " made a save")
self.saveAllTimeData()
def team1ShotsOnDown(self):
self.team1currentGoalie.subShotsOn()
self.actions.append("-")
self.saveAllTimeData()
def team2ShotsOnUp(self):
self.team2currentGoalie.addShotsOn()
self.actions.append("-" + self.team2currentGoalie.getFullName() + " made a save")
self.saveAllTimeData()
def team2ShotsOnDown(self):
self.team2currentGoalie.subShotsOn()
self.actions.append("-")
self.saveAllTimeData()
#def team1missesUp(self):
# playerIndex = self.team1list.curselection()[0]
# self.team1.players[int(playerIndex)].addMiss()
# self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " was scored on")
#def team1missesDown(self):
# playerIndex = self.team1list.curselection()[0]
# self.team1.players[int(playerIndex)].subMiss()
# self.actions.append("-")
#def team2missesUp(self):
# playerIndex = self.team2list.curselection()[0]
# self.team2.players[int(playerIndex)].addMiss()
# self.actions.append("-" + self.team2.players[int(playerIndex)].getFullName() + " was scored on")
#def team2missesDown(self):
# playerIndex = self.team2list.curselection()[0]
# self.team2.players[int(playerIndex)].subMiss()
# self.actions.append("-")
def team1MinorUp(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].addMinor()
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " gained a minor")
self.saveAllTimeData()
def team1MinorDown(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].subMinor()
self.actions.append("-")
self.saveAllTimeData()
def team1MajorUp(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].addMajor()
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " gained a major")
self.saveAllTimeData()
def team1MajorDown(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].subMajor()
self.actions.append("-")
self.saveAllTimeData()
def team1MisconUp(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].addMisconduct()
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " gained a misconduct penalty")
self.saveAllTimeData()
def team1MisconDown(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].subMisconduct()
self.actions.append("-")
self.saveAllTimeData()
def team1MatchUp(self):
playerIndex = self.team1list.curselection()[0]
ctime = datetime.datetime.now().time()
diff = datetime.datetime.combine(datetime.date.today(), ctime) - datetime.datetime.combine(datetime.date.today(), self.gameStart)
minutes = int(str(diff).split(":")[1])
remainder = 48 - minutes
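        # Note (inferred): 'diff' is the elapsed game time as 'H:MM:SS.ffffff',
        # so field 1 is the minutes component; 48 appears to be the game length
        # in minutes, making 'remainder' the minutes left when the penalty occurs.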
self.team1.players[int(playerIndex)].addMatch(remainder)
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " gained a match penalty")
self.saveAllTimeData()
def team1MatchDown(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].subMatch()
self.actions.append("-")
self.saveAllTimeData()
def team1GameMisUp(self):
playerIndex = self.team1list.curselection()[0]
ctime = datetime.datetime.now().time()
diff = datetime.datetime.combine(datetime.date.today(), ctime) - datetime.datetime.combine(datetime.date.today(), self.gameStart)
minutes = int(str(diff).split(":")[1])
remainder = 48 - minutes
self.team1.players[int(playerIndex)].addGameMis(remainder)
self.actions.append("-" + self.team1.players[int(playerIndex)].getFullName() + " gained a game misconduct penalty")
self.saveAllTimeData()
def team1GameMisDown(self):
playerIndex = self.team1list.curselection()[0]
self.team1.players[int(playerIndex)].subGameMis()
self.actions.append("-")
self.saveAllTimeData()
def team2MinorUp(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].addMinor()
self.actions.append("-" + self.team2.players[int(playerIndex)].getFullName() + " gained a minor")
self.saveAllTimeData()
def team2MinorDown(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].subMinor()
self.actions.append("-")
self.saveAllTimeData()
def team2MajorUp(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].addMajor()
self.actions.append("-" + self.team2.players[int(playerIndex)].getFullName() + " gained a major")
self.saveAllTimeData()
def team2MajorDown(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].subMajor()
self.actions.append("-")
self.saveAllTimeData()
def team2MisconUp(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].addMisconduct()
self.actions.append("-" + self.team2.players[int(playerIndex)].getFullName() + " gained a misconduct penalty")
self.saveAllTimeData()
def team2MisconDown(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].subMisconduct()
self.actions.append("-")
self.saveAllTimeData()
def team2MatchUp(self):
        playerIndex = self.team2list.curselection()[0]
ctime = datetime.datetime.now().time()
diff = datetime.datetime.combine(datetime.date.today(), ctime) - datetime.datetime.combine(datetime.date.today(), self.gameStart)
        minutes = int(str(diff).split(":")[1])
remainder = 48 - minutes
self.team2.players[int(playerIndex)].addMatch(remainder)
self.actions.append("-" + self.team2.players[int(playerIndex)].getFullName() + " gained a match penalty")
self.saveAllTimeData()
def team2MatchDown(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].subMatch()
self.actions.append("-")
self.saveAllTimeData()
def team2GameMisUp(self):
        playerIndex = self.team2list.curselection()[0]
ctime = datetime.datetime.now().time()
diff = datetime.datetime.combine(datetime.date.today(), ctime) - datetime.datetime.combine(datetime.date.today(), self.gameStart)
        minutes = int(str(diff).split(":")[1])
remainder = 48 - minutes
self.team2.players[int(playerIndex)].addGameMis(remainder)
self.actions.append("-" + self.team2.players[int(playerIndex)].getFullName() + " gained a game misconduct penalty")
self.saveAllTimeData()
def team2GameMisDown(self):
        playerIndex = self.team2list.curselection()[0]
self.team2.players[int(playerIndex)].subGameMis()
self.actions.append("-")
self.saveAllTimeData()
def endGameProc(self):
self.saveData()
def main():
root = Tk()
root.title("Scorer")
#root.overrideredirect(1) #Uncomment to remove window borders
app = Application(master=root)
root.protocol('WM_DELETE_WINDOW', app.saveData)
app.mainloop()
root.destroy()
if __name__ == "__main__": main() | mit |
tai271828/courses | cs/udacity/cs101-intro-cs/code/lesson3/problem-set/max_depth.py | 4 | 5972 | #
# This question explores a different way (from the previous question)
# to limit the pages that it can crawl.
#
#######
# THREE GOLD STARS #
# Yes, we really mean it! This is really tough (but doable) unless
# you have some previous experience before this course.
# Modify the crawl_web procedure to take a second parameter,
# max_depth, that limits the depth of the search. We can
# define the depth of a page as the number of links that must
# be followed to reach that page starting from the seed page,
# that is, the length of the shortest path from the seed to
# the page. No pages whose depth exceeds max_depth should be
# included in the crawl.
#
# For example, if max_depth is 0, the only page that should
# be crawled is the seed page. If max_depth is 1, the pages
# that should be crawled are the seed page and every page that
# it links to directly. If max_depth is 2, the crawl should
# also include all pages that are linked to by these pages.
#
# Note that the pages in the crawl may be in any order.
#
# The following definition of get_page provides an interface
# to the website found at http://www.udacity.com/cs101x/index.html
# The function output order does not affect grading.
def get_page(url):
try:
if url == "http://www.udacity.com/cs101x/index.html":
return ('<html> <body> This is a test page for learning to crawl! '
'<p> It is a good idea to '
'<a href="http://www.udacity.com/cs101x/crawling.html">learn to '
'crawl</a> before you try to '
'<a href="http://www.udacity.com/cs101x/walking.html">walk</a> '
'or <a href="http://www.udacity.com/cs101x/flying.html">fly</a>. '
'</p> </body> </html> ')
elif url == "http://www.udacity.com/cs101x/crawling.html":
return ('<html> <body> I have not learned to crawl yet, but I '
'am quite good at '
'<a href="http://www.udacity.com/cs101x/kicking.html">kicking</a>.'
'</body> </html>')
elif url == "http://www.udacity.com/cs101x/walking.html":
return ('<html> <body> I cant get enough '
'<a href="http://www.udacity.com/cs101x/index.html">crawling</a>! '
'</body> </html>')
elif url == "http://www.udacity.com/cs101x/flying.html":
return ('<html> <body> The magic words are Squeamish Ossifrage! '
'</body> </html>')
elif url == "http://top.contributors/velak.html":
return ('<a href="http://top.contributors/jesyspa.html">'
'<a href="http://top.contributors/forbiddenvoid.html">')
elif url == "http://top.contributors/jesyspa.html":
return ('<a href="http://top.contributors/elssar.html">'
'<a href="http://top.contributors/kilaws.html">')
elif url == "http://top.contributors/forbiddenvoid.html":
return ('<a href="http://top.contributors/charlzz.html">'
'<a href="http://top.contributors/johang.html">'
'<a href="http://top.contributors/graemeblake.html">')
elif url == "http://top.contributors/kilaws.html":
return ('<a href="http://top.contributors/tomvandenbosch.html">'
'<a href="http://top.contributors/mathprof.html">')
elif url == "http://top.contributors/graemeblake.html":
return ('<a href="http://top.contributors/dreyescat.html">'
'<a href="http://top.contributors/angel.html">')
elif url == "A1":
return '<a href="B1"> <a href="C1"> '
elif url == "B1":
return '<a href="E1">'
elif url == "C1":
return '<a href="D1">'
elif url == "D1":
return '<a href="E1"> '
elif url == "E1":
return '<a href="F1"> '
except:
return ""
return ""
def get_next_target(page):
start_link = page.find('<a href=')
if start_link == -1:
return None, 0
start_quote = page.find('"', start_link)
end_quote = page.find('"', start_quote + 1)
url = page[start_quote + 1:end_quote]
return url, end_quote
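# Worked example (values computed by hand): for the page '<a href="B1"> x',
# get_next_target returns ('B1', 11) -- the extracted URL and the index of
# its closing quote, from which the caller keeps scanning.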
def union(p,q):
for e in q:
if e not in p:
p.append(e)
def get_all_links(page):
links = []
while True:
url,endpos = get_next_target(page)
if url:
links.append(url)
page = page[endpos:]
else:
break
return links
def crawl_web(seed,max_depth):
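    # Breadth-first crawl by levels: 'tocrawl' is the frontier for the
    # current depth and 'next_depth' collects links for the next level;
    # when the frontier empties the two are swapped and depth is
    # incremented, so no page deeper than max_depth is ever crawled.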
tocrawl = [seed]
crawled = []
next_depth = []
depth = 0
while tocrawl and depth <= max_depth:
page = tocrawl.pop()
if page not in crawled:
union(next_depth, get_all_links(get_page(page)))
crawled.append(page)
if not tocrawl:
tocrawl, next_depth = next_depth, []
depth = depth + 1
return crawled
print crawl_web("http://www.udacity.com/cs101x/index.html",0)
#>>> ['http://www.udacity.com/cs101x/index.html']
print crawl_web("http://www.udacity.com/cs101x/index.html",1)
#>>> ['http://www.udacity.com/cs101x/index.html',
#>>> 'http://www.udacity.com/cs101x/flying.html',
#>>> 'http://www.udacity.com/cs101x/walking.html',
#>>> 'http://www.udacity.com/cs101x/crawling.html']
print crawl_web("http://www.udacity.com/cs101x/index.html",50)
#>>> ['http://www.udacity.com/cs101x/index.html',
#>>> 'http://www.udacity.com/cs101x/flying.html',
#>>> 'http://www.udacity.com/cs101x/walking.html',
#>>> 'http://www.udacity.com/cs101x/crawling.html',
#>>> 'http://www.udacity.com/cs101x/kicking.html']
print crawl_web("http://top.contributors/forbiddenvoid.html",2)
#>>> ['http://top.contributors/forbiddenvoid.html',
#>>> 'http://top.contributors/graemeblake.html',
#>>> 'http://top.contributors/angel.html',
#>>> 'http://top.contributors/dreyescat.html',
#>>> 'http://top.contributors/johang.html',
#>>> 'http://top.contributors/charlzz.html']
print crawl_web("A1",3)
#>>> ['A1', 'C1', 'B1', 'E1', 'D1', 'F1']
# (May be in any order) | mit |
smspillaz/pychecker | pychecker2/utest/format.py | 11 | 3277 | from pychecker2 import TestSupport
from pychecker2 import FormatStringChecks
class FormatTestCase(TestSupport.WarningTester):
def testGoodFormats(self):
self.silent('def f(x):\n'
' return "%s" % x\n')
self.silent('def f(x):\n'
" return ('%s' + '%s') % (x, x)\n")
self.silent("def f(x):\n"
" return (('%s' + '%s') * 8) % ((x,) * 16)\n")
self.silent("def f(x):\n"
" y = 2\n"
" return '%(x)f %(y)s' % locals()\n")
self.silent("y = 1\n"
"def f():\n"
" return '%(y)s' % globals()\n")
self.silent("def f():\n"
" return '%*.s %*.*s %*f' % locals()\n")
self.silent("def f():\n"
" return '%s %%' % ('',)\n")
self.silent("def f(t):\n"
" return '%s %f' % t\n")
self.silent("def f(t):\n"
" return ('%s %f' + t) % (1, 2)\n")
self.silent("def f(t):\n"
" return '%s' % `t`\n")
self.silent("def f(t):\n"
" return '%s' * ((7 - 1) / 2) % (t,t,t)\n")
def testBadFormats(self):
w = FormatStringChecks.FormatStringCheck.badFormat
self.warning("def f():\n"
" return '%' % locals()\n", 2, w, 0, '%')
self.warning("def f():\n"
" return '%z a kookie format, yah' % locals()\n",
2, w, 0, '%z a kooki...')
self.warning("def f():\n"
" return '%(foo)*.*s' % {'foo': 'bar'}\n",
2, w, 0, '%(foo)*.*s')
def testMixed(self):
w = FormatStringChecks.FormatStringCheck.mixedFormat
self.warning("def f():\n"
" return '%(mi)x %up' % locals()\n", 2, w, '(mi)')
self.warning("def f():\n"
" return '%up %(mi)x' % (1, 2)\n", 2, w, '(mi)')
def testFormatCount(self):
w = FormatStringChecks.FormatStringCheck.formatCount
self.warning("def f():\n"
" return '%s %d %f' % ('', 2)\n",
2, w, 2, 3)
def testUselessModifier(self):
w = FormatStringChecks.FormatStringCheck.uselessModifier
self.warning("def f(t):\n"
" return '%s %lf' % (t, t)\n",
2, w, 'l')
def testFormatConstants(self):
w = FormatStringChecks.FormatStringCheck.badConstant
self.warning("def f():\n"
" return ('%s' * 6) % ((1, 2) + 3 * 7)\n",
2, w, 'can only concatenate tuple (not "int") to tuple')
self.warning("def f():\n"
" return ('%s' + 6) % ((1, 2) * 3)\n",
2, w, "cannot concatenate 'str' and 'int' objects")
def testUnknownName(self):
w = FormatStringChecks.FormatStringCheck.unknownFormatName
self.warning("def f():\n"
" return '%(unknown)s' % globals()\n",
2, w, "unknown", "globals")
self.warning("def f():\n"
" return '%(unknown)s' % locals()\n",
2, w, "unknown", "locals")
| bsd-3-clause |
AnySDK/Sample_Lua | frameworks/cocos2d-x/tools/jenkins-scripts/cocos-console-test.py | 9 | 11174 | #!/usr/bin/python
# create new project by cocos-console
# compile, deploy project and run
# purpose: autotest for the cocos2d-console commands.
# now supports: mac - mac/ios/android
# will add: windows-android, linux-android
import os
import subprocess
import sys
import json
import time
import socket
import threading
import smtplib
import traceback
from email.mime.text import MIMEText
from os.path import join, getsize
# default console_param.
console_param = '[console run]'
# get param from commit.
if os.environ.has_key('payload'):
payload_str = os.environ['payload']
payload = json.loads(payload_str)
if payload.has_key('console'):
console_param = payload['console']
console_param = console_param[1:len(console_param)-1]
print 'console_param:',console_param
console_param_arr = console_param.split(' ')
# enum command type
class ENUM_PARAM:
new = 0
compile = 1
deploy = 2
run = 3
# partition different level
LEVEL_COCOS = {
ENUM_PARAM.new : 1,
ENUM_PARAM.compile : 2,
ENUM_PARAM.deploy : 4,
ENUM_PARAM.run : 8
}
# level's cocos command
COCOS_CMD = {
ENUM_PARAM.new:'new',
ENUM_PARAM.compile:'compile',
ENUM_PARAM.deploy:'deploy',
ENUM_PARAM.run:'run'
}
# set cocos_param for run different command
cocos_param = 0
for level in LEVEL_COCOS:
if console_param_arr.count(COCOS_CMD[level]):
cocos_param = cocos_param + LEVEL_COCOS[level]
if cocos_param < LEVEL_COCOS[ENUM_PARAM.new]:
cocos_param = LEVEL_COCOS[ENUM_PARAM.new]
print 'cocos_param:', cocos_param
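# Worked example (follows from the logic above): the default console_param
# '[console run]' contains only 'run', so cocos_param = LEVEL_COCOS[run] = 8;
# build_run() below executes every level whose value is <= cocos_param, so
# 'new' (1), 'compile' (2), 'deploy' (4) and 'run' (8) would all execute.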
# project types
project_types = ['cpp', 'lua']
# project suffix
PROJ_SUFFIX = 'Proj'
# different phone platform
phonePlats = ['mac','ios','android']
# path to the cocos console; perhaps this should be set via an env param
cocos_console_dir = 'tools/cocos2d-console/bin/'
# cocos2d-console supports different run targets per platform, e.g. only android can run on win
runSupport = {
'darwin' : {'mac':1,'ios':1,'android':1},
'win' : {'mac':0,'ios':0,'android':1},
'linux' : {'mac':0,'ios':0,'android':1}
}
# get current running system
curPlat = sys.platform
if curPlat.find('linux') >= 0:
curPlat = 'linux'
elif curPlat.find('darwin') >= 0:
curPlat = 'darwin'
else:
curPlat = 'win'
print 'current platform is:', curPlat
# delete older projects (different system commands may be needed; mac only for now)
def clean_project():
print 'delete older project.'
for proj in project_types:
cmd = 'rm -rf '+proj+PROJ_SUFFIX
os.system(cmd)
# file path.(for add console listen command.)
FILE_PATH = '/Classes/AppDelegate.cpp'
FILE_DIR = {
'cpp':'',
'lua':'/frameworks/runtime-src'
}
PARSE_WORD = 'director->setDisplayStats(true);'
CONSOLE_COMMAND = 'director->getConsole()->listenOnTCP(5678);'
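# Note: the port in CONSOLE_COMMAND must match PORT defined below; both
# close_proj() and the 'close' thread connect to this in-game console
# socket and send 'director end' to terminate the running app.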
# add console listenOnTCP to AppDelegate.cpp.
def addConsoleListenOnTCP(name):
filePath = name+PROJ_SUFFIX+FILE_DIR[name]+FILE_PATH
print 'filePath:',filePath
strCont = ''
if os.path.isfile(filePath):
file_object = open(filePath, 'r')
strLine = file_object.readline()
while strLine:
strCont = strCont + strLine
if strLine.find(PARSE_WORD) > -1:
print 'add console listenOnTCP command.'
strCont = strCont+'\n\t' + CONSOLE_COMMAND + '\n'
strLine = file_object.readline()
file_object.close()
file_object = open(filePath, 'w')
file_object.write(strCont)
file_object.close()
time.sleep(2)
else:
print 'file is not exist.'
# console result buffer, for recording results
console_result = 'the result of cocos-console-test is:\n\r'
# get current android devices count.
def getAndroidDevices():
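    # Sample 'adb devices' output (illustrative):
    #   List of devices attached
    #   emulator-5554   device
    # The header line is dropped; each remaining line containing 'device'
    # counts as one connected device.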
cmd = 'adb devices'
info_devices = os.popen(cmd).read()
arrDevices = info_devices.split('\n')
del arrDevices[0]
count = 0
for device in arrDevices:
# e.g: emulator-5554 device, contains 'device', so, min length is len('device')
if len(device) > len('device') and (device.find('device') >= 0):
count += 1
return count
# close running app or exe by using console command.
IP_PHONE = {
'mac':'localhost',
'ios':'localhost'
}
PORT = 5678
def close_proj(proj, phone):
print 'close running project'
# connect socket
strClose = 'close ' + proj + ' on ' + phone
if IP_PHONE.has_key(phone):
soc = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
print proj, phone, IP_PHONE[phone]
try:
soc.connect((IP_PHONE[phone], PORT))
cmd = 'director end\r\n'
print 'cmd close:', cmd
soc.send(cmd)
time.sleep(2)
strClose = strClose + ' success.'
except Exception, e:
print 'socket is not connect.'
strClose = strClose + ' failed.' + ' socket is not connect.'
else:
strClose = strClose + ' failed.' + ' no ' +phone+ ' type.'
time.sleep(2)
return strClose
# appendToResult
def appendToResult(content):
global console_result
console_result = console_result + content
info_of_close_app = {}
cur_test_name = ''
class myThread(threading.Thread):
def __init__(self,threadname):
threading.Thread.__init__(self,name=threadname)
def run(self):
run_name = self.getName()
print 'run_name:', run_name
if run_name == 'close':
while True:
soc = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
try:
soc.connect(('localhost', PORT))
cmd_close = 'director end\r\n'
print 'cmd close:', cmd_close
soc.send(cmd_close)
time.sleep(2)
global cur_test_name
print 'cur_test_name:', cur_test_name
info_of_close_app[cur_test_name] = True
break
except Exception, e:
time.sleep(5)
# if any error
ANY_ERROR_IN_RUN = 0
# excute cocos command
def cocos_project(level):
global ANY_ERROR_IN_RUN
print 'will excute cocos_command: ', COCOS_CMD[level], level
appendToResult('will excute ' + COCOS_CMD[level] + ' command:'+"\n\r\t")
for proj in project_types:
print 'proj: ', proj
if level == ENUM_PARAM.new:
cmd = './'+cocos_console_dir+'cocos new -l '+proj+' '+proj+PROJ_SUFFIX
print proj,'cmd:',cmd
info_create = os.system(cmd) #call cmd on win is diff
if info_create == 0:
time.sleep(12)
addConsoleListenOnTCP(proj)
print 'create project',proj,' is:', not info_create
ANY_ERROR_IN_RUN = ANY_ERROR_IN_RUN + info_create
appendToResult(' '+cmd +': ' + str(not info_create) + ".\n\r\t")
else:
for phone in phonePlats:
print 'platform is: ', phone
cmd = './'+cocos_console_dir+'cocos '+COCOS_CMD[level]+' -s '+proj+PROJ_SUFFIX+' -p '+phone
print 'cmd:',cmd
info_cmd = ''
if level == ENUM_PARAM.compile:
if runSupport[curPlat][phone]:
info_cmd = os.system(cmd)
print 'info '+COCOS_CMD[level]+':', not info_cmd
appendToResult(' '+cmd +': ' + str(not info_cmd) + ".\n\r\t")
else:
if runSupport[curPlat][phone]:
                        print 'in deploy or run:', phone, getAndroidDevices()
if phone == 'android' and getAndroidDevices() == 0:
                            strInfo = 'no android device, please check that the device is running ok.'
print strInfo
else:
if level == ENUM_PARAM.run:
global cur_test_name
cur_test_name = proj+','+phone
thread_close = myThread('close')
thread_close.start()
info_cmd = os.system(cmd)
time.sleep(5)
appendToResult(' '+cmd +': ' + str(not info_cmd) + ".\n\r\t")
# build and run according to the params provided (lv_ignore: e.g. ignore 'new')
def build_run(lv_ignore):
print 'will build and run, in function build_run'
for level in LEVEL_COCOS:
print 'level:', level, cocos_param, LEVEL_COCOS[level]
if cocos_param >= LEVEL_COCOS[level] and level > lv_ignore:
if level == ENUM_PARAM.new:
clean_project()
cocos_project(level)
# android simulator name.
ANDROID_SIMULATOR_NAME = 'console-test'
# start android simulator if no android devices connected.
def start_android_simulator():
print 'in function start_android_simulator.'
if getAndroidDevices() > 0:
print 'already connected android device.'
return
if cocos_param >= LEVEL_COCOS[ENUM_PARAM.deploy]:
cmd_start = [ 'emulator -avd '+ANDROID_SIMULATOR_NAME ]
# print 'cmd_start:', cmd_start
# info_start = subprocess.Popen(cmd_start, stdin=subprocess.PIPE, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# print 'start an android simulator:', not info_start
# send email
EMAIL_KEYS={
0:'EMAIL_HOST',
1:'EMAIL_USER',
2:'EMAIL_PWD',
3:'EMAIL_POSTFIX',
4:'EMAIL_LIST',
5:'NEED_SEND_EMAIL'
}
OBJ_EMAIL_INFO = {}
print 'will get env info.'
for key in EMAIL_KEYS:
if os.environ.has_key(EMAIL_KEYS[key]):
OBJ_EMAIL_INFO[EMAIL_KEYS[key]] = os.environ[EMAIL_KEYS[key]]
if key == 4:
# string to list by ' ', for separate users.
OBJ_EMAIL_INFO[EMAIL_KEYS[4]] = OBJ_EMAIL_INFO[EMAIL_KEYS[4]].split(' ')
print 'will send email.', OBJ_EMAIL_INFO
def send_mail(to_list,sub,title,content):
mail_user = OBJ_EMAIL_INFO[ EMAIL_KEYS[1] ]
mail_postfix = OBJ_EMAIL_INFO[ EMAIL_KEYS[3] ]
mail_host = OBJ_EMAIL_INFO[ EMAIL_KEYS[0] ]
mail_pass = OBJ_EMAIL_INFO[ EMAIL_KEYS[2] ]
me = mail_user+"<"+mail_user+"@"+mail_postfix+">"
msg = MIMEText(content,_subtype='plain',_charset='gb2312')
msg['Subject'] = sub
msg['From'] = me
msg['To'] = " ".join(to_list)
print 'to users:', msg['To']
msg['Content'] = 'test'
try:
s = smtplib.SMTP()
s.connect(mail_host)
s.login(mail_user,mail_pass)
s.sendmail(me, to_list, str(msg))
print 'info:', me, to_list, str(msg)
s.close()
appendToResult( 'send email true:' + str(msg) )
return True
except Exception, e:
appendToResult( 'send email false:' + str(e) )
print str(e)
return False
def sendEmail(msg):
send_mail(OBJ_EMAIL_INFO[EMAIL_KEYS[4]], "cocos-console-test result", 'for error.', msg)
# get package size
def getdirsize(dir):
size = 0L
for root, dirs, files in os.walk(dir):
size += sum([getsize(join(root, name)) for name in files])
return size
APP_FILE_DIR = {
'cpp':'bin/debug/',
'lua':'runtime/'
}
APP_FILE_SUFFIX = {
'mac':'.app',
'ios':'.app',
'android':'-debug-unaligned.apk'
}
if os.environ.has_key('APP_FILE_SUFFIX'):
str_app_suffix = os.environ['APP_FILE_SUFFIX']
APP_FILE_SUFFIX = eval(str_app_suffix)
def getPackageSize():
for proj in project_types:
for phone in phonePlats:
# if runSupport[curPlat][phone]:
package_path = './'+proj+PROJ_SUFFIX+'/'+APP_FILE_DIR[proj]+phone+'/'+proj+PROJ_SUFFIX+APP_FILE_SUFFIX[phone]
print 'package_path', package_path
package_size = 0
if os.path.isfile(package_path):
package_size = os.path.getsize(package_path);
else:
package_size = getdirsize(package_path);
strSize = 'size of '+proj+PROJ_SUFFIX+' '+phone+' is:'+str(package_size/(1024))+'KB'+'\n\t'
print 'strSize:', strSize
appendToResult(strSize)
def main():
print 'in main:'
# start_android_simulator()
print 'will build_run:'
build_run(-1)
print 'ANY_ERROR_IN_RUN:', ANY_ERROR_IN_RUN
print 'end build run. and get package size.'
getPackageSize()
print 'will send email:'
print 'console_result:', console_result
if OBJ_EMAIL_INFO[ EMAIL_KEYS[5] ] or ANY_ERROR_IN_RUN:
sendEmail(console_result)
# -------------- main --------------
if __name__ == '__main__':
sys_ret = 0
try:
sys_ret = main()
except:
traceback.print_exc()
sys_ret = 1
finally:
sys.exit(sys_ret)
| mit |
rlugojr/django | django/middleware/cache.py | 69 | 7573 | """
Cache middleware. If enabled, each Django-powered page will be cached based on
URL. The canonical way to enable cache middleware is to set
``UpdateCacheMiddleware`` as your first piece of middleware, and
``FetchFromCacheMiddleware`` as the last::
MIDDLEWARE = [
'django.middleware.cache.UpdateCacheMiddleware',
...
'django.middleware.cache.FetchFromCacheMiddleware'
]
This is counter-intuitive, but correct: ``UpdateCacheMiddleware`` needs to run
last during the response phase, which processes middleware bottom-up;
``FetchFromCacheMiddleware`` needs to run last during the request phase, which
processes middleware top-down.
The single-class ``CacheMiddleware`` can be used for some simple sites.
However, if any other piece of middleware needs to affect the cache key, you'll
need to use the two-part ``UpdateCacheMiddleware`` and
``FetchFromCacheMiddleware``. This'll most often happen when you're using
Django's ``LocaleMiddleware``.
More details about how the caching works:
* Only GET or HEAD requests with status code 200 are cached.
* The number of seconds each page is stored for is set by the "max-age" section
of the response's "Cache-Control" header, falling back to the
CACHE_MIDDLEWARE_SECONDS setting if the section was not found.
* This middleware expects that a HEAD request is answered with exactly the
  same response headers as the corresponding GET request.
* When a hit occurs, a shallow copy of the original response object is returned
from process_request.
* Pages will be cached based on the contents of the request headers listed in
the response's "Vary" header.
* This middleware also sets ETag, Last-Modified, Expires and Cache-Control
headers on the response object.
"""
from django.conf import settings
from django.core.cache import DEFAULT_CACHE_ALIAS, caches
from django.utils.cache import (
get_cache_key, get_max_age, has_vary_header, learn_cache_key,
patch_response_headers,
)
from django.utils.deprecation import MiddlewareMixin
class UpdateCacheMiddleware(MiddlewareMixin):
"""
Response-phase cache middleware that updates the cache if the response is
cacheable.
Must be used as part of the two-part update/fetch cache middleware.
UpdateCacheMiddleware must be the first piece of middleware in MIDDLEWARE
so that it'll get called last during the response phase.
"""
def __init__(self, get_response=None):
self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS
self.cache = caches[self.cache_alias]
self.get_response = get_response
def _should_update_cache(self, request, response):
return hasattr(request, '_cache_update_cache') and request._cache_update_cache
def process_response(self, request, response):
"""Sets the cache, if needed."""
if not self._should_update_cache(request, response):
# We don't need to update the cache, just return.
return response
if response.streaming or response.status_code not in (200, 304):
return response
# Don't cache responses that set a user-specific (and maybe security
# sensitive) cookie in response to a cookie-less request.
if not request.COOKIES and response.cookies and has_vary_header(response, 'Cookie'):
return response
# Try to get the timeout from the "max-age" section of the "Cache-
# Control" header before reverting to using the default cache_timeout
# length.
timeout = get_max_age(response)
if timeout is None:
timeout = self.cache_timeout
elif timeout == 0:
# max-age was set to 0, don't bother caching.
return response
patch_response_headers(response, timeout)
if timeout and response.status_code == 200:
cache_key = learn_cache_key(request, response, timeout, self.key_prefix, cache=self.cache)
if hasattr(response, 'render') and callable(response.render):
response.add_post_render_callback(
lambda r: self.cache.set(cache_key, r, timeout)
)
else:
self.cache.set(cache_key, response, timeout)
return response
class FetchFromCacheMiddleware(MiddlewareMixin):
"""
Request-phase cache middleware that fetches a page from the cache.
Must be used as part of the two-part update/fetch cache middleware.
FetchFromCacheMiddleware must be the last piece of middleware in MIDDLEWARE
so that it'll get called last during the request phase.
"""
def __init__(self, get_response=None):
self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS
self.cache = caches[self.cache_alias]
self.get_response = get_response
def process_request(self, request):
"""
Checks whether the page is already cached and returns the cached
version if available.
"""
if request.method not in ('GET', 'HEAD'):
request._cache_update_cache = False
return None # Don't bother checking the cache.
# try and get the cached GET response
cache_key = get_cache_key(request, self.key_prefix, 'GET', cache=self.cache)
if cache_key is None:
request._cache_update_cache = True
return None # No cache information available, need to rebuild.
response = self.cache.get(cache_key)
# if it wasn't found and we are looking for a HEAD, try looking just for that
if response is None and request.method == 'HEAD':
cache_key = get_cache_key(request, self.key_prefix, 'HEAD', cache=self.cache)
response = self.cache.get(cache_key)
if response is None:
request._cache_update_cache = True
return None # No cache information available, need to rebuild.
# hit, return cached response
request._cache_update_cache = False
return response
class CacheMiddleware(UpdateCacheMiddleware, FetchFromCacheMiddleware):
"""
Cache middleware that provides basic behavior for many simple sites.
Also used as the hook point for the cache decorator, which is generated
using the decorator-from-middleware utility.
"""
def __init__(self, get_response=None, cache_timeout=None, **kwargs):
self.get_response = get_response
# We need to differentiate between "provided, but using default value",
# and "not provided". If the value is provided using a default, then
# we fall back to system defaults. If it is not provided at all,
# we need to use middleware defaults.
try:
key_prefix = kwargs['key_prefix']
if key_prefix is None:
key_prefix = ''
except KeyError:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
self.key_prefix = key_prefix
try:
cache_alias = kwargs['cache_alias']
if cache_alias is None:
cache_alias = DEFAULT_CACHE_ALIAS
except KeyError:
cache_alias = settings.CACHE_MIDDLEWARE_ALIAS
self.cache_alias = cache_alias
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
self.cache_timeout = cache_timeout
self.cache = caches[self.cache_alias]
| bsd-3-clause |
tacrow/tacrow | node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py | 1363 | 58344 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import filecmp
import gyp.common
import gyp.xcodeproj_file
import gyp.xcode_ninja
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific. The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files. The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific. INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'
# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.dylib',
# INTERMEDIATE_DIR is a place for targets to build up intermediate products.
# It is specific to each build environment. It is only guaranteed to exist
# and be constant within the context of a project, corresponding to a single
# input file. Some build environments may allow their intermediate directory
# to be shared on a wider scale, but this is not guaranteed.
'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
'OS': 'mac',
'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
'CONFIGURATION_NAME': '$(CONFIGURATION)',
}
# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
# 'mac_framework_dirs', input already handles _dirs endings.
]
# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
'ios_app_extension',
'ios_watch_app',
'ios_watchkit_extension',
'mac_bundle',
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
'mac_xctest_bundle',
'xcode_create_dependents_test_runner',
]
# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
]
generator_filelist_paths = None
# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
'$(SDKROOT)/usr/lib',
'$(SDKROOT)/usr/local/lib',
])
def CreateXCConfigurationList(configuration_names):
xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
if len(configuration_names) == 0:
configuration_names = ['Default']
for configuration_name in configuration_names:
xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
'name': configuration_name})
xccl.AppendProperty('buildConfigurations', xcbc)
xccl.SetProperty('defaultConfigurationName', configuration_names[0])
return xccl
class XcodeProject(object):
def __init__(self, gyp_path, path, build_file_dict):
self.gyp_path = gyp_path
self.path = path
self.project = gyp.xcodeproj_file.PBXProject(path=path)
projectDirPath = gyp.common.RelativePath(
os.path.dirname(os.path.abspath(self.gyp_path)),
os.path.dirname(path) or '.')
self.project.SetProperty('projectDirPath', projectDirPath)
self.project_file = \
gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
self.build_file_dict = build_file_dict
# TODO(mark): add destructor that cleans up self.path if created_dir is
# True and things didn't complete successfully. Or do something even
# better with "try"?
self.created_dir = False
try:
os.makedirs(self.path)
self.created_dir = True
except OSError, e:
if e.errno != errno.EEXIST:
raise
def Finalize1(self, xcode_targets, serialize_all_tests):
# Collect a list of all of the build configuration names used by the
# various targets in the file. It is very heavily advised to keep each
# target in an entire project (even across multiple project files) using
# the same set of configuration names.
configurations = []
for xct in self.project.GetProperty('targets'):
xccl = xct.GetProperty('buildConfigurationList')
xcbcs = xccl.GetProperty('buildConfigurations')
for xcbc in xcbcs:
name = xcbc.GetProperty('name')
if name not in configurations:
configurations.append(name)
# Replace the XCConfigurationList attached to the PBXProject object with
# a new one specifying all of the configuration names used by the various
# targets.
try:
xccl = CreateXCConfigurationList(configurations)
self.project.SetProperty('buildConfigurationList', xccl)
except:
sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
raise
# The need for this setting is explained above where _intermediate_var is
# defined. The comments below about wanting to avoid project-wide build
# settings apply here too, but this needs to be set on a project-wide basis
# so that files relative to the _intermediate_var setting can be displayed
# properly in the Xcode UI.
#
# Note that for configuration-relative files such as anything relative to
# _intermediate_var, for the purposes of UI tree view display, Xcode will
# only resolve the configuration name once, when the project file is
# opened. If the active build configuration is changed, the project file
# must be closed and reopened if it is desired for the tree view to update.
# This is filed as Apple radar 6588391.
xccl.SetBuildSetting(_intermediate_var,
'$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
xccl.SetBuildSetting(_shared_intermediate_var,
'$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
# Set user-specified project-wide build settings and config files. This
# is intended to be used very sparingly. Really, almost everything should
# go into target-specific build settings sections. The project-wide
# settings are only intended to be used in cases where Xcode attempts to
# resolve variable references in a project context as opposed to a target
# context, such as when resolving sourceTree references while building up
# the tree tree view for UI display.
# Any values set globally are applied to all configurations, then any
# per-configuration values are applied.
for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
xccl.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in self.build_file_dict:
config_ref = self.project.AddOrGetFileInRootGroup(
self.build_file_dict['xcode_config_file'])
xccl.SetBaseConfiguration(config_ref)
build_file_configurations = self.build_file_dict.get('configurations', {})
if build_file_configurations:
for config_name in configurations:
build_file_configuration_named = \
build_file_configurations.get(config_name, {})
if build_file_configuration_named:
xcc = xccl.ConfigurationNamed(config_name)
for xck, xcv in build_file_configuration_named.get('xcode_settings',
{}).iteritems():
xcc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in build_file_configuration_named:
config_ref = self.project.AddOrGetFileInRootGroup(
build_file_configurations[config_name]['xcode_config_file'])
xcc.SetBaseConfiguration(config_ref)
# Sort the targets based on how they appeared in the input.
# TODO(mark): Like a lot of other things here, this assumes internal
# knowledge of PBXProject - in this case, of its "targets" property.
# ordinary_targets are ordinary targets that are already in the project
# file. run_test_targets are the targets that run unittests and should be
# used for the Run All Tests target. support_targets are the action/rule
# targets used by GYP file targets, just kept for the assert check.
ordinary_targets = []
run_test_targets = []
support_targets = []
# targets is full list of targets in the project.
targets = []
# does the it define it's own "all"?
has_custom_all = False
# targets_for_all is the list of ordinary_targets that should be listed
# in this project's "All" target. It includes each non_runtest_target
# that does not have suppress_wildcard set.
targets_for_all = []
for target in self.build_file_dict['targets']:
target_name = target['target_name']
toolset = target['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
toolset)
xcode_target = xcode_targets[qualified_target]
# Make sure that the target being added to the sorted list is already in
# the unsorted list.
assert xcode_target in self.project._properties['targets']
targets.append(xcode_target)
ordinary_targets.append(xcode_target)
if xcode_target.support_target:
support_targets.append(xcode_target.support_target)
targets.append(xcode_target.support_target)
if not int(target.get('suppress_wildcard', False)):
targets_for_all.append(xcode_target)
if target_name.lower() == 'all':
has_custom_all = True;
# If this target has a 'run_as' attribute, add its target to the
# targets, and add it to the test targets.
if target.get('run_as'):
# Make a target to run something. It should have one
# dependency, the parent xcode target.
xccl = CreateXCConfigurationList(configurations)
run_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run ' + target_name,
'productName': xcode_target.GetProperty('productName'),
'buildConfigurationList': xccl,
},
parent=self.project)
run_target.AddDependency(xcode_target)
command = target['run_as']
script = ''
if command.get('working_directory'):
script = script + 'cd "%s"\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
command.get('working_directory'))
if command.get('environment'):
script = script + "\n".join(
['export %s="%s"' %
(key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
for (key, val) in command.get('environment').iteritems()]) + "\n"
# Some test end up using sockets, files on disk, etc. and can get
# confused if more then one test runs at a time. The generator
# flag 'xcode_serialize_all_test_runs' controls the forcing of all
# tests serially. It defaults to True. To get serial runs this
# little bit of python does the same as the linux flock utility to
# make sure only one runs at a time.
command_prefix = ''
if serialize_all_tests:
command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """
# If we were unable to exec for some reason, we want to exit
# with an error, and fixup variable references to be shell
# syntax instead of xcode syntax.
script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
gyp.common.EncodePOSIXShellList(command.get('action')))
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'shellScript': script,
'showEnvVarsInLog': 0,
})
run_target.AppendProperty('buildPhases', ssbp)
# Add the run target to the project file.
targets.append(run_target)
run_test_targets.append(run_target)
xcode_target.test_runner = run_target
# Make sure that the list of targets being replaced is the same length as
# the one replacing it, but allow for the added test runner targets.
assert len(self.project._properties['targets']) == \
len(ordinary_targets) + len(support_targets)
self.project._properties['targets'] = targets
# Get rid of unnecessary levels of depth in groups like the Source group.
self.project.RootGroupsTakeOverOnlyChildren(True)
# Sort the groups nicely. Do this after sorting the targets, because the
# Products group is sorted based on the order of the targets.
self.project.SortGroups()
# Create an "All" target if there's more than one target in this project
# file and the project didn't define its own "All" target. Put a generated
# "All" target first so that people opening up the project for the first
# time will build everything by default.
if len(targets_for_all) > 1 and not has_custom_all:
xccl = CreateXCConfigurationList(configurations)
all_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'All',
},
parent=self.project)
for target in targets_for_all:
all_target.AddDependency(target)
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._properties. It's important to get the "All" target first,
# though.
self.project._properties['targets'].insert(0, all_target)
# The same, but for run_test_targets.
if len(run_test_targets) > 1:
xccl = CreateXCConfigurationList(configurations)
run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'Run All Tests',
},
parent=self.project)
for run_test_target in run_test_targets:
run_all_tests_target.AddDependency(run_test_target)
# Insert after the "All" target, which must exist if there is more than
# one run_test_target.
self.project._properties['targets'].insert(1, run_all_tests_target)
def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
# Finalize2 needs to happen in a separate step because the process of
# updating references to other projects depends on the ordering of targets
# within remote project files. Finalize1 is responsible for sorting duty,
# and once all project files are sorted, Finalize2 can come in and update
# these references.
# To support making a "test runner" target that will run all the tests
# that are direct dependents of any given target, we look for
# xcode_create_dependents_test_runner being set on an Aggregate target,
# and generate a second target that will run the tests runners found under
# the marked target.
for bf_tgt in self.build_file_dict['targets']:
if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
tgt_name = bf_tgt['target_name']
toolset = bf_tgt['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
tgt_name, toolset)
xcode_target = xcode_targets[qualified_target]
if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
# Collect all the run test targets.
all_run_tests = []
pbxtds = xcode_target.GetProperty('dependencies')
for pbxtd in pbxtds:
pbxcip = pbxtd.GetProperty('targetProxy')
dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
if hasattr(dependency_xct, 'test_runner'):
all_run_tests.append(dependency_xct.test_runner)
# Directly depend on all the runners as they depend on the target
# that builds them.
if len(all_run_tests) > 0:
run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run %s Tests' % tgt_name,
'productName': tgt_name,
},
parent=self.project)
for run_test_target in all_run_tests:
run_all_target.AddDependency(run_test_target)
# Insert the test runner after the related target.
idx = self.project._properties['targets'].index(xcode_target)
self.project._properties['targets'].insert(idx + 1, run_all_target)
# Update all references to other projects, to make sure that the lists of
# remote products are complete. Otherwise, Xcode will fill them in when
# it opens the project file, which will result in unnecessary diffs.
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._other_pbxprojects.
for other_pbxproject in self.project._other_pbxprojects.keys():
self.project.AddOrGetProjectReference(other_pbxproject)
self.project.SortRemoteProductReferences()
# Give everything an ID.
self.project_file.ComputeIDs()
# Make sure that no two objects in the project file have the same ID. If
# multiple objects wind up with the same ID, upon loading the file, Xcode
# will only recognize one object (the last one in the file?) and the
# results are unpredictable.
self.project_file.EnsureNoIDCollisions()
def Write(self):
# Write the project file to a temporary location first. Xcode watches for
# changes to the project file and presents a UI sheet offering to reload
# the project when it does change. However, in some cases, especially when
# multiple projects are open or when Xcode is busy, things don't work so
# seamlessly. Sometimes, Xcode is able to detect that a project file has
# changed but can't unload it because something else is referencing it.
# To mitigate this problem, and to avoid even having Xcode present the UI
# sheet when an open project is rewritten for inconsequential changes, the
# project file is written to a temporary file in the xcodeproj directory
# first. The new temporary file is then compared to the existing project
# file, if any. If they differ, the new file replaces the old; otherwise,
# the new project file is simply deleted. Xcode properly detects a file
# being renamed over an open project file as a change and so it remains
# able to present the "project file changed" sheet under this system.
# Writing to a temporary file first also avoids the possible problem of
# Xcode rereading an incomplete project file.
(output_fd, new_pbxproj_path) = \
tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
dir=self.path)
try:
output_file = os.fdopen(output_fd, 'wb')
self.project_file.Print(output_file)
output_file.close()
pbxproj_path = os.path.join(self.path, 'project.pbxproj')
same = False
try:
same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(new_pbxproj_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(new_pbxproj_path, 0666 & ~umask)
os.rename(new_pbxproj_path, pbxproj_path)
except Exception:
# Don't leave turds behind. In fact, if this code was responsible for
# creating the xcodeproj directory, get rid of that too.
os.unlink(new_pbxproj_path)
if self.created_dir:
shutil.rmtree(self.path, True)
raise
def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier.
source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
# .o is conceptually more of a "source" than a "library," but Xcode thinks
# of "sources" as things to compile and "libraries" (or "frameworks") as
# things to link with. Adding an object file to an Xcode target's frameworks
# phase works properly.
library_extensions = ['a', 'dylib', 'framework', 'o']
basename = posixpath.basename(source)
(root, ext) = posixpath.splitext(basename)
if ext:
ext = ext[1:].lower()
if ext in source_extensions and type != 'none':
xct.SourcesPhase().AddFile(source)
elif ext in library_extensions and type != 'none':
xct.FrameworksPhase().AddFile(source)
else:
# Files that aren't added to a sources or frameworks build phase can still
# go into the project file, just not as part of a build phase.
pbxp.AddOrGetFileInRootGroup(source)
def AddResourceToTarget(resource, pbxp, xct):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
xct.ResourcesPhase().AddFile(resource)
def AddHeaderToTarget(header, pbxp, xct, is_public):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
xct.HeadersPhase().AddFile(header, settings)
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
In some rare cases, it is appropriate to expand Xcode variables when a
project file is generated. For any substring $(VAR) in string, if VAR is a
key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
Any $(VAR) substring in string for which VAR is not a key in the expansions
dict will remain in the returned string.
"""
matches = _xcode_variable_re.findall(string)
if matches == None:
return string
matches.reverse()
for match in matches:
(to_replace, variable) = match
if not variable in expansions:
continue
replacement = expansions[variable]
string = re.sub(re.escape(to_replace), replacement, string)
return string
_xcode_define_re = re.compile(r'([\\\"\' ])')
def EscapeXcodeDefine(s):
"""We must escape the defines that we give to XCode so that it knows not to
split on spaces and to respect backslash and quote literals. However, we
must not quote the define, or Xcode will incorrectly intepret variables
especially $(inherited)."""
return re.sub(_xcode_define_re, r'\\\1', s)
def PerformBuild(data, configurations, params):
options = params['options']
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
for config in configurations:
arguments = ['xcodebuild', '-project', xcodeproj_path]
arguments += ['-configuration', config]
print "Building [%s]: %s" % (config, arguments)
subprocess.check_call(arguments)
def CalculateGeneratorInputInfo(params):
toplevel = params['options'].toplevel_dir
if params.get('flavor') == 'ninja':
generator_dir = os.path.relpath(params['options'].generator_output or '.')
output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, output_dir, 'gypfiles-xcode-ninja'))
else:
output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, output_dir, 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': toplevel,
'qualified_out_dir': qualified_out_dir,
}
def GenerateOutput(target_list, target_dicts, data, params):
# Optionally configure each spec to use ninja as the external builder.
ninja_wrapper = params.get('flavor') == 'ninja'
if ninja_wrapper:
(target_list, target_dicts, data) = \
gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
upgrade_check_project_version = \
generator_flags.get('xcode_upgrade_check_project_version', None)
# Format upgrade_check_project_version with leading zeros as needed.
if upgrade_check_project_version:
upgrade_check_project_version = str(upgrade_check_project_version)
while len(upgrade_check_project_version) < 4:
upgrade_check_project_version = '0' + upgrade_check_project_version
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
xcode_projects[build_file] = xcp
pbxp = xcp.project
# Set project-level attributes from multiple options
project_attributes = {};
if parallel_builds:
project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
if upgrade_check_project_version:
project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
project_attributes['LastTestingUpgradeCheck'] = \
upgrade_check_project_version
project_attributes['LastSwiftUpdateCheck'] = \
upgrade_check_project_version
pbxp.SetProperty('attributes', project_attributes)
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
main_group = pbxp.GetProperty('mainGroup')
build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
main_group.AppendChild(build_group)
for included_file in build_file_dict['included_files']:
build_group.AddOrGetFileByPath(included_file, False)
xcode_targets = {}
xcode_target_to_target_dict = {}
for qualified_target in target_list:
[build_file, target_name, toolset] = \
gyp.common.ParseQualifiedTarget(qualified_target)
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in xcode build (target %s)' %
qualified_target)
configuration_names = [spec['default_configuration']]
for configuration_name in sorted(spec['configurations'].keys()):
if configuration_name not in configuration_names:
configuration_names.append(configuration_name)
xcp = xcode_projects[build_file]
pbxp = xcp.project
# Set up the configurations for the target according to the list of names
# supplied.
xccl = CreateXCConfigurationList(configuration_names)
# Create an XCTarget subclass object for the target. The type with
# "+bundle" appended will be used if the target has "mac_bundle" set.
# loadable_modules not in a mac_bundle are mapped to
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
# to create a single-file mh_bundle.
_types = {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
'shared_library+bundle': 'com.apple.product-type.framework',
'executable+extension+bundle': 'com.apple.product-type.app-extension',
'executable+watch+extension+bundle':
'com.apple.product-type.watchkit-extension',
'executable+watch+bundle':
'com.apple.product-type.application.watchapp',
'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
}
target_properties = {
'buildConfigurationList': xccl,
'name': target_name,
}
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
is_app_extension = int(spec.get('ios_app_extension', 0))
is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
is_watch_app = int(spec.get('ios_watch_app', 0))
if type != 'none':
type_bundle_key = type
if is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
'(target %s)' % target_name)
elif is_app_extension:
assert is_bundle, ('ios_app_extension flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+extension+bundle'
elif is_watchkit_extension:
assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+watch+extension+bundle'
elif is_watch_app:
assert is_bundle, ('ios_watch_app flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+watch+bundle'
elif is_bundle:
type_bundle_key += '+bundle'
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
except KeyError, e:
gyp.common.ExceptionAppend(e, "-- unknown product type while "
"writing target %s" % target_name)
raise
else:
xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
target_properties['productName'] = target_product_name
xct = xctarget_type(target_properties, parent=pbxp,
force_outdir=spec.get('product_dir'),
force_prefix=spec.get('product_prefix'),
force_extension=spec.get('product_extension'))
pbxp.AppendProperty('targets', xct)
xcode_targets[qualified_target] = xct
xcode_target_to_target_dict[xct] = spec
spec_actions = spec.get('actions', [])
spec_rules = spec.get('rules', [])
# Xcode has some "issues" with checking dependencies for the "Compile
# sources" step with any source files/headers generated by actions/rules.
# To work around this, if a target is building anything directly (not
# type "none"), then a second target is used to run the GYP actions/rules
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
# The Xcode "issues" don't affect xcode-ninja builds, since the dependency
# logic all happens in ninja. Don't bother creating the extra targets in
# that case.
if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
support_xccl = CreateXCConfigurationList(configuration_names);
support_target_suffix = generator_flags.get(
'support_target_suffix', ' Support')
support_target_properties = {
'buildConfigurationList': support_xccl,
'name': target_name + support_target_suffix,
}
if target_product_name:
support_target_properties['productName'] = \
target_product_name + ' Support'
support_xct = \
gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
parent=pbxp)
pbxp.AppendProperty('targets', support_xct)
xct.AddDependency(support_xct)
# Hang the support target off the main target so it can be tested/found
# by the generator during Finalize.
xct.support_target = support_xct
prebuild_index = 0
# Add custom shell script phases for "actions" sections.
for action in spec_actions:
# There's no need to write anything into the script to ensure that the
# output directories already exist, because Xcode will look at the
# declared outputs and automatically ensure that they exist for us.
# Do we have a message to print when this action runs?
message = action.get('message')
if message:
message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
else:
message = ''
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(action['action'])
# Convert Xcode-type variable references to sh-compatible environment
# variable references.
message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
action_string)
script = ''
# Include the optional message
if message_sh:
script += message_sh + '\n'
# Be sure the script runs in exec, and that if exec fails, the script
# exits signalling an error.
script += 'exec ' + action_string_sh + '\nexit 1\n'
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': action['inputs'],
'name': 'Action "' + action['action_name'] + '"',
'outputPaths': action['outputs'],
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# TODO(mark): Should verify that at most one of these is specified.
if int(action.get('process_outputs_as_sources', False)):
for output in action['outputs']:
AddSourceToTarget(output, type, pbxp, xct)
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
for output in action['outputs']:
AddResourceToTarget(output, pbxp, xct)
# tgt_mac_bundle_resources holds the list of bundle resources so
# the rule processing can check against it.
if is_bundle:
tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
else:
tgt_mac_bundle_resources = []
# Add custom shell script phases driving "make" for "rules" sections.
#
# Xcode's built-in rule support is almost powerful enough to use directly,
# but there are a few significant deficiencies that render them unusable.
# There are workarounds for some of its inadequacies, but in aggregate,
# the workarounds added complexity to the generator, and some workarounds
# actually require input files to be crafted more carefully than I'd like.
# Consequently, until Xcode rules are made more capable, "rules" input
# sections will be handled in Xcode output by shell script build phases
# performed prior to the compilation phase.
#
# The following problems with Xcode rules were found. The numbers are
# Apple radar IDs. I hope that these shortcomings are addressed, I really
# liked having the rules handled directly in Xcode during the period that
# I was prototyping this.
#
# 6588600 Xcode compiles custom script rule outputs too soon, compilation
# fails. This occurs when rule outputs from distinct inputs are
# interdependent. The only workaround is to put rules and their
# inputs in a separate target from the one that compiles the rule
# outputs. This requires input file cooperation and it means that
# process_outputs_as_sources is unusable.
# 6584932 Need to declare that custom rule outputs should be excluded from
# compilation. A possible workaround is to lie to Xcode about a
# rule's output, giving it a dummy file it doesn't know how to
# compile. The rule action script would need to touch the dummy.
# 6584839 I need a way to declare additional inputs to a custom rule.
# A possible workaround is a shell script phase prior to
# compilation that touches a rule's primary input files if any
# would-be additional inputs are newer than the output. Modifying
# the source tree - even just modification times - feels dirty.
# 6564240 Xcode "custom script" build rules always dump all environment
# variables. This is a low-prioroty problem and is not a
# show-stopper.
rules_by_ext = {}
for rule in spec_rules:
rules_by_ext[rule['extension']] = rule
# First, some definitions:
#
# A "rule source" is a file that was listed in a target's "sources"
# list and will have a rule applied to it on the basis of matching the
# rule's "extensions" attribute. Rule sources are direct inputs to
# rules.
#
# Rule definitions may specify additional inputs in their "inputs"
# attribute. These additional inputs are used for dependency tracking
# purposes.
#
# A "concrete output" is a rule output with input-dependent variables
# resolved. For example, given a rule with:
# 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
# if the target's "sources" list contained "one.ext" and "two.ext",
# the "concrete output" for rule input "two.ext" would be "two.cc". If
# a rule specifies multiple outputs, each input file that the rule is
# applied to will have the same number of concrete outputs.
#
# If any concrete outputs are outdated or missing relative to their
# corresponding rule_source or to any specified additional input, the
# rule action must be performed to generate the concrete outputs.
# concrete_outputs_by_rule_source will have an item at the same index
# as the rule['rule_sources'] that it corresponds to. Each item is a
# list of all of the concrete outputs for the rule_source.
concrete_outputs_by_rule_source = []
# concrete_outputs_all is a flat list of all concrete outputs that this
# rule is able to produce, given the known set of input files
# (rule_sources) that apply to it.
concrete_outputs_all = []
# messages & actions are keyed by the same indices as rule['rule_sources']
# and concrete_outputs_by_rule_source. They contain the message and
# action to perform after resolving input-dependent variables. The
# message is optional, in which case None is stored for each rule source.
messages = []
actions = []
for rule_source in rule.get('rule_sources', []):
rule_source_dirname, rule_source_basename = \
posixpath.split(rule_source)
(rule_source_root, rule_source_ext) = \
posixpath.splitext(rule_source_basename)
# These are the same variable names that Xcode uses for its own native
# rule support. Because Xcode's rule engine is not being used, they
# need to be expanded as they are written to the makefile.
rule_input_dict = {
'INPUT_FILE_BASE': rule_source_root,
'INPUT_FILE_SUFFIX': rule_source_ext,
'INPUT_FILE_NAME': rule_source_basename,
'INPUT_FILE_PATH': rule_source,
'INPUT_FILE_DIRNAME': rule_source_dirname,
}
concrete_outputs_for_this_rule_source = []
for output in rule.get('outputs', []):
# Fortunately, Xcode and make both use $(VAR) format for their
# variables, so the expansion is the only transformation necessary.
# Any remaning $(VAR)-type variables in the string can be given
# directly to make, which will pick up the correct settings from
# what Xcode puts into the environment.
concrete_output = ExpandXcodeVariables(output, rule_input_dict)
concrete_outputs_for_this_rule_source.append(concrete_output)
# Add all concrete outputs to the project.
pbxp.AddOrGetFileInRootGroup(concrete_output)
concrete_outputs_by_rule_source.append( \
concrete_outputs_for_this_rule_source)
concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
# TODO(mark): Should verify that at most one of these is specified.
if int(rule.get('process_outputs_as_sources', False)):
for output in concrete_outputs_for_this_rule_source:
AddSourceToTarget(output, type, pbxp, xct)
# If the file came from the mac_bundle_resources list or if the rule
# is marked to process outputs as bundle resource, do so.
was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
for output in concrete_outputs_for_this_rule_source:
AddResourceToTarget(output, pbxp, xct)
# Do we have a message to print when this rule runs?
message = rule.get('message')
if message:
message = gyp.common.EncodePOSIXShellArgument(message)
message = ExpandXcodeVariables(message, rule_input_dict)
messages.append(message)
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(rule['action'])
action = ExpandXcodeVariables(action_string, rule_input_dict)
actions.append(action)
if len(concrete_outputs_all) > 0:
      # TODO(mark): There's a possibility for collision here.  Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = '%s.make' % re.sub(
'[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
makefile_path = os.path.join(xcode_projects[build_file].path,
makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only
# if it's got changes?
makefile = open(makefile_path, 'wb')
# make will build the first target in the makefile by default. By
# convention, it's called "all". List all (or at least one)
# concrete output for each rule source as a prerequisite of the "all"
# target.
makefile.write('all: \\\n')
for concrete_output_index in \
xrange(0, len(concrete_outputs_by_rule_source)):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
# Otherwise, "all" could just contain the entire list of
# concrete_outputs_all.
concrete_output = \
concrete_outputs_by_rule_source[concrete_output_index][0]
if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (concrete_output, eol))
for (rule_source, concrete_outputs, message, action) in \
zip(rule['rule_sources'], concrete_outputs_by_rule_source,
messages, actions):
makefile.write('\n')
# Add a rule that declares it can build each concrete output of a
# rule source. Collect the names of the directories that are
# required.
concrete_output_dirs = []
for concrete_output_index in xrange(0, len(concrete_outputs)):
concrete_output = concrete_outputs[concrete_output_index]
if concrete_output_index == 0:
bol = ''
else:
bol = ' '
makefile.write('%s%s \\\n' % (bol, concrete_output))
concrete_output_dir = posixpath.dirname(concrete_output)
if (concrete_output_dir and
concrete_output_dir not in concrete_output_dirs):
concrete_output_dirs.append(concrete_output_dir)
makefile.write(' : \\\n')
# The prerequisites for this rule are the rule source itself and
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
for prerequisite_index in xrange(0, len(prerequisites)):
prerequisite = prerequisites[prerequisite_index]
if prerequisite_index == len(prerequisites) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (prerequisite, eol))
# Make sure that output directories exist before executing the rule
# action.
if len(concrete_output_dirs) > 0:
makefile.write('\t@mkdir -p "%s"\n' %
'" "'.join(concrete_output_dirs))
# The rule message and action have already had the necessary variable
# substitutions performed.
if message:
# Mark it with note: so Xcode picks it up in build output.
makefile.write('\t@echo note: %s\n' % message)
makefile.write('\t%s\n' % action)
makefile.close()
# It might be nice to ensure that needed output directories exist
# here rather than in each target in the Makefile, but that wouldn't
# work if there ever was a concrete output that had an input-dependent
# variable anywhere other than in the leaf position.
# Don't declare any inputPaths or outputPaths. If they're present,
# Xcode will provide a slight optimization by only running the script
# phase if any output is missing or outdated relative to any input.
# Unfortunately, it will also assume that all outputs are touched by
# the script, and if the outputs serve as files in a compilation
# phase, they will be unconditionally rebuilt. Since make might not
# rebuild everything that could be declared here as an output, this
# extra compilation activity is unnecessary. With inputPaths and
# outputPaths not supplied, make will always be called, but it knows
# enough to not do anything when everything is up-to-date.
# To help speed things up, pass -j COUNT to make so it does some work
# in parallel. Don't use ncpus because Xcode will build ncpus targets
# in parallel and if each target happens to have a rules step, there
# would be ncpus^2 things going. With a machine that has 2 quad-core
# Xeons, a build can quickly run out of processes based on
# scheduling/other tasks, and randomly failing builds are no good.
script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'name': 'Rule "' + rule['rule_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# Extra rule inputs also go into the project file. Concrete outputs were
# already added when they were computed.
groups = ['inputs', 'inputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for group in groups:
for item in rule.get(group, []):
pbxp.AddOrGetFileInRootGroup(item)
# Add "sources".
for source in spec.get('sources', []):
(source_root, source_extension) = posixpath.splitext(source)
if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not
# already there.
AddSourceToTarget(source, type, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(source)
# Add "mac_bundle_resources" and "mac_framework_private_headers" if
# it's a bundle of any type.
if is_bundle:
for resource in tgt_mac_bundle_resources:
(resource_root, resource_extension) = posixpath.splitext(resource)
if resource_extension[1:] not in rules_by_ext:
AddResourceToTarget(resource, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(resource)
for header in spec.get('mac_framework_private_headers', []):
AddHeaderToTarget(header, pbxp, xct, False)
# Add "mac_framework_headers". These can be valid for both frameworks
# and static libraries.
if is_bundle or type == 'static_library':
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
# Add "copies".
pbxcp_dict = {}
for copy_group in spec.get('copies', []):
dest = copy_group['destination']
if dest[0] not in ('/', '$'):
# Relative paths are relative to $(SRCROOT).
dest = '$(SRCROOT)/' + dest
code_sign = int(copy_group.get('xcode_code_sign', 0))
settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign];
# Coalesce multiple "copies" sections in the same target with the same
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
# they'll wind up with ID collisions.
pbxcp = pbxcp_dict.get(dest, None)
if pbxcp is None:
pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
'name': 'Copy to ' + copy_group['destination']
},
parent=xct)
pbxcp.SetDestination(dest)
# TODO(mark): The usual comment about this knowing too much about
# gyp.xcodeproj_file internals applies.
xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
pbxcp_dict[dest] = pbxcp
for file in copy_group['files']:
pbxcp.AddFile(file, settings)
# Excluded files can also go into the project file.
if not skip_excluded_files:
for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
'mac_framework_private_headers']:
excluded_key = key + '_excluded'
for item in spec.get(excluded_key, []):
pbxp.AddOrGetFileInRootGroup(item)
# So can "inputs" and "outputs" sections of "actions" groups.
groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for action in spec.get('actions', []):
for group in groups:
for item in action.get(group, []):
# Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
# sources.
if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
pbxp.AddOrGetFileInRootGroup(item)
for postbuild in spec.get('postbuilds', []):
action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
script = 'exec ' + action_string_sh + '\nexit 1\n'
# Make the postbuild step depend on the output of ld or ar from this
# target. Apparently putting the script step after the link step isn't
# sufficient to ensure proper ordering in all cases. With an input
# declared but no outputs, the script step should run every time, as
# desired.
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
xct.AppendProperty('buildPhases', ssbp)
# Add dependencies before libraries, because adding a dependency may imply
# adding a library. It's preferable to keep dependencies listed first
# during a link phase so that they can override symbols that would
# otherwise be provided by libraries, which will usually include system
# libraries. On some systems, ld is finicky and even requires the
# libraries to be ordered in such a way that unresolved symbols in
# earlier-listed libraries may only be resolved by later-listed libraries.
# The Mac linker doesn't work that way, but other platforms do, and so
# their linker invocations need to be constructed in this way. There's
# no compelling reason for Xcode's linker invocations to differ.
if 'dependencies' in spec:
for dependency in spec['dependencies']:
xct.AddDependency(xcode_targets[dependency])
# The support project also gets the dependencies (in case they are
# needed for the actions/rules to work).
if support_xct:
support_xct.AddDependency(xcode_targets[dependency])
if 'libraries' in spec:
for library in spec['libraries']:
xct.FrameworksPhase().AddFile(library)
# Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
# I wish Xcode handled this automatically.
library_dir = posixpath.dirname(library)
if library_dir not in xcode_standard_library_dirs and (
not xct.HasBuildSetting(_library_search_paths_var) or
library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
xct.AppendBuildSetting(_library_search_paths_var, library_dir)
for configuration_name in configuration_names:
configuration = spec['configurations'][configuration_name]
xcbc = xct.ConfigurationNamed(configuration_name)
for include_dir in configuration.get('mac_framework_dirs', []):
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
for library_dir in configuration.get('library_dirs', []):
if library_dir not in xcode_standard_library_dirs and (
not xcbc.HasBuildSetting(_library_search_paths_var) or
library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
if 'defines' in configuration:
for define in configuration['defines']:
set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():
xcbc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in configuration:
config_ref = pbxp.AddOrGetFileInRootGroup(
configuration['xcode_config_file'])
xcbc.SetBaseConfiguration(config_ref)
build_files = []
for build_file, build_file_dict in data.iteritems():
if build_file.endswith('.gyp'):
build_files.append(build_file)
for build_file in build_files:
xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
for build_file in build_files:
xcode_projects[build_file].Finalize2(xcode_targets,
xcode_target_to_target_dict)
for build_file in build_files:
xcode_projects[build_file].Write()
| mit |
avanov/django | tests/custom_pk/models.py | 99 | 1256 | # -*- coding: utf-8 -*-
"""
Using a custom primary key
By default, Django adds an ``"id"`` field to each model. But you can override
this behavior by explicitly adding ``primary_key=True`` to a field.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from .fields import MyAutoField
@python_2_unicode_compatible
class Employee(models.Model):
employee_code = models.IntegerField(primary_key=True, db_column='code')
first_name = models.CharField(max_length=20)
last_name = models.CharField(max_length=20)
class Meta:
ordering = ('last_name', 'first_name')
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class Business(models.Model):
name = models.CharField(max_length=20, primary_key=True)
employees = models.ManyToManyField(Employee)
class Meta:
verbose_name_plural = 'businesses'
def __str__(self):
return self.name
@python_2_unicode_compatible
class Bar(models.Model):
id = MyAutoField(primary_key=True, db_index=True)
def __str__(self):
return repr(self.pk)
class Foo(models.Model):
bar = models.ForeignKey(Bar)
| bsd-3-clause |
zstackio/zstack-woodpecker | integrationtest/vm/mini/multiclusters/paths/multi_path198.py | 1 | 2957 | import zstackwoodpecker.test_state as ts_header
import os
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template5", checking_point=1, faild_point=100000, path_list=[
[TestAction.create_mini_vm, 'vm1', 'cluster=cluster2'],
[TestAction.change_vm_ha, 'vm1'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup1'],
[TestAction.create_mini_vm, 'vm2', 'cluster=cluster1'],
[TestAction.stop_vm, 'vm2'],
[TestAction.start_vm, 'vm2'],
[TestAction.resize_volume, 'vm1', 5*1024*1024],
[TestAction.poweroff_only, 'cluster=cluster2'],
[TestAction.create_volume, 'volume1', 'size=random', 'cluster=cluster2', 'flag=scsi'],
[TestAction.create_volume, 'volume2', 'cluster=cluster2', 'flag=thin,scsi'],
[TestAction.add_image, 'image1', 'root', 'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'],
[TestAction.attach_volume, 'vm1', 'volume1'],
[TestAction.create_volume_backup, 'volume1', 'volume1-backup2'],
[TestAction.change_vm_ha, 'vm1'],
[TestAction.stop_vm, 'vm1'],
[TestAction.use_volume_backup, 'volume1-backup2'],
[TestAction.start_vm, 'vm1'],
[TestAction.change_vm_ha, 'vm1'],
[TestAction.delete_image, 'image1'],
[TestAction.expunge_image, 'image1'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup3'],
[TestAction.destroy_vm, 'vm1'],
[TestAction.recover_vm, 'vm1'],
[TestAction.poweroff_only, 'cluster=cluster1'],
[TestAction.resize_data_volume, 'volume2', 5*1024*1024],
[TestAction.attach_volume, 'vm1', 'volume2'],
[TestAction.create_volume, 'volume3', 'cluster=cluster1', 'flag=thick,scsi'],
[TestAction.attach_volume, 'vm2', 'volume3'],
[TestAction.start_vm, 'vm2'],
[TestAction.create_volume_backup, 'volume3', 'volume3-backup5'],
[TestAction.stop_vm, 'vm2'],
[TestAction.use_volume_backup, 'volume3-backup5'],
[TestAction.create_mini_vm, 'vm3', 'cluster=cluster1', 'flag=thick'],
[TestAction.delete_volume, 'volume3'],
[TestAction.expunge_volume, 'volume3'],
[TestAction.add_image, 'image2', 'root', os.environ.get('isoForVmUrl')],
[TestAction.create_vm_by_image, 'image2', 'iso', 'vm4', 'cluster=cluster2'],
[TestAction.start_vm, 'vm1'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup6'],
[TestAction.stop_vm, 'vm1'],
[TestAction.create_volume, 'volume4', 'cluster=cluster1', 'flag=scsi'],
[TestAction.resize_data_volume, 'volume4', 5*1024*1024],
[TestAction.poweroff_only, 'cluster=cluster1'],
[TestAction.use_vm_backup, 'vm1-backup1'],
])
'''
The final status:
Running:['vm4']
Stopped:['vm2', 'vm3', 'vm1']
Enadbled:['vm1-backup1', 'volume1-backup2', 'vm1-backup3', 'volume1-backup3', 'volume3-backup5', 'vm1-backup6', 'volume2-backup6', 'image2']
attached:['volume2']
Detached:['volume1', 'volume4']
Deleted:[]
Expunged:['volume3', 'image1']
Ha:[]
Group:
vm_backup2:['vm1-backup3', 'volume1-backup3']---vm1@volume1
vm_backup3:['vm1-backup6', 'volume2-backup6']---vm1@volume2
vm_backup1:['vm1-backup1']---vm1@
''' | apache-2.0 |
vlegoff/tsunami | src/primaires/scripting/fonctions/champs.py | 1 | 5366 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la fonction champs."""
import re
from primaires.scripting.fonction import Fonction
from primaires.scripting.instruction import ErreurExecution
class ClasseFonction(Fonction):
"""Retourne les champs de la structure indiquée."""
@classmethod
def init_types(cls):
cls.ajouter_types(cls.champs, "Structure")
cls.ajouter_types(cls.champs, "Structure", "str")
@staticmethod
def champs(structure, filtres=""):
"""Retourne les champs de la structure indiquée.
Paramètres à préciser :
* structure : la structure dont on veut récupérer les champs.
* filtres (optionnel) : une suite de filtres séparés par un espace.
Cette fonction retourne tous les champs d'une structure. Pour
les structures enregistrées, cela inclut le champ "structure"
(contenant le nom du groupe de la structure) et "id" (contenant
l'ID de la structure). Cette fonction retourne une liste de
couples (clé du champ, valeur du champ) que vous pouvez parcourir
(voir les exemples ci-dessous).
Vous pouvez également préciser une liste de filtres séparés par
un espace. Les filtres sont des expressions régulières (ce peut
être, simplement, le nom du champ). Si le filtre commence par
un tiret (-), les champs correspondants sont supprimés. Là encore,
consultez les exemples pour voir leur utilisation.
Exemples d'utilisation :
# Ces exemples utiliseront la même structure que l'on crée ici
structure = creer_structure()
ecrire structure "auteur" joueur("Kredh")
ecrire structure "creation" temps()
ecrire structure "titre" "sans titre"
ecrire structure "valide" 1
# Retourne et parcourt la liste de champs
pour chaque couple dans champs(structure):
cle = recuperer(couple, 1)
valeur = recuperer(couple, 2)
dire personnage "Le champ $cle vaut $valeur."
fait
# Ce code envoie au personnage
# Le champ auteur vaut Kredh.
# Le champ creation vaut <TempsVariable ...>
# Le champ titre vaut sans titre.
# Le champ valide vaut 1.
# (notez que la fonction peut retourner les champs dans un
# ordre différent.)
# Exemple de filtrage
champs = champs(structure, "titre auteur")
# Ne récupère que les champs 'titre' et 'auteur'
champs = champs(structure, "-creation")
# Récupère tous les champs sauf 'creation'
champs = champs(structure, ".*e")
# Récupère tous les champs dont la clé finit par 'e'.
"""
        # Start by retrieving all the fields, unfiltered
        champs = structure.donnees.copy()
        # Build a regular expression from the given filters (the internal
        # e_existe field is always excluded)
expression = "^(?!e_existe$"
contraires = []
correspond = []
for filtre in filtres.split(" "):
if not filtre:
continue
if filtre.startswith("-"):
contraires.append(filtre[1:] + "$")
else:
correspond.append(filtre + "$")
if contraires:
expression += "|" + "|".join(contraires)
expression += ")(" + "|".join(correspond) + ")"
print("travaille avec", expression)
for champ, valeur in list(champs.items()):
if not re.search(expression, champ, re.I):
del champs[champ]
        # Convert the fields dictionary into a list of pairs
liste = []
for champ, valeur in champs.items():
liste.append([champ, valeur])
return liste
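# Illustrative sketch, not part of the original module: a standalone copy
# of the regex construction used in champs() above, so the filter semantics
# can be checked outside the game engine. The sample field names and
# expected sets below are made up for the demonstration.
def _demo_champs_filtering():
    donnees = {"titre": "sans titre", "auteur": "Kredh", "creation": 0}
    cases = (
        ("titre auteur", {"titre", "auteur"}),  # keep only these fields
        ("-creation", {"titre", "auteur"}),     # drop 'creation'
        (".*e", {"titre"}),                     # keys ending in 'e'
    )
    for filtres, expected in cases:
        contraires = []
        correspond = []
        for filtre in filtres.split(" "):
            if not filtre:
                continue
            if filtre.startswith("-"):
                contraires.append(filtre[1:] + "$")
            else:
                correspond.append(filtre + "$")
        expression = "^(?!e_existe$"
        if contraires:
            expression += "|" + "|".join(contraires)
        expression += ")(" + "|".join(correspond) + ")"
        kept = {key for key in donnees if re.search(expression, key, re.I)}
        assert kept == expected, (filtres, expression, kept)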
| bsd-3-clause |
GunoH/intellij-community | python/testData/inspections/PyTypeCheckerInspection/ParametrizedBuiltinCollectionsAndTheirTypingAliasesAreEquivalent.py | 10 | 2016 | from typing import Dict, FrozenSet, List, Set, Tuple
def expects_builtin_list(xs: list[int]):
expects_typing_List(xs)
expects_typing_List(<warning descr="Expected type 'list[int]', got 'list[str]' instead">['a']</warning>)
def expects_typing_List(xs: List[int]):
expects_builtin_list(xs)
expects_builtin_list(<warning descr="Expected type 'list[int]', got 'list[str]' instead">['a']</warning>)
def expects_builtin_set(xs: set[int]):
expects_typing_Set(xs)
expects_typing_Set(<warning descr="Expected type 'set[int]', got 'set[str]' instead">{'a'}</warning>)
def expects_typing_Set(xs: Set[int]):
expects_builtin_set(xs)
expects_builtin_set(<warning descr="Expected type 'set[int]', got 'set[str]' instead">{'a'}</warning>)
def expects_builtin_frozenset(xs: frozenset[int]):
expects_typing_FrozenSet(xs)
expects_typing_FrozenSet(<warning descr="Expected type 'frozenset[int]', got 'frozenset[str]' instead">frozenset(['a'])</warning>)
def expects_typing_FrozenSet(xs: FrozenSet[int]):
expects_builtin_frozenset(xs)
expects_builtin_frozenset(<warning descr="Expected type 'frozenset[int]', got 'frozenset[str]' instead">frozenset(['a'])</warning>)
def expects_builtin_dict(xs: dict[str, int]):
expects_typing_Dict(xs)
expects_typing_Dict(<warning descr="Expected type 'dict[str, int]', got 'dict[int, str]' instead">{42: 'a'}</warning>)
def expects_typing_Dict(xs: Dict[str, int]):
expects_builtin_dict(xs)
expects_builtin_dict(<warning descr="Expected type 'dict[str, int]', got 'dict[int, str]' instead">{42: 'a'}</warning>)
def expects_builtin_tuple(xs: tuple[str, int]):
expects_typing_Tuple(xs)
expects_typing_Tuple(<warning descr="Expected type 'tuple[str, int]', got 'tuple[int, str]' instead">(42, 'a')</warning>)
def expects_typing_Tuple(xs: Tuple[str, int]):
expects_builtin_tuple(xs)
expects_builtin_tuple(<warning descr="Expected type 'tuple[str, int]', got 'tuple[int, str]' instead">(42, 'a')</warning>)
| apache-2.0 |
madmath/sous-chef | src/member/tests.py | 1 | 69056 | import datetime
import json
from django.test import TestCase, Client
# Note: the next import shadows django.test.Client (and member.models.User
# is in turn shadowed by django.contrib.auth.models.User below); the HTTP
# test client remains available as self.client on TestCase.
from member.models import Member, Client, User, Address, Referencing
from member.models import Contact, Option, Client_option, Restriction, Route
from member.models import Client_avoid_ingredient, Client_avoid_component
from member.models import ClientScheduledStatus
from member.models import CELL, HOME, EMAIL
from meal.models import Restricted_item, Ingredient, Component
from datetime import date
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse, reverse_lazy
from order.models import Order
from member.factories import(
RouteFactory, ClientFactory, ClientScheduledStatusFactory, MemberFactory
)
from meal.factories import IngredientFactory, ComponentFactory
from django.core.management import call_command
from django.utils.six import StringIO
from order.factories import OrderFactory
from member.forms import(
ClientBasicInformation, ClientAddressInformation,
load_initial_data,
)
class MemberContact(TestCase):
"""
Contact information data should be attached to members.
Three types of data: EMAIL, CELL and HOME.
"""
@classmethod
def setUpTestData(cls):
cls.member = MemberFactory()
def test_home_phone(self):
"""
Test that the factory properly sets a home phone information.
"""
self.assertNotEqual(self.member.home_phone, "")
def test_add_contact_information(self):
"""
Test the add_contact_information method when no contact information
of the given type exists yet.
It is supposed to return TRUE if a new contact information is created.
"""
self.assertTrue(
self.member.add_contact_information(CELL, '438-000-0000')
)
        self.assertEqual(self.member.cell_phone, '438-000-0000')
        self.assertTrue(
            self.member.add_contact_information(EMAIL, '[email protected]')
        )
        self.assertEqual(self.member.email, '[email protected]')
def test_add_contact_information_empty(self):
"""
Passing an empty value should not update nor create anything, unless
the force_update parameter was passed.
"""
self.member.add_contact_information(CELL, '438-000-0000')
        self.assertEqual(self.member.cell_phone, '438-000-0000')
        self.assertFalse(
            self.member.add_contact_information(CELL, '')
        )
        self.assertEqual(self.member.cell_phone, '438-000-0000')
        self.assertFalse(
            self.member.add_contact_information(CELL, '', True)
        )
        self.assertEqual(self.member.cell_phone, '')
def test_update_contact_information(self):
"""
Test the add_contact_information method when a contact information
of the given type already exists. The contact information should be
updated, instead of creating a new one.
"""
self.assertFalse(
self.member.add_contact_information(HOME, '514-000-0000')
)
        self.assertEqual(self.member.home_phone, '514-000-0000')
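# Illustrative sketch, not the real member.models implementation: a
# plausible reading of the add_contact_information contract exercised by
# the tests above. It returns True only when a new Contact row of the
# given type is created, and ignores empty values unless force_update is
# set.
def _add_contact_information_sketch(member, contact_type, value,
                                    force_update=False):
    contact = member.member_contact.filter(type=contact_type).first()
    if contact is None:
        if value:
            Contact.objects.create(type=contact_type, value=value,
                                   member=member)
            return True
        return False
    if value or force_update:
        contact.value = value
        contact.save()
    return False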
class MemberTestCase(TestCase):
@classmethod
def setUpTestData(cls):
member = Member.objects.create(
firstname='Katrina', lastname='Heide')
Contact.objects.create(
type='Home phone', value='514-456-7890', member=member)
Contact.objects.create(
type='Cell phone', value='555-555-4444', member=member
)
Contact.objects.create(
type='Work phone', value='555-444-5555', member=member
)
Contact.objects.create(
type='Email', value='[email protected]', member=member
)
def test_str_is_fullname(self):
"""A member must be listed using his/her fullname"""
member = Member.objects.get(firstname='Katrina')
str_member = str(member)
self.assertEqual(str_member, 'Katrina Heide')
def test_home_phone(self):
"""Test that the home phone property is valid"""
member = Member.objects.get(firstname="Katrina")
self.assertEqual(member.home_phone, '514-456-7890')
def test_cell_phone(self):
"""Test that the cell phone property is valid"""
member = Member.objects.get(firstname="Katrina")
self.assertEqual(member.cell_phone, '555-555-4444')
def test_work_phone(self):
"""Test that the work phone property is valid"""
member = Member.objects.get(firstname="Katrina")
self.assertEqual(member.work_phone, '555-444-5555')
def test_email(self):
"""Test that the email property is valid"""
member = Member.objects.get(firstname="Katrina")
self.assertEqual(member.email, "[email protected]")
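# Illustrative sketch, not the real implementation: the phone and email
# properties asserted above plausibly resolve to the value of the first
# Contact row of the matching type (empty string when absent).
def _contact_value_sketch(member, contact_type):
    contact = member.member_contact.filter(type=contact_type).first()
    return contact.value if contact else ''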
class ReferencingTestCase(TestCase):
@classmethod
def setUpTestData(cls):
professional_member = Member.objects.create(firstname='Dr. John',
lastname='Taylor')
billing_address = Address.objects.create(
number=123, street='De Bullion',
city='Montreal', postal_code='H3C4G5')
beneficiary_member = Member.objects.create(firstname='Angela',
lastname='Desousa',
address=billing_address)
client = Client.objects.create(
member=beneficiary_member, billing_member=beneficiary_member)
Referencing.objects.create(referent=professional_member, client=client,
date=date(2015, 3, 15))
def test_str_includes_all_names(self):
"""A reference listing shows by which member for which client"""
professional_member = Member.objects.get(firstname='Dr. John')
beneficiary_member = Member.objects.get(firstname='Angela')
reference = Referencing.objects.get(referent=professional_member)
self.assertTrue(professional_member.firstname in str(reference))
self.assertTrue(professional_member.lastname in str(reference))
self.assertTrue(beneficiary_member.firstname in str(reference))
self.assertTrue(beneficiary_member.lastname in str(reference))
class ContactTestCase(TestCase):
@classmethod
def setUpTestData(cls):
member = Member.objects.create(
firstname='Katrina', lastname='Heide')
Contact.objects.create(
type='Home phone', value='514-456-7890', member=member)
def test_str_is_fullname(self):
"""A contact must be listed using his/her fullname"""
member = Member.objects.get(firstname='Katrina')
contact = Contact.objects.get(member=member)
self.assertTrue(member.firstname in str(contact))
self.assertTrue(member.lastname in str(contact))
class AddressTestCase(TestCase):
@classmethod
def setUpTestData(cls):
address = Address.objects.create(
number=123, street='De Bullion',
city='Montreal', postal_code='H3C4G5')
Member.objects.create(
firstname='Katrina', lastname='Heide',
address=address)
def test_str_includes_street(self):
"""An address listing must include the street name"""
member = Member.objects.get(firstname='Katrina')
# address = Address.objects.get(member=member)
self.assertTrue('De Bullion' in str(member.address))
class ClientTestCase(TestCase):
fixtures = ['routes.json']
@classmethod
def setUpTestData(cls):
cls.ss_client = ClientFactory(
birthdate=date(1980, 4, 19),
meal_default_week={
'monday_size': 'L',
'monday_main_dish_quantity': 1
}
)
cls.order = OrderFactory(
client=cls.ss_client
)
def test_str_is_fullname(self):
"""A client must be listed using his/her fullname"""
self.assertTrue(self.ss_client.member.firstname in str(self.ss_client))
self.assertTrue(self.ss_client.member.lastname in str(self.ss_client))
def test_is_geolocalized(self):
self.assertTrue(self.ss_client.is_geolocalized)
    def test_age(self):
        """The age computed from the birthdate is correct"""
        # Note: time-dependent assertion -- 36 held for a 1980-04-19
        # birthdate when these tests were written.
        self.assertEqual(self.ss_client.age, 36)
def test_orders(self):
"""Orders of a given client must be available as a model property"""
self.assertEqual(self.ss_client.orders.count(), 1)
self.assertEqual(
self.ss_client.orders.first().creation_date,
date.today())
def test_meal_default(self):
# monday_size = 'L'
self.assertEqual(self.ss_client.meal_default_week['monday_size'], 'L')
# monday_main_dish_quantity = 1
self.assertEqual(
self.ss_client.meal_default_week['monday_main_dish_quantity'], 1
)
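# Illustrative sketch, assuming the usual birthday arithmetic: the age
# property asserted above (36 for a 1980-04-19 birthdate at the time the
# tests were written) can be computed like this.
def _age_sketch(birthdate):
    today = date.today()
    return today.year - birthdate.year - (
        (today.month, today.day) < (birthdate.month, birthdate.day))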
class OptionTestCase(TestCase):
@classmethod
def setUpTestData(cls):
Option.objects.create(
name='PUREE ALL', option_group='preparation')
def test_str_is_fullname(self):
"""Option's string representation is its name"""
name = 'PUREE ALL'
option = Option.objects.get(name=name)
self.assertEqual(name, str(option))
class ClientOptionTestCase(TestCase):
fixtures = ['routes']
@classmethod
def setUpTestData(cls):
cls.clientTest = ClientFactory()
option = Option.objects.create(
name='PUREE ALL', option_group='preparation')
meals_schedule_option = Option.objects.create(
name='meals_schedule', option_group='dish'
)
Client_option.objects.create(client=cls.clientTest, option=option)
Client_option.objects.create(
client=cls.clientTest,
option=meals_schedule_option,
value=json.dumps(['monday', 'wednesday', 'friday']),
)
def test_str_includes_all_names(self):
"""
A Client_option's string representation includes the name
of the client and the name of the option.
"""
name = 'PUREE ALL'
option = Option.objects.get(name=name)
client_option = Client_option.objects.get(
client=self.clientTest, option=option)
self.assertTrue(self.clientTest.member.firstname in str(client_option))
self.assertTrue(self.clientTest.member.lastname in str(client_option))
self.assertTrue(option.name in str(client_option))
def test_meals_schedule_option(self):
"""
Meals schedule must be saved as a client option.
"""
self.assertEqual(
self.clientTest.simple_meals_schedule,
['monday', 'wednesday', 'friday']
)
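# Illustrative sketch, not the real model property: simple_meals_schedule,
# asserted above, plausibly looks up the client's 'meals_schedule' option
# and JSON-decodes its stored value (None when the option is absent).
def _simple_meals_schedule_sketch(client):
    option = Client_option.objects.filter(
        client=client, option__name='meals_schedule').first()
    if option is None or not option.value:
        return None
    return json.loads(option.value)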
class RestrictionTestCase(TestCase):
@classmethod
def setUpTestData(cls):
address = Address.objects.create(
number=123, street='De Bullion',
city='Montreal', postal_code='H3C4G5')
member = Member.objects.create(firstname='Angela',
lastname='Desousa',
address=address)
client = Client.objects.create(
member=member, billing_member=member,
birthdate=date(1980, 4, 19))
restricted_item = Restricted_item.objects.create(
name='pork', restricted_item_group='meat')
Restriction.objects.create(client=client,
restricted_item=restricted_item)
def test_str_includes_all_names(self):
"""A restriction's string representation includes the name
of the client and the name of the restricted_item.
"""
member = Member.objects.get(firstname='Angela')
client = Client.objects.get(member=member)
name = 'pork'
restricted_item = Restricted_item.objects.get(name=name)
restriction = Restriction.objects.get(
client=client, restricted_item=restricted_item)
self.assertTrue(client.member.firstname in str(restriction))
self.assertTrue(client.member.lastname in str(restriction))
self.assertTrue(restricted_item.name in str(restriction))
class ClientAvoidIngredientTestCase(TestCase):
@classmethod
def setUpTestData(cls):
address = Address.objects.create(
number=123, street='De Bullion',
city='Montreal', postal_code='H3C4G5')
member = Member.objects.create(firstname='Angela',
lastname='Desousa',
address=address)
client = Client.objects.create(
member=member, billing_member=member,
birthdate=date(1980, 4, 19))
ingredient = Ingredient.objects.create(
name='ground pork')
Client_avoid_ingredient.objects.create(client=client,
ingredient=ingredient)
def test_str_includes_all_names(self):
"""A client_avoid_ingredient's string representation includes the name
of the client and the name of the ingredient.
"""
member = Member.objects.get(firstname='Angela')
client = Client.objects.get(member=member)
name = 'ground pork'
ingredient = Ingredient.objects.get(name=name)
client_avoid_ingredient = Client_avoid_ingredient.objects.get(
client=client, ingredient=ingredient)
self.assertTrue(
client.member.firstname in str(client_avoid_ingredient))
self.assertTrue(client.member.lastname in str(client_avoid_ingredient))
self.assertTrue(ingredient.name in str(client_avoid_ingredient))
class ClientAvoidComponentTestCase(TestCase):
@classmethod
def setUpTestData(cls):
address = Address.objects.create(
number=123, street='De Bullion',
city='Montreal', postal_code='H3C4G5')
member = Member.objects.create(firstname='Angela',
lastname='Desousa',
address=address)
client = Client.objects.create(
member=member, billing_member=member,
birthdate=date(1980, 4, 19))
component = Component.objects.create(
name='ginger pork', component_group='main dish')
Client_avoid_component.objects.create(client=client,
component=component)
def test_str_includes_all_names(self):
"""A client_avoid_component's string representation includes the name
of the client and the name of the component.
"""
member = Member.objects.get(firstname='Angela')
client = Client.objects.get(member=member)
name = 'ginger pork'
component = Component.objects.get(name=name)
client_avoid_component = Client_avoid_component.objects.get(
client=client, component=component)
self.assertTrue(client.member.firstname in str(client_avoid_component))
self.assertTrue(client.member.lastname in str(client_avoid_component))
self.assertTrue(component.name in str(client_avoid_component))
class FormTestCase(TestCase):
fixtures = ['client_options.json']
@classmethod
def setUpTestData(cls):
cls.admin = User.objects.create_superuser(
username='[email protected]',
email='[email protected]',
password='test1234'
)
address = Address.objects.create(
number=123, street='De Bullion',
city='Montreal', postal_code='H3C4G5'
)
Member.objects.create(
firstname='First',
lastname='Member',
address=address
)
Member.objects.create(
firstname='Second',
lastname='Member'
)
cls.route = RouteFactory()
cls.restricted_item_1 = Restricted_item.objects.create(
name='pork', restricted_item_group='meat')
cls.restricted_item_2 = Restricted_item.objects.create(
name='soya', restricted_item_group='other')
cls.food_preparation = Option.objects.create(
name='PUREE ALL', option_group='preparation')
cls.ingredient = IngredientFactory()
cls.component = ComponentFactory()
def setUp(self):
self.client.login(username=self.admin.username, password='test1234')
def tearDown(self):
self.client.logout()
def test_acces_to_form(self):
"""Test if the form is accesible from its url"""
result = self.client.get(
reverse_lazy(
'member:member_step'
), follow=True
)
self.assertEqual(result.status_code, 200)
def test_acces_to_form_by_url_basic_info(self):
result = self.client.get(
reverse_lazy(
'member:member_step',
kwargs={'step': 'basic_information'}
),
follow=True
)
self.assertEqual(result.status_code, 200)
def test_acces_to_form_by_url_adress_information(self):
result = self.client.get(
reverse_lazy(
'member:member_step',
kwargs={'step': 'address_information'}
),
follow=True
)
self.assertEqual(result.status_code, 200)
def test_acces_to_form_by_url_referent_information(self):
result = self.client.get(
reverse_lazy(
'member:member_step',
kwargs={'step': 'referent_information'}
),
follow=True
)
self.assertEqual(result.status_code, 200)
def test_acces_to_form_by_url_payment_information(self):
result = self.client.get(
reverse_lazy(
'member:member_step',
kwargs={'step': 'payment_information'}
),
follow=True
)
self.assertEqual(result.status_code, 200)
def test_acces_to_form_by_url_dietary_restriction(self):
result = self.client.get(
reverse_lazy(
'member:member_step',
kwargs={'step': 'dietary_restriction'}
),
follow=True
)
self.assertEqual(result.status_code, 200)
def test_acces_to_form_by_url_emergency_contact(self):
result = self.client.get(
reverse_lazy(
'member:member_step',
kwargs={'step': 'emergency_contact'}
),
follow=True
)
self.assertEqual(result.status_code, 200)
def test_form_save_data_all_different_members(self):
basic_information_data = {
"client_wizard-current_step": "basic_information",
"basic_information-firstname": "User",
"basic_information-lastname": "Testing",
"basic_information-language": "fr",
"basic_information-gender": "M",
"basic_information-birthdate": "1990-12-12",
"basic_information-home_phone": "555-555-5555",
"basic_information-email": "[email protected]",
"basic_information-cell_phone": "438-000-0000",
"basic_information-alert": "Testing alert message",
"wizard_goto_step": ""
}
address_information_data = {
"client_wizard-current_step": "address_information",
"address_information-street": "555 rue clark",
"address_information-apartment": "222",
"address_information-city": "montreal",
"address_information-postal_code": "H3C2C2",
"address_information-route": self.route.id,
"address_information-latitude": 45.5343077,
"address_information-longitude": -73.620735,
"address_information-distance": 4.062611162244175,
"wizard_goto_step": "",
}
referent_information_data = {
"client_wizard-current_step": "referent_information",
"referent_information-firstname": "Referent",
"referent_information-lastname": "Testing",
"referent_information-work_information": "CLSC",
"referent_information-date": "2012-12-12",
"referent_information-referral_reason": "Testing referral reason",
"wizard_goto_step": "",
}
payment_information_data = {
"client_wizard-current_step": "payment_information",
"payment_information-same_as_client": False,
"payment_information-firstname": "Billing",
"payment_information-lastname": "Testing",
"payment_information-billing_payment_type": "check",
"payment_information-facturation": "default",
"payment_information-street": "111 rue clark",
"payment_information-apartement": "222",
"payment_information-city": "Montreal",
"payment_information-postal_code": "H2C3G4",
"wizard_goto_step": "",
}
restriction_information_data = {
"client_wizard-current_step": "dietary_restriction",
"dietary_restriction-status": "on",
"dietary_restriction-delivery_type": "O",
"dietary_restriction-meals_schedule": ['monday', 'wednesday'],
"dietary_restriction-meal_default": "1",
"dietary_restriction-restrictions":
[self.restricted_item_1.id, self.restricted_item_2.id],
"dietary_restriction-food_preparation": self.food_preparation.id,
"dietary_restriction-ingredient_to_avoid": self.ingredient.id,
"dietary_restriction-dish_to_avoid": self.component.id,
"wizard_goto_step": ""
}
emergency_contact_data = {
"client_wizard-current_step": "emergency_contact",
"emergency_contact-firstname": "Emergency",
"emergency_contact-lastname": "User",
"emergency_contact-contact_type": "Home phone",
"emergency_contact-contact_value": "555-444-5555"
}
stepsdata = [
('basic_information', basic_information_data),
('address_information', address_information_data),
('referent_information', referent_information_data),
('payment_information', payment_information_data),
('dietary_restriction', restriction_information_data),
('emergency_contact', emergency_contact_data)
]
for step, data in stepsdata:
response = self.client.post(
reverse_lazy('member:member_step', kwargs={'step': step}),
data,
follow=True
)
member = Member.objects.get(firstname="User")
self._test_assert_member_info_all_different_members(member)
client = Client.objects.get(member=member)
self._test_assert_client_info_all_different_members(client)
# Test the client view
self._test_client_detail_view_all_different_members(client)
self._test_client_view_preferences(client)
def _test_assert_member_info_all_different_members(self, member):
# test firstname and lastname
self.assertEqual(member.firstname, "User")
self.assertEqual(member.lastname, "Testing")
# test_home_phone_member:
self.assertTrue(member.home_phone.startswith('555'))
        self.assertEqual(member.email, '[email protected]')
        self.assertEqual(member.cell_phone, '438-000-0000')
# test_client_contact_type:
self.assertEqual(member.member_contact.first().type, "Home phone")
# test_client_address:
self.assertEqual(member.address.street, "555 rue clark")
self.assertEqual(member.address.postal_code, "H3C2C2")
self.assertEqual(member.address.apartment, "222")
self.assertEqual(member.address.city, "montreal")
def _test_assert_client_info_all_different_members(self, client):
# test_client_alert:
self.assertEqual(client.alert, "Testing alert message")
# test_client_languages:
self.assertEqual(client.language, "fr")
# test_client_birthdate:
self.assertEqual(client.birthdate, date(1990, 12, 12))
# test_client_gender:
self.assertEqual(client.gender, "M")
# test client delivery type
self.assertEqual(client.delivery_type, 'O')
# test_referent_name:
self.assertEqual(
client.client_referent.first().referent.firstname,
"Referent"
)
self.assertEqual(
client.client_referent.first().referent.lastname,
"Testing"
)
# test_referent_work_information:
self.assertEqual(
client.client_referent.first().work_information,
"CLSC"
)
# test_referral_date(self):
self.assertEqual(
client.client_referent.first().date,
date(2012, 12, 12)
)
# test_referral_reason:
self.assertEqual(
client.client_referent.first().referral_reason,
"Testing referral reason"
)
# test_billing_name:
self.assertEqual(client.billing_member.firstname, "Billing")
self.assertEqual(client.billing_member.lastname, "Testing")
# test_billing_type:
self.assertEqual(client.billing_payment_type, "check")
# test_billing_address:
self.assertEqual(client.billing_member.address.city, "Montreal")
self.assertEqual(client.billing_member.address.street, "111 rue clark")
self.assertEqual(client.billing_member.address.postal_code, "H2C3G4")
# test_billing_rate_type:
self.assertEqual(client.rate_type, 'default')
# test_emergency_contact_name:
self.assertEqual(client.emergency_contact.firstname, "Emergency")
self.assertEqual(client.emergency_contact.lastname, "User")
# test_emergency_contact_type:
self.assertEqual(
client.emergency_contact.member_contact.first().type,
"Home phone"
)
# test_emergency_contact_value:
self.assertEqual(
client.emergency_contact.member_contact.first().value,
"555-444-5555"
)
# Test meals schedule
self.assertEqual(client.simple_meals_schedule, ['monday', 'wednesday'])
# test_restrictions
restriction_1 = Restriction.objects.get(
client=client, restricted_item=self.restricted_item_1)
restriction_2 = Restriction.objects.get(
client=client, restricted_item=self.restricted_item_2)
self.assertTrue(self.restricted_item_1.name in str(restriction_1))
self.assertTrue(self.restricted_item_2.name in str(restriction_2))
# Test food preparation
food_preparation = Client_option.objects.get(
client=client,
option=self.food_preparation
)
self.assertTrue(self.food_preparation.name in str(food_preparation))
# Test for ingredients to avoid
self.assertTrue(self.ingredient in set(client.ingredients_to_avoid.all())) # noqa
# Test for components to avoid
self.assertTrue(self.component in set(client.components_to_avoid.all())) # noqa
"""
Test that the meals preferences are properly displayed.
"""
def _test_client_view_preferences(self, client):
resp = self.client.get(
reverse_lazy('member:client_allergies', kwargs={'pk': client.id})
)
# self.assertContains(resp, client.get_status_display)
self.assertContains(resp, self.restricted_item_1)
self.assertContains(resp, self.restricted_item_2)
self.assertContains(resp, self.food_preparation)
self.assertContains(resp, self.ingredient.name)
self.assertContains(resp, self.component.name)
def _test_client_detail_view_all_different_members(self, client):
response = self.client.get(
reverse_lazy('member:client_information', kwargs={'pk': client.id})
)
self.assertTrue(b"User" in response.content)
self.assertTrue(b"Testing" in response.content)
self.assertTrue(b"Home phone" in response.content)
self.assertTrue(b"555 rue clark" in response.content)
self.assertTrue(b"H3C2C2" in response.content)
self.assertTrue(b"montreal" in response.content)
self.assertTrue(b"Testing alert message" in response.content)
self.assertTrue(b"555-444-5555" in response.content)
def test_form_save_data_same_members(self):
basic_information_data = {
"client_wizard-current_step": "basic_information",
"basic_information-firstname": "Same",
"basic_information-lastname": "User",
"basic_information-language": "fr",
"basic_information-gender": "M",
"basic_information-birthdate": "1986-06-06",
"basic_information-home_phone": "514-868-8686",
"basic_information-cell_phone": "438-000-0000",
"basic_information-email": "[email protected]",
"basic_information-alert": "Testing alert message",
"wizard_goto_step": ""
}
address_information_data = {
"client_wizard-current_step": "address_information",
"address_information-street": "8686 rue clark",
"address_information-apartment": "86",
"address_information-city": "Montreal",
"address_information-postal_code": "H8C6C8",
"address_information-route": self.route.id,
"address_information-latitude": 45.5343077,
"address_information-longitude": -73.620735,
"address_information-distance": 4.062611162244175,
"wizard_goto_step": "",
}
referent_information_data = {
"client_wizard-current_step": "referent_information",
"referent_information-firstname": "Same",
"referent_information-lastname": "User",
"referent_information-work_information": "CLSC",
"referent_information-date": "2012-06-06",
"referent_information-referral_reason": "Testing referral reason",
"wizard_goto_step": "",
}
payment_information_data = {
"client_wizard-current_step": "payment_information",
"payment_information-same_as_client": True,
"payment_information-billing_payment_type": "check",
"payment_information-facturation": "default",
"address_information-latitude": 0.0,
"address_information-longitude": 0.0,
"address_information-distance": 0.0,
"wizard_goto_step": "",
}
restriction_information_data = {
"client_wizard-current_step": "dietary_restriction",
"dietary_restriction-status": "on",
"dietary_restriction-delivery_type": "O",
"dietary_restriction-meals_schedule": "monday",
"dietary_restriction-meal_default": "1",
"wizard_goto_step": ""
}
emergency_contact_data = {
"client_wizard-current_step": "emergency_contact",
"emergency_contact-firstname": "Same",
"emergency_contact-lastname": "User",
"emergency_contact-contact_type": "Home phone",
"emergency_contact-contact_value": "514-868-8686"
}
stepsdata = [
('basic_information', basic_information_data),
('address_information', address_information_data),
('referent_information', referent_information_data),
('payment_information', payment_information_data),
('dietary_restriction', restriction_information_data),
('emergency_contact', emergency_contact_data)
]
for step, data in stepsdata:
self.client.post(
reverse_lazy('member:member_step', kwargs={'step': step}),
data,
follow=True
)
member = Member.objects.get(firstname="Same")
self._test_assert_member_info_same_members(member)
client = Client.objects.get(member=member)
self._test_assert_client_info_same_members(client)
self._test_client_detail_view_same_members(client)
self._test_client_list_view_same_members()
def _test_assert_member_info_same_members(self, member):
# test firstname and lastname
self.assertEqual(member.firstname, "Same")
self.assertEqual(member.lastname, "User")
# test_home_phone_member:
self.assertTrue(member.home_phone.startswith('514'))
        self.assertEqual(member.email, '[email protected]')
        self.assertEqual(member.cell_phone, '438-000-0000')
# test_client_contact_type:
self.assertEqual(member.member_contact.first().type, "Home phone")
# test_client_address:
self.assertEqual(member.address.street, "8686 rue clark")
self.assertEqual(member.address.postal_code, "H8C6C8")
self.assertEqual(member.address.apartment, "86")
self.assertEqual(member.address.city, "Montreal")
def _test_assert_client_info_same_members(self, client):
# test_client_alert:
self.assertEqual(client.alert, "Testing alert message")
# test_client_languages:
self.assertEqual(client.language, "fr")
# test_client_birthdate:
self.assertEqual(client.birthdate, date(1986, 6, 6))
# test_client_gender:
self.assertEqual(client.gender, "M")
# test client delivery type
self.assertEqual(client.delivery_type, 'O')
# test referent member is emergency member
self.assertEqual(
client.client_referent.first().referent.id,
client.emergency_contact.id
)
# test_referent_name:
self.assertEqual(
client.client_referent.first().referent.firstname,
"Same"
)
self.assertEqual(
client.client_referent.first().referent.lastname,
"User"
)
# test_referent_work_information:
self.assertEqual(
client.client_referent.first().work_information,
"CLSC"
)
# test_referral_date(self):
self.assertEqual(
client.client_referent.first().date,
date(2012, 6, 6)
)
# test_referral_reason:
self.assertEqual(
client.client_referent.first().referral_reason,
"Testing referral reason"
)
# test client member is billing member
self.assertEqual(client.member.id, client.billing_member.id)
# test_billing_name:
self.assertEqual(client.billing_member.firstname, "Same")
self.assertEqual(client.billing_member.lastname, "User")
# test_billing_type:
self.assertEqual(client.billing_payment_type, "check")
# test_billing_address:
self.assertEqual(client.billing_member.address.city, "Montreal")
self.assertEqual(
client.billing_member.address.street,
"8686 rue clark"
)
self.assertEqual(client.billing_member.address.postal_code, "H8C6C8")
# test_billing_rate_type:
self.assertEqual(client.rate_type, 'default')
# test_emergency_contact_name:
self.assertEqual(client.emergency_contact.firstname, "Same")
self.assertEqual(client.emergency_contact.lastname, "User")
# test_emergency_contact_type:
self.assertEqual(
client.emergency_contact.member_contact.first().type,
"Home phone"
)
# test_emergency_contact_value:
self.assertEqual(
client.emergency_contact.member_contact.first().value,
"514-868-8686"
)
def _test_client_detail_view_same_members(self, client):
response = self.client.get(
reverse_lazy('member:client_information', kwargs={'pk': client.id})
)
self.assertTrue(b"User" in response.content)
self.assertTrue(b"Same" in response.content)
self.assertTrue(b"Home phone" in response.content)
self.assertTrue(b"8686 rue clark" in response.content)
self.assertTrue(b"H8C6C8" in response.content)
self.assertTrue(b"Montreal" in response.content)
self.assertTrue(b"Testing alert message" in response.content)
self.assertTrue(b"514-868-8686" in response.content)
def _test_client_list_view_same_members(self):
response = self.client.get(reverse_lazy('member:list'))
self.assertTrue(b"User" in response.content)
self.assertTrue(b"Same" in response.content)
self.assertTrue(b"30 years old" in response.content)
self.assertTrue(b"Active" in response.content)
self.assertTrue(b"Ongoing" in response.content)
self.assertTrue(b"514-868-8686" in response.content)
def test_form_validate_data(self):
"""Test all the step of the form with and without wrong data"""
self._test_basic_information_with_errors()
self._test_basic_information_without_errors()
self._test_address_information_with_errors()
self._test_address_information_without_errors()
self._test_referent_information_with_errors()
self._test_referent_information_without_errors()
self._test_payment_information_with_errors()
self._test_payment_information_without_errors()
self._test_step_dietary_restriction_with_errors()
self._test_step_dietary_restriction_without_errors()
self._test_step_emergency_contact_with_errors()
self._test_step_emergency_contact_without_errors()
def _test_basic_information_with_errors(self):
# Data for the basic_information step with errors.
basic_information_data_with_error = {
"client_wizard-current_step": "basic_information",
"basic_information-firstname": "User",
"basic_information-lastname": "",
"basic_information-language": "fr",
"basic_information-gender": "M",
"basic_information-birthdate": "",
"basic_information-alert": "",
"wizard_goto_step": ""
}
# Send the data to the form.
error_response = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "basic_information"}
),
basic_information_data_with_error,
follow=True
)
        # The response is the same form with the error messages.
self.assertTrue(b'Required information' in error_response.content)
self.assertTrue(b'lastname' in error_response.content)
self.assertTrue(b'birthdate' in error_response.content)
self.assertTrue(b'This field is required' in error_response.content)
def _test_basic_information_without_errors(self):
# Data for the basic_information step without errors.
basic_information_data = {
"client_wizard-current_step": "basic_info",
"basic_information-firstname": "User",
"basic_information-lastname": "Testing",
"basic_information-language": "fr",
"basic_information-gender": "M",
"basic_information-birthdate": "1990-12-12",
"basic_information-home_phone": "555-555-5555",
"basic_information-alert": "Testing alert message",
"wizard_goto_step": ""
}
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "basic_information"}
),
basic_information_data,
follow=True
)
        # The response is the next step of the form with no error messages.
self.assertTrue(b'Required information' not in response.content)
self.assertTrue(b'gender' not in response.content)
self.assertTrue(b'contact_value' not in response.content)
self.assertTrue(b'This field is required' not in response.content)
# HTML from the next step
self.assertTrue(b'street' in response.content)
def _test_address_information_with_errors(self):
# Data for the address_information step with errors.
address_information_data_with_error = {
"client_wizard-current_step": "address_information",
"address_information-street": "",
"address_information-apartment": "",
"address_information-city": "",
"address_information-postal_code": "",
"wizard_goto_step": "",
}
# Send the data to the form.
response_error = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "address_information"}
),
address_information_data_with_error,
follow=True
)
        # The response is the same form with the error messages.
self.assertTrue(b'Required information' in response_error.content)
self.assertTrue(b'street' in response_error.content)
self.assertTrue(b'apartment' in response_error.content)
self.assertTrue(b'city' in response_error.content)
self.assertTrue(b'This field is required' in response_error.content)
def _test_address_information_without_errors(self):
# Data for the address_information step without errors.
address_information_data = {
"client_wizard-current_step": "address_information",
"address_information-street": "555 rue clark",
"address_information-apartment": "222",
"address_information-city": "montreal",
"address_information-postal_code": "H3C2C2",
"address_information-route": self.route.id,
"address_information-latitude": 45.5343077,
"address_information-longitude": -73.620735,
"address_information-distance": 4.062611162244175,
"wizard_goto_step": "",
}
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "address_information"}),
address_information_data,
follow=True
)
        # The response is the next step of the form with no error messages.
self.assertTrue(b'Required information' not in response.content)
# self.assertTrue(b'street' not in response.content)
# self.assertTrue(b'apartment' not in response.content)
self.assertTrue(b'This field is required' not in response.content)
# HTML from the next step
self.assertTrue(b'work_information' in response.content)
def _test_referent_information_with_errors(self):
# Data for the address_information step with errors.
referent_information_data_with_error = {
"client_wizard-current_step": "referent_information",
"referent_information-member": "",
"referent_information-firstname": "",
"referent_information-lastname": "",
"referent_information-work_information": "",
"referent_information-date": "",
"referent_information-referral_reason": "",
"wizard_goto_step": "",
}
# Send the data to the form.
response_error = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "referent_information"}
),
referent_information_data_with_error,
follow=True
)
        # Validate that the response is the same form with the error messages.
self.assertTrue(b'Required information' in response_error.content)
self.assertTrue(b'member' in response_error.content)
self.assertTrue(b'work_information' in response_error.content)
self.assertTrue(b'This field is required' in response_error.content)
referent_information_data_with_error = {
"client_wizard-current_step": "referent_information",
"referent_information-member": "[0] NotValid Member",
"referent_information-firstname": "",
"referent_information-lastname": "",
"referent_information-work_information": "CLSC",
"referent_information-date": "2012-12-12",
"referent_information-referral_reason": "Testing referral reason",
"wizard_goto_step": "",
}
# Send the data to the form.
response_error = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "referent_information"}
),
referent_information_data_with_error,
follow=True
)
        # Validate that the response is the same form with the error messages.
self.assertTrue(b'Required information' in response_error.content)
self.assertTrue(b'member' in response_error.content)
self.assertTrue(b'work_information' in response_error.content)
self.assertTrue(b'Not a valid member' in response_error.content)
def _test_referent_information_without_errors(self):
pk = Member.objects.get(firstname="First").id
referent_information_data = {
"client_wizard-current_step": "referent_information",
"referent_information-member": "[{}] First Member".format(pk),
"referent_information-firstname": "",
"referent_information-lastname": "",
"referent_information-work_information": "CLSC",
"referent_information-date": "2012-12-12",
"referent_information-referral_reason": "Testing referral reason",
"wizard_goto_step": "",
}
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "referent_information"}
),
referent_information_data,
follow=True
)
        # The response is the next step of the form with no error messages.
self.assertTrue(b'Required information' not in response.content)
self.assertTrue(b'work_information' not in response.content)
self.assertTrue(b'This field is required' not in response.content)
# HTML from the next step
self.assertTrue(b'billing_payment_type' in response.content)
def _test_payment_information_with_errors(self):
# Data for the address_information step with errors.
pk = Member.objects.get(firstname="Second").id
payment_information_data_with_error = {
"client_wizard-current_step": "payment_information",
"payment_information-member": "[{}] Second Member".format(pk),
"payment_information-firstname": "",
"payment_information-lastname": "",
"payment_information-billing_payment_type": "check",
"payment_information-facturation": "default",
"payment_information-street": "",
"payment_information-apartement": "",
"payment_information-city": "",
"payment_information-postal_code": "",
"wizard_goto_step": "",
}
# Send the data to the form.
response_error = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "payment_information"}
),
payment_information_data_with_error,
follow=True
)
        # Validate that the response is the same form with the error messages.
self.assertTrue(b'Required information' in response_error.content)
self.assertTrue(b'billing_payment_type' in response_error.content)
self.assertTrue(b'facturation' in response_error.content)
self.assertTrue(
b'member has not a valid address'
in response_error.content
)
# Data for the address_information step with errors.
payment_information_data_with_error = {
"client_wizard-current_step": "payment_information",
"payment_information-member": "",
"payment_information-firstname": "Third",
"payment_information-lastname": "Member",
"payment_information-billing_payment_type": "check",
"payment_information-facturation": "default",
"payment_information-street": "",
"payment_information-apartement": "",
"payment_information-city": "",
"payment_information-postal_code": "",
"wizard_goto_step": "",
}
# Send the data to the form.
response_error = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "payment_information"}
),
payment_information_data_with_error,
follow=True
)
        # Validate that the response is the same form with the error messages.
self.assertTrue(b'Required information' in response_error.content)
self.assertTrue(b'street' in response_error.content)
self.assertTrue(b'city' in response_error.content)
self.assertTrue(b'postal_code' in response_error.content)
self.assertTrue(b'This field is required' in response_error.content)
def _test_payment_information_without_errors(self):
# Data for the address_information step without errors.
pk = Member.objects.get(firstname="First").id
payment_information_data = {
"client_wizard-current_step": "payment_information",
"payment_information-member": "[{}] First Member".format(pk),
"payment_information-firstname": "",
"payment_information-lastname": "",
"payment_information-billing_payment_type": "check",
"payment_information-facturation": "default",
"payment_information-street": "",
"payment_information-apartement": "",
"payment_information-city": "",
"payment_information-postal_code": "",
"wizard_goto_step": "",
}
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "payment_information"}
),
payment_information_data,
follow=True
)
        # The response is the next step of the form with no error messages.
self.assertTrue(b'Required information' not in response.content)
self.assertTrue(b'billing_payment_type' not in response.content)
self.assertTrue(b'facturation' not in response.content)
self.assertTrue(
b'member has not a valid address' not in response.content
)
# HTML from the next step
self.assertTrue(b'status' in response.content)
def _test_step_dietary_restriction_with_errors(self):
# Data for the address_information step with errors.
restriction_information_data_with_error = {
"client_wizard-current_step": "dietary_restriction",
"dietary_restriction-status": "",
"dietary_restriction-delivery_type": "",
"dietary_restriction-meals_schedule": "",
"dietary_restriction-meal_default": "",
"wizard_goto_step": ""
}
# Send the data to the form.
response_error = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "dietary_restriction"}
),
restriction_information_data_with_error,
follow=True
)
        # Validate that the response is the same form with the error messages.
self.assertTrue(b'Required information' in response_error.content)
self.assertTrue(b'status' in response_error.content)
self.assertTrue(b'delivery_type' in response_error.content)
self.assertTrue(b'meals_schedule' in response_error.content)
def _test_step_dietary_restriction_without_errors(self):
# Data for the address_information step without errors.
restriction_information_data = {
"client_wizard-current_step": "dietary_restriction",
"dietary_restriction-status": "on",
"dietary_restriction-delivery_type": "O",
"dietary_restriction-meals_schedule": "monday",
"dietary_restriction-meal_default": "1",
"wizard_goto_step": ""
}
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "dietary_restriction"}
),
restriction_information_data,
follow=True
)
        # The response is the next step of the form with no error messages.
self.assertTrue(b'Required information' not in response.content)
self.assertTrue(b'status' not in response.content)
self.assertTrue(b'Delivery' not in response.content)
self.assertTrue(b'Food preference' not in response.content)
# HTML from the next step
self.assertTrue(b'contact_type' in response.content)
def _test_step_emergency_contact_with_errors(self):
# Data for the address_information step with errors.
emergency_contact_data_with_error = {
"client_wizard-current_step": "emergency_contact",
"emergency_contact-firstname": "",
"emergency_contact-lastname": "",
"emergency_contact-contact_type": "Home phone",
"emergency_contact-contact_value": ""
}
# Send the data to the form.
response_error = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "emergency_contact"}
),
emergency_contact_data_with_error,
follow=True
)
        # The response is the same form with the error messages.
self.assertTrue(b'Required information' in response_error.content)
self.assertTrue(b'contact_type' in response_error.content)
self.assertTrue(b'contact_value' in response_error.content)
def _test_step_emergency_contact_without_errors(self):
# Data for the address_information step without errors.
pk = Member.objects.get(firstname="First").id
emergency_contact_data = {
"client_wizard-current_step": "emergency_contact",
"emergency_contact-member": "[{}] First Member".format(pk),
"emergency_contact-firstname": "Emergency",
"emergency_contact-lastname": "User",
"emergency_contact-contact_type": "Home phone",
"emergency_contact-contact_value": "555-444-5555"
}
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_step',
kwargs={'step': "emergency_contact"}
),
emergency_contact_data,
follow=True
)
        # The response is the next step of the form with no error messages.
self.assertTrue(b'Required information' not in response.content)
self.assertTrue(b'contact_type' not in response.content)
self.assertTrue(b'contact_value' not in response.content)
self.assertTrue(b'Clients' in response.content)
self.assertRedirects(response, reverse('member:list'))
class MemberSearchTestCase(TestCase):
@classmethod
def setUpTestData(cls):
member = Member.objects.create(
firstname='Katrina', lastname='Heide')
Contact.objects.create(
type='Home phone', value='514-456-7890', member=member)
def test_search_member_by_firstname(self):
"""
        A member must be found if the search uses
        at least 3 characters of their name
"""
result = self.client.get(
reverse_lazy('member:search') + '?name=Heid',
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
follow=True
)
self.assertTrue(b'Katrina Heide' in result.content)
def test_search_member_by_lastname(self):
"""
        A member must be found if the search uses
        at least 3 characters of their name
"""
result = self.client.get(
reverse_lazy('member:search') + '?name=Katri',
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
follow=True
)
self.assertTrue(b'Katrina Heide' in result.content)
class ClientStatusUpdateAndScheduleCase(TestCase):
fixtures = ['routes.json']
def setUp(self):
self.active_client = ClientFactory(status=Client.ACTIVE)
self.stop_client = ClientFactory(status=Client.STOPNOCONTACT)
def test_scheduled_change_is_valid(self):
"""
A scheduled change is only valid if the client status matches with
the status_from attribute of the schedules change.
"""
scheduled_change = ClientScheduledStatusFactory(
client=self.active_client,
change_date=date.today(),
status_from=Client.ACTIVE,
status_to=Client.PAUSED,
)
self.assertTrue(scheduled_change.is_valid())
def test_scheduled_change_is_invalid(self):
"""
A scheduled change is only valid if the client status matches with
the status_from attribute of the schedules change.
"""
scheduled_change = ClientScheduledStatusFactory(
client=self.stop_client,
change_date=date.today(),
status_from=Client.ACTIVE,
status_to=Client.PAUSED
)
self.assertFalse(scheduled_change.is_valid())
def test_scheduled_change_process_success(self):
scheduled_change = ClientScheduledStatusFactory(
client=self.active_client,
change_date=date.today(),
status_from=Client.ACTIVE,
status_to=Client.PAUSED
)
self.assertTrue(scheduled_change.process())
self.assertEqual(
scheduled_change.operation_status,
ClientScheduledStatus.PROCESSED)
self.assertEqual(self.active_client.status, Client.PAUSED)
self.assertEqual(self.active_client.notes.count(), 1)
self.assertEqual(self.active_client.notes.first().note,
scheduled_change.__str__())
def test_scheduled_change_process_failed(self):
scheduled_change = ClientScheduledStatusFactory(
client=self.stop_client,
change_date=date.today(),
status_from=Client.ACTIVE,
status_to=Client.PAUSED
)
self.assertFalse(scheduled_change.process())
self.assertEqual(
scheduled_change.operation_status,
ClientScheduledStatus.ERROR)
self.assertEqual(self.stop_client.status, Client.STOPNOCONTACT)
def test_command_process_scheduled_status_idle(self):
ClientScheduledStatusFactory.create_batch(
10,
change_date=date.today(),
operation_status=ClientScheduledStatus.PROCESSED)
out = StringIO()
call_command('processscheduledstatuschange', stdout=out)
self.assertNotIn('status updated', out.getvalue())
def test_command_process_scheduled_status(self):
scheduled_change = ClientScheduledStatusFactory(
client=self.active_client,
change_date=date.today(),
status_from=Client.ACTIVE,
status_to=Client.PAUSED
)
out = StringIO()
call_command('processscheduledstatuschange', stdout=out)
self.assertIn('client «{}» status updated from {} to {}'.format(
self.active_client.member,
scheduled_change.get_status_from_display(),
scheduled_change.get_status_to_display()
), out.getvalue())
# Reload
scheduled_change = ClientScheduledStatus.objects.get(
id=scheduled_change.id)
self.assertEqual(
scheduled_change.operation_status,
ClientScheduledStatus.PROCESSED)
def test_view_client_status_update_empty_dates(self):
admin = User.objects.create_superuser(
username='[email protected]',
email='[email protected]',
password='test1234'
)
self.client.login(username=admin.username, password='test1234')
data = {
'client': self.active_client.id,
'status_from': self.active_client.status,
'status_to': Client.PAUSED,
'reason': '',
}
response = self.client.post(
reverse_lazy('member:clientStatusScheduler',
kwargs={'pk': self.active_client.id}),
data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
follow=True
)
self.assertTrue(b'This field is required' in response.content)
def test_view_client_status_update_future_date(self):
admin = User.objects.create_superuser(
username='[email protected]',
email='[email protected]',
password='test1234'
)
self.client.login(username=admin.username, password='test1234')
data = {
'client': self.active_client.id,
'status_from': self.active_client.status,
'status_to': Client.PAUSED,
'reason': 'Holidays',
'change_date': '2018-09-23',
'end_date': '2018-10-02',
}
response = self.client.post(
reverse_lazy('member:clientStatusScheduler',
kwargs={'pk': self.active_client.id}),
data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
follow=True
)
client = Client.objects.get(pk=self.active_client.id)
scheduled_change_start = ClientScheduledStatus.objects.get(
client=client.id, change_date='2018-09-23')
scheduled_change_end = ClientScheduledStatus.objects.get(
client=client.id, change_date='2018-10-02')
self.assertEqual(scheduled_change_start.operation_status,
ClientScheduledStatus.TOBEPROCESSED)
self.assertEqual(scheduled_change_start.status_from,
self.active_client.status)
self.assertEqual(scheduled_change_start.status_to, Client.PAUSED)
self.assertEqual(scheduled_change_start.reason, 'Holidays')
self.assertEqual(scheduled_change_start.linked_scheduled_status, None)
self.assertEqual(scheduled_change_end.operation_status,
ClientScheduledStatus.TOBEPROCESSED)
self.assertEqual(scheduled_change_end.status_from, Client.PAUSED)
self.assertEqual(scheduled_change_end.status_to,
self.active_client.status)
self.assertEqual(scheduled_change_end.reason, 'Holidays')
self.assertEqual(scheduled_change_end.linked_scheduled_status,
scheduled_change_start)
def test_view_client_status_update_no_end_date(self):
admin = User.objects.create_superuser(
username='[email protected]',
email='[email protected]',
password='test1234'
)
self.client.login(username=admin.username, password='test1234')
data = {
'client': self.active_client.id,
'status_from': self.active_client.status,
'status_to': Client.STOPCONTACT,
'reason': 'Holidays',
'change_date': '2019-09-23',
'end_date': '',
}
response = self.client.post(
reverse_lazy('member:clientStatusScheduler',
kwargs={'pk': self.active_client.id}),
data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
follow=True,
)
client = Client.objects.get(pk=self.active_client.id)
scheduled_change = ClientScheduledStatus.objects.get(
client=client.id)
self.assertEqual(scheduled_change.operation_status,
ClientScheduledStatus.TOBEPROCESSED)
self.assertEqual(scheduled_change.status_from,
self.active_client.status)
self.assertEqual(scheduled_change.status_to, Client.STOPCONTACT)
self.assertEqual(scheduled_change.reason, 'Holidays')
self.assertEqual(scheduled_change.linked_scheduled_status, None)
class ClientUpdateBasicInformation(TestCase):
fixtures = ['routes.json']
"""
Login as administrator.
"""
def login_as_admin(self):
admin = User.objects.create_superuser(
username='[email protected]',
email='[email protected]',
password='test1234'
)
self.client.login(username=admin.username, password='test1234')
"""
Test validation form.
"""
def test_form_validation(self):
client = ClientFactory()
form_data = {
'firstname': 'John'
}
form = ClientBasicInformation(data=form_data)
self.assertFalse(form.is_valid())
form = ClientBasicInformation(data=load_initial_data(client))
self.assertTrue(form.is_valid())
"""
Test the update basic information form.
"""
def test_update_basic_information(self):
client = ClientFactory()
# Load initial data related to the client
data = load_initial_data(client)
# Update some data
data['firstname'] = 'John'
data['lastname'] = 'Doe'
data['birthdate'] = '1923-03-21'
# Login as admin
self.login_as_admin()
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_update_basic_information',
kwargs={'client_id': client.id}
),
data,
follow=True
)
# Reload client data as it should have been changed in the database
client = Client.objects.get(id=client.id)
# Test that values have been updated
self.assertEqual(str(client), 'John Doe')
self.assertEqual(client.birthdate, date(1923, 3, 21))
# Test that old values are still there
self.assertEqual(client.alert, data.get('alert'))
self.assertEqual(client.gender, data.get('gender'))
self.assertEqual(client.language, data.get('language'))
class ClientUpdateAddressInformation(TestCase):
fixtures = ['routes.json']
"""
Login as administrator.
"""
def login_as_admin(self):
admin = User.objects.create_superuser(
username='[email protected]',
email='[email protected]',
password='test1234'
)
self.client.login(username=admin.username, password='test1234')
"""
Test validation form.
"""
def test_form_validation(self):
client = ClientFactory()
form_data = {
'street': '111 rue Roy',
}
form = ClientAddressInformation(data=form_data)
self.assertFalse(form.is_valid())
form = ClientAddressInformation(data=load_initial_data(client))
self.assertTrue(form.is_valid())
"""
Test the update basic information form.
"""
def test_update_address_information(self):
client = ClientFactory()
# Load initial data related to the client
data = load_initial_data(client)
# Update some data
data['street'] = '111 rue Roy Est'
# Login as admin
self.login_as_admin()
# Send the data to the form.
response = self.client.post(
reverse_lazy(
'member:member_update_address_information',
kwargs={'client_id': client.id}
),
data,
follow=True
)
# Reload client data as it should have been changed in the database
client = Client.objects.get(id=client.id)
self.assertEqual(client.member.address.street, '111 rue Roy Est')
self.assertEqual(client.member.address.city, data.get('city'))
self.assertEqual(client.route.id, data.get('route'))
self.assertEqual(client.delivery_note, data.get('delivery_note'))
self.assertEqual(str(client.member.address.latitude),
data.get('latitude'))
self.assertEqual(str(client.member.address.longitude),
data.get('longitude'))
| agpl-3.0 |
diofant/diofant | diofant/tests/series/test_order.py | 1 | 14204 | import pytest
from diofant import (Add, Derivative, Function, I, Integer, Integral, O,
Rational, Symbol, conjugate, cos, digamma, exp, expand,
ln, log, nan, oo, pi, sin, sqrt, symbols, transpose)
from diofant.abc import w, x, y, z
__all__ = ()
def test_caching_bug():
# needs to be the first test, so that all caches are clean
# cache it
O(w)
# and test that this won't raise an exception
O(w**(-1/x/log(3)*log(5)), w)
def test_free_symbols():
assert O(1).free_symbols == set()
assert O(x).free_symbols == {x}
assert O(1, x).free_symbols == {x}
assert O(x*y).free_symbols == {x, y}
assert O(x, x, y).free_symbols == {x, y}
def test_simple_1():
o = Integer(0)
assert O(2*x) == O(x)
assert O(x)*3 == O(x)
assert -28*O(x) == O(x)
assert O(O(x)) == O(x)
assert O(O(x), y) == O(O(x), x, y)
assert O(-23) == O(1)
assert O(exp(x)) == O(1, x)
assert O(exp(1/x)).expr == exp(1/x)
assert O(x*exp(1/x)).expr == x*exp(1/x)
assert O(x**(o/3)).expr == x**(o/3)
assert O(x**(5*o/3)).expr == x**(5*o/3)
assert O(x**2 + x + y, x) == O(1, x)
assert O(x**2 + x + y, y) == O(1, y)
pytest.raises(ValueError, lambda: O(exp(x), x, x))
pytest.raises(TypeError, lambda: O(x, 2 - x))
pytest.raises(ValueError, lambda: O(x, (x, x**2)))
assert O(x**2).is_commutative
# issue sympy/sympy#9192
assert O(1)*O(1) == O(1)
assert O(1)**O(1) == O(1)
def test_simple_2():
assert O(2*x)*x == O(x**2)
assert O(2*x)/x == O(1, x)
assert O(2*x)*x*exp(1/x) == O(x**2*exp(1/x))
assert (O(2*x)*x*exp(1/x)/ln(x)**3).expr == x**2*exp(1/x)*ln(x)**-3
def test_simple_3():
assert O(x) + x == O(x)
assert O(x) + 2 == 2 + O(x)
assert O(x) + x**2 == O(x)
assert O(x) + 1/x == 1/x + O(x)
assert O(1/x) + 1/x**2 == 1/x**2 + O(1/x)
assert O(x) + exp(1/x) == O(x) + exp(1/x)
def test_simple_4():
assert O(x)**2 == O(x**2)
def test_simple_5():
assert O(x) + O(x**2) == O(x)
assert O(x) + O(x**-2) == O(x**-2)
assert O(x) + O(1/x) == O(1/x)
def test_simple_6():
assert O(x) - O(x) == O(x)
assert O(x) + O(1) == O(1)
assert O(x) + O(x**2) == O(x)
assert O(1/x) + O(1) == O(1/x)
assert O(x) + O(exp(1/x)) == O(exp(1/x))
assert O(x**3) + O(exp(2/x)) == O(exp(2/x))
assert O(x**-3) + O(exp(2/x)) == O(exp(2/x))
def test_simple_7():
assert 1 + O(1) == O(1)
assert 2 + O(1) == O(1)
assert x + O(1) == O(1)
assert 1/x + O(1) == 1/x + O(1)
def test_simple_8():
assert O(sqrt(-x)) == O(sqrt(x))
assert O(x**2*sqrt(x)) == O(x**Rational(5, 2))
assert O(x**3*sqrt(-(-x)**3)) == O(x**Rational(9, 2))
assert O(x**Rational(3, 2)*sqrt((-x)**3)) == O(x**3)
assert O(x*(-2*x)**(I/2)) == O(x*(-x)**(I/2))
assert O(sqrt((-x)**I)) == O(sqrt((-x)**I), evaluate=False)
assert O(sqrt(-x**I)) == O(sqrt(-x**I), evaluate=False)
def test_as_expr_variables():
assert O(x).as_expr_variables(None) == (x, ((x, 0),))
assert O(x).as_expr_variables((((x, 0),))) == (x, ((x, 0),))
assert O(y).as_expr_variables(((x, 0),)) == (y, ((x, 0), (y, 0)))
assert O(y).as_expr_variables(((x, 0), (y, 0))) == (y, ((x, 0), (y, 0)))
def test_contains():
assert O(1, x).contains(O(1, x))
assert O(1, x).contains(O(1))
assert O(1).contains(O(1, x)) is False
assert O(x).contains(O(x))
assert O(x).contains(O(x**2))
assert not O(x**2).contains(O(x))
assert not O(x).contains(O(1/x))
assert not O(1/x).contains(O(exp(1/x)))
assert not O(x).contains(O(exp(1/x)))
assert O(1/x).contains(O(x))
assert O(exp(1/x)).contains(O(x))
assert O(exp(1/x)).contains(O(1/x))
assert O(exp(1/x)).contains(O(exp(1/x)))
assert O(exp(2/x)).contains(O(exp(1/x)))
assert not O(exp(1/x)).contains(O(exp(2/x)))
assert O(x).contains(O(y)) is None
assert O(x).contains(O(y*x))
assert O(y*x).contains(O(x))
assert O(y).contains(O(x*y))
assert O(x).contains(O(y**2*x))
assert O(x*y**2).contains(O(x**2*y)) is None
assert O(x**2*y).contains(O(x*y**2)) is None
assert O(sin(1/x**2)).contains(O(cos(1/x**2))) is None
assert O(cos(1/x**2)).contains(O(sin(1/x**2))) is None
q = Symbol('q', positive=True)
assert O(x**8).contains(x**(q + 7)) is None
assert O(x**8).contains(x**(q + 8))
assert O(1, x) not in O(1)
assert O(1) in O(1, x)
pytest.raises(TypeError, lambda: O(x*y**2) in O(x**2*y))
def test_add_1():
assert O(x + x) == O(x)
assert O(3*x - 2*x**2) == O(x)
assert O(1 + x) == O(1, x)
assert O(1 + 1/x) == O(1/x)
assert O(ln(x) + 1/ln(x)) == O(ln(x))
assert O(exp(1/x) + x) == O(exp(1/x))
assert O(exp(1/x) + 1/x**20) == O(exp(1/x))
def test_ln_args():
assert O(log(x)) + O(log(2*x)) == O(log(x))
assert O(log(x)) + O(log(x**3)) == O(log(x))
assert O(log(x*y)) + O(log(x) + log(y)) == O(log(x*y))
def test_multivar_0():
assert O(x*y).expr == x*y
assert O(x*y**2).expr == x*y**2
assert O(x*y, x).expr == x
assert O(x*y**2, y).expr == y**2
assert O(x*y*z).expr == x*y*z
assert O(x/y).expr == x/y
assert O(x*exp(1/y)).expr == x*exp(1/y)
assert O(exp(x)*exp(1/y)).expr == exp(1/y)
def test_multivar_0a():
assert O(exp(1/x)*exp(1/y)).expr == exp(1/x + 1/y)
def test_multivar_1():
assert O(x + y).expr == x + y
assert O(x + 2*y).expr == x + y
assert (O(x + y) + x).expr == (x + y)
assert (O(x + y) + x**2) == O(x + y)
assert (O(x + y) + 1/x) == 1/x + O(x + y)
assert O(x**2 + y*x).expr == x**2 + y*x
def test_multivar_2():
assert O(x**2*y + y**2*x, x, y).expr == x**2*y + y**2*x
def test_multivar_mul_1():
assert O(x + y)*x == O(x**2 + y*x, x, y)
def test_multivar_3():
assert (O(x) + O(y)).args in [
(O(x), O(y)),
(O(y), O(x))]
assert O(x) + O(y) + O(x + y) == O(x + y)
assert (O(x**2*y) + O(y**2*x)).args in [
(O(x*y**2), O(y*x**2)),
(O(y*x**2), O(x*y**2))]
assert (O(x**2*y) + O(y*x)) == O(x*y)
def test_sympyissue_3468():
y = Symbol('y', negative=True)
z = Symbol('z', complex=True)
# check that Order does not modify assumptions about symbols
O(x)
O(y)
O(z)
assert x.is_positive is None
assert y.is_positive is False
assert z.is_positive is None
def test_leading_order():
assert (x + 1 + 1/x**5).extract_leading_order(x) == ((1/x**5, O(1/x**5)),)
assert (1 + 1/x).extract_leading_order(x) == ((1/x, O(1/x)),)
assert (1 + x).extract_leading_order(x) == ((1, O(1, x)),)
assert (1 + x**2).extract_leading_order(x) == ((1, O(1, x)),)
assert (2 + x**2).extract_leading_order(x) == ((2, O(1, x)),)
assert (x + x**2).extract_leading_order(x) == ((x, O(x)),)
def test_leading_order2():
assert set((2 + pi + x**2).extract_leading_order(x)) == {(pi, O(1, x)),
(2, O(1, x))}
assert set((2*x + pi*x + x**2).extract_leading_order(x)) == {(2*x, O(x)),
(x*pi, O(x))}
def test_order_leadterm():
assert O(x**2)._eval_as_leading_term(x) == O(x**2)
def test_order_symbols():
e = x*y*sin(x)*Integral(x, (x, 1, 2))
assert O(e) == O(x**2*y)
assert O(e, x) == O(x**2)
def test_nan():
assert O(nan) == nan
assert not O(x).contains(nan)
def test_O1():
assert O(1, x) * x == O(x)
assert O(1, y) * x == O(1, y)
def test_getn():
# other lines are tested incidentally by the suite
assert O(x).getn() == 1
assert O(x/log(x)).getn() == 1
assert O(x**2/log(x)**2).getn() == 2
assert O(x*log(x)).getn() == 1
pytest.raises(NotImplementedError, lambda: (O(x) + O(y)).getn())
pytest.raises(NotImplementedError, lambda: O(x**y*log(x)**z, (x, 0)).getn())
pytest.raises(NotImplementedError, lambda: O(x**pi*log(x), (x, 0)).getn())
f = Function('f')
pytest.raises(NotImplementedError, lambda: O(f(x)).getn())
def test_diff():
assert O(1).diff(x) == 0
assert O(1, x).diff(x) == Derivative(O(1, x), x)
assert O(x**2).diff(x) == Derivative(O(x**2), x)
def test_getO():
assert x.getO() is None
assert x.removeO() == x
assert O(x).getO() == O(x)
assert O(x).removeO() == 0
assert (z + O(x) + O(y)).getO() == O(x) + O(y)
assert (z + O(x) + O(y)).removeO() == z
pytest.raises(NotImplementedError, lambda: (O(x) + O(y)).getn())
def test_leading_term():
assert O(1/digamma(1/x)) == O(1/log(x))
def test_eval():
assert O(x).subs({O(x): 1}) == 1
assert O(x).subs({x: y}) == O(y)
assert O(x).subs({y: x}) == O(x)
assert O(x).subs({x: x + y}) == O(x + y, (x, -y))
assert (O(1)**x).is_Pow
def test_sympyissue_4279():
a, b = symbols('a b')
assert O(a, a, b) + O(1, a, b) == O(1, a, b)
assert O(b, a, b) + O(1, a, b) == O(1, a, b)
assert O(a + b) + O(1, a, b) == O(1, a, b)
assert O(1, a, b) + O(a, a, b) == O(1, a, b)
assert O(1, a, b) + O(b, a, b) == O(1, a, b)
assert O(1, a, b) + O(a + b) == O(1, a, b)
def test_sympyissue_4855():
assert 1/O(1) != O(1)
assert 1/O(x) != O(1/x)
assert 1/O(x, (x, oo)) != O(1/x, (x, oo))
f = Function('f')
assert 1/O(f(x)) != O(1/x)
def test_order_conjugate_transpose():
x = Symbol('x', extended_real=True)
y = Symbol('y', imaginary=True)
assert conjugate(O(x)) == O(conjugate(x))
assert conjugate(O(y)) == O(conjugate(y))
assert conjugate(O(x**2)) == O(conjugate(x)**2)
assert conjugate(O(y**2)) == O(conjugate(y)**2)
assert conjugate(O(z)) == conjugate(O(z), evaluate=False)
assert transpose(O(x)) == O(transpose(x))
assert transpose(O(y)) == O(transpose(y))
assert transpose(O(x**2)) == O(transpose(x)**2)
assert transpose(O(y**2)) == O(transpose(y)**2)
assert transpose(O(z)) == transpose(O(z), evaluate=False)
def test_order_noncommutative():
A = Symbol('A', commutative=False)
assert O(A + A*x, x) == O(1, x)
assert (A + A*x)*O(x) == O(x)
assert (A*x)*O(x) == O(x**2, x)
assert expand((1 + O(x))*A*A*x) == A*A*x + O(x**2, x)
assert expand((A*A + O(x))*x) == A*A*x + O(x**2, x)
assert expand((A + O(x))*A*x) == A*A*x + O(x**2, x)
def test_sympyissue_6753():
assert (1 + x**2)**10000*O(x) == O(x)
def test_sympyissue_7872():
assert O(x**3).subs({x: exp(-x**2)}) in [O(exp(-3*x**2), (x, oo)),
O(exp(-3*x**2), (x, -oo))]
def test_order_at_infinity():
assert O(1 + x, (x, oo)) == O(x, (x, oo))
assert O(3*x, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo))*3 == O(x, (x, oo))
assert -28*O(x, (x, oo)) == O(x, (x, oo))
assert O(O(x, (x, oo)), (x, oo)) == O(x, (x, oo))
assert O(O(x, (x, oo)), (y, oo)) == O(x, (x, oo), (y, oo))
assert O(3, (x, oo)) == O(1, (x, oo))
assert O(x**2 + x + y, (x, oo)) == O(x**2, (x, oo))
assert O(x**2 + x + y, (y, oo)) == O(y, (y, oo))
assert O(2*x, (x, oo))*x == O(x**2, (x, oo))
assert O(2*x, (x, oo))/x == O(1, (x, oo))
assert O(2*x, (x, oo))*x*exp(1/x) == O(x**2*exp(1/x), (x, oo))
assert O(2*x, (x, oo))*x*exp(1/x)/ln(x)**3 == O(x**2*exp(1/x)*ln(x)**-3, (x, oo))
assert O(x, (x, oo)) + 1/x == 1/x + O(x, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo)) + 1 == 1 + O(x, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo)) + x == x + O(x, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo)) + x**2 == x**2 + O(x, (x, oo))
assert O(1/x, (x, oo)) + 1/x**2 == 1/x**2 + O(1/x, (x, oo)) == O(1/x, (x, oo))
assert O(x, (x, oo)) + exp(1/x) == exp(1/x) + O(x, (x, oo))
assert O(x, (x, oo))**2 == O(x**2, (x, oo))
assert O(x, (x, oo)) + O(x**2, (x, oo)) == O(x**2, (x, oo))
assert O(x, (x, oo)) + O(x**-2, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo)) + O(1/x, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo)) - O(x, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo)) + O(1, (x, oo)) == O(x, (x, oo))
assert O(x, (x, oo)) + O(x**2, (x, oo)) == O(x**2, (x, oo))
assert O(1/x, (x, oo)) + O(1, (x, oo)) == O(1, (x, oo))
assert O(x, (x, oo)) + O(exp(1/x), (x, oo)) == O(x, (x, oo))
assert O(x**3, (x, oo)) + O(exp(2/x), (x, oo)) == O(x**3, (x, oo))
assert O(x**-3, (x, oo)) + O(exp(2/x), (x, oo)) == O(exp(2/x), (x, oo))
# issue sympy/sympy#7207
assert O(exp(x), (x, oo)).expr == O(2*exp(x), (x, oo)).expr == exp(x)
assert O(y**x, (x, oo)).expr == O(2*y**x, (x, oo)).expr == y**x
# issue sympy/sympy#9917
assert O(x*sin(x) + 1, (x, oo)) != O(x*sin(x), (x, oo))
def test_mixing_order_at_zero_and_infinity():
assert (O(x, (x, 0)) + O(x, (x, oo))).is_Add
assert O(x, (x, 0)) + O(x, (x, oo)) == O(x, (x, oo)) + O(x, (x, 0))
assert O(O(x, (x, oo))) == O(x, (x, oo))
# not supported (yet)
pytest.raises(NotImplementedError, lambda: O(x, (x, 0))*O(x, (x, oo)))
pytest.raises(NotImplementedError, lambda: O(x, (x, oo))*O(x, (x, 0)))
pytest.raises(NotImplementedError, lambda: O(O(x, (x, oo)), y))
pytest.raises(NotImplementedError, lambda: O(O(x), (x, oo)))
def test_order_at_some_point():
assert O(x, (x, 1)) == O(1, (x, 1))
assert O(2*x - 2, (x, 1)) == O(x - 1, (x, 1))
assert O(-x + 1, (x, 1)) == O(x - 1, (x, 1))
assert O(x - 1, (x, 1))**2 == O((x - 1)**2, (x, 1))
assert O(x - 2, (x, 2)) - O(x - 2, (x, 2)) == O(x - 2, (x, 2))
def test_order_subs_limits():
# issue sympy/sympy#3333
assert (1 + O(x)).subs({x: 1/x}) == 1 + O(1/x, (x, oo))
assert (1 + O(x)).limit(x, 0) == 1
# issue sympy/sympy#5769
assert ((x + O(x**2))/x).limit(x, 0) == 1
assert O(x**2).subs({x: y - 1}) == O((y - 1)**2, (y, 1))
assert O(10*x**2, (x, 2)).subs({x: y - 1}) == O(1, (y, 3))
assert O(x).subs({x: y*z}) == O(y*z, y, z)
def test_sympyissue_9351():
assert exp(x).series(x, 10, 1) == exp(10) + O(x - 10, (x, 10))
def test_sympyissue_7599():
n = Symbol('n', integer=True)
assert O(x**n, x) + O(x**2) == Add(O(x**2), O(x**n, x), evaluate=False)
def test_sympyissue_15539():
assert O(x**-6, (x, -oo)) == O(x**(-6), (x, -oo), evaluate=False)
| bsd-3-clause |
wengole/eveonline-assistant | eveonline-assistant/config/urls.py | 1 | 1127 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
# Enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$',
TemplateView.as_view(template_name='pages/home.html'),
name="home"),
url(r'^about/$',
TemplateView.as_view(template_name='pages/about.html'),
name="about"),
# Admin URLs:
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls go here
url(r'^skills/', include('skills.urls', namespace='skills')),
url(r'^characters/', include('characters.urls', namespace='characters')),
url(r'^plans/', include('plans.urls', namespace='plans'))
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| bsd-3-clause |
kawamon/hue | desktop/core/ext-py/SQLAlchemy-1.3.17/lib/sqlalchemy/dialects/firebird/fdb.py | 4 | 4079 | # firebird/fdb.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: firebird+fdb
:name: fdb
:dbapi: fdb
:connectstring: firebird+fdb://user:password@host:port/path/to/db[?key=value&key=value...]
:url: http://pypi.python.org/pypi/fdb/
fdb is a kinterbasdb compatible DBAPI for Firebird.
.. versionchanged:: 0.9 - The fdb dialect is now the default dialect
under the ``firebird://`` URL space, as ``fdb`` is now the official
Python driver for Firebird.
Arguments
----------
The ``fdb`` dialect is based on the
:mod:`sqlalchemy.dialects.firebird.kinterbasdb` dialect; however, it does not
accept every argument that Kinterbasdb does.
* ``enable_rowcount`` - True by default, setting this to False disables
the usage of "cursor.rowcount" with the
Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically
after any UPDATE or DELETE statement. When disabled, SQLAlchemy's
ResultProxy will return -1 for result.rowcount. The rationale here is
that Kinterbasdb requires a second round trip to the database when
.rowcount is called - since SQLA's resultproxy automatically closes
the cursor after a non-result-returning statement, rowcount must be
called, if at all, before the result object is returned. Additionally,
cursor.rowcount may not return correct results with older versions
of Firebird, and setting this flag to False will also cause the
SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a
per-execution basis using the ``enable_rowcount`` option with
:meth:`_engine.Connection.execution_options`::
conn = engine.connect().execution_options(enable_rowcount=True)
r = conn.execute(stmt)
print(r.rowcount)
* ``retaining`` - False by default. Setting this to True will pass the
``retaining=True`` keyword argument to the ``.commit()`` and ``.rollback()``
methods of the DBAPI connection, which can improve performance in some
situations, but apparently with significant caveats.
Please read the fdb and/or kinterbasdb DBAPI documentation in order to
understand the implications of this flag.
.. versionchanged:: 0.9.0 - the ``retaining`` flag defaults to ``False``.
In 0.8 it defaulted to ``True``.
.. seealso::
http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions
- information on the "retaining" flag.
""" # noqa
from .kinterbasdb import FBDialect_kinterbasdb
from ... import util
class FBDialect_fdb(FBDialect_kinterbasdb):
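"""Firebird dialect backed by the fdb DBAPI (a kinterbasdb-compatible driver)."""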
def __init__(self, enable_rowcount=True, retaining=False, **kwargs):
super(FBDialect_fdb, self).__init__(
enable_rowcount=enable_rowcount, retaining=retaining, **kwargs
)
@classmethod
def dbapi(cls):
return __import__("fdb")
def create_connect_args(self, url):
opts = url.translate_connect_args(username="user")
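# fdb (like kinterbasdb) expects any port folded into the host value as
# "host/port", so merge the two before connecting.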
if opts.get("port"):
opts["host"] = "%s/%s" % (opts["host"], opts["port"])
del opts["port"]
opts.update(url.query)
util.coerce_kw_type(opts, "type_conv", int)
return ([], opts)
def _get_server_version_info(self, connection):
"""Get the version of the Firebird server used by a connection.
Returns a tuple of (`major`, `minor`, `build`), three integers
representing the version of the attached server.
"""
# This is the simpler approach (the other uses the services api),
# that for backward compatibility reasons returns a string like
# LI-V6.3.3.12981 Firebird 2.0
# where the first version is a fake one resembling the old
# Interbase signature.
isc_info_firebird_version = 103
fbconn = connection.connection
version = fbconn.db_info(isc_info_firebird_version)
return self._parse_version_info(version)
dialect = FBDialect_fdb
| apache-2.0 |
kalxas/QGIS | tests/src/python/test_qgsserver_accesscontrol_wfs.py | 12 | 25583 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer.
From build dir, run: ctest -R PyQgsServerAccessControlWFS -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Stephane Brunner'
__date__ = '28/08/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
from qgis.testing import unittest
import urllib.request
import urllib.parse
import urllib.error
from test_qgsserver_accesscontrol import TestQgsServerAccessControl, XML_NS
class TestQgsServerAccessControlWFS(TestQgsServerAccessControl):
def test_wfs_getcapabilities(self):
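# Build the GET query string from the parameter mapping, e.g.
# "MAP=...&SERVICE=WFS&VERSION=1.0.0&REQUEST=GetCapabilities"
# (pair order follows dict iteration and does not matter to the server).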
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<Name>Hello</Name>") != -1,
"No Hello layer in WFS/GetCapabilities\n%s" % response)
self.assertTrue(
str(response).find("<Name>Hello_OnOff</Name>") != -1,
"No Hello layer in WFS/GetCapabilities\n%s" % response)
self.assertTrue(
str(response).find("<Name>Country</Name>") != -1,
"No Country layer in WFS/GetCapabilities\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<Name>Hello</Name>") != -1,
"No Hello layer in WFS/GetCapabilities\n%s" % response)
self.assertFalse(
str(response).find("<Name>Country</Name>") != -1,
"Unexpected Country layer in WFS/GetCapabilities\n%s" % response)
def test_wfs_describefeaturetype_hello(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "DescribeFeatureType",
"TYPENAME": "Hello"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find('name="Hello"') != -1,
"No Hello layer in DescribeFeatureType\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find('name="Hello"') != -1,
"No Hello layer in DescribeFeatureType\n%s" % response)
def test_wfs_describefeaturetype_country(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "DescribeFeatureType",
"TYPENAME": "Country"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find('name="Country"') != -1,
"No Country layer in DescribeFeatureType\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find('name="Country"') != -1,
"Unexpected Country layer in DescribeFeatureType\n%s" % response)
def test_wfs_getfeature_hello(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
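# The POST body above is a WFS 1.0 GetFeature request selecting the Hello
# feature with pkuid = 1; the restricted user must not see its "color"
# attribute.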
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:color>red</qgs:color>") != -1, # spellok
"No color in result of GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:color>red</qgs:color>") != -1, # spellok
"Unexpected color in result of GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:color>NULL</qgs:color>") != -1, # spellok
"Unexpected color NULL in result of GetFeature\n%s" % response)
def test_wfs_getfeature_hello2(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>2</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_filter(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Filter" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_filter2(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Filter" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>6</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertTrue(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_country(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_OnOff" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response) # spellok
# # Subset String # #
def test_wfs_getfeature_subsetstring(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No good result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No good result in GetFeature\n%s" % response)
def test_wfs_getfeature_subsetstring2(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>2</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No good result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_project_subsetstring(self):
"""Tests access control with a subset string already applied to a layer in a project
'Hello_Project_SubsetString' layer has a subsetString of "pkuid in (7,8)"
This test checks retrieving a feature which should be available both with and without access control
"""
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Project_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>7</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
# should be one result
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Feature with pkuid=7 not found in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Feature with pkuid=7 not found in GetFeature, has been incorrectly filtered out by access controls\n%s" % response)
def test_wfs_getfeature_project_subsetstring2(self):
"""Tests access control with a subset string already applied to a layer in a project
'Hello_Project_SubsetString' layer has a subsetString of "pkuid in (7,8)"
This test checks for a feature which should be filtered out by access controls
"""
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Project_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>8</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
# should be one result
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>8</qgs:pk>") != -1,
"Feature with pkuid=8 not found in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Feature with pkuid=8 was found in GetFeature, but should have been filtered out by access controls\n%s" % response)
def test_wfs_getfeature_project_subsetstring3(self):
"""Tests access control with a subset string already applied to a layer in a project
'Hello_Project_SubsetString' layer has a subsetString of "pkuid in (7,8)"
This test checks for a features which should be filtered out by project subsetStrings.
For example, pkuid 6 passes the access control checks, but should not be shown because of project layer subsetString
"""
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Project_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>6</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
# should be no results, since pkuid 6 is filtered out by the project subsetString
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") == -1,
"Project based layer subsetString not respected in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Project based layer subsetString not respected in GetFeature with restricted access\n%s" % response)
def test_wfs_getfeature_exp_filter_hello(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello",
"EXP_FILTER": "pkuid = 1"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:color>red</qgs:color>") != -1, # spellok
"No color in result of GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:color>red</qgs:color>") != -1, # spellok
"Unexpected color in result of GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:color>NULL</qgs:color>") != -1, # spellok
"Unexpected color NULL in result of GetFeature\n%s" % response)
def test_wfs_getfeature_exp_filter_hello2(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello",
"EXP_FILTER": "pkuid = 2"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_exp_filter_hello_filter(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello_Filter",
"EXP_FILTER": "pkuid = 1"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_exp_filter_hello_filter2(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello_Filter",
"EXP_FILTER": "pkuid = 6"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_featureid_hello(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello",
"FEATUREID": "Hello.1"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:color>red</qgs:color>") != -1, # spellok
"No color in result of GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:color>red</qgs:color>") != -1, # spellok
"Unexpected color in result of GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:color>NULL</qgs:color>") != -1, # spellok
"Unexpected color NULL in result of GetFeature\n%s" % response)
def test_wfs_getfeature_featureid_hello2(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello",
"FEATUREID": "Hello.2"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_featureid_hello_filter(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello_Filter",
"FEATUREID": "Hello_Filter.1"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_featureid_hello_filter2(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetFeature",
"TYPENAME": "Hello_Filter",
"FEATUREID": "Hello_Filter.6"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>6</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
if __name__ == "__main__":
unittest.main()
| gpl-2.0 |
jiangzhuo/kbengine | kbe/res/scripts/common/Lib/test/test_site.py | 67 | 19039 | """Tests for 'site'.
Tests assume the initial paths in sys.path once the interpreter has begun
executing have not been removed.
"""
import unittest
import test.support
from test.support import captured_stderr, TESTFN, EnvironmentVarGuard
import builtins
import os
import sys
import re
import encodings
import urllib.request
import urllib.error
import subprocess
import sysconfig
from copy import copy
# These tests are not particularly useful if Python was invoked with -S.
# If you add tests that are useful under -S, this skip should be moved
# to the class level.
if sys.flags.no_site:
raise unittest.SkipTest("Python was invoked with -S")
import site
if site.ENABLE_USER_SITE and not os.path.isdir(site.USER_SITE):
# need to add user site directory for tests
os.makedirs(site.USER_SITE)
site.addsitedir(site.USER_SITE)
class HelperFunctionsTests(unittest.TestCase):
"""Tests for helper functions.
"""
def setUp(self):
"""Save a copy of sys.path"""
self.sys_path = sys.path[:]
self.old_base = site.USER_BASE
self.old_site = site.USER_SITE
self.old_prefixes = site.PREFIXES
self.original_vars = sysconfig._CONFIG_VARS
self.old_vars = copy(sysconfig._CONFIG_VARS)
def tearDown(self):
"""Restore sys.path"""
sys.path[:] = self.sys_path
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
site.PREFIXES = self.old_prefixes
sysconfig._CONFIG_VARS = self.original_vars
sysconfig._CONFIG_VARS.clear()
sysconfig._CONFIG_VARS.update(self.old_vars)
def test_makepath(self):
# Test makepath() have an absolute path for its first return value
# and a case-normalized version of the absolute path for its
# second value.
path_parts = ("Beginning", "End")
original_dir = os.path.join(*path_parts)
abs_dir, norm_dir = site.makepath(*path_parts)
self.assertEqual(os.path.abspath(original_dir), abs_dir)
if original_dir == os.path.normcase(original_dir):
self.assertEqual(abs_dir, norm_dir)
else:
self.assertEqual(os.path.normcase(abs_dir), norm_dir)
def test_init_pathinfo(self):
dir_set = site._init_pathinfo()
for entry in [site.makepath(path)[1] for path in sys.path
if path and os.path.isdir(path)]:
self.assertIn(entry, dir_set,
"%s from sys.path not found in set returned "
"by _init_pathinfo(): %s" % (entry, dir_set))
def pth_file_tests(self, pth_file):
"""Contain common code for testing results of reading a .pth file"""
self.assertIn(pth_file.imported, sys.modules,
"%s not in sys.modules" % pth_file.imported)
self.assertIn(site.makepath(pth_file.good_dir_path)[0], sys.path)
self.assertFalse(os.path.exists(pth_file.bad_dir_path))
def test_addpackage(self):
# Make sure addpackage() imports if the line starts with 'import',
# adds directories to sys.path for any line in the file that is not a
# comment or import that is a valid directory name for where the .pth
# file resides; invalid directories are not added
pth_file = PthFile()
pth_file.cleanup(prep=True) # to make sure that nothing is
# pre-existing that shouldn't be
try:
pth_file.create()
site.addpackage(pth_file.base_dir, pth_file.filename, set())
self.pth_file_tests(pth_file)
finally:
pth_file.cleanup()
def make_pth(self, contents, pth_dir='.', pth_name=TESTFN):
# Create a .pth file and return its (abspath, basename).
pth_dir = os.path.abspath(pth_dir)
pth_basename = pth_name + '.pth'
pth_fn = os.path.join(pth_dir, pth_basename)
pth_file = open(pth_fn, 'w', encoding='utf-8')
self.addCleanup(lambda: os.remove(pth_fn))
pth_file.write(contents)
pth_file.close()
return pth_dir, pth_basename
def test_addpackage_import_bad_syntax(self):
# Issue 10642
pth_dir, pth_fn = self.make_pth("import bad)syntax\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 1")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: the previous two should be independent checks so that the
# order doesn't matter. The next three could be a single check
# but my regex foo isn't good enough to write it.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), r'import bad\)syntax')
self.assertRegex(err_out.getvalue(), 'SyntaxError')
def test_addpackage_import_bad_exec(self):
# Issue 10642
pth_dir, pth_fn = self.make_pth("randompath\nimport nosuchmodule\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 2")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: ditto previous XXX comment.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), 'ImportError')
@unittest.skipIf(sys.platform == "win32", "Windows does not raise an "
"error for file paths containing null characters")
def test_addpackage_import_bad_pth_file(self):
# Issue 5258
pth_dir, pth_fn = self.make_pth("abc\x00def\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 1")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: ditto previous XXX comment.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), 'TypeError')
def test_addsitedir(self):
# Same tests for test_addpackage since addsitedir() essentially just
# calls addpackage() for every .pth file in the directory
pth_file = PthFile()
pth_file.cleanup(prep=True) # Make sure that nothing is pre-existing
# that is tested for
try:
pth_file.create()
site.addsitedir(pth_file.base_dir, set())
self.pth_file_tests(pth_file)
finally:
pth_file.cleanup()
@unittest.skipUnless(site.ENABLE_USER_SITE, "requires access to PEP 370 "
"user-site (site.ENABLE_USER_SITE)")
def test_s_option(self):
usersite = site.USER_SITE
self.assertIn(usersite, sys.path)
env = os.environ.copy()
rc = subprocess.call([sys.executable, '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
self.assertEqual(rc, 1)
env = os.environ.copy()
rc = subprocess.call([sys.executable, '-s', '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
if usersite == site.getsitepackages()[0]:
self.assertEqual(rc, 1)
else:
self.assertEqual(rc, 0)
env = os.environ.copy()
env["PYTHONNOUSERSITE"] = "1"
rc = subprocess.call([sys.executable, '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
if usersite == site.getsitepackages()[0]:
self.assertEqual(rc, 1)
else:
self.assertEqual(rc, 0)
env = os.environ.copy()
env["PYTHONUSERBASE"] = "/tmp"
rc = subprocess.call([sys.executable, '-c',
'import sys, site; sys.exit(site.USER_BASE.startswith("/tmp"))'],
env=env)
self.assertEqual(rc, 1)
def test_getuserbase(self):
site.USER_BASE = None
user_base = site.getuserbase()
# the call sets site.USER_BASE
self.assertEqual(site.USER_BASE, user_base)
# let's set PYTHONUSERBASE and see if it uses it
site.USER_BASE = None
import sysconfig
sysconfig._CONFIG_VARS = None
with EnvironmentVarGuard() as environ:
environ['PYTHONUSERBASE'] = 'xoxo'
self.assertTrue(site.getuserbase().startswith('xoxo'),
site.getuserbase())
def test_getusersitepackages(self):
site.USER_SITE = None
site.USER_BASE = None
user_site = site.getusersitepackages()
# the call sets USER_BASE *and* USER_SITE
self.assertEqual(site.USER_SITE, user_site)
self.assertTrue(user_site.startswith(site.USER_BASE), user_site)
def test_getsitepackages(self):
site.PREFIXES = ['xoxo']
dirs = site.getsitepackages()
if (sys.platform == "darwin" and
sysconfig.get_config_var("PYTHONFRAMEWORK")):
# OS X framework builds
site.PREFIXES = ['Python.framework']
dirs = site.getsitepackages()
self.assertEqual(len(dirs), 3)
wanted = os.path.join('/Library',
sysconfig.get_config_var("PYTHONFRAMEWORK"),
sys.version[:3],
'site-packages')
self.assertEqual(dirs[2], wanted)
elif os.sep == '/':
# OS X non-framework builds, Linux, FreeBSD, etc.
self.assertEqual(len(dirs), 2)
wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
'site-packages')
self.assertEqual(dirs[0], wanted)
wanted = os.path.join('xoxo', 'lib', 'site-python')
self.assertEqual(dirs[1], wanted)
else:
# other platforms
self.assertEqual(len(dirs), 2)
self.assertEqual(dirs[0], 'xoxo')
wanted = os.path.join('xoxo', 'lib', 'site-packages')
self.assertEqual(dirs[1], wanted)
class PthFile(object):
"""Helper class for handling testing of .pth files"""
def __init__(self, filename_base=TESTFN, imported="time",
good_dirname="__testdir__", bad_dirname="__bad"):
"""Initialize instance variables"""
self.filename = filename_base + ".pth"
self.base_dir = os.path.abspath('')
self.file_path = os.path.join(self.base_dir, self.filename)
self.imported = imported
self.good_dirname = good_dirname
self.bad_dirname = bad_dirname
self.good_dir_path = os.path.join(self.base_dir, self.good_dirname)
self.bad_dir_path = os.path.join(self.base_dir, self.bad_dirname)
def create(self):
"""Create a .pth file with a comment, blank lines, an ``import
<self.imported>``, a line with self.good_dirname, and a line with
self.bad_dirname.
Creation of the directory for self.good_dir_path (based off of
self.good_dirname) is also performed.
Make sure to call self.cleanup() to undo anything done by this method.
"""
FILE = open(self.file_path, 'w')
try:
print("#import @bad module name", file=FILE)
print("\n", file=FILE)
print("import %s" % self.imported, file=FILE)
print(self.good_dirname, file=FILE)
print(self.bad_dirname, file=FILE)
finally:
FILE.close()
os.mkdir(self.good_dir_path)
def cleanup(self, prep=False):
"""Make sure that the .pth file is deleted, self.imported is not in
sys.modules, and that both self.good_dirname and self.bad_dirname are
not existing directories."""
if os.path.exists(self.file_path):
os.remove(self.file_path)
if prep:
self.imported_module = sys.modules.get(self.imported)
if self.imported_module:
del sys.modules[self.imported]
else:
if self.imported_module:
sys.modules[self.imported] = self.imported_module
if os.path.exists(self.good_dir_path):
os.rmdir(self.good_dir_path)
if os.path.exists(self.bad_dir_path):
os.rmdir(self.bad_dir_path)
class ImportSideEffectTests(unittest.TestCase):
"""Test side-effects from importing 'site'."""
def setUp(self):
"""Make a copy of sys.path"""
self.sys_path = sys.path[:]
def tearDown(self):
"""Restore sys.path"""
sys.path[:] = self.sys_path
def test_abs_paths(self):
# Make sure all imported modules have their __file__ and __cached__
# attributes as absolute paths. Arranging to put the Lib directory on
# PYTHONPATH would cause the os module to have a relative path for
# __file__ if abs_paths() does not get run. sys and builtins (the
# only other modules imported before site.py runs) do not have
# __file__ or __cached__ because they are built-in.
parent = os.path.relpath(os.path.dirname(os.__file__))
env = os.environ.copy()
env['PYTHONPATH'] = parent
code = ('import os, sys',
# use ASCII to avoid locale issues with non-ASCII directories
'os_file = os.__file__.encode("ascii", "backslashreplace")',
r'sys.stdout.buffer.write(os_file + b"\n")',
'os_cached = os.__cached__.encode("ascii", "backslashreplace")',
r'sys.stdout.buffer.write(os_cached + b"\n")')
command = '\n'.join(code)
# First, prove that with -S (no 'import site'), the paths are
# relative.
proc = subprocess.Popen([sys.executable, '-S', '-c', command],
env=env,
stdout=subprocess.PIPE)
stdout, stderr = proc.communicate()
self.assertEqual(proc.returncode, 0)
os__file__, os__cached__ = stdout.splitlines()[:2]
self.assertFalse(os.path.isabs(os__file__))
self.assertFalse(os.path.isabs(os__cached__))
# Now, with 'import site', it works.
proc = subprocess.Popen([sys.executable, '-c', command],
env=env,
stdout=subprocess.PIPE)
stdout, stderr = proc.communicate()
self.assertEqual(proc.returncode, 0)
os__file__, os__cached__ = stdout.splitlines()[:2]
self.assertTrue(os.path.isabs(os__file__))
self.assertTrue(os.path.isabs(os__cached__))
def test_no_duplicate_paths(self):
# No duplicate paths should exist in sys.path
# Handled by removeduppaths()
site.removeduppaths()
seen_paths = set()
for path in sys.path:
self.assertNotIn(path, seen_paths)
seen_paths.add(path)
@unittest.skip('test not implemented')
def test_add_build_dir(self):
# Test that the build directory's Modules directory is used when it
# should be.
# XXX: implement
pass
def test_setting_quit(self):
# 'quit' and 'exit' should be injected into builtins
self.assertTrue(hasattr(builtins, "quit"))
self.assertTrue(hasattr(builtins, "exit"))
def test_setting_copyright(self):
# 'copyright', 'credits', and 'license' should be in builtins
self.assertTrue(hasattr(builtins, "copyright"))
self.assertTrue(hasattr(builtins, "credits"))
self.assertTrue(hasattr(builtins, "license"))
def test_setting_help(self):
# 'help' should be set in builtins
self.assertTrue(hasattr(builtins, "help"))
def test_aliasing_mbcs(self):
if sys.platform == "win32":
import locale
if locale.getdefaultlocale()[1].startswith('cp'):
for value in encodings.aliases.aliases.values():
if value == "mbcs":
break
else:
self.fail("did not alias mbcs")
def test_sitecustomize_executed(self):
# If sitecustomize is available, it should have been imported.
if "sitecustomize" not in sys.modules:
try:
import sitecustomize
except ImportError:
pass
else:
self.fail("sitecustomize not imported automatically")
@test.support.requires_resource('network')
@unittest.skipUnless(sys.version_info[3] == 'final',
'only for released versions')
@unittest.skipUnless(hasattr(urllib.request, "HTTPSHandler"),
'need SSL support to download license')
def test_license_exists_at_url(self):
# This test is a bit fragile since it depends on the format of the
# string displayed by license in the absence of a LICENSE file.
url = license._Printer__data.split()[1]
req = urllib.request.Request(url, method='HEAD')
try:
with test.support.transient_internet(url):
with urllib.request.urlopen(req) as data:
code = data.getcode()
except urllib.error.HTTPError as e:
code = e.code
self.assertEqual(code, 200, msg="Can't find " + url)
class StartupImportTests(unittest.TestCase):
def test_startup_imports(self):
# This tests checks which modules are loaded by Python when it
# initially starts upon startup.
popen = subprocess.Popen([sys.executable, '-I', '-v', '-c',
'import sys; print(set(sys.modules))'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = popen.communicate()
stdout = stdout.decode('utf-8')
stderr = stderr.decode('utf-8')
modules = eval(stdout)
self.assertIn('site', modules)
# http://bugs.python.org/issue19205
re_mods = {'re', '_sre', 'sre_compile', 'sre_constants', 'sre_parse'}
# _osx_support uses the re module in many places
if sys.platform != 'darwin':
self.assertFalse(modules.intersection(re_mods), stderr)
# http://bugs.python.org/issue9548
self.assertNotIn('locale', modules, stderr)
if sys.platform != 'darwin':
# http://bugs.python.org/issue19209
self.assertNotIn('copyreg', modules, stderr)
# http://bugs.python.org/issue19218
collection_mods = {'_collections', 'collections', 'functools',
'heapq', 'itertools', 'keyword', 'operator',
'reprlib', 'types', 'weakref'
}.difference(sys.builtin_module_names)
self.assertFalse(modules.intersection(collection_mods), stderr)
if __name__ == "__main__":
unittest.main()
| lgpl-3.0 |
jatinmistry13/pattern | pattern/server/cherrypy/cherrypy/_cpcompat.py | 37 | 10921 | """Compatibility code for using CherryPy with various versions of Python.
CherryPy 3.2 is compatible with Python versions 2.3+. This module provides a
useful abstraction over the differences between Python versions, sometimes by
preferring a newer idiom, sometimes an older one, and sometimes a custom one.
In particular, in Python 2 the str type and '' literals are byte strings, while
in Python 3 they are unicode strings. We will call each of these the 'native
string' type for each version. Because of this major difference, this module
provides new 'bytestr', 'unicodestr', and 'nativestr' attributes, as well as
two functions: 'ntob', which translates native strings (of type 'str') into
byte strings regardless of Python version, and 'ntou', which translates native
strings to unicode strings. This also provides a 'BytesIO' name for dealing
specifically with bytes, and a 'StringIO' name for dealing with native strings.
It also provides a 'base64_decode' function with native strings as input and
output.
"""
import os
import re
import sys
import threading
if sys.version_info >= (3, 0):
py3k = True
bytestr = bytes
unicodestr = str
nativestr = unicodestr
basestring = (bytes, str)
def ntob(n, encoding='ISO-8859-1'):
"""Return the given native string as a byte string in the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n.encode(encoding)
def ntou(n, encoding='ISO-8859-1'):
"""Return the given native string as a unicode string with the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n
def tonative(n, encoding='ISO-8859-1'):
"""Return the given string as a native string in the given encoding."""
# In Python 3, the native string type is unicode
if isinstance(n, bytes):
return n.decode(encoding)
return n
# type("")
from io import StringIO
# bytes:
from io import BytesIO as BytesIO
else:
# Python 2
py3k = False
bytestr = str
unicodestr = unicode
nativestr = bytestr
basestring = basestring
def ntob(n, encoding='ISO-8859-1'):
"""Return the given native string as a byte string in the given encoding."""
assert_native(n)
# In Python 2, the native string type is bytes. Assume it's already
# in the given encoding, which for ISO-8859-1 is almost always what
# was intended.
return n
def ntou(n, encoding='ISO-8859-1'):
"""Return the given native string as a unicode string with the given encoding."""
assert_native(n)
# In Python 2, the native string type is bytes.
# First, check for the special encoding 'escape'. The test suite uses this
# to signal that it wants to pass a string with embedded \uXXXX escapes,
# but without having to prefix it with u'' for Python 2 (no prefix
# is needed for Python 3).
if encoding == 'escape':
return unicode(
re.sub(r'\\u([0-9a-zA-Z]{4})',
lambda m: unichr(int(m.group(1), 16)),
n.decode('ISO-8859-1')))
# Assume it's already in the given encoding, which for ISO-8859-1 is almost
# always what was intended.
return n.decode(encoding)
def tonative(n, encoding='ISO-8859-1'):
"""Return the given string as a native string in the given encoding."""
# In Python 2, the native string type is bytes.
if isinstance(n, unicode):
return n.encode(encoding)
return n
try:
# type("")
from cStringIO import StringIO
except ImportError:
# type("")
from StringIO import StringIO
# bytes:
BytesIO = StringIO
def assert_native(n):
if not isinstance(n, nativestr):
raise TypeError("n must be a native str (got %s)" % type(n).__name__)
try:
set = set
except NameError:
from sets import Set as set
try:
# Python 3.1+
from base64 import decodebytes as _base64_decodebytes
except ImportError:
# Python 3.0-
# since CherryPy claims compatibility with Python 2.3, we must use
# the legacy API of base64
from base64 import decodestring as _base64_decodebytes
def base64_decode(n, encoding='ISO-8859-1'):
"""Return the native string base64-decoded (as a native string)."""
if isinstance(n, unicodestr):
b = n.encode(encoding)
else:
b = n
b = _base64_decodebytes(b)
if nativestr is unicodestr:
return b.decode(encoding)
else:
return b
try:
# Python 2.5+
from hashlib import md5
except ImportError:
from md5 import new as md5
try:
# Python 2.5+
from hashlib import sha1 as sha
except ImportError:
from sha import new as sha
try:
sorted = sorted
except NameError:
def sorted(i):
i = i[:]
i.sort()
return i
try:
reversed = reversed
except NameError:
def reversed(x):
i = len(x)
while i > 0:
i -= 1
yield x[i]
try:
# Python 3
from urllib.parse import urljoin, urlencode
from urllib.parse import quote, quote_plus
from urllib.request import unquote, urlopen
from urllib.request import parse_http_list, parse_keqv_list
except ImportError:
# Python 2
from urlparse import urljoin
from urllib import urlencode, urlopen
from urllib import quote, quote_plus
from urllib import unquote
from urllib2 import parse_http_list, parse_keqv_list
try:
from threading import local as threadlocal
except ImportError:
from cherrypy._cpthreadinglocal import local as threadlocal
try:
dict.iteritems
# Python 2
iteritems = lambda d: d.iteritems()
copyitems = lambda d: d.items()
except AttributeError:
# Python 3
iteritems = lambda d: d.items()
copyitems = lambda d: list(d.items())
try:
dict.iterkeys
# Python 2
iterkeys = lambda d: d.iterkeys()
copykeys = lambda d: d.keys()
except AttributeError:
# Python 3
iterkeys = lambda d: d.keys()
copykeys = lambda d: list(d.keys())
try:
dict.itervalues
# Python 2
itervalues = lambda d: d.itervalues()
copyvalues = lambda d: d.values()
except AttributeError:
# Python 3
itervalues = lambda d: d.values()
copyvalues = lambda d: list(d.values())
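# Illustrative sketch (assumption: d is any dict): these shims let calling
# code iterate portably, e.g. `for k, v in iteritems(d): ...` works unchanged
# on Python 2 (lazy iteritems) and Python 3 (items view), while copyitems(d)
# always returns a list snapshot, so it is safe to mutate d while walking the
# copy.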
try:
# Python 3
import builtins
except ImportError:
# Python 2
import __builtin__ as builtins
try:
# Python 2. We try Python 2 first so that clients on Python 2
# don't try to import the 'http' module from cherrypy.lib
from Cookie import SimpleCookie, CookieError
from httplib import BadStatusLine, HTTPConnection, IncompleteRead, NotConnected
from BaseHTTPServer import BaseHTTPRequestHandler
except ImportError:
# Python 3
from http.cookies import SimpleCookie, CookieError
from http.client import BadStatusLine, HTTPConnection, IncompleteRead, NotConnected
from http.server import BaseHTTPRequestHandler
# Some platforms don't expose HTTPSConnection, so handle it separately
if py3k:
try:
from http.client import HTTPSConnection
except ImportError:
# Some platforms which don't have SSL don't expose HTTPSConnection
HTTPSConnection = None
else:
try:
from httplib import HTTPSConnection
except ImportError:
HTTPSConnection = None
try:
# Python 2
xrange = xrange
except NameError:
# Python 3
xrange = range
import threading
if hasattr(threading.Thread, "daemon"):
# Python 2.6+
def get_daemon(t):
return t.daemon
def set_daemon(t, val):
t.daemon = val
else:
def get_daemon(t):
return t.isDaemon()
def set_daemon(t, val):
t.setDaemon(val)
try:
from email.utils import formatdate
def HTTPDate(timeval=None):
return formatdate(timeval, usegmt=True)
except ImportError:
from rfc822 import formatdate as HTTPDate
try:
# Python 3
from urllib.parse import unquote as parse_unquote
def unquote_qs(atom, encoding, errors='strict'):
return parse_unquote(atom.replace('+', ' '), encoding=encoding, errors=errors)
except ImportError:
# Python 2
from urllib import unquote as parse_unquote
def unquote_qs(atom, encoding, errors='strict'):
return parse_unquote(atom.replace('+', ' ')).decode(encoding, errors)
try:
# Prefer simplejson, which is usually more advanced than the builtin module.
import simplejson as json
json_decode = json.JSONDecoder().decode
json_encode = json.JSONEncoder().iterencode
except ImportError:
if py3k:
# Python 3.0: json is part of the standard library,
# but outputs unicode. We need bytes.
import json
json_decode = json.JSONDecoder().decode
_json_encode = json.JSONEncoder().iterencode
def json_encode(value):
for chunk in _json_encode(value):
yield chunk.encode('utf8')
elif sys.version_info >= (2, 6):
# Python 2.6: json is part of the standard library
import json
json_decode = json.JSONDecoder().decode
json_encode = json.JSONEncoder().iterencode
else:
json = None
def json_decode(s):
raise ValueError('No JSON library is available')
def json_encode(s):
raise ValueError('No JSON library is available')
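# Illustrative sketch (not part of the original module): json_encode is an
# *iterator* of chunks, so a whole document is produced by joining them,
# e.g. body = ''.join(json_encode({'a': 1})) with simplejson or the
# Python 2.6+ stdlib, or body = b''.join(json_encode({'a': 1})) on the
# Python 3 stdlib path, where each chunk has been encoded to UTF-8 bytes.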
try:
import cPickle as pickle
except ImportError:
# In Python 2, pickle is a Python version.
# In Python 3, pickle is the sped-up C version.
import pickle
try:
os.urandom(20)
import binascii
def random20():
return binascii.hexlify(os.urandom(20)).decode('ascii')
except (AttributeError, NotImplementedError):
import random
# os.urandom not available until Python 2.4. Fall back to random.random.
def random20():
return sha('%s' % random.random()).hexdigest()
try:
from _thread import get_ident as get_thread_ident
except ImportError:
from thread import get_ident as get_thread_ident
try:
# Python 3
next = next
except NameError:
# Python 2
def next(i):
return i.next()
if sys.version_info >= (3,3):
Timer = threading.Timer
Event = threading.Event
else:
# Python 3.2 and earlier
Timer = threading._Timer
Event = threading._Event
# Prior to Python 2.6, the Thread class did not have a .daemon property.
# This mix-in adds that property.
class SetDaemonProperty:
def __get_daemon(self):
return self.isDaemon()
def __set_daemon(self, daemon):
self.setDaemon(daemon)
if sys.version_info < (2,6):
daemon = property(__get_daemon, __set_daemon)
# Use subprocess module from Python 2.7 on Python 2.3-2.6
if sys.version_info < (2,7):
import cherrypy._cpcompat_subprocess as subprocess
else:
import subprocess
| bsd-3-clause |
jeffwidman/ansible-modules-core | cloud/openstack/_quantum_router.py | 41 | 7042 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Benno Joy <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
try:
from neutronclient.neutron import client
except ImportError:
from quantumclient.quantum import client
from keystoneclient.v2_0 import client as ksclient
HAVE_DEPS = True
except ImportError:
HAVE_DEPS = False
DOCUMENTATION = '''
---
module: quantum_router
version_added: "1.2"
author: "Benno Joy (@bennojoy)"
deprecated: Deprecated in 2.0. Use os_router instead
short_description: Create or Remove router from openstack
description:
- Create or Delete routers from OpenStack
options:
login_username:
description:
- login username to authenticate to keystone
required: true
default: admin
login_password:
description:
- Password of login user
required: true
default: 'yes'
login_tenant_name:
description:
- The tenant name of the login user
required: true
default: 'yes'
auth_url:
description:
- The keystone url for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
region_name:
description:
- Name of the region
required: false
default: None
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
name:
description:
- Name to be given to the router
required: true
default: None
tenant_name:
description:
- Name of the tenant for which the router has to be created; if none, the router will be created for the login tenant.
required: false
default: None
admin_state_up:
description:
- Desired admin state of the created router.
required: false
default: true
requirements:
- "python >= 2.6"
- "python-neutronclient or python-quantumclient"
- "python-keystoneclient"
'''
EXAMPLES = '''
# Creates a router for tenant admin
- quantum_router: state=present
login_username=admin
login_password=admin
login_tenant_name=admin
name=router1"
'''
_os_keystone = None
_os_tenant_id = None
def _get_ksclient(module, kwargs):
try:
kclient = ksclient.Client(username=kwargs.get('login_username'),
password=kwargs.get('login_password'),
tenant_name=kwargs.get('login_tenant_name'),
auth_url=kwargs.get('auth_url'))
except Exception as e:
module.fail_json(msg = "Error authenticating to the keystone: %s " % e.message)
global _os_keystone
_os_keystone = kclient
return kclient
def _get_endpoint(module, ksclient):
try:
endpoint = ksclient.service_catalog.url_for(service_type='network', endpoint_type='publicURL')
except Exception as e:
module.fail_json(msg = "Error getting network endpoint: %s" % e.message)
return endpoint
def _get_neutron_client(module, kwargs):
_ksclient = _get_ksclient(module, kwargs)
token = _ksclient.auth_token
endpoint = _get_endpoint(module, _ksclient)
kwargs = {
'token': token,
'endpoint_url': endpoint
}
try:
neutron = client.Client('2.0', **kwargs)
except Exception as e:
module.fail_json(msg = "Error in connecting to neutron: %s " % e.message)
return neutron
def _set_tenant_id(module):
global _os_tenant_id
if not module.params['tenant_name']:
_os_tenant_id = _os_keystone.tenant_id
else:
tenant_name = module.params['tenant_name']
for tenant in _os_keystone.tenants.list():
if tenant.name == tenant_name:
_os_tenant_id = tenant.id
break
if not _os_tenant_id:
module.fail_json(msg = "The tenant id cannot be found, please check the parameters")
def _get_router_id(module, neutron):
kwargs = {
'name': module.params['name'],
'tenant_id': _os_tenant_id,
}
try:
routers = neutron.list_routers(**kwargs)
except Exception as e:
module.fail_json(msg = "Error in getting the router list: %s " % e.message)
if not routers['routers']:
return None
return routers['routers'][0]['id']
def _create_router(module, neutron):
router = {
'name': module.params['name'],
'tenant_id': _os_tenant_id,
'admin_state_up': module.params['admin_state_up'],
}
try:
new_router = neutron.create_router(dict(router=router))
except Exception as e:
module.fail_json( msg = "Error in creating router: %s" % e.message)
return new_router['router']['id']
def _delete_router(module, neutron, router_id):
try:
neutron.delete_router(router_id)
except Exception as e:
module.fail_json(msg="Error in deleting the router: %s" % e.message)
return True
def main():
argument_spec = openstack_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
tenant_name = dict(default=None),
state = dict(default='present', choices=['absent', 'present']),
admin_state_up = dict(type='bool', default=True),
))
module = AnsibleModule(argument_spec=argument_spec)
if not HAVE_DEPS:
module.fail_json(msg='python-keystoneclient and either python-neutronclient or python-quantumclient are required')
neutron = _get_neutron_client(module, module.params)
_set_tenant_id(module)
if module.params['state'] == 'present':
router_id = _get_router_id(module, neutron)
if not router_id:
router_id = _create_router(module, neutron)
module.exit_json(changed=True, result="Created", id=router_id)
else:
module.exit_json(changed=False, result="success" , id=router_id)
else:
router_id = _get_router_id(module, neutron)
if not router_id:
module.exit_json(changed=False, result="success")
else:
_delete_router(module, neutron, router_id)
module.exit_json(changed=True, result="deleted")
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
tudorvio/tempest | tempest/test_discover/test_discover.py | 17 | 2009 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
from tempest.test_discover import plugins
if sys.version_info >= (2, 7):
import unittest
else:
import unittest2 as unittest
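# unittest's load_tests protocol: if a test module or package defines
# load_tests(loader, tests, pattern), the test loader calls it and uses the
# returned suite instead of discovering tests itself. Here it aggregates the
# local tempest trees plus any directories registered by installed plugins.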
def load_tests(loader, tests, pattern):
ext_plugins = plugins.TempestTestPluginManager()
suite = unittest.TestSuite()
base_path = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0]
base_path = os.path.split(base_path)[0]
# Load local tempest tests
for test_dir in ['tempest/api', 'tempest/scenario',
'tempest/thirdparty']:
full_test_dir = os.path.join(base_path, test_dir)
if not pattern:
suite.addTests(loader.discover(full_test_dir,
top_level_dir=base_path))
else:
suite.addTests(loader.discover(full_test_dir, pattern=pattern,
top_level_dir=base_path))
plugin_load_tests = ext_plugins.get_plugin_load_tests_tuple()
if not plugin_load_tests:
return suite
# Load any installed plugin tests
for plugin in plugin_load_tests:
test_dir, top_path = plugin_load_tests[plugin]
if not pattern:
suite.addTests(loader.discover(test_dir, top_level_dir=top_path))
else:
suite.addTests(loader.discover(test_dir, pattern=pattern,
top_level_dir=top_path))
return suite
| apache-2.0 |
bzennn/blog_flask | python/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/utf8prober.py | 2919 | 2652 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel
ONE_CHAR_PROB = 0.5
class UTF8Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(UTF8SMModel)
self.reset()
def reset(self):
CharSetProber.reset(self)
self._mCodingSM.reset()
self._mNumOfMBChar = 0
def get_charset_name(self):
return "utf-8"
def feed(self, aBuf):
for c in aBuf:
codingState = self._mCodingSM.next_state(c)
if codingState == constants.eError:
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
if self._mCodingSM.get_current_charlen() >= 2:
self._mNumOfMBChar += 1
if self.get_state() == constants.eDetecting:
if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
unlike = 0.99
if self._mNumOfMBChar < 6:
for i in range(0, self._mNumOfMBChar):
unlike = unlike * ONE_CHAR_PROB
return 1.0 - unlike
else:
return unlike
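# Worked example (illustrative): with ONE_CHAR_PROB = 0.5, one multi-byte
# character gives 1 - 0.99 * 0.5 = 0.505, three give 1 - 0.99 * 0.125
# = 0.87625, and from six characters on the confidence is capped at 0.99.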
| gpl-3.0 |
Pafcholini/Nadia-kernel-N7100XXUFNI1_SWA_KK_Opensource_Update1 | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <[email protected]>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for some time, but not too much; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If no such parent exists, then create the
function as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
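# Illustrative note (hypothetical function names): each node renders as
# "<branch>----<name> (<time>)", with children drawn beneath their parent
# along a " |" rail, so a root with one call chain looks roughly like:
# ----Root (Nowhere)
#   |----do_page_fault (6075.461561)
#     |----down_read_trylock (6075.461563)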
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
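# Illustrative sketch: for a typical function-tracer line such as
#   bash-16939 [000] 6075.461561: mutex_unlock <-tracing_open_generic
# parseLine returns ('6075.461561', 'mutex_unlock', 'tracing_open_generic'),
# i.e. (calltime, callee, caller).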
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
| gpl-2.0 |
cedi4155476/QGIS | python/plugins/processing/script/ScriptUtils.py | 12 | 2741 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ScriptUtils.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from processing.core.ProcessingConfig import ProcessingConfig
from processing.script.ScriptAlgorithm import ScriptAlgorithm
from processing.script.WrongScriptException import WrongScriptException
from processing.core.ProcessingLog import ProcessingLog
from processing.tools.system import mkdir, userFolder
class ScriptUtils:
SCRIPTS_FOLDER = 'SCRIPTS_FOLDER'
ACTIVATE_SCRIPTS = 'ACTIVATE_SCRIPTS'
@staticmethod
def scriptsFolder():
folder = ProcessingConfig.getSetting(ScriptUtils.SCRIPTS_FOLDER)
if folder is None:
folder = unicode(os.path.join(userFolder(), 'scripts'))
mkdir(folder)
return os.path.abspath(folder)
@staticmethod
def loadFromFolder(folder):
if not os.path.exists(folder):
return []
algs = []
for path, subdirs, files in os.walk(folder):
for descriptionFile in files:
if descriptionFile.endswith('.py'):
try:
fullpath = os.path.join(path, descriptionFile)
alg = ScriptAlgorithm(fullpath)
if alg.name.strip() != '':
algs.append(alg)
except WrongScriptException as e:
ProcessingLog.addToLog(ProcessingLog.LOG_ERROR, e.msg)
except Exception as e:
ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
'Could not load script:' + descriptionFile + '\n'
+ unicode(e))
return algs
| gpl-2.0 |
wooga/airflow | docs/exts/docroles.py | 1 | 3913 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
"""Document roles"""
from functools import partial
from docutils import nodes, utils
from sphinx.ext.autodoc.importer import import_module, mock
class RoleException(Exception):
"""Exception for the roles extension."""
def get_template_field(env, fullname):
"""
Gets template fields for specific operator class.
:param fullname: Full path to operator class.
For example: ``airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductSetOperator``
:return: List of template fields
:rtype: list[str]
"""
modname, classname = fullname.rsplit(".", 1)
try:
with mock(env.config.autodoc_mock_imports):
mod = import_module(modname)
except ImportError:
raise RoleException("Error loading %s module." % (modname, ))
clazz = getattr(mod, classname)
if not clazz:
raise RoleException("Error finding %s class in %s module." % (classname, modname))
template_fields = getattr(clazz, "template_fields")
if not template_fields:
raise RoleException(
"Could not find the template fields for %s class in %s module." % (classname, modname)
)
return list(template_fields)
# noinspection PyUnusedLocal
def template_field_role(app,
typ, # pylint: disable=unused-argument
rawtext,
text,
lineno,
inliner,
options=None, # pylint: disable=unused-argument
content=None): # pylint: disable=unused-argument
"""
A role that allows you to include a list of template fields in the middle of the text. This is especially
useful when writing guides describing how to use the operator.
The result is a list of fields where each field is shown in a literal block.
Sample usage::
:template-fields:
`airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator`
For further information look at:
* `Creating reStructuredText Interpreted Text Roles <http://docutils.sourceforge.net/docs/howto/rst-roles.html>`_
"""
if options is None:
options = {}
if content is None:
content = []
text = utils.unescape(text)
try:
template_fields = get_template_field(app.env, text)
except RoleException as e:
msg = inliner.reporter.error("invalid class name %s \n%s" % (text, e, ), line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
node = nodes.inline(rawtext=rawtext)
for i, field in enumerate(template_fields):
if i != 0:
node += nodes.Text(", ")
node += nodes.literal(field, "", nodes.Text(field))
return [node], []
def setup(app):
"""Sets the extension up"""
from docutils.parsers.rst import roles # pylint: disable=wrong-import-order
roles.register_local_role("template-fields", partial(template_field_role, app))
return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True}
| apache-2.0 |
theonion/bulbs2 | tests/test_managers.py | 2 | 1025 | from datetime import timedelta
from django.utils import timezone
from example.app.models import Headline
from model_mommy import mommy
import pytest
@pytest.mark.django_db
def test_draft():
hl = mommy.make(Headline, published=None)
hl.save()
assert hl in Headline.draft_objects.all()
assert hl not in Headline.scheduled_objects.all()
assert hl not in Headline.published_objects.all()
@pytest.mark.django_db
def test_scheduled():
future = timezone.now() + timedelta(days=1)
hl = mommy.make(Headline, published=future)
hl.save()
assert hl not in Headline.draft_objects.all()
assert hl in Headline.scheduled_objects.all()
assert hl not in Headline.published_objects.all()
@pytest.mark.django_db
def test_published():
past = timezone.now() - timedelta(days=1)
hl = mommy.make(Headline, published=past)
hl.save()
assert hl not in Headline.draft_objects.all()
assert hl not in Headline.scheduled_objects.all()
assert hl in Headline.published_objects.all()
| mit |
drawks/ansible | lib/ansible/modules/cloud/azure/azure_rm_virtualmachinescalesetextension.py | 20 | 10667 | #!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_virtualmachinescalesetextension
version_added: "2.8"
short_description: Manage Azure Virtual Machine Scale Set extension
description:
- Create, update and delete an Azure Virtual Machine Scale Set Extension
options:
resource_group:
description:
- Name of a resource group where the VMSS extension exists or will be created.
required: true
vmss_name:
description:
- The name of the virtual machine where the extension should be create or updated.
required: true
name:
description:
- Name of the VMSS extension
location:
description:
- Valid azure location. Defaults to location of the resource group.
publisher:
description:
- The name of the extension handler publisher.
type:
description:
- The type of the extension handler.
type_handler_version:
description:
- The type version of the extension handler.
settings:
description:
- A dictionary containing extension settings.
- Settings depend on extension type.
- Refer to U(https://docs.microsoft.com/en-us/azure/virtual-machines/extensions/overview) for more information.
protected_settings:
description:
- A dictionary containing protected extension settings.
- Settings depend on extension type.
- Refer to U(https://docs.microsoft.com/en-us/azure/virtual-machines/extensions/overview) for more information.
auto_upgrade_minor_version:
description:
- Whether the extension handler should be automatically upgraded across minor versions.
type: bool
state:
description:
- Assert the state of the extension.
- Use C(present) to create or update an extension and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
author:
- "Zim Kalinowski (@zikalino)"
'''
EXAMPLES = '''
- name: Install VMSS Extension
azure_rm_virtualmachinescalesetextension:
name: myvmssextension
location: eastus
resource_group: myResourceGroup
vmss_name: myvm
publisher: Microsoft.Azure.Extensions
type: CustomScript
type_handler_version: 2.0
settings: '{"commandToExecute": "hostname"}'
auto_upgrade_minor_version: true
- name: Remove VMSS Extension
azure_rm_virtualmachinescalesetextension:
name: myvmssextension
location: eastus
resource_group: myResourceGroup
vmss_name: myvm
state: absent
'''
RETURN = '''
id:
description:
- VMSS extension resource ID
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Compute/scalesets/myscaleset/extensions/myext
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMVMSSExtension(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
vmss_name=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
location=dict(
type='str'
),
publisher=dict(
type='str'
),
type=dict(
type='str'
),
type_handler_version=dict(
type='str'
),
auto_upgrade_minor_version=dict(
type='bool'
),
settings=dict(
type='dict'
),
protected_settings=dict(
type='dict'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
),
)
self.resource_group = None
self.name = None
self.location = None
self.publisher = None
self.type = None
self.type_handler_version = None
self.auto_upgrade_minor_version = None
self.settings = None
self.protected_settings = None
self.state = None
required_if = [
('state', 'present', [
'publisher', 'type', 'type_handler_version'])
]
self.results = dict(changed=False, state=dict())
super(AzureRMVMSSExtension, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_tags=False,
required_if=required_if)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
setattr(self, key, kwargs[key])
resource_group = None
response = None
to_be_updated = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
self.location = resource_group.location
if self.state == 'present':
response = self.get_vmssextension()
if not response:
to_be_updated = True
else:
if self.settings is not None:
if response.get('settings') != self.settings:
response['settings'] = self.settings
to_be_updated = True
else:
self.settings = response.get('settings')
if self.protected_settings is not None:
if response.get('protected_settings') != self.protected_settings:
response['protected_settings'] = self.protected_settings
to_be_updated = True
else:
self.protected_settings = response.get('protected_settings')
if response['publisher'] != self.publisher:
self.publisher = response['publisher']
self.module.warn("Property 'publisher' cannot be changed")
if response['type'] != self.type:
self.type = response['type']
self.module.warn("Property 'type' cannot be changed")
if response['type_handler_version'] != self.type_handler_version:
response['type_handler_version'] = self.type_handler_version
to_be_updated = True
if self.auto_upgrade_minor_version is not None:
if response['auto_upgrade_minor_version'] != self.auto_upgrade_minor_version:
response['auto_upgrade_minor_version'] = self.auto_upgrade_minor_version
to_be_updated = True
else:
self.auto_upgrade_minor_version = response['auto_upgrade_minor_version']
if to_be_updated:
if not self.check_mode:
response = self.create_or_update_vmssextension()
self.results['changed'] = True
elif self.state == 'absent':
if not self.check_mode:
self.delete_vmssextension()
self.results['changed'] = True
if response:
self.results['id'] = response.get('id')
return self.results
def create_or_update_vmssextension(self):
self.log("Creating VMSS extension {0}".format(self.name))
try:
params = self.compute_models.VirtualMachineScaleSetExtension(
location=self.location,
publisher=self.publisher,
type=self.type,
type_handler_version=self.type_handler_version,
auto_upgrade_minor_version=self.auto_upgrade_minor_version,
settings=self.settings,
protected_settings=self.protected_settings
)
poller = self.compute_client.virtual_machine_scale_set_extensions.create_or_update(resource_group_name=self.resource_group,
vm_scale_set_name=self.vmss_name,
vmss_extension_name=self.name,
extension_parameters=params)
response = self.get_poller_result(poller)
return response.as_dict()
except CloudError as e:
self.log('Error attempting to create the VMSS extension.')
self.fail("Error creating the VMSS extension: {0}".format(str(e)))
def delete_vmssextension(self):
self.log("Deleting vmextension {0}".format(self.name))
try:
poller = self.compute_client.virtual_machine_scale_set_extensions.delete(resource_group_name=self.resource_group,
vm_scale_set_name=self.vmss_name,
vmss_extension_name=self.name)
self.get_poller_result(poller)
except CloudError as e:
self.log('Error attempting to delete the vmextension.')
self.fail("Error deleting the vmextension: {0}".format(str(e)))
def get_vmssextension(self):
self.log("Checking if the VMSS extension {0} is present".format(self.name))
try:
response = self.compute_client.virtual_machine_scale_set_extensions.get(self.resource_group, self.vmss_name, self.name)
return response.as_dict()
except CloudError as e:
self.log('Did not find VMSS extension')
return False
def main():
AzureRMVMSSExtension()
if __name__ == '__main__':
main()
| gpl-3.0 |
Findspire/workflow | workflow/apps/workflow/tests.py | 1 | 19209 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
# Copyright (c) 2015 Findspire
from django.core.urlresolvers import reverse
from django.test import TestCase
from workflow.apps.workflow.models import Project, Workflow, ItemCategory, ItemModel, Item, Comment
class MiscTest(TestCase):
fixtures = ['auth_user']
def test_index_not_logged(self):
resp = self.client.get(reverse('workflow:index'))
self.assertEqual(resp.status_code, 302)
def test_index_logged(self):
resp = self.client.login(username='admin', password='admin')
self.assertEqual(resp, True)
resp = self.client.get(reverse('workflow:index'))
self.assertEqual(resp.status_code, 200)
class ProjectTest(TestCase):
fixtures = ['auth_user', 'team_all', 'workflow']
def setUp(self):
resp = self.client.login(username='admin', password='admin')
self.assertEqual(resp, True)
def test_project_create(self):
resp = self.client.get(reverse('workflow:project_new'))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'new project',
'team': 1,
'items': [1, 2, 3, 5],
}
resp = self.client.post(reverse('workflow:project_new'), data)
self.assertEqual(resp.status_code, 302)
project_count = Project.objects.filter(name='new project').count()
self.assertEqual(project_count, 1)
project_items = [item.pk for item in Project.objects.get(name='new project').items.all()]
self.assertEqual(project_items, [1, 2, 3, 5])
def test_project_edit(self):
project_pk = Project.objects.get(name='project 1').pk
resp = self.client.get(reverse('workflow:project_edit', args=[project_pk]))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'project 1 edited',
'team': 1,
'items': [1, 2, 3, 6],
}
resp = self.client.post(reverse('workflow:project_edit', args=[project_pk]), data)
self.assertEqual(resp.status_code, 302)
project_count = Project.objects.filter(name='project 1').count()
self.assertEqual(project_count, 0)
project_count = Project.objects.filter(name='project 1 edited').count()
self.assertEqual(project_count, 1)
project_items = [item.pk for item in Project.objects.get(name='project 1 edited').items.all()]
self.assertEqual(project_items, [1, 2, 3, 6])
def test_project_list(self):
resp = self.client.get(reverse('workflow:project_list'))
self.assertEqual(resp.status_code, 200)
project = Project.objects.get(name='project 1')
has_project = project in resp.context[-1]['projects'].keys()
self.assertEqual(has_project, True)
class WorkflowTest(TestCase):
fixtures = ['auth_user', 'team_all', 'workflow']
def setUp(self):
resp = self.client.login(username='admin', password='admin')
self.assertEqual(resp, True)
def test_workflow_create(self):
resp = self.client.get(reverse('workflow:workflow_new'))
self.assertEqual(resp.status_code, 200)
resp = self.client.get(reverse('workflow:workflow_new', args=[1]))
self.assertEqual(resp.status_code, 200)
data = {
'project': 1,
'version': 'new workflow',
}
resp = self.client.post(reverse('workflow:workflow_new'), data)
self.assertEqual(resp.status_code, 302)
workflow_count = Workflow.objects.filter(project__pk=1, version='new workflow').count()
self.assertEqual(workflow_count, 1)
project_items = [item.pk for item in Project.objects.get(pk=1).items.all()]
workflow = Workflow.objects.get(project__pk=1, version='new workflow')
workflow_items = [item.item_model.pk for item in Item.objects.filter(workflow=workflow)]
self.assertEqual(project_items, workflow_items)
def test_workflow_edit(self):
workflow_pk = Workflow.objects.get(project__pk=1, version='workflow 1').pk
resp = self.client.get(reverse('workflow:workflow_edit', args=[workflow_pk]))
self.assertEqual(resp.status_code, 200)
data = {
'project': 1,
'version': 'workflow 1 edited',
}
resp = self.client.post(reverse('workflow:workflow_edit', args=[workflow_pk]), data)
self.assertEqual(resp.status_code, 302)
workflow_count = Workflow.objects.filter(project__pk=1, version='workflow 1').count()
self.assertEqual(workflow_count, 0)
workflow_count = Workflow.objects.filter(project__pk=1, version='workflow 1 edited').count()
self.assertEqual(workflow_count, 1)
def test_workflow_list(self):
resp = self.client.get(reverse('workflow:project_list'))
self.assertEqual(resp.status_code, 200)
workflow = Workflow.objects.get(project__pk=1, version='workflow 1')
has_workflow = any([workflow in workflow_list for workflow_list in resp.context[-1]['projects'].values()])
self.assertEqual(has_workflow, True)
def test_workflow_show(self):
workflow_pk = Workflow.objects.get(project__pk=1, version='workflow 1').pk
resp = self.client.get(reverse('workflow:workflow_show', args=[workflow_pk, 'thisShouldRaiseA404']))
self.assertEqual(resp.status_code, 404)
for which_display in ('all', 'mine', 'untested', 'success', 'failed', 'untaken'):#, 'taken'):
resp = self.client.get(reverse('workflow:workflow_show', args=[workflow_pk, which_display]))
self.assertEqual(resp.status_code, 200)
itemmodel = ItemModel.objects.get(name='item model '+which_display)
item = Item.objects.get(item_model=itemmodel)
has_item = any([item in item_list for item_list in resp.context[-1]['items'].values()])
self.assertEqual(has_item, True)
def test_item_show(self):
resp = self.client.get(reverse('workflow:item_instance_show', args=[1]))
self.assertEqual(resp.status_code, 200)
# description
item = Item.objects.get(pk=1)
itemmodel = Item.objects.get(pk=1).item_model
self.assertEqual(itemmodel.description, '')
data = {
'type': 'description',
'description': 'Some new description',
}
resp = self.client.post(reverse('workflow:item_instance_show', args=[item.pk]), data)
self.assertEqual(resp.status_code, 302)
itemmodel = Item.objects.get(pk=1).item_model
self.assertEqual(itemmodel.description, 'Some new description')
# comment
data = {
'type': 'comment',
'item': 1,
'person': 1,
'text': 'This is a comment !'
}
resp = self.client.post(reverse('workflow:item_instance_show', args=[item.pk]), data)
self.assertEqual(resp.status_code, 302)
comment = Comment.objects.get(item=1, person=1, text='This is a comment !')
comments = Comment.objects.filter(item__pk=1)
self.assertEqual(comment in comments, True)
# comment - invalid form
data = {
'type': 'comment',
'item': 1,
'person': 1,
'text': ''
}
resp = self.client.post(reverse('workflow:item_instance_show', args=[item.pk]), data)
self.assertEqual(resp.status_code, 200)
# invalid form
data = {
'type': 'nothingExpected',
}
resp = self.client.post(reverse('workflow:item_instance_show', args=[item.pk]), data)
self.assertEqual(resp.status_code, 200)
def test_item_update(self):
resp = self.client.get(reverse('workflow:update', args=['all', 'someAction', 'thisShouldRaiseA404', 42]))
self.assertEqual(resp.status_code, 404)
# todo: more 404 tests ?
def test_item_update_item_take(self):
itemmodel = ItemModel.objects.get(name='item model untaken')
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.assigned_to, None)
resp = self.client.get(reverse('workflow:update', args=['all', 'take', 'item', item.pk]))
self.assertEqual(resp.status_code, 302)
item = Item.objects.get(item_model=itemmodel)
self.assertNotEqual(item.assigned_to, None)
class WorkflowTest_updateAjax(TestCase):
fixtures = ['auth_user', 'team_all', 'workflow']
def get_wrapper(self, *args, **kwargs):
"""Adds the header to make django think it's an Ajax request"""
return self.client.get_save(HTTP_X_REQUESTED_WITH='XMLHttpRequest', *args, **kwargs)
def setUp(self):
resp = self.client.login(username='admin', password='admin')
self.assertEqual(resp, True)
self.client.get_save = self.client.get
self.client.get = self.get_wrapper
def test_item_update_item_take(self):
itemmodel = ItemModel.objects.get(name='item model untaken')
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.assigned_to, None)
resp = self.client.get(reverse('workflow:update', args=['all', 'take', 'item', item.pk]))
self.assertEqual(resp.status_code, 200)
item = Item.objects.get(item_model=itemmodel)
self.assertNotEqual(item.assigned_to, None)
def test_item_update_item_untake(self):
itemmodel = ItemModel.objects.get(name='item model taken')
item = Item.objects.get(item_model=itemmodel)
self.assertNotEqual(item.assigned_to, None)
resp = self.client.get(reverse('workflow:update', args=['all', 'untake', 'item', item.pk]))
self.assertEqual(resp.status_code, 200)
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.assigned_to, None)
def test_item_update_item_404(self):
itemmodel = ItemModel.objects.get(name='item model 1')
item = Item.objects.get(item_model=itemmodel)
resp = self.client.get(reverse('workflow:update', args=['all', '404', 'item', item.pk]))
self.assertEqual(resp.status_code, 404)
def test_item_update_validate_success(self):
itemmodel = ItemModel.objects.get(name='item model untested')
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.validation, Item.VALIDATION_UNTESTED)
resp = self.client.get(reverse('workflow:update', args=['all', 'success', 'validate', item.pk]))
self.assertEqual(resp.status_code, 200)
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.validation, Item.VALIDATION_SUCCESS)
def test_item_update_validate_failed(self):
itemmodel = ItemModel.objects.get(name='item model untested')
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.validation, Item.VALIDATION_UNTESTED)
resp = self.client.get(reverse('workflow:update', args=['all', 'failed', 'validate', item.pk]))
self.assertEqual(resp.status_code, 200)
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.validation, Item.VALIDATION_FAILED)
def test_item_update_validate_untested(self):
itemmodel = ItemModel.objects.get(name='item model success')
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.validation, Item.VALIDATION_SUCCESS)
resp = self.client.get(reverse('workflow:update', args=['all', 'untested', 'validate', item.pk]))
self.assertEqual(resp.status_code, 200)
item = Item.objects.get(item_model=itemmodel)
self.assertEqual(item.validation, Item.VALIDATION_UNTESTED)
def test_item_update_validate_404(self):
itemmodel = ItemModel.objects.get(name='item model 1')
item = Item.objects.get(item_model=itemmodel)
resp = self.client.get(reverse('workflow:update', args=['all', '404', 'validate', item.pk]))
self.assertEqual(resp.status_code, 404)
def test_item_update_category_take(self):
workflow_pk = 1
category = ItemCategory.objects.get(pk=4)
def get_items():
return Item.objects.filter(workflow__pk=workflow_pk, item_model__category=category)
# check there are a few items in this category
items = get_items()
self.assertEqual(len(items) not in [0, 1], True)
# check a least one item in the category is untaken
items_untaken = get_items().filter(assigned_to=None)
self.assertNotEqual(len(items_untaken), 0)
# take all items
resp = self.client.get(reverse('workflow:update', args=['all', 'take', 'category', category.pk, workflow_pk]))
self.assertEqual(resp.status_code, 200)
# check all are taken
items_untaken = get_items().filter(assigned_to=None)
self.assertEqual(len(items_untaken), 0)
def test_item_update_category_untake(self):
workflow_pk = 1
category = ItemCategory.objects.get(pk=3)
def get_items():
return Item.objects.filter(workflow__pk=workflow_pk, item_model__category=category)
# check there are a few items in this category
items = get_items()
self.assertEqual(len(items) not in [0, 1], True)
# check a least one item in the category is taken
items_taken = get_items().exclude(assigned_to=None)
self.assertNotEqual(len(items_taken), 0)
# untake all items
resp = self.client.get(reverse('workflow:update', args=['all', 'untake', 'category', category.pk, workflow_pk]))
self.assertEqual(resp.status_code, 200)
# check all are untaken
items_taken = get_items().exclude(assigned_to=None)
self.assertEqual(len(items_taken), 0)
def test_item_update_category_show(self):
workflow_pk = 1
category = ItemCategory.objects.get(pk=4)
resp = self.client.get(reverse('workflow:update', args=['all', 'show', 'category', category.pk, workflow_pk]))
self.assertEqual(resp.status_code, 200)
def test_item_update_category_404(self):
workflow_pk = 1
category = ItemCategory.objects.get(pk=4)
resp = self.client.get(reverse('workflow:update', args=['all', '404', 'category', category.pk, workflow_pk]))
self.assertEqual(resp.status_code, 404)
class ItemModelTest(TestCase):
fixtures = ['auth_user', 'team_all', 'workflow']
def setUp(self):
resp = self.client.login(username='admin', password='admin')
self.assertEqual(resp, True)
def test_item_model_create(self):
# get with initial cat
resp = self.client.get(reverse('workflow:item_model_new'), args=[1])
self.assertEqual(resp.status_code, 200)
# get without initial cat
resp = self.client.get(reverse('workflow:item_model_new'))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'new item model',
'category': 1,
'description': 'Some description',
}
resp = self.client.post(reverse('workflow:item_model_new'), data)
self.assertEqual(resp.status_code, 302)
person_count = ItemModel.objects.filter(name='new item model').count()
self.assertEqual(person_count, 1)
def test_item_model_create_from_workflow(self):
# get with initial cat
resp = self.client.get(reverse('workflow:item_model_add_to_workcat', args=[1, 1]))
self.assertEqual(resp.status_code, 200)
# get without initial cat
resp = self.client.get(reverse('workflow:item_model_add_to_workflow', args=[1]))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'new item model',
'category': 1,
'description': 'Some description',
}
resp = self.client.post(reverse('workflow:item_model_add_to_workflow', args=[1]), data)
self.assertEqual(resp.status_code, 302)
person_count = ItemModel.objects.filter(name='new item model').count()
self.assertEqual(person_count, 1)
def test_item_model_edit(self):
item_pk = ItemModel.objects.get(name='item model 1').pk
resp = self.client.get(reverse('workflow:item_model_edit', args=[item_pk]))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'item model 1 edited',
'category': 1,
'description': 'Some description',
}
resp = self.client.post(reverse('workflow:item_model_edit', args=[item_pk]), data)
self.assertEqual(resp.status_code, 302)
item_count = ItemModel.objects.filter(name='item model 1').count()
self.assertEqual(item_count, 0)
item_count = ItemModel.objects.filter(name='item model 1 edited').count()
self.assertEqual(item_count, 1)
def test_item_model_list(self):
resp = self.client.get(reverse('workflow:item_model_list'))
self.assertEqual(resp.status_code, 200)
item = ItemModel.objects.get(name='item model 1')
self.assertEqual(any([item in item_list for item_list in resp.context[-1]['object_list'].values()]), True)
class ItemCategoryTest(TestCase):
fixtures = ['auth_user', 'team_all', 'workflow']
def setUp(self):
resp = self.client.login(username='admin', password='admin')
self.assertEqual(resp, True)
def test_item_category_create(self):
resp = self.client.get(reverse('workflow:item_category_new'))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'item category new',
}
resp = self.client.post(reverse('workflow:item_category_new'), data)
self.assertEqual(resp.status_code, 302)
items_count = ItemCategory.objects.filter(name='item category new').count()
self.assertEqual(items_count, 1)
def test_item_category_create_to_workflow(self):
resp = self.client.get(reverse('workflow:item_category_new', args=[1]))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'item category new',
}
resp = self.client.post(reverse('workflow:item_category_new', args=[1]), data)
self.assertEqual(resp.status_code, 302)
items_count = ItemCategory.objects.filter(name='item category new').count()
self.assertEqual(items_count, 1)
# todo : assert workflow has the category
def test_item_category_edit(self):
item_pk = ItemCategory.objects.get(name='item category 1').pk
resp = self.client.get(reverse('workflow:item_category_edit', args=[item_pk]))
self.assertEqual(resp.status_code, 200)
data = {
'name': 'item category 1 edited',
}
resp = self.client.post(reverse('workflow:item_category_edit', args=[item_pk]), data)
self.assertEqual(resp.status_code, 302)
item_count = ItemCategory.objects.filter(name='item category 1').count()
self.assertEqual(item_count, 0)
item_count = ItemCategory.objects.filter(name='item category 1 edited').count()
self.assertEqual(item_count, 1)
| mit |
BenjamenMeyer/openstackinabox | openstackinabox/tests/models/db/test_base.py | 2 | 1380 | import ddt
from openstackinabox.tests.base import TestBase
from openstackinabox.models import base_db
@ddt.ddt
class TestModelBaseDb(TestBase):
def setUp(self):
super(TestModelBaseDb, self).setUp()
self.model = base_db.ModelDbBase
def tearDown(self):
super(TestModelBaseDb, self).tearDown()
def test_model_initialization(self):
name = 'Ra'
master = 'Omicron'
db = 'Heroditus'
instance = self.model(name, master, db)
self.assertEqual(name, instance.name)
self.assertEqual(master, instance.master)
self.assertEqual(db, instance.database)
@ddt.data(
(1, True),
(0, False)
)
@ddt.unpack
def test_bool_from_database(self, value, expected_value):
self.assertEqual(
self.model.bool_from_database(value),
expected_value
)
@ddt.data(
(True, 1),
(False, 0)
)
@ddt.unpack
def test_bool_to_database(self, value, expected_value):
self.assertEqual(
self.model.bool_to_database(value),
expected_value
)
def test_initialize_model(self):
with self.assertRaises(NotImplementedError):
instance = self.model(
'Ulysses',
'S',
'Grant'
)
instance.initialize()
| apache-2.0 |
zhangqi007/ZQ | Tools/autotest/common.py | 142 | 9612 | import util, pexpect, time, math
from pymavlink import mavwp
# a list of pexpect objects to read while waiting for
# messages. This keeps the output to stdout flowing
expect_list = []
def expect_list_clear():
'''clear the expect list'''
global expect_list
for p in expect_list[:]:
expect_list.remove(p)
def expect_list_extend(list):
'''extend the expect list'''
global expect_list
expect_list.extend(list)
def idle_hook(mav):
'''called when waiting for a mavlink message'''
global expect_list
for p in expect_list:
util.pexpect_drain(p)
def message_hook(mav, msg):
'''called as each mavlink msg is received'''
idle_hook(mav)
def expect_callback(e):
'''called when waiting for a expect pattern'''
global expect_list
for p in expect_list:
if p == e:
continue
util.pexpect_drain(p)
def get_distance(loc1, loc2):
'''get ground distance between two locations'''
dlat = loc2.lat - loc1.lat
dlong = loc2.lng - loc1.lng
# flat-earth approximation: 1.113195e5 is roughly the number of metres
# per degree of latitude (2*pi*R_earth / 360)
return math.sqrt((dlat*dlat) + (dlong*dlong)) * 1.113195e5
def get_bearing(loc1, loc2):
'''get bearing from loc1 to loc2'''
off_x = loc2.lng - loc1.lng
off_y = loc2.lat - loc1.lat
# atan2 measures the angle from east, counter-clockwise; convert it to a
# compass bearing in degrees (0 = north, clockwise)
bearing = 90.00 + math.atan2(-off_y, off_x) * 57.2957795
if bearing < 0:
bearing += 360.00
return bearing
def wait_seconds(mav, seconds_to_wait):
tstart = get_sim_time(mav)
tnow = tstart
while tstart + seconds_to_wait > tnow:
tnow = get_sim_time(mav)
def get_sim_time(mav):
m = mav.recv_match(type='SYSTEM_TIME', blocking=True)
return m.time_boot_ms * 1.0e-3
def wait_altitude(mav, alt_min, alt_max, timeout=30):
'''wait for a given altitude range'''
climb_rate = 0
previous_alt = 0
tstart = get_sim_time(mav)
print("Waiting for altitude between %u and %u" % (alt_min, alt_max))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
climb_rate = m.alt - previous_alt
previous_alt = m.alt
print("Wait Altitude: Cur:%u, min_alt:%u, climb_rate: %u" % (m.alt, alt_min , climb_rate))
if m.alt >= alt_min and m.alt <= alt_max:
print("Altitude OK")
return True
print("Failed to attain altitude range")
return False
def wait_groundspeed(mav, gs_min, gs_max, timeout=30):
'''wait for a given ground speed range'''
tstart = get_sim_time(mav)
print("Waiting for groundspeed between %.1f and %.1f" % (gs_min, gs_max))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
print("Wait groundspeed %.1f, target:%.1f" % (m.groundspeed, gs_min))
if m.groundspeed >= gs_min and m.groundspeed <= gs_max:
return True
print("Failed to attain groundspeed range")
return False
def wait_roll(mav, roll, accuracy, timeout=30):
'''wait for a given roll in degrees'''
tstart = get_sim_time(mav)
print("Waiting for roll of %d at %s" % (roll, time.ctime()))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='ATTITUDE', blocking=True)
p = math.degrees(m.pitch)
r = math.degrees(m.roll)
print("Roll %d Pitch %d" % (r, p))
if math.fabs(r - roll) <= accuracy:
print("Attained roll %d" % roll)
return True
print("Failed to attain roll %d" % roll)
return False
def wait_pitch(mav, pitch, accuracy, timeout=30):
'''wait for a given pitch in degrees'''
tstart = get_sim_time(mav)
print("Waiting for pitch of %u at %s" % (pitch, time.ctime()))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='ATTITUDE', blocking=True)
p = math.degrees(m.pitch)
r = math.degrees(m.roll)
print("Pitch %d Roll %d" % (p, r))
if math.fabs(p - pitch) <= accuracy:
print("Attained pitch %d" % pitch)
return True
print("Failed to attain pitch %d" % pitch)
return False
def wait_heading(mav, heading, accuracy=5, timeout=30):
'''wait for a given heading'''
tstart = get_sim_time(mav)
print("Waiting for heading %u with accuracy %u" % (heading, accuracy))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
print("Heading %u" % m.heading)
if math.fabs(m.heading - heading) <= accuracy:
print("Attained heading %u" % heading)
return True
print("Failed to attain heading %u" % heading)
return False
def wait_distance(mav, distance, accuracy=5, timeout=30):
'''wait for flight of a given distance'''
tstart = get_sim_time(mav)
start = mav.location()
while get_sim_time(mav) < tstart + timeout:
pos = mav.location()
delta = get_distance(start, pos)
print("Distance %.2f meters" % delta)
if math.fabs(delta - distance) <= accuracy:
print("Attained distance %.2f meters OK" % delta)
return True
if delta > (distance + accuracy):
print("Failed distance - overshoot delta=%f distance=%f" % (delta, distance))
return False
print("Failed to attain distance %u" % distance)
return False
def wait_location(mav, loc, accuracy=5, timeout=30, target_altitude=None, height_accuracy=-1):
'''wait for arrival at a location'''
tstart = get_sim_time(mav)
if target_altitude is None:
target_altitude = loc.alt
print("Waiting for location %.4f,%.4f at altitude %.1f height_accuracy=%.1f" % (
loc.lat, loc.lng, target_altitude, height_accuracy))
while get_sim_time(mav) < tstart + timeout:
pos = mav.location()
delta = get_distance(loc, pos)
print("Distance %.2f meters alt %.1f" % (delta, pos.alt))
if delta <= accuracy:
if height_accuracy != -1 and math.fabs(pos.alt - target_altitude) > height_accuracy:
continue
print("Reached location (%.2f meters)" % delta)
return True
print("Failed to attain location")
return False
def wait_waypoint(mav, wpnum_start, wpnum_end, allow_skip=True, max_dist=2, timeout=400, mode=None):
'''wait for waypoint ranges'''
tstart = get_sim_time(mav)
# this message arrives after we set the current WP
start_wp = mav.waypoint_current()
current_wp = start_wp
print("\ntest: wait for waypoint ranges start=%u end=%u\n\n" % (wpnum_start, wpnum_end))
# if start_wp != wpnum_start:
# print("test: Expected start waypoint %u but got %u" % (wpnum_start, start_wp))
# return False
while get_sim_time(mav) < tstart + timeout:
seq = mav.waypoint_current()
m = mav.recv_match(type='NAV_CONTROLLER_OUTPUT', blocking=True)
wp_dist = m.wp_dist
m = mav.recv_match(type='VFR_HUD', blocking=True)
# if we exited the required mode, finish
if mode is not None and mav.flightmode != mode:
print('Exited %s mode' % mode)
return True
print("test: WP %u (wp_dist=%u Alt=%d), current_wp: %u, wpnum_end: %u" % (seq, wp_dist, m.alt, current_wp, wpnum_end))
if seq == current_wp+1 or (seq > current_wp+1 and allow_skip):
print("test: Starting new waypoint %u" % seq)
tstart = get_sim_time(mav)
current_wp = seq
# the wp_dist check is a hack until we can sort out the right seqnum
# for end of mission
#if current_wp == wpnum_end or (current_wp == wpnum_end-1 and wp_dist < 2):
if (current_wp == wpnum_end and wp_dist < max_dist):
print("Reached final waypoint %u" % seq)
return True
if (seq >= 255):
print("Reached final waypoint %u" % seq)
return True
if seq > current_wp+1:
print("Failed: Skipped waypoint! Got wp %u expected %u" % (seq, current_wp+1))
return False
print("Failed: Timed out waiting for waypoint %u of %u" % (wpnum_end, wpnum_end))
return False
def save_wp(mavproxy, mav):
mavproxy.send('rc 7 1000\n')
mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True)
wait_seconds(mav, 1)
mavproxy.send('rc 7 2000\n')
mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==2000', blocking=True)
wait_seconds(mav, 1)
mavproxy.send('rc 7 1000\n')
mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True)
wait_seconds(mav, 1)
def wait_mode(mav, mode, timeout=None):
print("Waiting for mode %s" % mode)
mav.recv_match(condition='MAV.flightmode.upper()=="%s".upper()' % mode, timeout=timeout, blocking=True)
print("Got mode %s" % mode)
return mav.flightmode
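# The condition string is evaluated by pymavlink as messages arrive; e.g.
# wait_mode(mav, 'loiter') blocks until MAV.flightmode compares equal to
# "LOITER", since both sides of the condition are upper-cased.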
def mission_count(filename):
'''load a mission from a file and return number of waypoints'''
wploader = mavwp.MAVWPLoader()
wploader.load(filename)
num_wp = wploader.count()
return num_wp
def sim_location(mav):
'''return current simulator location'''
from pymavlink import mavutil
m = mav.recv_match(type='SIMSTATE', blocking=True)
return mavutil.location(m.lat*1.0e-7, m.lng*1.0e-7, 0, math.degrees(m.yaw))
def log_download(mavproxy, mav, filename, timeout=360):
'''download latest log'''
mavproxy.send("log list\n")
mavproxy.expect("numLogs")
mav.wait_heartbeat()
mav.wait_heartbeat()
mavproxy.send("log download latest %s\n" % filename)
mavproxy.expect("Finished downloading", timeout=timeout)
mav.wait_heartbeat()
mav.wait_heartbeat()
return True
| gpl-3.0 |
Azure/azure-sdk-for-python | sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/aio/operations/_disaster_recovery_configs_operations.py | 1 | 36795 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DisasterRecoveryConfigsOperations:
"""DisasterRecoveryConfigsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.eventhub.v2017_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_authorization_rules(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
**kwargs: Any
) -> AsyncIterable["_models.AuthorizationRuleListResult"]:
"""Gets a list of authorization rules for a Namespace.
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AuthorizationRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2017_04_01.models.AuthorizationRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_authorization_rules.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('AuthorizationRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules'} # type: ignore
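# Hypothetical usage sketch (client construction elided; resource names are
# placeholders):
#   async for rule in ops.list_authorization_rules("my-rg", "my-ns", "my-alias"):
#       print(rule.name)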
async def get_authorization_rule(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
authorization_rule_name: str,
**kwargs: Any
) -> "_models.AuthorizationRule":
"""Gets an AuthorizationRule for a Namespace by rule name.
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:param authorization_rule_name: The authorization rule name.
:type authorization_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AuthorizationRule, or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2017_04_01.models.AuthorizationRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.get_authorization_rule.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('AuthorizationRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules/{authorizationRuleName}'} # type: ignore
async def list_keys(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
authorization_rule_name: str,
**kwargs: Any
) -> "_models.AccessKeys":
"""Gets the primary and secondary connection strings for the Namespace.
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:param authorization_rule_name: The authorization rule name.
:type authorization_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AccessKeys, or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2017_04_01.models.AccessKeys
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.list_keys.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('AccessKeys', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules/{authorizationRuleName}/listKeys'} # type: ignore
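# Hypothetical usage sketch (all names are placeholders):
#   keys = await ops.list_keys("my-rg", "my-ns", "my-alias", "RootManageSharedAccessKey")
# after which the returned AccessKeys model carries the primary and secondary
# connection strings described in the docstring above.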
async def check_name_availability(
self,
resource_group_name: str,
namespace_name: str,
parameters: "_models.CheckNameAvailabilityParameter",
**kwargs: Any
) -> "_models.CheckNameAvailabilityResult":
"""Check the give Namespace name availability.
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param parameters: Parameters to check availability of the given Alias name.
:type parameters: ~azure.mgmt.eventhub.v2017_04_01.models.CheckNameAvailabilityParameter
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult, or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2017_04_01.models.CheckNameAvailabilityResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.check_name_availability.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CheckNameAvailabilityParameter')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/checkNameAvailability'} # type: ignore
def list(
self,
resource_group_name: str,
namespace_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ArmDisasterRecoveryListResult"]:
"""Gets all Alias(Disaster Recovery configurations).
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ArmDisasterRecoveryListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2017_04_01.models.ArmDisasterRecoveryListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ArmDisasterRecoveryListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ArmDisasterRecoveryListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs'} # type: ignore
async def create_or_update(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
parameters: "_models.ArmDisasterRecovery",
**kwargs: Any
) -> Optional["_models.ArmDisasterRecovery"]:
"""Creates or updates a new Alias(Disaster Recovery configuration).
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:param parameters: Parameters required to create an Alias(Disaster Recovery configuration).
:type parameters: ~azure.mgmt.eventhub.v2017_04_01.models.ArmDisasterRecovery
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ArmDisasterRecovery, or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2017_04_01.models.ArmDisasterRecovery or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ArmDisasterRecovery"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ArmDisasterRecovery')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore
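# Hypothetical pairing sketch: the alias is created by pointing at the partner
# namespace's ARM id (the resource names and the id variable are placeholders):
#   params = _models.ArmDisasterRecovery(partner_namespace=secondary_namespace_id)
#   await ops.create_or_update("my-rg", "my-primary-ns", "my-alias", params)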
async def delete(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
**kwargs: Any
) -> None:
"""Deletes an Alias(Disaster Recovery configuration).
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore
async def get(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
**kwargs: Any
) -> "_models.ArmDisasterRecovery":
"""Retrieves Alias(Disaster Recovery configuration) for primary or secondary namespace.
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ArmDisasterRecovery, or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2017_04_01.models.ArmDisasterRecovery
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ArmDisasterRecovery"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore
async def break_pairing(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
**kwargs: Any
) -> None:
"""This operation disables the Disaster Recovery and stops replicating changes from primary to
secondary namespaces.
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.break_pairing.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
break_pairing.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/breakPairing'} # type: ignore
async def fail_over(
self,
resource_group_name: str,
namespace_name: str,
alias: str,
**kwargs: Any
) -> None:
"""Invokes GEO DR failover and reconfigure the alias to point to the secondary namespace.
:param resource_group_name: Name of the resource group within the azure subscription.
:type resource_group_name: str
:param namespace_name: The Namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.fail_over.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
fail_over.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/failover'} # type: ignore
| mit |
atwoz/vimeko | sources_non_forked/ultisnips/pythonx/UltiSnips/text.py | 29 | 2128 | #!/usr/bin/env python
# encoding: utf-8
"""Utilities to deal with text."""
def unescape(text):
"""Removes '\\' escaping from 'text'."""
rv = ''
i = 0
while i < len(text):
if i + 1 < len(text) and text[i] == '\\':
rv += text[i + 1]
i += 1
else:
rv += text[i]
i += 1
return rv
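# e.g. unescape(r"\$foo") == "$foo": each backslash is dropped and the
# character following it is kept verbatim.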
def escape(text, chars):
"""Escapes all characters in 'chars' in text using backspaces."""
rv = ''
for char in text:
if char in chars:
rv += '\\'
rv += char
return rv
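# e.g. escape("a$b", "$") == r"a\$b"; characters not listed in 'chars' pass
# through unchanged.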
def fill_in_whitespace(text):
"""Returns 'text' with escaped whitespace replaced through whitespaces."""
text = text.replace(r"\n", '\n')
text = text.replace(r"\t", '\t')
text = text.replace(r"\r", '\r')
text = text.replace(r"\a", '\a')
text = text.replace(r"\b", '\b')
return text
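# e.g. fill_in_whitespace(r"a\tb\n") yields "a", a real tab, "b", and a real
# newline; only the escape sequences listed above are substituted.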
def head_tail(line):
"""Returns the first word in 'line' and the rest of 'line' or None if the
line is too short."""
generator = (t.strip() for t in line.split(None, 1))
head = next(generator).strip()
tail = ''
try:
tail = next(generator).strip()
except StopIteration:
pass
return head, tail
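# e.g. head_tail("snippet foo bar") == ("snippet", "foo bar"), while a
# one-word line yields ("word", "").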
class LineIterator(object):
"""Convenience class that keeps track of line numbers in files."""
def __init__(self, text):
self._line_index = -1
self._lines = list(text.splitlines(True))
def __iter__(self):
return self
def __next__(self):
"""Returns the next line."""
if self._line_index + 1 < len(self._lines):
self._line_index += 1
return self._lines[self._line_index]
raise StopIteration()
next = __next__ # for python2
@property
def line_index(self):
"""The 1 based line index in the current file."""
return self._line_index + 1
def peek(self):
"""Returns the next line (if there is any, otherwise None) without
advancing the iterator."""
try:
return self._lines[self._line_index + 1]
except IndexError:
return None
| mit |
google/in-silico-labeling | isl/util.py | 1 | 10259 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions and globals."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
# pylint: disable=g-bad-import-order
import numpy as np
import tensorflow as tf
from typing import Callable, Dict, List, Tuple, Union
import cv2
gfile = tf.gfile
logging = tf.logging
lt = tf.contrib.labeled_tensor
slim = tf.contrib.slim
# The standard axis order for Seeing More ground truth tensors.
# These represent data read from disk, before going through the model.
CANONICAL_AXIS_ORDER = ['batch', 'row', 'column', 'z', 'channel', 'mask']
# The standard axis order for Seeing More prediction tensors.
# These represent data output from the model; the values are either logits
# or probabilities.
CANONICAL_PREDICTION_AXIS_ORDER = [
'batch', 'row', 'column', 'z', 'channel', 'class'
]
# The standard axis order for Seeing More summary statistic tensors.
# The are currently only created by ops.distribution_statistics.
CANONICAL_STATISTIC_AXIS_ORDER = [
'batch', 'row', 'column', 'z', 'channel', 'statistic'
]
def read_image(path: str) -> np.ndarray:
"""Reads a 16-bit grayscale image and converts to floating point."""
logging.info('Reading image: %s', path)
image = cv2.imread(path, cv2.IMREAD_ANYDEPTH)
assert image is not None
assert len(image.shape) == 2, image.shape
assert image.dtype == np.uint8 or image.dtype == np.uint16
image = image.astype(np.float32) / np.iinfo(image.dtype).max
assert image.min() >= 0, image.min()
assert image.max() <= 1.0, image.max()
return image
def write_image(path: str, image: np.ndarray):
"""Writes the image to disk."""
image = (image * np.iinfo(np.uint16).max).astype(np.uint16)
if len(image.shape) == 3:
image = np.stack([image[:, :, 2], image[:, :, 1], image[:, :, 0]], axis=2)
logging.info('Writing image: %s', path)
cv2.imwrite(path, image)
def image_size(directory: str) -> Tuple[int, int]:
"""Get the dimensions of the images in the directory."""
png_paths = []
for f in gfile.ListDirectory(directory):
path = os.path.join(directory, f)
if gfile.Exists(path) and os.path.splitext(f)[1] == '.png':
png_paths.append(os.path.join(directory, f))
if not png_paths:
raise ValueError('No pngs in %s' % directory)
image = read_image(png_paths[0])
return image.shape[0], image.shape[1]
def slice_1(x):
"""Given x return slice(x, x)."""
return slice(x, x)
class BatchParameters(object):
"""Convenience class for standard batch parameters."""
def __init__(self, size: int, num_threads: int, capacity: int):
self.size = size
self.num_threads = num_threads
self.capacity = capacity
def onehot(
num_classes: int,
labeled_tensor: lt.LabeledTensor,
name: str = None,
) -> lt.LabeledTensor:
"""Gets the one-hot encoding of rounded values in [0.0, 1.0].
See slim.one_hot_encoding.
Args:
num_classes: The number of classes in the encoding.
labeled_tensor: The input tensor.
name: Optional op name.
Returns:
A tensor, with the same axes as the input, plus a new axis "class", which
has size num_classes.
The classes are computed by dividing the unit interval into num_classes
bins.
"""
with tf.name_scope(name, 'onehot', [labeled_tensor]) as scope:
reshape_op = tf.reshape(labeled_tensor.tensor, [-1])
categorical_op = tf.to_int64(tf.round(reshape_op * (num_classes - 1)))
onehot_op = slim.one_hot_encoding(categorical_op, num_classes)
onehot_op = tf.reshape(
onehot_op,
labeled_tensor.tensor.shape.as_list() + [num_classes],
name=scope)
axes = list(labeled_tensor.axes.values()) + [('class', num_classes)]
return lt.LabeledTensor(onehot_op, axes)
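# Worked example (hypothetical values): with num_classes == 4, an input value
# of 0.34 is binned as round(0.34 * 3) == 1, i.e. the one-hot row
# [0, 1, 0, 0] along the new "class" axis.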
def crop_center(
size: int,
input_lt: lt.LabeledTensor,
name: str = None,
) -> lt.LabeledTensor:
"""Center crop the 'row' and 'column' axes.
Args:
size: The width and height of the cropped region.
input_lt: The input tensor.
name: Optional op name.
Returns:
The center cropped tensor.
"""
with tf.name_scope(name, 'crop_center', [input_lt]) as scope:
num_rows = len(input_lt.axes['row'])
num_columns = len(input_lt.axes['column'])
assert (num_rows - size) % 2 == 0
assert (num_columns - size) % 2 == 0
row_offset = (num_rows - size) // 2
column_offset = (num_columns - size) // 2
return lt.slice(
input_lt, {
'row': slice(row_offset, num_rows - row_offset),
'column': slice(column_offset, num_columns - column_offset)
},
name=scope)
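# e.g. cropping 28x28 'row'/'column' axes to size == 24 keeps rows and columns
# [2, 26), since the offset on each side is (28 - 24) // 2 == 2.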
# TODO(ericmc): Remove this when the core graph ops are rewritten to use
# LabeledTensor.
def crop_center_unlabeled(
size: int,
input_op: tf.Tensor,
name: str = None,
) -> tf.Tensor:
"""Applies crop_center to an unlabeled tensor."""
input_lt = lt.LabeledTensor(input_op, ['batch', 'row', 'column', 'channel'])
crop_lt = crop_center(size, input_lt, name=name)
return crop_lt.tensor
def pad_constant(
labeled_tensor: lt.LabeledTensor,
paddings: Dict[str, Tuple[object, object]],
value: Union[int, float],
name: str = None,
) -> lt.LabeledTensor:
"""Pads a tensor with a constant scalar value.
See tf.pad and lt.pad.
Args:
labeled_tensor: The input tensor.
paddings: A mapping where the keys are axis names and the values are
tuples where the first element is the padding to insert at the beginning
of the axis and the second is the padding to insert at the end of the
axis.
value: The scalar value to pad with.
name: Optional op name.
Returns:
A tensor with the indicated axes padded with the scalar `value`.
"""
with tf.name_scope(name, 'pad_constant', [labeled_tensor]) as scope:
# lt.pad always pads with zero; the correction below fills in `value`.
zero_padded_lt = lt.pad(labeled_tensor, paddings, 'CONSTANT')
# Construct a tensor that has zeros on the interior and value `value` in
# the padded regions.
# TODO(ericmc): This should probably be directly supported by
# core Tensorflow op.
scalar_lt = lt.ones_like(labeled_tensor) * (-value)
scalar_lt = lt.pad(scalar_lt, paddings, 'CONSTANT')
scalar_lt += value
return lt.add(zero_padded_lt, scalar_lt, name=scope)
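# Worked example (hypothetical shapes): padding one element on each side of
# 'row' with value == 5 leaves interior entries unchanged ((-5 + 5) cancels)
# and sets each new border entry to 0 + 5 == 5.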
def entry_point_batch(
input_lts: List[lt.LabeledTensor],
bp: BatchParameters,
enqueue_many: bool,
entry_point_names: List[str],
name: str = None,
) -> List[lt.LabeledTensor]:
"""Wraps lt.batch with C++ entry points.
The original and rebatched tensors are given op names derived from
`entry_point_names`.
All entry point names begin with 'entry_point'.
Args:
input_lts: The input tensors.
bp: BatchParameters.
enqueue_many: Batch parameter.
entry_point_names: The names to give to each of the tensors.
name: Optional batch op name.
Returns:
The rebatched inputs.
"""
assert len(input_lts) == len(entry_point_names)
with tf.name_scope(''):
input_lts = [
lt.identity(t, name='entry_point_%s_pre_batch' % n)
for (t, n) in zip(input_lts, entry_point_names)
]
batch_lts = lt.batch(
input_lts,
batch_size=bp.size,
num_threads=bp.num_threads,
capacity=bp.capacity,
enqueue_many=enqueue_many,
name=name)
with tf.name_scope(''):
batch_lts = [
lt.identity(t, name='entry_point_%s_post_batch' % n)
for (t, n) in zip(batch_lts, entry_point_names)
]
return batch_lts
def softmax_cross_entropy(target_lt: lt.LabeledTensor,
mask_lt: lt.LabeledTensor,
predicted_lt: lt.LabeledTensor,
name: str = None) -> lt.LabeledTensor:
"""Rescaled sparse softmax cross entropy."""
with tf.name_scope(name, 'softmax_cross_entropy',
[target_lt, mask_lt, predicted_lt]) as scope:
target_lt = lt.transpose(target_lt, ['batch'])
mask_lt = lt.transpose(mask_lt, ['batch'])
predicted_lt = lt.transpose(predicted_lt, ['batch', 'class'])
num_classes = len(predicted_lt.axes['class'])
target_op = tf.to_int32(tf.round(target_lt.tensor * (num_classes - 1)))
loss_op = tf.losses.sparse_softmax_cross_entropy(
logits=predicted_lt, labels=target_op, weights=mask_lt)
# Scale the cross-entropy loss so that 0.0 remains perfect, and 1.0
# is the loss incurred by a uniform predictor.
# Any loss greater than 1.0 would therefore be a really bad sign.
loss_op /= -1.0 * math.log(1.0 / num_classes)
return lt.identity(lt.LabeledTensor(loss_op, []), name=scope)
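# Worked rescaling example: with num_classes == 2 a uniform predictor incurs
# cross entropy -log(1/2), so dividing by -log(1/num_classes) maps that
# baseline to exactly 1.0 while a perfect predictor stays at 0.0.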
def restore_model(
restore_directory: str,
restore_logits: bool,
restore_global_step: bool = False) -> Callable[[tf.Session], Callable]:
"""Creates a function to restore model parameters."""
logging.info('Restoring model from %s', restore_directory)
latest_checkpoint = tf.train.latest_checkpoint(restore_directory)
logging.info('Restore model checkpoint: %s', latest_checkpoint)
all_variables = slim.get_model_variables()
def filter_logits(v):
if restore_logits:
return True
else:
return 'head' not in v.name
variables_to_restore = [v for v in all_variables if filter_logits(v)]
if restore_global_step:
variables_to_restore.append(tf.train.get_or_create_global_step())
for v in variables_to_restore:
logging.info('Variable to restore: %s', (v.name, v.dtype))
restorer = tf.train.Saver(variables_to_restore)
return lambda sess: restorer.restore(sess, latest_checkpoint)
| apache-2.0 |
notriddle/servo | tests/wpt/web-platform-tests/tools/lint/lint.py | 4 | 34049 | from __future__ import print_function, unicode_literals
import abc
import argparse
import ast
import json
import logging
import os
import re
import subprocess
import sys
import tempfile
from collections import defaultdict
from . import fnmatch
from . import rules
from .. import localpaths
from ..gitignore.gitignore import PathFilter
from ..wpt import testfiles
from ..manifest.vcs import walk
from ..manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars, get_any_variants, get_default_any_variants
from six import binary_type, iteritems, itervalues, with_metaclass
from six.moves import range
from six.moves.urllib.parse import urlsplit, urljoin
MYPY = False
if MYPY:
# MYPY is set to True when run under Mypy.
from typing import Any
from typing import Dict
from typing import IO
from typing import Iterable
from typing import List
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Text
from typing import Tuple
from typing import Type
from typing import Union
Whitelist = Dict[Text, Dict[Text, Set[Optional[int]]]]
logger = None # type: Optional[logging.Logger]
def setup_logging(prefix=False):
# type: (bool) -> None
global logger
if logger is None:
logger = logging.getLogger(os.path.basename(os.path.splitext(__file__)[0]))
handler = logging.StreamHandler(sys.stdout) # type: logging.Handler
# Only add a handler if the parent logger is missing a handler
parent = logger.parent
assert isinstance(parent, logging.Logger)
if parent and len(parent.handlers) == 0:
handler = logging.StreamHandler(sys.stdout)
logger.addHandler(handler)
if prefix:
format = logging.BASIC_FORMAT
else:
format = str("%(message)s")
formatter = logging.Formatter(format)
for handler in logger.handlers:
handler.setFormatter(formatter)
logger.setLevel(logging.DEBUG)
setup_logging()
ERROR_MSG = """You must fix all errors; for details on how to fix them, see
https://web-platform-tests.org/writing-tests/lint-tool.html
However, instead of fixing a particular error, it's sometimes
OK to add a line to the lint.whitelist file in the root of the
web-platform-tests directory to make the lint tool ignore it.
For example, to make the lint tool ignore all '%s'
errors in the %s file,
you could add the following line to the lint.whitelist file.
%s: %s"""
def all_filesystem_paths(repo_root, subdir=None):
# type: (str, Optional[str]) -> Iterable[str]
path_filter = PathFilter(repo_root, extras=[str(".git/")])
if subdir:
expanded_path = subdir
else:
expanded_path = repo_root
for dirpath, dirnames, filenames in path_filter(walk(expanded_path)):
for filename, _ in filenames:
path = os.path.join(dirpath, filename)
if subdir:
path = os.path.join(subdir, path)
assert not os.path.isabs(path), path
yield path
def _all_files_equal(paths):
# type: (Iterable[str]) -> bool
"""
Checks all the paths are files that are byte-for-byte identical
:param paths: the list of paths to compare
:returns: True if they are all identical
"""
paths = list(paths)
if len(paths) < 2:
return True
first = paths.pop()
size = os.path.getsize(first)
if any(os.path.getsize(path) != size for path in paths):
return False
# Chunk this to avoid eating up memory and file descriptors
bufsize = 4096*4 # 16KB, a "reasonable" number of disk sectors
groupsize = 8 # Hypothesised to be large enough in the common case that everything fits in one group
with open(first, "rb") as first_f:
for start in range(0, len(paths), groupsize):
path_group = paths[start:start+groupsize]
first_f.seek(0)
try:
files = [open(x, "rb") for x in path_group]
for _ in range(0, size, bufsize):
a = first_f.read(bufsize)
for f in files:
b = f.read(bufsize)
if a != b:
return False
finally:
for f in files:
f.close()
return True
def check_path_length(repo_root, path):
# type: (str, str) -> List[rules.Error]
if len(path) + 1 > 150:
return [rules.PathLength.error(path, (path, len(path) + 1))]
return []
def check_file_type(repo_root, path):
# type: (str, str) -> List[rules.Error]
if os.path.islink(path):
return [rules.FileType.error(path, (path, "symlink"))]
return []
def check_worker_collision(repo_root, path):
# type: (str, str) -> List[rules.Error]
endings = [(".any.html", ".any.js"),
(".any.worker.html", ".any.js"),
(".worker.html", ".worker.js")]
for path_ending, generated in endings:
if path.endswith(path_ending):
return [rules.WorkerCollision.error(path, (path_ending, generated))]
return []
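# e.g. a checked-in "foo.any.html" is flagged because that filename collides
# with the test wrapper generated from "foo.any.js".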
def check_gitignore_file(repo_root, path):
# type: (str, str) -> List[rules.Error]
if not path.endswith(".gitignore"):
return []
path_parts = path.split(os.path.sep)
if len(path_parts) == 1:
return []
if path_parts[-1] != ".gitignore":
return []
if (path_parts[0] in ["tools", "docs"] or
path_parts[:2] == ["resources", "webidl2"] or
path_parts[:3] == ["css", "tools", "apiclient"]):
return []
return [rules.GitIgnoreFile.error(path)]
def check_ahem_copy(repo_root, path):
# type: (str, str) -> List[rules.Error]
lpath = path.lower()
if "ahem" in lpath and lpath.endswith(".ttf"):
return [rules.AhemCopy.error(path)]
return []
def check_git_ignore(repo_root, paths):
# type: (str, List[str]) -> List[rules.Error]
errors = []
with tempfile.TemporaryFile('w+') as f:
f.write('\n'.join(paths))
f.seek(0)
try:
matches = subprocess.check_output(
["git", "check-ignore", "--verbose", "--no-index", "--stdin"], stdin=f)
for match in matches.decode("utf-8").strip().split('\n'):
match_filter, path = match.split()
_, _, filter_string = match_filter.split(':')
# If the matching filter reported by check-ignore is a special-case exception,
# that's fine. Otherwise, it requires a new special-case exception.
if filter_string[0] != '!':
errors.append(rules.IgnoredPath.error(path, (path,)))
except subprocess.CalledProcessError:
# Nonzero return code means that no match exists.
pass
return errors
drafts_csswg_re = re.compile(r"https?\:\/\/drafts\.csswg\.org\/([^/?#]+)")
w3c_tr_re = re.compile(r"https?\:\/\/www\.w3c?\.org\/TR\/([^/?#]+)")
w3c_dev_re = re.compile(r"https?\:\/\/dev\.w3c?\.org\/[^/?#]+\/([^/?#]+)")
def check_css_globally_unique(repo_root, paths):
# type: (str, List[str]) -> List[rules.Error]
"""
Checks that CSS filenames are sufficiently unique
This groups files by path classifying them as "test", "reference", or
"support".
"test" files must have a unique name across files that share links to the
same spec.
"reference" and "support" files, on the other hand, must have globally
unique names.
:param repo_root: the repository root
:param paths: list of all paths
:returns: a list of errors found in ``paths``
"""
test_files = defaultdict(set) # type: Dict[Union[bytes, Text], Set[str]]
ref_files = defaultdict(set) # type: Dict[Union[bytes, Text], Set[str]]
support_files = defaultdict(set) # type: Dict[Union[bytes, Text], Set[str]]
for path in paths:
if os.name == "nt":
if isinstance(path, binary_type):
path = path.replace(b"\\", b"/")
else:
path = path.replace(u"\\", u"/")
if not path.startswith("css/"):
continue
source_file = SourceFile(repo_root, path, "/")
if source_file.name_is_non_test:
# If we're name_is_non_test for a reason apart from support, ignore it.
# We care about support because of the requirement all support files in css/ to be in
# a support directory; see the start of check_parsed.
offset = path.find("/support/")
if offset == -1:
continue
parts = source_file.dir_path.split(os.path.sep)
if (parts[0] in source_file.root_dir_non_test or
any(item in source_file.dir_non_test - {"support"} for item in parts) or
any(parts[:len(non_test_path)] == list(non_test_path) for non_test_path in source_file.dir_path_non_test)):
continue
support_name = path[offset+1:]
support_files[support_name].add(path)
elif source_file.name_is_reference:
ref_files[source_file.name].add(path)
else:
test_name = source_file.name # type: Union[bytes, Text]
if isinstance(test_name, bytes):
test_name = test_name.replace(b'-manual', b'')
else:
test_name = test_name.replace(u'-manual', u'')
test_files[test_name].add(path)
errors = []
for name, colliding in iteritems(test_files):
if len(colliding) > 1:
if not _all_files_equal([os.path.join(repo_root, x) for x in colliding]):
# Only compute by_spec when there are prima facie collisions, since building it is expensive
by_spec = defaultdict(set) # type: Dict[Text, Set[str]]
for path in colliding:
source_file = SourceFile(repo_root, path, "/")
for link in source_file.spec_links:
for r in (drafts_csswg_re, w3c_tr_re, w3c_dev_re):
m = r.match(link)
if m:
spec = m.group(1)
break
else:
continue
by_spec[spec].add(path)
for spec, spec_paths in iteritems(by_spec):
if not _all_files_equal([os.path.join(repo_root, x) for x in spec_paths]):
for x in spec_paths:
context1 = (name, spec, ", ".join(sorted(spec_paths)))
errors.append(rules.CSSCollidingTestName.error(x,
context1))
for rule_class, d in [(rules.CSSCollidingRefName, ref_files),
(rules.CSSCollidingSupportName, support_files)]:
for name, colliding in iteritems(d):
if len(colliding) > 1:
if not _all_files_equal([os.path.join(repo_root, x) for x in colliding]):
context2 = (name, ", ".join(sorted(colliding)))
for x in colliding:
errors.append(rule_class.error(x, context2))
return errors
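# Sketch of the spec-key extraction above (assumed link value):
# drafts_csswg_re.match("https://drafts.csswg.org/css-flexbox-1/#abspos")
# captures "css-flexbox-1", so two equally named tests only collide when
# they link to the same spec and their contents differ.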
def parse_whitelist(f):
# type: (IO[bytes]) -> Tuple[Whitelist, Set[Text]]
"""
Parse the whitelist file given by `f`, and return the parsed structure.
"""
data = defaultdict(lambda: defaultdict(set)) # type: Whitelist
ignored_files = set() # type: Set[Text]
for line in f:
line = line.strip()
if not line or line.startswith("#"):
continue
parts = [item.strip() for item in line.split(":")]
if len(parts) == 2:
error_types_s, file_match = parts
line_number = None # type: Optional[int]
else:
error_types_s, file_match, line_number_s = parts
line_number = int(line_number_s)
error_types = {item.strip() for item in error_types_s.split(",")}
file_match = os.path.normcase(file_match)
if "*" in error_types:
ignored_files.add(file_match)
else:
for error_type in error_types:
data[error_type][file_match].add(line_number)
return data, ignored_files
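# A minimal worked example (hypothetical whitelist contents):
#
# TRAILING WHITESPACE: example/file.html
# CONSOLE, PRINT STATEMENT: tools/example.py: 12
# *: generated/*
#
# would produce (modulo os.path.normcase on the file patterns):
# data["TRAILING WHITESPACE"]["example/file.html"] == {None}
# data["CONSOLE"]["tools/example.py"] == {12}
# data["PRINT STATEMENT"]["tools/example.py"] == {12}
# ignored_files == {"generated/*"}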
def filter_whitelist_errors(data, errors):
# type: (Whitelist, Sequence[rules.Error]) -> List[rules.Error]
"""
Filter out those errors that are whitelisted in `data`.
"""
if not errors:
return []
whitelisted = [False] * len(errors)
for i, (error_type, msg, path, line) in enumerate(errors):
normpath = os.path.normcase(path)
# Allow whitelisting all lint errors except the IGNORED PATH lint,
# which explains how to fix it correctly and shouldn't be ignored.
if error_type in data and error_type != "IGNORED PATH":
wl_files = data[error_type]
for file_match, allowed_lines in iteritems(wl_files):
if None in allowed_lines or line in allowed_lines:
if fnmatch.fnmatchcase(normpath, file_match):
whitelisted[i] = True
return [item for i, item in enumerate(errors) if not whitelisted[i]]
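# Continuing the example above: ("CONSOLE", "msg", "tools/example.py", 12)
# would be filtered out, whereas the same error on line 13 survives, since
# only line 12 (and not None, meaning "any line") was whitelisted for it.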
regexps = [item() for item in # type: ignore
[rules.TrailingWhitespaceRegexp,
rules.TabsRegexp,
rules.CRRegexp,
rules.SetTimeoutRegexp,
rules.W3CTestOrgRegexp,
rules.WebPlatformTestRegexp,
rules.Webidl2Regexp,
rules.ConsoleRegexp,
rules.GenerateTestsRegexp,
rules.PrintRegexp,
rules.LayoutTestsRegexp,
rules.MissingDepsRegexp,
rules.SpecialPowersRegexp]]
def check_regexp_line(repo_root, path, f):
# type: (str, str, IO[bytes]) -> List[rules.Error]
errors = [] # type: List[rules.Error]
applicable_regexps = [regexp for regexp in regexps if regexp.applies(path)]
for i, line in enumerate(f):
for regexp in applicable_regexps:
if regexp.search(line):
errors.append((regexp.name, regexp.description, path, i+1))
return errors
def check_parsed(repo_root, path, f):
# type: (str, str, IO[bytes]) -> List[rules.Error]
source_file = SourceFile(repo_root, path, "/", contents=f.read())
errors = [] # type: List[rules.Error]
if path.startswith("css/"):
if (source_file.type == "support" and
not source_file.name_is_non_test and
not source_file.name_is_reference):
return [rules.SupportWrongDir.error(path)]
if (source_file.type != "support" and
not source_file.name_is_reference and
not source_file.spec_links):
return [rules.MissingLink.error(path)]
if source_file.name_is_non_test:
return []
if source_file.markup_type is None:
return []
if source_file.root is None:
return [rules.ParseFailed.error(path)]
if source_file.type == "manual" and not source_file.name_is_manual:
errors.append(rules.ContentManual.error(path))
if source_file.type == "visual" and not source_file.name_is_visual:
errors.append(rules.ContentVisual.error(path))
about_blank_parts = urlsplit("about:blank")
for reftest_node in source_file.reftest_nodes:
href = reftest_node.attrib.get("href", "").strip(space_chars)
parts = urlsplit(href)
if parts == about_blank_parts:
continue
if (parts.scheme or parts.netloc):
errors.append(rules.AbsoluteUrlRef.error(path, (href,)))
continue
ref_url = urljoin(source_file.url, href)
ref_parts = urlsplit(ref_url)
if source_file.url == ref_url:
errors.append(rules.SameFileRef.error(path))
continue
assert ref_parts.path != ""
reference_file = os.path.join(repo_root, ref_parts.path[1:])
reference_rel = reftest_node.attrib.get("rel", "")
if not os.path.isfile(reference_file):
errors.append(rules.NonexistentRef.error(path,
(reference_rel, href)))
if len(source_file.timeout_nodes) > 1:
errors.append(rules.MultipleTimeout.error(path))
for timeout_node in source_file.timeout_nodes:
timeout_value = timeout_node.attrib.get("content", "").lower()
if timeout_value != "long":
errors.append(rules.InvalidTimeout.error(path, (timeout_value,)))
if source_file.testharness_nodes:
if len(source_file.testharness_nodes) > 1:
errors.append(rules.MultipleTestharness.error(path))
testharnessreport_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testharnessreport.js']")
if not testharnessreport_nodes:
errors.append(rules.MissingTestharnessReport.error(path))
else:
if len(testharnessreport_nodes) > 1:
errors.append(rules.MultipleTestharnessReport.error(path))
testharnesscss_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}link[@href='/resources/testharness.css']")
if testharnesscss_nodes:
errors.append(rules.PresentTestharnessCSS.error(path))
for element in source_file.variant_nodes:
if "content" not in element.attrib:
errors.append(rules.VariantMissing.error(path))
else:
variant = element.attrib["content"]
if variant != "" and variant[0] not in ("?", "#"):
errors.append(rules.MalformedVariant.error(path, (path,)))
seen_elements = {"timeout": False,
"testharness": False,
"testharnessreport": False}
required_elements = [key for key, value in {"testharness": True,
"testharnessreport": len(testharnessreport_nodes) > 0,
"timeout": len(source_file.timeout_nodes) > 0}.items()
if value]
for elem in source_file.root.iter():
if source_file.timeout_nodes and elem == source_file.timeout_nodes[0]:
seen_elements["timeout"] = True
if seen_elements["testharness"]:
errors.append(rules.LateTimeout.error(path))
elif elem == source_file.testharness_nodes[0]:
seen_elements["testharness"] = True
elif testharnessreport_nodes and elem == testharnessreport_nodes[0]:
seen_elements["testharnessreport"] = True
if not seen_elements["testharness"]:
errors.append(rules.EarlyTestharnessReport.error(path))
if all(seen_elements[name] for name in required_elements):
break
if source_file.testdriver_nodes:
if len(source_file.testdriver_nodes) > 1:
errors.append(rules.MultipleTestdriver.error(path))
testdriver_vendor_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver-vendor.js']")
if not testdriver_vendor_nodes:
errors.append(rules.MissingTestdriverVendor.error(path))
else:
if len(testdriver_vendor_nodes) > 1:
errors.append(rules.MultipleTestdriverVendor.error(path))
for element in source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src]"):
src = element.attrib["src"]
def incorrect_path(script, src):
# type: (Text, Text) -> bool
return (script == src or
("/%s" % script in src and src != "/resources/%s" % script))
if incorrect_path("testharness.js", src):
errors.append(rules.TestharnessPath.error(path))
if incorrect_path("testharnessreport.js", src):
errors.append(rules.TestharnessReportPath.error(path))
if incorrect_path("testdriver.js", src):
errors.append(rules.TestdriverPath.error(path))
if incorrect_path("testdriver-vendor.js", src):
errors.append(rules.TestdriverVendorPath.error(path))
return errors
class ASTCheck(with_metaclass(abc.ABCMeta)):
@abc.abstractproperty
def rule(self):
# type: () -> Type[rules.Rule]
pass
@abc.abstractmethod
def check(self, root):
# type: (ast.AST) -> List[int]
pass
class OpenModeCheck(ASTCheck):
rule = rules.OpenNoMode
def check(self, root):
# type: (ast.AST) -> List[int]
errors = []
for node in ast.walk(root):
if isinstance(node, ast.Call):
if hasattr(node.func, "id") and node.func.id in ("open", "file"): # type: ignore
if (len(node.args) < 2 and
all(item.arg != "mode" for item in node.keywords)):
errors.append(node.lineno)
return errors
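# What this AST check flags (illustrative snippets):
# open("data.txt") -> flagged: neither a second positional arg nor mode=
# open("data.txt", "rb") -> passes
# open("data.txt", mode="rb") -> passes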
ast_checkers = [item() for item in [OpenModeCheck]]
def check_python_ast(repo_root, path, f):
# type: (str, str, IO[bytes]) -> List[rules.Error]
if not path.endswith(".py"):
return []
try:
root = ast.parse(f.read())
except SyntaxError as e:
return [rules.ParseFailed.error(path, line_no=e.lineno)]
errors = []
for checker in ast_checkers:
for lineno in checker.check(root):
errors.append(checker.rule.error(path, line_no=lineno))
return errors
broken_js_metadata = re.compile(br"//\s*META:")
broken_python_metadata = re.compile(br"#\s*META:")
def check_global_metadata(value):
# type: (bytes) -> Iterable[Tuple[Type[rules.Rule], Tuple[Any, ...]]]
global_values = {item.strip() for item in value.split(b",") if item.strip()}
included_variants = set.union(get_default_any_variants(),
*(get_any_variants(v) for v in global_values if not v.startswith(b"!")))
for global_value in global_values:
if global_value.startswith(b"!"):
excluded_value = global_value[1:]
if not get_any_variants(excluded_value):
yield (rules.UnknownGlobalMetadata, ())
elif excluded_value in global_values:
yield (rules.BrokenGlobalMetadata,
(("Cannot specify both %s and %s" % (global_value, excluded_value)),))
else:
excluded_variants = get_any_variants(excluded_value)
if not (excluded_variants & included_variants):
yield (rules.BrokenGlobalMetadata,
(("Cannot exclude %s if it is not included" % (excluded_value,)),))
else:
if not get_any_variants(global_value):
yield (rules.UnknownGlobalMetadata, ())
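# For instance (variant names depend on get_any_variants, so treat these as
# assumptions): b"window,!window" yields BrokenGlobalMetadata ("Cannot
# specify both !window and window"), and b"!bogus" yields
# UnknownGlobalMetadata because no variants exist for "bogus".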
def check_script_metadata(repo_root, path, f):
# type: (str, str, IO[bytes]) -> List[rules.Error]
if path.endswith((".worker.js", ".any.js")):
meta_re = js_meta_re
broken_metadata = broken_js_metadata
elif path.endswith(".py"):
meta_re = python_meta_re
broken_metadata = broken_python_metadata
else:
return []
done = False
errors = []
for idx, line in enumerate(f):
assert isinstance(line, binary_type), line
m = meta_re.match(line)
if m:
key, value = m.groups()
if key == b"global":
for rule_class, context in check_global_metadata(value):
errors.append(rule_class.error(path, context, idx + 1))
elif key == b"timeout":
if value != b"long":
errors.append(rules.UnknownTimeoutMetadata.error(path,
line_no=idx + 1))
elif key == b"title":
pass
elif key == b"script":
pass
elif key == b"variant":
pass
else:
errors.append(rules.UnknownMetadata.error(path,
line_no=idx + 1))
else:
done = True
if done:
if meta_re.match(line):
errors.append(rules.StrayMetadata.error(path, line_no=idx + 1))
elif meta_re.search(line):
errors.append(rules.IndentedMetadata.error(path,
line_no=idx + 1))
elif broken_metadata.search(line):
errors.append(rules.BrokenMetadata.error(path, line_no=idx + 1))
return errors
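# The metadata under test sits at the top of .any.js/.worker.js/.py files,
# e.g. (illustrative):
# // META: global=window,worker
# // META: timeout=long
# Once a non-metadata line is seen, any further META lines are reported as
# stray, indented, or broken by the `done` branch above.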
ahem_font_re = re.compile(b"font.*:.*ahem", flags=re.IGNORECASE)
# Ahem can appear either in the global location or in the support
# directory for legacy Mozilla imports
ahem_stylesheet_re = re.compile(br"/fonts/ahem\.css|support/ahem\.css",
flags=re.IGNORECASE)
def check_ahem_system_font(repo_root, path, f):
# type: (str, str, IO[bytes]) -> List[rules.Error]
if not path.endswith((".html", ".htm", ".xht", ".xhtml")):
return []
contents = f.read()
errors = []
if ahem_font_re.search(contents) and not ahem_stylesheet_re.search(contents):
errors.append(rules.AhemSystemFont.error(path))
return errors
def check_path(repo_root, path):
# type: (str, str) -> List[rules.Error]
"""
Runs lints that check the file path.
:param repo_root: the repository root
:param path: the path of the file within the repository
:returns: a list of errors found in ``path``
"""
errors = []
for path_fn in path_lints:
errors.extend(path_fn(repo_root, path))
return errors
def check_all_paths(repo_root, paths):
# type: (str, List[str]) -> List[rules.Error]
"""
Runs lints that check all paths globally.
:param repo_root: the repository root
:param paths: a list of all the paths within the repository
:returns: a list of errors found in ``paths``
"""
errors = []
for paths_fn in all_paths_lints:
errors.extend(paths_fn(repo_root, paths))
return errors
def check_file_contents(repo_root, path, f):
# type: (str, str, IO[bytes]) -> List[rules.Error]
"""
Runs lints that check the file contents.
:param repo_root: the repository root
:param path: the path of the file within the repository
:param f: a file-like object with the file contents
:returns: a list of errors found in ``f``
"""
errors = []
for file_fn in file_lints:
errors.extend(file_fn(repo_root, path, f))
f.seek(0)
return errors
def output_errors_text(errors):
# type: (List[rules.Error]) -> None
assert logger is not None
for error_type, description, path, line_number in errors:
pos_string = path
if line_number:
pos_string += ":%s" % line_number
logger.error("%s: %s (%s)" % (pos_string, description, error_type))
def output_errors_markdown(errors):
# type: (List[rules.Error]) -> None
if not errors:
return
assert logger is not None
heading = """Got lint errors:
| Error Type | Position | Message |
|------------|----------|---------|"""
for line in heading.split("\n"):
logger.error(line)
for error_type, description, path, line_number in errors:
pos_string = path
if line_number:
pos_string += ":%s" % line_number
logger.error("%s | %s | %s |" % (error_type, pos_string, description))
def output_errors_json(errors):
# type: (List[rules.Error]) -> None
for error_type, error, path, line_number in errors:
print(json.dumps({"path": path, "lineno": line_number,
"rule": error_type, "message": error}))
def output_error_count(error_count):
# type: (Dict[Text, int]) -> None
if not error_count:
return
assert logger is not None
by_type = " ".join("%s: %d" % item for item in error_count.items())
count = sum(error_count.values())
logger.info("")
if count == 1:
logger.info("There was 1 error (%s)" % (by_type,))
else:
logger.info("There were %d errors (%s)" % (count, by_type))
def changed_files(wpt_root):
# type: (str) -> List[Text]
revish = testfiles.get_revish(revish=None)
changed, _ = testfiles.files_changed(revish, None, include_uncommitted=True, include_new=True)
return [os.path.relpath(item, wpt_root) for item in changed]
def lint_paths(kwargs, wpt_root):
# type: (Dict[str, Any], str) -> List[str]
if kwargs.get(str("paths")):
paths = []
for path in kwargs.get(str("paths"), []):
if os.path.isdir(path):
path_dir = list(all_filesystem_paths(wpt_root, path))
paths.extend(path_dir)
elif os.path.isfile(path):
paths.append(os.path.relpath(os.path.abspath(path), wpt_root))
elif kwargs[str("all")]:
paths = list(all_filesystem_paths(wpt_root))
else:
changed_paths = changed_files(wpt_root)
force_all = False
for path in changed_paths:
path = path.replace(os.path.sep, "/")
if path == "lint.whitelist" or path.startswith("tools/lint/"):
force_all = True
break
paths = (list(changed_paths) if not force_all # type: ignore
else list(all_filesystem_paths(wpt_root)))
return paths
def create_parser():
# type: () -> argparse.ArgumentParser
parser = argparse.ArgumentParser()
parser.add_argument("paths", nargs="*",
help="List of paths to lint")
parser.add_argument("--json", action="store_true",
help="Output machine-readable JSON format")
parser.add_argument("--markdown", action="store_true",
help="Output markdown")
parser.add_argument("--repo-root", help="The WPT directory. Use this"
"option if the lint script exists outside the repository")
parser.add_argument("--all", action="store_true", help="If no paths are passed, try to lint the whole "
"working directory, not just files that changed")
return parser
def main(**kwargs):
# type: (**Any) -> int
assert logger is not None
if kwargs.get(str("json")) and kwargs.get(str("markdown")):
logger.critical("Cannot specify --json and --markdown")
sys.exit(2)
repo_root = kwargs.get(str('repo_root')) or localpaths.repo_root
output_format = {(True, False): str("json"),
(False, True): str("markdown"),
(False, False): str("normal")}[(kwargs.get(str("json"), False),
kwargs.get(str("markdown"), False))]
if output_format == "markdown":
setup_logging(True)
paths = lint_paths(kwargs, repo_root)
return lint(repo_root, paths, output_format)
def lint(repo_root, paths, output_format):
# type: (str, List[str], str) -> int
error_count = defaultdict(int) # type: Dict[Text, int]
last = None
with open(os.path.join(repo_root, "lint.whitelist")) as f:
whitelist, ignored_files = parse_whitelist(f)
output_errors = {"json": output_errors_json,
"markdown": output_errors_markdown,
"normal": output_errors_text}[output_format]
def process_errors(errors):
# type: (List[rules.Error]) -> Optional[Tuple[Text, Text]]
"""
Filters and prints the errors, and updates the ``error_count`` object.
:param errors: a list of error tuples (error type, message, path, line number)
:returns: ``None`` if there were no errors, or
a tuple of the error type and the path otherwise
"""
errors = filter_whitelist_errors(whitelist, errors)
if not errors:
return None
output_errors(errors)
for error_type, error, path, line in errors:
error_count[error_type] += 1
return (errors[-1][0], path)
for path in paths[:]:
abs_path = os.path.join(repo_root, path)
if not os.path.exists(abs_path):
paths.remove(path)
continue
if any(fnmatch.fnmatch(path, file_match) for file_match in ignored_files):
paths.remove(path)
continue
errors = check_path(repo_root, path)
last = process_errors(errors) or last
if not os.path.isdir(abs_path):
with open(abs_path, 'rb') as f:
errors = check_file_contents(repo_root, path, f)
last = process_errors(errors) or last
errors = check_all_paths(repo_root, paths)
last = process_errors(errors) or last
if output_format in ("normal", "markdown"):
output_error_count(error_count)
if error_count:
assert last is not None
assert logger is not None
for line in (ERROR_MSG % (last[0], last[1], last[0], last[1])).split("\n"):
logger.info(line)
return sum(itervalues(error_count))
path_lints = [check_file_type, check_path_length, check_worker_collision, check_ahem_copy,
check_gitignore_file]
all_paths_lints = [check_css_globally_unique]
file_lints = [check_regexp_line, check_parsed, check_python_ast, check_script_metadata,
check_ahem_system_font]
# Don't break users of the lint that don't have git installed.
try:
subprocess.check_output(["git", "--version"])
all_paths_lints += [check_git_ignore]
except subprocess.CalledProcessError:
print('No git present; skipping .gitignore lint.')
if __name__ == "__main__":
args = create_parser().parse_args()
error_count = main(**vars(args))
if error_count > 0:
sys.exit(1)
| mpl-2.0 |
tucbill/manila | manila/openstack/common/rootwrap/cmd.py | 2 | 4641 | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Root wrapper for OpenStack services
Filters which commands a service is allowed to run as another user.
To use this with manila, you should set the following in
manila.conf:
rootwrap_config=/etc/manila/rootwrap.conf
You also need to let the manila user run manila-rootwrap
as root in sudoers:
manila ALL = (root) NOPASSWD: /usr/bin/manila-rootwrap
/etc/manila/rootwrap.conf *
Service packaging should deploy .filters files only on nodes where
they are needed, to avoid allowing more than is necessary.
"""
import ConfigParser
import logging
import os
import pwd
import signal
import subprocess
import sys
RC_UNAUTHORIZED = 99
RC_NOCOMMAND = 98
RC_BADCONFIG = 97
RC_NOEXECFOUND = 96
def _subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
# non-Python subprocesses expect.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def _exit_error(execname, message, errorcode, log=True):
print "%s: %s" % (execname, message)
if log:
logging.error(message)
sys.exit(errorcode)
def main():
# Split arguments, require at least a command
execname = sys.argv.pop(0)
if len(sys.argv) < 2:
_exit_error(execname, "No command specified", RC_NOCOMMAND, log=False)
configfile = sys.argv.pop(0)
userargs = sys.argv[:]
# Add ../ to sys.path to allow running from branch
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(execname),
os.pardir, os.pardir))
if os.path.exists(os.path.join(possible_topdir, "manila", "__init__.py")):
sys.path.insert(0, possible_topdir)
from manila.openstack.common.rootwrap import wrapper
# Load configuration
try:
rawconfig = ConfigParser.RawConfigParser()
rawconfig.read(configfile)
config = wrapper.RootwrapConfig(rawconfig)
except ValueError as exc:
msg = "Incorrect value in %s: %s" % (configfile, exc.message)
_exit_error(execname, msg, RC_BADCONFIG, log=False)
except ConfigParser.Error:
_exit_error(execname, "Incorrect configuration file: %s" % configfile,
RC_BADCONFIG, log=False)
if config.use_syslog:
wrapper.setup_syslog(execname,
config.syslog_log_facility,
config.syslog_log_level)
# Execute command if it matches any of the loaded filters
filters = wrapper.load_filters(config.filters_path)
try:
filtermatch = wrapper.match_filter(filters, userargs,
exec_dirs=config.exec_dirs)
if filtermatch:
command = filtermatch.get_command(userargs,
exec_dirs=config.exec_dirs)
if config.use_syslog:
logging.info("(%s > %s) Executing %s (filter match = %s)" % (
os.getlogin(), pwd.getpwuid(os.getuid())[0],
command, filtermatch.name))
obj = subprocess.Popen(command,
stdin=sys.stdin,
stdout=sys.stdout,
stderr=sys.stderr,
preexec_fn=_subprocess_setup,
env=filtermatch.get_environment(userargs))
obj.wait()
sys.exit(obj.returncode)
except wrapper.FilterMatchNotExecutable as exc:
msg = ("Executable not found: %s (filter match = %s)"
% (exc.match.exec_path, exc.match.name))
_exit_error(execname, msg, RC_NOEXECFOUND, log=config.use_syslog)
except wrapper.NoFilterMatched:
msg = ("Unauthorized command: %s (no filter matched)"
% ' '.join(userargs))
_exit_error(execname, msg, RC_UNAUTHORIZED, log=config.use_syslog)
| apache-2.0 |
joelpinheiro/safebox-smartcard-auth | Server/veserver/lib/python2.7/site-packages/Crypto/SelfTest/Hash/__init__.py | 116 | 2518 | # -*- coding: utf-8 -*-
#
# SelfTest/Hash/__init__.py: Self-test for hash modules
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test for hash modules"""
__revision__ = "$Id$"
def get_tests(config={}):
tests = []
from Crypto.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config)
from Crypto.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config)
from Crypto.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config)
from Crypto.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config)
from Crypto.SelfTest.Hash import test_RIPEMD; tests += test_RIPEMD.get_tests(config=config)
from Crypto.SelfTest.Hash import test_SHA; tests += test_SHA.get_tests(config=config)
from Crypto.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config)
try:
from Crypto.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config)
from Crypto.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config)
from Crypto.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config)
except ImportError:
import sys
sys.stderr.write("SelfTest: warning: not testing SHA224/SHA384/SHA512 modules (not available)\n")
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| gpl-2.0 |
proxysh/Safejumper-for-Mac | buildmac/Resources/env/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.py | 333 | 15224 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
get_executable, in_venv)
logger = logging.getLogger(__name__)
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>'''.strip()
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
def _resolve(module, func):
__import__(module)
mod = sys.modules[module]
parts = func.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
try:
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
sys.stderr.write('%%s\\n' %% e)
rc = 1
sys.exit(rc)
'''
def _enquote_executable(executable):
if ' ' in executable:
# make sure we quote only the executable in case of env
# for example /usr/bin/env "/dir with spaces/bin/jython"
# instead of "/usr/bin/env /dir with spaces/bin/jython"
# otherwise the whole env invocation would be quoted
if executable.startswith('/usr/bin/env '):
env, _executable = executable.split(' ', 1)
if ' ' in _executable and not _executable.startswith('"'):
executable = '%s "%s"' % (env, _executable)
else:
if not executable.startswith('"'):
executable = '"%s"' % executable
return executable
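# Behaviour sketch (assumed inputs):
# _enquote_executable('/opt/my python/bin/python')
# -> '"/opt/my python/bin/python"'
# _enquote_executable('/usr/bin/env /dir with spaces/bin/jython')
# -> '/usr/bin/env "/dir with spaces/bin/jython"'
# A spaceless path such as '/usr/bin/python' is returned unchanged.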
class ScriptMaker(object):
"""
A class to copy or create scripts from source scripts or callable
specifications.
"""
script_template = SCRIPT_TEMPLATE
executable = None # for shebangs
def __init__(self, source_dir, target_dir, add_launchers=True,
dry_run=False, fileop=None):
self.source_dir = source_dir
self.target_dir = target_dir
self.add_launchers = add_launchers
self.force = False
self.clobber = False
# It only makes sense to set mode bits on POSIX.
self.set_mode = (os.name == 'posix') or (os.name == 'java' and
os._name == 'posix')
self.variants = set(('', 'X.Y'))
self._fileop = fileop or FileOperator(dry_run)
self._is_nt = os.name == 'nt' or (
os.name == 'java' and os._name == 'nt')
def _get_alternate_executable(self, executable, options):
if options.get('gui', False) and self._is_nt: # pragma: no cover
dn, fn = os.path.split(executable)
fn = fn.replace('python', 'pythonw')
executable = os.path.join(dn, fn)
return executable
if sys.platform.startswith('java'): # pragma: no cover
def _is_shell(self, executable):
"""
Determine if the specified executable is a script
(contains a #! line)
"""
try:
with open(executable) as fp:
return fp.read(2) == '#!'
except (OSError, IOError):
logger.warning('Failed to open %s', executable)
return False
def _fix_jython_executable(self, executable):
if self._is_shell(executable):
# Workaround for Jython is not needed on Linux systems.
import java
if java.lang.System.getProperty('os.name') == 'Linux':
return executable
elif executable.lower().endswith('jython.exe'):
# Use wrapper exe for Jython on Windows
return executable
return '/usr/bin/env %s' % executable
def _get_shebang(self, encoding, post_interp=b'', options=None):
enquote = True
if self.executable:
executable = self.executable
enquote = False # assume this will be taken care of
elif not sysconfig.is_python_build():
executable = get_executable()
elif in_venv(): # pragma: no cover
executable = os.path.join(sysconfig.get_path('scripts'),
'python%s' % sysconfig.get_config_var('EXE'))
else: # pragma: no cover
executable = os.path.join(
sysconfig.get_config_var('BINDIR'),
'python%s%s' % (sysconfig.get_config_var('VERSION'),
sysconfig.get_config_var('EXE')))
if options:
executable = self._get_alternate_executable(executable, options)
if sys.platform.startswith('java'): # pragma: no cover
executable = self._fix_jython_executable(executable)
# Normalise case for Windows
executable = os.path.normcase(executable)
# If the user didn't specify an executable, it may be necessary to
# cater for executable paths with spaces (not uncommon on Windows)
if enquote:
executable = _enquote_executable(executable)
# Issue #51: don't use fsencode, since we later try to
# check that the shebang is decodable using utf-8.
executable = executable.encode('utf-8')
# in case of IronPython, play safe and enable frames support
if (sys.platform == 'cli' and '-X:Frames' not in post_interp
and '-X:FullFrames' not in post_interp): # pragma: no cover
post_interp += b' -X:Frames'
shebang = b'#!' + executable + post_interp + b'\n'
# Python parser starts to read a script using UTF-8 until
# it gets a #coding:xxx cookie. The shebang has to be the
# first line of a file, the #coding:xxx cookie cannot be
# written before. So the shebang has to be decodable from
# UTF-8.
try:
shebang.decode('utf-8')
except UnicodeDecodeError: # pragma: no cover
raise ValueError(
'The shebang (%r) is not decodable from utf-8' % shebang)
# If the script is encoded to a custom encoding (use a
# #coding:xxx cookie), the shebang has to be decodable from
# the script encoding too.
if encoding != 'utf-8':
try:
shebang.decode(encoding)
except UnicodeDecodeError: # pragma: no cover
raise ValueError(
'The shebang (%r) is not decodable '
'from the script encoding (%r)' % (shebang, encoding))
return shebang
def _get_script_text(self, entry):
return self.script_template % dict(module=entry.prefix,
func=entry.suffix)
manifest = _DEFAULT_MANIFEST
def get_manifest(self, exename):
base = os.path.basename(exename)
return self.manifest % base
def _write_script(self, names, shebang, script_bytes, filenames, ext):
use_launcher = self.add_launchers and self._is_nt
linesep = os.linesep.encode('utf-8')
if not use_launcher:
script_bytes = shebang + linesep + script_bytes
else: # pragma: no cover
if ext == 'py':
launcher = self._get_launcher('t')
else:
launcher = self._get_launcher('w')
stream = BytesIO()
with ZipFile(stream, 'w') as zf:
zf.writestr('__main__.py', script_bytes)
zip_data = stream.getvalue()
script_bytes = launcher + shebang + linesep + zip_data
for name in names:
outname = os.path.join(self.target_dir, name)
if use_launcher: # pragma: no cover
n, e = os.path.splitext(outname)
if e.startswith('.py'):
outname = n
outname = '%s.exe' % outname
try:
self._fileop.write_binary_file(outname, script_bytes)
except Exception:
# Failed writing an executable - it might be in use.
logger.warning('Failed to write executable - trying to '
'use .deleteme logic')
dfname = '%s.deleteme' % outname
if os.path.exists(dfname):
os.remove(dfname) # Not allowed to fail here
os.rename(outname, dfname) # nor here
self._fileop.write_binary_file(outname, script_bytes)
logger.debug('Able to replace executable using '
'.deleteme logic')
try:
os.remove(dfname)
except Exception:
pass # still in use - ignore error
else:
if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover
outname = '%s.%s' % (outname, ext)
if os.path.exists(outname) and not self.clobber:
logger.warning('Skipping existing file %s', outname)
continue
self._fileop.write_binary_file(outname, script_bytes)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
def _make_script(self, entry, filenames, options=None):
post_interp = b''
if options:
args = options.get('interpreter_args', [])
if args:
args = ' %s' % ' '.join(args)
post_interp = args.encode('utf-8')
shebang = self._get_shebang('utf-8', post_interp, options=options)
script = self._get_script_text(entry).encode('utf-8')
name = entry.name
scriptnames = set()
if '' in self.variants:
scriptnames.add(name)
if 'X' in self.variants:
scriptnames.add('%s%s' % (name, sys.version[0]))
if 'X.Y' in self.variants:
scriptnames.add('%s-%s' % (name, sys.version[:3]))
if options and options.get('gui', False):
ext = 'pyw'
else:
ext = 'py'
self._write_script(scriptnames, shebang, script, filenames, ext)
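# With the default variants {'', 'X.Y'} from __init__, an export entry named
# "foo" on CPython 2.7 is written out as "foo" and "foo-2.7"; enabling the
# 'X' variant would add "foo2" as well.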
def _copy_script(self, script, filenames):
adjust = False
script = os.path.join(self.source_dir, convert_path(script))
outname = os.path.join(self.target_dir, os.path.basename(script))
if not self.force and not self._fileop.newer(script, outname):
logger.debug('not copying %s (up-to-date)', script)
return
# Always open the file, but ignore failures in dry-run mode --
# that way, we'll get accurate feedback if we can read the
# script.
try:
f = open(script, 'rb')
except IOError: # pragma: no cover
if not self.dry_run:
raise
f = None
else:
first_line = f.readline()
if not first_line: # pragma: no cover
logger.warning('%s: %s is an empty file (skipping)',
self.get_command_name(), script)
return
match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
if match:
adjust = True
post_interp = match.group(1) or b''
if not adjust:
if f:
f.close()
self._fileop.copy_file(script, outname)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
else:
logger.info('copying and adjusting %s -> %s', script,
self.target_dir)
if not self._fileop.dry_run:
encoding, lines = detect_encoding(f.readline)
f.seek(0)
shebang = self._get_shebang(encoding, post_interp)
if b'pythonw' in first_line: # pragma: no cover
ext = 'pyw'
else:
ext = 'py'
n = os.path.basename(outname)
self._write_script([n], shebang, f.read(), filenames, ext)
if f:
f.close()
@property
def dry_run(self):
return self._fileop.dry_run
@dry_run.setter
def dry_run(self, value):
self._fileop.dry_run = value
if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover
# Executable launcher support.
# Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
def _get_launcher(self, kind):
if struct.calcsize('P') == 8: # 64-bit
bits = '64'
else:
bits = '32'
name = '%s%s.exe' % (kind, bits)
# Issue 31: don't hardcode an absolute package name, but
# determine it relative to the current package
distlib_package = __name__.rsplit('.', 1)[0]
result = finder(distlib_package).find(name).bytes
return result
# Public API follows
def make(self, specification, options=None):
"""
Make a script.
:param specification: The specification, which is either a valid export
entry specification (to make a script from a
callable) or a filename (to make a script by
copying from a source location).
:param options: A dictionary of options controlling script generation.
:return: A list of all absolute pathnames written to.
"""
filenames = []
entry = get_export_entry(specification)
if entry is None:
self._copy_script(specification, filenames)
else:
self._make_script(entry, filenames, options=options)
return filenames
def make_multiple(self, specifications, options=None):
"""
Take a list of specifications and make scripts from them.
:param specifications: A list of specifications.
:return: A list of all absolute pathnames written to.
"""
filenames = []
for specification in specifications:
filenames.extend(self.make(specification, options))
return filenames
| gpl-2.0 |
alanswanson/webserver | admin/CTK/CTK/ProgressBar.py | 5 | 1975 | # CTK: Cherokee Toolkit
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2010-2014 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import os
from Widget import Widget
from util import props_to_str
HEADERS = [
'<link type="text/css" href="/CTK/css/CTK.css" rel="stylesheet" />',
'<script type="text/javascript" src="/CTK/js/jquery-ui-1.7.2.custom.min.js"></script>'
]
HTML = """
<div id="%(id)s" %(props)s></div>
"""
PERCENT_INIT_JS = """
$('#%(id)s').progressbar({ value: %(value)s });
"""
class ProgressBar (Widget):
def __init__ (self, props={}):
Widget.__init__ (self)
self.id = "progressbar_%d" %(self.uniq_id)
self.value = props.pop ('value', 0)
self.props = props.copy()
if 'class' in props:
self.props['class'] += ' progressbar'
else:
self.props['class'] = 'progressbar'
def Render (self):
render = Widget.Render (self)
props = {'id': self.id,
'value': self.value,
'props': props_to_str (self.props)}
render.html += HTML %(props)
render.js += PERCENT_INIT_JS %(props)
render.headers += HEADERS
return render
def JS_to_set (self, value):
return "$('#%s').progressbar ('option', 'value', %s);" %(self.id, value)
| gpl-2.0 |
temasek/android_external_chromium_org | tools/perf/measurements/smoothness.py | 23 | 2414 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from metrics import smoothness
from metrics import timeline
from telemetry.page import page_measurement
class MissingDisplayFrameRateError(page_measurement.MeasurementFailure):
def __init__(self, name):
super(MissingDisplayFrameRateError, self).__init__(
'Missing display frame rate metrics: ' + name)
class Smoothness(page_measurement.PageMeasurement):
def __init__(self):
super(Smoothness, self).__init__('smoothness')
self._metric = None
def AddCommandLineOptions(self, parser):
metric_choices = ['smoothness', 'timeline']
parser.add_option('--metric', dest='metric', type='choice',
choices=metric_choices,
default='smoothness',
help=('Metric to use in the measurement. ' +
'Supported values: ' + ', '.join(metric_choices)))
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
def CanRunForPage(self, page):
return hasattr(page, 'smoothness')
def WillRunActions(self, page, tab):
if self.options.metric == 'smoothness':
self._metric = smoothness.SmoothnessMetric()
elif self.options.metric == 'timeline':
self._metric = timeline.ThreadTimesTimelineMetric()
self._metric.Start(page, tab)
if tab.browser.platform.IsRawDisplayFrameRateSupported():
tab.browser.platform.StartRawDisplayFrameRateMeasurement()
def DidRunAction(self, page, tab, action):
timeline_marker_name = action.GetTimelineMarkerName()
if self.options.metric == 'smoothness' and timeline_marker_name:
self._metric.AddTimelineMarkerNameToIncludeInMetric(timeline_marker_name)
def DidRunActions(self, page, tab):
if tab.browser.platform.IsRawDisplayFrameRateSupported():
tab.browser.platform.StopRawDisplayFrameRateMeasurement()
self._metric.Stop(page, tab)
def MeasurePage(self, page, tab, results):
self._metric.AddResults(tab, results)
if tab.browser.platform.IsRawDisplayFrameRateSupported():
for r in tab.browser.platform.GetRawDisplayFrameRateMeasurements():
if r.value is None:
raise MissingDisplayFrameRateError(r.name)
results.Add(r.name, r.unit, r.value)
| bsd-3-clause |
brenolf/myfriend | dogs/persons_views.py | 1 | 3453 | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseRedirect
from django.template import RequestContext, loader
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from dogs.models import *
from django.contrib.auth.models import User
from django.contrib import messages
# Form with the answers
@login_required(login_url='/accounts/login/')
def createanswers(request):
user = request.user
if request.method == 'POST':
form_answer = AnswerForm(request.POST)
if form_answer.is_valid():
a = form_answer.save()
user.person.answers = a
user.person.save()
else:
return render(request, 'persons/createanswers.html', {
'form_answer': form_answer,
'user': request.user,
})
return HttpResponseRedirect('/user/')
form_answer = AnswerForm(instance=request.user.person.answers)
return render(request, 'persons/createanswers.html', {
'form_answer': form_answer,
'user': request.user,
})
# Form to complete the registration
@login_required(login_url='/accounts/login/')
def create(request): # TODO: later, make this RESTful
user = request.user
if request.method == 'POST': # If the form has been submitted...
# A form bound to the POST data
form_address = AddressForm(request.POST)
form_person = PersonForm(request.POST)
form_user = UserForm(request.POST)
if form_address.is_valid() and form_user.is_valid():
user.first_name = form_user.cleaned_data['first_name']
user.last_name = form_user.cleaned_data['last_name']
address = form_address.save()
if form_person.is_valid():
person = form_person.save(commit=False)
person.address = address
person.id = user.person.id
person.answers = user.person.answers
user.person = person
person.save()
user.save()
return HttpResponseRedirect('/user/')
else:
return render(request, 'persons/create.html', {
'form_person': form_person,
'form_address': form_address,
'form_user': form_user,
'user': request.user,
})
else:
return render(request, 'persons/create.html', {
'form_person': form_person,
'form_address': form_address,
'form_user': form_user,
'user': request.user,
})
return HttpResponseRedirect('/user/') # Redirect after POST
else:
form_person = PersonForm(instance=request.user.person)
form_address = AddressForm(instance=request.user.person.address)
form_user = UserForm(instance=request.user)
return render(request, 'persons/create.html', {
'form_person': form_person,
'form_address': form_address,
'form_user': form_user,
'user': request.user,
})
def detail(request, person_username):
user = get_object_or_404(User, pk=person_username)
person = user.person
return render(request, 'persons/detail.html', {'person': person})
| apache-2.0 |
MaDKaTZe/phantomjs | src/breakpad/src/tools/gyp/test/subdirectory/gyptest-top-all.py | 240 | 1384 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a target and a subsidiary dependent target from a
.gyp file in a subdirectory, without specifying an explicit output build
directory, and using the generated solution or project file at the top
of the tree as the entry point.
There is a difference here in the default behavior of the underlying
build tools. Specifically, when building the entire "solution", Xcode
puts the output of each project relative to the .xcodeproj directory,
while Visual Studio (and our implementations of SCons and Make) put it
in a build directory relative to the "solution"--that is, the entry-point
from which you built the entire tree.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('prog1.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('prog1.gyp', test.ALL, chdir='relocate/src')
test.run_built_executable('prog1',
stdout="Hello from prog1.c\n",
chdir='relocate/src')
if test.format == 'xcode':
chdir = 'relocate/src/subdir'
else:
chdir = 'relocate/src'
test.run_built_executable('prog2',
chdir=chdir,
stdout="Hello from prog2.c\n")
test.pass_test()
| bsd-3-clause |
jumpstarter-io/nova | nova/virt/block_device.py | 5 | 16937 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import operator
from nova import block_device
from nova.i18n import _
from nova.i18n import _LI
from nova import objects
from nova.objects import base as obj_base
from nova.openstack.common import excutils
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.volume import encryptors
LOG = logging.getLogger(__name__)
class _NotTransformable(Exception):
pass
class _InvalidType(_NotTransformable):
pass
class _NoLegacy(Exception):
pass
def update_db(method):
@functools.wraps(method)
def wrapped(obj, context, *args, **kwargs):
ret_val = method(obj, context, *args, **kwargs)
obj.save(context)
return ret_val
return wrapped
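# update_db decorates methods below (e.g. the volume attach() implementations)
# so that any fields they fill in, such as connection_info, are persisted
# through save() as soon as the call returns.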
class DriverBlockDevice(dict):
"""A dict subclass that represents block devices used by the virt layer.
Uses block device objects internally to do the database access.
_fields and _legacy_fields class attributes present a set of fields that
are expected on a certain DriverBlockDevice type. We may have more legacy
versions in the future.
If an attribute access is attempted for a name that is found in the
_proxy_as_attr set, it will be proxied to the underlying object. This
allows us to access stuff that is not part of the data model that all
drivers understand.
The save() method allows us to update the database using the underlying
object. _update_on_save class attribute dictionary keeps the following
mapping:
{'object field name': 'driver dict field name (or None if same)'}
These fields will be updated on the internal object, from the values in the
dict, before the actual database update is done.
"""
_fields = set()
_legacy_fields = set()
_proxy_as_attr = set()
_update_on_save = {'disk_bus': None,
'device_name': None,
'device_type': None}
def __init__(self, bdm):
# TODO(ndipanov): Remove this check when we have all the rpc methods
# use objects for block devices.
if isinstance(bdm, obj_base.NovaObject):
self.__dict__['_bdm_obj'] = bdm
else:
self.__dict__['_bdm_obj'] = objects.BlockDeviceMapping()
self._bdm_obj.update(block_device.BlockDeviceDict(bdm))
self._bdm_obj.obj_reset_changes()
if self._bdm_obj.no_device:
raise _NotTransformable()
self.update(dict((field, None)
for field in self._fields))
self._transform()
def __getattr__(self, name):
if name in self._proxy_as_attr:
return getattr(self._bdm_obj, name)
else:
super(DriverBlockDevice, self).__getattr__(name)
def __setattr__(self, name, value):
if name in self._proxy_as_attr:
return setattr(self._bdm_obj, name, value)
else:
super(DriverBlockDevice, self).__setattr__(name, value)
def _transform(self):
"""Transform bdm to the format that is passed to drivers."""
raise NotImplementedError()
def legacy(self):
"""Basic legacy transformation.
Basic method will just drop the fields that are not in
_legacy_fields set. Override this in subclass if needed.
"""
return dict((key, self.get(key)) for key in self._legacy_fields)
def attach(self, **kwargs):
"""Make the device available to be used by VMs.
To be overridden in subclasses with the connecting logic for
the type of device the subclass represents.
"""
raise NotImplementedError()
def save(self, context=None):
for attr_name, key_name in self._update_on_save.iteritems():
setattr(self._bdm_obj, attr_name, self[key_name or attr_name])
if context:
self._bdm_obj.save(context)
else:
self._bdm_obj.save()
class DriverSwapBlockDevice(DriverBlockDevice):
_fields = set(['device_name', 'swap_size', 'disk_bus'])
_legacy_fields = _fields - set(['disk_bus'])
_update_on_save = {'disk_bus': None,
'device_name': None}
def _transform(self):
if not block_device.new_format_is_swap(self._bdm_obj):
raise _InvalidType
self.update({
'device_name': self._bdm_obj.device_name,
'swap_size': self._bdm_obj.volume_size or 0,
'disk_bus': self._bdm_obj.disk_bus
})
class DriverEphemeralBlockDevice(DriverBlockDevice):
_new_only_fields = set(['disk_bus', 'device_type', 'guest_format'])
_fields = set(['device_name', 'size']) | _new_only_fields
_legacy_fields = (_fields - _new_only_fields |
set(['num', 'virtual_name']))
def _transform(self):
if not block_device.new_format_is_ephemeral(self._bdm_obj):
raise _InvalidType
self.update({
'device_name': self._bdm_obj.device_name,
'size': self._bdm_obj.volume_size or 0,
'disk_bus': self._bdm_obj.disk_bus,
'device_type': self._bdm_obj.device_type,
'guest_format': self._bdm_obj.guest_format
})
def legacy(self, num=0):
legacy_bdm = super(DriverEphemeralBlockDevice, self).legacy()
legacy_bdm['num'] = num
legacy_bdm['virtual_name'] = 'ephemeral' + str(num)
return legacy_bdm
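# e.g. the second ephemeral device's legacy dict (num=1) carries num=1 and
# virtual_name='ephemeral1' alongside its device_name and size.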
class DriverVolumeBlockDevice(DriverBlockDevice):
_legacy_fields = set(['connection_info', 'mount_device',
'delete_on_termination'])
_new_fields = set(['guest_format', 'device_type',
'disk_bus', 'boot_index'])
_fields = _legacy_fields | _new_fields
_valid_source = 'volume'
_valid_destination = 'volume'
_proxy_as_attr = set(['volume_size', 'volume_id'])
_update_on_save = {'disk_bus': None,
'device_name': 'mount_device',
'device_type': None}
def _transform(self):
if (not self._bdm_obj.source_type == self._valid_source
or not self._bdm_obj.destination_type ==
self._valid_destination):
raise _InvalidType
self.update(
dict((k, v) for k, v in self._bdm_obj.iteritems()
if k in self._new_fields | set(['delete_on_termination']))
)
self['mount_device'] = self._bdm_obj.device_name
try:
self['connection_info'] = jsonutils.loads(
self._bdm_obj.connection_info)
except TypeError:
self['connection_info'] = None
def _preserve_multipath_id(self, connection_info):
if self['connection_info'] and 'data' in self['connection_info']:
if 'multipath_id' in self['connection_info']['data']:
connection_info['data']['multipath_id'] =\
self['connection_info']['data']['multipath_id']
LOG.info(_LI('preserve multipath_id %s'),
connection_info['data']['multipath_id'])
@update_db
def attach(self, context, instance, volume_api, virt_driver,
do_check_attach=True, do_driver_attach=False):
volume = volume_api.get(context, self.volume_id)
if do_check_attach:
volume_api.check_attach(context, volume, instance=instance)
volume_id = volume['id']
context = context.elevated()
connector = virt_driver.get_volume_connector(instance)
connection_info = volume_api.initialize_connection(context,
volume_id,
connector)
if 'serial' not in connection_info:
connection_info['serial'] = self.volume_id
self._preserve_multipath_id(connection_info)
# If do_driver_attach is False, we will attach a volume to an instance
# at boot time. So actual attach is done by instance creation code.
if do_driver_attach:
encryption = encryptors.get_encryption_metadata(
context, volume_api, volume_id, connection_info)
try:
virt_driver.attach_volume(
context, connection_info, instance,
self['mount_device'], disk_bus=self['disk_bus'],
device_type=self['device_type'], encryption=encryption)
except Exception: # pylint: disable=W0702
with excutils.save_and_reraise_exception():
LOG.exception(_("Driver failed to attach volume "
"%(volume_id)s at %(mountpoint)s"),
{'volume_id': volume_id,
'mountpoint': self['mount_device']},
context=context, instance=instance)
volume_api.terminate_connection(context, volume_id,
connector)
self['connection_info'] = connection_info
mode = 'rw'
if 'data' in connection_info:
mode = connection_info['data'].get('access_mode', 'rw')
if volume['attach_status'] == "detached":
volume_api.attach(context, volume_id, instance['uuid'],
self['mount_device'], mode=mode)
@update_db
def refresh_connection_info(self, context, instance,
volume_api, virt_driver):
# NOTE (ndipanov): A no-op if there is no connection info already
if not self['connection_info']:
return
connector = virt_driver.get_volume_connector(instance)
connection_info = volume_api.initialize_connection(context,
self.volume_id,
connector)
if 'serial' not in connection_info:
connection_info['serial'] = self.volume_id
self._preserve_multipath_id(connection_info)
self['connection_info'] = connection_info
def save(self, context=None):
# NOTE(ndipanov): we might want to generalize this by adding it to the
# _update_on_save and adding a transformation function.
try:
self._bdm_obj.connection_info = jsonutils.dumps(
self.get('connection_info'))
except TypeError:
pass
super(DriverVolumeBlockDevice, self).save(context)
class DriverSnapshotBlockDevice(DriverVolumeBlockDevice):
_valid_source = 'snapshot'
_proxy_as_attr = set(['volume_size', 'volume_id', 'snapshot_id'])
def attach(self, context, instance, volume_api,
virt_driver, wait_func=None, do_check_attach=True):
if not self.volume_id:
snapshot = volume_api.get_snapshot(context,
self.snapshot_id)
vol = volume_api.create(context, self.volume_size,
'', '', snapshot)
if wait_func:
wait_func(context, vol['id'])
self.volume_id = vol['id']
# Call the volume attach now
super(DriverSnapshotBlockDevice, self).attach(
context, instance, volume_api, virt_driver,
do_check_attach=do_check_attach)
class DriverImageBlockDevice(DriverVolumeBlockDevice):
_valid_source = 'image'
_proxy_as_attr = set(['volume_size', 'volume_id', 'image_id'])
def attach(self, context, instance, volume_api,
virt_driver, wait_func=None, do_check_attach=True):
if not self.volume_id:
vol = volume_api.create(context, self.volume_size,
'', '', image_id=self.image_id)
if wait_func:
wait_func(context, vol['id'])
self.volume_id = vol['id']
super(DriverImageBlockDevice, self).attach(
context, instance, volume_api, virt_driver,
do_check_attach=do_check_attach)
class DriverBlankBlockDevice(DriverVolumeBlockDevice):
_valid_source = 'blank'
_proxy_as_attr = set(['volume_size', 'volume_id', 'image_id'])
def attach(self, context, instance, volume_api,
virt_driver, wait_func=None, do_check_attach=True):
if not self.volume_id:
vol_name = instance.uuid + '-blank-vol'
vol = volume_api.create(context, self.volume_size, vol_name, '')
if wait_func:
wait_func(context, vol['id'])
self.volume_id = vol['id']
super(DriverBlankBlockDevice, self).attach(
context, instance, volume_api, virt_driver,
do_check_attach=do_check_attach)
def _convert_block_devices(device_type, block_device_mapping):
def _is_transformable(bdm):
try:
device_type(bdm)
except _NotTransformable:
return False
return True
return [device_type(bdm)
for bdm in block_device_mapping
if _is_transformable(bdm)]
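# Convenience converters: each one filters a block device mapping list
# down to the devices its driver class accepts (anything else raises
# _NotTransformable inside _convert_block_devices and is skipped).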
convert_swap = functools.partial(_convert_block_devices,
DriverSwapBlockDevice)
convert_ephemerals = functools.partial(_convert_block_devices,
DriverEphemeralBlockDevice)
convert_volumes = functools.partial(_convert_block_devices,
DriverVolumeBlockDevice)
convert_snapshots = functools.partial(_convert_block_devices,
DriverSnapshotBlockDevice)
convert_images = functools.partial(_convert_block_devices,
DriverImageBlockDevice)
convert_blanks = functools.partial(_convert_block_devices,
DriverBlankBlockDevice)
def attach_block_devices(block_device_mapping, *attach_args, **attach_kwargs):
def _log_and_attach(bdm):
context = attach_args[0]
instance = attach_args[1]
LOG.audit(_('Booting with volume %(volume_id)s at %(mountpoint)s'),
{'volume_id': bdm.volume_id,
'mountpoint': bdm['mount_device']},
context=context, instance=instance)
bdm.attach(*attach_args, **attach_kwargs)
map(_log_and_attach, block_device_mapping)
return block_device_mapping
def refresh_conn_infos(block_device_mapping, *refresh_args, **refresh_kwargs):
map(operator.methodcaller('refresh_connection_info',
*refresh_args, **refresh_kwargs),
block_device_mapping)
return block_device_mapping
def legacy_block_devices(block_device_mapping):
def _has_legacy(bdm):
try:
bdm.legacy()
except _NoLegacy:
return False
return True
bdms = [bdm.legacy()
for bdm in block_device_mapping
if _has_legacy(bdm)]
# Re-enumerate ephemeral devices
if all(isinstance(bdm, DriverEphemeralBlockDevice)
for bdm in block_device_mapping):
for i, dev in enumerate(bdms):
dev['virtual_name'] = dev['virtual_name'][:-1] + str(i)
dev['num'] = i
return bdms
def get_swap(transformed_list):
"""Get the swap device out of the list context.
The block_device_info needs swap to be a single device,
not a list - otherwise this is a no-op.
"""
if not all(isinstance(device, DriverSwapBlockDevice) or
'swap_size' in device
for device in transformed_list):
return transformed_list
try:
return transformed_list.pop()
except IndexError:
return None
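# Usage sketch (hedged; `bdms` stands for any list of driver block
# devices, e.g. the output of the convert_swap helper above):
#
#     swap = get_swap(convert_swap(bdms))
#     # `swap` is now a single device or None, suitable for block_device_info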
_IMPLEMENTED_CLASSES = (DriverSwapBlockDevice, DriverEphemeralBlockDevice,
DriverVolumeBlockDevice, DriverSnapshotBlockDevice,
DriverImageBlockDevice, DriverBlankBlockDevice)
def is_implemented(bdm):
for cls in _IMPLEMENTED_CLASSES:
try:
cls(bdm)
return True
except _NotTransformable:
pass
return False
def is_block_device_mapping(bdm):
return (bdm.source_type in ('image', 'volume', 'snapshot', 'blank')
and bdm.destination_type == 'volume'
and is_implemented(bdm))
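# Usage sketch (`bdm` is any block device mapping object exposing
# `source_type` and `destination_type` attributes):
#
#     if is_block_device_mapping(bdm):
#         # at least one driver class above can transform it, e.g. via
#         # one of the convert_* helpers
#         ...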
| apache-2.0 |
wangjun/odoo | openerp/service/common.py | 281 | 1873 | # -*- coding: utf-8 -*-
import logging
import openerp.release
import openerp.tools
from openerp.tools.translate import _
import security
_logger = logging.getLogger(__name__)
RPC_VERSION_1 = {
'server_version': openerp.release.version,
'server_version_info': openerp.release.version_info,
'server_serie': openerp.release.serie,
'protocol_version': 1,
}
def dispatch(method, params):
if method not in ['login', 'about', 'timezone_get',
'version', 'authenticate', 'set_loglevel']:
raise Exception("Method not found: %s" % method)
fn = globals()['exp_' + method]
return fn(*params)
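# For example, dispatch('version', []) resolves to exp_version() below and
# returns the RPC_VERSION_1 dict.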
def exp_login(db, login, password):
# TODO: legacy indirection through 'security', should use directly
# the res.users model
res = security.login(db, login, password)
msg = res and 'successful login' or 'bad login or password'
_logger.info("%s from '%s' using database '%s'", msg, login, db.lower())
return res or False
def exp_authenticate(db, login, password, user_agent_env):
res_users = openerp.registry(db)['res.users']
return res_users.authenticate(db, login, password, user_agent_env)
def exp_version():
return RPC_VERSION_1
def exp_about(extended=False):
"""Return information about the OpenERP Server.
@param extended: if True then return version info
@return string if extended is False else tuple
"""
info = _('See http://openerp.com')
if extended:
return info, openerp.release.version
return info
def exp_timezone_get(db, login, password):
return openerp.tools.misc.get_server_timezone()
def exp_set_loglevel(loglevel, logger=None):
# TODO Previously, the level was set on the now deprecated
# `openerp.netsvc.Logger` class.
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Kami/libcloud | libcloud/common/digitalocean.py | 6 | 7037 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Common settings and connection objects for DigitalOcean Cloud
"""
from libcloud.utils.py3 import httplib, parse_qs, urlparse
from libcloud.common.base import BaseDriver
from libcloud.common.base import ConnectionKey
from libcloud.common.base import JsonResponse
from libcloud.common.types import LibcloudError, InvalidCredsError
__all__ = [
'DigitalOcean_v2_Response',
'DigitalOcean_v2_Connection',
'DigitalOceanBaseDriver'
]
class DigitalOcean_v1_Error(LibcloudError):
"""
Exception for when attempting to use version 1
of the DigitalOcean API which is no longer
supported.
"""
def __init__(self,
value=('Driver no longer supported: Version 1 of the '
'DigitalOcean API reached end of life on November 9, '
'2015. Use the v2 driver. Please visit: '
'https://developers.digitalocean.com/documentation/changelog/api-v1/sunsetting-api-v1/'), # noqa: E501
driver=None):
super(DigitalOcean_v1_Error, self).__init__(value, driver=driver)
class DigitalOcean_v2_Response(JsonResponse):
valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
httplib.NO_CONTENT]
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
body = self.parse_body()
raise InvalidCredsError(body['message'])
else:
body = self.parse_body()
if 'message' in body:
error = '%s (code: %s)' % (body['message'], self.status)
else:
error = body
return error
def success(self):
return self.status in self.valid_response_codes
class DigitalOcean_v2_Connection(ConnectionKey):
"""
Connection class for the DigitalOcean (v2) driver.
"""
host = 'api.digitalocean.com'
responseCls = DigitalOcean_v2_Response
def add_default_headers(self, headers):
"""
Add headers that are necessary for every request
This method adds ``token`` to the request.
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
return headers
def add_default_params(self, params):
"""
Add parameters that are necessary for every request
This method adds ``per_page`` to the request to reduce the total
number of paginated requests to the API.
"""
# pylint: disable=maybe-no-member
params['per_page'] = self.driver.ex_per_page
return params
class DigitalOceanConnection(DigitalOcean_v2_Connection):
"""
Connection class for the DigitalOcean driver.
"""
pass
class DigitalOceanResponse(DigitalOcean_v2_Response):
pass
class DigitalOceanBaseDriver(BaseDriver):
"""
DigitalOcean BaseDriver
"""
name = 'DigitalOcean'
website = 'https://www.digitalocean.com'
def __new__(cls, key, secret=None, api_version='v2', **kwargs):
if cls is DigitalOceanBaseDriver:
if api_version == 'v1' or secret is not None:
raise DigitalOcean_v1_Error()
elif api_version == 'v2':
cls = DigitalOcean_v2_BaseDriver
else:
raise NotImplementedError('Unsupported API version: %s' %
(api_version))
return super(DigitalOceanBaseDriver, cls).__new__(cls, **kwargs)
def ex_account_info(self):
raise NotImplementedError(
'ex_account_info not implemented for this driver')
def ex_list_events(self):
raise NotImplementedError(
'ex_list_events not implemented for this driver')
def ex_get_event(self, event_id):
raise NotImplementedError(
'ex_get_event not implemented for this driver')
def _paginated_request(self, url, obj):
raise NotImplementedError(
'_paginated_requests not implemented for this driver')
class DigitalOcean_v2_BaseDriver(DigitalOceanBaseDriver):
"""
DigitalOcean BaseDriver using v2 of the API.
Supports `ex_per_page` ``int`` value keyword parameter to adjust per page
requests against the API.
"""
connectionCls = DigitalOcean_v2_Connection
def __init__(self, key, secret=None, secure=True, host=None, port=None,
api_version=None, region=None, ex_per_page=200, **kwargs):
self.ex_per_page = ex_per_page
super(DigitalOcean_v2_BaseDriver, self).__init__(key, **kwargs)
def ex_account_info(self):
return self.connection.request('/v2/account').object['account']
def ex_list_events(self):
return self._paginated_request('/v2/actions', 'actions')
def ex_get_event(self, event_id):
"""
Get an event object
:param event_id: Event id (required)
:type event_id: ``str``
"""
params = {}
return self.connection.request('/v2/actions/%s' % event_id,
params=params).object['action']
def _paginated_request(self, url, obj):
"""
Perform multiple calls in order to have a full list of elements when
the API responses are paginated.
:param url: API endpoint
:type url: ``str``
:param obj: Result object key
:type obj: ``str``
:return: ``list`` of API response objects
:rtype: ``list``
"""
params = {}
data = self.connection.request(url)
try:
query = urlparse.urlparse(data.object['links']['pages']['last'])
# The query[4] references the query parameters from the url
pages = parse_qs(query[4])['page'][0]
values = data.object[obj]
for page in range(2, int(pages) + 1):
params.update({'page': page})
new_data = self.connection.request(url, params=params)
more_values = new_data.object[obj]
for value in more_values:
values.append(value)
data = values
except KeyError: # No pages.
data = data.object[obj]
return data
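# Usage sketch: ex_list_events() above does exactly this, e.g.
#
#     self._paginated_request('/v2/actions', 'actions')
#
# walks every ?page=N of the collection and returns the concatenated list.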
| apache-2.0 |
PyCQA/pylint | tests/functional/m/missing/missing_docstring.py | 2 | 1027 | # [missing-module-docstring]
# pylint: disable=too-few-public-methods, useless-object-inheritance
def public_documented():
"""It has a docstring."""
def _private_undocumented():
# Doesn't need a docstring
pass
def _private_documented():
"""It has a docstring."""
class ClassDocumented(object):
"""It has a docstring."""
class ClassUndocumented(object): # [missing-class-docstring]
pass
def public_undocumented(): # [missing-function-docstring]
pass
def __sizeof__():
# Special
pass
def __mangled():
pass
class Property(object):
"""Don't warn about setters and deleters."""
def __init__(self):
self._value = None
@property
def test(self):
"""Default docstring for setters and deleters."""
@test.setter
def test(self, value):
self._value = value
@test.deleter
def test(self):
pass
class DocumentedViaDunderDoc(object):
__doc__ = "This one"
| gpl-2.0 |
robertnishihara/ray | doc/source/tune/_tutorials/tune-serve-integration-mnist.py | 2 | 27211 | # flake8: noqa
"""
Model selection and serving with Ray Tune and Ray Serve
=======================================================
This tutorial will show you an end-to-end example of how to train a
model using Ray Tune on incrementally arriving data and deploy
the model using Ray Serve.
A machine learning workflow can be quite simple: You decide on
the problem you're trying to solve, collect and annotate the
data, and build a model to hopefully solve your problem. But
usually the work is not over yet. First, you would likely continue
to do some hyperparameter optimization to obtain the best possible
model (called *model selection*). Second, your trained model
somehow has to be moved to production - in other words, users
or services should be enabled to use your model to actually make
predictions. This part is called *model serving*.
Fortunately, Ray includes two libraries that help you with these
two steps: Ray Tune and Ray Serve. Better yet, they complement
each other nicely. Most notably, both are able to scale up your
workloads easily - so both your model training and serving benefit
from additional resources and can adapt to your environment. If you
need to train on more data or have more hyperparameters to tune,
Ray Tune can leverage your whole cluster for training. If you have
many users doing inference on your served models, Ray Serve can
automatically distribute the inference backends to multiple nodes.
This tutorial will show you an end-to-end example of how to train an MNIST
image classifier on incrementally arriving data and automatically
serve an updated model on an HTTP endpoint.
By the end of this tutorial you will be able to
1. Do hyperparameter optimization on a simple MNIST classifier
2. Continue to train this classifier from an existing model with
newly arriving data
3. Automatically create and serve data backends with Ray Serve
Roadmap and desired functionality
---------------------------------
The general idea of this example is that we simulate newly arriving
data each day. At day 0 we might already have some initial data
available, and each day new data arrives.
Our approach here is that we offer two ways to train: From scratch and
from an existing model. Maybe you would like to train and select models
from scratch each week with all data available until then, e.g. each
Sunday, like this:
.. code-block:: bash
# Train with all data available at day 0
python tune-serve-integration-mnist.py --from_scratch --day 0
During the other days you might want to improve your model, but
not train everything from scratch, saving some cluster resources.
.. code-block:: bash
# Train with data arriving between day 0 and day 1
python tune-serve-integration-mnist.py --from_existing --day 1
# Train with incremental data on the other days, too
python tune-serve-integration-mnist.py --from_existing --day 2
python tune-serve-integration-mnist.py --from_existing --day 3
python tune-serve-integration-mnist.py --from_existing --day 4
python tune-serve-integration-mnist.py --from_existing --day 5
python tune-serve-integration-mnist.py --from_existing --day 6
# Retrain from scratch every 7th day:
python tune-serve-integration-mnist.py --from_scratch --day 7
This example will support both modes. After each model selection run,
we will tell Ray Serve to serve an updated model. We also include a
small utility to query our served model to see if it works as it should.
.. code-block:: bash
$ python tune-serve-integration-mnist.py --query 6
Querying model with example #6. Label = 1, Response = 1, Correct = True
Imports
-------
Let's start with our dependencies. Most of these should be familiar
if you worked with PyTorch before. The most notable import for Ray
is the ``from ray import tune, serve`` import statement - which
includes almost all the things we need from the Ray side.
"""
import argparse
import json
import os
import shutil
import sys
from functools import partial
from math import ceil
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import ray
from ray import tune, serve
from ray.serve.exceptions import RayServeException
from ray.tune import CLIReporter
from ray.tune.schedulers import ASHAScheduler
from torch.utils.data import random_split, Subset
from torchvision.datasets import MNIST
from torchvision.transforms import transforms
#######################################################################
# Data interface
# --------------
# Let's start with a simulated data interface. This class acts as the
# interface between your training code and your database. We simulate
# that new data arrives each day with a ``day`` parameter. So, calling
# ``get_data(day=3)`` would return all data we received until day 3.
# We also implement an incremental data method, so calling
# ``get_incremental_data(day=3)`` would return all data collected
# between day 2 and day 3.
class MNISTDataInterface(object):
"""Data interface. Simulates that new data arrives every day."""
def __init__(self, data_dir, max_days=10):
self.data_dir = data_dir
self.max_days = max_days
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307, ), (0.3081, ))
])
self.dataset = MNIST(
self.data_dir, train=True, download=True, transform=transform)
def _get_day_slice(self, day=0):
if day < 0:
return 0
n = len(self.dataset)
# Start with 30% of the data, get more data each day
return min(n, ceil(n * (0.3 + 0.7 * day / self.max_days)))
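    # Worked example: with the full MNIST train split (n=60000) and
    # max_days=10, day 0 yields 18000 examples, day 5 yields 39000,
    # and day 10 yields all 60000.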
def get_data(self, day=0):
"""Get complete normalized train and validation data to date."""
end = self._get_day_slice(day)
available_data = Subset(self.dataset, list(range(end)))
train_n = int(0.8 * end) # 80% train data, 20% validation data
return random_split(available_data, [train_n, end - train_n])
def get_incremental_data(self, day=0):
"""Get next normalized train and validation data day slice."""
start = self._get_day_slice(day - 1)
end = self._get_day_slice(day)
available_data = Subset(self.dataset, list(range(start, end)))
train_n = int(
0.8 * (end - start)) # 80% train data, 20% validation data
return random_split(available_data, [train_n, end - start - train_n])
#######################################################################
# PyTorch neural network classifier
# ---------------------------------
# Next, we will introduce our PyTorch neural network model and the
# train and test function. These are adapted directly from
# our :doc:`PyTorch MNIST example </tune/examples/mnist_pytorch>`.
# We only introduce an additional neural network layer with a configurable
# layer size. This is not strictly needed for good performance on
# MNIST, but it is useful to demonstrate scenarios where your hyperparameter
# search space affects the model complexity.
class ConvNet(nn.Module):
def __init__(self, layer_size=192):
super(ConvNet, self).__init__()
self.layer_size = layer_size
self.conv1 = nn.Conv2d(1, 3, kernel_size=3)
self.fc = nn.Linear(192, self.layer_size)
self.out = nn.Linear(self.layer_size, 10)
def forward(self, x):
x = F.relu(F.max_pool2d(self.conv1(x), 3))
x = x.view(-1, 192)
x = self.fc(x)
x = self.out(x)
return F.log_softmax(x, dim=1)
def train(model, optimizer, train_loader, device=None):
device = device or torch.device("cpu")
model.train()
for batch_idx, (data, target) in enumerate(train_loader):
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
loss.backward()
optimizer.step()
def test(model, data_loader, device=None):
device = device or torch.device("cpu")
model.eval()
correct = 0
total = 0
with torch.no_grad():
for batch_idx, (data, target) in enumerate(data_loader):
data, target = data.to(device), target.to(device)
outputs = model(data)
_, predicted = torch.max(outputs.data, 1)
total += target.size(0)
correct += (predicted == target).sum().item()
return correct / total
#######################################################################
# Tune trainable for model selection
# ----------------------------------
# We'll now define our Tune trainable function. This function takes
# a ``config`` parameter containing the hyperparameters we should train
# the model on, and will start a full training run. This means it
# will take care of creating the model and optimizer and repeatedly
# call the ``train`` function to train the model. Also, this function
# will report the training progress back to Tune.
def train_mnist(config,
start_model=None,
checkpoint_dir=None,
num_epochs=10,
use_gpus=False,
data_fn=None,
day=0):
# Create model
use_cuda = use_gpus and torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")
model = ConvNet(layer_size=config["layer_size"]).to(device)
# Create optimizer
optimizer = optim.SGD(
model.parameters(), lr=config["lr"], momentum=config["momentum"])
# Load checkpoint, or load start model if no checkpoint has been
# passed and a start model is specified
load_dir = None
if checkpoint_dir:
load_dir = checkpoint_dir
elif start_model:
load_dir = start_model
if load_dir:
model_state, optimizer_state = torch.load(
os.path.join(load_dir, "checkpoint"))
model.load_state_dict(model_state)
optimizer.load_state_dict(optimizer_state)
# Get full training datasets
train_dataset, validation_dataset = data_fn(day=day)
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=config["batch_size"], shuffle=True)
validation_loader = torch.utils.data.DataLoader(
validation_dataset, batch_size=config["batch_size"], shuffle=True)
for i in range(num_epochs):
train(model, optimizer, train_loader, device)
acc = test(model, validation_loader, device)
if i == num_epochs - 1:
with tune.checkpoint_dir(step=i) as checkpoint_dir:
torch.save((model.state_dict(), optimizer.state_dict()),
os.path.join(checkpoint_dir, "checkpoint"))
tune.report(mean_accuracy=acc, done=True)
else:
tune.report(mean_accuracy=acc)
#######################################################################
# Configuring the search space and starting Ray Tune
# --------------------------------------------------
# We would like to support two modes of training the model: Training
# a model from scratch, and continuing to train a model from an
# existing one.
#
# This is our function to train a number of models with different
# hyperparameters from scratch, i.e. from all data that is available
# until the given day. Our search space can thus also contain parameters
# that affect the model complexity (such as the layer size), since it
# does not have to be compatible to an existing model.
def tune_from_scratch(num_samples=10, num_epochs=10, gpus_per_trial=0., day=0):
data_interface = MNISTDataInterface("/tmp/mnist_data", max_days=10)
num_examples = data_interface._get_day_slice(day)
config = {
"batch_size": tune.choice([16, 32, 64]),
"layer_size": tune.choice([32, 64, 128, 192]),
"lr": tune.loguniform(1e-4, 1e-1),
"momentum": tune.uniform(0.1, 0.9),
}
scheduler = ASHAScheduler(
metric="mean_accuracy",
mode="max",
max_t=num_epochs,
grace_period=1,
reduction_factor=2)
reporter = CLIReporter(
parameter_columns=["layer_size", "lr", "momentum", "batch_size"],
metric_columns=["mean_accuracy", "training_iteration"])
analysis = tune.run(
partial(
train_mnist,
start_model=None,
data_fn=data_interface.get_data,
num_epochs=num_epochs,
use_gpus=True if gpus_per_trial > 0 else False,
day=day),
resources_per_trial={
"cpu": 1,
"gpu": gpus_per_trial
},
config=config,
num_samples=num_samples,
scheduler=scheduler,
progress_reporter=reporter,
verbose=0,
name="tune_serve_mnist_fromscratch")
best_trial = analysis.get_best_trial("mean_accuracy", "max", "last")
best_accuracy = best_trial.metric_analysis["mean_accuracy"]["last"]
best_trial_config = best_trial.config
best_checkpoint = best_trial.checkpoint.value
return best_accuracy, best_trial_config, best_checkpoint, num_examples
#######################################################################
# To continue training from an existing model, we can use this function
# instead. It takes a starting model (a checkpoint) as a parameter and
# the old config.
#
# Note that this time the search space does *not* contain the
# layer size parameter. Since we continue to train an existing model,
# we cannot change the layer size mid training, so we just continue
# to use the existing one.
def tune_from_existing(start_model,
start_config,
num_samples=10,
num_epochs=10,
gpus_per_trial=0.,
day=0):
data_interface = MNISTDataInterface("/tmp/mnist_data", max_days=10)
num_examples = data_interface._get_day_slice(day) - \
data_interface._get_day_slice(day - 1)
config = start_config.copy()
config.update({
"batch_size": tune.choice([16, 32, 64]),
"lr": tune.loguniform(1e-4, 1e-1),
"momentum": tune.uniform(0.1, 0.9),
})
scheduler = ASHAScheduler(
metric="mean_accuracy",
mode="max",
max_t=num_epochs,
grace_period=1,
reduction_factor=2)
reporter = CLIReporter(
parameter_columns=["lr", "momentum", "batch_size"],
metric_columns=["mean_accuracy", "training_iteration"])
analysis = tune.run(
partial(
train_mnist,
start_model=start_model,
data_fn=data_interface.get_incremental_data,
num_epochs=num_epochs,
use_gpus=True if gpus_per_trial > 0 else False,
day=day),
resources_per_trial={
"cpu": 1,
"gpu": gpus_per_trial
},
config=config,
num_samples=num_samples,
scheduler=scheduler,
progress_reporter=reporter,
verbose=0,
name="tune_serve_mnist_fromsexisting")
best_trial = analysis.get_best_trial("mean_accuracy", "max", "last")
best_accuracy = best_trial.metric_analysis["mean_accuracy"]["last"]
best_trial_config = best_trial.config
best_checkpoint = best_trial.checkpoint.value
return best_accuracy, best_trial_config, best_checkpoint, num_examples
#######################################################################
# Serving tuned models with Ray Serve
# -----------------------------------
# Let's now turn to the model serving part with Ray Serve. Serve
# distinguishes between *backends* and *endpoints*. Broadly speaking, a
# backend handles incoming requests and replies with a result. For
# instance, our MNIST backend takes an image as input and outputs the
# digit it recognized from it. An endpoint on the other hand forwards
# incoming HTTP requests to one or more different backends, according
# to a routing policy.
#
# First, we will define our backend. This backend loads our PyTorch
# MNIST model from a checkpoint, takes an image as an input and
# outputs our digit prediction according to our trained model:
class MNISTBackend:
def __init__(self, checkpoint_dir, config, metrics, use_gpu=False):
self.checkpoint_dir = checkpoint_dir
self.config = config
self.metrics = metrics
use_cuda = use_gpu and torch.cuda.is_available()
self.device = torch.device("cuda" if use_cuda else "cpu")
model = ConvNet(layer_size=self.config["layer_size"]).to(self.device)
model_state, optimizer_state = torch.load(
os.path.join(self.checkpoint_dir, "checkpoint"),
map_location=self.device)
model.load_state_dict(model_state)
self.model = model
def __call__(self, flask_request):
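        # Expects a JSON body of the form {"images": [...]}, where each
        # entry is a nested list representing one image tensor (see the
        # --query code path at the bottom of this script).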
images = torch.tensor(flask_request.json["images"])
images = images.to(self.device)
outputs = self.model(images)
predicted = torch.max(outputs.data, 1)[1]
return {"result": predicted.numpy().tolist()}
#######################################################################
# We would like to have a fixed location where we store the currently
# active model. We call this directory ``model_dir``. Every time we
# would like to update our model, we copy the checkpoint of the new
# model to this directory. We then create a new backend pointing to
# that directory, route all the traffic on our model endpoint to this
# backend, and then delete the old backends to free up some memory.
def serve_new_model(model_dir, checkpoint, config, metrics, day, gpu=False):
print("Serving checkpoint: {}".format(checkpoint))
checkpoint_path = _move_checkpoint_to_model_dir(model_dir, checkpoint,
config, metrics)
try:
# Try to connect to an existing cluster.
client = serve.connect()
except RayServeException:
# If this is the first run, need to start the cluster.
client = serve.start(detached=True)
backend_name = "mnist:day_{}".format(day)
client.create_backend(backend_name, MNISTBackend, checkpoint_path, config,
metrics, gpu)
if "mnist" not in client.list_endpoints():
# First time we serve a model - create endpoint
client.create_endpoint(
"mnist", backend=backend_name, route="/mnist", methods=["POST"])
else:
# The endpoint already exists, route all traffic to the new model
# Here you could also implement an incremental rollout, where only
# a part of the traffic is sent to the new backend and the
# rest is sent to the existing backends.
client.set_traffic("mnist", {backend_name: 1.0})
# Delete previous existing backends
for existing_backend in client.list_backends():
if existing_backend.startswith("mnist:day") and \
existing_backend != backend_name:
client.delete_backend(existing_backend)
return True
def _move_checkpoint_to_model_dir(model_dir, checkpoint, config, metrics):
"""Move backend checkpoint to a central `model_dir` on the head node.
If you would like to run Serve on multiple nodes, you might want to
move the checkpoint to a shared storage, like Amazon S3, instead."""
os.makedirs(model_dir, 0o755, exist_ok=True)
checkpoint_path = os.path.join(model_dir, "checkpoint")
meta_path = os.path.join(model_dir, "meta.json")
if os.path.exists(checkpoint_path):
shutil.rmtree(checkpoint_path)
shutil.copytree(checkpoint, checkpoint_path)
with open(meta_path, "wt") as fp:
json.dump(dict(config=config, metrics=metrics), fp)
return checkpoint_path
#######################################################################
# Since we would like to continue training from the current existing
# model, we introduce a utility function that fetches the currently
# served checkpoint as well as the hyperparameter config and achieved
# accuracy.
def get_current_model(model_dir):
checkpoint_path = os.path.join(model_dir, "checkpoint")
meta_path = os.path.join(model_dir, "meta.json")
if not os.path.exists(checkpoint_path) or \
not os.path.exists(meta_path):
return None, None, None
with open(meta_path, "rt") as fp:
meta = json.load(fp)
return checkpoint_path, meta["config"], meta["metrics"]
#######################################################################
# Putting everything together
# ---------------------------
# Now we only need to glue this code together. This is the main
# entry point of the script, and it supports three modes of operation:
#
# 1. Train new model from scratch with all data
# 2. Continue training from existing model with new data only
# 3. Query the model with test data
#
# Internally, this will just call the ``tune_from_scratch()`` and
# ``tune_from_existing()`` functions.
# Both training functions will then call ``serve_new_model()`` to serve
# the newly trained or updated model.
# The query function will send an HTTP request to Serve with some
# test data obtained from the MNIST dataset.
if __name__ == "__main__":
"""
This script offers training a new model from scratch with all
available data, or continuing to train an existing model
with newly available data.
For instance, we might get new data every day. Every Sunday, we
would like to train a new model from scratch.
Naturally, we would like to use hyperparameter optimization to
    find the best model for our data.
First, we might train a model with all data available at this day:
.. code-block:: bash
python tune-serve-integration-mnist.py --from_scratch --day 0
On the coming days, we want to continue to train this model with
newly available data:
.. code-block:: bash
python tune-serve-integration-mnist.py --from_existing --day 1
python tune-serve-integration-mnist.py --from_existing --day 2
python tune-serve-integration-mnist.py --from_existing --day 3
python tune-serve-integration-mnist.py --from_existing --day 4
python tune-serve-integration-mnist.py --from_existing --day 5
python tune-serve-integration-mnist.py --from_existing --day 6
# Retrain from scratch every 7th day:
python tune-serve-integration-mnist.py --from_scratch --day 7
We can also use this script to query our served model
with some test data:
.. code-block:: bash
python tune-serve-integration-mnist.py --query 6
        Querying model with example #6. Label = 1, Response = 1, Correct = True
python tune-serve-integration-mnist.py --query 28
        Querying model with example #28. Label = 2, Response = 7, Correct = False
"""
parser = argparse.ArgumentParser(description="MNIST Tune/Serve example")
parser.add_argument("--model_dir", type=str, default="~/mnist_tune_serve")
parser.add_argument(
"--from_scratch",
action="store_true",
help="Train and select best model from scratch",
default=False)
parser.add_argument(
"--from_existing",
action="store_true",
help="Train and select best model from existing model",
default=False)
parser.add_argument(
"--day",
help="Indicate the day to simulate the amount of data available to us",
type=int,
default=0)
parser.add_argument(
"--query", help="Query endpoint with example", type=int, default=-1)
parser.add_argument(
"--smoke-test",
action="store_true",
help="Finish quickly for testing",
default=False)
args = parser.parse_args()
if args.smoke_test:
ray.init(num_cpus=2)
model_dir = os.path.expanduser(args.model_dir)
if args.query >= 0:
import requests
dataset = MNISTDataInterface("/tmp/mnist_data", max_days=0).dataset
data = dataset[args.query]
label = data[1]
# Query our model
response = requests.post(
"http://localhost:8000/mnist",
json={"images": [data[0].numpy().tolist()]})
try:
pred = response.json()["result"][0]
except: # noqa: E722
pred = -1
print("Querying model with example #{}. "
"Label = {}, Response = {}, Correct = {}".format(
args.query, label, pred, label == pred))
sys.exit(0)
gpus_per_trial = 0.5 if not args.smoke_test else 0.
serve_gpu = True if gpus_per_trial > 0 else False
num_samples = 8 if not args.smoke_test else 1
num_epochs = 10 if not args.smoke_test else 1
if args.from_scratch: # train everyday from scratch
print("Start training job from scratch on day {}.".format(args.day))
acc, config, best_checkpoint, num_examples = tune_from_scratch(
num_samples, num_epochs, gpus_per_trial, day=args.day)
print("Trained day {} from scratch on {} samples. "
"Best accuracy: {:.4f}. Best config: {}".format(
args.day, num_examples, acc, config))
serve_new_model(model_dir, best_checkpoint, config, acc, args.day,
serve_gpu)
if args.from_existing:
old_checkpoint, old_config, old_acc = get_current_model(model_dir)
if not old_checkpoint or not old_config or not old_acc:
print("No existing model found. Train one with --from_scratch "
"first.")
sys.exit(1)
acc, config, best_checkpoint, num_examples = tune_from_existing(
old_checkpoint,
old_config,
num_samples,
num_epochs,
gpus_per_trial,
day=args.day)
print("Trained day {} from existing on {} samples. "
"Best accuracy: {:.4f}. Best config: {}".format(
args.day, num_examples, acc, config))
serve_new_model(model_dir, best_checkpoint, config, acc, args.day,
serve_gpu)
#######################################################################
# That's it! We now have an end-to-end workflow to train and update a
# model every day with newly arrived data. Every week we might retrain
# the whole model. At every point in time we make sure to serve the
# model that achieved the best validation set accuracy.
#
# There are some ways we might extend this example. For instance, right
# now we only serve the latest trained model. We could also choose to
# route only a certain percentage of users to the new model, maybe to
# see if the new model really does its job right. These kinds of
# deployments are called :ref:`canary deployments <serve-split-traffic>`.
# Such deployments would also require us to keep more than one
# model in our ``model_dir`` - which should be quite easy: We could just
# create subdirectories for each training day.
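# A minimal sketch of such a split, reusing the ``set_traffic`` call from
# ``serve_new_model`` above (the backend names here are hypothetical)::
#
#     client.set_traffic("mnist", {"mnist:day_8": 0.1, "mnist:day_7": 0.9})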
#
# Still, this example should show you how easy it is to integrate the
# Ray libraries Ray Tune and Ray Serve in your workflow. While both tools
# also work independently of each other, they complement each other
# nicely and support a large number of use cases.
| apache-2.0 |
bayger/kernel_amlogic | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <[email protected]>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too much: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
        has the name given by func. If func is not found among
        the parents, then create it as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
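    # A trace line looks roughly like (ftrace "function" tracer output):
    #   bash-4251  [001]  1504.353653: native_flush_tlb <-flush_tlb_mm
    # i.e. "<task>-<pid> [<cpu>] <timestamp>: <callee> <-<caller>".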
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
| gpl-2.0 |
ProfessionalIT/professionalit-webiste | sdk/google_appengine/google/storage/speckle/python/django/backend/client.py | 7 | 1274 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Django database client for rdbms.
Encapsulates the logic for starting up a command line client to the database,
for use with the "dbshell" management command.
"""
from django.db import backends
class DatabaseClient(backends.BaseDatabaseClient):
"""Database client for rdbms."""
def runshell(self):
"""Start an interactive database shell."""
settings_dict = self.connection.settings_dict
    # google_sql.main expects an argv-style list; argv[0] is unused here.
    args = ['', settings_dict.get('INSTANCE')]
database = settings_dict.get('NAME')
if database:
args.append(database)
from google.storage.speckle.python.tool import google_sql
google_sql.main(args)
| lgpl-3.0 |
tlatzko/spmcluster | .tox/2.7-cover/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py | 1730 | 3405 | """A collection of modules for building different kinds of tree from
HTML documents.
To create a treebuilder for a new type of tree, you need to
implement several things:
1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
_base.treebuilders.Node (although comment nodes have a different
signature for their constructor, see treebuilders.etree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.
2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree
3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
"""
from __future__ import absolute_import, division, unicode_literals
from ..utils import default_etree
treeBuilderCache = {}
def getTreeBuilder(treeType, implementation=None, **kwargs):
"""Get a TreeBuilder class for various types of tree with built-in support
treeType - the name of the tree type required (case-insensitive). Supported
values are:
"dom" - A generic builder for DOM implementations, defaulting to
a xml.dom.minidom based implementation.
"etree" - A generic builder for tree implementations exposing an
ElementTree-like interface, defaulting to
xml.etree.cElementTree if available and
xml.etree.ElementTree if not.
"lxml" - A etree-based builder for lxml.etree, handling
limitations of lxml's implementation.
implementation - (Currently applies to the "etree" and "dom" tree types). A
module implementing the tree type e.g.
xml.etree.ElementTree or xml.etree.cElementTree."""
treeType = treeType.lower()
if treeType not in treeBuilderCache:
if treeType == "dom":
from . import dom
# Come up with a sane default (pref. from the stdlib)
if implementation is None:
from xml.dom import minidom
implementation = minidom
# NEVER cache here, caching is done in the dom submodule
return dom.getDomModule(implementation, **kwargs).TreeBuilder
elif treeType == "lxml":
from . import etree_lxml
treeBuilderCache[treeType] = etree_lxml.TreeBuilder
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeBuilder
else:
raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
return treeBuilderCache.get(treeType)
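# Usage sketch (hedged): the returned class is normally handed to the
# html5lib parser rather than used directly, e.g.
#
#     from pip._vendor import html5lib
#     tree_builder = getTreeBuilder("etree")
#     parser = html5lib.HTMLParser(tree=tree_builder)
#     document = parser.parse("<p>Hello")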
| bsd-2-clause |
chauhanhardik/populo_2 | lms/djangoapps/courseware/tests/test_video_handlers.py | 52 | 35861 | # -*- coding: utf-8 -*-
"""Video xmodule tests in mongo."""
import os
import freezegun
import tempfile
import textwrap
import json
import ddt
from nose.plugins.attrib import attr
from datetime import timedelta, datetime
from webob import Request
from mock import MagicMock, Mock, patch
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.x_module import STUDENT_VIEW
from . import BaseTestXmodule
from .test_video_xml import SOURCE_XML
from cache_toolbox.core import del_cached_content
from xmodule.exceptions import NotFoundError
from xmodule.video_module.transcripts_utils import (
TranscriptException,
TranscriptsGenerationException,
)
TRANSCRIPT = {"start": [10], "end": [100], "text": ["Hi, welcome to Edx."]}
BUMPER_TRANSCRIPT = {"start": [1], "end": [10], "text": ["A bumper"]}
SRT_content = textwrap.dedent("""
0
00:00:00,12 --> 00:00:00,100
Привіт, edX вітає вас.
""")
def _create_srt_file(content=None):
"""
Create srt file in filesystem.
"""
content = content or SRT_content
srt_file = tempfile.NamedTemporaryFile(suffix=".srt")
srt_file.content_type = 'application/x-subrip; charset=utf-8'
srt_file.write(content)
srt_file.seek(0)
return srt_file
def _check_asset(location, asset_name):
"""
Check that asset with asset_name exists in assets.
"""
content_location = StaticContent.compute_location(
location.course_key, asset_name
)
try:
contentstore().find(content_location)
except NotFoundError:
return False
else:
return True
def _clear_assets(location):
"""
Clear all assets for location.
"""
store = contentstore()
assets, __ = store.get_all_content_for_course(location.course_key)
for asset in assets:
asset_location = asset['asset_key']
del_cached_content(asset_location)
store.delete(asset_location)
def _get_subs_id(filename):
basename = os.path.splitext(os.path.basename(filename))[0]
return basename.replace('subs_', '').replace('.srt', '')
def _create_file(content=''):
"""
Create temporary subs_somevalue.srt.sjson file.
"""
sjson_file = tempfile.NamedTemporaryFile(prefix="subs_", suffix=".srt.sjson")
sjson_file.content_type = 'application/json'
sjson_file.write(textwrap.dedent(content))
sjson_file.seek(0)
return sjson_file
def _upload_sjson_file(subs_file, location, default_filename='subs_{}.srt.sjson'):
filename = default_filename.format(_get_subs_id(subs_file.name))
_upload_file(subs_file, location, filename)
def _upload_file(subs_file, location, filename):
mime_type = subs_file.content_type
content_location = StaticContent.compute_location(
location.course_key, filename
)
content = StaticContent(content_location, filename, mime_type, subs_file.read())
contentstore().save(content)
del_cached_content(content.location)
def attach_sub(item, filename):
"""
Attach `en` transcript.
"""
item.sub = filename
def attach_bumper_transcript(item, filename, lang="en"):
"""
Attach bumper transcript.
"""
item.video_bumper["transcripts"][lang] = filename
@attr('shard_1')
class TestVideo(BaseTestXmodule):
"""Integration tests: web client + mongo."""
CATEGORY = "video"
DATA = SOURCE_XML
METADATA = {}
def test_handle_ajax_wrong_dispatch(self):
responses = {
user.username: self.clients[user.username].post(
self.get_url('whatever'),
{},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
for user in self.users
}
status_codes = {response.status_code for response in responses.values()}
self.assertEqual(status_codes.pop(), 404)
def test_handle_ajax(self):
data = [
{'speed': 2.0},
{'saved_video_position': "00:00:10"},
{'transcript_language': 'uk'},
{'bumper_do_not_show_again': True},
{'bumper_last_view_date': True},
{'demoo�': 'sample'}
]
for sample in data:
response = self.clients[self.users[0].username].post(
self.get_url('save_user_state'),
sample,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response.status_code, 200)
self.assertEqual(self.item_descriptor.speed, None)
self.item_descriptor.handle_ajax('save_user_state', {'speed': json.dumps(2.0)})
self.assertEqual(self.item_descriptor.speed, 2.0)
self.assertEqual(self.item_descriptor.global_speed, 2.0)
self.assertEqual(self.item_descriptor.saved_video_position, timedelta(0))
self.item_descriptor.handle_ajax('save_user_state', {'saved_video_position': "00:00:10"})
self.assertEqual(self.item_descriptor.saved_video_position, timedelta(0, 10))
self.assertEqual(self.item_descriptor.transcript_language, 'en')
self.item_descriptor.handle_ajax('save_user_state', {'transcript_language': "uk"})
self.assertEqual(self.item_descriptor.transcript_language, 'uk')
self.assertEqual(self.item_descriptor.bumper_do_not_show_again, False)
self.item_descriptor.handle_ajax('save_user_state', {'bumper_do_not_show_again': True})
self.assertEqual(self.item_descriptor.bumper_do_not_show_again, True)
with freezegun.freeze_time(datetime.now()):
self.assertEqual(self.item_descriptor.bumper_last_view_date, None)
self.item_descriptor.handle_ajax('save_user_state', {'bumper_last_view_date': True})
self.assertEqual(self.item_descriptor.bumper_last_view_date, datetime.utcnow())
response = self.item_descriptor.handle_ajax('save_user_state', {u'demoo�': "sample"})
self.assertEqual(json.loads(response)['success'], True)
def tearDown(self):
_clear_assets(self.item_descriptor.location)
super(TestVideo, self).tearDown()
@attr('shard_1')
class TestTranscriptAvailableTranslationsDispatch(TestVideo):
"""
    Test video handler that provides available translations info.
Tests for `available_translations` dispatch.
"""
srt_file = _create_srt_file()
DATA = """
<video show_captions="true"
display_name="A Name"
>
<source src="example.mp4"/>
<source src="example.webm"/>
<transcript language="uk" src="{}"/>
</video>
""".format(os.path.split(srt_file.name)[1])
MODEL_DATA = {
'data': DATA
}
def setUp(self):
super(TestTranscriptAvailableTranslationsDispatch, self).setUp()
self.item_descriptor.render(STUDENT_VIEW)
self.item = self.item_descriptor.xmodule_runtime.xmodule_instance
self.subs = {"start": [10], "end": [100], "text": ["Hi, welcome to Edx."]}
def test_available_translation_en(self):
good_sjson = _create_file(json.dumps(self.subs))
_upload_sjson_file(good_sjson, self.item_descriptor.location)
self.item.sub = _get_subs_id(good_sjson.name)
request = Request.blank('/available_translations')
response = self.item.transcript(request=request, dispatch='available_translations')
self.assertEqual(json.loads(response.body), ['en'])
def test_available_translation_non_en(self):
_upload_file(self.srt_file, self.item_descriptor.location, os.path.split(self.srt_file.name)[1])
request = Request.blank('/available_translations')
response = self.item.transcript(request=request, dispatch='available_translations')
self.assertEqual(json.loads(response.body), ['uk'])
def test_multiple_available_translations(self):
good_sjson = _create_file(json.dumps(self.subs))
# Upload english transcript.
_upload_sjson_file(good_sjson, self.item_descriptor.location)
# Upload non-english transcript.
_upload_file(self.srt_file, self.item_descriptor.location, os.path.split(self.srt_file.name)[1])
self.item.sub = _get_subs_id(good_sjson.name)
request = Request.blank('/available_translations')
response = self.item.transcript(request=request, dispatch='available_translations')
self.assertEqual(json.loads(response.body), ['en', 'uk'])
@attr('shard_1')
@ddt.ddt
class TestTranscriptAvailableTranslationsBumperDispatch(TestVideo):
"""
    Test video handler that provides available translations info.
Tests for `available_translations_bumper` dispatch.
"""
srt_file = _create_srt_file()
DATA = """
<video show_captions="true"
display_name="A Name"
>
<source src="example.mp4"/>
<source src="example.webm"/>
<transcript language="uk" src="{}"/>
</video>
""".format(os.path.split(srt_file.name)[1])
MODEL_DATA = {
'data': DATA
}
def setUp(self):
super(TestTranscriptAvailableTranslationsBumperDispatch, self).setUp()
self.item_descriptor.render(STUDENT_VIEW)
self.item = self.item_descriptor.xmodule_runtime.xmodule_instance
self.dispatch = "available_translations/?is_bumper=1"
self.item.video_bumper = {"transcripts": {"en": ""}}
@ddt.data("en", "uk")
def test_available_translation_en_and_non_en(self, lang):
filename = os.path.split(self.srt_file.name)[1]
_upload_file(self.srt_file, self.item_descriptor.location, filename)
self.item.video_bumper["transcripts"][lang] = filename
request = Request.blank('/' + self.dispatch)
response = self.item.transcript(request=request, dispatch=self.dispatch)
self.assertEqual(json.loads(response.body), [lang])
def test_multiple_available_translations(self):
en_translation = _create_srt_file()
en_translation_filename = os.path.split(en_translation.name)[1]
uk_translation_filename = os.path.split(self.srt_file.name)[1]
# Upload english transcript.
_upload_file(en_translation, self.item_descriptor.location, en_translation_filename)
# Upload non-english transcript.
_upload_file(self.srt_file, self.item_descriptor.location, uk_translation_filename)
self.item.video_bumper["transcripts"]["en"] = en_translation_filename
self.item.video_bumper["transcripts"]["uk"] = uk_translation_filename
request = Request.blank('/' + self.dispatch)
response = self.item.transcript(request=request, dispatch=self.dispatch)
self.assertEqual(json.loads(response.body), ['en', 'uk'])
class TestTranscriptDownloadDispatch(TestVideo):
"""
    Test video handler that provides translation transcripts.
Tests for `download` dispatch.
"""
DATA = """
<video show_captions="true"
display_name="A Name"
sub='OEoXaMPEzfM'
>
<source src="example.mp4"/>
<source src="example.webm"/>
</video>
"""
MODEL_DATA = {
'data': DATA
}
def setUp(self):
super(TestTranscriptDownloadDispatch, self).setUp()
self.item_descriptor.render(STUDENT_VIEW)
self.item = self.item_descriptor.xmodule_runtime.xmodule_instance
def test_download_transcript_not_exist(self):
request = Request.blank('/download')
response = self.item.transcript(request=request, dispatch='download')
self.assertEqual(response.status, '404 Not Found')
@patch('xmodule.video_module.VideoModule.get_transcript', return_value=('Subs!', 'test_filename.srt', 'application/x-subrip; charset=utf-8'))
def test_download_srt_exist(self, __):
request = Request.blank('/download')
response = self.item.transcript(request=request, dispatch='download')
self.assertEqual(response.body, 'Subs!')
self.assertEqual(response.headers['Content-Type'], 'application/x-subrip; charset=utf-8')
self.assertEqual(response.headers['Content-Language'], 'en')
@patch('xmodule.video_module.VideoModule.get_transcript', return_value=('Subs!', 'txt', 'text/plain; charset=utf-8'))
def test_download_txt_exist(self, __):
self.item.transcript_format = 'txt'
request = Request.blank('/download')
response = self.item.transcript(request=request, dispatch='download')
self.assertEqual(response.body, 'Subs!')
self.assertEqual(response.headers['Content-Type'], 'text/plain; charset=utf-8')
self.assertEqual(response.headers['Content-Language'], 'en')
def test_download_en_no_sub(self):
request = Request.blank('/download')
response = self.item.transcript(request=request, dispatch='download')
self.assertEqual(response.status, '404 Not Found')
transcripts = self.item.get_transcripts_info()
with self.assertRaises(NotFoundError):
self.item.get_transcript(transcripts)
@patch('xmodule.video_module.VideoModule.get_transcript', return_value=('Subs!', u"塞.srt", 'application/x-subrip; charset=utf-8'))
def test_download_non_en_non_ascii_filename(self, __):
request = Request.blank('/download')
response = self.item.transcript(request=request, dispatch='download')
self.assertEqual(response.body, 'Subs!')
self.assertEqual(response.headers['Content-Type'], 'application/x-subrip; charset=utf-8')
self.assertEqual(response.headers['Content-Disposition'], 'attachment; filename="塞.srt"')
@attr('shard_1')
@ddt.ddt
class TestTranscriptTranslationGetDispatch(TestVideo):
"""
    Test video handler that provides translation transcripts.
Tests for `translation` and `translation_bumper` dispatches.
"""
srt_file = _create_srt_file()
DATA = """
<video show_captions="true"
display_name="A Name"
>
<source src="example.mp4"/>
<source src="example.webm"/>
<transcript language="uk" src="{}"/>
</video>
""".format(os.path.split(srt_file.name)[1])
MODEL_DATA = {
'data': DATA
}
def setUp(self):
super(TestTranscriptTranslationGetDispatch, self).setUp()
self.item_descriptor.render(STUDENT_VIEW)
self.item = self.item_descriptor.xmodule_runtime.xmodule_instance
self.item.video_bumper = {"transcripts": {"en": ""}}
@ddt.data(
# No language
('/translation', 'translation', '400 Bad Request'),
# No videoId - HTML5 video with language that is not in available languages
('/translation/ru', 'translation/ru', '404 Not Found'),
# Language is not in available languages
('/translation/ru?videoId=12345', 'translation/ru', '404 Not Found'),
# Youtube_id is invalid or does not exist
('/translation/uk?videoId=9855256955511225', 'translation/uk', '404 Not Found'),
('/translation?is_bumper=1', 'translation', '400 Bad Request'),
('/translation/ru?is_bumper=1', 'translation/ru', '404 Not Found'),
('/translation/ru?videoId=12345&is_bumper=1', 'translation/ru', '404 Not Found'),
('/translation/uk?videoId=9855256955511225&is_bumper=1', 'translation/uk', '404 Not Found'),
)
@ddt.unpack
def test_translation_fails(self, url, dispatch, status_code):
request = Request.blank(url)
response = self.item.transcript(request=request, dispatch=dispatch)
self.assertEqual(response.status, status_code)
@ddt.data(
('translation/en?videoId={}', 'translation/en', attach_sub),
('translation/en?videoId={}&is_bumper=1', 'translation/en', attach_bumper_transcript))
@ddt.unpack
def test_translation_en_youtube_success(self, url, dispatch, attach):
subs = {"start": [10], "end": [100], "text": ["Hi, welcome to Edx."]}
good_sjson = _create_file(json.dumps(subs))
_upload_sjson_file(good_sjson, self.item_descriptor.location)
subs_id = _get_subs_id(good_sjson.name)
attach(self.item, subs_id)
request = Request.blank(url.format(subs_id))
response = self.item.transcript(request=request, dispatch=dispatch)
self.assertDictEqual(json.loads(response.body), subs)
def test_translation_non_en_youtube_success(self):
subs = {
u'end': [100],
u'start': [12],
u'text': [
u'\u041f\u0440\u0438\u0432\u0456\u0442, edX \u0432\u0456\u0442\u0430\u0454 \u0432\u0430\u0441.'
]
}
self.srt_file.seek(0)
_upload_file(self.srt_file, self.item_descriptor.location, os.path.split(self.srt_file.name)[1])
subs_id = _get_subs_id(self.srt_file.name)
# A youtube 1_0 request will generate transcripts for all speeds for existing ids
self.item.youtube_id_1_0 = subs_id
self.item.youtube_id_0_75 = '0_75'
request = Request.blank('/translation/uk?videoId={}'.format(subs_id))
response = self.item.transcript(request=request, dispatch='translation/uk')
self.assertDictEqual(json.loads(response.body), subs)
# 0_75 subs already exist
request = Request.blank('/translation/uk?videoId={}'.format('0_75'))
response = self.item.transcript(request=request, dispatch='translation/uk')
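# Expected values below: timestamps scale with playback speed (12 * 0.75 = 9, 100 * 0.75 = 75).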
calculated_0_75 = {
u'end': [75],
u'start': [9],
u'text': [
u'\u041f\u0440\u0438\u0432\u0456\u0442, edX \u0432\u0456\u0442\u0430\u0454 \u0432\u0430\u0441.'
]
}
self.assertDictEqual(json.loads(response.body), calculated_0_75)
# 1_5 will be generated from 1_0
self.item.youtube_id_1_5 = '1_5'
request = Request.blank('/translation/uk?videoId={}'.format('1_5'))
response = self.item.transcript(request=request, dispatch='translation/uk')
calculated_1_5 = {
u'end': [150],
u'start': [18],
u'text': [
u'\u041f\u0440\u0438\u0432\u0456\u0442, edX \u0432\u0456\u0442\u0430\u0454 \u0432\u0430\u0441.'
]
}
self.assertDictEqual(json.loads(response.body), calculated_1_5)
@ddt.data(
('translation/en', 'translation/en', attach_sub),
('translation/en?is_bumper=1', 'translation/en', attach_bumper_transcript))
@ddt.unpack
def test_translation_en_html5_success(self, url, dispatch, attach):
good_sjson = _create_file(json.dumps(TRANSCRIPT))
_upload_sjson_file(good_sjson, self.item_descriptor.location)
subs_id = _get_subs_id(good_sjson.name)
attach(self.item, subs_id)
request = Request.blank(url)
response = self.item.transcript(request=request, dispatch=dispatch)
self.assertDictEqual(json.loads(response.body), TRANSCRIPT)
def test_translation_non_en_html5_success(self):
subs = {
u'end': [100],
u'start': [12],
u'text': [
u'\u041f\u0440\u0438\u0432\u0456\u0442, edX \u0432\u0456\u0442\u0430\u0454 \u0432\u0430\u0441.'
]
}
self.srt_file.seek(0)
_upload_file(self.srt_file, self.item_descriptor.location, os.path.split(self.srt_file.name)[1])
# manually clean youtube_id_1_0, as it has default value
self.item.youtube_id_1_0 = ""
request = Request.blank('/translation/uk')
response = self.item.transcript(request=request, dispatch='translation/uk')
self.assertDictEqual(json.loads(response.body), subs)
def test_translation_static_transcript_xml_with_data_dir(self):
"""
Test when data_dir is set in the XML course.
Set course data_dir and ensure we get redirected to that path
if it isn't found in the contentstore.
"""
# Simulate data_dir set in course.
test_modulestore = MagicMock()
attrs = {'get_course.return_value': Mock(data_dir='dummy/static', static_asset_path='')}
test_modulestore.configure_mock(**attrs)
self.item_descriptor.runtime.modulestore = test_modulestore
# Test youtube style en
request = Request.blank('/translation/en?videoId=12345')
response = self.item.transcript(request=request, dispatch='translation/en')
self.assertEqual(response.status, '307 Temporary Redirect')
self.assertIn(
('Location', '/static/dummy/static/subs_12345.srt.sjson'),
response.headerlist
)
# Test HTML5 video style
self.item.sub = 'OEoXaMPEzfM'
request = Request.blank('/translation/en')
response = self.item.transcript(request=request, dispatch='translation/en')
self.assertEqual(response.status, '307 Temporary Redirect')
self.assertIn(
('Location', '/static/dummy/static/subs_OEoXaMPEzfM.srt.sjson'),
response.headerlist
)
# Test different language to ensure we are just ignoring it since we can't
# translate with static fallback
request = Request.blank('/translation/uk')
response = self.item.transcript(request=request, dispatch='translation/uk')
self.assertEqual(response.status, '404 Not Found')
@ddt.data(
# Test youtube style en
('/translation/en?videoId=12345', 'translation/en', '307 Temporary Redirect', '12345'),
# Test html5 style en
('/translation/en', 'translation/en', '307 Temporary Redirect', 'OEoXaMPEzfM', attach_sub),
# Test different language to ensure we are just ignoring it since we can't
# translate with static fallback
('/translation/uk', 'translation/uk', '404 Not Found'),
(
'/translation/en?is_bumper=1', 'translation/en', '307 Temporary Redirect', 'OEoXaMPEzfM',
attach_bumper_transcript
),
('/translation/uk?is_bumper=1', 'translation/uk', '404 Not Found'),
)
@ddt.unpack
def test_translation_static_transcript(self, url, dispatch, status_code, sub=None, attach=None):
"""
Set course static_asset_path and ensure we get redirected to that path
if it isn't found in the contentstore
"""
self._set_static_asset_path()
if attach:
attach(self.item, sub)
request = Request.blank(url)
response = self.item.transcript(request=request, dispatch=dispatch)
self.assertEqual(response.status, status_code)
if sub:
self.assertIn(
('Location', '/static/dummy/static/subs_{}.srt.sjson'.format(sub)),
response.headerlist
)
@patch('xmodule.video_module.VideoModule.course_id', return_value='not_a_course_locator')
def test_translation_static_non_course(self, __):
"""
Test that get_static_transcript short-circuits in the case of a non-CourseLocator.
This fixes a bug for videos inside of content libraries.
"""
self._set_static_asset_path()
# When course_id is not mocked out, these values would result in 307, as tested above.
request = Request.blank('/translation/en?videoId=12345')
response = self.item.transcript(request=request, dispatch='translation/en')
self.assertEqual(response.status, '404 Not Found')
def _set_static_asset_path(self):
""" Helper method for setting up the static_asset_path information """
self.course.static_asset_path = 'dummy/static'
self.course.save()
store = modulestore()
with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
store.update_item(self.course, self.user.id)
@attr('shard_1')
class TestStudioTranscriptTranslationGetDispatch(TestVideo):
"""
Test the Studio video handler that provides translation transcripts.
Tests for the `translation` dispatch with the GET HTTP method.
"""
srt_file = _create_srt_file()
DATA = """
<video show_captions="true"
display_name="A Name"
>
<source src="example.mp4"/>
<source src="example.webm"/>
<transcript language="uk" src="{}"/>
<transcript language="zh" src="{}"/>
</video>
""".format(os.path.split(srt_file.name)[1], u"塞.srt".encode('utf8'))
MODEL_DATA = {'data': DATA}
def test_translation_fails(self):
# No language
request = Request.blank('')
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation')
self.assertEqual(response.status, '400 Bad Request')
# No filename in request.GET
request = Request.blank('')
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation/uk')
self.assertEqual(response.status, '400 Bad Request')
# Correct case:
filename = os.path.split(self.srt_file.name)[1]
_upload_file(self.srt_file, self.item_descriptor.location, filename)
self.srt_file.seek(0)
request = Request.blank(u'translation/uk?filename={}'.format(filename))
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation/uk')
self.assertEqual(response.body, self.srt_file.read())
self.assertEqual(response.headers['Content-Type'], 'application/x-subrip; charset=utf-8')
self.assertEqual(
response.headers['Content-Disposition'],
'attachment; filename="{}"'.format(filename)
)
self.assertEqual(response.headers['Content-Language'], 'uk')
# Non-ASCII filename download:
self.srt_file.seek(0)
_upload_file(self.srt_file, self.item_descriptor.location, u'塞.srt')
self.srt_file.seek(0)
request = Request.blank('translation/zh?filename={}'.format(u'塞.srt'.encode('utf8')))
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation/zh')
self.assertEqual(response.body, self.srt_file.read())
self.assertEqual(response.headers['Content-Type'], 'application/x-subrip; charset=utf-8')
self.assertEqual(response.headers['Content-Disposition'], 'attachment; filename="塞.srt"')
self.assertEqual(response.headers['Content-Language'], 'zh')
@attr('shard_1')
class TestStudioTranscriptTranslationPostDispatch(TestVideo):
"""
Test the Studio video handler that provides translation transcripts.
Tests for the `translation` dispatch with the HTTP POST method.
"""
DATA = """
<video show_captions="true"
display_name="A Name"
>
<source src="example.mp4"/>
<source src="example.webm"/>
</video>
"""
MODEL_DATA = {
'data': DATA
}
METADATA = {}
def test_studio_transcript_post(self):
# Check for exceptions:
# Language is passed, bad content or filename:
# should come first, as other tests save transcripts to the store.
request = Request.blank('/translation/uk', POST={'file': ('filename.srt', SRT_content)})
with patch('xmodule.video_module.video_handlers.save_to_store'):
with self.assertRaises(TranscriptException): # transcripts were not saved to store for some reason.
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation/uk')
request = Request.blank('/translation/uk', POST={'file': ('filename', 'content')})
with self.assertRaises(TranscriptsGenerationException): # Not an srt filename
self.item_descriptor.studio_transcript(request=request, dispatch='translation/uk')
request = Request.blank('/translation/uk', POST={'file': ('filename.srt', 'content')})
with self.assertRaises(TranscriptsGenerationException): # Content format is not srt.
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation/uk')
request = Request.blank('/translation/uk', POST={'file': ('filename.srt', SRT_content.decode('utf8').encode('cp1251'))})
# Non-UTF8 file content encoding.
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation/uk')
self.assertEqual(response.status_code, 400)
self.assertEqual(response.body, "Invalid encoding type, transcripts should be UTF-8 encoded.")
# No language is passed.
request = Request.blank('/translation', POST={'file': ('filename', SRT_content)})
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation')
self.assertEqual(response.status, '400 Bad Request')
# Language, good filename and good content.
request = Request.blank('/translation/uk', POST={'file': ('filename.srt', SRT_content)})
response = self.item_descriptor.studio_transcript(request=request, dispatch='translation/uk')
self.assertEqual(response.status, '201 Created')
self.assertDictEqual(json.loads(response.body), {'filename': u'filename.srt', 'status': 'Success'})
self.assertDictEqual(self.item_descriptor.transcripts, {})
self.assertTrue(_check_asset(self.item_descriptor.location, u'filename.srt'))
@attr('shard_1')
class TestGetTranscript(TestVideo):
"""
Make sure that the `get_transcript` method works correctly
"""
srt_file = _create_srt_file()
DATA = """
<video show_captions="true"
display_name="A Name"
>
<source src="example.mp4"/>
<source src="example.webm"/>
<transcript language="uk" src="{}"/>
<transcript language="zh" src="{}"/>
</video>
""".format(os.path.split(srt_file.name)[1], u"塞.srt".encode('utf8'))
MODEL_DATA = {
'data': DATA
}
METADATA = {}
def setUp(self):
super(TestGetTranscript, self).setUp()
self.item_descriptor.render(STUDENT_VIEW)
self.item = self.item_descriptor.xmodule_runtime.xmodule_instance
def test_good_transcript(self):
"""
Test downloading the 'en' sub with an HTML5 video when self.sub has a correct non-empty value.
"""
good_sjson = _create_file(content=textwrap.dedent("""\
{
"start": [
270,
2720
],
"end": [
2720,
5430
],
"text": [
"Hi, welcome to Edx.",
"Let's start with what is on your screen right now."
]
}
"""))
_upload_sjson_file(good_sjson, self.item.location)
self.item.sub = _get_subs_id(good_sjson.name)
transcripts = self.item.get_transcripts_info()
text, filename, mime_type = self.item.get_transcript(transcripts)
expected_text = textwrap.dedent("""\
0
00:00:00,270 --> 00:00:02,720
Hi, welcome to Edx.
1
00:00:02,720 --> 00:00:05,430
Let's start with what is on your screen right now.
""")
self.assertEqual(text, expected_text)
self.assertEqual(filename[:-4], self.item.sub)
self.assertEqual(mime_type, 'application/x-subrip; charset=utf-8')
def test_good_txt_transcript(self):
good_sjson = _create_file(content=textwrap.dedent("""\
{
"start": [
270,
2720
],
"end": [
2720,
5430
],
"text": [
"Hi, welcome to Edx.",
"Let's start with what is on your screen right now."
]
}
"""))
_upload_sjson_file(good_sjson, self.item.location)
self.item.sub = _get_subs_id(good_sjson.name)
transcripts = self.item.get_transcripts_info()
text, filename, mime_type = self.item.get_transcript(transcripts, transcript_format="txt")
expected_text = textwrap.dedent("""\
Hi, welcome to Edx.
Let's start with what is on your screen right now.""")
self.assertEqual(text, expected_text)
self.assertEqual(filename, self.item.sub + '.txt')
self.assertEqual(mime_type, 'text/plain; charset=utf-8')
def test_en_with_empty_sub(self):
transcripts = {"transcripts": {}, "sub": ""}
# no self.sub; self.youtube_id_1_0 exists, but there is no file in assets
with self.assertRaises(NotFoundError):
self.item.get_transcript(transcripts)
# no self.sub and no self.youtube_id_1_0, no non-en transcripts
self.item.youtube_id_1_0 = None
with self.assertRaises(ValueError):
self.item.get_transcript(transcripts)
# no self.sub, but youtube_id_1_0 exists with a file in assets
good_sjson = _create_file(content=textwrap.dedent("""\
{
"start": [
270,
2720
],
"end": [
2720,
5430
],
"text": [
"Hi, welcome to Edx.",
"Let's start with what is on your screen right now."
]
}
"""))
_upload_sjson_file(good_sjson, self.item.location)
self.item.youtube_id_1_0 = _get_subs_id(good_sjson.name)
text, filename, mime_type = self.item.get_transcript(transcripts)
expected_text = textwrap.dedent("""\
0
00:00:00,270 --> 00:00:02,720
Hi, welcome to Edx.
1
00:00:02,720 --> 00:00:05,430
Let's start with what is on your screen right now.
""")
self.assertEqual(text, expected_text)
self.assertEqual(filename, self.item.youtube_id_1_0 + '.srt')
self.assertEqual(mime_type, 'application/x-subrip; charset=utf-8')
def test_non_en_with_non_ascii_filename(self):
self.item.transcript_language = 'zh'
self.srt_file.seek(0)
_upload_file(self.srt_file, self.item_descriptor.location, u"塞.srt")
transcripts = self.item.get_transcripts_info()
text, filename, mime_type = self.item.get_transcript(transcripts)
expected_text = textwrap.dedent("""
0
00:00:00,12 --> 00:00:00,100
Привіт, edX вітає вас.
""")
self.assertEqual(text, expected_text)
self.assertEqual(filename, u"塞.srt")
self.assertEqual(mime_type, 'application/x-subrip; charset=utf-8')
def test_value_error(self):
good_sjson = _create_file(content='bad content')
_upload_sjson_file(good_sjson, self.item.location)
self.item.sub = _get_subs_id(good_sjson.name)
transcripts = self.item.get_transcripts_info()
with self.assertRaises(ValueError):
self.item.get_transcript(transcripts)
def test_key_error(self):
good_sjson = _create_file(content="""
{
"start": [
270,
2720
],
"end": [
2720,
5430
]
}
""")
_upload_sjson_file(good_sjson, self.item.location)
self.item.sub = _get_subs_id(good_sjson.name)
transcripts = self.item.get_transcripts_info()
with self.assertRaises(KeyError):
self.item.get_transcript(transcripts)
| agpl-3.0 |
chenc10/Spark-PAF | ec2/lib/boto-2.34.0/boto/pyami/launch_ami.py | 153 | 7585 | #!/usr/bin/env python
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import getopt
import sys
import imp
import time
import boto
usage_string = """
SYNOPSIS
launch_ami.py -a ami_id [-b script_bucket] [-s script_name]
[-m module] [-c class_name] [-r]
[-g group] [-k key_name] [-n num_instances]
[-w] [extra_data]
Where:
ami_id - the id of the AMI you wish to launch
module - The name of the Python module containing the class you
want to run when the instance is started. If you use this
option the Python module must already be stored on the
instance in a location that is on the Python path.
script_file - The name of a local Python module that you would like
to have copied to S3 and then run on the instance
when it is started. The specified module must be
import'able (i.e. in your local Python path). It
will then be copied to the specified bucket in S3
(see the -b option). Once the new instance(s)
start up the script will be copied from S3 and then
run locally on the instance.
class_name - The name of the class to be instantiated within the
module or script file specified.
script_bucket - the name of the bucket in which the script will be
stored
group - the name of the security group the instance will run in
key_name - the name of the keypair to use when launching the AMI
num_instances - how many instances of the AMI to launch (default 1)
input_queue_name - Name of SQS to read input messages from
output_queue_name - Name of SQS to write output messages to
extra_data - additional name-value pairs that will be passed as
userdata to the newly launched instance. These should
be of the form "name=value"
The -r option reloads the Python module to S3 without launching
another instance. This can be useful during debugging to allow
you to test a new version of your script without shutting down
your instance and starting up another one.
The -w option tells the script to run synchronously, meaning to
wait until the instance is actually up and running. It then prints
the IP address and internal and external DNS names before exiting.
"""
def usage():
print(usage_string)
sys.exit()
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], 'a:b:c:g:hi:k:m:n:o:rs:w',
['ami=', 'bucket=', 'class=', 'group=', 'help',
'inputqueue=', 'keypair=', 'module=',
'numinstances=', 'outputqueue=',
'reload', 'script_name=', 'wait'])
except getopt.GetoptError:
usage()
params = {'module_name': None,
'script_name': None,
'class_name': None,
'script_bucket': None,
'group': 'default',
'keypair': None,
'ami': None,
'num_instances': 1,
'input_queue_name': None,
'output_queue_name': None}
reload = None
wait = None
for o, a in opts:
if o in ('-a', '--ami'):
params['ami'] = a
if o in ('-b', '--bucket'):
params['script_bucket'] = a
if o in ('-c', '--class'):
params['class_name'] = a
if o in ('-g', '--group'):
params['group'] = a
if o in ('-h', '--help'):
usage()
if o in ('-i', '--inputqueue'):
params['input_queue_name'] = a
if o in ('-k', '--keypair'):
params['keypair'] = a
if o in ('-m', '--module'):
params['module_name'] = a
if o in ('-n', '--numinstances'):
params['num_instances'] = int(a)
if o in ('-o', '--outputqueue'):
params['output_queue_name'] = a
if o in ('-r', '--reload'):
reload = True
if o in ('-s', '--script_name'):
params['script_name'] = a
if o in ('-w', '--wait'):
wait = True
# check required fields
required = ['ami']
for pname in required:
if not params.get(pname, None):
print('%s is required' % pname)
usage()
if params['script_name']:
# first copy the desired module file to S3 bucket
if reload:
print('Reloading module %s to S3' % params['script_name'])
else:
print('Copying module %s to S3' % params['script_name'])
l = imp.find_module(params['script_name'])
c = boto.connect_s3()
bucket = c.get_bucket(params['script_bucket'])
key = bucket.new_key(params['script_name'] + '.py')
key.set_contents_from_file(l[0])
params['script_md5'] = key.md5
# we have everything we need, now build userdata string
l = []
for k, v in params.items():
if v:
l.append('%s=%s' % (k, v))
c = boto.connect_ec2()
l.append('aws_access_key_id=%s' % c.aws_access_key_id)
l.append('aws_secret_access_key=%s' % c.aws_secret_access_key)
for kv in args:
l.append(kv)
s = '|'.join(l)
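# The resulting userdata string is pipe-delimited, for example (illustrative
# values only): ami=ami-12345|group=default|keypair=mykey|num_instances=1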
if not reload:
rs = c.get_all_images([params['ami']])
img = rs[0]
r = img.run(user_data=s, key_name=params['keypair'],
security_groups=[params['group']],
max_count=params.get('num_instances', 1))
print('AMI: %s - %s (Started)' % (params['ami'], img.location))
print('Reservation %s contains the following instances:' % r.id)
for i in r.instances:
print('\t%s' % i.id)
if wait:
running = False
while not running:
time.sleep(30)
[i.update() for i in r.instances]
status = [i.state for i in r.instances]
print(status)
if status.count('running') == len(r.instances):
running = True
for i in r.instances:
print('Instance: %s' % i.ami_launch_index)
print('Public DNS Name: %s' % i.public_dns_name)
print('Private DNS Name: %s' % i.private_dns_name)
if __name__ == "__main__":
main()
| apache-2.0 |
wetneb/django | django/contrib/gis/utils/ogrinfo.py | 564 | 1984 | """
This module includes some utility functions for inspecting the layout
of a GDAL data source -- the functionality is analogous to the output
produced by the `ogrinfo` utility.
"""
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal.geometries import GEO_CLASSES
def ogrinfo(data_source, num_features=10):
"""
Walks the available layers in the supplied `data_source`, displaying
the fields for the first `num_features` features.
"""
# Checking the parameters.
if isinstance(data_source, str):
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise TypeError('Data source parameter must be a string or a DataSource object.')
for i, layer in enumerate(data_source):
print("data source : %s" % data_source.name)
print("==== layer %s" % i)
print(" shape type: %s" % GEO_CLASSES[layer.geom_type.num].__name__)
print(" # features: %s" % len(layer))
print(" srs: %s" % layer.srs)
extent_tup = layer.extent.tuple
print(" extent: %s - %s" % (extent_tup[0:2], extent_tup[2:4]))
print("Displaying the first %s features ====" % num_features)
width = max(*map(len, layer.fields))
fmt = " %%%ss: %%s" % width
for j, feature in enumerate(layer[:num_features]):
print("=== Feature %s" % j)
for fld_name in layer.fields:
type_name = feature[fld_name].type_name
output = fmt % (fld_name, type_name)
val = feature.get(fld_name)
if val:
if isinstance(val, str):
val_fmt = ' ("%s")'
else:
val_fmt = ' (%s)'
output += val_fmt % val
else:
output += ' (None)'
print(output)
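# Illustrative usage sketch (not part of the module; the shapefile path is
# hypothetical):
#   from django.contrib.gis.utils.ogrinfo import ogrinfo
#   ogrinfo("/path/to/cities.shp", num_features=5)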
# For backwards compatibility.
sample = ogrinfo
| bsd-3-clause |
Llona/AJ-sub | main.py | 1 | 28565 | #!/usr/bin/env python3
"""
Ver 1.0 - First version
Ver 2.0 - Use a database to match SUB file names and read Sublist.ini to match sub strings
Ver 3.0 - Use a GUI for parameter input
Ver 4.0 - Re-develop this application with Python3
Ver 4.1 - Add GUI
Ver 4.2 - Add function to convert sub files from Simplified Chinese to Taiwan Traditional Chinese
Ver 4.2.1 - Add function to back up the original sub file
Ver 4.2.2 - Add About content, use different message box types for different message types
Ver 4.2.3 - Modify About content
Ver 4.3.0 -
1. Add log widget and print logs into the log widget
2. Add clipboard conversion function
3. Modify main process for error handling
4. Add R/W setting file error handling
Ver 4.4.0 - Add renaming of sub files to match the video file name
Ver 4.5.0 -
1. Add rename and mapping function
2. Change all text and notification messages
3. Change icon
Ver 4.5.1 -
1. Add S2TW, S2T, T2S conversion functions
2. Add big5 type for the conversion function
3. Add help button in the rename function
4. Add BIG5 format support
5. Modify ST dictionary
Ver 4.5.2 - Modify ST dictionary
Ver 4.5.3 - Modify ST dictionary
Ver 4.5.4 - Fix an issue where some folder names could not be accessed
Ver 4.5.5 - Add function to convert all sub-folders
Ver 4.5.6 -
1. Fix error popup shown when the file type was not found in a sub-folder
2. Fix the root folder not being converted when using the sub-folder conversion function
"""
from tkinter import *
from tkinter.ttk import *
from tkinter.font import Font
import tkinter.messagebox
import re
import configparser
import os
import shutil
# from tkinter.scrolledtext import ScrolledText
# from time import sleep
# from tkinter.commondialog import Dialog
from enum import Enum
import replace_sub
import langconver
import ajrename
title = "AJSub - 強力轉換! 轉碼君"
version = "v4.05.6"
sub_database_name = "SubList.sdb"
sub_setting_name = "Settings.ini"
backup_folder_name = "backfile"
subpath = "" # SUB file path, read from Settings.ini
subfiletype_list = "" # SUB file type, read from Settings.ini, ex: *.ssa, *.ass
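# Expected Settings.ini layout (UTF-16 encoded; the path and file types below
# are illustrative values only):
#   [Global]
#   subpath = D:\videos\subs
#   subtype = *.ass, *.srt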
progress_txt = "強力轉換中..."
progress_idle_txt = ""
progress_done_txt = "轉換完成!!"
help_text = \
"AJSub "+version+"\n\n"\
"本軟體會自動將指定目錄下的所有指定檔案簡轉繁或繁轉簡\n" \
"建議使用簡體轉繁體+台灣慣用語\n\n"\
"轉換完成後, AJSub會將檔案內容的字型設定部份轉為簡體\n" \
"這樣使用某些字型時系統才會認得 (例如方正系列的字型)\n\n"\
"UTF-8與UTF-16檔會照原格式儲存, 其餘會自動轉UTF-8格式\n"\
"原始檔案備份在"+backup_folder_name+"目錄下\n\n"\
"使用說明:\n"\
" 1. 將檔案路徑輸入SUB type欄位\n"\
" 2. 輸入檔案類型並用逗點隔開, 如*.ass, *.ssa\n"\
" 3. 按下轉碼按鈕~ enjoy it!!!!\n"\
" 4. 字型設定若需新增或修改, 請直接修改SubList.sdb\n"\
" 5. 按下剪貼簿轉碼按鈕, 可直接轉換剪貼簿的內容\n"\
" 6. 啟動~ 檔名君按鈕可開啟AJRen改名程式\n\n" \
"轉碼功能使用Yichen (Eugene) (https://github.com/yichen0831/opencc-python) 提供的OpenCC python版本\n\n" \
"AJSub由[Llona]設計維護, 問題回報與下載頁面: https://llona.github.io/AJ-sub/ \n\n"\
"=====\n"\
"AJSub "+version+"\n"\
"Copyright 2016\n\n"\
"This product includes OpenCC-python, develop by:\n"\
"[Yichen (Eugene)](https://github.com/yichen0831/opencc-python).\n\n" \
"AJSub is implement by [Llona], \n" \
"Bug report and download page: https://llona.github.io/AJ-sub/"
class error_Type(Enum):
NORMAL = 'NORMAL' # define normal state
FILE_ERROR = 'FILE_RW_ERROR' # define file o/r/w error type
class replace_Sub_Gui(Frame):
def __init__(self, master=None, subfilepath_ini=None, subfiletype_ini=None, help_text=None):
Frame.__init__(self, master)
self.master = master
self.subfiletype_list_ini = subfiletype_ini
self.subpath_ini = subfilepath_ini
self.help_text = help_text
self.user_input_path = ""
self.user_input_type = ""
self.app_current_path_lv = os.getcwd()
# self.checkbutton_select = IntVar()
# self.grid()
# # -----Define all GUI item-----
# self.sub_path_label = Label(self)
# self.sub_path_entry = Entry(self)
# self.sub_type_label = Label(self)
# self.sub_type_entry = Entry(self)
# self.rename_button = Button(self)
# self.start_button = Button(self)
# self.help_button = Button(self)
# self.clip_button = Button(self)
# self.empty_label = Label(self)
# self.version_label = Label(self)
# self.version_state = Label(self)
# # self.hide_log_button = Button(self)
# self.shlog_chbutton = Checkbutton(self)
# # self.log_txt = ScrolledText(self, wrap='none', state="disabled")
# # self.ren_frame_oriview_txt = Text(self, wrap='none', state="disabled")
# self.vert_scrollbar = Scrollbar(self, orient=VERTICAL)
# self.hor_scrollbar = Scrollbar(self, orient='horizontal')
# self.log_txt = Text(self, wrap='none', state="disabled",
# yscrollcommand=self.vert_scrollbar.set, xscrollcommand=self.hor_scrollbar.set)
# -----Set Text log fone color-----
root.bind('<Key-Return>', self.press_key_enter)
self.create_widgets()
def create_widgets(self):
self.top = self.winfo_toplevel()
self.style = Style()
self.style.configure('Tlog_frame.TLabelframe', font=('iLiHei', 10))
self.style.configure('Tlog_frame.TLabelframe.Label', font=('iLiHei', 10))
self.log_frame = LabelFrame(self.top, text='LOG', style='Tlog_frame.TLabelframe')
self.log_frame.place(relx=0.01, rely=0.283, relwidth=0.973, relheight=0.708)
self.style.configure('Tuser_input_frame.TLabelframe', font=('iLiHei', 10))
self.style.configure('Tuser_input_frame.TLabelframe.Label', font=('iLiHei', 10))
self.user_input_frame = LabelFrame(self.top, text='輸入', style='Tuser_input_frame.TLabelframe')
self.user_input_frame.place(relx=0.01, rely=0.011, relwidth=0.973, relheight=0.262)
self.VScroll1 = Scrollbar(self.log_frame, orient='vertical')
self.VScroll1.place(relx=0.967, rely=0.010, relwidth=0.022, relheight=0.936)
self.HScroll1 = Scrollbar(self.log_frame, orient='horizontal')
self.HScroll1.place(relx=0.01, rely=0.940, relwidth=0.958, relheight=0.055)
self.log_txtFont = Font(font=('iLiHei', 10))
self.log_txt = Text(self.log_frame, wrap='none', xscrollcommand=self.HScroll1.set, yscrollcommand=self.VScroll1.set, font=self.log_txtFont)
self.log_txt.place(relx=0.01, rely=0.010, relwidth=0.958, relheight=0.936)
# self.log_txt.insert('1.0', '')
self.HScroll1['command'] = self.log_txt.xview
self.VScroll1['command'] = self.log_txt.yview
self.style.configure('Tclip_button.TButton', font=('iLiHei', 9))
self.clip_button = Button(self.user_input_frame, text='剪貼簿轉碼', command=self.convert_clipboard, style='Tclip_button.TButton')
self.clip_button.place(relx=0.832, rely=0.497, relwidth=0.137, relheight=0.220)
self.style.configure('Thelp_button.TButton', font=('iLiHei', 9))
self.help_button = Button(self.user_input_frame, text='Help', command=self.print_about, style='Thelp_button.TButton')
self.help_button.place(relx=0.380, rely=0.788, relwidth=0.105, relheight=0.200)
self.style.configure('Tstart_button.TButton', font=('iLiHei', 9))
self.start_button = Button(self.user_input_frame, text='轉碼', command=self.replace_all_sub_in_path, style='Tstart_button.TButton')
self.start_button.place(relx=0.220, rely=0.788, relwidth=0.105, relheight=0.200)
self.style.configure('Trename_button.TButton', font=('iLiHei', 9))
self.rename_button = Button(self.user_input_frame, text='啟動~ 檔名君', command=self.show_rename_frame, style='Trename_button.TButton')
self.rename_button.place(relx=0.832, rely=0.166, relwidth=0.137, relheight=0.200)
self.sub_path_entryVar = StringVar(value=self.subpath_ini)
self.sub_path_entry = Entry(self.user_input_frame, textvariable=self.sub_path_entryVar, font=('iLiHei', 10))
self.sub_path_entry.place(relx=0.01, rely=0.180, relwidth=0.80, relheight=0.180)
self.sub_type_entryVar = StringVar(value=self.subfiletype_list_ini)
self.sub_type_entry = Entry(self.user_input_frame, textvariable=self.sub_type_entryVar, font=('iLiHei', 10))
self.sub_type_entry.place(relx=0.01, rely=0.520, relwidth=0.80, relheight=0.190)
self.style.configure('Tversion_label.TLabel', anchor='e', font=('iLiHei', 9))
self.version_label = Label(self.user_input_frame, text=version, state='disable', style='Tversion_label.TLabel')
self.version_label.place(relx=0.843, rely=0.87, relwidth=0.147, relheight=0.13)
self.style.configure('Tversion_state.TLabel', anchor='w', font=('iLiHei', 9))
self.version_state = Label(self.user_input_frame, text=progress_idle_txt, style='Tversion_state.TLabel')
self.version_state.place(relx=0.01, rely=0.87, relwidth=0.116, relheight=0.13)
self.style.configure('Tsub_type_label.TLabel', anchor='w', font=('iLiHei', 10))
self.sub_type_label = Label(self.user_input_frame, text='轉換檔案類型', style='Tsub_type_label.TLabel')
self.sub_type_label.place(relx=0.01, rely=0.380, relwidth=0.200, relheight=0.13)
self.style.configure('Tsub_path_label.TLabel', anchor='w', font=('iLiHei', 10))
self.sub_path_label = Label(self.user_input_frame, text='轉換檔案路徑', style='Tsub_path_label.TLabel')
self.sub_path_label.place(relx=0.01, rely=0.010, relwidth=0.200, relheight=0.166)
self.ComboVar = StringVar()
self.Combo = Combobox(self.user_input_frame, text='S2TW', state='readonly', textvariable=self.ComboVar,
font=('iLiHei', 9))
self.Combo['values'] = ('簡轉繁+台灣慣用語', '簡轉繁', '繁轉簡')
self.Combo.current(0)
self.Combo.place(relx=0.520, rely=0.800, relwidth=0.190)
# self.Combo.bind('<<ComboboxSelected>>', self.get_user_conv_type)
self.sub_folder_chbuttonVar = IntVar(value=0)
self.style.configure('Tlucky_sort_chbutton.TCheckbutton', font=('iLiHei', 9))
self.sub_folder_chbutton = Checkbutton(self.user_input_frame, text='包含子目錄', variable=self.sub_folder_chbuttonVar, style='Tlucky_sort_chbutton.TCheckbutton')
self.sub_folder_chbutton.place(relx=0.750, rely=0.815, relwidth=0.160)
# self.convert_clipboard
# self.print_about
# self.replace_all_sub_in_path
# self.show_rename_frame
# -----Scrollbar for log text wiege-----
# self.hor_scrollbar.config(command=self.log_txt.xview)
# self.vert_scrollbar.config(command=self.log_txt.yview)
# self.vert_scrollbar.grid(row=5, column=7, columnspan=8, sticky='NS')
# self.hor_scrollbar.grid(row=6, column=0, columnspan=8, sticky='EW')
# -----Button Hide log-----
# self.hide_log_button["text"] = "Hide Log"
# self.hide_log_button["command"] = self.hide_log_widge
# self.hide_log_button.grid(row=7, column=0)
# -----Checkbutton show/hide log-----
# self.shlog_chbutton.config(variable=self.checkbutton_select, text='Show log', command=self.hide_log_widge)
# self.shlog_chbutton.grid(row=4, column=0, columnspan=1, sticky='SNWE')
self.log_txt.tag_config("error", foreground="#CC0000")
self.log_txt.tag_config("info", foreground="#008800")
self.log_txt.tag_config("info2", foreground="#404040")
self.update_idletasks()
def get_user_conv_type(self, event=None):
conv_type_ls = self.Combo.current()
# print(conv_type_ls)
if conv_type_ls == 0:
return 's2tw'
elif conv_type_ls == 1:
return 's2t'
elif conv_type_ls == 2:
return 't2s'
else:
print('Error! combobox input is invalid: %s' % conv_type_ls)
def show_rename_frame(self):
ajrename.rename_frame(self, self.sub_path_entry.get(), self.sub_type_entry.get(), sub_setting_name)
# def hide_log_widge(self):
# print(self.shlog_chbuttonVar.get())
# if not self.shlog_chbuttonVar.get():
# # self.log_frame.place_forget()
# # self.log_frame.grid_remove()
# # self.log_txt.grid_remove()
# # self.vert_scrollbar.grid_remove()
# # self.hor_scrollbar.grid_remove()
# # # self.hide_log_button.grid_remove()
# # self.version_state["text"] = progress_idle_txt
# self.update_idletasks()
# else:
# # -----Show log widge-----
# if not self.log_txt.grid_info():
# self.log_frame.place(relx=0.01, rely=0.287, relwidth=0.981, relheight=0.705)
# # self.log_txt.grid()
# # self.vert_scrollbar.grid()
# # self.hor_scrollbar.grid()
# # self.hide_log_button.grid()
def press_key_enter(self, event=None):
self.replace_all_sub_in_path()
def convert_clipboard(self):
# -----Clear text widge for log-----
# self.log_txt.config(state="normal")
# self.log_txt.delete('1.0', END)
# self.log_txt.config(state="disable")
clip_content_lv = self.clipboard_get()
self.clipboard_clear()
conv_ls = self.get_user_conv_type()
# clip_content_lv = langconver.convert_lang_select(clip_content_lv, 's2t')
# print(conv_ls)
clip_content_lv = langconver.convert_lang_select(clip_content_lv, conv_ls)
self.clipboard_append(clip_content_lv)
self.setlog("剪貼簿轉換完成!", 'info')
def print_about(self):
tkinter.messagebox.showinfo("About", self.help_text)
# def create_popup(self):
# pass
# # self.top_window = Toplevel()
# # self.top_window.overrideredirect(1)
# # msg = Label(self.top_window, text="轉換工作進行中...")
# # root.update_idletasks()
# # msg.pack(side=TOP, anchor=W, fill=X, expand=YES)
# # self.top_window['takefocus'] = True
# # self.top_window.grab_set()
# # self.top_window.focus_force()
# # msg.focus()
# # msg.grab_set()
# # root.update_idletasks()
#
# def close_popup(self):
# # self.top_window.destroy()
# pass
def setlog(self, string, level=None):
self.log_txt.config(state="normal")
if level not in ('error', 'info', 'info2'):
level = ""
self.log_txt.insert(INSERT, "%s\n" % string, level)
# -----scroll to end of text widge-----
self.log_txt.see(END)
self.update_idletasks()
self.log_txt.config(state="disabled")
def setlog_large(self, string, level=None):
self.log_txt.insert(INSERT, "%s\n" % string, level)
# -----scroll to end of text widge-----
self.log_txt.see(END)
self.update_idletasks()
def read_config(self, filename, section, key):
try:
config_lh = configparser.ConfigParser()
file_ini_lh = open(filename, 'r', encoding='utf16')
config_lh.read_file(file_ini_lh)
file_ini_lh.close()
return config_lh.get(section, key)
except:
self.setlog("Error! 讀取ini設定檔發生錯誤! "
"請在AJSub目錄下使用UTF-16格式建立 " + filename, 'error')
return error_Type.FILE_ERROR.value
def write_config(self, filename, sections, key, value):
try:
config_lh = configparser.ConfigParser()
file_ini_lh = open(filename, 'r', encoding='utf16')
config_lh.read_file(file_ini_lh)
file_ini_lh.close()
file_ini_lh = open(filename, 'w', encoding='utf16')
config_lh.set(sections, key, value)
config_lh.write(file_ini_lh)
file_ini_lh.close()
except Exception as ex:
self.setlog("Error! 寫入ini設定檔發生錯誤! "
"請在AJSub目錄下使用UTF-16格式建立 " +filename, 'error')
return error_Type.FILE_ERROR.value
def store_origin_file_to_backup_folder(self, file, back_folder):
shutil.copy2(file, back_folder)
def conv_and_replace_sub_write_file(self, subfile_list, subdata_dic):
status_lv = True
self.log_txt.config(state="normal")
subfile_list_lt = tuple(subfile_list)
for i in subfile_list_lt:
# -----Test sub file format-----
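# Encoding detection strategy: try utf-8, utf-16, gbk, gb2312 and big5 in
# turn; the first codec that decodes the file without error is used.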
try:
subcontent_h = open(i, 'r+', encoding='utf8')
sub_content_lv = subcontent_h.read()
except:
try:
subcontent_h.close()
subcontent_h = open(i, 'r+', encoding='utf16')
sub_content_lv = subcontent_h.read()
except:
try:
subcontent_h.close()
subcontent_h = open(i, 'r', encoding='gbk')
sub_content_lv = subcontent_h.read()
except:
try:
subcontent_h.close()
subcontent_h = open(i, 'r', encoding='gb2312')
sub_content_lv = subcontent_h.read()
except:
try:
subcontent_h.close()
subcontent_h = open(i, 'r', encoding='big5')
sub_content_lv = subcontent_h.read()
except:
status_lv = False
self.setlog("Error! 無法開啟或寫入檔案, 請確認檔案非唯讀: %s " % i, 'error')
continue
# -----For GBK and GB2312 format-----
subcontent_h.close()
# -----backup origin sub file to backup folder-----
self.store_origin_file_to_backup_folder(i, self.user_input_path+'\\'+backup_folder_name)
sub_content_temp_lv = sub_content_lv
# -----convert-----
self.setlog_large("轉碼中: %s" % i)
# tw_str_lv = langconver.s2tw(sub_content_lv)
conv_ls = self.get_user_conv_type()
tw_str_lv = langconver.convert_lang_select(sub_content_lv, conv_ls)
self.setlog_large("替換字串: %s" % i, 'info2')
tw_str_lv = replace_sub.replace_specif_string(tw_str_lv, subdata_dic)
if sub_content_temp_lv != tw_str_lv:
subcontent_write_h = open(i, 'w', encoding='utf8')
subcontent_write_h.seek(0, 0)
subcontent_write_h.write(tw_str_lv)
subcontent_write_h.close()
continue
# -----backup origin sub file to backup folder-----
self.store_origin_file_to_backup_folder(i, '%s\\%s' % (self.user_input_path, backup_folder_name))
# -----for utf8 and utf16 format-----
sub_content_temp_lv = sub_content_lv
# -----convert-----
self.setlog_large("轉碼中: %s" % i)
# tw_str_lv = langconver.s2tw(sub_content_lv)
conv_ls = self.get_user_conv_type()
tw_str_lv = langconver.convert_lang_select(sub_content_lv, conv_ls)
self.setlog_large("替換字串: %s" % i, 'info2')
tw_str_lv = replace_sub.replace_specif_string(tw_str_lv, subdata_dic)
# -----if sub file content is changed, write to origin file-----
if sub_content_temp_lv != tw_str_lv:
subcontent_h.seek(0, 0)
subcontent_h.write(tw_str_lv)
subcontent_h.close()
self.log_txt.config(state="disable")
return status_lv
def replace_all_sub_in_path(self):
w_file_stat_lv = error_Type.NORMAL.value
# -----Clear text widge for log-----
self.log_txt.config(state="normal")
self.log_txt.delete('1.0', END)
self.log_txt.config(state="disable")
# -----Get user input path-----
self.user_input_path = self.sub_path_entry.get()
# -----Get user input file types and Split type string then store to list-----
self.user_input_type = self.sub_type_entry.get()
# -----Check user input in GUI-----
if self.user_input_path == "" or self.user_input_type == "":
tkinter.messagebox.showinfo("message", "請輸入路徑和類型")
return
if not os.path.exists(self.user_input_path):
tkinter.messagebox.showerror("Error", "路徑錯誤")
return
# -----get config ini file setting-----
self.subpath_ini = self.read_config(sub_setting_name, 'Global', 'subpath')
self.subfiletype_list_ini = self.read_config(sub_setting_name, 'Global', 'subtype')
if self.subpath_ini == error_Type.FILE_ERROR.value or self.subfiletype_list_ini == error_Type.FILE_ERROR.value:
tkinter.messagebox.showerror("Error",
"錯誤! 讀取ini設定檔發生錯誤! "
"請在AJSub目錄下使用UTF-16格式建立 " + sub_setting_name)
return
# -----remove '\' or '/' in end of path string-----
self.user_input_path = re.sub(r"/$", '', self.user_input_path)
self.user_input_path = re.sub(r"\\$", "", self.user_input_path)
# -----Store user input path and type into Setting.ini config file-----
if not self.user_input_path == self.subpath_ini:
self.setlog("新的路徑設定寫入設定檔: " + sub_setting_name, "info")
# print("path not match, write new path to ini")
w_file_stat_lv = self.write_config(sub_setting_name, 'Global', 'subpath', self.user_input_path)
if not self.user_input_type == self.subfiletype_list_ini:
self.setlog("新的檔案類型設定寫入設定檔: " + sub_setting_name, "info")
# print("type not match, write new type list to ini")
w_file_stat_lv = self.write_config(sub_setting_name, 'Global', 'subtype', self.user_input_type)
if w_file_stat_lv == error_Type.FILE_ERROR.value:
tkinter.messagebox.showerror("Error",
"錯誤! 寫入ini設定檔發生錯誤! "
"請在AJSub目錄下使用UTF-16格式建立 " + sub_setting_name)
return
# ----Split file type string and store to list-----
re_lv = re.sub(r' ', '', self.user_input_type)
self.user_input_type = re_lv.split(",")
# -----remove duplicate item-----
self.user_input_type = set(self.user_input_type)
# print(self.user_input_type)
# ---only convert a specific folder---
status = True
if self.sub_folder_chbuttonVar.get() == 0:
status = self.start_conversion()
else:
# ---convert all sub folder---
ori_user_input_path = self.user_input_path
sub_folder_lists = replace_sub.get_all_sub_folder_name(self.user_input_path)
for sub_folder in sub_folder_lists:
# skip backup folder
if sub_folder.find(backup_folder_name) == -1:
self.setlog("轉換子目錄: %s" % sub_folder, "info")
self.user_input_path = sub_folder
status_tmp = self.start_conversion()
if not status_tmp:
status = status_tmp
self.user_input_path = ori_user_input_path
self.show_done_popup(status)
def start_conversion(self):
# -----Get sub file list by type-----
sub_file_list = replace_sub.get_file_list(self.user_input_path, self.user_input_type)
if not sub_file_list:
# convert file list is empty
if self.sub_folder_chbuttonVar.get() == 0:
tkinter.messagebox.showwarning("Error", "錯誤! 在指定的目錄中找不到檔案! 請確認檔案路徑與類型")
return False
else:
self.setlog("此目錄中無指定檔案類型: %s" % self.user_input_path, "error")
return True
# print(sub_file_list)
# -----Dim button for string converting-----
self.version_state["text"] = progress_txt
# self.version_state["fg"] = "blue"
self.start_button["state"] = 'disable'
# self.help_button["state"] = 'disable'
self.clip_button["state"] = 'disable'
self.update_idletasks()
# -----make backup folder for store origin sub files-----
if not os.path.exists(self.user_input_path+'\\'+backup_folder_name):
os.makedirs(self.user_input_path+'\\'+backup_folder_name)
# -----Replace all file list string by dic structure-----
status = self.conv_and_replace_sub_write_file(sub_file_list, sub_data_dic)
# -----Set button and progressing state to normal-----
self.version_state["text"] = progress_done_txt
# self.version_state["fg"] = "blue"
self.start_button["state"] = 'normal'
# self.help_button["state"] = 'normal'
self.clip_button["state"] = 'normal'
self.update_idletasks()
return status
def show_done_popup(self, status):
if status:
self.setlog("***順利完成! 轉碼與取代字串成功***", "info")
tkinter.messagebox.showinfo("message", "轉碼與取代字串成功")
else:
self.setlog("***錯誤! 轉碼與取代字串發生錯誤, 請參考log視窗***", "error")
tkinter.messagebox.showerror("Error", "轉碼與取代字串發生錯誤, 請參考log視窗")
def check_all_file_status():
if not os.path.exists(sub_database_name):
return False
if not os.path.exists(sub_setting_name):
return False
if not os.path.exists('icons\\main.ico'):
return False
return True
if __name__ == '__main__':
# -----MessageBox will create tkinter, so create correct setting tkinter first
root = Tk()
root.title(title)
root.iconbitmap('icons\\main.ico')
sub_setting_name = "%s\\%s" % (os.getcwd(), sub_setting_name)
sub_database_name = "%s\\%s" % (os.getcwd(), sub_database_name)
if not check_all_file_status():
tkinter.messagebox.showerror("Error", "遺失必要檔案! \n\n請確認AJSub目錄有以下檔案存在, 或 "
"重新安裝AJSub:\n"
"1. " + sub_setting_name + "\n"
"2. " + sub_database_name + "\n"
"3. icons\\main.ico")
sys.exit(0)
try:
# -----Get setting from Settings.ini-----
file_ini_h = open(sub_setting_name, encoding='utf16')
config_h = configparser.ConfigParser()
config_h.read_file(file_ini_h)
file_ini_h.close()
subpath = config_h.get('Global', 'subpath')
subfiletype_list = config_h.get('Global', 'subtype')
config_h.clear()
except:
tkinter.messagebox.showerror("Error",
"讀取設定檔 " + sub_setting_name + " 或 " + sub_database_name + " 錯誤!\n"
"請確認檔案格式為UTF-16 (unicode format) 或重新安裝AJSub")
sys.exit(0)
# -----Get database list to dic structure-----
sub_data_dic = replace_sub.get_database_list(sub_database_name)
# -----Start GUI class-----
root.geometry('880x670')
app = replace_Sub_Gui(master=root, subfilepath_ini=subpath,
subfiletype_ini=subfiletype_list, help_text=help_text)
# -----Start main loop-----
app.mainloop()
| apache-2.0 |
dan1/horizon-x509 | openstack_dashboard/dashboards/project/overview/views.py | 15 | 2354 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import capfirst # noqa
from django.template.defaultfilters import floatformat # noqa
from django.utils.translation import ugettext_lazy as _
from horizon.utils import csvbase
from horizon import views
from openstack_dashboard import usage
from openstack_dashboard.dashboards.project.instances \
import tables as project_tables
from openstack_dashboard.utils import filters
class ProjectUsageCsvRenderer(csvbase.BaseCsvResponse):
columns = [_("Instance Name"), _("VCPUs"), _("RAM (MB)"),
_("Disk (GB)"), _("Usage (Hours)"),
_("Time since created (Seconds)"), _("State")]
def get_row_data(self):
choices = project_tables.STATUS_DISPLAY_CHOICES
for inst in self.context['usage'].get_instances():
state_label = (
filters.get_display_label(choices, inst['state']))
yield (inst['name'],
inst['vcpus'],
inst['memory_mb'],
inst['local_gb'],
floatformat(inst['hours'], 2),
inst['uptime'],
capfirst(state_label))
class ProjectOverview(usage.UsageView):
table_class = usage.ProjectUsageTable
usage_class = usage.ProjectUsage
template_name = 'project/overview/usage.html'
csv_response_class = ProjectUsageCsvRenderer
def get_data(self):
super(ProjectOverview, self).get_data()
return self.usage.get_instances()
class WarningView(views.HorizonTemplateView):
template_name = "project/_warning.html"
| apache-2.0 |
jamesmcm/luigi | test/contrib/hdfs/webhdfs_client_test.py | 3 | 1334 | # -*- coding: utf-8 -*-
#
# Copyright 2015 VNG Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import pytest
from helpers import with_config
from luigi.contrib.hdfs import WebHdfsClient
InsecureClient = pytest.importorskip('hdfs.InsecureClient')
KerberosClient = pytest.importorskip('hdfs.ext.kerberos.KerberosClient')
@pytest.mark.apache
class TestWebHdfsClient(unittest.TestCase):
@with_config({'webhdfs': {'client_type': 'insecure'}})
def test_insecure_client_type(self):
client = WebHdfsClient(host='localhost').client
self.assertIsInstance(client, InsecureClient)
@with_config({'webhdfs': {'client_type': 'kerberos'}})
def test_kerberos_client_type(self):
client = WebHdfsClient(host='localhost').client
self.assertIsInstance(client, KerberosClient)
| apache-2.0 |
blaze/distributed | distributed/profile.py | 1 | 13306 | """ This module contains utility functions to construct and manipulate counting
data structures for frames.
When performing statistical profiling we obtain many call stacks. We aggregate
these call stacks into data structures that maintain counts of how many times
each function in that call stack has been called. Because these stacks will
overlap this aggregation counting structure forms a tree, such as is commonly
visualized by profiling tools.
We represent this tree as a nested dictionary with the following form:
{
'identifier': 'root',
'description': 'A long description of the line of code being run.',
'count': 10 # the number of times we have seen this line
'children': { # callers of this line. Recursive dicts
'ident-a': {'description': ...
'identifier': 'ident-a',
'count': ...
'children': {...}},
'ident-b': {'description': ...
'identifier': 'ident-b',
'count': ...
'children': {...}}}
}
"""
import bisect
from collections import defaultdict, deque
import linecache
import sys
import threading
from time import sleep
import tlz as toolz
from .metrics import time
from .utils import format_time, color_of, parse_timedelta
def identifier(frame):
""" A string identifier from a frame
Strings are cheaper to use as indexes into dicts than tuples or dicts
"""
if frame is None:
return "None"
else:
return ";".join(
(
frame.f_code.co_name,
frame.f_code.co_filename,
str(frame.f_code.co_firstlineno),
)
)
def repr_frame(frame):
""" Render a frame as a line for inclusion into a text traceback """
co = frame.f_code
text = ' File "%s", line %s, in %s' % (co.co_filename, frame.f_lineno, co.co_name)
line = linecache.getline(co.co_filename, frame.f_lineno, frame.f_globals).lstrip()
return text + "\n\t" + line
def info_frame(frame):
co = frame.f_code
line = linecache.getline(co.co_filename, frame.f_lineno, frame.f_globals).lstrip()
return {
"filename": co.co_filename,
"name": co.co_name,
"line_number": frame.f_lineno,
"line": line,
}
def process(frame, child, state, stop=None, omit=None):
""" Add counts from a frame stack onto existing state
This recursively adds counts to the existing state dictionary and creates
new entries for new functions.
Examples
--------
>>> import sys, threading
>>> ident = threading.get_ident() # replace with your thread of interest
>>> frame = sys._current_frames()[ident]
>>> state = {'children': {}, 'count': 0, 'description': 'root',
... 'identifier': 'root'}
>>> process(frame, None, state)
>>> state
{'count': 1,
'identifier': 'root',
'description': 'root',
'children': {'...'}}
"""
if omit is not None and any(frame.f_code.co_filename.endswith(o) for o in omit):
return False
prev = frame.f_back
if prev is not None and (
stop is None or not prev.f_code.co_filename.endswith(stop)
):
state = process(prev, frame, state, stop=stop)
if state is False:
return False
ident = identifier(frame)
try:
d = state["children"][ident]
except KeyError:
d = {
"count": 0,
"description": info_frame(frame),
"children": {},
"identifier": ident,
}
state["children"][ident] = d
state["count"] += 1
if child is not None:
return d
else:
d["count"] += 1
def merge(*args):
""" Merge multiple frame states together """
if not args:
return create()
s = {arg["identifier"] for arg in args}
if len(s) != 1:
raise ValueError("Expected identifiers, got %s" % str(s))
children = defaultdict(list)
for arg in args:
for child in arg["children"]:
children[child].append(arg["children"][child])
children = {k: merge(*v) for k, v in children.items()}
count = sum(arg["count"] for arg in args)
return {
"description": args[0]["description"],
"children": dict(children),
"count": count,
"identifier": args[0]["identifier"],
}
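def _merge_demo():  # pragma: no cover
    """Illustrative sketch only (not part of the upstream module): ``merge``
    sums the counts of states that share the same root identifier."""
    a = create()
    a["count"] = 3
    b = create()
    b["count"] = 2
    # Both states come from create(), so their identifiers match ('root')
    assert merge(a, b)["count"] == 5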
def create():
return {
"count": 0,
"children": {},
"identifier": "root",
"description": {"filename": "", "name": "", "line_number": 0, "line": ""},
}
def call_stack(frame):
""" Create a call text stack from a frame
Returns
-------
list of strings
"""
L = []
while frame:
L.append(repr_frame(frame))
frame = frame.f_back
return L[::-1]
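def _call_stack_demo():  # pragma: no cover
    """Illustrative sketch only (not part of the upstream module): render the
    calling thread's current stack as text lines."""
    frame = sys._current_frames()[threading.get_ident()]
    for line in call_stack(frame):  # oldest frame first
        print(line)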
def plot_data(state, profile_interval=0.010):
""" Convert a profile state into data useful by Bokeh
See Also
--------
plot_figure
distributed.bokeh.components.ProfilePlot
"""
starts = []
stops = []
heights = []
widths = []
colors = []
states = []
times = []
filenames = []
lines = []
line_numbers = []
names = []
def traverse(state, start, stop, height):
if not state["count"]:
return
starts.append(start)
stops.append(stop)
heights.append(height)
width = stop - start
widths.append(width)
states.append(state)
times.append(format_time(state["count"] * profile_interval))
desc = state["description"]
filenames.append(desc["filename"])
lines.append(desc["line"])
line_numbers.append(desc["line_number"])
names.append(desc["name"])
ident = state["identifier"]
try:
fn = desc["filename"]
except IndexError:
colors.append("gray")
else:
if fn == "<low-level>":
colors.append("lightgray")
else:
colors.append(color_of(fn))
delta = (stop - start) / state["count"]
x = start
for name, child in state["children"].items():
width = child["count"] * delta
traverse(child, x, x + width, height + 1)
x += width
traverse(state, 0, 1, 0)
percentages = ["{:.1f}%".format(100 * w) for w in widths]
return {
"left": starts,
"right": stops,
"bottom": heights,
"width": widths,
"top": [x + 1 for x in heights],
"color": colors,
"states": states,
"filename": filenames,
"line": lines,
"line_number": line_numbers,
"name": names,
"time": times,
"percentage": percentages,
}
def _watch(thread_id, log, interval="20ms", cycle="2s", omit=None, stop=lambda: False):
interval = parse_timedelta(interval)
cycle = parse_timedelta(cycle)
recent = create()
last = time()
while not stop():
if time() > last + cycle:
log.append((time(), recent))
recent = create()
last = time()
try:
frame = sys._current_frames()[thread_id]
except KeyError:
return
process(frame, None, recent, omit=omit)
sleep(interval)
def watch(
thread_id=None,
interval="20ms",
cycle="2s",
maxlen=1000,
omit=None,
stop=lambda: False,
):
""" Gather profile information on a particular thread
This starts a new thread to watch a particular thread and returns a deque
that holds periodic profile information.
Parameters
----------
thread_id: int
interval: str
Time per sample
cycle: str
Time per refreshing to a new profile state
maxlen: int
Passed onto deque, maximum number of periods
omit: str
Don't include entries that start with this filename
stop: callable
Function to call to see if we should stop
Returns
-------
deque
"""
if thread_id is None:
thread_id = threading.get_ident()
log = deque(maxlen=maxlen)
thread = threading.Thread(
target=_watch,
name="Profile",
kwargs={
"thread_id": thread_id,
"interval": interval,
"cycle": cycle,
"log": log,
"omit": omit,
"stop": stop,
},
)
thread.daemon = True
thread.start()
return log
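def _watch_demo():  # pragma: no cover
    """Illustrative sketch only (not part of the upstream module): profile the
    calling thread for half a second, then inspect the recorded states."""
    log = watch(interval="10ms", cycle="100ms", maxlen=100)
    sleep(0.5)  # stand-in for real work on this thread
    for timestamp, state in list(log):
        assert state["identifier"] == "root"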
def get_profile(history, recent=None, start=None, stop=None, key=None):
""" Collect profile information from a sequence of profile states
Parameters
----------
history: Sequence[Tuple[time, Dict]]
A list or deque of profile states
recent: dict
The most recent accumulating state
start: time
stop: time
"""
now = time()
if start is None:
istart = 0
else:
istart = bisect.bisect_left(history, (start,))
if stop is None:
istop = None
else:
istop = bisect.bisect_right(history, (stop,)) + 1
if istop >= len(history):
istop = None # include end
if istart == 0 and istop is None:
history = list(history)
else:
iistop = len(history) if istop is None else istop
history = [history[i] for i in range(istart, iistop)]
    if not history:
        return create()
    prof = merge(*toolz.pluck(1, history))
if recent:
prof = merge(prof, recent)
return prof
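def _example_get_profile():  # pragma: no cover
    # Hedged sketch (illustration only): a history is a time-sorted sequence
    # of (timestamp, state) pairs; ``get_profile`` merges the states falling
    # inside the requested [start, stop] window.
    t = time()
    history = [(t - 1.0, create()), (t, create())]
    prof = get_profile(history, start=t - 2.0, stop=t + 1.0)
    assert "count" in prof and "children" in prof
    return prof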
def plot_figure(data, **kwargs):
""" Plot profile data using Bokeh
This takes the output from the function ``plot_data`` and produces a Bokeh
figure
See Also
--------
plot_data
"""
from bokeh.plotting import ColumnDataSource, figure
from bokeh.models import HoverTool
if "states" in data:
data = toolz.dissoc(data, "states")
source = ColumnDataSource(data=data)
fig = figure(tools="tap,box_zoom,xwheel_zoom,reset", **kwargs)
r = fig.quad(
"left",
"right",
"top",
"bottom",
color="color",
line_color="black",
line_width=2,
source=source,
)
r.selection_glyph = None
r.nonselection_glyph = None
hover = HoverTool(
point_policy="follow_mouse",
tooltips="""
<div>
<span style="font-size: 14px; font-weight: bold;">Name:</span>
<span style="font-size: 10px; font-family: Monaco, monospace;">@name</span>
</div>
<div>
<span style="font-size: 14px; font-weight: bold;">Filename:</span>
<span style="font-size: 10px; font-family: Monaco, monospace;">@filename</span>
</div>
<div>
<span style="font-size: 14px; font-weight: bold;">Line number:</span>
<span style="font-size: 10px; font-family: Monaco, monospace;">@line_number</span>
</div>
<div>
<span style="font-size: 14px; font-weight: bold;">Line:</span>
<span style="font-size: 10px; font-family: Monaco, monospace;">@line</span>
</div>
<div>
<span style="font-size: 14px; font-weight: bold;">Time:</span>
<span style="font-size: 10px; font-family: Monaco, monospace;">@time</span>
</div>
<div>
<span style="font-size: 14px; font-weight: bold;">Percentage:</span>
<span style="font-size: 10px; font-family: Monaco, monospace;">@percentage</span>
</div>
""",
)
fig.add_tools(hover)
fig.xaxis.visible = False
fig.yaxis.visible = False
fig.grid.visible = False
return fig, source
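def _example_plot_figure():  # pragma: no cover
    # Hedged sketch (illustration only): the output of ``plot_data`` feeds
    # straight into ``plot_figure``.  ``bokeh.io.show`` is assumed to be
    # available in this Bokeh installation.
    from bokeh.io import show
    fig, source = plot_figure(plot_data(create()))
    show(fig)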
def _remove_py_stack(frames):
for entry in frames:
if entry.is_python:
break
yield entry
def llprocess(frames, child, state):
""" Add counts from low level profile information onto existing state
This uses the ``stacktrace`` module to collect low level stack trace
information and place it onto the given sttate.
It is configured with the ``distributed.worker.profile.low-level`` config
entry.
See Also
--------
process
ll_get_stack
"""
if not frames:
return
frame = frames.pop()
if frames:
state = llprocess(frames, frame, state)
addr = hex(frame.addr - frame.offset)
ident = ";".join(map(str, (frame.name, "<low-level>", addr)))
try:
d = state["children"][ident]
except KeyError:
d = {
"count": 0,
"description": {
"filename": "<low-level>",
"name": frame.name,
"line_number": 0,
"line": str(frame),
},
"children": {},
"identifier": ident,
}
state["children"][ident] = d
state["count"] += 1
if child is not None:
return d
else:
d["count"] += 1
def ll_get_stack(tid):
""" Collect low level stack information from thread id """
from stacktrace import get_thread_stack
frames = get_thread_stack(tid, show_python=False)
llframes = list(_remove_py_stack(frames))[::-1]
return llframes
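def _example_low_level():  # pragma: no cover
    # Hedged sketch (illustration only): fold the current thread's low-level
    # frames into a fresh profile state.  Requires the optional
    # ``stacktrace`` package that ``ll_get_stack`` imports.
    state = create()
    frames = ll_get_stack(threading.get_ident())
    llprocess(frames, None, state)
    return state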
| bsd-3-clause |
AkademieOlympia/sympy | sympy/physics/vector/tests/test_frame.py | 61 | 7994 | from sympy import sin, cos, pi, zeros, ImmutableMatrix as Matrix
from sympy.physics.vector import (ReferenceFrame, Vector, CoordinateSym,
dynamicsymbols, time_derivative, express)
Vector.simp = True
def test_coordinate_vars():
"""Tests the coordinate variables functionality"""
A = ReferenceFrame('A')
assert CoordinateSym('Ax', A, 0) == A[0]
assert CoordinateSym('Ax', A, 1) == A[1]
assert CoordinateSym('Ax', A, 2) == A[2]
q = dynamicsymbols('q')
qd = dynamicsymbols('q', 1)
    assert isinstance(A[0], CoordinateSym) and \
           isinstance(A[1], CoordinateSym) and \
           isinstance(A[2], CoordinateSym)
assert A.variable_map(A) == {A[0]:A[0], A[1]:A[1], A[2]:A[2]}
assert A[0].frame == A
B = A.orientnew('B', 'Axis', [q, A.z])
assert B.variable_map(A) == {B[2]: A[2], B[1]: -A[0]*sin(q) + A[1]*cos(q),
B[0]: A[0]*cos(q) + A[1]*sin(q)}
assert A.variable_map(B) == {A[0]: B[0]*cos(q) - B[1]*sin(q),
A[1]: B[0]*sin(q) + B[1]*cos(q), A[2]: B[2]}
assert time_derivative(B[0], A) == -A[0]*sin(q)*qd + A[1]*cos(q)*qd
assert time_derivative(B[1], A) == -A[0]*cos(q)*qd - A[1]*sin(q)*qd
assert time_derivative(B[2], A) == 0
assert express(B[0], A, variables=True) == A[0]*cos(q) + A[1]*sin(q)
assert express(B[1], A, variables=True) == -A[0]*sin(q) + A[1]*cos(q)
assert express(B[2], A, variables=True) == A[2]
assert time_derivative(A[0]*A.x + A[1]*A.y + A[2]*A.z, B) == A[1]*qd*A.x - A[0]*qd*A.y
assert time_derivative(B[0]*B.x + B[1]*B.y + B[2]*B.z, A) == - B[1]*qd*B.x + B[0]*qd*B.y
assert express(B[0]*B[1]*B[2], A, variables=True) == \
A[2]*(-A[0]*sin(q) + A[1]*cos(q))*(A[0]*cos(q) + A[1]*sin(q))
assert (time_derivative(B[0]*B[1]*B[2], A) -
(A[2]*(-A[0]**2*cos(2*q) -
2*A[0]*A[1]*sin(2*q) +
A[1]**2*cos(2*q))*qd)).trigsimp() == 0
assert express(B[0]*B.x + B[1]*B.y + B[2]*B.z, A) == \
(B[0]*cos(q) - B[1]*sin(q))*A.x + (B[0]*sin(q) + \
B[1]*cos(q))*A.y + B[2]*A.z
assert express(B[0]*B.x + B[1]*B.y + B[2]*B.z, A, variables=True) == \
A[0]*A.x + A[1]*A.y + A[2]*A.z
assert express(A[0]*A.x + A[1]*A.y + A[2]*A.z, B) == \
(A[0]*cos(q) + A[1]*sin(q))*B.x + \
(-A[0]*sin(q) + A[1]*cos(q))*B.y + A[2]*B.z
assert express(A[0]*A.x + A[1]*A.y + A[2]*A.z, B, variables=True) == \
B[0]*B.x + B[1]*B.y + B[2]*B.z
N = B.orientnew('N', 'Axis', [-q, B.z])
assert N.variable_map(A) == {N[0]: A[0], N[2]: A[2], N[1]: A[1]}
C = A.orientnew('C', 'Axis', [q, A.x + A.y + A.z])
mapping = A.variable_map(C)
assert mapping[A[0]] == 2*C[0]*cos(q)/3 + C[0]/3 - 2*C[1]*sin(q + pi/6)/3 +\
C[1]/3 - 2*C[2]*cos(q + pi/3)/3 + C[2]/3
assert mapping[A[1]] == -2*C[0]*cos(q + pi/3)/3 + \
C[0]/3 + 2*C[1]*cos(q)/3 + C[1]/3 - 2*C[2]*sin(q + pi/6)/3 + C[2]/3
assert mapping[A[2]] == -2*C[0]*sin(q + pi/6)/3 + C[0]/3 - \
2*C[1]*cos(q + pi/3)/3 + C[1]/3 + 2*C[2]*cos(q)/3 + C[2]/3
def test_ang_vel():
q1, q2, q3, q4 = dynamicsymbols('q1 q2 q3 q4')
q1d, q2d, q3d, q4d = dynamicsymbols('q1 q2 q3 q4', 1)
N = ReferenceFrame('N')
A = N.orientnew('A', 'Axis', [q1, N.z])
B = A.orientnew('B', 'Axis', [q2, A.x])
C = B.orientnew('C', 'Axis', [q3, B.y])
D = N.orientnew('D', 'Axis', [q4, N.y])
u1, u2, u3 = dynamicsymbols('u1 u2 u3')
assert A.ang_vel_in(N) == (q1d)*A.z
assert B.ang_vel_in(N) == (q2d)*B.x + (q1d)*A.z
assert C.ang_vel_in(N) == (q3d)*C.y + (q2d)*B.x + (q1d)*A.z
A2 = N.orientnew('A2', 'Axis', [q4, N.y])
assert N.ang_vel_in(N) == 0
assert N.ang_vel_in(A) == -q1d*N.z
assert N.ang_vel_in(B) == -q1d*A.z - q2d*B.x
assert N.ang_vel_in(C) == -q1d*A.z - q2d*B.x - q3d*B.y
assert N.ang_vel_in(A2) == -q4d*N.y
assert A.ang_vel_in(N) == q1d*N.z
assert A.ang_vel_in(A) == 0
assert A.ang_vel_in(B) == - q2d*B.x
assert A.ang_vel_in(C) == - q2d*B.x - q3d*B.y
assert A.ang_vel_in(A2) == q1d*N.z - q4d*N.y
assert B.ang_vel_in(N) == q1d*A.z + q2d*A.x
assert B.ang_vel_in(A) == q2d*A.x
assert B.ang_vel_in(B) == 0
assert B.ang_vel_in(C) == -q3d*B.y
assert B.ang_vel_in(A2) == q1d*A.z + q2d*A.x - q4d*N.y
assert C.ang_vel_in(N) == q1d*A.z + q2d*A.x + q3d*B.y
assert C.ang_vel_in(A) == q2d*A.x + q3d*C.y
assert C.ang_vel_in(B) == q3d*B.y
assert C.ang_vel_in(C) == 0
assert C.ang_vel_in(A2) == q1d*A.z + q2d*A.x + q3d*B.y - q4d*N.y
assert A2.ang_vel_in(N) == q4d*A2.y
assert A2.ang_vel_in(A) == q4d*A2.y - q1d*N.z
assert A2.ang_vel_in(B) == q4d*N.y - q1d*A.z - q2d*A.x
assert A2.ang_vel_in(C) == q4d*N.y - q1d*A.z - q2d*A.x - q3d*B.y
assert A2.ang_vel_in(A2) == 0
C.set_ang_vel(N, u1*C.x + u2*C.y + u3*C.z)
assert C.ang_vel_in(N) == (u1)*C.x + (u2)*C.y + (u3)*C.z
assert N.ang_vel_in(C) == (-u1)*C.x + (-u2)*C.y + (-u3)*C.z
assert C.ang_vel_in(D) == (u1)*C.x + (u2)*C.y + (u3)*C.z + (-q4d)*D.y
assert D.ang_vel_in(C) == (-u1)*C.x + (-u2)*C.y + (-u3)*C.z + (q4d)*D.y
q0 = dynamicsymbols('q0')
q0d = dynamicsymbols('q0', 1)
E = N.orientnew('E', 'Quaternion', (q0, q1, q2, q3))
assert E.ang_vel_in(N) == (
2 * (q1d * q0 + q2d * q3 - q3d * q2 - q0d * q1) * E.x +
2 * (q2d * q0 + q3d * q1 - q1d * q3 - q0d * q2) * E.y +
2 * (q3d * q0 + q1d * q2 - q2d * q1 - q0d * q3) * E.z)
F = N.orientnew('F', 'Body', (q1, q2, q3), '313')
assert F.ang_vel_in(N) == ((sin(q2)*sin(q3)*q1d + cos(q3)*q2d)*F.x +
(sin(q2)*cos(q3)*q1d - sin(q3)*q2d)*F.y + (cos(q2)*q1d + q3d)*F.z)
G = N.orientnew('G', 'Axis', (q1, N.x + N.y))
assert G.ang_vel_in(N) == q1d * (N.x + N.y).normalize()
assert N.ang_vel_in(G) == -q1d * (N.x + N.y).normalize()
def test_dcm():
q1, q2, q3, q4 = dynamicsymbols('q1 q2 q3 q4')
N = ReferenceFrame('N')
A = N.orientnew('A', 'Axis', [q1, N.z])
B = A.orientnew('B', 'Axis', [q2, A.x])
C = B.orientnew('C', 'Axis', [q3, B.y])
D = N.orientnew('D', 'Axis', [q4, N.y])
E = N.orientnew('E', 'Space', [q1, q2, q3], '123')
assert N.dcm(C) == Matrix([
[- sin(q1) * sin(q2) * sin(q3) + cos(q1) * cos(q3), - sin(q1) *
cos(q2), sin(q1) * sin(q2) * cos(q3) + sin(q3) * cos(q1)], [sin(q1) *
cos(q3) + sin(q2) * sin(q3) * cos(q1), cos(q1) * cos(q2), sin(q1) *
sin(q3) - sin(q2) * cos(q1) * cos(q3)], [- sin(q3) * cos(q2), sin(q2),
cos(q2) * cos(q3)]])
# This is a little touchy. Is it ok to use simplify in assert?
test_mat = D.dcm(C) - Matrix(
[[cos(q1) * cos(q3) * cos(q4) - sin(q3) * (- sin(q4) * cos(q2) +
sin(q1) * sin(q2) * cos(q4)), - sin(q2) * sin(q4) - sin(q1) *
cos(q2) * cos(q4), sin(q3) * cos(q1) * cos(q4) + cos(q3) * (- sin(q4) *
cos(q2) + sin(q1) * sin(q2) * cos(q4))], [sin(q1) * cos(q3) +
sin(q2) * sin(q3) * cos(q1), cos(q1) * cos(q2), sin(q1) * sin(q3) -
sin(q2) * cos(q1) * cos(q3)], [sin(q4) * cos(q1) * cos(q3) -
sin(q3) * (cos(q2) * cos(q4) + sin(q1) * sin(q2) * sin(q4)), sin(q2) *
cos(q4) - sin(q1) * sin(q4) * cos(q2), sin(q3) * sin(q4) * cos(q1) +
cos(q3) * (cos(q2) * cos(q4) + sin(q1) * sin(q2) * sin(q4))]])
assert test_mat.expand() == zeros(3, 3)
assert E.dcm(N) == Matrix(
[[cos(q2)*cos(q3), sin(q3)*cos(q2), -sin(q2)],
[sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1), sin(q1)*sin(q2)*sin(q3) +
cos(q1)*cos(q3), sin(q1)*cos(q2)], [sin(q1)*sin(q3) +
sin(q2)*cos(q1)*cos(q3), - sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1),
cos(q1)*cos(q2)]])
def test_orientnew_respects_parent_class():
class MyReferenceFrame(ReferenceFrame):
pass
B = MyReferenceFrame('B')
C = B.orientnew('C', 'Axis', [0, B.x])
assert isinstance(C, MyReferenceFrame)
| bsd-3-clause |
DeanChan/Lasagne | lasagne/tests/test_theano_extensions.py | 8 | 4248 | import pytest
import numpy as np
import theano.tensor as T
import lasagne
@pytest.mark.parametrize('impl', ['conv1d_sc', 'conv1d_mc0',
'conv1d_mc1', 'conv1d_unstrided',
'conv1d_sd', 'conv1d_md'])
@pytest.mark.parametrize('stride', [1, 2])
def test_conv(impl, stride):
import lasagne.theano_extensions.conv
conv = getattr(lasagne.theano_extensions.conv, impl)
X = T.tensor3()
W = T.tensor3()
input = lasagne.utils.floatX(np.ones((1, 1, 10)))
kernel = lasagne.utils.floatX(np.random.uniform(-1, 1, (2, 1, 6)))
conv_theano = conv(X, W, input.shape, kernel.shape, subsample=(stride,)
).eval({X: input, W: kernel})
output = []
for b in input:
temp = []
for c in kernel:
temp.append(
np.convolve(b[0, :], c[0, :], mode='valid'))
output.append(temp)
conv_np = np.array(output)[:, :, ::stride]
assert np.allclose(conv_theano, conv_np)
@pytest.mark.parametrize('impl', ['conv1d_sc', 'conv1d_mc0', 'conv1d_mc1'])
def test_conv_nones(impl):
import lasagne.theano_extensions.conv
conv = getattr(lasagne.theano_extensions.conv, impl)
X = T.tensor3()
W = T.tensor3()
input = lasagne.utils.floatX(np.ones((1, 1, 12)))
kernel = lasagne.utils.floatX(np.random.uniform(-1, 1, (2, 1, 3)))
conv_theano = conv(X, W, None, None).eval({
X: input, W: kernel
})
output = []
for b in input:
temp = []
for c in kernel:
temp.append(
np.convolve(b[0, :], c[0, :], mode='valid'))
output.append(temp)
conv_np = np.array(output)
assert np.allclose(conv_theano, conv_np)
@pytest.mark.parametrize('impl', ['conv1d_sc', 'conv1d_mc0',
'conv1d_mc1', 'conv1d_unstrided',
'conv1d_sd', 'conv1d_md'])
def test_conv_border_mode(impl):
import lasagne.theano_extensions.conv
conv = getattr(lasagne.theano_extensions.conv, impl)
X = T.tensor3()
W = T.tensor3()
with pytest.raises(Exception):
conv(X, W, (1, 1, 10), (2, 1, 3), border_mode=None)
@pytest.mark.parametrize('impl', ['conv1d_unstrided', 'conv1d_sd',
'conv1d_md'])
def test_conv_stride(impl):
import lasagne.theano_extensions.conv
conv = getattr(lasagne.theano_extensions.conv, impl)
X = T.tensor3()
W = T.tensor3()
with pytest.raises(Exception):
conv(X, W, (1, 1, 10), (2, 1, 3), subsample=(2,))
@pytest.mark.parametrize('val', [0, 7])
@pytest.mark.parametrize('batch_ndim', [1, 2])
def test_pad(batch_ndim, val, width=3):
from lasagne.theano_extensions.padding import pad
X = T.tensor4()
X0 = lasagne.utils.floatX(np.ones((2, 3, 4, 5)))
X_pad_theano = pad(X, width, val, batch_ndim).eval({X: X0})
pads = tuple((width, width) if i >= batch_ndim else (0, 0)
for i, _ in enumerate(X0.shape))
X_pad_np = np.pad(X0, pads, mode='constant', constant_values=val)
assert (X_pad_theano == X_pad_np).all()
@pytest.mark.parametrize('batch_ndim', [1, 2])
def test_pad_width_per_axis(batch_ndim, val=0):
from lasagne.theano_extensions.padding import pad
width = (1, 2, 3, 4)
X = T.tensor4()
X0 = lasagne.utils.floatX(np.ones((2, 3, 4, 5)))
X_pad_theano = pad(X, width[batch_ndim:], val, batch_ndim).eval({X: X0})
pads = tuple((w, w) if i >= batch_ndim else (0, 0)
for i, w in enumerate(width))
X_pad_np = np.pad(X0, pads, mode='constant', constant_values=val)
assert (X_pad_theano == X_pad_np).all()
@pytest.mark.parametrize('batch_ndim', [1, 2])
def test_pad_width_per_border(batch_ndim, val=0):
from lasagne.theano_extensions.padding import pad
width = [(1, 2), (3, 4), (1, 2), (3, 4)]
X = T.tensor4()
X0 = lasagne.utils.floatX(np.ones((2, 3, 4, 5)))
X_pad_theano = pad(X, width[batch_ndim:], val, batch_ndim).eval({X: X0})
pads = tuple(w if i >= batch_ndim else (0, 0)
for i, w in enumerate(width))
X_pad_np = np.pad(X0, pads, mode='constant', constant_values=val)
assert (X_pad_theano == X_pad_np).all()
| mit |
RudoCris/horizon | openstack_dashboard/dashboards/project/access_and_security/floating_ips/tests.py | 29 | 15039 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from django.utils.http import urlencode
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security \
.floating_ips import tables
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
from horizon.workflows import views
INDEX_URL = reverse('horizon:project:access_and_security:index')
NAMESPACE = "horizon:project:access_and_security:floating_ips"
class FloatingIpViewTests(test.TestCase):
@test.create_stubs({api.network: ('floating_ip_target_list',
'tenant_floating_ip_list',)})
def test_associate(self):
api.network.floating_ip_target_list(IsA(http.HttpRequest)) \
.AndReturn(self.servers.list())
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.AndReturn(self.floating_ips.list())
self.mox.ReplayAll()
url = reverse('%s:associate' % NAMESPACE)
res = self.client.get(url)
self.assertTemplateUsed(res, views.WorkflowView.template_name)
workflow = res.context['workflow']
choices = dict(workflow.steps[0].action.fields['ip_id'].choices)
# Verify that our "associated" floating IP isn't in the choices list.
self.assertTrue(self.floating_ips.first() not in choices)
@test.create_stubs({api.network: ('floating_ip_target_list',
'floating_ip_target_get_by_instance',
'tenant_floating_ip_list',)})
def test_associate_with_instance_id(self):
api.network.floating_ip_target_list(IsA(http.HttpRequest)) \
.AndReturn(self.servers.list())
api.network.floating_ip_target_get_by_instance(
IsA(http.HttpRequest), 'TEST-ID', self.servers.list()) \
.AndReturn('TEST-ID')
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.AndReturn(self.floating_ips.list())
self.mox.ReplayAll()
base_url = reverse('%s:associate' % NAMESPACE)
params = urlencode({'instance_id': 'TEST-ID'})
url = '?'.join([base_url, params])
res = self.client.get(url)
self.assertTemplateUsed(res, views.WorkflowView.template_name)
workflow = res.context['workflow']
choices = dict(workflow.steps[0].action.fields['ip_id'].choices)
# Verify that our "associated" floating IP isn't in the choices list.
self.assertTrue(self.floating_ips.first() not in choices)
@test.create_stubs({api.network: ('floating_ip_associate',
'floating_ip_target_list',
'tenant_floating_ip_list',)})
def test_associate_post(self):
floating_ip = self.floating_ips.list()[1]
server = self.servers.first()
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.AndReturn(self.floating_ips.list())
api.network.floating_ip_target_list(IsA(http.HttpRequest)) \
.AndReturn(self.servers.list())
api.network.floating_ip_associate(IsA(http.HttpRequest),
floating_ip.id,
server.id)
self.mox.ReplayAll()
form_data = {'instance_id': server.id,
'ip_id': floating_ip.id}
url = reverse('%s:associate' % NAMESPACE)
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.network: ('floating_ip_associate',
'floating_ip_target_list',
'tenant_floating_ip_list',)})
def test_associate_post_with_redirect(self):
floating_ip = self.floating_ips.list()[1]
server = self.servers.first()
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.AndReturn(self.floating_ips.list())
api.network.floating_ip_target_list(IsA(http.HttpRequest)) \
.AndReturn(self.servers.list())
api.network.floating_ip_associate(IsA(http.HttpRequest),
floating_ip.id,
server.id)
self.mox.ReplayAll()
form_data = {'instance_id': server.id,
'ip_id': floating_ip.id}
url = reverse('%s:associate' % NAMESPACE)
next = reverse("horizon:project:instances:index")
res = self.client.post("%s?next=%s" % (url, next), form_data)
self.assertRedirectsNoFollow(res, next)
@test.create_stubs({api.network: ('floating_ip_associate',
'floating_ip_target_list',
'tenant_floating_ip_list',)})
def test_associate_post_with_exception(self):
floating_ip = self.floating_ips.list()[1]
server = self.servers.first()
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.AndReturn(self.floating_ips.list())
api.network.floating_ip_target_list(IsA(http.HttpRequest)) \
.AndReturn(self.servers.list())
api.network.floating_ip_associate(IsA(http.HttpRequest),
floating_ip.id,
server.id) \
.AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
form_data = {'instance_id': server.id,
'ip_id': floating_ip.id}
url = reverse('%s:associate' % NAMESPACE)
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.nova: ('server_list',),
api.network: ('floating_ip_disassociate',
'floating_ip_supported',
'tenant_floating_ip_get',
'tenant_floating_ip_list',)})
def test_disassociate_post(self):
floating_ip = self.floating_ips.first()
api.nova.server_list(IsA(http.HttpRequest)) \
.AndReturn([self.servers.list(), False])
api.network.floating_ip_supported(IsA(http.HttpRequest)) \
.AndReturn(True)
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.AndReturn(self.floating_ips.list())
api.network.floating_ip_disassociate(IsA(http.HttpRequest),
floating_ip.id)
self.mox.ReplayAll()
action = "floating_ips__disassociate__%s" % floating_ip.id
res = self.client.post(INDEX_URL, {"action": action})
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.nova: ('server_list',),
api.network: ('floating_ip_disassociate',
'floating_ip_supported',
'tenant_floating_ip_get',
'tenant_floating_ip_list',)})
def test_disassociate_post_with_exception(self):
floating_ip = self.floating_ips.first()
api.nova.server_list(IsA(http.HttpRequest)) \
.AndReturn([self.servers.list(), False])
api.network.floating_ip_supported(IsA(http.HttpRequest)) \
.AndReturn(True)
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.AndReturn(self.floating_ips.list())
api.network.floating_ip_disassociate(IsA(http.HttpRequest),
floating_ip.id) \
.AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
action = "floating_ips__disassociate__%s" % floating_ip.id
res = self.client.post(INDEX_URL, {"action": action})
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.network: ('floating_ip_supported',
'tenant_floating_ip_list',
'security_group_list',
'floating_ip_pools_list',),
api.nova: ('keypair_list',
'server_list',),
quotas: ('tenant_quota_usages',),
api.base: ('is_service_enabled',)})
def test_allocate_button_disabled_when_quota_exceeded(self):
keypairs = self.keypairs.list()
floating_ips = self.floating_ips.list()
floating_pools = self.pools.list()
quota_data = self.quota_usages.first()
quota_data['floating_ips']['available'] = 0
sec_groups = self.security_groups.list()
api.network.floating_ip_supported(
IsA(http.HttpRequest)) \
.AndReturn(True)
api.network.tenant_floating_ip_list(
IsA(http.HttpRequest)) \
.AndReturn(floating_ips)
api.network.security_group_list(
IsA(http.HttpRequest)).MultipleTimes()\
.AndReturn(sec_groups)
api.network.floating_ip_pools_list(
IsA(http.HttpRequest)) \
.AndReturn(floating_pools)
api.nova.keypair_list(
IsA(http.HttpRequest)) \
.AndReturn(keypairs)
api.nova.server_list(
IsA(http.HttpRequest)) \
.AndReturn([self.servers.list(), False])
quotas.tenant_quota_usages(
IsA(http.HttpRequest)).MultipleTimes() \
.AndReturn(quota_data)
api.base.is_service_enabled(
IsA(http.HttpRequest),
'network').MultipleTimes() \
.AndReturn(True)
api.base.is_service_enabled(
IsA(http.HttpRequest),
'ec2').MultipleTimes() \
.AndReturn(False)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL +
"?tab=access_security_tabs__floating_ips_tab")
allocate_link = tables.AllocateIP()
url = allocate_link.get_link_url()
classes = (list(allocate_link.get_default_classes())
+ list(allocate_link.classes))
link_name = "%s (%s)" % (unicode(allocate_link.verbose_name),
"Quota exceeded")
expected_string = ("<a href='%s' title='%s' class='%s disabled' "
"id='floating_ips__action_allocate'>"
"<span class='fa fa-link'>"
"</span>%s</a>"
% (url, link_name, " ".join(classes), link_name))
self.assertContains(res, expected_string, html=True,
msg_prefix="The create button is not disabled")
class FloatingIpNeutronViewTests(FloatingIpViewTests):
def setUp(self):
super(FloatingIpViewTests, self).setUp()
self._floating_ips_orig = self.floating_ips
self.floating_ips = self.floating_ips_uuid
def tearDown(self):
self.floating_ips = self._floating_ips_orig
super(FloatingIpViewTests, self).tearDown()
@test.create_stubs({api.nova: ('tenant_quota_get', 'flavor_list',
'server_list'),
api.network: ('floating_ip_pools_list',
'floating_ip_supported',
'security_group_list',
'tenant_floating_ip_list'),
api.neutron: ('is_extension_supported',
'tenant_quota_get',
'network_list',
'router_list',
'subnet_list'),
api.base: ('is_service_enabled',)})
@test.update_settings(OPENSTACK_NEUTRON_NETWORK={'enable_quotas': True})
def test_correct_quotas_displayed(self):
servers = [s for s in self.servers.list()
if s.tenant_id == self.request.user.tenant_id]
api.base.is_service_enabled(IsA(http.HttpRequest), 'volume') \
.AndReturn(False)
api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
.MultipleTimes().AndReturn(True)
api.nova.tenant_quota_get(IsA(http.HttpRequest), '1') \
.AndReturn(self.quotas.first())
api.nova.flavor_list(IsA(http.HttpRequest)) \
.AndReturn(self.flavors.list())
search_opts = {'tenant_id': self.request.user.tenant_id}
api.nova.server_list(IsA(http.HttpRequest), search_opts=search_opts,
all_tenants=True) \
.AndReturn([servers, False])
api.neutron.is_extension_supported(
IsA(http.HttpRequest), 'security-group').AndReturn(True)
api.neutron.is_extension_supported(IsA(http.HttpRequest), 'quotas') \
.AndReturn(True)
api.neutron.tenant_quota_get(IsA(http.HttpRequest), self.tenant.id) \
.AndReturn(self.neutron_quotas.first())
api.neutron.router_list(IsA(http.HttpRequest)) \
.AndReturn(self.routers.list())
api.neutron.subnet_list(IsA(http.HttpRequest)) \
.AndReturn(self.subnets.list())
api.neutron.network_list(IsA(http.HttpRequest), shared=False) \
.AndReturn(self.networks.list())
api.network.floating_ip_supported(IsA(http.HttpRequest)) \
.AndReturn(True)
api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \
.MultipleTimes().AndReturn(self.floating_ips.list())
api.network.floating_ip_pools_list(IsA(http.HttpRequest)) \
.AndReturn(self.pools.list())
api.network.security_group_list(IsA(http.HttpRequest)) \
.AndReturn(self.security_groups.list())
self.mox.ReplayAll()
url = reverse('%s:allocate' % NAMESPACE)
res = self.client.get(url)
self.assertEqual(res.context['usages']['floating_ips']['quota'],
self.neutron_quotas.first().get('floatingip').limit)
| apache-2.0 |
mqyqlx/deeppy | deeppy/autoencoder/stacked_autoencoder.py | 13 | 2960 | import itertools
from ..base import ParamMixin
from ..loss import Loss
from .autoencoder import Autoencoder
class StackedAutoencoderLayer(Autoencoder):
def __init__(self, ae, prev_layers):
self.ae = ae
self.prev_layers = prev_layers
self._initialized = False
def _setup(self, x_shape):
# Setup layers sequentially
if self._initialized:
return
for ae in self.prev_layers:
ae._setup(x_shape)
x_shape = ae.output_shape(x_shape)
self.ae._setup(x_shape)
self._initialized = True
def _update(self, x):
for ae in self.prev_layers:
x = ae.encode(x)
return self.ae._update(x)
def _reconstruct_batch(self, x):
for ae in self.prev_layers:
x = ae.encode(x)
y = self.ae.encode(x)
x_prime = self.ae.decode(y)
for ae in reversed(self.prev_layers):
x_prime = ae.decode(x_prime)
return x_prime
def _embed_batch(self, x):
for ae in self.prev_layers:
x = ae.encode(x)
return self.ae.encode(x)
def __getattr__(self, attr):
# Wrap non-overriden Autoencoder attributes
if attr in self.__dict__:
return getattr(self, attr)
return getattr(self.ae, attr)
class StackedAutoencoder(Autoencoder):
def __init__(self, layers, loss='bce'):
self._initialized = False
self.layers = layers
self.loss = Loss.from_any(loss)
def _setup(self, x_shape):
if self._initialized:
return
for ae in self.layers:
ae._setup(x_shape)
x_shape = ae.output_shape(x_shape)
self.loss._setup(x_shape)
self._initialized = True
@property
def _params(self):
all_params = [ae._params for ae in self.layers
if isinstance(ae, ParamMixin)]
# Concatenate lists in list
return list(itertools.chain.from_iterable(all_params))
def encode(self, x):
for ae in self.layers:
x = ae.encode(x)
return x
def decode(self, y):
for ae in reversed(self.layers):
y = ae.decode(y)
return y
def decode_bprop(self, x_grad):
for ae in self.layers:
x_grad = ae.decode_bprop(x_grad)
return x_grad
def encode_bprop(self, y_grad):
for ae in reversed(self.layers):
y_grad = ae.encode_bprop(y_grad)
return y_grad
def _output_shape(self, x_shape):
for ae in self.layers:
x_shape = ae.output_shape(x_shape)
return x_shape
def feedforward_layers(self):
feedforward_layers = [ae.feedforward_layers() for ae in self.layers]
return list(itertools.chain.from_iterable(feedforward_layers))
def ae_models(self):
for i, ae in enumerate(self.layers):
yield StackedAutoencoderLayer(ae, self.layers[:i])
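def _example_layerwise_pretraining(sae, train_fn):  # pragma: no cover
    # Hedged sketch (illustration only): ``ae_models`` yields one
    # StackedAutoencoderLayer per layer, each encoding its input through the
    # previously trained layers, so training them in order performs greedy
    # layer-wise pretraining.  ``train_fn`` is a hypothetical callable that
    # trains a single autoencoder; it is not part of this module.
    for ae_layer in sae.ae_models():
        train_fn(ae_layer)
    return sae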
| mit |
rosmo/ansible | test/units/modules/network/ios/test_ios_bgp.py | 38 | 10658 | #
# (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.network.ios.providers.cli.config.bgp.process import Provider
from ansible.modules.network.ios import ios_bgp
from .ios_module import TestIosModule, load_fixture
class TestIosBgpModule(TestIosModule):
module = ios_bgp
def setUp(self):
super(TestIosBgpModule, self).setUp()
self._bgp_config = load_fixture('ios_bgp_config.cfg')
def test_ios_bgp(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, router_id='192.0.2.2', networks=None,
address_family=None), operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['router bgp 64496', 'bgp router-id 192.0.2.2', 'exit'])
def test_ios_bgp_idempotent(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, router_id='192.0.2.1', networks=None,
address_family=None), operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_remove(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, networks=None, address_family=None), operation='delete'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['no router bgp 64496'])
def test_ios_bgp_neighbor(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, neighbors=[dict(neighbor='192.51.100.2', remote_as=64496)],
networks=None, address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['router bgp 64496', 'neighbor 192.51.100.2 remote-as 64496', 'exit'])
def test_ios_bgp_neighbor_idempotent(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, neighbors=[dict(neighbor='192.51.100.1', remote_as=64496,
timers=dict(keepalive=120, holdtime=360,
min_neighbor_holdtime=360))],
networks=None, address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_network(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, networks=[dict(prefix='192.0.1.0', masklen=23, route_map='RMAP_1')],
address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(sorted(commands), sorted(['router bgp 64496', 'network 192.0.1.0 mask 255.255.254.0 route-map RMAP_1',
'exit']))
def test_ios_bgp_network_idempotent(self):
obj = Provider(
params=dict(config=dict(bgp_as=64496, networks=[dict(prefix='192.0.2.0', masklen=23, route_map='RMAP_1'),
dict(prefix='198.51.100.0', masklen=25,
route_map='RMAP_2')],
address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_address_family_redistribute(self):
rd_1 = dict(protocol='ospf', id='233', metric=90, route_map=None)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='unicast', redistribute=[rd_1])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4', 'redistribute ospf 233 metric 90',
'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_address_family_redistribute_idempotent(self):
rd_1 = dict(protocol='eigrp', metric=10, route_map='RMAP_3', id=None)
rd_2 = dict(protocol='static', metric=100, id=None, route_map=None)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='unicast', redistribute=[rd_1, rd_2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_address_family_neighbors(self):
af_nbr_1 = dict(neighbor='192.51.100.1', maximum_prefix=35, activate=True)
af_nbr_2 = dict(neighbor='192.51.100.3', route_reflector_client=True, activate=True)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='multicast', neighbors=[af_nbr_1, af_nbr_2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4 multicast', 'neighbor 192.51.100.1 activate',
'neighbor 192.51.100.1 maximum-prefix 35', 'neighbor 192.51.100.3 activate',
'neighbor 192.51.100.3 route-reflector-client', 'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_address_family_neighbors_idempotent(self):
af_nbr_1 = dict(neighbor='203.0.113.1', remove_private_as=True, maximum_prefix=100)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='unicast', neighbors=[af_nbr_1])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_address_family_networks(self):
net = dict(prefix='1.0.0.0', masklen=8, route_map='RMAP_1')
net2 = dict(prefix='192.168.1.0', masklen=24, route_map='RMAP_2')
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='multicast', networks=[net, net2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4 multicast', 'network 1.0.0.0 mask 255.0.0.0 route-map RMAP_1',
'network 192.168.1.0 mask 255.255.255.0 route-map RMAP_2', 'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_address_family_networks_idempotent(self):
net = dict(prefix='203.0.113.0', masklen=27, route_map='RMAP_1')
net2 = dict(prefix='192.0.2.0', masklen=26, route_map='RMAP_2')
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='multicast', networks=[net, net2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_operation_override(self):
net_1 = dict(prefix='1.0.0.0', masklen=8, route_map='RMAP_1')
net_2 = dict(prefix='192.168.1.0', masklen=24, route_map='RMAP_2')
nbr_1 = dict(neighbor='192.51.100.1', remote_as=64496, update_source='GigabitEthernet0/1')
nbr_2 = dict(neighbor='192.51.100.3', remote_as=64496, timers=dict(keepalive=300, holdtime=360,
min_neighbor_holdtime=360))
af_nbr_1 = dict(neighbor='192.51.100.1', maximum_prefix=35)
af_nbr_2 = dict(neighbor='192.51.100.3', route_reflector_client=True)
af_1 = dict(afi='ipv4', safi='unicast', neighbors=[af_nbr_1, af_nbr_2])
af_2 = dict(afi='ipv4', safi='multicast', networks=[net_1, net_2])
config = dict(bgp_as=64496, neighbors=[nbr_1, nbr_2], address_family=[af_1, af_2], networks=None)
obj = Provider(params=dict(config=config, operation='override'))
commands = obj.render(self._bgp_config)
cmd = ['no router bgp 64496', 'router bgp 64496', 'neighbor 192.51.100.1 remote-as 64496',
'neighbor 192.51.100.1 update-source GigabitEthernet0/1', 'neighbor 192.51.100.3 remote-as 64496',
'neighbor 192.51.100.3 timers 300 360 360', 'address-family ipv4',
'neighbor 192.51.100.1 maximum-prefix 35', 'neighbor 192.51.100.3 route-reflector-client',
'exit-address-family',
'address-family ipv4 multicast', 'network 1.0.0.0 mask 255.0.0.0 route-map RMAP_1',
'network 192.168.1.0 mask 255.255.255.0 route-map RMAP_2',
'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_operation_replace(self):
rd = dict(protocol='ospf', id=223, metric=110, route_map=None)
net = dict(prefix='203.0.113.0', masklen=27, route_map='RMAP_1')
net2 = dict(prefix='192.0.2.0', masklen=26, route_map='RMAP_2')
af_1 = dict(afi='ipv4', safi='unicast', redistribute=[rd])
af_2 = dict(afi='ipv4', safi='multicast', networks=[net, net2])
config = dict(bgp_as=64496, address_family=[af_1, af_2], networks=None)
obj = Provider(params=dict(config=config, operation='replace'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4', 'redistribute ospf 223 metric 110',
'no redistribute eigrp',
'no redistribute static', 'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_operation_replace_with_new_as(self):
rd = dict(protocol='ospf', id=223, metric=110, route_map=None)
af_1 = dict(afi='ipv4', safi='unicast', redistribute=[rd])
config = dict(bgp_as=64497, address_family=[af_1], networks=None)
obj = Provider(params=dict(config=config, operation='replace'))
commands = obj.render(self._bgp_config)
cmd = ['no router bgp 64496', 'router bgp 64497', 'address-family ipv4',
'redistribute ospf 223 metric 110',
'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
| gpl-3.0 |
Oire/TWBlue | src/gui/buffers/panels.py | 1 | 1532 | # -*- coding: utf-8 -*-
############################################################
# Copyright (c) 2014 Manuel Eduardo Cortéz Vallejo <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
############################################################
import wx
from multiplatform_widgets import widgets
import output
class accountPanel(wx.Panel):
def __init__(self, parent, name_buffer):
super(accountPanel, self).__init__(parent=parent)
self.type = "account"
self.name_buffer = name_buffer
sizer = wx.BoxSizer(wx.VERTICAL)
self.list = widgets.list(self, _(u"Announce"))
sizer.Add(self.list.list, 0, wx.ALL, 5)
self.SetSizer(sizer)
def get_more_items(self):
output.speak(_(u"This action is not supported for this buffer"))
class emptyPanel(accountPanel):
def __init__(self, parent):
super(emptyPanel, self).__init__(parent=parent, name_buffer="")
self.type = "empty"
| gpl-2.0 |
dkubiak789/odoo | addons/l10n_in_hr_payroll/report/__init__.py | 424 | 1262 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import report_payslip_details
import report_payroll_advice
import report_hr_salary_employee_bymonth
import payment_advice_report
import report_hr_yearly_salary_detail
import payslip_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
JTarball/generator-dockerized-django-polymer | app/templates/docker/app/app/backend/apps/_archive/accounts_maybe_good?/migrations/0001_initial.py | 4 | 3236 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import django.core.validators
import django.contrib.auth.models
class Migration(migrations.Migration):
dependencies = [
('auth', '0006_require_contenttypes_0002'),
]
operations = [
migrations.CreateModel(
name='AccountsUser',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(null=True, verbose_name='last login', blank=True)),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, max_length=30, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.', 'invalid')], help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True, verbose_name='username')),
('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)),
('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)),
('email', models.EmailField(max_length=254, verbose_name='email address', blank=True)),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('activation_key', models.CharField(max_length=40, verbose_name='activation key')),
('is_subscribed', models.BooleanField(default=False, help_text='Designates whether the user can is subscribed to the newsletter.', verbose_name='subscribed')),
('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', verbose_name='groups')),
('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
],
options={
'abstract': False,
'verbose_name': 'user',
'verbose_name_plural': 'users',
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| gpl-2.0 |
pcrouthers/Framework-One | node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py | 2779 | 1665 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
generator_default_variables = {
}
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
locals = {
'target_list': target_list,
'target_dicts': target_dicts,
'data': data,
}
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
(sys.version, sys.platform, repr(sorted(locals.keys())))
code.interact(banner, local=locals)
| mit |
rrrrrr8/vnpy | vnpy/api/xtp/pyscript/generate_struct_common.py | 4 | 2953 | # encoding: UTF-8
__author__ = 'CHENXY'
from xtp_data_type import *
type_dict = {
'uint64_t': 'uint64_t',
'uint32_t': 'uint32_t',
'int64_t': 'int64_t',
'int32_t': 'int32_t',
'char': 'string',
'double': 'float'
}
typedefDict.update(type_dict)
#----------------------------------------------------------------------
def replaceTabs(f):
"""把Tab用4个空格替代"""
l = []
for line in f:
        line = line.replace('\t', '    ')
l.append(line)
return l
def main():
"""主函数"""
fcpp = open('xtp_api_struct_common.h', 'r')
fpy = open('xtp_struct_common.py', 'w')
fpy.write('# encoding: UTF-8\n')
fpy.write('\n')
fpy.write('structDict = {}\n')
fpy.write('\n')
lcpp = replaceTabs(fcpp)
for n, line in enumerate(lcpp):
#print n
        # Comment above a struct declaration
if '///' in line and '\t' not in line:
py_line = '#' + line[3:]
if ' //' in line:
py_line = '#' + line[2:]
        # Comment on a struct member
elif ' ///' in line:
py_line = '#' + line[4:]
        # Struct declaration
elif 'struct ' in line:
content = line.split(' ')
name = content[2].replace('\n','')
name = name.replace('\r', '')
py_line = '%s = {}\n' % name
        # Struct member
        elif ('    ' == line[0:4] or '\t' == line[0]) and '()' not in line and '{' not in line and '}' not in line and '=' not in line:
line = line.replace('\t', ' ')
content = line.split(' ')
content = [k for k in content if k]
typedef = content[0].replace('\t', '')
typedef = typedef.replace('()', '')
typedef = typedef.replace('\r', '')
typedef = typedef.replace('\n', '')
type_ = typedefDict[typedef]
variable = content[1]
variable = variable.replace(';', "")
variable = variable.replace('\n', "")
variable = variable.replace('\r', "")
if '[' in variable:
k = variable.index('[')
variable = variable[0:k]
py_line = '%s["%s"] = "%s"\n' % (name, variable, type_)
        # End of struct
elif '}' in line:
py_line = "structDict['%s'] = %s\n\n" % (name, name)
otherName = line.split(' ')[1]
otherName = otherName.replace(';', '')
otherName = otherName.replace('\n', '')
otherName = otherName.replace('\r', '')
second_line = "structDict['%s'] = %s\n\n" % (otherName, name)
py_line = py_line + second_line
        # Start of struct
elif '{' in line:
py_line = ''
        # Anything else
else:
py_line = '\n'
fpy.write(py_line.decode('gbk').encode('utf-8'))
if __name__ == '__main__':
    main()
 | mit |
arokem/nipype | examples/smri_antsregistration_build_template.py | 14 | 7125 | #!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
======================================================
sMRI: Using new ANTS for creating a T1 template (ITK4)
======================================================
In this tutorial we will use an ANTS (new ITK4 version, aka "antsRegistration") based workflow to
create a template out of multiple T1 volumes. We will also showcase how to fine-tune SGE job requirements.
1. Tell python where to find the appropriate functions.
"""
import os
import nipype.interfaces.utility as util
import nipype.interfaces.ants as ants
import nipype.interfaces.io as io
import nipype.pipeline.engine as pe # pypeline engine
from nipype.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF
"""
2. Download T1 volumes into home directory
"""
import urllib2
homeDir=os.getenv("HOME")
requestedPath=os.path.join(homeDir,'nipypeTestPath')
mydatadir=os.path.realpath(requestedPath)
if not os.path.exists(mydatadir):
os.makedirs(mydatadir)
print mydatadir
MyFileURLs=[
('http://slicer.kitware.com/midas3/download?bitstream=13121','01_T1_half.nii.gz'),
('http://slicer.kitware.com/midas3/download?bitstream=13122','02_T1_half.nii.gz'),
('http://slicer.kitware.com/midas3/download?bitstream=13124','03_T1_half.nii.gz'),
('http://slicer.kitware.com/midas3/download?bitstream=13128','01_T1_inv_half.nii.gz'),
('http://slicer.kitware.com/midas3/download?bitstream=13123','02_T1_inv_half.nii.gz'),
('http://slicer.kitware.com/midas3/download?bitstream=13125','03_T1_inv_half.nii.gz'),
]
for tt in MyFileURLs:
myURL=tt[0]
localFilename=os.path.join(mydatadir,tt[1])
if not os.path.exists(localFilename):
remotefile = urllib2.urlopen(myURL)
localFile = open(localFilename, 'wb')
localFile.write(remotefile.read())
localFile.close()
print("Downloaded file: {0}".format(localFilename))
else:
print("File previously downloaded {0}".format(localFilename))
"""
ListOfImagesDictionaries - a list of dictionaries where each dictionary is
for one scan session, and the mappings in the dictionary are for all the
co-aligned images for that one scan session
"""
ListOfImagesDictionaries=[
{'T1':os.path.join(mydatadir,'01_T1_half.nii.gz'),'INV_T1':os.path.join(mydatadir,'01_T1_inv_half.nii.gz'),'LABEL_MAP':os.path.join(mydatadir,'01_T1_inv_half.nii.gz')},
{'T1':os.path.join(mydatadir,'02_T1_half.nii.gz'),'INV_T1':os.path.join(mydatadir,'02_T1_inv_half.nii.gz'),'LABEL_MAP':os.path.join(mydatadir,'02_T1_inv_half.nii.gz')},
{'T1':os.path.join(mydatadir,'03_T1_half.nii.gz'),'INV_T1':os.path.join(mydatadir,'03_T1_inv_half.nii.gz'),'LABEL_MAP':os.path.join(mydatadir,'03_T1_inv_half.nii.gz')}
]
input_passive_images=[
{'INV_T1':os.path.join(mydatadir,'01_T1_inv_half.nii.gz')},
{'INV_T1':os.path.join(mydatadir,'02_T1_inv_half.nii.gz')},
{'INV_T1':os.path.join(mydatadir,'03_T1_inv_half.nii.gz')}
]
"""
registrationImageTypes - A list of the image types to be used actively during
the estimation process of registration, any image type not in this list
will be passively resampled with the estimated transforms.
['T1','T2']
"""
registrationImageTypes=['T1']
"""
interpolationMap - A map of image types to interpolation modes. If an
image type is not listed, it will be linearly interpolated.
{ 'labelmap':'NearestNeighbor', 'FLAIR':'WindowedSinc' }
"""
interpolationMapping={'INV_T1':'LanczosWindowedSinc','LABEL_MAP':'NearestNeighbor','T1':'Linear'}
"""
3. Define the workflow and its working directory
"""
tbuilder=pe.Workflow(name="antsRegistrationTemplateBuilder")
tbuilder.base_dir=requestedPath
"""
4. Define data sources. In real life these would be replace by DataGrabbers
"""
InitialTemplateInputs=[ mdict['T1'] for mdict in ListOfImagesDictionaries ]
datasource = pe.Node(interface=util.IdentityInterface(fields=
['InitialTemplateInputs', 'ListOfImagesDictionaries',
'registrationImageTypes','interpolationMapping']),
run_without_submitting=True,
name='InputImages' )
datasource.inputs.InitialTemplateInputs=InitialTemplateInputs
datasource.inputs.ListOfImagesDictionaries=ListOfImagesDictionaries
datasource.inputs.registrationImageTypes=registrationImageTypes
datasource.inputs.interpolationMapping=interpolationMapping
datasource.inputs.sort_filelist = True
"""
5. Template is initialized by a simple average in this simple example,
any reference image could be used (i.e. a previously created template)
"""
initAvg = pe.Node(interface=ants.AverageImages(), name ='initAvg')
initAvg.inputs.dimension = 3
initAvg.inputs.normalize = True
tbuilder.connect(datasource, "InitialTemplateInputs", initAvg, "images")
"""
6. Define the first iteration of template building
"""
buildTemplateIteration1=antsRegistrationTemplateBuildSingleIterationWF('iteration01')
"""
Here we are fine-tuning parameters of the SGE job (memory limit, number of cores, etc.)
"""
BeginANTS = buildTemplateIteration1.get_node("BeginANTS")
BeginANTS.plugin_args={'qsub_args': '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', 'overwrite': True}
tbuilder.connect(initAvg, 'output_average_image', buildTemplateIteration1, 'inputspec.fixed_image')
tbuilder.connect(datasource, 'ListOfImagesDictionaries', buildTemplateIteration1, 'inputspec.ListOfImagesDictionaries')
tbuilder.connect(datasource, 'registrationImageTypes', buildTemplateIteration1, 'inputspec.registrationImageTypes')
tbuilder.connect(datasource, 'interpolationMapping', buildTemplateIteration1, 'inputspec.interpolationMapping')
"""
7. Define the second iteration of template building
"""
buildTemplateIteration2 = antsRegistrationTemplateBuildSingleIterationWF('iteration02')
BeginANTS = buildTemplateIteration2.get_node("BeginANTS")
BeginANTS.plugin_args={'qsub_args': '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', 'overwrite': True}
tbuilder.connect(buildTemplateIteration1, 'outputspec.template', buildTemplateIteration2, 'inputspec.fixed_image')
tbuilder.connect(datasource, 'ListOfImagesDictionaries', buildTemplateIteration2, 'inputspec.ListOfImagesDictionaries')
tbuilder.connect(datasource, 'registrationImageTypes', buildTemplateIteration2, 'inputspec.registrationImageTypes')
tbuilder.connect(datasource, 'interpolationMapping', buildTemplateIteration2, 'inputspec.interpolationMapping')
"""
8. Move selected files to a designated results folder
"""
datasink = pe.Node(io.DataSink(), name="datasink")
datasink.inputs.base_directory = os.path.join(requestedPath, "results")
tbuilder.connect(buildTemplateIteration2, 'outputspec.template',datasink,'PrimaryTemplate')
tbuilder.connect(buildTemplateIteration2, 'outputspec.passive_deformed_templates',datasink,'PassiveTemplate')
tbuilder.connect(initAvg, 'output_average_image', datasink,'PreRegisterAverage')
"""
9. Run the workflow
"""
tbuilder.run(plugin="SGE")
| bsd-3-clause |
fniephaus/SimpleHomeAutomation | server.py | 1 | 3225 | #!/usr/bin/env python
import config
import json
import datetime
import time
import hashlib
import atexit
from sets import Set
try:
from flask import Flask
from flask import jsonify, render_template, request
except ImportError:
print '[X] Please install Flask:'
print ' $ pip install flask\n'
exit()
try:
import serial
except ImportError:
    print '[X] Please install pySerial:'
print ' $ pip install pySerial\n'
exit()
try:
rfm12pi = serial.Serial('/dev/ttyAMA0', baudrate=9600, timeout=3.0)
except OSError:
rfm12pi = None
print '[X] RFM12Pi not found. Start server anyway...'
app = Flask('simplehomeautomation')
active_switches = Set()
@app.route('/')
def main():
return render_template(
'index.html',
config=config,
active_switches=active_switches,
logged_in=logged_in(request)
)
@app.route('/login', methods=['POST'])
def login():
if 'password' in request.form:
if request.form['password'] == config.PASSWORD:
return signed_response(jsonify({
'status': True
}))
return jsonify({
'status': False
})
@app.route('/control', methods=['POST'])
def control():
if not logged_in(request):
return jsonify({
'status': False
})
if all(x in request.form for x in ['system', 'device']):
system = request.form['system']
device = request.form['device']
switch = ('switch-%s-%s' % (system, device))
if 'state' in request.form:
state = request.form['state']
else:
state = '0' if switch in active_switches else '1'
# Send command if available
if rfm12pi:
rfm12pi.write('%s,%s,%se' % (system, device, state))
# Remember status
if state == '1':
active_switches.add(switch)
else:
active_switches.discard(switch)
return signed_response(jsonify({
'status': True
}))
return signed_response(jsonify({
'status': False
}))
@app.route('/status')
def status():
return jsonify({
'switches': list(active_switches) if logged_in(request) else []
})
def signed_response(response):
# Add cookie
expires = time.mktime((datetime.date.today() +
datetime.timedelta(days=7)).timetuple())
response.set_cookie(
config.COOKIE,
value=str(current_secret()),
expires=expires
)
return response
def logged_in(request):
valid_cookie = (config.COOKIE in request.cookies and
request.cookies[config.COOKIE] == str(current_secret()))
valid_secret = ('secret' in request.form and
request.form['secret'] == config.SECRET)
return valid_cookie or valid_secret
def current_secret():
return sha256(str(hash(app) * hash(config.SECRET)))
def sha256(string):
    return hashlib.sha256(string).hexdigest()
if __name__ == '__main__':
app.run(host=config.HOST, port=config.PORT, debug=config.DEBUG)
def close():
print '[X] Shutting down server...'
if rfm12pi:
rfm12pi.close()
atexit.register(close)
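def _example_client():  # pragma: no cover
    # Hedged client sketch (illustration only): the ``requests`` package and
    # the localhost base URL are assumptions, not part of this project.
    import requests
    base = 'http://localhost:%d' % config.PORT
    session = requests.Session()
    session.post(base + '/login', data={'password': config.PASSWORD})
    session.post(base + '/control',
                 data={'system': '1', 'device': '3', 'state': '1'})
    return session.get(base + '/status').json()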
| mit |
moreati/numpy | numpy/core/tests/test_records.py | 4 | 12859 | from __future__ import division, absolute_import, print_function
import sys
import collections
import pickle
from os import path
import numpy as np
from numpy.compat import asbytes
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_array_equal,
assert_array_almost_equal, assert_raises
)
class TestFromrecords(TestCase):
def test_fromrecords(self):
r = np.rec.fromrecords([[456, 'dbe', 1.2], [2, 'de', 1.3]],
names='col1,col2,col3')
assert_equal(r[0].item(), (456, 'dbe', 1.2))
assert_equal(r['col1'].dtype.kind, 'i')
if sys.version_info[0] >= 3:
assert_equal(r['col2'].dtype.kind, 'U')
assert_equal(r['col2'].dtype.itemsize, 12)
else:
assert_equal(r['col2'].dtype.kind, 'S')
assert_equal(r['col2'].dtype.itemsize, 3)
assert_equal(r['col3'].dtype.kind, 'f')
def test_method_array(self):
r = np.rec.array(asbytes('abcdefg') * 100, formats='i2,a3,i4', shape=3, byteorder='big')
assert_equal(r[1].item(), (25444, asbytes('efg'), 1633837924))
def test_method_array2(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1].item(), (2, 22.0, asbytes('b')))
def test_recarray_slices(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1::2][1].item(), (4, 44.0, asbytes('d')))
def test_recarray_fromarrays(self):
x1 = np.array([1, 2, 3, 4])
x2 = np.array(['a', 'dd', 'xyz', '12'])
x3 = np.array([1.1, 2, 3, 4])
r = np.rec.fromarrays([x1, x2, x3], names='a,b,c')
assert_equal(r[1].item(), (2, 'dd', 2.0))
x1[1] = 34
assert_equal(r.a, np.array([1, 2, 3, 4]))
def test_recarray_fromfile(self):
data_dir = path.join(path.dirname(__file__), 'data')
filename = path.join(data_dir, 'recarray_from_file.fits')
fd = open(filename, 'rb')
fd.seek(2880 * 2)
r1 = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.seek(2880 * 2)
r2 = np.rec.array(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.close()
assert_equal(r1, r2)
def test_recarray_from_obj(self):
count = 10
a = np.zeros(count, dtype='O')
b = np.zeros(count, dtype='f8')
c = np.zeros(count, dtype='f8')
for i in range(len(a)):
a[i] = list(range(1, 10))
mine = np.rec.fromarrays([a, b, c], names='date,data1,data2')
for i in range(len(a)):
assert_((mine.date[i] == list(range(1, 10))))
assert_((mine.data1[i] == 0.0))
assert_((mine.data2[i] == 0.0))
def test_recarray_from_repr(self):
a = np.array([(1,'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
recordarr = np.rec.array(a)
recarr = a.view(np.recarray)
recordview = a.view(np.dtype((np.record, a.dtype)))
recordarr_r = eval("numpy." + repr(recordarr), {'numpy': np})
recarr_r = eval("numpy." + repr(recarr), {'numpy': np})
recordview_r = eval("numpy." + repr(recordview), {'numpy': np})
assert_equal(type(recordarr_r), np.recarray)
assert_equal(recordarr_r.dtype.type, np.record)
assert_equal(recordarr, recordarr_r)
assert_equal(type(recarr_r), np.recarray)
assert_equal(recarr_r.dtype.type, np.record)
assert_equal(recarr, recarr_r)
assert_equal(type(recordview_r), np.ndarray)
assert_equal(recordview.dtype.type, np.record)
assert_equal(recordview, recordview_r)
def test_recarray_views(self):
a = np.array([(1,'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
b = np.array([1,2,3,4,5], dtype=np.int64)
#check that np.rec.array gives right dtypes
assert_equal(np.rec.array(a).dtype.type, np.record)
assert_equal(type(np.rec.array(a)), np.recarray)
assert_equal(np.rec.array(b).dtype.type, np.int64)
assert_equal(type(np.rec.array(b)), np.recarray)
#check that viewing as recarray does the same
assert_equal(a.view(np.recarray).dtype.type, np.record)
assert_equal(type(a.view(np.recarray)), np.recarray)
assert_equal(b.view(np.recarray).dtype.type, np.int64)
assert_equal(type(b.view(np.recarray)), np.recarray)
#check that view to non-structured dtype preserves type=np.recarray
r = np.rec.array(np.ones(4, dtype="f4,i4"))
rv = r.view('f8').view('f4,i4')
assert_equal(type(rv), np.recarray)
assert_equal(rv.dtype.type, np.record)
# check that accessing nested structures keep record type, but
# not for subarrays, non-void structures, non-structured voids
test_dtype = [('a', 'f4,f4'), ('b', 'V8'), ('c', ('f4',2)),
('d', ('i8', 'i4,i4'))]
r = np.rec.array([((1,1), b'11111111', [1,1], 1),
((1,1), b'11111111', [1,1], 1)], dtype=test_dtype)
assert_equal(r.a.dtype.type, np.record)
assert_equal(r.b.dtype.type, np.void)
assert_equal(r.c.dtype.type, np.float32)
assert_equal(r.d.dtype.type, np.int64)
# check the same, but for views
r = np.rec.array(np.ones(4, dtype='i4,i4'))
assert_equal(r.view('f4,f4').dtype.type, np.record)
assert_equal(r.view(('i4',2)).dtype.type, np.int32)
assert_equal(r.view('V8').dtype.type, np.void)
assert_equal(r.view(('i8', 'i4,i4')).dtype.type, np.int64)
#check that we can undo the view
arrs = [np.ones(4, dtype='f4,i4'), np.ones(4, dtype='f8')]
for arr in arrs:
rec = np.rec.array(arr)
# recommended way to view as an ndarray:
arr2 = rec.view(rec.dtype.fields or rec.dtype, np.ndarray)
assert_equal(arr2.dtype.type, arr.dtype.type)
assert_equal(type(arr2), type(arr))
def test_recarray_repr(self):
# make sure non-structured dtypes also show up as rec.array
a = np.array(np.ones(4, dtype='f8'))
assert_(repr(np.rec.array(a)).startswith('rec.array'))
# check that the 'np.record' part of the dtype isn't shown
a = np.rec.array(np.ones(3, dtype='i4,i4'))
assert_equal(repr(a).find('numpy.record'), -1)
a = np.rec.array(np.ones(3, dtype='i4'))
assert_(repr(a).find('dtype=int32') != -1)
def test_recarray_from_names(self):
ra = np.rec.array([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
pa = np.rec.fromrecords([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
assert_(ra.dtype == pa.dtype)
assert_(ra.shape == pa.shape)
for k in range(len(ra)):
assert_(ra[k].item() == pa[k].item())
def test_recarray_conflict_fields(self):
ra = np.rec.array([(1, 'abc', 2.3), (2, 'xyz', 4.2),
(3, 'wrs', 1.3)],
names='field, shape, mean')
ra.mean = [1.1, 2.2, 3.3]
assert_array_almost_equal(ra['mean'], [1.1, 2.2, 3.3])
assert_(type(ra.mean) is type(ra.var))
ra.shape = (1, 3)
assert_(ra.shape == (1, 3))
ra.shape = ['A', 'B', 'C']
assert_array_equal(ra['shape'], [['A', 'B', 'C']])
ra.field = 5
assert_array_equal(ra['field'], [[5, 5, 5]])
assert_(isinstance(ra.field, collections.Callable))
def test_fromrecords_with_explicit_dtype(self):
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')],
dtype=[('a', int), ('b', np.object)])
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
#
ndtype = np.dtype([('a', int), ('b', np.object)])
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=ndtype)
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
def test_recarray_stringtypes(self):
# Issue #3993
a = np.array([('abc ', 1), ('abc', 2)],
dtype=[('foo', 'S4'), ('bar', int)])
a = a.view(np.recarray)
assert_equal(a.foo[0] == a.foo[1], False)
def test_recarray_returntypes(self):
qux_fields = {'C': (np.dtype('S5'), 0), 'D': (np.dtype('S5'), 6)}
a = np.rec.array([('abc ', (1,1), 1, ('abcde', 'fgehi')),
('abc', (2,3), 1, ('abcde', 'jklmn'))],
dtype=[('foo', 'S4'),
('bar', [('A', int), ('B', int)]),
('baz', int), ('qux', qux_fields)])
assert_equal(type(a.foo), np.ndarray)
assert_equal(type(a['foo']), np.ndarray)
assert_equal(type(a.bar), np.recarray)
assert_equal(type(a['bar']), np.recarray)
assert_equal(a.bar.dtype.type, np.record)
assert_equal(type(a['qux']), np.recarray)
assert_equal(a.qux.dtype.type, np.record)
assert_equal(dict(a.qux.dtype.fields), qux_fields)
assert_equal(type(a.baz), np.ndarray)
assert_equal(type(a['baz']), np.ndarray)
assert_equal(type(a[0].bar), np.record)
assert_equal(type(a[0]['bar']), np.record)
assert_equal(a[0].bar.A, 1)
assert_equal(a[0].bar['A'], 1)
assert_equal(a[0]['bar'].A, 1)
assert_equal(a[0]['bar']['A'], 1)
assert_equal(a[0].qux.D, asbytes('fgehi'))
assert_equal(a[0].qux['D'], asbytes('fgehi'))
assert_equal(a[0]['qux'].D, asbytes('fgehi'))
assert_equal(a[0]['qux']['D'], asbytes('fgehi'))
class TestRecord(TestCase):
def setUp(self):
self.data = np.rec.fromrecords([(1, 2, 3), (4, 5, 6)],
dtype=[("col1", "<i4"),
("col2", "<i4"),
("col3", "<i4")])
def test_assignment1(self):
a = self.data
assert_equal(a.col1[0], 1)
a[0].col1 = 0
assert_equal(a.col1[0], 0)
def test_assignment2(self):
a = self.data
assert_equal(a.col1[0], 1)
a.col1[0] = 0
assert_equal(a.col1[0], 0)
def test_invalid_assignment(self):
a = self.data
def assign_invalid_column(x):
x[0].col5 = 1
self.assertRaises(AttributeError, assign_invalid_column, a)
def test_out_of_order_fields(self):
"""Ticket #1431."""
x = self.data[['col1', 'col2']]
y = self.data[['col2', 'col1']]
assert_equal(x[0][0], y[0][1])
def test_pickle_1(self):
# Issue #1529
a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)])
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_pickle_2(self):
a = self.data
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_objview_record(self):
# https://github.com/numpy/numpy/issues/2599
dt = np.dtype([('foo', 'i8'), ('bar', 'O')])
r = np.zeros((1,3), dtype=dt).view(np.recarray)
r.foo = np.array([1, 2, 3]) # TypeError?
# https://github.com/numpy/numpy/issues/3256
ra = np.recarray((2,), dtype=[('x', object), ('y', float), ('z', int)])
ra[['x','y']] # TypeError?
def test_record_scalar_setitem(self):
# https://github.com/numpy/numpy/issues/3561
rec = np.recarray(1, dtype=[('x', float, 5)])
rec[0].x = 1
assert_equal(rec[0].x, np.ones(5))
def test_missing_field(self):
# https://github.com/numpy/numpy/issues/4806
arr = np.zeros((3,), dtype=[('x', int), ('y', int)])
assert_raises(ValueError, lambda: arr[['nofield']])
def test_find_duplicate():
l1 = [1, 2, 3, 4, 5, 6]
assert_(np.rec.find_duplicate(l1) == [])
l2 = [1, 2, 1, 4, 5, 6]
assert_(np.rec.find_duplicate(l2) == [1])
l3 = [1, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [1, 2])
l3 = [2, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [2, 1])
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause |
mu-editor/mu | mu/app.py | 1 | 12651 | """
Mu - a "micro" Python editor for beginner programmers.
Copyright (c) 2015-2017 Nicholas H.Tollervey and others (see the AUTHORS file).
Based upon work done for Puppy IDE by Dan Pope, Nicholas Tollervey and Damien
George.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
from logging.handlers import TimedRotatingFileHandler
import os
import time
import platform
import traceback
import sys
import urllib
import webbrowser
import base64
from PyQt5.QtCore import (
Qt,
QEventLoop,
QThread,
QObject,
pyqtSignal,
)
from PyQt5.QtWidgets import QApplication, QSplashScreen
from . import i18n
from .virtual_environment import venv, logger as vlogger
from . import __version__
from .logic import Editor, LOG_FILE, LOG_DIR, ENCODING
from .interface import Window
from .resources import load_icon, load_movie, load_pixmap
from .modes import (
PythonMode,
CircuitPythonMode,
MicrobitMode,
DebugMode,
PyGameZeroMode,
ESPMode,
WebMode,
PyboardMode,
LegoMode,
PicoMode,
)
from .interface.themes import NIGHT_STYLE, DAY_STYLE, CONTRAST_STYLE
from . import settings
class AnimatedSplash(QSplashScreen):
"""
An animated splash screen for gifs. Includes a text area for logging
output.
"""
def __init__(self, animation, parent=None):
"""
Ensure signals are connected and start the animation.
"""
self.log_lines = 4
# To hold only number of log_lines of logs to display.
self.log = []
self.animation = animation
self.animation.frameChanged.connect(self.set_frame)
# Always on top.
super().__init__(
self.animation.currentPixmap(), Qt.WindowStaysOnTopHint
)
# Disable clicks.
self.setEnabled(False)
self.animation.start()
def set_frame(self):
"""
Update the splash screen with the next frame of the animation.
"""
pixmap = self.animation.currentPixmap()
self.setPixmap(pixmap)
self.setMask(pixmap.mask())
def draw_log(self, text):
"""
Draw the log entries onto the splash screen. Will only display the last
self.log_lines number of log entries. The logs will be displayed at the
bottom of the splash screen, justified left.
"""
self.log.append(text)
self.log = self.log[-self.log_lines :]
if self.log:
self.draw_text("\n".join(self.log))
def draw_text(self, text):
"""
Draw text into splash screen.
"""
if text:
self.showMessage(text, Qt.AlignBottom | Qt.AlignLeft)
def failed(self, text):
"""
        Something has gone wrong during start-up, so signal this by displaying
        a helpful message along with instructions for what to do.
"""
self.animation.stop()
pixmap = load_pixmap("splash_fail.png")
self.setPixmap(pixmap)
lines = text.split("\n")
lines.append(
"This screen will close in a few seconds. "
"Then a crash report tool will open in your browser."
)
lines = lines[-12:]
self.draw_text("\n".join(lines))
class StartupWorker(QObject):
"""
A worker class for running blocking tasks on a separate thread during
application start-up.
The animated splash screen will be shown until this thread is finished.
"""
finished = pyqtSignal() # emitted when successfully finished.
failed = pyqtSignal(str) # emitted if finished with an error.
display_text = pyqtSignal(str) # emitted to update the splash text.
def run(self):
"""
Blocking and long running tasks for application startup should be
called from here.
"""
try:
venv.ensure_and_create(self.display_text)
self.finished.emit() # Always called last.
except Exception as ex:
# Catch all exceptions just in case.
# Report the failure, along with a summary to show the user.
stack = traceback.extract_stack()[:-1]
msg = "\n".join(traceback.format_list(stack))
msg += "\n\n" + traceback.format_exc()
self.failed.emit(msg)
# Sleep a while in the thread so the user sees something is wrong.
time.sleep(7)
self.finished.emit()
# Re-raise for crash handler to kick in.
raise ex
finally:
# Always clean up the startup splash/venv logging handlers.
if vlogger.handlers:
handler = vlogger.handlers[0]
vlogger.removeHandler(handler)
def excepthook(*exc_args):
"""
Log exception and exit cleanly.
"""
logging.error("Unrecoverable error", exc_info=(exc_args))
if exc_args[0] != KeyboardInterrupt:
try:
log_file = base64.standard_b64encode(LOG_FILE.encode("utf-8"))
error = base64.standard_b64encode(
"".join(traceback.format_exception(*exc_args)).encode("utf-8")
)[-1800:]
p = platform.uname()
params = {
"v": __version__, # version
"l": str(i18n.language_code), # locale
"p": base64.standard_b64encode(
" ".join(
[p.system, p.release, p.version, p.machine]
).encode("utf-8")
), # platform
"f": log_file, # location of log file
"e": error, # error message
}
args = urllib.parse.urlencode(params)
webbrowser.open("https://codewith.mu/crash/?" + args)
except Exception as e: # The Alamo of crash handling.
logging.error("Failed to report crash", exc_info=e)
sys.__excepthook__(*exc_args)
sys.exit(1)
else: # It's harmless, don't sound the alarm.
sys.exit(0)
def setup_logging():
"""
Configure logging.
"""
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
# set logging format
log_fmt = (
"%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) "
"%(levelname)s: %(message)s"
)
formatter = logging.Formatter(log_fmt)
# define log handlers such as for rotating log files
handler = TimedRotatingFileHandler(
LOG_FILE, when="midnight", backupCount=5, delay=0, encoding=ENCODING
)
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
# set up primary log
log = logging.getLogger()
log.setLevel(logging.DEBUG)
log.addHandler(handler)
# Only enable on-screen logging if the MU_LOG_TO_STDOUT env variable is set
if "MU_LOG_TO_STDOUT" in os.environ:
stdout_handler = logging.StreamHandler()
stdout_handler.setFormatter(formatter)
stdout_handler.setLevel(logging.DEBUG)
log.addHandler(stdout_handler)
else:
sys.excepthook = excepthook
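# A hedged usage note: to mirror the log on the console while developing, set
# the environment variable before launching Mu, e.g. (shell syntax assumed,
# run.py is an assumed entry-point name):
#
#   MU_LOG_TO_STDOUT=1 python run.py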
def setup_modes(editor, view):
"""
Create a simple dictionary to hold instances of the available modes.
*PREMATURE OPTIMIZATION ALERT* This may become more complex in future so
splitting things out here to contain the mess. ;-)
"""
return {
"python": PythonMode(editor, view),
"circuitpython": CircuitPythonMode(editor, view),
"microbit": MicrobitMode(editor, view),
"esp": ESPMode(editor, view),
"web": WebMode(editor, view),
"pyboard": PyboardMode(editor, view),
"debugger": DebugMode(editor, view),
"pygamezero": PyGameZeroMode(editor, view),
"lego": LegoMode(editor, view),
"pico": PicoMode(editor, view),
}
def run():
"""
Creates all the top-level assets for the application, sets things up and
then runs the application. Specific tasks include:
- set up logging
- create an application object
- create an editor window and status bar
- display a splash screen while starting
- close the splash screen after startup timer ends
"""
setup_logging()
logging.info("\n\n-----------------\n\nStarting Mu {}".format(__version__))
logging.info(platform.uname())
logging.info("Platform: {}".format(platform.platform()))
logging.info("Python path: {}".format(sys.path))
logging.info("Language code: {}".format(i18n.language_code))
#
# Load settings from known locations and register them for
# autosave
#
settings.init()
# Images (such as toolbar icons) aren't scaled nicely on retina/4k displays
# unless this flag is set
os.environ["QT_AUTO_SCREEN_SCALE_FACTOR"] = "1"
if hasattr(Qt, "AA_EnableHighDpiScaling"):
QApplication.setAttribute(Qt.AA_EnableHighDpiScaling)
QApplication.setAttribute(Qt.AA_UseHighDpiPixmaps)
# An issue in PyQt5 v5.13.2 to v5.15.1 makes PyQt5 application
# hang on Mac OS 11 (Big Sur)
# Setting this environment variable fixes the problem.
# See issue #1147 for more information
os.environ["QT_MAC_WANTS_LAYER"] = "1"
# The app object is the application running on your computer.
app = QApplication(sys.argv)
# By default PyQt uses the script name (run.py)
app.setApplicationName("mu")
# Set hint as to the .desktop files name
app.setDesktopFileName("mu.codewith.editor")
app.setApplicationVersion(__version__)
app.setAttribute(Qt.AA_DontShowIconsInMenus)
def splash_context():
"""
Function context (to ensure garbage collection) for displaying the
splash screen.
"""
# Display a friendly "splash" icon.
splash = AnimatedSplash(load_movie("splash_screen"))
splash.show()
# Create a blocking thread upon which to run the StartupWorker and which
# will process the events for animating the splash screen.
initLoop = QEventLoop()
thread = QThread()
worker = StartupWorker()
worker.moveToThread(thread)
thread.started.connect(worker.run)
worker.finished.connect(thread.quit)
worker.finished.connect(worker.deleteLater)
worker.display_text.connect(splash.draw_log)
worker.failed.connect(splash.failed)
# Stop the blocking event loop when the thread is finished.
thread.finished.connect(initLoop.quit)
thread.finished.connect(thread.deleteLater)
thread.start()
initLoop.exec() # start processing the pending StartupWorker.
splash.close()
splash.deleteLater()
splash_context()
# Create the "window" we'll be looking at.
editor_window = Window()
@editor_window.load_theme.connect
def load_theme(theme):
if theme == "contrast":
app.setStyleSheet(CONTRAST_STYLE)
elif theme == "night":
app.setStyleSheet(NIGHT_STYLE)
else:
app.setStyleSheet(DAY_STYLE)
# Make sure all windows have the Mu icon as a fallback
app.setWindowIcon(load_icon(editor_window.icon))
# Create the "editor" that'll control the "window".
editor = Editor(view=editor_window)
editor.setup(setup_modes(editor, editor_window))
# Setup the window.
editor_window.closeEvent = editor.quit
editor_window.setup(editor.debug_toggle_breakpoint, editor.theme)
# Connect the various UI elements in the window to the editor.
editor_window.connect_tab_rename(editor.rename_tab, "Ctrl+Shift+S")
editor_window.connect_find_replace(editor.find_replace, "Ctrl+F")
# Connect find again both forward and backward ('Shift+F3')
find_again_handlers = (editor.find_again, editor.find_again_backward)
editor_window.connect_find_again(find_again_handlers, "F3")
editor_window.connect_toggle_comments(editor.toggle_comments, "Ctrl+K")
editor.connect_to_status_bar(editor_window.status_bar)
# Restore the previous session along with files passed by the os
editor.restore_session(sys.argv[1:])
# Stop the program after the application finishes executing.
sys.exit(app.exec_())
| gpl-3.0 |
arunkuttiyara/Arduino | arduino-core/src/processing/app/i18n/python/requests/packages/charade/escprober.py | 2936 | 3187 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord
class EscCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = [
CodingStateMachine(HZSMModel),
CodingStateMachine(ISO2022CNSMModel),
CodingStateMachine(ISO2022JPSMModel),
CodingStateMachine(ISO2022KRSMModel)
]
self.reset()
def reset(self):
CharSetProber.reset(self)
for codingSM in self._mCodingSM:
if not codingSM:
continue
codingSM.active = True
codingSM.reset()
self._mActiveSM = len(self._mCodingSM)
self._mDetectedCharset = None
def get_charset_name(self):
return self._mDetectedCharset
def get_confidence(self):
if self._mDetectedCharset:
return 0.99
else:
return 0.00
def feed(self, aBuf):
for c in aBuf:
# PY3K: aBuf is a byte array, so c is an int, not a byte
for codingSM in self._mCodingSM:
if not codingSM:
continue
if not codingSM.active:
continue
codingState = codingSM.next_state(wrap_ord(c))
if codingState == constants.eError:
codingSM.active = False
self._mActiveSM -= 1
if self._mActiveSM <= 0:
self._mState = constants.eNotMe
return self.get_state()
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
return self.get_state()
return self.get_state()
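# A hedged usage sketch (defined but never called, not part of charade's
# public API): feed a buffer to the prober and read its verdict. The
# escape-sequence bytes are illustrative ISO-2022-JP-style input.
def _example_probe():
    prober = EscCharSetProber()
    prober.feed(b'\x1b$BF|K\\8l\x1b(B')  # bytes resembling ISO-2022-JP text
    if prober.get_charset_name():
        print('%s (confidence %.2f)'
              % (prober.get_charset_name(), prober.get_confidence()))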
| lgpl-2.1 |
tianweidut/ChemToolsWebService | chemistry/models.py | 2 | 7092 | # coding: UTF-8
import datetime
import uuid
from django.db import models
from django.conf import settings
from users.models import UserProfile
from chemistry import (MODEL_ORIGIN_CHOICES,
MODEL_CHOICES, STATUS_CHOICES, MOL_ORIGIN_CHOICES,
ORIGIN_UNDEFINED, STATUS_UNDEFINED, STATUS_WORKING,
ORIGIN_UPLOAD)
import utils
def get_sid():
return str(uuid.uuid4())
class ModelTypeCategory(models.Model):
category = models.CharField(max_length=30, blank=False, unique=True,
choices=MODEL_ORIGIN_CHOICES,
verbose_name=u"Calculate Model Type")
class Meta:
verbose_name = "计算模型类别"
verbose_name_plural = "计算模型类别"
def __unicode__(self):
return self.get_category_display()
class ModelCategory(models.Model):
category = models.CharField(max_length=30, blank=False, unique=True,
choices=MODEL_CHOICES,
verbose_name=u"Calculate Model")
origin_type = models.ForeignKey(ModelTypeCategory, blank=False)
desc = models.TextField(blank=True)
class Meta:
verbose_name = "计算模型"
verbose_name_plural = "计算模型"
def __unicode__(self):
return self.get_category_display()
class StatusCategory(models.Model):
category = models.CharField(max_length=30, blank=False, unique=True,
choices=STATUS_CHOICES,
verbose_name=u"计算状态")
class Meta:
verbose_name = "计算状态"
verbose_name_plural = "计算状态"
def __unicode__(self):
return self.get_category_display()
class FileSourceCategory(models.Model):
category = models.CharField(max_length=30, blank=False, unique=True,
choices=MOL_ORIGIN_CHOICES,
default=ORIGIN_UNDEFINED)
    class Meta:
verbose_name = "文件来源"
verbose_name_plural = "文件来源"
def __unicode__(self):
return self.get_category_display()
class ProcessedFile(models.Model):
"""上传及计算文件对象"""
fid = models.CharField(max_length=50, unique=True, blank=False,
primary_key=True, default=get_sid)
title = models.CharField(max_length=500, blank=False)
file_type = models.CharField(max_length=100, blank=False)
file_obj = models.FileField(upload_to=settings.PROCESS_FILE_PATH)
file_source = models.ForeignKey(FileSourceCategory,
default=lambda: FileSourceCategory.objects.get(category=ORIGIN_UPLOAD))
image = models.FileField(blank=True, null=True,
upload_to=settings.PROCESS_FILE_PATH)
smiles = models.CharField(max_length=2000, blank=True, null=True)
local_search_id = models.IntegerField(blank=True, null=True)
class Meta:
verbose_name = "计算文件"
verbose_name_plural = "计算文件"
def __unicode__(self):
return self.title
class SuiteTask(models.Model):
"""组计算任务"""
sid = models.CharField(unique=True, blank=False, max_length=50,
verbose_name="id", primary_key=True,
default=get_sid)
user = models.ForeignKey(UserProfile, blank=False, verbose_name="user")
total_tasks = models.IntegerField(blank=False, verbose_name="total tasks")
has_finished_tasks = models.IntegerField(blank=False, default=0,
verbose_name="Finished number")
start_time = models.DateTimeField(blank=False, null=True)
end_time = models.DateTimeField(blank=True, null=True)
name = models.CharField(max_length=2000, blank=True)
notes = models.CharField(max_length=5000, blank=True)
status = models.ForeignKey(StatusCategory, blank=False,
default=STATUS_UNDEFINED)
models_str = models.CharField(max_length=2000, blank=True)
models_category_str = models.CharField(max_length=200, blank=True)
result_pdf = models.FileField(blank=True, null=True,
upload_to=settings.PROCESS_FILE_PATH)
email = models.EmailField(blank=True, null=True)
is_hide = models.BooleanField(default=False)
class Meta:
verbose_name = "组计算任务"
verbose_name_plural = "组计算任务"
def __unicode__(self):
return self.sid
class SingleTask(models.Model):
"""单个计算任务"""
sid = models.ForeignKey(SuiteTask, blank=False)
pid = models.CharField(max_length=50, unique=True, blank=False,
primary_key=True, default=get_sid)
temperature = models.FloatField(blank=True, default=-0.0)
humidity = models.FloatField(blank=True, default=-0.0)
other = models.FloatField(blank=True, default=-0.0)
model = models.ForeignKey(ModelCategory, blank=False)
    results = models.TextField(blank=True, null=True)
    result_state = models.CharField(max_length=1000, blank=True, null=True)
status = models.ForeignKey(StatusCategory, blank=False,
default=STATUS_WORKING)
start_time = models.DateTimeField(blank=False, null=True)
end_time = models.DateTimeField(blank=True, null=True)
file_obj = models.ForeignKey(ProcessedFile, blank=False)
result_pdf = models.FileField(blank=True, null=True,
upload_to=settings.PROCESS_FILE_PATH)
is_hide = models.BooleanField(default=False)
class Meta:
verbose_name = "单个计算任务"
verbose_name_plural = "单个计算任务"
def __unicode__(self):
return self.sid.name
class ChemInfoLocal(models.Model):
"""Chemistry database for locally search"""
cas = models.CharField(max_length=200, blank=False, unique=True)
einecs = models.CharField(max_length=2000, blank=False)
einecs_name = models.CharField(max_length=2000, blank=False)
einecs_mf = models.CharField(max_length=2000, blank=False)
frequency = models.IntegerField(blank=False)
positive_atoms = models.IntegerField(blank=False)
negative_atoms = models.IntegerField(blank=False)
formal_charge = models.IntegerField(blank=False)
h_acceptors = models.IntegerField(blank=False)
h_donors = models.IntegerField(blank=False)
molecular_solubility = models.FloatField(blank=False)
alogp = models.FloatField(blank=False)
logd = models.FloatField(blank=False)
molecular_formula = models.CharField(max_length=2000, blank=False)
smiles = models.CharField(max_length=2000, blank=False)
inchl = models.CharField(max_length=2000, blank=False)
molecular_savol = models.FloatField(blank=False)
image = models.FileField(upload_to=settings.SEARCH_IMAGE_PATH, blank=True)
class Meta:
verbose_name = "欧盟既有化学品数据库"
verbose_name_plural = "欧盟既有化学品数据库"
def __unicode__(self):
return self.cas
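# A hedged usage sketch (never executed at import time): wiring up one suite
# with a single task through the ORM. All field values are illustrative
# assumptions.
def _example_create_suite(user, processed_file):
    suite = SuiteTask.objects.create(
        user=user,
        total_tasks=1,
        start_time=datetime.datetime.now(),
        status=StatusCategory.objects.get(category=STATUS_WORKING),
    )
    SingleTask.objects.create(
        sid=suite,
        model=ModelCategory.objects.all()[0],  # assumption: at least one row
        file_obj=processed_file,
        start_time=datetime.datetime.now(),
    )
    return suite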
| agpl-3.0 |
dct2012/chromeos-3.14 | tools/perf/python/twatch.py | 1565 | 1316 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <[email protected]>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, watermark = 1,
sample_id_all = 1,
sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU)
	evsel.open(cpus = cpus, threads = threads)
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
| gpl-2.0 |
guettli/django | django/forms/models.py | 11 | 55823 | """
Helper functions for creating Form classes from Django models
and database field objects.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from itertools import chain
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldError, ImproperlyConfigured, ValidationError,
)
from django.forms.fields import ChoiceField, Field
from django.forms.forms import BaseForm, DeclarativeFieldsMetaclass
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.utils import ErrorList
from django.forms.widgets import (
HiddenInput, MultipleHiddenInput, SelectMultiple,
)
from django.utils import six
from django.utils.encoding import force_text
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext, ugettext_lazy as _
__all__ = (
'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model',
'ModelChoiceField', 'ModelMultipleChoiceField', 'ALL_FIELDS',
'BaseModelFormSet', 'modelformset_factory', 'BaseInlineFormSet',
'inlineformset_factory', 'modelform_factory',
)
ALL_FIELDS = '__all__'
def construct_instance(form, instance, fields=None, exclude=None):
"""
Constructs and returns a model instance from the bound ``form``'s
``cleaned_data``, but does not save the returned instance to the
database.
"""
from django.db import models
opts = instance._meta
cleaned_data = form.cleaned_data
file_field_list = []
for f in opts.fields:
if not f.editable or isinstance(f, models.AutoField) \
or f.name not in cleaned_data:
continue
if fields is not None and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
# Leave defaults for fields that aren't in POST data, except for
# checkbox inputs because they don't appear in POST data if not checked.
if (f.has_default() and
form[f.name].field.widget.value_omitted_from_data(form.data, form.files, form.add_prefix(f.name))):
continue
# Defer saving file-type fields until after the other fields, so a
# callable upload_to can use the values from other fields.
if isinstance(f, models.FileField):
file_field_list.append(f)
else:
f.save_form_data(instance, cleaned_data[f.name])
for f in file_field_list:
f.save_form_data(instance, cleaned_data[f.name])
return instance
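# A hedged usage sketch (illustrative only; ``Author`` and ``AuthorForm`` are
# hypothetical names): construct_instance() is the low-level step behind
# ModelForm.save()::
#
#     form = AuthorForm(request.POST)
#     if form.is_valid():
#         author = construct_instance(form, Author(), fields=['name'])
#         author.name = author.name.title()  # adjust before saving
#         author.save()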
# ModelForms #################################################################
def model_to_dict(instance, fields=None, exclude=None):
"""
Returns a dict containing the data in ``instance`` suitable for passing as
a Form's ``initial`` keyword argument.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned dict.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned dict, even if they are listed in
the ``fields`` argument.
"""
opts = instance._meta
data = {}
for f in chain(opts.concrete_fields, opts.private_fields, opts.many_to_many):
if not getattr(f, 'editable', False):
continue
if fields and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
data[f.name] = f.value_from_object(instance)
return data
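# A hedged usage sketch (``Author`` and ``AuthorForm`` are hypothetical):
# seeding a form with the values of an existing row::
#
#     author = Author.objects.get(pk=1)
#     form = AuthorForm(initial=model_to_dict(author, exclude=['password']))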
def fields_for_model(model, fields=None, exclude=None, widgets=None,
formfield_callback=None, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
field_classes=None):
"""
    Returns an ``OrderedDict`` containing form fields for the given model.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned fields.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned fields, even if they are listed
in the ``fields`` argument.
``widgets`` is a dictionary of model field names mapped to a widget.
``formfield_callback`` is a callable that takes a model field and returns
a form field.
``localized_fields`` is a list of names of fields which should be localized.
``labels`` is a dictionary of model field names mapped to a label.
``help_texts`` is a dictionary of model field names mapped to a help text.
``error_messages`` is a dictionary of model field names mapped to a
dictionary of error messages.
``field_classes`` is a dictionary of model field names mapped to a form
field class.
"""
field_list = []
ignored = []
opts = model._meta
# Avoid circular import
from django.db.models.fields import Field as ModelField
sortable_private_fields = [f for f in opts.private_fields if isinstance(f, ModelField)]
for f in sorted(chain(opts.concrete_fields, sortable_private_fields, opts.many_to_many)):
if not getattr(f, 'editable', False):
if (fields is not None and f.name in fields and
(exclude is None or f.name not in exclude)):
raise FieldError(
"'%s' cannot be specified for %s model form as it is a non-editable field" % (
f.name, model.__name__)
)
continue
if fields is not None and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
kwargs = {}
if widgets and f.name in widgets:
kwargs['widget'] = widgets[f.name]
if localized_fields == ALL_FIELDS or (localized_fields and f.name in localized_fields):
kwargs['localize'] = True
if labels and f.name in labels:
kwargs['label'] = labels[f.name]
if help_texts and f.name in help_texts:
kwargs['help_text'] = help_texts[f.name]
if error_messages and f.name in error_messages:
kwargs['error_messages'] = error_messages[f.name]
if field_classes and f.name in field_classes:
kwargs['form_class'] = field_classes[f.name]
if formfield_callback is None:
formfield = f.formfield(**kwargs)
elif not callable(formfield_callback):
raise TypeError('formfield_callback must be a function or callable')
else:
formfield = formfield_callback(f, **kwargs)
if formfield:
field_list.append((f.name, formfield))
else:
ignored.append(f.name)
field_dict = OrderedDict(field_list)
if fields:
field_dict = OrderedDict(
[(f, field_dict.get(f)) for f in fields
if ((not exclude) or (exclude and f not in exclude)) and (f not in ignored)]
)
return field_dict
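# A hedged usage sketch (``Author`` is hypothetical): pulling form fields
# straight from a model, overriding one widget::
#
#     from django.forms import Textarea
#     fields = fields_for_model(Author, fields=['name', 'bio'],
#                               widgets={'bio': Textarea})
#     # -> OrderedDict mapping field names to form field instances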
class ModelFormOptions(object):
def __init__(self, options=None):
self.model = getattr(options, 'model', None)
self.fields = getattr(options, 'fields', None)
self.exclude = getattr(options, 'exclude', None)
self.widgets = getattr(options, 'widgets', None)
self.localized_fields = getattr(options, 'localized_fields', None)
self.labels = getattr(options, 'labels', None)
self.help_texts = getattr(options, 'help_texts', None)
self.error_messages = getattr(options, 'error_messages', None)
self.field_classes = getattr(options, 'field_classes', None)
class ModelFormMetaclass(DeclarativeFieldsMetaclass):
def __new__(mcs, name, bases, attrs):
base_formfield_callback = None
for b in bases:
if hasattr(b, 'Meta') and hasattr(b.Meta, 'formfield_callback'):
base_formfield_callback = b.Meta.formfield_callback
break
formfield_callback = attrs.pop('formfield_callback', base_formfield_callback)
new_class = super(ModelFormMetaclass, mcs).__new__(mcs, name, bases, attrs)
if bases == (BaseModelForm,):
return new_class
opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None))
# We check if a string was passed to `fields` or `exclude`,
# which is likely to be a mistake where the user typed ('foo') instead
# of ('foo',)
for opt in ['fields', 'exclude', 'localized_fields']:
value = getattr(opts, opt)
if isinstance(value, six.string_types) and value != ALL_FIELDS:
msg = ("%(model)s.Meta.%(opt)s cannot be a string. "
"Did you mean to type: ('%(value)s',)?" % {
'model': new_class.__name__,
'opt': opt,
'value': value,
})
raise TypeError(msg)
if opts.model:
# If a model is defined, extract form fields from it.
if opts.fields is None and opts.exclude is None:
raise ImproperlyConfigured(
"Creating a ModelForm without either the 'fields' attribute "
"or the 'exclude' attribute is prohibited; form %s "
"needs updating." % name
)
if opts.fields == ALL_FIELDS:
# Sentinel for fields_for_model to indicate "get the list of
# fields from the model"
opts.fields = None
fields = fields_for_model(opts.model, opts.fields, opts.exclude,
opts.widgets, formfield_callback,
opts.localized_fields, opts.labels,
opts.help_texts, opts.error_messages,
opts.field_classes)
# make sure opts.fields doesn't specify an invalid field
none_model_fields = [k for k, v in six.iteritems(fields) if not v]
missing_fields = (set(none_model_fields) -
set(new_class.declared_fields.keys()))
if missing_fields:
message = 'Unknown field(s) (%s) specified for %s'
message = message % (', '.join(missing_fields),
opts.model.__name__)
raise FieldError(message)
# Override default model fields with any custom declared ones
# (plus, include all the other declared fields).
fields.update(new_class.declared_fields)
else:
fields = new_class.declared_fields
new_class.base_fields = fields
return new_class
class BaseModelForm(BaseForm):
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
initial=None, error_class=ErrorList, label_suffix=None,
empty_permitted=False, instance=None, use_required_attribute=None):
opts = self._meta
if opts.model is None:
raise ValueError('ModelForm has no model class specified.')
if instance is None:
# if we didn't get an instance, instantiate a new one
self.instance = opts.model()
object_data = {}
else:
self.instance = instance
object_data = model_to_dict(instance, opts.fields, opts.exclude)
# if initial was provided, it should override the values from instance
if initial is not None:
object_data.update(initial)
# self._validate_unique will be set to True by BaseModelForm.clean().
# It is False by default so overriding self.clean() and failing to call
# super will stop validate_unique from being called.
self._validate_unique = False
super(BaseModelForm, self).__init__(
data, files, auto_id, prefix, object_data, error_class,
label_suffix, empty_permitted, use_required_attribute=use_required_attribute,
)
# Apply ``limit_choices_to`` to each field.
for field_name in self.fields:
formfield = self.fields[field_name]
if hasattr(formfield, 'queryset') and hasattr(formfield, 'get_limit_choices_to'):
limit_choices_to = formfield.get_limit_choices_to()
if limit_choices_to is not None:
formfield.queryset = formfield.queryset.complex_filter(limit_choices_to)
def _get_validation_exclusions(self):
"""
For backwards-compatibility, several types of fields need to be
excluded from model validation. See the following tickets for
details: #12507, #12521, #12553
"""
exclude = []
# Build up a list of fields that should be excluded from model field
# validation and unique checks.
for f in self.instance._meta.fields:
field = f.name
# Exclude fields that aren't on the form. The developer may be
# adding these values to the model after form validation.
if field not in self.fields:
exclude.append(f.name)
# Don't perform model validation on fields that were defined
# manually on the form and excluded via the ModelForm's Meta
# class. See #12901.
elif self._meta.fields and field not in self._meta.fields:
exclude.append(f.name)
elif self._meta.exclude and field in self._meta.exclude:
exclude.append(f.name)
# Exclude fields that failed form validation. There's no need for
# the model fields to validate them as well.
elif field in self._errors.keys():
exclude.append(f.name)
# Exclude empty fields that are not required by the form, if the
# underlying model field is required. This keeps the model field
# from raising a required error. Note: don't exclude the field from
# validation if the model field allows blanks. If it does, the blank
# value may be included in a unique check, so cannot be excluded
# from validation.
else:
form_field = self.fields[field]
field_value = self.cleaned_data.get(field)
if not f.blank and not form_field.required and field_value in form_field.empty_values:
exclude.append(f.name)
return exclude
def clean(self):
self._validate_unique = True
return self.cleaned_data
def _update_errors(self, errors):
# Override any validation error messages defined at the model level
# with those defined at the form level.
opts = self._meta
# Allow the model generated by construct_instance() to raise
# ValidationError and have them handled in the same way as others.
if hasattr(errors, 'error_dict'):
error_dict = errors.error_dict
else:
error_dict = {NON_FIELD_ERRORS: errors}
for field, messages in error_dict.items():
if (field == NON_FIELD_ERRORS and opts.error_messages and
NON_FIELD_ERRORS in opts.error_messages):
error_messages = opts.error_messages[NON_FIELD_ERRORS]
elif field in self.fields:
error_messages = self.fields[field].error_messages
else:
continue
for message in messages:
if (isinstance(message, ValidationError) and
message.code in error_messages):
message.message = error_messages[message.code]
self.add_error(None, errors)
def _post_clean(self):
opts = self._meta
exclude = self._get_validation_exclusions()
# Foreign Keys being used to represent inline relationships
# are excluded from basic field value validation. This is for two
# reasons: firstly, the value may not be supplied (#12507; the
# case of providing new values to the admin); secondly the
# object being referred to may not yet fully exist (#12749).
# However, these fields *must* be included in uniqueness checks,
# so this can't be part of _get_validation_exclusions().
for name, field in self.fields.items():
if isinstance(field, InlineForeignKeyField):
exclude.append(name)
try:
self.instance = construct_instance(self, self.instance, opts.fields, opts.exclude)
except ValidationError as e:
self._update_errors(e)
try:
self.instance.full_clean(exclude=exclude, validate_unique=False)
except ValidationError as e:
self._update_errors(e)
# Validate uniqueness if needed.
if self._validate_unique:
self.validate_unique()
def validate_unique(self):
"""
Calls the instance's validate_unique() method and updates the form's
validation errors if any were raised.
"""
exclude = self._get_validation_exclusions()
try:
self.instance.validate_unique(exclude=exclude)
except ValidationError as e:
self._update_errors(e)
def _save_m2m(self):
"""
Save the many-to-many fields and generic relations for this form.
"""
cleaned_data = self.cleaned_data
exclude = self._meta.exclude
fields = self._meta.fields
opts = self.instance._meta
# Note that for historical reasons we want to include also
# private_fields here. (GenericRelation was previously a fake
# m2m field).
for f in chain(opts.many_to_many, opts.private_fields):
if not hasattr(f, 'save_form_data'):
continue
if fields and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
if f.name in cleaned_data:
f.save_form_data(self.instance, cleaned_data[f.name])
def save(self, commit=True):
"""
Save this form's self.instance object if commit=True. Otherwise, add
a save_m2m() method to the form which can be called after the instance
is saved manually at a later time. Return the model instance.
"""
if self.errors:
raise ValueError(
"The %s could not be %s because the data didn't validate." % (
self.instance._meta.object_name,
'created' if self.instance._state.adding else 'changed',
)
)
if commit:
# If committing, save the instance and the m2m data immediately.
self.instance.save()
self._save_m2m()
else:
# If not committing, add a method to the form to allow deferred
# saving of m2m data.
self.save_m2m = self._save_m2m
return self.instance
save.alters_data = True
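# A hedged usage sketch of the commit=False pattern described in save() above
# (``ArticleForm`` is hypothetical)::
#
#     form = ArticleForm(request.POST)
#     article = form.save(commit=False)  # nothing written to the DB yet
#     article.author = request.user      # fill fields the form doesn't cover
#     article.save()
#     form.save_m2m()                    # persist many-to-many data last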
class ModelForm(six.with_metaclass(ModelFormMetaclass, BaseModelForm)):
pass
def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
formfield_callback=None, widgets=None, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
field_classes=None):
"""
Returns a ModelForm containing form fields for the given model.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned fields. If omitted or '__all__',
all fields will be used.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned fields, even if they are listed
in the ``fields`` argument.
``widgets`` is a dictionary of model field names mapped to a widget.
``localized_fields`` is a list of names of fields which should be localized.
``formfield_callback`` is a callable that takes a model field and returns
a form field.
``labels`` is a dictionary of model field names mapped to a label.
``help_texts`` is a dictionary of model field names mapped to a help text.
``error_messages`` is a dictionary of model field names mapped to a
dictionary of error messages.
``field_classes`` is a dictionary of model field names mapped to a form
field class.
"""
# Create the inner Meta class. FIXME: ideally, we should be able to
# construct a ModelForm without creating and passing in a temporary
# inner class.
# Build up a list of attributes that the Meta object will have.
attrs = {'model': model}
if fields is not None:
attrs['fields'] = fields
if exclude is not None:
attrs['exclude'] = exclude
if widgets is not None:
attrs['widgets'] = widgets
if localized_fields is not None:
attrs['localized_fields'] = localized_fields
if labels is not None:
attrs['labels'] = labels
if help_texts is not None:
attrs['help_texts'] = help_texts
if error_messages is not None:
attrs['error_messages'] = error_messages
if field_classes is not None:
attrs['field_classes'] = field_classes
# If parent form class already has an inner Meta, the Meta we're
# creating needs to inherit from the parent's inner meta.
parent = (object,)
if hasattr(form, 'Meta'):
parent = (form.Meta, object)
Meta = type(str('Meta'), parent, attrs)
if formfield_callback:
Meta.formfield_callback = staticmethod(formfield_callback)
# Give this new form class a reasonable name.
class_name = model.__name__ + str('Form')
# Class attributes for the new form class.
form_class_attrs = {
'Meta': Meta,
'formfield_callback': formfield_callback
}
if (getattr(Meta, 'fields', None) is None and
getattr(Meta, 'exclude', None) is None):
raise ImproperlyConfigured(
"Calling modelform_factory without defining 'fields' or "
"'exclude' explicitly is prohibited."
)
# Instantiate type(form) in order to use the same metaclass as form.
return type(form)(class_name, (form,), form_class_attrs)
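# A hedged usage sketch (``Book`` is hypothetical): building a ModelForm
# class on the fly::
#
#     BookForm = modelform_factory(Book, fields=['title', 'isbn'],
#                                  labels={'isbn': 'ISBN'})
#     form = BookForm(request.POST or None)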
# ModelFormSets ##############################################################
class BaseModelFormSet(BaseFormSet):
"""
A ``FormSet`` for editing a queryset and/or adding new objects to it.
"""
model = None
# Set of fields that must be unique among forms of this set.
unique_fields = set()
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
queryset=None, **kwargs):
self.queryset = queryset
self.initial_extra = kwargs.pop('initial', None)
defaults = {'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix}
defaults.update(kwargs)
super(BaseModelFormSet, self).__init__(**defaults)
def initial_form_count(self):
"""Returns the number of forms that are required in this FormSet."""
if not (self.data or self.files):
return len(self.get_queryset())
return super(BaseModelFormSet, self).initial_form_count()
def _existing_object(self, pk):
if not hasattr(self, '_object_dict'):
self._object_dict = {o.pk: o for o in self.get_queryset()}
return self._object_dict.get(pk)
def _get_to_python(self, field):
"""
If the field is a related field, fetch the concrete field's (that
is, the ultimate pointed-to field's) to_python.
"""
while field.remote_field is not None:
field = field.remote_field.get_related_field()
return field.to_python
def _construct_form(self, i, **kwargs):
if self.is_bound and i < self.initial_form_count():
pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
pk = self.data[pk_key]
pk_field = self.model._meta.pk
to_python = self._get_to_python(pk_field)
pk = to_python(pk)
kwargs['instance'] = self._existing_object(pk)
if i < self.initial_form_count() and 'instance' not in kwargs:
kwargs['instance'] = self.get_queryset()[i]
if i >= self.initial_form_count() and self.initial_extra:
# Set initial values for extra forms
try:
kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
except IndexError:
pass
return super(BaseModelFormSet, self)._construct_form(i, **kwargs)
def get_queryset(self):
if not hasattr(self, '_queryset'):
if self.queryset is not None:
qs = self.queryset
else:
qs = self.model._default_manager.get_queryset()
# If the queryset isn't already ordered we need to add an
# artificial ordering here to make sure that all formsets
# constructed from this queryset have the same form order.
if not qs.ordered:
qs = qs.order_by(self.model._meta.pk.name)
# Removed queryset limiting here. As per discussion re: #13023
# on django-dev, max_num should not prevent existing
# related objects/inlines from being displayed.
self._queryset = qs
return self._queryset
def save_new(self, form, commit=True):
"""Saves and returns a new model instance for the given form."""
return form.save(commit=commit)
def save_existing(self, form, instance, commit=True):
"""Saves and returns an existing model instance for the given form."""
return form.save(commit=commit)
def delete_existing(self, obj, commit=True):
"""Deletes an existing model instance."""
if commit:
obj.delete()
def save(self, commit=True):
"""Saves model instances for every form, adding and changing instances
as necessary, and returns the list of instances.
"""
if not commit:
self.saved_forms = []
def save_m2m():
for form in self.saved_forms:
form.save_m2m()
self.save_m2m = save_m2m
return self.save_existing_objects(commit) + self.save_new_objects(commit)
save.alters_data = True
def clean(self):
self.validate_unique()
def validate_unique(self):
# Collect unique_checks and date_checks to run from all the forms.
all_unique_checks = set()
all_date_checks = set()
forms_to_delete = self.deleted_forms
valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
for form in valid_forms:
exclude = form._get_validation_exclusions()
unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude)
all_unique_checks = all_unique_checks.union(set(unique_checks))
all_date_checks = all_date_checks.union(set(date_checks))
errors = []
# Do each of the unique checks (unique and unique_together)
for uclass, unique_check in all_unique_checks:
seen_data = set()
for form in valid_forms:
# Get the data for the set of fields that must be unique among the forms.
row_data = (
field if field in self.unique_fields else form.cleaned_data[field]
for field in unique_check if field in form.cleaned_data
)
# Reduce Model instances to their primary key values
row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
for d in row_data)
if row_data and None not in row_data:
# if we've already seen it then we have a uniqueness failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_unique_error_message(unique_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
for field in unique_check:
if field in form.cleaned_data:
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(row_data)
# iterate over each of the date checks now
for date_check in all_date_checks:
seen_data = set()
uclass, lookup, field, unique_for = date_check
for form in valid_forms:
# see if we have data for both fields
if (form.cleaned_data and form.cleaned_data[field] is not None and
form.cleaned_data[unique_for] is not None):
# if it's a date lookup we need to get the data for all the fields
if lookup == 'date':
date = form.cleaned_data[unique_for]
date_data = (date.year, date.month, date.day)
# otherwise it's just the attribute on the date/datetime
# object
else:
date_data = (getattr(form.cleaned_data[unique_for], lookup),)
data = (form.cleaned_data[field],) + date_data
# if we've already seen it then we have a uniqueness failure
if data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_date_error_message(date_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(data)
if errors:
raise ValidationError(errors)
def get_unique_error_message(self, unique_check):
if len(unique_check) == 1:
return ugettext("Please correct the duplicate data for %(field)s.") % {
"field": unique_check[0],
}
else:
return ugettext("Please correct the duplicate data for %(field)s, which must be unique.") % {
"field": get_text_list(unique_check, six.text_type(_("and"))),
}
def get_date_error_message(self, date_check):
return ugettext(
"Please correct the duplicate data for %(field_name)s "
"which must be unique for the %(lookup)s in %(date_field)s."
) % {
'field_name': date_check[2],
'date_field': date_check[3],
'lookup': six.text_type(date_check[1]),
}
def get_form_error(self):
return ugettext("Please correct the duplicate values below.")
def save_existing_objects(self, commit=True):
self.changed_objects = []
self.deleted_objects = []
if not self.initial_forms:
return []
saved_instances = []
forms_to_delete = self.deleted_forms
for form in self.initial_forms:
obj = form.instance
if form in forms_to_delete:
# If the pk is None, it means that the object can't be
# deleted again. Possible reason for this is that the
# object was already deleted from the DB. Refs #14877.
if obj.pk is None:
continue
self.deleted_objects.append(obj)
self.delete_existing(obj, commit=commit)
elif form.has_changed():
self.changed_objects.append((obj, form.changed_data))
saved_instances.append(self.save_existing(form, obj, commit=commit))
if not commit:
self.saved_forms.append(form)
return saved_instances
def save_new_objects(self, commit=True):
self.new_objects = []
for form in self.extra_forms:
if not form.has_changed():
continue
# If someone has marked an add form for deletion, don't save the
# object.
if self.can_delete and self._should_delete_form(form):
continue
self.new_objects.append(self.save_new(form, commit=commit))
if not commit:
self.saved_forms.append(form)
return self.new_objects
def add_fields(self, form, index):
"""Add a hidden field for the object's primary key."""
from django.db.models import AutoField, OneToOneField, ForeignKey
self._pk_field = pk = self.model._meta.pk
# If a pk isn't editable, then it won't be on the form, so we need to
# add it here so we can tell which object is which when we get the
        # data back. Generally, pk.editable should be False, but for some
        # reason the editable attribute of auto_created pk fields and
        # AutoFields is True, so check for that as well.
def pk_is_not_editable(pk):
return (
(not pk.editable) or (pk.auto_created or isinstance(pk, AutoField)) or (
pk.remote_field and pk.remote_field.parent_link and
pk_is_not_editable(pk.remote_field.model._meta.pk)
)
)
if pk_is_not_editable(pk) or pk.name not in form.fields:
if form.is_bound:
# If we're adding the related instance, ignore its primary key
# as it could be an auto-generated default which isn't actually
# in the database.
pk_value = None if form.instance._state.adding else form.instance.pk
else:
try:
if index is not None:
pk_value = self.get_queryset()[index].pk
else:
pk_value = None
except IndexError:
pk_value = None
if isinstance(pk, OneToOneField) or isinstance(pk, ForeignKey):
qs = pk.remote_field.model._default_manager.get_queryset()
else:
qs = self.model._default_manager.get_queryset()
qs = qs.using(form.instance._state.db)
if form._meta.widgets:
widget = form._meta.widgets.get(self._pk_field.name, HiddenInput)
else:
widget = HiddenInput
form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=widget)
super(BaseModelFormSet, self).add_fields(form, index)
def modelformset_factory(model, form=ModelForm, formfield_callback=None,
formset=BaseModelFormSet, extra=1, can_delete=False,
can_order=False, max_num=None, fields=None, exclude=None,
widgets=None, validate_max=False, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
min_num=None, validate_min=False, field_classes=None):
"""
Returns a FormSet class for the given Django model class.
"""
meta = getattr(form, 'Meta', None)
if (getattr(meta, 'fields', fields) is None and
getattr(meta, 'exclude', exclude) is None):
raise ImproperlyConfigured(
"Calling modelformset_factory without defining 'fields' or "
"'exclude' explicitly is prohibited."
)
form = modelform_factory(model, form=form, fields=fields, exclude=exclude,
formfield_callback=formfield_callback,
widgets=widgets, localized_fields=localized_fields,
labels=labels, help_texts=help_texts,
error_messages=error_messages, field_classes=field_classes)
FormSet = formset_factory(form, formset, extra=extra, min_num=min_num, max_num=max_num,
can_order=can_order, can_delete=can_delete,
validate_min=validate_min, validate_max=validate_max)
FormSet.model = model
return FormSet
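# Illustrative usage sketch (not part of the original module; the ``Author``
# model below is hypothetical):
#
#     AuthorFormSet = modelformset_factory(Author, fields=('name', 'title'))
#     formset = AuthorFormSet(queryset=Author.objects.all())
#
# Omitting both 'fields' and 'exclude' (here and on the form's Meta) raises
# ImproperlyConfigured, per the check above.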
# InlineFormSets #############################################################
class BaseInlineFormSet(BaseModelFormSet):
"""A formset for child objects related to a parent."""
def __init__(self, data=None, files=None, instance=None,
save_as_new=False, prefix=None, queryset=None, **kwargs):
if instance is None:
self.instance = self.fk.remote_field.model()
else:
self.instance = instance
self.save_as_new = save_as_new
if queryset is None:
queryset = self.model._default_manager
if self.instance.pk is not None:
qs = queryset.filter(**{self.fk.name: self.instance})
else:
qs = queryset.none()
self.unique_fields = {self.fk.name}
super(BaseInlineFormSet, self).__init__(data, files, prefix=prefix,
queryset=qs, **kwargs)
# Add the generated field to form._meta.fields if it's defined to make
# sure validation isn't skipped on that field.
if self.form._meta.fields and self.fk.name not in self.form._meta.fields:
if isinstance(self.form._meta.fields, tuple):
self.form._meta.fields = list(self.form._meta.fields)
self.form._meta.fields.append(self.fk.name)
def initial_form_count(self):
if self.save_as_new:
return 0
return super(BaseInlineFormSet, self).initial_form_count()
def _construct_form(self, i, **kwargs):
form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs)
if self.save_as_new:
# Remove the primary key from the form's data; we are only
# creating new instances.
form.data[form.add_prefix(self._pk_field.name)] = None
# Remove the foreign key from the form's data
form.data[form.add_prefix(self.fk.name)] = None
# Set the fk value here so that the form can do its validation.
fk_value = self.instance.pk
if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name:
fk_value = getattr(self.instance, self.fk.remote_field.field_name)
fk_value = getattr(fk_value, 'pk', fk_value)
setattr(form.instance, self.fk.get_attname(), fk_value)
return form
@classmethod
def get_default_prefix(cls):
return cls.fk.remote_field.get_accessor_name(model=cls.model).replace('+', '')
def save_new(self, form, commit=True):
# Ensure the latest copy of the related instance is present on each
# form (it may have been saved after the formset was originally
# instantiated).
setattr(form.instance, self.fk.name, self.instance)
# Use commit=False so we can assign the parent key afterwards, then
# save the object.
obj = form.save(commit=False)
pk_value = getattr(self.instance, self.fk.remote_field.field_name)
setattr(obj, self.fk.get_attname(), getattr(pk_value, 'pk', pk_value))
if commit:
obj.save()
# form.save_m2m() can be called via the formset later on if commit=False
if commit and hasattr(form, 'save_m2m'):
form.save_m2m()
return obj
def add_fields(self, form, index):
super(BaseInlineFormSet, self).add_fields(form, index)
if self._pk_field == self.fk:
name = self._pk_field.name
kwargs = {'pk_field': True}
else:
# The foreign key field might not be on the form, so we poke at the
# Model field to get the label, since we need that for error messages.
name = self.fk.name
kwargs = {
'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name))
}
if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name:
kwargs['to_field'] = self.fk.remote_field.field_name
# If we're adding a new object, ignore a parent's auto-generated key
# as it will be regenerated on the save request.
if self.instance._state.adding:
if kwargs.get('to_field') is not None:
to_field = self.instance._meta.get_field(kwargs['to_field'])
else:
to_field = self.instance._meta.pk
if to_field.has_default():
setattr(self.instance, to_field.attname, None)
form.fields[name] = InlineForeignKeyField(self.instance, **kwargs)
def get_unique_error_message(self, unique_check):
unique_check = [field for field in unique_check if field != self.fk.name]
return super(BaseInlineFormSet, self).get_unique_error_message(unique_check)
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
"""
Finds and returns the ForeignKey from model to parent if there is one
(returns None if can_fail is True and no such field exists). If fk_name is
provided, assume it is the name of the ForeignKey field. Unless can_fail is
True, an exception is raised if there is no ForeignKey from model to
parent_model.
"""
# avoid circular import
from django.db.models import ForeignKey
opts = model._meta
if fk_name:
fks_to_parent = [f for f in opts.fields if f.name == fk_name]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
if not isinstance(fk, ForeignKey) or \
(fk.remote_field.model != parent_model and
fk.remote_field.model not in parent_model._meta.get_parent_list()):
raise ValueError(
"fk_name '%s' is not a ForeignKey to '%s'." % (fk_name, parent_model._meta.label)
)
elif len(fks_to_parent) == 0:
raise ValueError(
"'%s' has no field named '%s'." % (model._meta.label, fk_name)
)
else:
# Try to discover what the ForeignKey from model to parent_model is
fks_to_parent = [
f for f in opts.fields
if isinstance(f, ForeignKey) and (
f.remote_field.model == parent_model or
f.remote_field.model in parent_model._meta.get_parent_list()
)
]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
elif len(fks_to_parent) == 0:
if can_fail:
return
raise ValueError(
"'%s' has no ForeignKey to '%s'." % (
model._meta.label,
parent_model._meta.label,
)
)
else:
raise ValueError(
"'%s' has more than one ForeignKey to '%s'." % (
model._meta.label,
parent_model._meta.label,
)
)
return fk
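# Hedged example of the discovery logic above, assuming hypothetical
# ``Author`` and ``Book`` models where Book has a single ForeignKey to Author:
#
#     fk = _get_foreign_key(Author, Book)                     # discovered
#     fk = _get_foreign_key(Author, Book, fk_name='author')   # explicit
#
# With zero or multiple candidate ForeignKeys (and can_fail=False), a
# ValueError is raised as implemented above.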
def inlineformset_factory(parent_model, model, form=ModelForm,
formset=BaseInlineFormSet, fk_name=None,
fields=None, exclude=None, extra=3, can_order=False,
can_delete=True, max_num=None, formfield_callback=None,
widgets=None, validate_max=False, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
min_num=None, validate_min=False, field_classes=None):
"""
Returns an ``InlineFormSet`` for the given kwargs.
You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey``
to ``parent_model``.
"""
fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
# enforce a max_num=1 when the foreign key to the parent model is unique.
if fk.unique:
max_num = 1
kwargs = {
'form': form,
'formfield_callback': formfield_callback,
'formset': formset,
'extra': extra,
'can_delete': can_delete,
'can_order': can_order,
'fields': fields,
'exclude': exclude,
'min_num': min_num,
'max_num': max_num,
'widgets': widgets,
'validate_min': validate_min,
'validate_max': validate_max,
'localized_fields': localized_fields,
'labels': labels,
'help_texts': help_texts,
'error_messages': error_messages,
'field_classes': field_classes,
}
FormSet = modelformset_factory(model, **kwargs)
FormSet.fk = fk
return FormSet
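# Illustrative usage sketch (assumed ``Author``/``Book`` models, not part of
# the original module):
#
#     BookFormSet = inlineformset_factory(Author, Book, fields=('title',))
#     author = Author.objects.get(pk=1)
#     formset = BookFormSet(instance=author)
#
# fk_name is only required when Book has more than one ForeignKey to Author,
# since _get_foreign_key() resolves the parent link automatically otherwise.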
# Fields #####################################################################
class InlineForeignKeyField(Field):
"""
A basic integer field that validates the given value against a given
parent instance in an inline.
"""
widget = HiddenInput
default_error_messages = {
'invalid_choice': _('The inline foreign key did not match the parent instance primary key.'),
}
def __init__(self, parent_instance, *args, **kwargs):
self.parent_instance = parent_instance
self.pk_field = kwargs.pop("pk_field", False)
self.to_field = kwargs.pop("to_field", None)
if self.parent_instance is not None:
if self.to_field:
kwargs["initial"] = getattr(self.parent_instance, self.to_field)
else:
kwargs["initial"] = self.parent_instance.pk
kwargs["required"] = False
super(InlineForeignKeyField, self).__init__(*args, **kwargs)
def clean(self, value):
if value in self.empty_values:
if self.pk_field:
return None
# if there is no value, act as we did before.
return self.parent_instance
# ensure that we compare the values as equal types.
if self.to_field:
orig = getattr(self.parent_instance, self.to_field)
else:
orig = self.parent_instance.pk
if force_text(value) != force_text(orig):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return self.parent_instance
def has_changed(self, initial, data):
return False
class ModelChoiceIterator(object):
def __init__(self, field):
self.field = field
self.queryset = field.queryset
def __iter__(self):
if self.field.empty_label is not None:
yield ("", self.field.empty_label)
queryset = self.queryset.all()
# Can't use iterator() when queryset uses prefetch_related()
if not queryset._prefetch_related_lookups:
queryset = queryset.iterator()
for obj in queryset:
yield self.choice(obj)
def __len__(self):
return (len(self.queryset) + (1 if self.field.empty_label is not None else 0))
def choice(self, obj):
return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
class ModelChoiceField(ChoiceField):
"""A ChoiceField whose choices are a model QuerySet."""
# This class is a subclass of ChoiceField for purity, but it doesn't
# actually use any of ChoiceField's implementation.
default_error_messages = {
'invalid_choice': _('Select a valid choice. That choice is not one of'
' the available choices.'),
}
iterator = ModelChoiceIterator
def __init__(self, queryset, empty_label="---------",
required=True, widget=None, label=None, initial=None,
help_text='', to_field_name=None, limit_choices_to=None,
*args, **kwargs):
if required and (initial is not None):
self.empty_label = None
else:
self.empty_label = empty_label
# Call Field instead of ChoiceField __init__() because we don't need
# ChoiceField.__init__().
Field.__init__(self, required, widget, label, initial, help_text,
*args, **kwargs)
self.queryset = queryset
self.limit_choices_to = limit_choices_to # limit the queryset later.
self.to_field_name = to_field_name
def get_limit_choices_to(self):
"""
Returns ``limit_choices_to`` for this form field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.limit_choices_to):
return self.limit_choices_to()
return self.limit_choices_to
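# Minimal sketch of the callable form (the ``Article`` model is assumed):
#
#     field = ModelChoiceField(
#         queryset=Article.objects.all(),
#         limit_choices_to=lambda: {'published': True},
#     )
#     field.get_limit_choices_to()   # -> {'published': True}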
def __deepcopy__(self, memo):
result = super(ChoiceField, self).__deepcopy__(memo)
# Need to force a new ModelChoiceIterator to be created, bug #11183
result.queryset = result.queryset
return result
def _get_queryset(self):
return self._queryset
def _set_queryset(self, queryset):
self._queryset = queryset
self.widget.choices = self.choices
queryset = property(_get_queryset, _set_queryset)
# This method is used by ModelChoiceIterator to create object labels.
# Override it to customize the label.
def label_from_instance(self, obj):
"""
This method is used to convert objects into strings; it's used to
generate the labels for the choices presented by this object. Subclasses
can override this method to customize the display of the choices.
"""
return force_text(obj)
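# A common override, sketched with assumed ``name`` and ``email`` attributes
# on the model instances:
#
#     class AuthorChoiceField(ModelChoiceField):
#         def label_from_instance(self, obj):
#             return "%s <%s>" % (obj.name, obj.email)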
def _get_choices(self):
# If self._choices is set, then somebody must have manually set
# the property self.choices. In this case, just return self._choices.
if hasattr(self, '_choices'):
return self._choices
# Otherwise, execute the QuerySet in self.queryset to determine the
# choices dynamically. Return a fresh ModelChoiceIterator that has not been
# consumed. Note that we're instantiating a new ModelChoiceIterator *each*
# time _get_choices() is called (and, thus, each time self.choices is
# accessed) so that we can ensure the QuerySet has not been consumed. This
# construct might look complicated but it allows for lazy evaluation of
# the queryset.
return self.iterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
def prepare_value(self, value):
if hasattr(value, '_meta'):
if self.to_field_name:
return value.serializable_value(self.to_field_name)
else:
return value.pk
return super(ModelChoiceField, self).prepare_value(value)
def to_python(self, value):
if value in self.empty_values:
return None
try:
key = self.to_field_name or 'pk'
value = self.queryset.get(**{key: value})
except (ValueError, TypeError, self.queryset.model.DoesNotExist):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return value
def validate(self, value):
return Field.validate(self, value)
def has_changed(self, initial, data):
initial_value = initial if initial is not None else ''
data_value = data if data is not None else ''
return force_text(self.prepare_value(initial_value)) != force_text(data_value)
class ModelMultipleChoiceField(ModelChoiceField):
"""A MultipleChoiceField whose choices are a model QuerySet."""
widget = SelectMultiple
hidden_widget = MultipleHiddenInput
default_error_messages = {
'list': _('Enter a list of values.'),
'invalid_choice': _('Select a valid choice. %(value)s is not one of the'
' available choices.'),
'invalid_pk_value': _('"%(pk)s" is not a valid value for a primary key.')
}
def __init__(self, queryset, required=True, widget=None, label=None,
initial=None, help_text='', *args, **kwargs):
super(ModelMultipleChoiceField, self).__init__(
queryset, None, required, widget, label, initial, help_text,
*args, **kwargs
)
def to_python(self, value):
if not value:
return []
return list(self._check_values(value))
def clean(self, value):
value = self.prepare_value(value)
if self.required and not value:
raise ValidationError(self.error_messages['required'], code='required')
elif not self.required and not value:
return self.queryset.none()
if not isinstance(value, (list, tuple)):
raise ValidationError(self.error_messages['list'], code='list')
qs = self._check_values(value)
# Since this overrides the inherited ModelChoiceField.clean,
# we run custom validators here.
self.run_validators(value)
return qs
def _check_values(self, value):
"""
Given a list of possible PK values, returns a QuerySet of the
corresponding objects. Raises a ValidationError if a given value is
invalid (not a valid PK, not in the queryset, etc.)
"""
key = self.to_field_name or 'pk'
# deduplicate given values to avoid creating many querysets or
# requiring the database backend to deduplicate efficiently.
try:
value = frozenset(value)
except TypeError:
# list of lists isn't hashable, for example
raise ValidationError(
self.error_messages['list'],
code='list',
)
for pk in value:
try:
self.queryset.filter(**{key: pk})
except (ValueError, TypeError):
raise ValidationError(
self.error_messages['invalid_pk_value'],
code='invalid_pk_value',
params={'pk': pk},
)
qs = self.queryset.filter(**{'%s__in' % key: value})
pks = set(force_text(getattr(o, key)) for o in qs)
for val in value:
if force_text(val) not in pks:
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': val},
)
return qs
def prepare_value(self, value):
if (hasattr(value, '__iter__') and
not isinstance(value, six.text_type) and
not hasattr(value, '_meta')):
return [super(ModelMultipleChoiceField, self).prepare_value(v) for v in value]
return super(ModelMultipleChoiceField, self).prepare_value(value)
def has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set(force_text(value) for value in self.prepare_value(initial))
data_set = set(force_text(value) for value in data)
return data_set != initial_set
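# Hedged example of the cleaning path above (the ``Tag`` model is assumed):
#
#     field = ModelMultipleChoiceField(queryset=Tag.objects.all())
#     field.clean(['1', '2'])  # -> QuerySet of the two matching Tag objects
#     field.clean('1')         # -> ValidationError('list'): not a list/tuple
#     field.clean(['999'])     # -> ValidationError('invalid_choice') if no
#                              #    Tag with that pk is in the queryset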
def modelform_defines_fields(form_class):
return (form_class is not None and (
hasattr(form_class, '_meta') and
(form_class._meta.fields is not None or
form_class._meta.exclude is not None)
))
| bsd-3-clause |
walmis/APMLib | Tools/autotest/apmrover2.py | 37 | 5561 | # drive APMrover2 in SITL
import util, pexpect, sys, time, math, shutil, os
from common import *
from pymavlink import mavutil
import random
# get location of scripts
testdir=os.path.dirname(os.path.realpath(__file__))
#HOME=mavutil.location(-35.362938,149.165085,584,270)
HOME=mavutil.location(40.071374969556928,-105.22978898137808,1583.702759,246)
homeloc = None
def drive_left_circuit(mavproxy, mav):
'''drive a left circuit, 50m on a side'''
mavproxy.send('switch 6\n')
wait_mode(mav, 'MANUAL')
mavproxy.send('rc 3 2000\n')
print("Driving left circuit")
# do 4 turns
for i in range(0,4):
# hard left
print("Starting turn %u" % i)
mavproxy.send('rc 1 1000\n')
if not wait_heading(mav, 270 - (90*i), accuracy=10):
return False
mavproxy.send('rc 1 1500\n')
print("Starting leg %u" % i)
if not wait_distance(mav, 50, accuracy=7):
return False
mavproxy.send('rc 3 1500\n')
print("Circuit complete")
return True
def drive_RTL(mavproxy, mav):
'''drive to home'''
print("Driving home in RTL")
mavproxy.send('switch 3\n')
if not wait_location(mav, homeloc, accuracy=22, timeout=90):
return False
print("RTL Complete")
return True
def setup_rc(mavproxy):
'''setup RC override control'''
for chan in [1,2,3,4,5,6,7]:
mavproxy.send('rc %u 1500\n' % chan)
mavproxy.send('rc 8 1800\n')
def drive_mission(mavproxy, mav, filename):
'''drive a mission from a file'''
global homeloc
print("Driving mission %s" % filename)
mavproxy.send('wp load %s\n' % filename)
mavproxy.expect('flight plan received')
mavproxy.send('wp list\n')
mavproxy.expect('Requesting [0-9]+ waypoints')
mavproxy.send('switch 4\n') # auto mode
mavproxy.send('rc 3 1500\n')
wait_mode(mav, 'AUTO')
if not wait_waypoint(mav, 1, 4, max_dist=5):
return False
wait_mode(mav, 'HOLD')
print("Mission OK")
return True
def drive_APMrover2(viewerip=None, map=False):
'''drive APMrover2 in SITL
you can pass viewerip as an IP address to optionally send fg and
mavproxy packets there as well, for local viewing of the mission in real time
'''
global homeloc
options = '--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --streamrate=10'
if viewerip:
options += " --out=%s:14550" % viewerip
if map:
options += ' --map'
sil = util.start_SIL('APMrover2', wipe=True)
mavproxy = util.start_MAVProxy_SIL('APMrover2', options=options)
mavproxy.expect('Received [0-9]+ parameters')
# setup test parameters
mavproxy.send("param load %s/Rover.parm\n" % testdir)
mavproxy.expect('Loaded [0-9]+ parameters')
# restart with new parms
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sim_cmd = util.reltopdir('Tools/autotest/pysim/sim_rover.py') + ' --rate=50 --home=%f,%f,%u,%u' % (
HOME.lat, HOME.lng, HOME.alt, HOME.heading)
runsim = pexpect.spawn(sim_cmd, logfile=sys.stdout, timeout=10)
runsim.delaybeforesend = 0
util.pexpect_autoclose(runsim)
runsim.expect('Starting at lat')
sil = util.start_SIL('APMrover2')
mavproxy = util.start_MAVProxy_SIL('APMrover2', options=options)
mavproxy.expect('Logging to (\S+)')
logfile = mavproxy.match.group(1)
print("LOGFILE %s" % logfile)
buildlog = util.reltopdir("../buildlogs/APMrover2-test.tlog")
print("buildlog=%s" % buildlog)
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
pass
mavproxy.expect('Received [0-9]+ parameters')
util.expect_setup_callback(mavproxy, expect_callback)
expect_list_clear()
expect_list_extend([runsim, sil, mavproxy])
print("Started simulator")
# get a mavlink connection going
try:
mav = mavutil.mavlink_connection('127.0.0.1:19550', robust_parsing=True)
except Exception, msg:
print("Failed to start mavlink connection on 127.0.0.1:19550" % msg)
raise
mav.message_hooks.append(message_hook)
mav.idle_hooks.append(idle_hook)
failed = False
e = 'None'
try:
print("Waiting for a heartbeat with mavlink protocol %s" % mav.WIRE_PROTOCOL_VERSION)
mav.wait_heartbeat()
print("Setting up RC parameters")
setup_rc(mavproxy)
print("Waiting for GPS fix")
mav.wait_gps_fix()
homeloc = mav.location()
print("Home location: %s" % homeloc)
if not drive_mission(mavproxy, mav, os.path.join(testdir, "rover1.txt")):
print("Failed mission")
failed = True
if not log_download(mavproxy, mav, util.reltopdir("../buildlogs/APMrover2-log.bin")):
print("Failed log download")
failed = True
# if not drive_left_circuit(mavproxy, mav):
# print("Failed left circuit")
# failed = True
# if not drive_RTL(mavproxy, mav):
# print("Failed RTL")
# failed = True
except pexpect.TIMEOUT, e:
print("Failed with timeout")
failed = True
mav.close()
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
util.pexpect_close(runsim)
if os.path.exists('APMrover2-valgrind.log'):
os.chmod('APMrover2-valgrind.log', 0644)
shutil.copy("APMrover2-valgrind.log", util.reltopdir("../buildlogs/APMrover2-valgrind.log"))
if failed:
print("FAILED: %s" % e)
return False
return True
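# Hedged usage sketch (not part of the upstream script): the entry point
# returns True on success, so a standalone run could look like:
#
#     if __name__ == '__main__':
#         ok = drive_APMrover2(viewerip=None, map=False)
#         sys.exit(0 if ok else 1)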
| gpl-3.0 |
havt/odoo | addons/account_anglo_saxon/__openerp__.py | 264 | 2393 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Anglo-Saxon Accounting',
'version': '1.2',
'author': 'OpenERP SA, Veritos',
'website': 'https://www.odoo.com',
'description': """
This module supports the Anglo-Saxon accounting methodology by changing the accounting logic with stock transactions.
=====================================================================================================================
The difference between the Anglo-Saxon accounting countries and the Rhine
(also called Continental accounting) countries is the moment at which the
Cost of Goods Sold versus Cost of Sales is taken. Anglo-Saxon accounting
takes the cost when the sales invoice is created; Continental accounting
takes the cost at the moment the goods are shipped.
This module adds this functionality by using an interim account to store
the value of shipped goods, and contra-books this interim account when
the invoice is created in order to transfer this amount to the debtor or
creditor account. Secondly, price differences between the actual
purchase price and the fixed product standard price are booked on a
separate account.""",
'depends': ['product', 'purchase'],
'category': 'Accounting & Finance',
'demo': [],
'data': ['product_view.xml'],
'test': ['test/anglo_saxon.yml', 'test/anglo_saxon_avg_fifo.yml'],
'auto_install': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
liuzheng712/Misago | misago/threads/tests/test_unreadthreads_view.py | 8 | 2484 | from django.core.urlresolvers import reverse
from django.utils import timezone
from django.utils.translation import ugettext as _
from misago.forums.models import Forum
from misago.users.testutils import UserTestCase, AuthenticatedUserTestCase
from misago.threads import testutils
class AuthenticatedTests(AuthenticatedUserTestCase):
def test_empty_threads_list(self):
"""empty threads list is rendered"""
response = self.client.get(reverse('misago:unread_threads'))
self.assertEqual(response.status_code, 200)
self.assertIn("There are no threads with unread", response.content)
def test_filled_threads_list(self):
"""filled threads list is rendered"""
forum = Forum.objects.all_forums().filter(role="forum")[:1][0]
threads = [testutils.post_thread(forum) for t in xrange(10)]
# only unread tracker threads are shown on the unread list
response = self.client.get(reverse('misago:unread_threads'))
self.assertEqual(response.status_code, 200)
self.assertIn("There are no threads with unread", response.content)
# we'll read and reply to the last five threads
for thread in threads[5:]:
response = self.client.get(thread.get_absolute_url())
testutils.reply_thread(thread, posted_on=timezone.now())
# assert that replied threads show on list
response = self.client.get(reverse('misago:unread_threads'))
self.assertEqual(response.status_code, 200)
for thread in threads[5:]:
self.assertIn(thread.get_absolute_url(), response.content)
for thread in threads[:5]:
self.assertNotIn(thread.get_absolute_url(), response.content)
# clear list
response = self.client.post(reverse('misago:clear_unread_threads'))
self.assertEqual(response.status_code, 302)
response = self.client.get(response['location'])
self.assertEqual(response.status_code, 200)
self.assertIn("There are no threads with unread", response.content)
class AnonymousTests(UserTestCase):
def test_anon_access_to_view(self):
"""anonymous user has no access to unread threads list"""
response = self.client.get(reverse('misago:unread_threads'))
self.assertEqual(response.status_code, 403)
self.assertIn(_("You have to sign in to see your list of "
"threads with unread replies."),
response.content)
| gpl-2.0 |
kidaa/eve | eve/tests/methods/delete.py | 10 | 29272 | from eve.tests import TestBase
from eve.tests.utils import DummyEvent
from eve.tests.test_settings import MONGO_DBNAME
from eve import ETAG
from bson import ObjectId
from eve.utils import ParsedRequest
import simplejson as json
import copy
from eve.methods.delete import deleteitem_internal
class TestDelete(TestBase):
def setUp(self):
super(TestDelete, self).setUp()
# Etag used to delete an item (a contact)
self.etag_headers = [('If-Match', self.item_etag)]
def test_unknown_resource(self):
url = '%s%s/' % (self.unknown_resource_url, self.item_id)
_, status = self.delete(url)
self.assert404(status)
def test_delete_from_resource_endpoint(self):
r, status = self.delete(self.known_resource_url)
self.assert204(status)
r, status = self.parse_response(self.test_client.get(
self.known_resource_url))
self.assert200(status)
self.assertEqual(len(r['_items']), 0)
def test_delete_from_resource_endpoint_write_concern(self):
# should get a 500 since there's no replicaset on the mongod instance
self.domain['contacts']['mongo_write_concern'] = {'w': 2}
_, status = self.delete(self.known_resource_url)
self.assert500(status)
def test_delete_from_resource_endpoint_different_resource(self):
r, status = self.delete(self.different_resource_url)
self.assert204(status)
r, status = self.parse_response(self.test_client.get(
self.different_resource_url))
self.assert200(status)
self.assertEqual(len(r['_items']), 0)
# deletion of 'users' will still leave 'contacts' untouched (same db
# collection)
r, status = self.parse_response(self.test_client.get(
self.known_resource_url))
self.assert200(status)
self.assertEqual(len(r['_items']), 25)
def test_delete_empty_resource(self):
url = '%s%s/' % (self.empty_resource_url, self.item_id)
_, status = self.delete(url)
self.assert404(status)
def test_delete_readonly_resource(self):
_, status = self.delete(self.readonly_id_url)
self.assert405(status)
def test_delete_unknown_item(self):
url = '%s%s/' % (self.known_resource_url, self.unknown_item_id)
_, status = self.delete(url)
self.assert404(status)
def test_delete_ifmatch_missing(self):
_, status = self.delete(self.item_id_url)
self.assert403(status)
def test_delete_ifmatch_disabled(self):
self.app.config['IF_MATCH'] = False
_, status = self.delete(self.item_id_url)
self.assert204(status)
def test_delete_ifmatch_bad_etag(self):
_, status = self.delete(self.item_id_url,
headers=[('If-Match', 'not-quite-right')])
self.assert412(status)
def test_delete(self):
r, status = self.delete(self.item_id_url, headers=self.etag_headers)
self.assert204(status)
r = self.test_client.get(self.item_id_url)
self.assert404(r.status_code)
def test_delete_non_existant(self):
url = self.item_id_url[:-5] + "00000"
r, status = self.delete(url, headers=self.etag_headers)
self.assert404(status)
def test_delete_write_concern(self):
# should get a 500 since there's no replicaset on the mongod instance
self.domain['contacts']['mongo_write_concern'] = {'w': 2}
_, status = self.delete(self.item_id_url,
headers=[('If-Match', self.item_etag)])
self.assert500(status)
def test_delete_different_resource(self):
r, status = self.delete(self.user_id_url,
headers=[('If-Match', self.user_etag)])
self.assert204(status)
r = self.test_client.get(self.user_id_url)
self.assert404(r.status_code)
def test_delete_with_post_override(self):
# POST request with DELETE override turns into a DELETE
headers = [('X-HTTP-Method-Override', 'DELETE'),
('If-Match', self.item_etag)]
r = self.test_client.post(self.item_id_url, data={}, headers=headers)
self.assert204(r.status_code)
def test_delete_subresource(self):
_db = self.connection[MONGO_DBNAME]
# create random contact
fake_contact = self.random_contacts(1)
fake_contact_id = _db.contacts.insert(fake_contact)[0]
# grab the invoice collection count; we will use this later to make sure
# we didn't delete all the invoices in the database. We add one extra invoice
# to make sure that the actual count will never be 1 (which would
# invalidate the test)
_db.invoices.insert({'inv_number': 1})
response, status = self.get('invoices')
invoices = len(response[self.app.config['ITEMS']])
# update first invoice to reference the new contact
_db.invoices.update({'_id': ObjectId(self.invoice_id)},
{'$set': {'person': fake_contact_id}})
# verify that the only document retrieved is referencing the correct
# parent document
response, status = self.get('users/%s/invoices' % fake_contact_id)
person_id = ObjectId(response[self.app.config['ITEMS']][0]['person'])
self.assertEqual(person_id, fake_contact_id)
# delete all documents at the sub-resource endpoint
response, status = self.delete('users/%s/invoices' % fake_contact_id)
self.assert204(status)
# verify that no documents are left at the sub-resource endpoint
response, status = self.get('users/%s/invoices' % fake_contact_id)
self.assertEqual(len(response['_items']), 0)
# verify that other documents in the invoices collection have not been
# deleted
response, status = self.get('invoices')
self.assertEqual(len(response['_items']), invoices - 1)
def test_delete_subresource_item(self):
_db = self.connection[MONGO_DBNAME]
# create random contact
fake_contact = self.random_contacts(1)
fake_contact_id = _db.contacts.insert(fake_contact)[0]
# update first invoice to reference the new contact
_db.invoices.update({'_id': ObjectId(self.invoice_id)},
{'$set': {'person': fake_contact_id}})
# GET all invoices by new contact
response, status = self.get('users/%s/invoices/%s' %
(fake_contact_id, self.invoice_id))
etag = response[ETAG]
headers = [('If-Match', etag)]
response, status = self.delete('users/%s/invoices/%s' %
(fake_contact_id, self.invoice_id),
headers=headers)
self.assert204(status)
def test_deleteitem_internal(self):
# test that deleteitem_internal is available and working properly.
with self.app.test_request_context(self.item_id_url):
r, _, _, status = deleteitem_internal(
self.known_resource, concurrency_check=False,
**{'_id': self.item_id})
self.assert204(status)
r = self.test_client.get(self.item_id_url)
self.assert404(r.status_code)
def delete(self, url, headers=None):
r = self.test_client.delete(url, headers=headers)
return self.parse_response(r)
class TestSoftDelete(TestDelete):
def setUp(self):
super(TestSoftDelete, self).setUp()
# Enable soft delete
self.app.config['SOFT_DELETE'] = True
domain = copy.copy(self.domain)
for resource, settings in domain.items():
# rebuild resource settings for soft delete
del settings['soft_delete']
self.app.register_resource(resource, settings)
# alias for the configured DELETED field name
self.deleted_field = self.app.config['DELETED']
# TestDelete overrides
def test_delete(self):
"""Soft delete should mark an item as deleted and cause subsequent
requests to return 404 Not Found responses. 404s in response to GET
requests should include the document in their body with the _deleted
flag set to True.
"""
r, status = self.delete(self.item_id_url, headers=self.etag_headers)
self.assert204(status)
r = self.test_client.get(self.item_id_url)
data, status = self.parse_response(r)
self.assert404(status)
self.assertEqual(data.get(self.deleted_field), True)
self.assertNotEqual(data.get('_etag'), self.item_etag)
# 404 should still include a status and an error field
self.assertTrue(self.app.config['ERROR'] in data)
def test_deleteitem_internal(self):
"""Deleteitem internal should honor soft delete settings.
"""
# test that deleteitem_internal is available and working properly.
with self.app.test_request_context(self.item_id_url):
r, _, _, status = deleteitem_internal(
self.known_resource, concurrency_check=False,
**{'_id': self.item_id})
self.assert204(status)
r = self.test_client.get(self.item_id_url)
data, status = self.parse_response(r)
self.assert404(status)
self.assertEqual(data.get(self.deleted_field), True)
def test_delete_different_resource(self):
r, status = self.delete(self.user_id_url,
headers=[('If-Match', self.user_etag)])
self.assert204(status)
r = self.test_client.get(self.user_id_url)
data, status = self.parse_response(r)
self.assert404(status)
self.assertEqual(data.get(self.deleted_field), True)
def test_delete_from_resource_endpoint(self):
"""Soft deleting an entire resource should mark each individual item
as deleted, queries to that resource should return no items, and GETs
on any individual items should return 404 responses.
"""
# TestDelete deletes resource at known_resource_url, and confirms
# subsequent queries to the resource return zero items
super(TestSoftDelete, self).test_delete_from_resource_endpoint()
r = self.test_client.get(self.item_id_url)
data, status = self.parse_response(r)
self.assert404(status)
self.assertEqual(data.get(self.deleted_field), True)
# TestSoftDelete-specific tests
def test_restore_softdeleted(self):
"""Sending a PUT or PATCH to a soft deleted document should restore the
document.
"""
def soft_delete_item(etag):
r, status = self.delete(
self.item_id_url, headers=[('If-Match', etag)])
self.assert204(status)
# GET soft deleted etag
return self.test_client.get(self.item_id_url)
# Restore via PATCH
deleted_etag = soft_delete_item(self.item_etag).headers['ETag']
r = self.test_client.patch(
self.item_id_url,
data=json.dumps({}),
headers=[('Content-Type', 'application/json'),
('If-Match', deleted_etag)])
self.assert200(r.status_code)
r = self.test_client.get(self.item_id_url)
self.assert200(r.status_code)
new_etag = r.headers['ETag']
# Restore via PUT
r = soft_delete_item(new_etag)
deleted_etag = r.headers['ETag']
restored_doc = {"ref": "1234567890123456789012345"}
r = self.test_client.put(
self.item_id_url,
data=json.dumps(restored_doc),
headers=[('Content-Type', 'application/json'),
('If-Match', deleted_etag)])
self.assert200(r.status_code)
r = self.test_client.get(self.item_id_url)
self.assert200(r.status_code)
def test_multiple_softdelete(self):
"""After an item has been soft deleted, subsequent DELETEs should
return a 404 Not Found response.
"""
r, status = self.delete(self.item_id_url, headers=self.etag_headers)
self.assert204(status)
# GET soft deleted etag
r = self.test_client.get(self.item_id_url)
new_etag = r.headers['ETag']
# Second soft DELETE should return 404 Not Found
r, status = self.delete(
self.item_id_url, headers=[('If-Match', new_etag)])
self.assert404(status)
def test_softdelete_deleted_field(self):
"""The configured 'deleted' field should be added to all documents to indicate
whether that document has been soft deleted or not.
"""
r = self.test_client.get(self.item_id_url)
data, status = self.parse_response(r)
self.assert200(status)
self.assertEqual(data.get(self.deleted_field), False)
def test_softdelete_show_deleted(self):
"""GETs on resource endpoints should include soft deleted items when
the 'show_deleted' param is included in the query, or when the DELETED
field is explicitly included in the lookup.
"""
r, status = self.delete(self.item_id_url, headers=self.etag_headers)
self.assert204(status)
data, status = self.get(self.known_resource)
after_softdelete_count = data[self.app.config['META']]['total']
self.assertEqual(after_softdelete_count, self.known_resource_count - 1)
data, status = self.get(self.known_resource, query="?show_deleted")
show_deleted_count = data[self.app.config['META']]['total']
self.assertEqual(show_deleted_count, self.known_resource_count)
# Test show_deleted with additional queries
role_query = '?where={"role": "' + self.item['role'] + '"}'
data, status = self.get(self.known_resource, query=role_query)
role_count = data[self.app.config['META']]['total']
data, status = self.get(
self.known_resource, query=role_query + "&show_deleted")
show_deleted_role_count = data[self.app.config['META']]['total']
self.assertEqual(show_deleted_role_count, role_count + 1)
# Test explicit _deleted query
data, status = self.get(
self.known_resource, query='?where={"_deleted": true}')
deleted_query_count = data[self.app.config['META']]['total']
self.assertEqual(deleted_query_count, 1)
def test_softdeleted_embedded_doc(self):
"""Soft deleted documents embedded in other documents should not be
included. They will resolve to None as if the document was actually
deleted.
"""
# Set up and confirm embedded document
_db = self.connection[MONGO_DBNAME]
fake_contact = self.random_contacts(1)
fake_contact_id = _db.contacts.insert(fake_contact)[0]
fake_contact_url = self.known_resource_url + "/" + str(fake_contact_id)
_db.invoices.update({'_id': ObjectId(self.invoice_id)},
{'$set': {'person': fake_contact_id}})
invoices = self.domain['invoices']
invoices['embedding'] = True
invoices['schema']['person']['data_relation']['embeddable'] = True
embedded = '{"person": 1}'
r = self.test_client.get(
self.invoice_id_url + '?embedded=%s' % embedded)
data, status = self.parse_response(r)
self.assert200(status)
self.assertTrue('location' in data['person'])
# Get embedded doc etag so we can delete it
r = self.test_client.get(fake_contact_url)
embedded_contact_etag = r.headers['ETag']
# Delete embedded contact
data, status = self.delete(
fake_contact_url, headers=[('If-Match', embedded_contact_etag)])
self.assert204(status)
# embedded 'person' should now be empty
r = self.test_client.get(
self.invoice_id_url + '?embedded=%s' % embedded)
data, status = self.parse_response(r)
self.assert200(status)
self.assertEqual(data['person'], None)
def test_softdeleted_get_response_skips_embedded_expansion(self):
"""Soft deleted documents should not expand their embedded documents when
returned in a 404 Not Found response. The deleted document data should
reflect the state of the document when it was deleted, and should not
change if still-active embedded documents are updated.
"""
# Confirm embedded document works before delete
_db = self.connection[MONGO_DBNAME]
fake_contact = self.random_contacts(1)
fake_contact_id = _db.contacts.insert(fake_contact)[0]
_db.invoices.update({'_id': ObjectId(self.invoice_id)},
{'$set': {'person': fake_contact_id}})
invoices = self.domain['invoices']
invoices['embedding'] = True
invoices['schema']['person']['data_relation']['embeddable'] = True
embedded = '{"person": 1}'
r = self.test_client.get(
self.invoice_id_url + '?embedded=%s' % embedded)
invoice_etag = r.headers['ETag']
data, status = self.parse_response(r)
self.assert200(status)
self.assertTrue('location' in data['person'])
# Soft delete document
data, status = self.delete(
self.invoice_id_url, headers=[('If-Match', invoice_etag)])
self.assert204(status)
# Document in 404 should not expand person
r = self.test_client.get(
self.invoice_id_url + '?embedded=%s' % embedded)
data, status = self.parse_response(r)
self.assert404(status)
self.assertEqual(data['person'], str(fake_contact_id))
def test_softdelete_caching(self):
"""404 Not Found responses after soft delete should be cacheable
"""
# Soft delete item
r, status = self.delete(self.item_id_url, headers=self.etag_headers)
self.assert204(status)
# delete should have invalidated any previously cached 200 responses
r = self.test_client.get(
self.item_id_url, headers=[('If-None-Match', self.item_etag)])
self.assert404(r.status_code)
post_delete_etag = r.headers['ETag']
# validate cached 404 response data
r = self.test_client.get(
self.item_id_url, headers=[('If-None-Match', post_delete_etag)])
self.assert304(r.status_code)
def test_softdelete_datalayer(self):
"""Soft deleted items should not be returned by find methods in the Eve
data layer unless show_deleted is explicitly configured in the request,
the deleted field is included in the lookup, or the operation is 'raw'.
"""
# Soft delete item
r, status = self.delete(self.item_id_url, headers=self.etag_headers)
self.assert204(status)
with self.app.test_request_context():
# find_one should only return item if a request w/ show_deleted ==
# True is passed or if the deleted field is part of the lookup
req = ParsedRequest()
doc = self.app.data.find_one(
self.known_resource, req, _id=self.item_id)
self.assertEqual(doc, None)
req.show_deleted = True
doc = self.app.data.find_one(
self.known_resource, req, _id=self.item_id)
self.assertNotEqual(doc, None)
self.assertEqual(doc.get(self.deleted_field), True)
req.show_deleted = False
doc = self.app.data.find_one(
self.known_resource, req, _id=self.item_id, _deleted=True)
self.assertNotEqual(doc, None)
self.assertEqual(doc.get(self.deleted_field), True)
# find_one_raw should always return a document, soft deleted or not
doc = self.app.data.find_one_raw(
self.known_resource, _id=ObjectId(self.item_id))
self.assertNotEqual(doc, None)
self.assertEqual(doc.get(self.deleted_field), True)
# find should only return deleted items if a request with
# show_deleted == True is passed or if the deleted field is part of
# the lookup
req.show_deleted = False
docs = self.app.data.find(self.known_resource, req, None)
undeleted_count = docs.count()
req.show_deleted = True
docs = self.app.data.find(self.known_resource, req, None)
with_deleted_count = docs.count()
self.assertEqual(undeleted_count, with_deleted_count - 1)
req.show_deleted = False
docs = self.app.data.find(
self.known_resource, req, {self.deleted_field: True})
deleted_count = docs.count()
self.assertEqual(deleted_count, 1)
# find_list_of_ids will return deleted documents if given their id
docs = self.app.data.find_list_of_ids(
self.known_resource, [ObjectId(self.item_id)])
self.assertEqual(docs.count(), 1)
def test_softdelete_db_fields(self):
"""Documents created when soft delete is enabled should include and
maintain the DELETED field in the db.
"""
r = self.test_client.post(self.known_resource_url, data={
'ref': "1234567890123456789054321"
})
data, status = self.parse_response(r)
self.assert201(status)
new_item_id = data[self.app.config['ID_FIELD']]
new_item_etag = data[self.app.config['ETAG']]
with self.app.test_request_context():
db_stored_doc = self.app.data.find_one_raw(
self.known_resource, _id=ObjectId(new_item_id))
self.assertTrue(self.deleted_field in db_stored_doc)
# PUT updates to the document should maintain the DELETED field
r = self.test_client.put(
self.known_resource_url + "/" + new_item_id,
data={'ref': '5432109876543210987654321'},
headers=[('If-Match', new_item_etag)]
)
data, status = self.parse_response(r)
self.assert200(status)
new_item_etag = data[self.app.config['ETAG']]
with self.app.test_request_context():
db_stored_doc = self.app.data.find_one_raw(
self.known_resource, _id=ObjectId(new_item_id))
self.assertTrue(self.deleted_field in db_stored_doc)
# PATCH updates to the document should maintain the DELETED field
r = self.test_client.patch(
self.known_resource_url + "/" + new_item_id,
data={'ref': '5555544444333332222211111'},
headers=[('If-Match', new_item_etag)]
)
self.assert200(r.status_code)
with self.app.test_request_context():
db_stored_doc = self.app.data.find_one_raw(
self.known_resource, _id=ObjectId(new_item_id))
self.assertTrue(self.deleted_field in db_stored_doc)
class TestResourceSpecificSoftDelete(TestBase):
def setUp(self):
super(TestResourceSpecificSoftDelete, self).setUp()
# Enable soft delete for one resource
domain = copy.copy(self.domain)
resource_settings = domain[self.known_resource]
resource_settings['soft_delete'] = True
self.app.register_resource(self.known_resource, resource_settings)
self.deleted_field = self.app.config['DELETED']
# Etag used to delete an item (a contact)
self.etag_headers = [('If-Match', self.item_etag)]
def test_resource_specific_softdelete(self):
""" Resource level soft delete configuration should override
application configuration.
"""
# Confirm soft delete is enabled for known resource.
data, status = self.delete(self.item_id_url, headers=self.etag_headers)
self.assert204(status)
r = self.test_client.get(self.item_id_url)
data, status = self.parse_response(r)
self.assert404(status)
self.assertEqual(data.get(self.deleted_field), True)
# DELETE on other resources should be hard deletes
data, status = self.delete(
self.invoice_id_url, headers=[('If-Match', self.invoice_etag)])
self.assert204(status)
r = self.test_client.get(self.invoice_id_url)
data, status = self.parse_response(r)
self.assert404(status)
self.assertTrue(self.deleted_field not in data)
class TestDeleteEvents(TestBase):
def test_on_pre_DELETE_for_item(self):
devent = DummyEvent(self.before_delete)
self.app.on_pre_DELETE += devent
self.delete_item()
self.assertEqual('contacts', devent.called[0])
self.assertFalse(devent.called[1] is None)
def test_on_pre_DELETE_resource_for_item(self):
devent = DummyEvent(self.before_delete)
self.app.on_pre_DELETE_contacts += devent
self.delete_item()
self.assertFalse(devent.called is None)
def test_on_pre_DELETE_for_resource(self):
devent = DummyEvent(self.before_delete)
self.app.on_pre_DELETE += devent
self.delete_resource()
self.assertFalse(devent.called is None)
def test_on_pre_DELETE_resource_for_resource(self):
devent = DummyEvent(self.before_delete)
self.app.on_pre_DELETE_contacts += devent
self.delete_resource()
self.assertFalse(devent.called is None)
def test_on_pre_DELETE_dynamic_filter(self):
def filter_this(resource, request, lookup):
lookup["_id"] = self.unknown_item_id
self.app.on_pre_DELETE += filter_this
# Would normally delete the known document; will return 404 instead.
r, s = self.parse_response(self.delete_item())
self.assert404(s)
def test_on_post_DELETE_for_item(self):
devent = DummyEvent(self.after_delete)
self.app.on_post_DELETE += devent
self.delete_item()
self.assertFalse(devent.called is None)
def test_on_post_DELETE_resource_for_item(self):
devent = DummyEvent(self.after_delete)
self.app.on_post_DELETE_contacts += devent
self.delete_item()
self.assertFalse(devent.called is None)
def test_on_post_DELETE_for_resource(self):
devent = DummyEvent(self.after_delete)
self.app.on_post_DELETE += devent
self.delete_resource()
self.assertFalse(devent.called is None)
def test_on_post_DELETE_resource_for_resource(self):
devent = DummyEvent(self.after_delete)
self.app.on_post_DELETE_contacts += devent
self.delete_resource()
self.assertFalse(devent.called is None)
def test_on_delete_resource(self):
devent = DummyEvent(self.before_delete)
self.app.on_delete_resource += devent
self.delete_resource()
self.assertEqual(('contacts',), devent.called)
def test_on_delete_resource_contacts(self):
devent = DummyEvent(self.before_delete)
self.app.on_delete_resource_contacts += devent
self.delete_resource()
self.assertEqual(tuple(), devent.called)
def test_on_deleted_resource(self):
devent = DummyEvent(self.after_delete)
self.app.on_deleted_resource += devent
self.delete_resource()
self.assertEqual(('contacts',), devent.called)
def test_on_deleted_resource_contacts(self):
devent = DummyEvent(self.after_delete)
self.app.on_deleted_resource_contacts += devent
self.delete_resource()
self.assertEqual(tuple(), devent.called)
def test_on_delete_item(self):
devent = DummyEvent(self.before_delete)
self.app.on_delete_item += devent
self.delete_item()
self.assertEqual('contacts', devent.called[0])
self.assertEqual(
self.item_id, str(devent.called[1][self.app.config['ID_FIELD']]))
def test_on_delete_item_contacts(self):
devent = DummyEvent(self.before_delete)
self.app.on_delete_item_contacts += devent
self.delete_item()
self.assertEqual(
self.item_id, str(devent.called[0][self.app.config['ID_FIELD']]))
def test_on_deleted_item(self):
devent = DummyEvent(self.after_delete)
self.app.on_deleted_item += devent
self.delete_item()
self.assertEqual('contacts', devent.called[0])
self.assertEqual(
self.item_id, str(devent.called[1][self.app.config['ID_FIELD']]))
def test_on_deleted_item_contacts(self):
devent = DummyEvent(self.after_delete)
self.app.on_deleted_item_contacts += devent
self.delete_item()
self.assertEqual(
self.item_id, str(devent.called[0][self.app.config['ID_FIELD']]))
def delete_resource(self):
self.test_client.delete(self.known_resource_url)
def delete_item(self):
return self.test_client.delete(
self.item_id_url, headers=[('If-Match', self.item_etag)])
def before_delete(self):
db = self.connection[MONGO_DBNAME]
return db.contacts.find_one(ObjectId(self.item_id)) is not None
def after_delete(self):
return not self.before_delete()
| bsd-3-clause |
caot/intellij-community | python/lib/Lib/xml/Uri.py | 109 | 16394 | # pylint: disable-msg=C0103
#
# backported code from 4Suite with slight modifications, started from r1.89 of
# Ft/Lib/Uri.py, by [email protected] on 2005-02-09
#
# part, if not all, of this code should probably move to urlparse (or be used
# to fix some existing functions in this module)
#
#
# Copyright 2004 Fourthought, Inc. (USA).
# Detailed license and copyright information: http://4suite.org/COPYRIGHT
# Project home, documentation, distributions: http://4suite.org/
import os.path
import sys
import re
import urlparse, urllib, urllib2
def UnsplitUriRef(uriRefSeq):
"""should replace urlparse.urlunsplit
Given a sequence as would be produced by SplitUriRef(), assembles and
returns a URI reference as a string.
"""
if not isinstance(uriRefSeq, (tuple, list)):
raise TypeError("sequence expected, got %s" % type(uriRefSeq))
(scheme, authority, path, query, fragment) = uriRefSeq
uri = ''
if scheme is not None:
uri += scheme + ':'
if authority is not None:
uri += '//' + authority
uri += path
if query is not None:
uri += '?' + query
if fragment is not None:
uri += '#' + fragment
return uri
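# Assumed behaviour, following RFC 3986 appendix B semantics (None components
# are simply omitted):
#
#     >>> UnsplitUriRef(('http', 'example.org', '/a', 'x=1', 'frag'))
#     'http://example.org/a?x=1#frag'
#     >>> UnsplitUriRef((None, None, 'relative/path', None, None))
#     'relative/path'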
SPLIT_URI_REF_PATTERN = re.compile(r"^(?:(?P<scheme>[^:/?#]+):)?(?://(?P<authority>[^/?#]*))?(?P<path>[^?#]*)(?:\?(?P<query>[^#]*))?(?:#(?P<fragment>.*))?$")
def SplitUriRef(uriref):
"""should replace urlparse.urlsplit
Given a valid URI reference as a string, returns a tuple representing the
generic URI components, as per RFC 2396 appendix B. The tuple's structure
is (scheme, authority, path, query, fragment).
All values will be strings (possibly empty) or None if undefined.
Note that per rfc3986, there is no distinction between a path and
an "opaque part", as there was in RFC 2396.
"""
# the pattern will match every possible string, so it's safe to
# assume there's a groupdict method to call.
g = SPLIT_URI_REF_PATTERN.match(uriref).groupdict()
scheme = g['scheme']
authority = g['authority']
path = g['path']
query = g['query']
fragment = g['fragment']
return (scheme, authority, path, query, fragment)
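# Illustrative splits (assumed behaviour; undefined components come back as
# None rather than empty strings):
#
#     >>> SplitUriRef('http://example.org/a/b?x=1#frag')
#     ('http', 'example.org', '/a/b', 'x=1', 'frag')
#     >>> SplitUriRef('mailto:[email protected]')
#     ('mailto', None, '[email protected]', None, None)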
def Absolutize(uriRef, baseUri):
"""
Resolves a URI reference to absolute form, effecting the result of RFC
3986 section 5. The URI reference is considered to be relative to the
given base URI.
It is the caller's responsibility to ensure that the base URI matches
the absolute-URI syntax rule of RFC 3986, and that its path component
does not contain '.' or '..' segments if the scheme is hierarchical.
Unexpected results may occur otherwise.
This function only conducts a minimal sanity check in order to determine
if relative resolution is possible: it raises a UriException if the base
URI does not have a scheme component. While it is true that the base URI
is irrelevant if the URI reference has a scheme, an exception is raised
in order to signal that the given string does not even come close to
meeting the criteria to be usable as a base URI.
It is the caller's responsibility to make a determination of whether the
URI reference constitutes a "same-document reference", as defined in RFC
2396 or RFC 3986. As per the spec, dereferencing a same-document
reference "should not" involve retrieval of a new representation of the
referenced resource. Note that the two specs have different definitions
of same-document reference: RFC 2396 says it is *only* the cases where the
reference is the empty string, or "#" followed by a fragment; RFC 3986
requires making a comparison of the base URI to the absolute form of the
reference (as is returned by the spec), minus its fragment component,
if any.
This function is similar to urlparse.urljoin() and urllib.basejoin().
Those functions, however, are (as of Python 2.3) outdated, buggy, and/or
designed to produce results acceptable for use with other core Python
libraries, rather than being earnest implementations of the relevant
specs. Their problems are most noticeable in their handling of
same-document references and 'file:' URIs, both being situations that
come up far too often to consider the functions reliable enough for
general use.
"""
# Reasons to avoid using urllib.basejoin() and urlparse.urljoin():
# - Both are partial implementations of long-obsolete specs.
# - Both accept relative URLs as the base, which no spec allows.
# - urllib.basejoin() mishandles the '' and '..' references.
# - If the base URL uses a non-hierarchical or relative path,
# or if the URL scheme is unrecognized, the result is not
# always as expected (partly due to issues in RFC 1808).
# - If the authority component of a 'file' URI is empty,
# the authority component is removed altogether. If it was
# not present, an empty authority component is in the result.
# - '.' and '..' segments are not always collapsed as well as they
# should be (partly due to issues in RFC 1808).
# - Effective Python 2.4, urllib.basejoin() *is* urlparse.urljoin(),
# but urlparse.urljoin() is still based on RFC 1808.
# This procedure is based on the pseudocode in RFC 3986 sec. 5.2.
#
# ensure base URI is absolute
if not baseUri:
raise ValueError('baseUri is required and must be a non empty string')
if not IsAbsolute(baseUri):
raise ValueError('%r is not an absolute URI' % baseUri)
# shortcut for the simplest same-document reference cases
if uriRef == '' or uriRef[0] == '#':
return baseUri.split('#')[0] + uriRef
# ensure a clean slate
tScheme = tAuth = tPath = tQuery = None
# parse the reference into its components
(rScheme, rAuth, rPath, rQuery, rFrag) = SplitUriRef(uriRef)
# if the reference is absolute, eliminate '.' and '..' path segments
# and skip to the end
if rScheme is not None:
tScheme = rScheme
tAuth = rAuth
tPath = RemoveDotSegments(rPath)
tQuery = rQuery
else:
# the base URI's scheme, and possibly more, will be inherited
(bScheme, bAuth, bPath, bQuery, bFrag) = SplitUriRef(baseUri)
# if the reference is a net-path, just eliminate '.' and '..' path
# segments; no other changes needed.
if rAuth is not None:
tAuth = rAuth
tPath = RemoveDotSegments(rPath)
tQuery = rQuery
# if it's not a net-path, we need to inherit pieces of the base URI
else:
# use base URI's path if the reference's path is empty
if not rPath:
tPath = bPath
                # use the reference's query if it is defined (even if
                # empty); otherwise use the base URI's (RFC 3986 sec. 5.2.2)
                if rQuery is not None:
                    tQuery = rQuery
                else:
                    tQuery = bQuery
# the reference's path is not empty
else:
# just use the reference's path if it's absolute
if rPath[0] == '/':
tPath = RemoveDotSegments(rPath)
# merge the reference's relative path with the base URI's path
else:
if bAuth is not None and not bPath:
tPath = '/' + rPath
else:
tPath = bPath[:bPath.rfind('/')+1] + rPath
tPath = RemoveDotSegments(tPath)
# use the reference's query
tQuery = rQuery
# since the reference isn't a net-path,
# use the authority from the base URI
tAuth = bAuth
# inherit the scheme from the base URI
tScheme = bScheme
# always use the reference's fragment (but no need to define another var)
#tFrag = rFrag
# now compose the target URI (RFC 3986 sec. 5.3)
return UnsplitUriRef((tScheme, tAuth, tPath, tQuery, rFrag))
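# Illustrative doctest-style examples (a minimal sketch; outputs assume the
# SplitUriRef/UnsplitUriRef helpers defined earlier in this module):
#
#   >>> Absolutize('../sibling', 'http://example.com/a/b/c')
#   'http://example.com/a/sibling'
#   >>> Absolutize('#frag', 'http://example.com/doc')
#   'http://example.com/doc#frag'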
REG_NAME_HOST_PATTERN = re.compile(r"^(?:(?:[0-9A-Za-z\-_\.!~*'();&=+$,]|(?:%[0-9A-Fa-f]{2}))*)$")
def MakeUrllibSafe(uriRef):
"""
Makes the given RFC 3986-conformant URI reference safe for passing
to legacy urllib functions. The result may not be a valid URI.
    As of Python 2.3.3, urllib.urlopen() does not fully support
    internationalized domain names, does not strip fragment components,
    and, on Windows, expects file URIs to use '|' instead of ':' in the
    path component corresponding to the drivespec. It also relies on
    urllib.unquote(), which mishandles unicode arguments. This function
    produces a URI reference that will work around these issues, although
    the IDN workaround requires Python 2.3 or later. May raise a
    ValueError if the URI reference is a Unicode string that erroneously
    contains non-ASCII characters.
"""
# IDN support requires decoding any percent-encoded octets in the
# host part (if it's a reg-name) of the authority component, and when
# doing DNS lookups, applying IDNA encoding to that string first.
# As of Python 2.3, there is an IDNA codec, and the socket and httplib
# modules accept Unicode strings and apply IDNA encoding automatically
# where necessary. However, urllib.urlopen() has not yet been updated
# to do the same; it raises an exception if you give it a Unicode
# string, and does no conversion on non-Unicode strings, meaning you
# have to give it an IDNA string yourself. We will only support it on
# Python 2.3 and up.
#
# see if host is a reg-name, as opposed to IPv4 or IPv6 addr.
if isinstance(uriRef, unicode):
try:
uriRef = uriRef.encode('us-ascii') # parts of urllib are not unicode safe
except UnicodeError:
raise ValueError("uri %r must consist of ASCII characters." % uriRef)
(scheme, auth, path, query, frag) = urlparse.urlsplit(uriRef)
if auth and auth.find('@') > -1:
userinfo, hostport = auth.split('@')
else:
userinfo = None
hostport = auth
if hostport and hostport.find(':') > -1:
host, port = hostport.split(':')
else:
host = hostport
port = None
if host and REG_NAME_HOST_PATTERN.match(host):
# percent-encoded hostnames will always fail DNS lookups
host = urllib.unquote(host) #PercentDecode(host)
# IDNA-encode if possible.
# We shouldn't do this for schemes that don't need DNS lookup,
# but are there any (that you'd be calling urlopen for)?
if sys.version_info[0:2] >= (2, 3):
if isinstance(host, str):
host = host.decode('utf-8')
host = host.encode('idna')
# reassemble the authority with the new hostname
# (percent-decoded, and possibly IDNA-encoded)
auth = ''
if userinfo:
auth += userinfo + '@'
auth += host
if port:
auth += ':' + port
# On Windows, ensure that '|', not ':', is used in a drivespec.
if os.name == 'nt' and scheme == 'file':
path = path.replace(':', '|', 1)
# Note that we drop fragment, if any. See RFC 3986 sec. 3.5.
uri = urlparse.urlunsplit((scheme, auth, path, query, None))
return uri
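# A doctest-style sketch; note that the fragment component is dropped
# (RFC 3986 sec. 3.5) while the rest of the reference passes through:
#
#   >>> MakeUrllibSafe('http://example.com/doc#section')
#   'http://example.com/doc'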
def BaseJoin(base, uriRef):
"""
Merges a base URI reference with another URI reference, returning a
new URI reference.
It behaves exactly the same as Absolutize(), except the arguments
are reversed, and it accepts any URI reference (even a relative URI)
as the base URI. If the base has no scheme component, it is
evaluated as if it did, and then the scheme component of the result
is removed from the result, unless the uriRef had a scheme. Thus, if
neither argument has a scheme component, the result won't have one.
This function is named BaseJoin because it is very much like
urllib.basejoin(), but it follows the current rfc3986 algorithms
for path merging, dot segment elimination, and inheritance of query
and fragment components.
WARNING: This function exists for 2 reasons: (1) because of a need
within the 4Suite repository to perform URI reference absolutization
using base URIs that are stored (inappropriately) as absolute paths
in the subjects of statements in the RDF model, and (2) because of
a similar need to interpret relative repo paths in a 4Suite product
setup.xml file as being relative to a path that can be set outside
the document. When these needs go away, this function probably will,
too, so it is not advisable to use it.
"""
if IsAbsolute(base):
return Absolutize(uriRef, base)
else:
dummyscheme = 'basejoin'
res = Absolutize(uriRef, '%s:%s' % (dummyscheme, base))
if IsAbsolute(uriRef):
# scheme will be inherited from uriRef
return res
else:
# no scheme in, no scheme out
return res[len(dummyscheme)+1:]
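# A doctest-style sketch; with a relative base, the scheme-free result is
# preserved via the dummy-scheme trick above:
#
#   >>> BaseJoin('a/b/c', '../x')
#   'a/x'
#   >>> BaseJoin('http://example.com/a/b', 'c')
#   'http://example.com/a/c'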
def RemoveDotSegments(path):
"""
Supports Absolutize() by implementing the remove_dot_segments function
described in RFC 3986 sec. 5.2. It collapses most of the '.' and '..'
segments out of a path without eliminating empty segments. It is intended
to be used during the path merging process and may not give expected
results when used independently. Use NormalizePathSegments() or
NormalizePathSegmentsInUri() if more general normalization is desired.
    This function is semi-private because it is not intended for general
    use. I've implemented it using two segment stacks, as alluded to in
    the spec, rather than the explicit string-walking algorithm, which
    would be too inefficient. (mbrown)
"""
# return empty string if entire path is just "." or ".."
if path == '.' or path == '..':
return path[0:0] # preserves string type
# remove all "./" or "../" segments at the beginning
while path:
if path[:2] == './':
path = path[2:]
elif path[:3] == '../':
path = path[3:]
else:
break
# We need to keep track of whether there was a leading slash,
# because we're going to drop it in order to prevent our list of
# segments from having an ambiguous empty first item when we call
# split().
leading_slash = 0
if path[:1] == '/':
path = path[1:]
leading_slash = 1
# replace a trailing "/." with just "/"
if path[-2:] == '/.':
path = path[:-1]
# convert the segments into a list and process each segment in
# order from left to right.
segments = path.split('/')
keepers = []
segments.reverse()
while segments:
seg = segments.pop()
# '..' means drop the previous kept segment, if any.
# If none, and if the path is relative, then keep the '..'.
# If the '..' was the last segment, ensure
# that the result ends with '/'.
if seg == '..':
if keepers:
keepers.pop()
elif not leading_slash:
keepers.append(seg)
if not segments:
keepers.append('')
# ignore '.' segments and keep all others, even empty ones
elif seg != '.':
keepers.append(seg)
# reassemble the kept segments
return leading_slash * '/' + '/'.join(keepers)
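# A doctest-style sketch; the first input is the dot-segment case implied
# by the merge examples in RFC 3986 sec. 5.2.4:
#
#   >>> RemoveDotSegments('/a/b/c/./../../g')
#   '/a/g'
#   >>> RemoveDotSegments('mid/content=5/../6')
#   'mid/6'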
SCHEME_PATTERN = re.compile(r'([a-zA-Z][a-zA-Z0-9+\-.]*):')
def GetScheme(uriRef):
"""
Obtains, with optimum efficiency, just the scheme from a URI reference.
Returns a string, or if no scheme could be found, returns None.
"""
# Using a regex seems to be the best option. Called 50,000 times on
# different URIs, on a 1.0-GHz PIII with FreeBSD 4.7 and Python
# 2.2.1, this method completed in 0.95s, and 0.05s if there was no
# scheme to find. By comparison,
# urllib.splittype()[0] took 1.5s always;
# Ft.Lib.Uri.SplitUriRef()[0] took 2.5s always;
# urlparse.urlparse()[0] took 3.5s always.
m = SCHEME_PATTERN.match(uriRef)
if m is None:
return None
else:
return m.group(1)
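# A doctest-style sketch:
#
#   >>> GetScheme('http://example.com/')
#   'http'
#   >>> GetScheme('images/logo.png') is None
#   True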
def IsAbsolute(identifier):
"""
    Given a string believed to be a URI or URI reference, tests whether
    it is absolute (as per RFC 2396), not relative -- i.e., whether it
    has a scheme.
"""
# We do it this way to avoid compiling another massive regex.
return GetScheme(identifier) is not None
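# A doctest-style sketch:
#
#   >>> IsAbsolute('ftp://example.com/x')
#   True
#   >>> IsAbsolute('../x')
#   False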
| apache-2.0 |
stewartmiles/flatbuffers | tests/MyGame/Example/Monster.py | 5 | 29683 | # automatically generated by the FlatBuffers compiler, do not modify
# namespace: Example
import flatbuffers
# an example documentation comment: monster object
class Monster(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsMonster(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = Monster()
x.Init(buf, n + offset)
return x
@classmethod
def MonsterBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4D\x4F\x4E\x53", size_prefixed=size_prefixed)
# Monster
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# Monster
def Pos(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
x = o + self._tab.Pos
from .Vec3 import Vec3
obj = Vec3()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def Mana(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos)
return 150
# Monster
def Hp(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos)
return 100
# Monster
def Name(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
if o != 0:
return self._tab.String(o + self._tab.Pos)
return None
# Monster
def Inventory(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
return 0
# Monster
def InventoryAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
return 0
# Monster
def InventoryLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Color(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
return 8
# Monster
def TestType(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
return 0
# Monster
def Test(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
if o != 0:
from flatbuffers.table import Table
obj = Table(bytearray(), 0)
self._tab.Union(obj, o)
return obj
return None
# Monster
def Test4(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
if o != 0:
x = self._tab.Vector(o)
x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
from .Test import Test
obj = Test()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def Test4Length(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Testarrayofstring(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
if o != 0:
a = self._tab.Vector(o)
return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
return ""
# Monster
def TestarrayofstringLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
if o != 0:
return self._tab.VectorLen(o)
return 0
# an example documentation comment: this will end up in the generated code
# multiline too
# Monster
def Testarrayoftables(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
if o != 0:
x = self._tab.Vector(o)
x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
x = self._tab.Indirect(x)
from .Monster import Monster
obj = Monster()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def TestarrayoftablesLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Enemy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .Monster import Monster
obj = Monster()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def Testnestedflatbuffer(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
return 0
# Monster
def TestnestedflatbufferAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
return 0
# Monster
def TestnestedflatbufferLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Testempty(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .Stat import Stat
obj = Stat()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def Testbool(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
if o != 0:
return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
return False
# Monster
def Testhashs32Fnv1(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
return 0
# Monster
def Testhashu32Fnv1(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
return 0
# Monster
def Testhashs64Fnv1(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(40))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
return 0
# Monster
def Testhashu64Fnv1(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(42))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
return 0
# Monster
def Testhashs32Fnv1a(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(44))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
return 0
# Monster
def Testhashu32Fnv1a(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(46))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
return 0
# Monster
def Testhashs64Fnv1a(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(48))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
return 0
# Monster
def Testhashu64Fnv1a(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(50))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
return 0
# Monster
def Testarrayofbools(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(52))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.BoolFlags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
return 0
# Monster
def TestarrayofboolsAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(52))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.BoolFlags, o)
return 0
# Monster
def TestarrayofboolsLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(52))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Testf(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(54))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
return 3.14159
# Monster
def Testf2(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(56))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
return 3.0
# Monster
def Testf3(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(58))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
return 0.0
# Monster
def Testarrayofstring2(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(60))
if o != 0:
a = self._tab.Vector(o)
return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
return ""
# Monster
def Testarrayofstring2Length(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(60))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Testarrayofsortedstruct(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(62))
if o != 0:
x = self._tab.Vector(o)
x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 8
from .Ability import Ability
obj = Ability()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def TestarrayofsortedstructLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(62))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Flex(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(64))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
return 0
# Monster
def FlexAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(64))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
return 0
# Monster
def FlexLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(64))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def Test5(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(66))
if o != 0:
x = self._tab.Vector(o)
x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
from .Test import Test
obj = Test()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def Test5Length(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(66))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def VectorOfLongs(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(68))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
return 0
# Monster
def VectorOfLongsAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(68))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
return 0
# Monster
def VectorOfLongsLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(68))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def VectorOfDoubles(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(70))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
return 0
# Monster
def VectorOfDoublesAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(70))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o)
return 0
# Monster
def VectorOfDoublesLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(70))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def ParentNamespaceTest(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(72))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .InParentNamespace import InParentNamespace
obj = InParentNamespace()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def VectorOfReferrables(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(74))
if o != 0:
x = self._tab.Vector(o)
x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
x = self._tab.Indirect(x)
from .Referrable import Referrable
obj = Referrable()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def VectorOfReferrablesLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(74))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def SingleWeakReference(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(76))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
return 0
# Monster
def VectorOfWeakReferences(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(78))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
return 0
# Monster
def VectorOfWeakReferencesAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(78))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
return 0
# Monster
def VectorOfWeakReferencesLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(78))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def VectorOfStrongReferrables(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(80))
if o != 0:
x = self._tab.Vector(o)
x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
x = self._tab.Indirect(x)
from .Referrable import Referrable
obj = Referrable()
obj.Init(self._tab.Bytes, x)
return obj
return None
# Monster
def VectorOfStrongReferrablesLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(80))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def CoOwningReference(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(82))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
return 0
# Monster
def VectorOfCoOwningReferences(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(84))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
return 0
# Monster
def VectorOfCoOwningReferencesAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(84))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
return 0
# Monster
def VectorOfCoOwningReferencesLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(84))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def NonOwningReference(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(86))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
return 0
# Monster
def VectorOfNonOwningReferences(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(88))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
return 0
# Monster
def VectorOfNonOwningReferencesAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(88))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
return 0
# Monster
def VectorOfNonOwningReferencesLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(88))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Monster
def AnyUniqueType(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(90))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
return 0
# Monster
def AnyUnique(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(92))
if o != 0:
from flatbuffers.table import Table
obj = Table(bytearray(), 0)
self._tab.Union(obj, o)
return obj
return None
# Monster
def AnyAmbiguousType(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(94))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
return 0
# Monster
def AnyAmbiguous(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(96))
if o != 0:
from flatbuffers.table import Table
obj = Table(bytearray(), 0)
self._tab.Union(obj, o)
return obj
return None
# Monster
def VectorOfEnums(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(98))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
return 0
# Monster
def VectorOfEnumsAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(98))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
return 0
# Monster
def VectorOfEnumsLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(98))
if o != 0:
return self._tab.VectorLen(o)
return 0
def MonsterStart(builder): builder.StartObject(48)
def MonsterAddPos(builder, pos): builder.PrependStructSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(pos), 0)
def MonsterAddMana(builder, mana): builder.PrependInt16Slot(1, mana, 150)
def MonsterAddHp(builder, hp): builder.PrependInt16Slot(2, hp, 100)
def MonsterAddName(builder, name): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0)
def MonsterAddInventory(builder, inventory): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(inventory), 0)
def MonsterStartInventoryVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def MonsterAddColor(builder, color): builder.PrependUint8Slot(6, color, 8)
def MonsterAddTestType(builder, testType): builder.PrependUint8Slot(7, testType, 0)
def MonsterAddTest(builder, test): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(test), 0)
def MonsterAddTest4(builder, test4): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(test4), 0)
def MonsterStartTest4Vector(builder, numElems): return builder.StartVector(4, numElems, 2)
def MonsterAddTestarrayofstring(builder, testarrayofstring): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(testarrayofstring), 0)
def MonsterStartTestarrayofstringVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def MonsterAddTestarrayoftables(builder, testarrayoftables): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(testarrayoftables), 0)
def MonsterStartTestarrayoftablesVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def MonsterAddEnemy(builder, enemy): builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(enemy), 0)
def MonsterAddTestnestedflatbuffer(builder, testnestedflatbuffer): builder.PrependUOffsetTRelativeSlot(13, flatbuffers.number_types.UOffsetTFlags.py_type(testnestedflatbuffer), 0)
def MonsterStartTestnestedflatbufferVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def MonsterAddTestempty(builder, testempty): builder.PrependUOffsetTRelativeSlot(14, flatbuffers.number_types.UOffsetTFlags.py_type(testempty), 0)
def MonsterAddTestbool(builder, testbool): builder.PrependBoolSlot(15, testbool, 0)
def MonsterAddTesthashs32Fnv1(builder, testhashs32Fnv1): builder.PrependInt32Slot(16, testhashs32Fnv1, 0)
def MonsterAddTesthashu32Fnv1(builder, testhashu32Fnv1): builder.PrependUint32Slot(17, testhashu32Fnv1, 0)
def MonsterAddTesthashs64Fnv1(builder, testhashs64Fnv1): builder.PrependInt64Slot(18, testhashs64Fnv1, 0)
def MonsterAddTesthashu64Fnv1(builder, testhashu64Fnv1): builder.PrependUint64Slot(19, testhashu64Fnv1, 0)
def MonsterAddTesthashs32Fnv1a(builder, testhashs32Fnv1a): builder.PrependInt32Slot(20, testhashs32Fnv1a, 0)
def MonsterAddTesthashu32Fnv1a(builder, testhashu32Fnv1a): builder.PrependUint32Slot(21, testhashu32Fnv1a, 0)
def MonsterAddTesthashs64Fnv1a(builder, testhashs64Fnv1a): builder.PrependInt64Slot(22, testhashs64Fnv1a, 0)
def MonsterAddTesthashu64Fnv1a(builder, testhashu64Fnv1a): builder.PrependUint64Slot(23, testhashu64Fnv1a, 0)
def MonsterAddTestarrayofbools(builder, testarrayofbools): builder.PrependUOffsetTRelativeSlot(24, flatbuffers.number_types.UOffsetTFlags.py_type(testarrayofbools), 0)
def MonsterStartTestarrayofboolsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def MonsterAddTestf(builder, testf): builder.PrependFloat32Slot(25, testf, 3.14159)
def MonsterAddTestf2(builder, testf2): builder.PrependFloat32Slot(26, testf2, 3.0)
def MonsterAddTestf3(builder, testf3): builder.PrependFloat32Slot(27, testf3, 0.0)
def MonsterAddTestarrayofstring2(builder, testarrayofstring2): builder.PrependUOffsetTRelativeSlot(28, flatbuffers.number_types.UOffsetTFlags.py_type(testarrayofstring2), 0)
def MonsterStartTestarrayofstring2Vector(builder, numElems): return builder.StartVector(4, numElems, 4)
def MonsterAddTestarrayofsortedstruct(builder, testarrayofsortedstruct): builder.PrependUOffsetTRelativeSlot(29, flatbuffers.number_types.UOffsetTFlags.py_type(testarrayofsortedstruct), 0)
def MonsterStartTestarrayofsortedstructVector(builder, numElems): return builder.StartVector(8, numElems, 4)
def MonsterAddFlex(builder, flex): builder.PrependUOffsetTRelativeSlot(30, flatbuffers.number_types.UOffsetTFlags.py_type(flex), 0)
def MonsterStartFlexVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def MonsterAddTest5(builder, test5): builder.PrependUOffsetTRelativeSlot(31, flatbuffers.number_types.UOffsetTFlags.py_type(test5), 0)
def MonsterStartTest5Vector(builder, numElems): return builder.StartVector(4, numElems, 2)
def MonsterAddVectorOfLongs(builder, vectorOfLongs): builder.PrependUOffsetTRelativeSlot(32, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfLongs), 0)
def MonsterStartVectorOfLongsVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def MonsterAddVectorOfDoubles(builder, vectorOfDoubles): builder.PrependUOffsetTRelativeSlot(33, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfDoubles), 0)
def MonsterStartVectorOfDoublesVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def MonsterAddParentNamespaceTest(builder, parentNamespaceTest): builder.PrependUOffsetTRelativeSlot(34, flatbuffers.number_types.UOffsetTFlags.py_type(parentNamespaceTest), 0)
def MonsterAddVectorOfReferrables(builder, vectorOfReferrables): builder.PrependUOffsetTRelativeSlot(35, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfReferrables), 0)
def MonsterStartVectorOfReferrablesVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def MonsterAddSingleWeakReference(builder, singleWeakReference): builder.PrependUint64Slot(36, singleWeakReference, 0)
def MonsterAddVectorOfWeakReferences(builder, vectorOfWeakReferences): builder.PrependUOffsetTRelativeSlot(37, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfWeakReferences), 0)
def MonsterStartVectorOfWeakReferencesVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def MonsterAddVectorOfStrongReferrables(builder, vectorOfStrongReferrables): builder.PrependUOffsetTRelativeSlot(38, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfStrongReferrables), 0)
def MonsterStartVectorOfStrongReferrablesVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def MonsterAddCoOwningReference(builder, coOwningReference): builder.PrependUint64Slot(39, coOwningReference, 0)
def MonsterAddVectorOfCoOwningReferences(builder, vectorOfCoOwningReferences): builder.PrependUOffsetTRelativeSlot(40, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfCoOwningReferences), 0)
def MonsterStartVectorOfCoOwningReferencesVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def MonsterAddNonOwningReference(builder, nonOwningReference): builder.PrependUint64Slot(41, nonOwningReference, 0)
def MonsterAddVectorOfNonOwningReferences(builder, vectorOfNonOwningReferences): builder.PrependUOffsetTRelativeSlot(42, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfNonOwningReferences), 0)
def MonsterStartVectorOfNonOwningReferencesVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def MonsterAddAnyUniqueType(builder, anyUniqueType): builder.PrependUint8Slot(43, anyUniqueType, 0)
def MonsterAddAnyUnique(builder, anyUnique): builder.PrependUOffsetTRelativeSlot(44, flatbuffers.number_types.UOffsetTFlags.py_type(anyUnique), 0)
def MonsterAddAnyAmbiguousType(builder, anyAmbiguousType): builder.PrependUint8Slot(45, anyAmbiguousType, 0)
def MonsterAddAnyAmbiguous(builder, anyAmbiguous): builder.PrependUOffsetTRelativeSlot(46, flatbuffers.number_types.UOffsetTFlags.py_type(anyAmbiguous), 0)
def MonsterAddVectorOfEnums(builder, vectorOfEnums): builder.PrependUOffsetTRelativeSlot(47, flatbuffers.number_types.UOffsetTFlags.py_type(vectorOfEnums), 0)
def MonsterStartVectorOfEnumsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def MonsterEnd(builder): return builder.EndObject()
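# A minimal usage sketch (assuming the standard `flatbuffers` Python runtime
# API: Builder, CreateString, Finish, Output), combining the builder
# functions above and reading the result back with GetRootAsMonster:
#
#   import flatbuffers
#   builder = flatbuffers.Builder(0)
#   name = builder.CreateString('Orc')      # strings are created first
#   MonsterStart(builder)
#   MonsterAddName(builder, name)
#   MonsterAddHp(builder, 80)
#   builder.Finish(MonsterEnd(builder))
#   buf = builder.Output()
#   monster = Monster.GetRootAsMonster(buf, 0)
#   assert monster.Hp() == 80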
| apache-2.0 |