repo_name
stringlengths
5
100
path
stringlengths
4
299
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1.03M
license
stringclasses
15 values
hash
int64
-9,223,351,895,964,839,000
9,223,297,778B
line_mean
float64
3.17
100
line_max
int64
7
1k
alpha_frac
float64
0.25
0.98
autogenerated
bool
1 class
novopl/sphinx-refdoc
src/refdoc/objects/module.py
1
1803
# -*- coding: utf-8 -*- """ Python module encapsulation. """ from __future__ import absolute_import, unicode_literals from os.path import abspath, basename import attr from .. import rst from .base import DocObjBase @attr.s class Module(DocObjBase): """ Represents a python module. This is all the information needed to generate the documentation for the given module. """ @classmethod def create(cls, path, owner=None): """ Create a new module from the given path. :param str|unicode path: Path to the python module file. :param Package owner: The module owner. This is the package the module belongs to. :return Module: Newly created Module instance. """ if not Module.is_module(path): raise ValueError("Not a module: {}".format(path)) name = basename(path)[0:-3] mod = Module( path=abspath(path), name=name, fullname=name, owner=owner ) if owner is not None: mod.fullname = owner.get_relative_name(mod) return mod @classmethod def is_module(cls, path): """ Return *True* if the given path is a python module. """ return path.endswith('.py') and basename(path) != '__init__.py' @property def type(self): """ Hard override of the base .type property. """ return 'module' def to_rst(self): """ Return reST document describing this module. """ doc_src = rst.title('``{}``'.format(self.fullname)) doc_src += rst.automodule(self.fullname) return doc_src def __str__(self): """ Return the package fullname as it's string representation. """ return self.fullname
mit
-2,269,849,587,264,274,200
25.910448
78
0.5868
false
myint/cppclean
cpp/tokenize.py
1
10119
# Copyright 2007 Neal Norwitz # Portions Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tokenize C++ source code.""" from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals __author__ = '[email protected] (Neal Norwitz)' # Add $ as a valid identifier char since so much code uses it. _letters = 'abcdefghijklmnopqrstuvwxyz' _valid_identifier_first_char = _letters + _letters.upper() + '_$' _valid_identifier_char = _valid_identifier_first_char + '0123456789' VALID_IDENTIFIER_FIRST_CHARS = frozenset(_valid_identifier_first_char) VALID_IDENTIFIER_CHARS = frozenset(_valid_identifier_char) HEX_DIGITS = frozenset('0123456789abcdefABCDEF') INT_OR_FLOAT_DIGITS = frozenset('01234567890eE-+') # C++0x string prefixes. _STR_PREFIXES = frozenset(('R', 'u8', 'u8R', 'u', 'uR', 'U', 'UR', 'L', 'LR')) # Token types. UNKNOWN = 'UNKNOWN' SYNTAX = 'SYNTAX' CONSTANT = 'CONSTANT' NAME = 'NAME' PREPROCESSOR = 'PREPROCESSOR' class TokenError(Exception): """Raised when tokenization fails.""" class Token(object): """Data container to represent a C++ token. Tokens can be identifiers, syntax char(s), constants, or pre-processor directives. 
start contains the index of the first char of the token in the source end contains the index of the last char of the token in the source """ def __init__(self, token_type, name, start, end): self.token_type = token_type self.name = name self.start = start self.end = end def __str__(self): return 'Token(%r, %s, %s)' % (self.name, self.start, self.end) __repr__ = __str__ def _get_string(source, i): i = source.find('"', i + 1) while source[i - 1] == '\\': # Count the trailing backslashes. backslash_count = 1 j = i - 2 while source[j] == '\\': backslash_count += 1 j -= 1 # When trailing backslashes are even, they escape each other. if (backslash_count % 2) == 0: break i = source.find('"', i + 1) return i + 1 def _get_char(source, start, i): # NOTE(nnorwitz): may not be quite correct, should be good enough. i = source.find("'", i + 1) while i != -1 and source[i - 1] == '\\': # Need to special case '\\'. if source[i - 2] == '\\': break i = source.find("'", i + 1) # Try to handle unterminated single quotes. return i + 1 if i != -1 else start + 1 def get_tokens(source): """Returns a sequence of Tokens. Args: source: string of C++ source code. Yields: Token that represents the next token in the source. """ if not source.endswith('\n'): source += '\n' # Cache various valid character sets for speed. valid_identifier_first_chars = VALID_IDENTIFIER_FIRST_CHARS valid_identifier_chars = VALID_IDENTIFIER_CHARS hex_digits = HEX_DIGITS int_or_float_digits = INT_OR_FLOAT_DIGITS int_or_float_digits2 = int_or_float_digits | set('.') # Ignore tokens while in a #if 0 block. count_ifs = 0 i = 0 end = len(source) while i < end: # Skip whitespace. while i < end and source[i].isspace(): i += 1 if i >= end: return token_type = UNKNOWN start = i c = source[i] # Find a string token. if c in valid_identifier_first_chars or c == '_': token_type = NAME while source[i] in valid_identifier_chars: i += 1 # String and character constants can look like a name if # they are something like L"". 
if source[i] == "'" and source[start:i] in _STR_PREFIXES: token_type = CONSTANT i = _get_char(source, start, i) elif source[i] == '"' and source[start:i] in _STR_PREFIXES: token_type = CONSTANT i = _get_string(source, i) elif c == '/' and source[i + 1] == '/': # Find // comments. i = _find(source, '\n', i) continue elif c == '/' and source[i + 1] == '*': # Find /* comments. */ i = _find(source, '*/', i) + 2 continue elif c in '<>': # Handle '<' and '>' tokens. token_type = SYNTAX i += 1 new_ch = source[i] # Do not merge '>>' or '>>=' into a single token if new_ch == c and c != '>': i += 1 new_ch = source[i] if new_ch == '=': i += 1 elif c in ':+-&|=': # Handle 'XX' and 'X=' tokens. token_type = SYNTAX i += 1 new_ch = source[i] if new_ch == c: i += 1 elif c == '-' and new_ch == '>': i += 1 elif new_ch == '=': i += 1 elif c in '!*^%/': # Handle 'X=' tokens. token_type = SYNTAX i += 1 new_ch = source[i] if new_ch == '=': i += 1 elif c in '()[]{}~?;.,': # Handle single char tokens. token_type = SYNTAX i += 1 if c == '.' and source[i].isdigit(): token_type = CONSTANT i += 1 while source[i] in int_or_float_digits: i += 1 # Handle float suffixes. for suffix in ('l', 'f'): if suffix == source[i:i + 1].lower(): i += 1 break elif c.isdigit(): # Find integer. token_type = CONSTANT if c == '0' and source[i + 1] in 'xX': # Handle hex digits. i += 2 while source[i] in hex_digits: i += 1 else: while source[i] in int_or_float_digits2: i += 1 # Handle integer (and float) suffixes. if source[i].isalpha(): for suffix in ('ull', 'll', 'ul', 'l', 'f', 'u'): size = len(suffix) if suffix == source[i:i + size].lower(): i += size break elif c == '"': # Find string. token_type = CONSTANT i = _get_string(source, i) elif c == "'": # Find char. token_type = CONSTANT i = _get_char(source, start, i) elif c == '#': # Find pre-processor command. 
token_type = PREPROCESSOR got_if = source[i:i + 3] == '#if' if count_ifs and source[i:i + 6] == '#endif': count_ifs -= 1 if count_ifs == 0: source = source[:i].ljust(i + 6) + source[i + 6:] continue # Handle preprocessor statements (\ continuations). while True: i1 = source.find('\n', i) i2 = source.find('//', i) i3 = source.find('/*', i) i4 = source.find('"', i) # Get the first important symbol (newline, comment, EOF/end). i = min([x for x in (i1, i2, i3, i4, end) if x != -1]) # Handle comments in #define macros. if i == i3: i = _find(source, '*/', i) + 2 source = source[:i3].ljust(i) + source[i:] continue # Handle #include "dir//foo.h" properly. if source[i] == '"': i = _find(source, '"', i + 1) + 1 continue # Keep going if end of the line and the line ends with \. if i == i1 and source[i - 1] == '\\': i += 1 continue if got_if: begin = source.find('(', start, i) if begin == -1: begin = source.find(' ', start) begin = begin + 1 s1 = source.find(' ', begin) s2 = source.find(')', begin) s3 = source.find('\n', begin) s = min([x for x in (s1, s2, s3, end) if x != -1]) condition = source[begin:s] if ( count_ifs or condition == '0' or condition == '__OBJC__' ): count_ifs += 1 break elif c == '\\': # Handle \ in code. # This is different from the pre-processor \ handling. i += 1 continue elif count_ifs: # Ignore bogus code when we are inside an #if block. i += 1 continue else: raise TokenError("unexpected token '{0}'".format(c)) if count_ifs: continue assert i > 0 yield Token(token_type, source[start:i], start, i) def _find(string, sub_string, start_index): """Return index of sub_string in string. Raise TokenError if sub_string is not found. """ result = string.find(sub_string, start_index) if result == -1: raise TokenError("expected '{0}'".format(sub_string)) return result
apache-2.0
5,799,962,709,868,037,000
33.070707
79
0.476332
false
wangyushun/myblog
project/project/settings.py
1
3093
""" Django settings for project project. Generated by 'django-admin startproject' using Django 1.8.16. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '3n8rd*fki0g(ucn3^$shegs2(+@^1wfls+#$w*&x)0*gv3&wo8' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'blog', 'comments', 'haystack', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) ROOT_URLCONF = 'project.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'project.wsgi.application' # Database # 
https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.8/topics/i18n/ LANGUAGE_CODE = 'zh-hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.8/howto/static-files/ STATIC_URL = '/static/' HAYSTACK_CONNECTIONS = { 'default':{ 'ENGINE': 'blog.whoosh_cn_backend.WhooshEngine', 'PATH': os.path.join(BASE_DIR, 'whoosh_index'), }, } HAYSTACK_SEARCH_RESULTS_PER_PAGE = 10 HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor' MEDIA_ROOT = os.path.join(BASE_DIR, 'media').replace("\\", "/") MEDIA_URL='/media/'
gpl-3.0
1,316,615,652,272,308,500
25.211864
71
0.685095
false
dhomeier/astropy
astropy/coordinates/builtin_frames/skyoffset.py
3
8147
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from astropy import units as u from astropy.coordinates.transformations import DynamicMatrixTransform, FunctionTransform from astropy.coordinates.baseframe import (frame_transform_graph, BaseCoordinateFrame) from astropy.coordinates.attributes import CoordinateAttribute, QuantityAttribute from astropy.coordinates.matrix_utilities import (rotation_matrix, matrix_product, matrix_transpose) _skyoffset_cache = {} def make_skyoffset_cls(framecls): """ Create a new class that is the sky offset frame for a specific class of origin frame. If such a class has already been created for this frame, the same class will be returned. The new class will always have component names for spherical coordinates of ``lon``/``lat``. Parameters ---------- framecls : coordinate frame class (i.e., subclass of `~astropy.coordinates.BaseCoordinateFrame`) The class to create the SkyOffsetFrame of. Returns ------- skyoffsetframecls : class The class for the new skyoffset frame. Notes ----- This function is necessary because Astropy's frame transformations depend on connection between specific frame *classes*. So each type of frame needs its own distinct skyoffset frame class. This function generates just that class, as well as ensuring that only one example of such a class actually gets created in any given python session. """ if framecls in _skyoffset_cache: return _skyoffset_cache[framecls] # Create a new SkyOffsetFrame subclass for this frame class. name = 'SkyOffset' + framecls.__name__ _SkyOffsetFramecls = type( name, (SkyOffsetFrame, framecls), {'origin': CoordinateAttribute(frame=framecls, default=None), # The following two have to be done because otherwise we use the # defaults of SkyOffsetFrame set by BaseCoordinateFrame. 
'_default_representation': framecls._default_representation, '_default_differential': framecls._default_differential, '__doc__': SkyOffsetFrame.__doc__, }) @frame_transform_graph.transform(FunctionTransform, _SkyOffsetFramecls, _SkyOffsetFramecls) def skyoffset_to_skyoffset(from_skyoffset_coord, to_skyoffset_frame): """Transform between two skyoffset frames.""" # This transform goes through the parent frames on each side. # from_frame -> from_frame.origin -> to_frame.origin -> to_frame intermediate_from = from_skyoffset_coord.transform_to(from_skyoffset_coord.origin) intermediate_to = intermediate_from.transform_to(to_skyoffset_frame.origin) return intermediate_to.transform_to(to_skyoffset_frame) @frame_transform_graph.transform(DynamicMatrixTransform, framecls, _SkyOffsetFramecls) def reference_to_skyoffset(reference_frame, skyoffset_frame): """Convert a reference coordinate to an sky offset frame.""" # Define rotation matrices along the position angle vector, and # relative to the origin. origin = skyoffset_frame.origin.spherical mat1 = rotation_matrix(-skyoffset_frame.rotation, 'x') mat2 = rotation_matrix(-origin.lat, 'y') mat3 = rotation_matrix(origin.lon, 'z') return matrix_product(mat1, mat2, mat3) @frame_transform_graph.transform(DynamicMatrixTransform, _SkyOffsetFramecls, framecls) def skyoffset_to_reference(skyoffset_coord, reference_frame): """Convert an sky offset frame coordinate to the reference frame""" # use the forward transform, but just invert it R = reference_to_skyoffset(reference_frame, skyoffset_coord) # transpose is the inverse because R is a rotation matrix return matrix_transpose(R) _skyoffset_cache[framecls] = _SkyOffsetFramecls return _SkyOffsetFramecls class SkyOffsetFrame(BaseCoordinateFrame): """ A frame which is relative to some specific position and oriented to match its frame. SkyOffsetFrames always have component names for spherical coordinates of ``lon``/``lat``, *not* the component names for the frame of ``origin``. 
This is useful for calculating offsets and dithers in the frame of the sky relative to an arbitrary position. Coordinates in this frame are both centered on the position specified by the ``origin`` coordinate, *and* they are oriented in the same manner as the ``origin`` frame. E.g., if ``origin`` is `~astropy.coordinates.ICRS`, this object's ``lat`` will be pointed in the direction of Dec, while ``lon`` will point in the direction of RA. For more on skyoffset frames, see :ref:`astropy-skyoffset-frames`. Parameters ---------- representation : `~astropy.coordinates.BaseRepresentation` or None A representation object or None to have no data (or use the other keywords) origin : `~astropy.coordinates.SkyCoord` or low-level coordinate object. The coordinate which specifies the origin of this frame. Note that this origin is used purely for on-sky location/rotation. It can have a ``distance`` but it will not be used by this ``SkyOffsetFrame``. rotation : `~astropy.coordinates.Angle` or `~astropy.units.Quantity` with angle units The final rotation of the frame about the ``origin``. The sign of the rotation is the left-hand rule. That is, an object at a particular position angle in the un-rotated system will be sent to the positive latitude (z) direction in the final frame. Notes ----- ``SkyOffsetFrame`` is a factory class. That is, the objects that it yields are *not* actually objects of class ``SkyOffsetFrame``. Instead, distinct classes are created on-the-fly for whatever the frame class is of ``origin``. """ rotation = QuantityAttribute(default=0, unit=u.deg) origin = CoordinateAttribute(default=None, frame=None) def __new__(cls, *args, **kwargs): # We don't want to call this method if we've already set up # an skyoffset frame for this class. if not (issubclass(cls, SkyOffsetFrame) and cls is not SkyOffsetFrame): # We get the origin argument, and handle it here. 
try: origin_frame = kwargs['origin'] except KeyError: raise TypeError("Can't initialize an SkyOffsetFrame without origin= keyword.") if hasattr(origin_frame, 'frame'): origin_frame = origin_frame.frame newcls = make_skyoffset_cls(origin_frame.__class__) return newcls.__new__(newcls, *args, **kwargs) # http://stackoverflow.com/questions/19277399/why-does-object-new-work-differently-in-these-three-cases # See above for why this is necessary. Basically, because some child # may override __new__, we must override it here to never pass # arguments to the object.__new__ method. if super().__new__ is object.__new__: return super().__new__(cls) return super().__new__(cls, *args, **kwargs) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if self.origin is not None and not self.origin.has_data: raise ValueError('The origin supplied to SkyOffsetFrame has no ' 'data.') if self.has_data: self._set_skyoffset_data_lon_wrap_angle(self.data) @staticmethod def _set_skyoffset_data_lon_wrap_angle(data): if hasattr(data, 'lon'): data.lon.wrap_angle = 180. * u.deg return data def represent_as(self, base, s='base', in_frame_units=False): """ Ensure the wrap angle for any spherical representations. """ data = super().represent_as(base, s, in_frame_units=in_frame_units) self._set_skyoffset_data_lon_wrap_angle(data) return data
bsd-3-clause
-4,277,491,515,703,179,300
44.513966
115
0.66638
false
WoLpH/onkyo-eiscp
eiscp/commands.py
3
136214
# Generated # by generate_commands_module.py # from eiscp-commands.yaml # at 2013-08-04 14:40:46.165040 from collections import OrderedDict COMMANDS = OrderedDict([('main', OrderedDict([('PWR', {'description': 'System Power Command', 'name': 'system-power', 'values': OrderedDict([('00', {'description': 'sets System Standby', 'name': 'standby'}), ('01', {'description': 'sets System On', 'name': 'on'}), ('QSTN', {'description': 'gets the System Power Status', 'name': 'query'})])}), ('AMT', {'description': 'Audio Muting Command', 'name': 'audio-muting', 'values': OrderedDict([('00', {'description': 'sets Audio Muting Off', 'name': 'off'}), ('01', {'description': 'sets Audio Muting On', 'name': 'on'}), ('TG', {'description': 'sets Audio Muting Wrap-Around', 'name': 'toggle'}), ('QSTN', {'description': 'gets the Audio Muting State', 'name': 'query'})])}), ('SPA', {'description': 'Speaker A Command', 'name': 'speaker-a', 'values': OrderedDict([('00', {'description': 'sets Speaker Off', 'name': 'off'}), ('01', {'description': 'sets Speaker On', 'name': 'on'}), ('UP', {'description': 'sets Speaker Switch Wrap-Around', 'name': 'up'}), ('QSTN', {'description': 'gets the Speaker State', 'name': 'query'})])}), ('SPB', {'description': 'Speaker B Command', 'name': 'speaker-b', 'values': OrderedDict([('00', {'description': 'sets Speaker Off', 'name': 'off'}), ('01', {'description': 'sets Speaker On', 'name': 'on'}), ('UP', {'description': 'sets Speaker Switch Wrap-Around', 'name': 'up'}), ('QSTN', {'description': 'gets the Speaker State', 'name': 'query'})])}), ('SPL', {'description': 'Speaker Layout Command', 'name': 'speaker-layout', 'values': OrderedDict([('SB', {'description': 'sets SurrBack Speaker', 'name': 'surrback'}), ('FH', {'description': 'sets Front High Speaker / SurrBack+Front High Speakers', 'name': ('front-high', 'surrback-front-high-speakers')}), ('FW', {'description': 'sets Front Wide Speaker / SurrBack+Front Wide Speakers', 'name': ('front-wide', 
'surrback-front-wide-speakers')}), ('HW', {'description': 'sets, Front High+Front Wide Speakers', 'name': ('front-high-front-wide-speakers',)}), ('UP', {'description': 'sets Speaker Switch Wrap-Around', 'name': 'up'}), ('QSTN', {'description': 'gets the Speaker State', 'name': 'query'})])}), ('MVL', {'description': 'Master Volume Command', 'name': ('master-volume', 'volume'), 'values': OrderedDict([((0, 100), {'description': u'Volume Level 0 \u2013 100 ( In hexadecimal representation)', 'name': None}), ((0, 80), {'description': u'Volume Level 0 \u2013 80 ( In hexadecimal representation)', 'name': None}), ('UP', {'description': 'sets Volume Level Up', 'name': 'level-up'}), ('DOWN', {'description': 'sets Volume Level Down', 'name': 'level-down'}), ('UP1', {'description': 'sets Volume Level Up 1dB Step', 'name': 'level-up-1db-step'}), ('DOWN1', {'description': 'sets Volume Level Down 1dB Step', 'name': 'level-down-1db-step'}), ('QSTN', {'description': 'gets the Volume Level', 'name': 'query'})])}), ('TFR', {'description': 'Tone(Front) Command', 'name': 'tone-front', 'values': OrderedDict([('B{xx}', {'description': 'Front Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'b-xx'}), ('T{xx}', {'description': 'Front Treble (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 't-xx'}), ('BUP', {'description': 'sets Front Bass up(2 step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Front Bass down(2 step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Front Treble up(2 step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Front Treble down(2 step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Front Tone ("BxxTxx")', 'name': 'query'})])}), ('TFW', {'description': 'Tone(Front Wide) Command', 'name': 'tone-front-wide', 'values': OrderedDict([('B{xx}', {'description': 'Front Wide Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'b-xx'}), ('T{xx}', {'description': 'Front Wide Treble (xx is 
"-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 't-xx'}), ('BUP', {'description': 'sets Front Wide Bass up(2 step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Front Wide Bass down(2 step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Front Wide Treble up(2 step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Front Wide Treble down(2 step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Front Wide Tone ("BxxTxx")', 'name': 'query'})])}), ('TFH', {'description': 'Tone(Front High) Command', 'name': 'tone-front-high', 'values': OrderedDict([('B{xx}', {'description': 'Front High Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'b-xx'}), ('T{xx}', {'description': 'Front High Treble (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 't-xx'}), ('BUP', {'description': 'sets Front High Bass up(2 step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Front High Bass down(2 step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Front High Treble up(2 step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Front High Treble down(2 step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Front High Tone ("BxxTxx")', 'name': 'query'})])}), ('TCT', {'description': 'Tone(Center) Command', 'name': 'tone-center', 'values': OrderedDict([('B{xx}', {'description': 'Center Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'b-xx'}), ('T{xx}', {'description': 'Center Treble (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 't-xx'}), ('BUP', {'description': 'sets Center Bass up(2 step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Center Bass down(2 step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Center Treble up(2 step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Center Treble down(2 step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Cetner Tone ("BxxTxx")', 'name': 'query'})])}), ('TSR', {'description': 'Tone(Surround) Command', 
'name': 'tone-surround', 'values': OrderedDict([('B{xx}', {'description': 'Surround Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'b-xx'}), ('T{xx}', {'description': 'Surround Treble (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 't-xx'}), ('BUP', {'description': 'sets Surround Bass up(2 step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Surround Bass down(2 step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Surround Treble up(2 step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Surround Treble down(2 step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Surround Tone ("BxxTxx")', 'name': 'query'})])}), ('TSB', {'description': 'Tone(Surround Back) Command', 'name': 'tone-surround-back', 'values': OrderedDict([('B{xx}', {'description': 'Surround Back Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'b-xx'}), ('T{xx}', {'description': 'Surround Back Treble (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 't-xx'}), ('BUP', {'description': 'sets Surround Back Bass up(2 step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Surround Back Bass down(2 step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Surround Back Treble up(2 step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Surround Back Treble down(2 step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Surround Back Tone ("BxxTxx")', 'name': 'query'})])}), ('TSW', {'description': 'Tone(Subwoofer) Command', 'name': 'tone-subwoofer', 'values': OrderedDict([('B{xx}', {'description': 'Subwoofer Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'b-xx'}), ('BUP', {'description': 'sets Subwoofer Bass up(2 step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Subwoofer Bass down(2 step)', 'name': 'bass-down'}), ('QSTN', {'description': 'gets Subwoofer Tone ("BxxTxx")', 'name': 'query'})])}), ('SLP', {'description': 'Sleep Set Command', 'name': 'sleep-set', 'values': 
OrderedDict([((1, 90), {'description': 'sets Sleep Time 1 - 90min ( In hexadecimal representation)', 'name': 'time-1-90min'}), ('OFF', {'description': 'sets Sleep Time Off', 'name': 'time-off'}), ('UP', {'description': 'sets Sleep Time Wrap-Around UP', 'name': 'up'}), ('QSTN', {'description': 'gets The Sleep Time', 'name': 'query'})])}), ('SLC', {'description': 'Speaker Level Calibration Command', 'name': 'speaker-level-calibration', 'values': OrderedDict([('TEST', {'description': 'TEST Key', 'name': 'test'}), ('CHSEL', {'description': 'CH SEL Key', 'name': 'chsel'}), ('UP', {'description': 'LEVEL + Key', 'name': 'up'}), ('DOWN', {'description': u'LEVEL \u2013 KEY', 'name': 'down'})])}), ('SWL', {'description': 'Subwoofer (temporary) Level Command', 'name': 'subwoofer-temporary-level', 'values': OrderedDict([((-15, 0, 12), {'description': 'sets Subwoofer Level -15dB - 0dB - +12dB', 'name': '15db-0db-12db'}), ('UP', {'description': 'LEVEL + Key', 'name': 'up'}), ('DOWN', {'description': u'LEVEL \u2013 KEY', 'name': 'down'}), ('QSTN', {'description': 'gets the Subwoofer Level', 'name': 'query'})])}), ('CTL', {'description': 'Center (temporary) Level Command', 'name': 'center-temporary-level', 'values': OrderedDict([((-12, 0, 12), {'description': 'sets Center Level -12dB - 0dB - +12dB', 'name': '12db-0db-12db'}), ('UP', {'description': 'LEVEL + Key', 'name': 'up'}), ('DOWN', {'description': u'LEVEL \u2013 KEY', 'name': 'down'}), ('QSTN', {'description': 'gets the Subwoofer Level', 'name': 'query'})])}), ('DIF', {'description': 'Display Mode Command', 'name': 'display-mode', 'values': OrderedDict([('00', {'description': 'sets Selector + Volume Display Mode', 'name': 'selector-volume'}), ('01', {'description': 'sets Selector + Listening Mode Display Mode', 'name': 'selector-listening'}), ('02', {'description': 'Display Digital Format(temporary display)', 'name': '02'}), ('03', {'description': 'Display Video Format(temporary display)', 'name': '03'}), ('TG', 
{'description': 'sets Display Mode Wrap-Around Up', 'name': 'toggle'}), ('QSTN', {'description': 'gets The Display Mode', 'name': 'query'})])}), ('DIM', {'description': 'Dimmer Level Command', 'name': 'dimmer-level', 'values': OrderedDict([('00', {'description': 'sets Dimmer Level "Bright"', 'name': 'bright'}), ('01', {'description': 'sets Dimmer Level "Dim"', 'name': 'dim'}), ('02', {'description': 'sets Dimmer Level "Dark"', 'name': 'dark'}), ('03', {'description': 'sets Dimmer Level "Shut-Off"', 'name': 'shut-off'}), ('08', {'description': 'sets Dimmer Level "Bright & LED OFF"', 'name': 'bright-led-off'}), ('DIM', {'description': 'sets Dimmer Level Wrap-Around Up', 'name': 'dim'}), ('QSTN', {'description': 'gets The Dimmer Level', 'name': 'query'})])}), ('OSD', {'description': 'Setup Operation Command', 'name': 'setup', 'values': OrderedDict([('MENU', {'description': 'Menu Key', 'name': 'menu'}), ('UP', {'description': 'Up Key', 'name': 'up'}), ('DOWN', {'description': 'Down Key', 'name': 'down'}), ('RIGHT', {'description': 'Right Key', 'name': 'right'}), ('LEFT', {'description': 'Left Key', 'name': 'left'}), ('ENTER', {'description': 'Enter Key', 'name': 'enter'}), ('EXIT', {'description': 'Exit Key', 'name': 'exit'}), ('AUDIO', {'description': 'Audio Adjust Key', 'name': 'audio'}), ('VIDEO', {'description': 'Video Adjust Key', 'name': 'video'}), ('HOME', {'description': 'Home Key', 'name': 'home'})])}), ('MEM', {'description': 'Memory Setup Command', 'name': 'memory-setup', 'values': OrderedDict([('STR', {'description': 'stores memory', 'name': 'str'}), ('RCL', {'description': 'recalls memory', 'name': 'rcl'}), ('LOCK', {'description': 'locks memory', 'name': 'lock'}), ('UNLK', {'description': 'unlocks memory', 'name': 'unlk'})])}), ('IFA', {'description': 'Audio Infomation Command', 'name': 'audio-infomation', 'values': OrderedDict([('nnnnn:nnnnn', {'description': "Infomation of Audio(Same Immediate Display ',' is separator of infomations)", 'name': None}), 
('QSTN', {'description': 'gets Infomation of Audio', 'name': 'query'})])}), ('IFV', {'description': 'Video Infomation Command', 'name': 'video-infomation', 'values': OrderedDict([('nnnnn:nnnnn', {'description': "infomation of Video(Same Immediate Display ',' is separator of infomations)", 'name': None}), ('QSTN', {'description': 'gets Infomation of Video', 'name': 'query'})])}), ('SLI', {'description': 'Input Selector Command', 'name': 'input-selector', 'values': OrderedDict([('00', {'description': 'sets VIDEO1, VCR/DVR', 'name': ('video1', 'vcr', 'dvr')}), ('01', {'description': 'sets VIDEO2, CBL/SAT', 'name': ('video2', 'cbl', 'sat')}), ('02', {'description': 'sets VIDEO3, GAME/TV, GAME', 'name': ('video3', 'game', 'tv', 'game')}), ('03', {'description': 'sets VIDEO4, AUX1(AUX)', 'name': ('video4', 'aux1')}), ('04', {'description': 'sets VIDEO5, AUX2', 'name': ('video5', 'aux2')}), ('05', {'description': 'sets VIDEO6, PC', 'name': ('video6', 'pc')}), ('06', {'description': 'sets VIDEO7', 'name': 'video7'}), ('07', {'description': 'Hidden1', 'name': '07'}), ('08', {'description': 'Hidden2', 'name': '08'}), ('09', {'description': 'Hidden3', 'name': '09'}), ('10', {'description': 'sets DVD, BD/DVD', 'name': ('dvd', 'bd', 'dvd')}), ('20', {'description': 'sets TAPE(1), TV/TAPE', 'name': ('tape-1', 'tv', 'tape')}), ('21', {'description': 'sets TAPE2', 'name': 'tape2'}), ('22', {'description': 'sets PHONO', 'name': 'phono'}), ('23', {'description': 'sets CD, TV/CD', 'name': ('cd', 'tv', 'cd')}), ('24', {'description': 'sets FM', 'name': 'fm'}), ('25', {'description': 'sets AM', 'name': 'am'}), ('26', {'description': 'sets TUNER', 'name': 'tuner'}), ('27', {'description': 'sets MUSIC SERVER, P4S, DLNA', 'name': ('music-server', 'p4s', 'dlna')}), ('28', {'description': 'sets INTERNET RADIO, iRadio Favorite', 'name': ('internet-radio', 'iradio-favorite')}), ('29', {'description': 'sets USB/USB(Front)', 'name': ('usb', 'usb')}), ('2A', {'description': 'sets USB(Rear)', 
'name': 'usb'}), ('2B', {'description': 'sets NETWORK, NET', 'name': ('network', 'net')}), ('2C', {'description': 'sets USB(toggle)', 'name': 'usb'}), ('40', {'description': 'sets Universal PORT', 'name': 'universal-port'}), ('30', {'description': 'sets MULTI CH', 'name': 'multi-ch'}), ('31', {'description': 'sets XM', 'name': 'xm'}), ('32', {'description': 'sets SIRIUS', 'name': 'sirius'}), ('UP', {'description': 'sets Selector Position Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Selector Position Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Selector Position', 'name': 'query'})])}), ('SLR', {'description': 'RECOUT Selector Command', 'name': 'recout-selector', 'values': OrderedDict([('00', {'description': 'sets VIDEO1', 'name': 'video1'}), ('01', {'description': 'sets VIDEO2', 'name': 'video2'}), ('02', {'description': 'sets VIDEO3', 'name': 'video3'}), ('03', {'description': 'sets VIDEO4', 'name': 'video4'}), ('04', {'description': 'sets VIDEO5', 'name': 'video5'}), ('05', {'description': 'sets VIDEO6', 'name': 'video6'}), ('06', {'description': 'sets VIDEO7', 'name': 'video7'}), ('10', {'description': 'sets DVD', 'name': 'dvd'}), ('20', {'description': 'sets TAPE(1)', 'name': 'tape'}), ('21', {'description': 'sets TAPE2', 'name': 'tape2'}), ('22', {'description': 'sets PHONO', 'name': 'phono'}), ('23', {'description': 'sets CD', 'name': 'cd'}), ('24', {'description': 'sets FM', 'name': 'fm'}), ('25', {'description': 'sets AM', 'name': 'am'}), ('26', {'description': 'sets TUNER', 'name': 'tuner'}), ('27', {'description': 'sets MUSIC SERVER', 'name': 'music-server'}), ('28', {'description': 'sets INTERNET RADIO', 'name': 'internet-radio'}), ('30', {'description': 'sets MULTI CH', 'name': 'multi-ch'}), ('31', {'description': 'sets XM', 'name': 'xm'}), ('7F', {'description': 'sets OFF', 'name': 'off'}), ('80', {'description': 'sets SOURCE', 'name': 'source'}), ('QSTN', {'description': 'gets The Selector Position', 
'name': 'query'})])}), ('SLA', {'description': 'Audio Selector Command', 'name': 'audio-selector', 'values': OrderedDict([('00', {'description': 'sets AUTO', 'name': 'auto'}), ('01', {'description': 'sets MULTI-CHANNEL', 'name': 'multi-channel'}), ('02', {'description': 'sets ANALOG', 'name': 'analog'}), ('03', {'description': 'sets iLINK', 'name': 'ilink'}), ('04', {'description': 'sets HDMI', 'name': 'hdmi'}), ('05', {'description': 'sets COAX/OPT', 'name': ('coax', 'opt')}), ('06', {'description': 'sets BALANCE', 'name': 'balance'}), ('07', {'description': 'sets ARC', 'name': 'arc'}), ('UP', {'description': 'sets Audio Selector Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Audio Selector Status', 'name': 'query'})])}), ('TGA', {'description': '12V Trigger A Command', 'name': '12v-trigger-a', 'values': OrderedDict([('00', {'description': 'sets 12V Trigger A Off', 'name': 'off'}), ('01', {'description': 'sets 12V Trigger A On', 'name': 'on'})])}), ('TGB', {'description': '12V Trigger B Command', 'name': '12v-trigger-b', 'values': OrderedDict([('00', {'description': 'sets 12V Trigger B Off', 'name': 'off'}), ('01', {'description': 'sets 12V Trigger B On', 'name': 'on'})])}), ('TGC', {'description': '12V Trigger C Command', 'name': '12v-trigger-c', 'values': OrderedDict([('00', {'description': 'sets 12V Trigger C Off', 'name': 'off'}), ('01', {'description': 'sets 12V Trigger C On', 'name': 'on'})])}), ('VOS', {'description': 'Video Output Selector (Japanese Model Only)', 'name': 'video-output-selector', 'values': OrderedDict([('00', {'description': 'sets D4', 'name': 'd4'}), ('01', {'description': 'sets Component', 'name': 'component'}), ('QSTN', {'description': 'gets The Selector Position', 'name': 'query'})])}), ('HDO', {'description': 'HDMI Output Selector', 'name': 'hdmi-output-selector', 'values': OrderedDict([('00', {'description': 'sets No, Analog', 'name': ('no', 'analog')}), ('01', {'description': 'sets Yes/Out Main, HDMI Main', 
'name': ('yes', 'out')}), ('02', {'description': 'sets Out Sub, HDMI Sub', 'name': ('out-sub', 'sub')}), ('03', {'description': 'sets, Both', 'name': ('both',)}), ('04', {'description': 'sets, Both(Main)', 'name': ('both',)}), ('05', {'description': 'sets, Both(Sub)', 'name': ('both',)}), ('UP', {'description': 'sets HDMI Out Selector Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The HDMI Out Selector', 'name': 'query'})])}), ('HAO', {'description': 'HDMI Audio Out', 'name': 'hdmi-audio-out', 'values': OrderedDict([('00', {'description': 'sets Off', 'name': 'off'}), ('01', {'description': 'sets On', 'name': 'on'}), ('02', {'description': 'sets Auto', 'name': 'auto'}), ('UP', {'description': 'sets HDMI Audio Out Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets HDMI Audio Out', 'name': 'query'})])}), ('RES', {'description': 'Monitor Out Resolution', 'name': 'monitor-out-resolution', 'values': OrderedDict([('00', {'description': 'sets Through', 'name': 'through'}), ('01', {'description': 'sets Auto(HDMI Output Only)', 'name': 'auto'}), ('02', {'description': 'sets 480p', 'name': '480p'}), ('03', {'description': 'sets 720p', 'name': '720p'}), ('04', {'description': 'sets 1080i', 'name': '1080i'}), ('05', {'description': 'sets 1080p(HDMI Output Only)', 'name': '1080p'}), ('07', {'description': 'sets 1080p/24fs(HDMI Output Only)', 'name': ('1080p', '24fs')}), ('08', {'description': 'sets 4K Upcaling(HDMI Output Only)', 'name': '4k-upcaling'}), ('06', {'description': 'sets Source', 'name': 'source'}), ('UP', {'description': 'sets Monitor Out Resolution Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Monitor Out Resolution', 'name': 'query'})])}), ('ISF', {'description': 'ISF Mode', 'name': 'isf-mode', 'values': OrderedDict([('00', {'description': 'sets ISF Mode Custom', 'name': 'custom'}), ('01', {'description': 'sets ISF Mode Day', 'name': 'day'}), ('02', {'description': 'sets ISF Mode Night', 'name': 'night'}), ('UP', 
{'description': 'sets ISF Mode State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The ISF Mode State', 'name': 'query'})])}), ('VWM', {'description': 'Video Wide Mode', 'name': 'video-wide-mode', 'values': OrderedDict([('00', {'description': 'sets Auto', 'name': 'auto'}), ('01', {'description': 'sets 4:3', 'name': '4-3'}), ('02', {'description': 'sets Full', 'name': 'full'}), ('03', {'description': 'sets Zoom', 'name': 'zoom'}), ('04', {'description': 'sets Wide Zoom', 'name': 'zoom'}), ('05', {'description': 'sets Smart Zoom', 'name': 'smart-zoom'}), ('UP', {'description': 'sets Video Zoom Mode Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets Video Zoom Mode', 'name': 'query'})])}), ('VPM', {'description': 'Video Picture Mode', 'name': 'video-picture-mode', 'values': OrderedDict([('00', {'description': 'sets Through', 'name': 'through'}), ('01', {'description': 'sets Custom', 'name': 'custom'}), ('02', {'description': 'sets Cinema', 'name': 'cinema'}), ('03', {'description': 'sets Game', 'name': 'game'}), ('05', {'description': 'sets ISF Day', 'name': 'isf-day'}), ('06', {'description': 'sets ISF Night', 'name': 'isf-night'}), ('07', {'description': 'sets Streaming', 'name': 'streaming'}), ('08', {'description': 'sets Direct', 'name': 'direct'}), ('UP', {'description': 'sets Video Zoom Mode Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets Video Zoom Mode', 'name': 'query'})])}), ('LMD', {'description': 'Listening Mode Command', 'name': 'listening-mode', 'values': OrderedDict([('00', {'description': 'sets STEREO', 'name': 'stereo'}), ('01', {'description': 'sets DIRECT', 'name': 'direct'}), ('02', {'description': 'sets SURROUND', 'name': 'surround'}), ('03', {'description': 'sets FILM, Game-RPG', 'name': ('film', 'game-rpg')}), ('04', {'description': 'sets THX', 'name': 'thx'}), ('05', {'description': 'sets ACTION, Game-Action', 'name': ('action', 'game-action')}), ('06', {'description': 'sets MUSICAL, Game-Rock', 
'name': ('musical', 'game-rock')}), ('07', {'description': 'sets MONO MOVIE', 'name': 'mono-movie'}), ('08', {'description': 'sets ORCHESTRA', 'name': 'orchestra'}), ('09', {'description': 'sets UNPLUGGED', 'name': 'unplugged'}), ('0A', {'description': 'sets STUDIO-MIX', 'name': 'studio-mix'}), ('0B', {'description': 'sets TV LOGIC', 'name': 'tv-logic'}), ('0C', {'description': 'sets ALL CH STEREO', 'name': 'all-ch-stereo'}), ('0D', {'description': 'sets THEATER-DIMENSIONAL', 'name': 'theater-dimensional'}), ('0E', {'description': 'sets ENHANCED 7/ENHANCE, Game-Sports', 'name': ('enhanced-7', 'enhance', 'game-sports')}), ('0F', {'description': 'sets MONO', 'name': 'mono'}), ('11', {'description': 'sets PURE AUDIO', 'name': 'pure-audio'}), ('12', {'description': 'sets MULTIPLEX', 'name': 'multiplex'}), ('13', {'description': 'sets FULL MONO', 'name': 'full-mono'}), ('14', {'description': 'sets DOLBY VIRTUAL', 'name': 'dolby-virtual'}), ('15', {'description': 'sets DTS Surround Sensation', 'name': 'dts-surround-sensation'}), ('16', {'description': 'sets Audyssey DSX', 'name': 'audyssey-dsx'}), ('1F', {'description': 'sets Whole House Mode', 'name': 'whole-house'}), ('40', {'description': 'sets Straight Decode', 'name': 'straight-decode'}), ('41', {'description': 'sets Dolby EX', 'name': 'dolby-ex'}), ('42', {'description': 'sets THX Cinema', 'name': 'thx-cinema'}), ('43', {'description': 'sets THX Surround EX', 'name': 'thx-surround-ex'}), ('44', {'description': 'sets THX Music', 'name': 'thx-music'}), ('45', {'description': 'sets THX Games', 'name': 'thx-games'}), ('50', {'description': 'sets THX U2/S2/I/S Cinema/Cinema2', 'name': ('thx-u2', 's2', 'i', 's-cinema', 'cinema2')}), ('51', {'description': 'sets THX MusicMode,THX U2/S2/I/S Music', 'name': ('thx-musicmode', 'thx-u2', 's2', 'i', 's-music')}), ('52', {'description': 'sets THX Games Mode,THX U2/S2/I/S Games', 'name': ('thx-games', 'thx-u2', 's2', 'i', 's-games')}), ('80', {'description': 'sets PLII/PLIIx 
Movie', 'name': ('plii', 'pliix-movie')}), ('81', {'description': 'sets PLII/PLIIx Music', 'name': ('plii', 'pliix-music')}), ('82', {'description': 'sets Neo:6 Cinema/Neo:X Cinema', 'name': ('neo-6-cinema', 'neo-x-cinema')}), ('83', {'description': 'sets Neo:6 Music/Neo:X Music', 'name': ('neo-6-music', 'neo-x-music')}), ('84', {'description': 'sets PLII/PLIIx THX Cinema', 'name': ('plii', 'pliix-thx-cinema')}), ('85', {'description': 'sets Neo:6/Neo:X THX Cinema', 'name': ('neo-6', 'neo-x-thx-cinema')}), ('86', {'description': 'sets PLII/PLIIx Game', 'name': ('plii', 'pliix-game')}), ('87', {'description': 'sets Neural Surr', 'name': 'neural-surr'}), ('88', {'description': 'sets Neural THX/Neural Surround', 'name': ('neural-thx', 'neural-surround')}), ('89', {'description': 'sets PLII/PLIIx THX Games', 'name': ('plii', 'pliix-thx-games')}), ('8A', {'description': 'sets Neo:6/Neo:X THX Games', 'name': ('neo-6', 'neo-x-thx-games')}), ('8B', {'description': 'sets PLII/PLIIx THX Music', 'name': ('plii', 'pliix-thx-music')}), ('8C', {'description': 'sets Neo:6/Neo:X THX Music', 'name': ('neo-6', 'neo-x-thx-music')}), ('8D', {'description': 'sets Neural THX Cinema', 'name': 'neural-thx-cinema'}), ('8E', {'description': 'sets Neural THX Music', 'name': 'neural-thx-music'}), ('8F', {'description': 'sets Neural THX Games', 'name': 'neural-thx-games'}), ('90', {'description': 'sets PLIIz Height', 'name': 'pliiz-height'}), ('91', {'description': 'sets Neo:6 Cinema DTS Surround Sensation', 'name': 'neo-6-cinema-dts-surround-sensation'}), ('92', {'description': 'sets Neo:6 Music DTS Surround Sensation', 'name': 'neo-6-music-dts-surround-sensation'}), ('93', {'description': 'sets Neural Digital Music', 'name': 'neural-digital-music'}), ('94', {'description': 'sets PLIIz Height + THX Cinema', 'name': 'pliiz-height-thx-cinema'}), ('95', {'description': 'sets PLIIz Height + THX Music', 'name': 'pliiz-height-thx-music'}), ('96', {'description': 'sets PLIIz Height + THX Games', 
'name': 'pliiz-height-thx-games'}), ('97', {'description': 'sets PLIIz Height + THX U2/S2 Cinema', 'name': ('pliiz-height-thx-u2', 's2-cinema')}), ('98', {'description': 'sets PLIIz Height + THX U2/S2 Music', 'name': ('pliiz-height-thx-u2', 's2-music')}), ('99', {'description': 'sets PLIIz Height + THX U2/S2 Games', 'name': ('pliiz-height-thx-u2', 's2-games')}), ('9A', {'description': 'sets Neo:X Game', 'name': 'neo-x-game'}), ('A0', {'description': 'sets PLIIx/PLII Movie + Audyssey DSX', 'name': ('pliix', 'plii-movie-audyssey-dsx')}), ('A1', {'description': 'sets PLIIx/PLII Music + Audyssey DSX', 'name': ('pliix', 'plii-music-audyssey-dsx')}), ('A2', {'description': 'sets PLIIx/PLII Game + Audyssey DSX', 'name': ('pliix', 'plii-game-audyssey-dsx')}), ('A3', {'description': 'sets Neo:6 Cinema + Audyssey DSX', 'name': 'neo-6-cinema-audyssey-dsx'}), ('A4', {'description': 'sets Neo:6 Music + Audyssey DSX', 'name': 'neo-6-music-audyssey-dsx'}), ('A5', {'description': 'sets Neural Surround + Audyssey DSX', 'name': 'neural-surround-audyssey-dsx'}), ('A6', {'description': 'sets Neural Digital Music + Audyssey DSX', 'name': 'neural-digital-music-audyssey-dsx'}), ('A7', {'description': 'sets Dolby EX + Audyssey DSX', 'name': 'dolby-ex-audyssey-dsx'}), ('UP', {'description': 'sets Listening Mode Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Listening Mode Wrap-Around Down', 'name': 'down'}), ('MOVIE', {'description': 'sets Listening Mode Wrap-Around Up', 'name': 'movie'}), ('MUSIC', {'description': 'sets Listening Mode Wrap-Around Up', 'name': 'music'}), ('GAME', {'description': 'sets Listening Mode Wrap-Around Up', 'name': 'game'}), ('QSTN', {'description': 'gets The Listening Mode', 'name': 'query'})])}), ('LTN', {'description': 'Late Night Command', 'name': 'late-night', 'values': OrderedDict([('00', {'description': 'sets Late Night Off', 'name': 'off'}), ('01', {'description': 'sets Late Night Low@DolbyDigital,On@Dolby TrueHD', 'name': 
('low-dolbydigital', 'on-dolby-truehd')}), ('02', {'description': 'sets Late Night High@DolbyDigital,(On@Dolby TrueHD)', 'name': ('high-dolbydigital',)}), ('03', {'description': 'sets Late Night Auto@Dolby TrueHD', 'name': 'auto-dolby-truehd'}), ('UP', {'description': 'sets Late Night State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Late Night Level', 'name': 'query'})])}), ('RAS', {'description': 'Cinema Filter Command', 'name': 'cinema-filter', 'values': OrderedDict([('00', {'description': 'sets Cinema Filter Off', 'name': 'off'}), ('01', {'description': 'sets Cinema Filter On', 'name': 'on'}), ('UP', {'description': 'sets Cinema Filter State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Cinema Filter State', 'name': 'query'})])}), ('ADY', {'description': 'Audyssey 2EQ/MultEQ/MultEQ XT', 'name': 'audyssey-2eq-multeq-multeq-xt', 'values': OrderedDict([('00', {'description': 'sets Audyssey 2EQ/MultEQ/MultEQ XT Off', 'name': ('off',)}), ('01', {'description': 'sets Audyssey 2EQ/MultEQ/MultEQ XT On/Movie', 'name': ('on', 'movie')}), ('02', {'description': 'sets Audyssey 2EQ/MultEQ/MultEQ XT Music', 'name': ('music',)}), ('UP', {'description': 'sets Audyssey 2EQ/MultEQ/MultEQ XT State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Audyssey 2EQ/MultEQ/MultEQ XT State', 'name': 'query'})])}), ('ADQ', {'description': 'Audyssey Dynamic EQ', 'name': 'audyssey-dynamic-eq', 'values': OrderedDict([('00', {'description': 'sets Audyssey Dynamic EQ Off', 'name': 'off'}), ('01', {'description': 'sets Audyssey Dynamic EQ On', 'name': 'on'}), ('UP', {'description': 'sets Audyssey Dynamic EQ State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Audyssey Dynamic EQ State', 'name': 'query'})])}), ('ADV', {'description': 'Audyssey Dynamic Volume', 'name': 'audyssey-dynamic-volume', 'values': OrderedDict([('00', {'description': 'sets Audyssey Dynamic Volume Off', 'name': 'off'}), ('01', 
{'description': 'sets Audyssey Dynamic Volume Light', 'name': 'light'}), ('02', {'description': 'sets Audyssey Dynamic Volume Medium', 'name': 'medium'}), ('03', {'description': 'sets Audyssey Dynamic Volume Heavy', 'name': 'heavy'}), ('UP', {'description': 'sets Audyssey Dynamic Volume State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Audyssey Dynamic Volume State', 'name': 'query'})])}), ('DVL', {'description': 'Dolby Volume', 'name': 'dolby-volume', 'values': OrderedDict([('00', {'description': 'sets Dolby Volume Off', 'name': 'off'}), ('01', {'description': 'sets Dolby Volume Low/On', 'name': ('low', 'on')}), ('02', {'description': 'sets Dolby Volume Mid', 'name': 'mid'}), ('03', {'description': 'sets Dolby Volume High', 'name': 'high'}), ('UP', {'description': 'sets Dolby Volume State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Dolby Volume State', 'name': 'query'})])}), ('MOT', {'description': 'Music Optimizer', 'name': 'music-optimizer', 'values': OrderedDict([('00', {'description': 'sets Music Optimizer Off', 'name': 'off'}), ('01', {'description': 'sets Music Optimizer On', 'name': 'on'}), ('UP', {'description': 'sets Music Optimizer State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Dolby Volume State', 'name': 'query'})])}), ('TUN', {'description': 'Tuning Command (Include Tuner Pack Model Only)', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz / SR nnnnn ch)\nput 0 in the first two digits of nnnnn at SR', 'name': None}), ('DIRECT', {'description': 'starts/restarts Direct Tuning Mode', 'name': 'direct'}), ('0', {'description': 'sets 0 in Direct Tuning Mode', 'name': '0-in-direct-mode'}), ('1', {'description': 'sets 1 in Direct Tuning Mode', 'name': '1-in-direct-mode'}), ('2', {'description': 'sets 2 in Direct Tuning Mode', 'name': '2-in-direct-mode'}), ('3', {'description': 'sets 3 in Direct Tuning Mode', 
'name': '3-in-direct-mode'}), ('4', {'description': 'sets 4 in Direct Tuning Mode', 'name': '4-in-direct-mode'}), ('5', {'description': 'sets 5 in Direct Tuning Mode', 'name': '5-in-direct-mode'}), ('6', {'description': 'sets 6 in Direct Tuning Mode', 'name': '6-in-direct-mode'}), ('7', {'description': 'sets 7 in Direct Tuning Mode', 'name': '7-in-direct-mode'}), ('8', {'description': 'sets 8 in Direct Tuning Mode', 'name': '8-in-direct-mode'}), ('9', {'description': 'sets 9 in Direct Tuning Mode', 'name': '9-in-direct-mode'}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('PRS', {'description': 'Preset Command (Include Tuner Pack Model Only)', 'name': 'preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('PRM', {'description': 'Preset Memory Command (Include Tuner Pack Model Only)', 'name': 'preset-memory', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 
1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'})])}), ('RDS', {'description': 'RDS Information Command (RDS Model Only)', 'name': 'rds-information', 'values': OrderedDict([('00', {'description': 'Display RT Information', 'name': '00'}), ('01', {'description': 'Display PTY Information', 'name': '01'}), ('02', {'description': 'Display TP Information', 'name': '02'}), ('UP', {'description': 'Display RDS Information Wrap-Around Change', 'name': 'up'})])}), ('PTS', {'description': 'PTY Scan Command (RDS Model Only)', 'name': 'pty-scan', 'values': OrderedDict([((0, 30), {'description': u'sets PTY No \u201c0 - 30\u201d ( In hexadecimal representation)', 'name': 'no-0-30'}), ('ENTER', {'description': 'Finish PTY Scan', 'name': 'enter'})])}), ('TPS', {'description': 'TP Scan Command (RDS Model Only)', 'name': 'tp-scan', 'values': OrderedDict([('', {'description': u'Start TP Scan (When Don\u2019t Have Parameter)', 'name': None}), ('ENTER', {'description': 'Finish TP Scan', 'name': 'enter'})])}), ('XCN', {'description': 'XM Channel Name Info (XM Model Only)', 'name': 'xm-channel-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'XM Channel Name', 'name': None}), ('QSTN', {'description': 'gets XM Channel Name', 'name': 'query'})])}), ('XAT', {'description': 'XM Artist Name Info (XM Model Only)', 'name': 'xm-artist-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'XM Artist Name', 'name': None}), ('QSTN', {'description': 'gets XM Artist Name', 'name': 'query'})])}), ('XTI', {'description': 'XM Title Info (XM Model Only)', 'name': 'xm-title-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'XM Title', 'name': None}), ('QSTN', {'description': 'gets XM Title', 'name': 'query'})])}), ('XCH', {'description': 'XM Channel Number Command (XM Model Only)', 'name': 'xm-channel-number', 'values': OrderedDict([((0, 597), {'description': u'XM Channel Number \u201c000 - 255\u201d', 'name': None}), ('UP', {'description': 'sets XM 
Channel Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets XM Channel Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets XM Channel Number', 'name': 'query'})])}), ('XCT', {'description': 'XM Category Command (XM Model Only)', 'name': 'xm-category', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'XM Category Info', 'name': None}), ('UP', {'description': 'sets XM Category Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets XM Category Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets XM Category', 'name': 'query'})])}), ('SCN', {'description': 'SIRIUS Channel Name Info (SIRIUS Model Only)', 'name': 'sirius-channel-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'SIRIUS Channel Name', 'name': None}), ('QSTN', {'description': 'gets SIRIUS Channel Name', 'name': 'query'})])}), ('SAT', {'description': 'SIRIUS Artist Name Info (SIRIUS Model Only)', 'name': 'sirius-artist-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'SIRIUS Artist Name', 'name': None}), ('QSTN', {'description': 'gets SIRIUS Artist Name', 'name': 'query'})])}), ('STI', {'description': 'SIRIUS Title Info (SIRIUS Model Only)', 'name': 'sirius-title-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'SIRIUS Title', 'name': None}), ('QSTN', {'description': 'gets SIRIUS Title', 'name': 'query'})])}), ('SCH', {'description': 'SIRIUS Channel Number Command (SIRIUS Model Only)', 'name': 'sirius-channel-number', 'values': OrderedDict([((0, 597), {'description': u'SIRIUS Channel Number \u201c000 - 255\u201d', 'name': None}), ('UP', {'description': 'sets SIRIUS Channel Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets SIRIUS Channel Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets SIRIUS Channel Number', 'name': 'query'})])}), ('SCT', {'description': 'SIRIUS Category Command (SIRIUS Model Only)', 'name': 'sirius-category', 'values': OrderedDict([('nnnnnnnnnn', 
{'description': 'SIRIUS Category Info', 'name': None}), ('UP', {'description': 'sets SIRIUS Category Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets SIRIUS Category Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets SIRIUS Category', 'name': 'query'})])}), ('SLK', {'description': 'SIRIUS Parental Lock Command (SIRIUS Model Only)', 'name': 'sirius-parental-lock', 'values': OrderedDict([('nnnn', {'description': 'Lock Password (4Digits)', 'name': None}), ('INPUT', {'description': 'displays "Please input the Lock password"', 'name': 'input'}), ('WRONG', {'description': 'displays "The Lock password is wrong"', 'name': 'wrong'})])}), ('HAT', {'description': 'HD Radio Artist Name Info (HD Radio Model Only)', 'name': 'hd-radio-artist-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'HD Radio Artist Name (variable-length, 64 digits max)', 'name': None}), ('QSTN', {'description': 'gets HD Radio Artist Name', 'name': 'query'})])}), ('HCN', {'description': 'HD Radio Channel Name Info (HD Radio Model Only)', 'name': 'hd-radio-channel-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'HD Radio Channel Name (Station Name) (7 digits)', 'name': None}), ('QSTN', {'description': 'gets HD Radio Channel Name', 'name': 'query'})])}), ('HTI', {'description': 'HD Radio Title Info (HD Radio Model Only)', 'name': 'hd-radio-title-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'HD Radio Title (variable-length, 64 digits max)', 'name': None}), ('QSTN', {'description': 'gets HD Radio Title', 'name': 'query'})])}), ('HDS', {'description': 'HD Radio Detail Info (HD Radio Model Only)', 'name': 'hd-radio-detail-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'HD Radio Title', 'name': None}), ('QSTN', {'description': 'gets HD Radio Title', 'name': 'query'})])}), ('HPR', {'description': 'HD Radio Channel Program Command (HD Radio Model Only)', 'name': 'hd-radio-channel-program', 'values': OrderedDict([((1, 
8), {'description': 'sets directly HD Radio Channel Program', 'name': 'directly'}), ('QSTN', {'description': 'gets HD Radio Channel Program', 'name': 'query'})])}), ('HBL', {'description': 'HD Radio Blend Mode Command (HD Radio Model Only)', 'name': 'hd-radio-blend-mode', 'values': OrderedDict([('00', {'description': 'sets HD Radio Blend Mode "Auto"', 'name': 'auto'}), ('01', {'description': 'sets HD Radio Blend Mode "Analog"', 'name': 'analog'}), ('QSTN', {'description': 'gets the HD Radio Blend Mode Status', 'name': 'query'})])}), ('HTS', {'description': 'HD Radio Tuner Status (HD Radio Model Only)', 'name': 'hd-radio-tuner-status', 'values': OrderedDict([('mmnnoo', {'description': 'HD Radio Tuner Status (3 bytes)\nmm -> "00" not HD, "01" HD\nnn -> current Program "01"-"08"\noo -> receivable Program (8 bits are represented in hexadecimal notation. Each bit shows receivable or not.)', 'name': 'mmnnoo'}), ('QSTN', {'description': 'gets the HD Radio Tuner Status', 'name': 'query'})])}), ('NTC', {'description': 'Network/USB Operation Command (Network Model Only after TX-NR905)', 'name': 'network-usb', 'values': OrderedDict([('PLAY', {'description': 'PLAY KEY', 'name': 'play'}), ('STOP', {'description': 'STOP KEY', 'name': 'stop'}), ('PAUSE', {'description': 'PAUSE KEY', 'name': 'pause'}), ('TRUP', {'description': 'TRACK UP KEY', 'name': 'trup'}), ('TRDN', {'description': 'TRACK DOWN KEY', 'name': 'trdn'}), ('FF', {'description': 'FF KEY (CONTINUOUS*)', 'name': 'ff'}), ('REW', {'description': 'REW KEY (CONTINUOUS*)', 'name': 'rew'}), ('REPEAT', {'description': 'REPEAT KEY', 'name': 'repeat'}), ('RANDOM', {'description': 'RANDOM KEY', 'name': 'random'}), ('DISPLAY', {'description': 'DISPLAY KEY', 'name': 'display'}), ('ALBUM', {'description': 'ALBUM KEY', 'name': 'album'}), ('ARTIST', {'description': 'ARTIST KEY', 'name': 'artist'}), ('GENRE', {'description': 'GENRE KEY', 'name': 'genre'}), ('PLAYLIST', {'description': 'PLAYLIST KEY', 'name': 'playlist'}), ('RIGHT', 
{'description': 'RIGHT KEY', 'name': 'right'}), ('LEFT', {'description': 'LEFT KEY', 'name': 'left'}), ('UP', {'description': 'UP KEY', 'name': 'up'}), ('DOWN', {'description': 'DOWN KEY', 'name': 'down'}), ('SELECT', {'description': 'SELECT KEY', 'name': 'select'}), ('0', {'description': '0 KEY', 'name': '0'}), ('1', {'description': '1 KEY', 'name': '1'}), ('2', {'description': '2 KEY', 'name': '2'}), ('3', {'description': '3 KEY', 'name': '3'}), ('4', {'description': '4 KEY', 'name': '4'}), ('5', {'description': '5 KEY', 'name': '5'}), ('6', {'description': '6 KEY', 'name': '6'}), ('7', {'description': '7 KEY', 'name': '7'}), ('8', {'description': '8 KEY', 'name': '8'}), ('9', {'description': '9 KEY', 'name': '9'}), ('DELETE', {'description': 'DELETE KEY', 'name': 'delete'}), ('CAPS', {'description': 'CAPS KEY', 'name': 'caps'}), ('LOCATION', {'description': 'LOCATION KEY', 'name': 'location'}), ('LANGUAGE', {'description': 'LANGUAGE KEY', 'name': 'language'}), ('SETUP', {'description': 'SETUP KEY', 'name': 'setup'}), ('RETURN', {'description': 'RETURN KEY', 'name': 'return'}), ('CHUP', {'description': 'CH UP(for iRadio)', 'name': 'chup'}), ('CHDN', {'description': 'CH DOWN(for iRadio)', 'name': 'chdn'}), ('MENU', {'description': 'MENU', 'name': 'menu'}), ('TOP', {'description': 'TOP MENU', 'name': 'top'}), ('MODE', {'description': 'MODE(for iPod) STD<->EXT', 'name': 'mode'}), ('LIST', {'description': 'LIST <-> PLAYBACK', 'name': 'list'})])}), ('NAT', {'description': 'NET/USB Artist Name Info', 'name': 'net-usb-artist-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'NET/USB Artist Name (variable-length, 64 Unicode letters [UTF-8 encoded] max , for Network Control only)', 'name': None}), ('QSTN', {'description': 'gets iPod Artist Name', 'name': 'query'})])}), ('NAL', {'description': 'NET/USB Album Name Info', 'name': 'net-usb-album-name-info', 'values': OrderedDict([('nnnnnnn', {'description': 'NET/USB Album Name (variable-length, 64 Unicode 
letters [UTF-8 encoded] max , for Network Control only)', 'name': None}), ('QSTN', {'description': 'gets iPod Album Name', 'name': 'query'})])}), ('NTI', {'description': 'NET/USB Title Name', 'name': 'net-usb-title-name', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'NET/USB Title Name (variable-length, 64 Unicode letters [UTF-8 encoded] max , for Network Control only)', 'name': None}), ('QSTN', {'description': 'gets HD Radio Title', 'name': 'query'})])}), ('NTM', {'description': 'NET/USB Time Info', 'name': 'net-usb-time-info', 'values': OrderedDict([('mm:ss/mm:ss', {'description': 'NET/USB Time Info (Elapsed time/Track Time Max 99:59)', 'name': 'mm-ss-mm-ss'}), ('QSTN', {'description': 'gets iPod Time Info', 'name': 'query'})])}), ('NTR', {'description': 'NET/USB Track Info', 'name': 'net-usb-track-info', 'values': OrderedDict([('cccc/tttt', {'description': 'NET/USB Track Info (Current Track/Toral Track Max 9999)', 'name': 'cccc-tttt'}), ('QSTN', {'description': 'gets iPod Time Info', 'name': 'query'})])}), ('NST', {'description': 'NET/USB Play Status', 'name': 'net-usb-play-status', 'values': OrderedDict([('prs', {'description': 'NET/USB Play Status (3 letters)\np -> Play Status: "S": STOP, "P": Play, "p": Pause, "F": FF, "R": FR\nr -> Repeat Status: "-": Off, "R": All, "F": Folder, "1": Repeat 1,\ns -> Shuffle Status: "-": Off, "S": All , "A": Album, "F": Folder', 'name': 'prs'}), ('QSTN', {'description': 'gets the Net/USB Status', 'name': 'query'})])}), ('NPR', {'description': 'Internet Radio Preset Command', 'name': 'internet-radio-preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 
1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ('SET', {'description': 'preset memory current station', 'name': 'set'})])}), ('NLS', {'description': 'NET/USB List Info', 'name': 'net-usb-list-info', 'values': OrderedDict([('tlpnnnnnnnnnn', {'description': u'NET/USB List Info\nt ->Information Type (A : ASCII letter, C : Cursor Info, U : Unicode letter)\nwhen t = A,\n l ->Line Info (0-9 : 1st to 10th Line)\n nnnnnnnnn:Listed data (variable-length, 64 ASCII letters max)\n when AVR is not displayed NET/USB List(Ketboard,Menu,Popup\u2026), "nnnnnnnnn" is "See TV".\n p ->Property (- : no)\nwhen t = C,\n l ->Cursor Position (0-9 : 1st to 10th Line, - : No Cursor)\n p ->Update Type (P : Page Infomation Update ( Page Clear or Disable List Info) , C : Cursor Position Update)\nwhen t = U, (for Network Control Only)\n l ->Line Info (0-9 : 1st to 10th Line)\n nnnnnnnnn:Listed data (variable-length, 64 Unicode letters [UTF-8 encoded] max)\n when AVR is not displayed NET/USB List(Ketboard,Menu,Popup\u2026), "nnnnnnnnn" is "See TV".\n p ->Property (- : no)', 'name': None}), ('ti', {'description': 'select the listed item (from Network Control Only)\n t -> Index Type (L : Line, I : Index)\nwhen t = L,\n i -> Line number (0-9 : 1st to 10th Line [1 digit] )\nwhen t = I,\n iiiii -> Index number (00001-99999 : 1st to 99999th Item [5 digits] )', 'name': 'ti'})])}), ('NJA', {'description': 'NET/USB Jacket Art (When Jacket Art is available and Output for Network Control Only)', 'name': 'net-usb-jacket-art', 'values': OrderedDict([('tp{xx}{xx}{xx}{xx}{xx}{xx}', {'description': 'NET/USB Jacket Art/Album Art Data\nt-> Image type 0:BMP,1:JPEG\np-> Packet flag 0:Start, 1:Next, 2:End\nxxxxxxxxxxxxxx -> Jacket/Album Art Data (valiable length, 1024 ASCII HEX letters max)', 'name': 'tp-xx-xx-xx-xx-xx-xx'})])}), ('NSV', {'description': 'NET Service(for Network Control Only)', 'name': 'net-service', 'values': OrderedDict([(u'ssiaaaa\u2026aaaabbbb\u2026bbbb', {'description': 'select 
Network Service directly\nss -> Network Serveice\n 00:Media Server (DLNA)\n 01:Favorite\n 02:vTuner\n 03:SIRIUS\n 04:Pandora\n 05:Rhapsody\n 06:Last.fm\n 07:Napster\n 08:Slacker\n 09:Mediafly\n 0A:Spotify\n 0B:AUPEO!\n 0C:Radiko\n 0D:e-onkyo\n\ni-> Acount Info\n 0: No\n 1: Yes\n"aaaa...aaaa": User Name ( 128 Unicode letters [UTF-8 encoded] max )\n"bbbb...bbbb": Password ( 128 Unicode letters [UTF-8 encoded] max )', 'name': None})])}), ('NKY', {'description': 'NET Keyboard(for Network Control Only)', 'name': 'net-keyboard', 'values': OrderedDict([('ll', {'description': 'waiting Keyboard Input\nll -> category\n 00: Off ( Exit Keyboard Input )\n 01: User Name\n 02: Password\n 03: Artist Name\n 04: Album Name\n 05: Song Name\n 06: Station Name\n 07: Tag Name\n 08: Artist or Song\n 09: Episode Name\n 0A: Pin Code (some digit Number [0-9])\n 0B: User Name (available ISO 8859-1 character set)\n 0C: Password (available ISO 8859-1 character set)', 'name': 'll'}), ('nnnnnnnnn', {'description': 'set Keyboard Input letter\n"nnnnnnnn" is variable-length, 128 Unicode letters [UTF-8 encoded] max', 'name': None})])}), ('NPU', {'description': 'NET Popup Message(for Network Control Only)', 'name': 'net-popup-message', 'values': OrderedDict([(u'xaaa\u2026aaaybbb\u2026bbb', {'description': "x -> Popup Display Type\n 'T': Popup text is top\n 'B': Popup text is bottom\n 'L': Popup text is list format\n\naaa...aaa -> Popup Title, Massage\n when x = 'T' or 'B'\n Top Title [0x00] Popup Title [0x00] Popup Message [0x00]\n (valiable-length Unicode letter [UTF-8 encoded] )\n\n when x = 'L'\n Top Title [0x00] Item Title 1 [0x00] Item Parameter 1 [0x00] ... 
[0x00] Item Title 6 [0x00] Item Parameter 6 [0x00]\n (valiable-length Unicode letter [UTF-8 encoded] )\n\ny -> Cursor Position on button\n '0' : Button is not Displayed\n '1' : Cursor is on the button 1\n '2' : Cursor is on the button 2\n\nbbb...bbb -> Text of Button\n Text of Button 1 [0x00] Text of Button 2 [0x00]\n (valiable-length Unicode letter [UTF-8 encoded] )", 'name': None})])}), ('NMD', {'description': 'iPod Mode Change (with USB Connection Only)', 'name': 'ipod-mode-change', 'values': OrderedDict([('STD', {'description': 'Standerd Mode', 'name': 'std'}), ('EXT', {'description': 'Extend Mode(If available)', 'name': 'ext'}), ('VDC', {'description': 'Video Contents in Extended Mode', 'name': 'vdc'}), ('QSTN', {'description': 'gets iPod Mode Status', 'name': 'query'})])}), ('CCD', {'description': 'CD Player Operation Command', 'name': 'cd-player', 'values': OrderedDict([('POWER', {'description': 'POWER ON/OFF', 'name': 'power'}), ('TRACK', {'description': 'TRACK+', 'name': 'track'}), ('PLAY', {'description': 'PLAY', 'name': 'play'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('PAUSE', {'description': 'PAUSE', 'name': 'pause'}), ('SKIP.F', {'description': '>>I', 'name': 'skip-f'}), ('SKIP.R', {'description': 'I<<', 'name': 'skip-r'}), ('MEMORY', {'description': 'MEMORY', 'name': 'memory'}), ('CLEAR', {'description': 'CLEAR', 'name': 'clear'}), ('REPEAT', {'description': 'REPEAT', 'name': 'repeat'}), ('RANDOM', {'description': 'RANDOM', 'name': 'random'}), ('DISP', {'description': 'DISPLAY', 'name': 'disp'}), ('D.MODE', {'description': 'D.MODE', 'name': 'd-mode'}), ('FF', {'description': 'FF >>', 'name': 'ff'}), ('REW', {'description': 'REW <<', 'name': 'rew'}), ('OP/CL', {'description': 'OPEN/CLOSE', 'name': 'op-cl'}), ('1', {'description': '1.0', 'name': '1'}), ('2', {'description': '2.0', 'name': '2'}), ('3', {'description': '3.0', 'name': '3'}), ('4', {'description': '4.0', 'name': '4'}), ('5', {'description': '5.0', 'name': '5'}), ('6', 
{'description': '6.0', 'name': '6'}), ('7', {'description': '7.0', 'name': '7'}), ('8', {'description': '8.0', 'name': '8'}), ('9', {'description': '9.0', 'name': '9'}), ('0', {'description': '0.0', 'name': '0'}), ('10', {'description': '10.0', 'name': '10'}), ('+10', {'description': '+10', 'name': '10'}), ('D.SKIP', {'description': 'DISC +', 'name': 'd-skip'}), ('DISC.F', {'description': 'DISC +', 'name': 'disc-f'}), ('DISC.R', {'description': 'DISC -', 'name': 'disc-r'}), ('DISC1', {'description': 'DISC1', 'name': 'disc1'}), ('DISC2', {'description': 'DISC2', 'name': 'disc2'}), ('DISC3', {'description': 'DISC3', 'name': 'disc3'}), ('DISC4', {'description': 'DISC4', 'name': 'disc4'}), ('DISC5', {'description': 'DISC5', 'name': 'disc5'}), ('DISC6', {'description': 'DISC6', 'name': 'disc6'}), ('STBY', {'description': 'STANDBY', 'name': 'stby'}), ('PON', {'description': 'POWER ON', 'name': 'pon'})])}), ('CT1', {'description': 'TAPE1(A) Operation Command', 'name': 'tape1-a', 'values': OrderedDict([('PLAY.F', {'description': 'PLAY >', 'name': 'play-f'}), ('PLAY.R', {'description': 'PLAY <', 'name': 'play-r'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('RC/PAU', {'description': 'REC/PAUSE', 'name': 'rc-pau'}), ('FF', {'description': 'FF >>', 'name': 'ff'}), ('REW', {'description': 'REW <<', 'name': 'rew'})])}), ('CT2', {'description': 'TAPE2(B) Operation Command', 'name': 'tape2-b', 'values': OrderedDict([('PLAY.F', {'description': 'PLAY >', 'name': 'play-f'}), ('PLAY.R', {'description': 'PLAY <', 'name': 'play-r'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('RC/PAU', {'description': 'REC/PAUSE', 'name': 'rc-pau'}), ('FF', {'description': 'FF >>', 'name': 'ff'}), ('REW', {'description': 'REW <<', 'name': 'rew'}), ('OP/CL', {'description': 'OPEN/CLOSE', 'name': 'op-cl'}), ('SKIP.F', {'description': '>>I', 'name': 'skip-f'}), ('SKIP.R', {'description': 'I<<', 'name': 'skip-r'}), ('REC', {'description': 'REC', 'name': 'rec'})])}), ('CEQ', 
{'description': 'Graphics Equalizer Operation Command', 'name': 'graphics-equalizer', 'values': OrderedDict([('POWER', {'description': 'POWER ON/OFF', 'name': 'power'}), ('PRESET', {'description': 'PRESET', 'name': 'preset'})])}), ('CDT', {'description': 'DAT Recorder Operation Command', 'name': 'dat-recorder', 'values': OrderedDict([('PLAY', {'description': 'PLAY', 'name': 'play'}), ('RC/PAU', {'description': 'REC/PAUSE', 'name': 'rc-pau'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('SKIP.F', {'description': '>>I', 'name': 'skip-f'}), ('SKIP.R', {'description': 'I<<', 'name': 'skip-r'}), ('FF', {'description': 'FF >>', 'name': 'ff'}), ('REW', {'description': 'REW <<', 'name': 'rew'})])}), ('CDV', {'description': 'DVD Player Operation Command (via RIHD only after TX-NR509)', 'name': 'dvd-player', 'values': OrderedDict([('POWER', {'description': 'POWER ON/OFF', 'name': 'power'}), ('PWRON', {'description': 'POWER ON', 'name': 'pwron'}), ('PWROFF', {'description': 'POWER OFF', 'name': 'pwroff'}), ('PLAY', {'description': 'PLAY', 'name': 'play'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('SKIP.F', {'description': '>>I', 'name': 'skip-f'}), ('SKIP.R', {'description': 'I<<', 'name': 'skip-r'}), ('FF', {'description': 'FF >>', 'name': 'ff'}), ('REW', {'description': 'REW <<', 'name': 'rew'}), ('PAUSE', {'description': 'PAUSE', 'name': 'pause'}), ('LASTPLAY', {'description': 'LAST PLAY', 'name': 'lastplay'}), ('SUBTON/OFF', {'description': 'SUBTITLE ON/OFF', 'name': 'subton-off'}), ('SUBTITLE', {'description': 'SUBTITLE', 'name': 'subtitle'}), ('SETUP', {'description': 'SETUP', 'name': 'setup'}), ('TOPMENU', {'description': 'TOPMENU', 'name': 'topmenu'}), ('MENU', {'description': 'MENU', 'name': 'menu'}), ('UP', {'description': 'UP', 'name': 'up'}), ('DOWN', {'description': 'DOWN', 'name': 'down'}), ('LEFT', {'description': 'LEFT', 'name': 'left'}), ('RIGHT', {'description': 'RIGHT', 'name': 'right'}), ('ENTER', {'description': 'ENTER', 'name': 
'enter'}), ('RETURN', {'description': 'RETURN', 'name': 'return'}), ('DISC.F', {'description': 'DISC +', 'name': 'disc-f'}), ('DISC.R', {'description': 'DISC -', 'name': 'disc-r'}), ('AUDIO', {'description': 'AUDIO', 'name': 'audio'}), ('RANDOM', {'description': 'RANDOM', 'name': 'random'}), ('OP/CL', {'description': 'OPEN/CLOSE', 'name': 'op-cl'}), ('ANGLE', {'description': 'ANGLE', 'name': 'angle'}), ('1', {'description': '1.0', 'name': '1'}), ('2', {'description': '2.0', 'name': '2'}), ('3', {'description': '3.0', 'name': '3'}), ('4', {'description': '4.0', 'name': '4'}), ('5', {'description': '5.0', 'name': '5'}), ('6', {'description': '6.0', 'name': '6'}), ('7', {'description': '7.0', 'name': '7'}), ('8', {'description': '8.0', 'name': '8'}), ('9', {'description': '9.0', 'name': '9'}), ('10', {'description': '10.0', 'name': '10'}), ('0', {'description': '0.0', 'name': '0'}), ('SEARCH', {'description': 'SEARCH', 'name': 'search'}), ('DISP', {'description': 'DISPLAY', 'name': 'disp'}), ('REPEAT', {'description': 'REPEAT', 'name': 'repeat'}), ('MEMORY', {'description': 'MEMORY', 'name': 'memory'}), ('CLEAR', {'description': 'CLEAR', 'name': 'clear'}), ('ABR', {'description': 'A-B REPEAT', 'name': 'abr'}), ('STEP.F', {'description': 'STEP', 'name': 'step-f'}), ('STEP.R', {'description': 'STEP BACK', 'name': 'step-r'}), ('SLOW.F', {'description': 'SLOW', 'name': 'slow-f'}), ('SLOW.R', {'description': 'SLOW BACK', 'name': 'slow-r'}), ('ZOOMTG', {'description': 'ZOOM', 'name': 'zoomtg'}), ('ZOOMUP', {'description': 'ZOOM UP', 'name': 'zoomup'}), ('ZOOMDN', {'description': 'ZOOM DOWN', 'name': 'zoomdn'}), ('PROGRE', {'description': 'PROGRESSIVE', 'name': 'progre'}), ('VDOFF', {'description': 'VIDEO ON/OFF', 'name': 'vdoff'}), ('CONMEM', {'description': 'CONDITION MEMORY', 'name': 'conmem'}), ('FUNMEM', {'description': 'FUNCTION MEMORY', 'name': 'funmem'}), ('DISC1', {'description': 'DISC1', 'name': 'disc1'}), ('DISC2', {'description': 'DISC2', 'name': 'disc2'}), 
('DISC3', {'description': 'DISC3', 'name': 'disc3'}), ('DISC4', {'description': 'DISC4', 'name': 'disc4'}), ('DISC5', {'description': 'DISC5', 'name': 'disc5'}), ('DISC6', {'description': 'DISC6', 'name': 'disc6'}), ('FOLDUP', {'description': 'FOLDER UP', 'name': 'foldup'}), ('FOLDDN', {'description': 'FOLDER DOWN', 'name': 'folddn'}), ('P.MODE', {'description': 'PLAY MODE', 'name': 'p-mode'}), ('ASCTG', {'description': 'ASPECT(Toggle)', 'name': 'asctg'}), ('CDPCD', {'description': 'CD CHAIN REPEAT', 'name': 'cdpcd'}), ('MSPUP', {'description': 'MULTI SPEED UP', 'name': 'mspup'}), ('MSPDN', {'description': 'MULTI SPEED DOWN', 'name': 'mspdn'}), ('PCT', {'description': 'PICTURE CONTROL', 'name': 'pct'}), ('RSCTG', {'description': 'RESOLUTION(Toggle)', 'name': 'rsctg'}), ('INIT', {'description': 'Return to Factory Settings', 'name': 'init'})])}), ('CMD', {'description': 'MD Recorder Operation Command', 'name': 'md-recorder', 'values': OrderedDict([('POWER', {'description': 'POWER ON/OFF', 'name': 'power'}), ('PLAY', {'description': 'PLAY', 'name': 'play'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('FF', {'description': 'FF >>', 'name': 'ff'}), ('REW', {'description': 'REW <<', 'name': 'rew'}), ('P.MODE', {'description': 'PLAY MODE', 'name': 'p-mode'}), ('SKIP.F', {'description': '>>I', 'name': 'skip-f'}), ('SKIP.R', {'description': 'I<<', 'name': 'skip-r'}), ('PAUSE', {'description': 'PAUSE', 'name': 'pause'}), ('REC', {'description': 'REC', 'name': 'rec'}), ('MEMORY', {'description': 'MEMORY', 'name': 'memory'}), ('DISP', {'description': 'DISPLAY', 'name': 'disp'}), ('SCROLL', {'description': 'SCROLL', 'name': 'scroll'}), ('M.SCAN', {'description': 'MUSIC SCAN', 'name': 'm-scan'}), ('CLEAR', {'description': 'CLEAR', 'name': 'clear'}), ('RANDOM', {'description': 'RANDOM', 'name': 'random'}), ('REPEAT', {'description': 'REPEAT', 'name': 'repeat'}), ('ENTER', {'description': 'ENTER', 'name': 'enter'}), ('EJECT', {'description': 'EJECT', 'name': 'eject'}), 
('1', {'description': '1.0', 'name': '1'}), ('2', {'description': '2.0', 'name': '2'}), ('3', {'description': '3.0', 'name': '3'}), ('4', {'description': '4.0', 'name': '4'}), ('5', {'description': '5.0', 'name': '5'}), ('6', {'description': '6.0', 'name': '6'}), ('7', {'description': '7.0', 'name': '7'}), ('8', {'description': '8.0', 'name': '8'}), ('9', {'description': '9.0', 'name': '9'}), ('10/0', {'description': '10/0', 'name': '10-0'}), ('nn/nnn', {'description': '--/---', 'name': None}), ('NAME', {'description': 'NAME', 'name': 'name'}), ('GROUP', {'description': 'GROUP', 'name': 'group'}), ('STBY', {'description': 'STANDBY', 'name': 'stby'})])}), ('CCR', {'description': 'CD-R Recorder Operation Command', 'name': 'cd-r-recorder', 'values': OrderedDict([('POWER', {'description': 'POWER ON/OFF', 'name': 'power'}), ('P.MODE', {'description': 'PLAY MODE', 'name': 'p-mode'}), ('PLAY', {'description': 'PLAY', 'name': 'play'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('SKIP.F', {'description': '>>I', 'name': 'skip-f'}), ('SKIP.R', {'description': 'I<<', 'name': 'skip-r'}), ('PAUSE', {'description': 'PAUSE', 'name': 'pause'}), ('REC', {'description': 'REC', 'name': 'rec'}), ('CLEAR', {'description': 'CLEAR', 'name': 'clear'}), ('REPEAT', {'description': 'REPEAT', 'name': 'repeat'}), ('1', {'description': '1.0', 'name': '1'}), ('2', {'description': '2.0', 'name': '2'}), ('3', {'description': '3.0', 'name': '3'}), ('4', {'description': '4.0', 'name': '4'}), ('5', {'description': '5.0', 'name': '5'}), ('6', {'description': '6.0', 'name': '6'}), ('7', {'description': '7.0', 'name': '7'}), ('8', {'description': '8.0', 'name': '8'}), ('9', {'description': '9.0', 'name': '9'}), ('10/0', {'description': '10/0', 'name': '10-0'}), ('nn/nnn', {'description': '--/---', 'name': None}), ('SCROLL', {'description': 'SCROLL', 'name': 'scroll'}), ('OP/CL', {'description': 'OPEN/CLOSE', 'name': 'op-cl'}), ('DISP', {'description': 'DISPLAY', 'name': 'disp'}), ('RANDOM', 
{'description': 'RANDOM', 'name': 'random'}), ('MEMORY', {'description': 'MEMORY', 'name': 'memory'}), ('FF', {'description': 'FF', 'name': 'ff'}), ('REW', {'description': 'REW', 'name': 'rew'}), ('STBY', {'description': 'STANDBY', 'name': 'stby'})])}), ('CPT', {'description': 'Universal PORT Operation Command', 'name': 'universal-port', 'values': OrderedDict([('SETUP', {'description': 'SETUP', 'name': 'setup'}), ('UP', {'description': 'UP/Tuning Up', 'name': 'up'}), ('DOWN', {'description': 'DOWN/Tuning Down', 'name': 'down'}), ('LEFT', {'description': 'LEFT/Multicast Down', 'name': 'left'}), ('RIGHT', {'description': 'RIGHT/Multicast Up', 'name': 'right'}), ('ENTER', {'description': 'ENTER', 'name': 'enter'}), ('RETURN', {'description': 'RETURN', 'name': 'return'}), ('DISP', {'description': 'DISPLAY', 'name': 'disp'}), ('PLAY', {'description': 'PLAY/BAND', 'name': 'play'}), ('STOP', {'description': 'STOP', 'name': 'stop'}), ('PAUSE', {'description': 'PAUSE', 'name': 'pause'}), ('SKIP.F', {'description': '>>I', 'name': 'skip-f'}), ('SKIP.R', {'description': 'I<<', 'name': 'skip-r'}), ('FF', {'description': 'FF >>', 'name': 'ff'}), ('REW', {'description': 'REW <<', 'name': 'rew'}), ('REPEAT', {'description': 'REPEAT', 'name': 'repeat'}), ('SHUFFLE', {'description': 'SHUFFLE', 'name': 'shuffle'}), ('PRSUP', {'description': 'PRESET UP', 'name': 'prsup'}), ('PRSDN', {'description': 'PRESET DOWN', 'name': 'prsdn'}), ('0', {'description': '0.0', 'name': '0'}), ('1', {'description': '1.0', 'name': '1'}), ('2', {'description': '2.0', 'name': '2'}), ('3', {'description': '3.0', 'name': '3'}), ('4', {'description': '4.0', 'name': '4'}), ('5', {'description': '5.0', 'name': '5'}), ('6', {'description': '6.0', 'name': '6'}), ('7', {'description': '7.0', 'name': '7'}), ('8', {'description': '8.0', 'name': '8'}), ('9', {'description': '9.0', 'name': '9'}), ('10', {'description': '10/+10/Direct Tuning', 'name': '10'}), ('MODE', {'description': 'MODE', 'name': 'mode'})])}), 
('IAT', {'description': 'iPod Artist Name Info (Universal Port Dock Only)', 'name': 'ipod-artist-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'iPod Artist Name (variable-length, 64 letters max ASCII letter only)', 'name': None}), ('QSTN', {'description': 'gets iPod Artist Name', 'name': 'query'})])}), ('IAL', {'description': 'iPod Album Name Info (Universal Port Dock Only)', 'name': 'ipod-album-name-info', 'values': OrderedDict([('nnnnnnn', {'description': 'iPod Album Name (variable-length, 64 letters max ASCII letter only)', 'name': None}), ('QSTN', {'description': 'gets iPod Album Name', 'name': 'query'})])}), ('ITI', {'description': 'iPod Title Name (Universal Port Dock Only)', 'name': 'ipod-title-name', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'iPod Title Name (variable-length, 64 letters max ASCII letter only)', 'name': None}), ('QSTN', {'description': 'gets iPod Title Name', 'name': 'query'})])}), ('ITM', {'description': 'iPod Time Info (Universal Port Dock Only)', 'name': 'ipod-time-info', 'values': OrderedDict([('mm:ss/mm:ss', {'description': 'iPod Time Info (Elapsed time/Track Time Max 99:59)', 'name': 'mm-ss-mm-ss'}), ('QSTN', {'description': 'gets iPod Time Info', 'name': 'query'})])}), ('ITR', {'description': 'iPod Track Info (Universal Port Dock Only)', 'name': 'ipod-track-info', 'values': OrderedDict([('cccc/tttt', {'description': 'iPod Track Info (Current Track/Toral Track Max 9999)', 'name': 'cccc-tttt'}), ('QSTN', {'description': 'gets iPod Time Info', 'name': 'query'})])}), ('IST', {'description': 'iPod Play Status (Universal Port Dock Only)', 'name': 'ipod-play-status', 'values': OrderedDict([('prs', {'description': 'iPod Play Status (3 letters)\np -> Play Status "S" STOP, "P" Play, "p" Pause, "F" FF, "R" FR\nr -> Repeat Status "-" no Repeat, "R" All Repeat, "1" Repeat 1,\ns -> Shuffle Status "-" no Shuffle, "S" Shuffle, "A" Album Shuffle', 'name': 'prs'}), ('QSTN', {'description': 'gets the iPod Play Status', 
'name': 'query'})])}), ('ILS', {'description': 'iPod List Info (Universal Port Dock Extend Mode Only)', 'name': 'ipod-list-info', 'values': OrderedDict([('tlpnnnnnnnnnn', {'description': 'iPod List Info\nt ->Information Type (A : ASCII letter, C : Cursor Info)\nwhen t = A,\n l ->Line Info (0-9 : 1st to 10th Line)\n nnnnnnnnn:Listed data (variable-length, 64 letters max ASCII letter only)\n p ->Property (- : no)\nwhen t = C,\n l ->Cursor Position (0-9 : 1st to 10th Line, - : No Cursor)\n p ->Update Type (P : Page Infomation Update ( Page Clear or Disable List Info) , C : Cursor Position Update)', 'name': None})])}), ('IMD', {'description': 'iPod Mode Change (Universal Port Dock Only)', 'name': 'ipod-mode-change', 'values': OrderedDict([('STD', {'description': 'Standerd Mode', 'name': 'std'}), ('EXT', {'description': 'Extend Mode(If available)', 'name': 'ext'}), ('VDC', {'description': 'Video Contents in Extended Mode', 'name': 'vdc'}), ('QSTN', {'description': 'gets iPod Mode Status', 'name': 'query'})])}), ('UTN', {'description': 'Tuning Command (Universal Port Dock Only)', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz)', 'name': None}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('UPR', {'description': 'DAB Preset Command (Universal Port Dock Only)', 'name': 'dab-preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. 
Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('UPM', {'description': 'Preset Memory Command (Universal Port Dock Only)', 'name': 'preset-memory', 'values': OrderedDict([((1, 40), {'description': 'Memory Preset No. 1 - 40 ( In hexadecimal representation)', 'name': None})])}), ('UHP', {'description': 'HD Radio Channel Program Command (Universal Port Dock Only)', 'name': 'hd-radio-channel-program', 'values': OrderedDict([((1, 8), {'description': 'sets directly HD Radio Channel Program', 'name': 'directly'}), ('QSTN', {'description': 'gets HD Radio Channel Program', 'name': 'query'})])}), ('UHB', {'description': 'HD Radio Blend Mode Command (Universal Port Dock Only)', 'name': 'hd-radio-blend-mode', 'values': OrderedDict([('00', {'description': 'sets HD Radio Blend Mode "Auto"', 'name': 'auto'}), ('01', {'description': 'sets HD Radio Blend Mode "Analog"', 'name': 'analog'}), ('QSTN', {'description': 'gets the HD Radio Blend Mode Status', 'name': 'query'})])}), ('UHA', {'description': 'HD Radio Artist Name Info (Universal Port Dock Only)', 'name': 'hd-radio-artist-name-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'HD Radio Artist Name (variable-length, 64 letters max)', 'name': None}), ('QSTN', {'description': 'gets HD Radio Artist Name', 'name': 'query'})])}), ('UHC', {'description': 'HD Radio Channel Name Info (Universal Port Dock Only)', 'name': 'hd-radio-channel-name-info', 'values': OrderedDict([('nnnnnnn', {'description': 'HD Radio Channel Name (Station Name) (7lettters)', 'name': None}), ('QSTN', {'description': 'gets HD Radio Channel Name', 'name': 'query'})])}), ('UHT', {'description': 'HD Radio Title Info (Universal Port Dock Only)', 'name': 'hd-radio-title-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'HD Radio Title (variable-length, 64 letters max)', 'name': None}), ('QSTN', {'description': 'gets HD Radio Title', 'name': 'query'})])}), ('UHD', {'description': 'HD 
Radio Detail Info (Universal Port Dock Only)', 'name': 'hd-radio-detail-info', 'values': OrderedDict([('nnnnnnnnnn', {'description': 'HD Radio Title', 'name': None}), ('QSTN', {'description': 'gets HD Radio Title', 'name': 'query'})])}), ('UHS', {'description': 'HD Radio Tuner Status (Universal Port Dock Only)', 'name': 'hd-radio-tuner-status', 'values': OrderedDict([('mmnnoo', {'description': 'HD Radio Tuner Status (3 bytes)\nmm -> "00" not HD, "01" HD\nnn -> current Program "01"-"08"\noo -> receivable Program (8 bits are represented in hexadecimal notation. Each bit shows receivable or not.)', 'name': 'mmnnoo'}), ('QSTN', {'description': 'gets the HD Radio Tuner Status', 'name': 'query'})])}), ('UDS', {'description': 'DAB Sation Name (Universal Port Dock Only)', 'name': 'dab-sation-name', 'values': OrderedDict([('nnnnnnnnn', {'description': 'Sation Name (9 letters)', 'name': None}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('UDD', {'description': 'DAB Display Info (Universal Port Dock Only)', 'name': 'dab-display-info', 'values': OrderedDict([('PT:nnnnnnnn', {'description': 'DAB Program Type (8 letters)', 'name': None}), ('AT:mmmkbps/nnnnnn', {'description': 'DAB Bitrate & Audio Type (m:Bitrate xxxkbps,n:Audio Type Stereo/Mono)', 'name': None}), ('MN:nnnnnnnnn', {'description': 'DAB Multiplex Name (9 letters)', 'name': None}), ('MF:mmm/nnnn.nnMHz', {'description': 'DAB Multiplex Band ID(mmm) & Freq(nnnn.nnMHz) Info', 'name': None}), ('PT', {'description': 'gets & display DAB Program Info', 'name': 'pt'}), ('AT', {'description': 'gets & display DAB Bitrate & Audio Type', 'name': 'at'}), ('MN', {'description': 'gets & display DAB Multicast Name', 'name': 'mn'}), ('MF', {'description': 'gets & display DAB Multicast Band & Freq Info', 'name': 'mf'}), ('UP', {'description': 'gets & dispaly DAB Infomation Wrap-Around Up', 'name': 'up'})])})])), ('zone2', OrderedDict([('ZPW', {'description': 'Zone2 Power Command', 'name': 'power', 
'values': OrderedDict([('00', {'description': 'sets Zone2 Standby', 'name': 'standby'}), ('01', {'description': 'sets Zone2 On', 'name': 'on'}), ('QSTN', {'description': 'gets the Zone2 Power Status', 'name': 'query'})])}), ('ZMT', {'description': 'Zone2 Muting Command', 'name': 'muting', 'values': OrderedDict([('00', {'description': 'sets Zone2 Muting Off', 'name': 'off'}), ('01', {'description': 'sets Zone2 Muting On', 'name': 'on'}), ('TG', {'description': 'sets Zone2 Muting Wrap-Around', 'name': 'toggle'}), ('QSTN', {'description': 'gets the Zone2 Muting Status', 'name': 'query'})])}), ('ZVL', {'description': 'Zone2 Volume Command', 'name': 'volume', 'values': OrderedDict([((0, 100), {'description': u'Volume Level 0 \u2013 100 ( In hexadecimal representation)', 'name': None}), ((0, 80), {'description': u'Volume Level 0 \u2013 80 ( In hexadecimal representation)', 'name': None}), ('UP', {'description': 'sets Volume Level Up', 'name': 'level-up'}), ('DOWN', {'description': 'sets Volume Level Down', 'name': 'level-down'}), ('QSTN', {'description': 'gets the Volume Level', 'name': 'query'})])}), ('ZTN', {'description': 'Zone2 Tone Command', 'name': 'tone', 'values': OrderedDict([('B{xx}', {'description': 'sets Zone2 Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'bass-xx-is-a-00-a-10-0-10-2-step'}), ('T{xx}', {'description': 'sets Zone2 Treble (xx is "-A"..."00"..."+A"[-10...0...+10 2 step]', 'name': 'treble-xx-is-a-00-a-10-0-10-2-step'}), ('BUP', {'description': 'sets Bass Up (2 Step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Bass Down (2 Step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Treble Up (2 Step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Treble Down (2 Step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Zone2 Tone ("BxxTxx")', 'name': 'query'})])}), ('ZBL', {'description': 'Zone2 Balance Command', 'name': 'balance', 'values': OrderedDict([('{xx}', {'description': 'sets Zone2 Balance (xx 
is "-A"..."00"..."+A"[L+10...0...R+10 2 step]', 'name': 'xx-is-a-00-a-l-10-0-r-10-2-step'}), ('UP', {'description': 'sets Balance Up (to R 2 Step)', 'name': 'up'}), ('DOWN', {'description': 'sets Balance Down (to L 2 Step)', 'name': 'down'}), ('QSTN', {'description': 'gets Zone2 Balance', 'name': 'query'})])}), ('SLZ', {'description': 'ZONE2 Selector Command', 'name': 'selector', 'values': OrderedDict([('00', {'description': 'sets VIDEO1, VCR/DVR', 'name': ('video1', 'vcr', 'dvr')}), ('01', {'description': 'sets VIDEO2, CBL/SAT', 'name': ('video2', 'cbl', 'sat')}), ('02', {'description': 'sets VIDEO3, GAME/TV, GAME', 'name': ('video3', 'game', 'tv', 'game')}), ('03', {'description': 'sets VIDEO4, AUX1(AUX)', 'name': ('video4', 'aux1')}), ('04', {'description': 'sets VIDEO5, AUX2', 'name': ('video5', 'aux2')}), ('05', {'description': 'sets VIDEO6, PC', 'name': ('video6', 'pc')}), ('06', {'description': 'sets VIDEO7', 'name': 'video7'}), ('07', {'description': 'sets Hidden1', 'name': 'hidden1'}), ('08', {'description': 'sets Hidden2', 'name': 'hidden2'}), ('09', {'description': 'sets Hidden3', 'name': 'hidden3'}), ('10', {'description': 'sets DVD, BD/DVD', 'name': ('dvd', 'bd', 'dvd')}), ('20', {'description': 'sets TAPE(1)', 'name': 'tape'}), ('21', {'description': 'sets TAPE2', 'name': 'tape2'}), ('22', {'description': 'sets PHONO', 'name': 'phono'}), ('23', {'description': 'sets CD, TV/CD', 'name': ('cd', 'tv', 'cd')}), ('24', {'description': 'sets FM', 'name': 'fm'}), ('25', {'description': 'sets AM', 'name': 'am'}), ('26', {'description': 'sets TUNER', 'name': 'tuner'}), ('27', {'description': 'sets MUSIC SERVER, P4S, DLNA', 'name': ('music-server', 'p4s', 'dlna')}), ('28', {'description': 'sets INTERNET RADIO, iRadio Favorite', 'name': ('internet-radio', 'iradio-favorite')}), ('29', {'description': 'sets USB/USB(Front)', 'name': ('usb', 'usb')}), ('2A', {'description': 'sets USB(Rear)', 'name': 'usb'}), ('2B', {'description': 'sets NETWORK, NET', 'name': 
('network', 'net')}), ('2C', {'description': 'sets USB(toggle)', 'name': 'usb'}), ('40', {'description': 'sets Universal PORT', 'name': 'universal-port'}), ('30', {'description': 'sets MULTI CH', 'name': 'multi-ch'}), ('31', {'description': 'sets XM', 'name': 'xm'}), ('32', {'description': 'sets SIRIUS', 'name': 'sirius'}), ('7F', {'description': 'sets OFF', 'name': 'off'}), ('80', {'description': 'sets SOURCE', 'name': 'source'}), ('UP', {'description': 'sets Selector Position Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Selector Position Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Selector Position', 'name': 'query'})])}), ('TUN', {'description': 'Tuning Command', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz / XM nnnnn ch)', 'name': None}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('TUZ', {'description': 'Tuning Command', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz / SR nnnnn ch)', 'name': None}), ('DIRECT', {'description': 'starts/restarts Direct Tuning Mode', 'name': 'direct'}), ('0', {'description': 'sets 0 in Direct Tuning Mode', 'name': '0-in-direct-mode'}), ('1', {'description': 'sets 1 in Direct Tuning Mode', 'name': '1-in-direct-mode'}), ('2', {'description': 'sets 2 in Direct Tuning Mode', 'name': '2-in-direct-mode'}), ('3', {'description': 'sets 3 in Direct Tuning Mode', 'name': '3-in-direct-mode'}), ('4', {'description': 'sets 4 in Direct Tuning Mode', 'name': '4-in-direct-mode'}), ('5', {'description': 'sets 5 in Direct Tuning Mode', 'name': '5-in-direct-mode'}), ('6', {'description': 'sets 6 in Direct Tuning Mode', 'name': 
'6-in-direct-mode'}), ('7', {'description': 'sets 7 in Direct Tuning Mode', 'name': '7-in-direct-mode'}), ('8', {'description': 'sets 8 in Direct Tuning Mode', 'name': '8-in-direct-mode'}), ('9', {'description': 'sets 9 in Direct Tuning Mode', 'name': '9-in-direct-mode'}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('PRS', {'description': 'Preset Command', 'name': 'preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('PRZ', {'description': 'Preset Command', 'name': 'preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. 
Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('NTC', {'description': 'Net-Tune/Network Operation Command(Net-Tune Model Only)', 'name': 'net-tune-network', 'values': OrderedDict([('PLAYz', {'description': 'PLAY KEY', 'name': 'playz'}), ('STOPz', {'description': 'STOP KEY', 'name': 'stopz'}), ('PAUSEz', {'description': 'PAUSE KEY', 'name': 'pausez'}), ('TRUPz', {'description': 'TRACK UP KEY', 'name': 'trupz'}), ('TRDNz', {'description': 'TRACK DOWN KEY', 'name': 'trdnz'})])}), ('NTZ', {'description': 'Net-Tune/Network Operation Command(Network Model Only)', 'name': 'net-tune-network', 'values': OrderedDict([('PLAY', {'description': 'PLAY KEY', 'name': 'play'}), ('STOP', {'description': 'STOP KEY', 'name': 'stop'}), ('PAUSE', {'description': 'PAUSE KEY', 'name': 'pause'}), ('TRUP', {'description': 'TRACK UP KEY', 'name': 'trup'}), ('TRDN', {'description': 'TRACK DOWN KEY', 'name': 'trdn'}), ('CHUP', {'description': 'CH UP(for iRadio)', 'name': 'chup'}), ('CHDN', {'description': 'CH DOWN(for iRadio)', 'name': 'chdn'}), ('FF', {'description': 'FF KEY (CONTINUOUS*) (for iPod 1wire)', 'name': 'ff'}), ('REW', {'description': 'REW KEY (CONTINUOUS*) (for iPod 1wire)', 'name': 'rew'}), ('REPEAT', {'description': 'REPEAT KEY(for iPod 1wire)', 'name': 'repeat'}), ('RANDOM', {'description': 'RANDOM KEY(for iPod 1wire)', 'name': 'random'}), ('DISPLAY', {'description': 'DISPLAY KEY(for iPod 1wire)', 'name': 'display'}), ('RIGHT', {'description': 'RIGHT KEY(for iPod 1wire)', 'name': 'right'}), ('LEFT', {'description': 'LEFT KEY(for iPod 1wire)', 'name': 'left'}), ('UP', {'description': 'UP KEY(for iPod 1wire)', 'name': 'up'}), ('DOWN', {'description': 'DOWN KEY(for iPod 1wire)', 'name': 'down'}), ('SELECT', {'description': 'SELECT KEY(for iPod 1wire)', 'name': 'select'}), ('RETURN', {'description': 'RETURN KEY(for iPod 1wire)', 'name': 'return'})])}), ('NPZ', {'description': 'Internet Radio Preset Command (Network 
Model Only)', 'name': 'internet-radio-preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'})])}), ('LMZ', {'description': 'Listening Mode Command', 'name': 'listening-mode', 'values': OrderedDict([('00', {'description': 'sets STEREO', 'name': 'stereo'}), ('01', {'description': 'sets DIRECT', 'name': 'direct'}), ('0F', {'description': 'sets MONO', 'name': 'mono'}), ('12', {'description': 'sets MULTIPLEX', 'name': 'multiplex'}), ('87', {'description': 'sets DVS(Pl2)', 'name': 'dvs'}), ('88', {'description': 'sets DVS(NEO6)', 'name': 'dvs'})])}), ('LTZ', {'description': 'Late Night Command', 'name': 'late-night', 'values': OrderedDict([('00', {'description': 'sets Late Night Off', 'name': 'off'}), ('01', {'description': 'sets Late Night Low', 'name': 'low'}), ('02', {'description': 'sets Late Night High', 'name': 'high'}), ('UP', {'description': 'sets Late Night State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Late Night Level', 'name': 'query'})])}), ('RAZ', {'description': 'Re-EQ/Academy Filter Command', 'name': 're-eq-academy-filter', 'values': OrderedDict([('00', {'description': 'sets Both Off', 'name': 'both-off'}), ('01', {'description': 'sets Re-EQ On', 'name': 'on'}), ('02', {'description': 'sets Academy On', 'name': 'on'}), ('UP', {'description': 'sets Re-EQ/Academy State Wrap-Around Up', 'name': 'up'}), ('QSTN', {'description': 'gets The Re-EQ/Academy State', 'name': 'query'})])})])), ('zone3', OrderedDict([('PW3', {'description': 'Zone3 Power Command', 'name': 'power', 'values': OrderedDict([('00', {'description': 'sets Zone3 Standby', 'name': 'standby'}), ('01', {'description': 'sets Zone3 On', 'name': 'on'}), ('QSTN', {'description': 'gets the Zone3 Power Status', 'name': 'query'})])}), ('MT3', {'description': 'Zone3 Muting Command', 'name': 'muting', 'values': OrderedDict([('00', {'description': 'sets Zone3 Muting Off', 'name': 'off'}), ('01', 
{'description': 'sets Zone3 Muting On', 'name': 'on'}), ('TG', {'description': 'sets Zone3 Muting Wrap-Around', 'name': 'toggle'}), ('QSTN', {'description': 'gets the Zone3 Muting Status', 'name': 'query'})])}), ('VL3', {'description': 'Zone3 Volume Command', 'name': 'volume', 'values': OrderedDict([((0, 100), {'description': u'Volume Level 0 \u2013 100 ( In hexadecimal representation)', 'name': None}), ((0, 80), {'description': u'Volume Level 0 \u2013 80 ( In hexadecimal representation)', 'name': None}), ('UP', {'description': 'sets Volume Level Up', 'name': 'level-up'}), ('DOWN', {'description': 'sets Volume Level Down', 'name': 'level-down'}), ('QSTN', {'description': 'gets the Volume Level', 'name': 'query'})])}), ('TN3', {'description': 'Zone3 Tone Command', 'name': 'tone', 'values': OrderedDict([('B{xx}', {'description': 'Zone3 Bass (xx is "-A"..."00"..."+A"[-10...0...+10 2 step])', 'name': 'b-xx'}), ('T{xx}', {'description': 'Zone3 Treble (xx is "-A"..."00"..."+A"[-10...0...+10 2 step])', 'name': 't-xx'}), ('BUP', {'description': 'sets Bass Up (2 Step)', 'name': 'bass-up'}), ('BDOWN', {'description': 'sets Bass Down (2 Step)', 'name': 'bass-down'}), ('TUP', {'description': 'sets Treble Up (2 Step)', 'name': 'treble-up'}), ('TDOWN', {'description': 'sets Treble Down (2 Step)', 'name': 'treble-down'}), ('QSTN', {'description': 'gets Zone3 Tone ("BxxTxx")', 'name': 'query'})])}), ('BL3', {'description': 'Zone3 Balance Command', 'name': 'balance', 'values': OrderedDict([('{xx}', {'description': 'Zone3 Balance (xx is "-A"..."00"..."+A"[L+10...0...R+10 2 step])', 'name': 'xx'}), ('UP', {'description': 'sets Balance Up (to R 2 Step)', 'name': 'up'}), ('DOWN', {'description': 'sets Balance Down (to L 2 Step)', 'name': 'down'}), ('QSTN', {'description': 'gets Zone3 Balance', 'name': 'query'})])}), ('SL3', {'description': 'ZONE3 Selector Command', 'name': 'selector', 'values': OrderedDict([('00', {'description': 'sets VIDEO1, VCR/DVR', 'name': ('video1', 'vcr', 
'dvr')}), ('01', {'description': 'sets VIDEO2, CBL/SAT', 'name': ('video2', 'cbl', 'sat')}), ('02', {'description': 'sets VIDEO3, GAME/TV, GAME', 'name': ('video3', 'game', 'tv', 'game')}), ('03', {'description': 'sets VIDEO4, AUX1(AUX)', 'name': ('video4', 'aux1')}), ('04', {'description': 'sets VIDEO5, AUX2', 'name': ('video5', 'aux2')}), ('05', {'description': 'sets VIDEO6, PC', 'name': ('video6', 'pc')}), ('06', {'description': 'sets VIDEO7', 'name': 'video7'}), ('07', {'description': 'sets Hidden1', 'name': 'hidden1'}), ('08', {'description': 'sets Hidden2', 'name': 'hidden2'}), ('09', {'description': 'sets Hidden3', 'name': 'hidden3'}), ('10', {'description': 'sets DVD', 'name': 'dvd'}), ('20', {'description': 'sets TAPE(1)', 'name': 'tape'}), ('21', {'description': 'sets TAPE2', 'name': 'tape2'}), ('22', {'description': 'sets PHONO', 'name': 'phono'}), ('23', {'description': 'sets CD, TV/CD', 'name': ('cd', 'tv', 'cd')}), ('24', {'description': 'sets FM', 'name': 'fm'}), ('25', {'description': 'sets AM', 'name': 'am'}), ('26', {'description': 'sets TUNER', 'name': 'tuner'}), ('27', {'description': 'sets MUSIC SERVER, P4S, DLNA', 'name': ('music-server', 'p4s', 'dlna')}), ('28', {'description': 'sets INTERNET RADIO, iRadio Favorite', 'name': ('internet-radio', 'iradio-favorite')}), ('29', {'description': 'sets USB/USB(Front)', 'name': ('usb', 'usb')}), ('2A', {'description': 'sets USB(Rear)', 'name': 'usb'}), ('2B', {'description': 'sets NETWORK, NET', 'name': ('network', 'net')}), ('2C', {'description': 'sets USB(toggle)', 'name': 'usb'}), ('40', {'description': 'sets Universal PORT', 'name': 'universal-port'}), ('30', {'description': 'sets MULTI CH', 'name': 'multi-ch'}), ('31', {'description': 'sets XM', 'name': 'xm'}), ('32', {'description': 'sets SIRIUS', 'name': 'sirius'}), ('80', {'description': 'sets SOURCE', 'name': 'source'}), ('UP', {'description': 'sets Selector Position Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Selector 
Position Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Selector Position', 'name': 'query'})])}), ('TUN', {'description': 'Tuning Command', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz)', 'name': None}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('TU3', {'description': 'Tuning Command', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz / SR nnnnn ch)', 'name': None}), ('DIRECT', {'description': 'starts/restarts Direct Tuning Mode', 'name': 'direct'}), ('0', {'description': 'sets 0 in Direct Tuning Mode', 'name': '0-in-direct-mode'}), ('1', {'description': 'sets 1 in Direct Tuning Mode', 'name': '1-in-direct-mode'}), ('2', {'description': 'sets 2 in Direct Tuning Mode', 'name': '2-in-direct-mode'}), ('3', {'description': 'sets 3 in Direct Tuning Mode', 'name': '3-in-direct-mode'}), ('4', {'description': 'sets 4 in Direct Tuning Mode', 'name': '4-in-direct-mode'}), ('5', {'description': 'sets 5 in Direct Tuning Mode', 'name': '5-in-direct-mode'}), ('6', {'description': 'sets 6 in Direct Tuning Mode', 'name': '6-in-direct-mode'}), ('7', {'description': 'sets 7 in Direct Tuning Mode', 'name': '7-in-direct-mode'}), ('8', {'description': 'sets 8 in Direct Tuning Mode', 'name': '8-in-direct-mode'}), ('9', {'description': 'sets 9 in Direct Tuning Mode', 'name': '9-in-direct-mode'}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('PRS', {'description': 'Preset Command', 'name': 'preset', 'values': 
OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('PR3', {'description': 'Preset Command', 'name': 'preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('NTC', {'description': 'Net-Tune/Network Operation Command(Net-Tune Model Only)', 'name': 'net-tune-network', 'values': OrderedDict([('PLAYz', {'description': 'PLAY KEY', 'name': 'playz'}), ('STOPz', {'description': 'STOP KEY', 'name': 'stopz'}), ('PAUSEz', {'description': 'PAUSE KEY', 'name': 'pausez'}), ('TRUPz', {'description': 'TRACK UP KEY', 'name': 'trupz'}), ('TRDNz', {'description': 'TRACK DOWN KEY', 'name': 'trdnz'})])}), ('NT3', {'description': 'Net-Tune/Network Operation Command(Network Model Only)', 'name': 'net-tune-network', 'values': OrderedDict([('PLAY', {'description': 'PLAY KEY', 'name': 'play'}), ('STOP', {'description': 'STOP KEY', 'name': 'stop'}), ('PAUSE', {'description': 'PAUSE KEY', 'name': 'pause'}), ('TRUP', {'description': 'TRACK UP KEY', 'name': 'trup'}), ('TRDN', {'description': 'TRACK DOWN KEY', 'name': 'trdn'}), ('CHUP', {'description': 'CH UP(for iRadio)', 'name': 'chup'}), ('CHDN', {'description': 'CH DOWNP(for iRadio)', 'name': 'chdn'}), ('FF', {'description': 'FF KEY (CONTINUOUS*) 
(for iPod 1wire)', 'name': 'ff'}), ('REW', {'description': 'REW KEY (CONTINUOUS*) (for iPod 1wire)', 'name': 'rew'}), ('REPEAT', {'description': 'REPEAT KEY(for iPod 1wire)', 'name': 'repeat'}), ('RANDOM', {'description': 'RANDOM KEY(for iPod 1wire)', 'name': 'random'}), ('DISPLAY', {'description': 'DISPLAY KEY(for iPod 1wire)', 'name': 'display'}), ('RIGHT', {'description': 'RIGHT KEY(for iPod 1wire)', 'name': 'right'}), ('LEFT', {'description': 'LEFT KEY(for iPod 1wire)', 'name': 'left'}), ('UP', {'description': 'UP KEY(for iPod 1wire)', 'name': 'up'}), ('DOWN', {'description': 'DOWN KEY(for iPod 1wire)', 'name': 'down'}), ('SELECT', {'description': 'SELECT KEY(for iPod 1wire)', 'name': 'select'}), ('RETURN', {'description': 'RETURN KEY(for iPod 1wire)', 'name': 'return'})])}), ('NP3', {'description': 'Internet Radio Preset Command (Network Model Only)', 'name': 'internet-radio-preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'})])})])), ('zone4', OrderedDict([('PW4', {'description': 'Zone4 Power Command', 'name': 'power', 'values': OrderedDict([('00', {'description': 'sets Zone4 Standby', 'name': 'standby'}), ('01', {'description': 'sets Zone4 On', 'name': 'on'}), ('QSTN', {'description': 'gets the Zone4 Power Status', 'name': 'query'})])}), ('MT4', {'description': 'Zone4 Muting Command', 'name': 'muting', 'values': OrderedDict([('00', {'description': 'sets Zone4 Muting Off', 'name': 'off'}), ('01', {'description': 'sets Zone4 Muting On', 'name': 'on'}), ('TG', {'description': 'sets Zone4 Muting Wrap-Around', 'name': 'toggle'}), ('QSTN', {'description': 'gets the Zone4 Muting Status', 'name': 'query'})])}), ('VL4', {'description': 'Zone4 Volume Command', 'name': 'volume', 'values': OrderedDict([((0, 100), {'description': u'Volume Level 0 \u2013 100 ( In hexadecimal representation)', 'name': None}), ((0, 80), {'description': u'Volume Level 0 \u2013 80 ( In hexadecimal 
representation)', 'name': None}), ('UP', {'description': 'sets Volume Level Up', 'name': 'level-up'}), ('DOWN', {'description': 'sets Volume Level Down', 'name': 'level-down'}), ('QSTN', {'description': 'gets the Volume Level', 'name': 'query'})])}), ('SL4', {'description': 'ZONE4 Selector Command', 'name': 'selector', 'values': OrderedDict([('00', {'description': 'sets VIDEO1, VCR/DVR', 'name': ('video1', 'vcr', 'dvr')}), ('01', {'description': 'sets VIDEO2, CBL/SAT', 'name': ('video2', 'cbl', 'sat')}), ('02', {'description': 'sets VIDEO3, GAME/TV, GAME', 'name': ('video3', 'game', 'tv', 'game')}), ('03', {'description': 'sets VIDEO4, AUX1(AUX)', 'name': ('video4', 'aux1')}), ('04', {'description': 'sets VIDEO5, AUX2', 'name': ('video5', 'aux2')}), ('05', {'description': 'sets VIDEO6', 'name': 'video6'}), ('06', {'description': 'sets VIDEO7', 'name': 'video7'}), ('07', {'description': 'sets Hidden1', 'name': 'hidden1'}), ('08', {'description': 'sets Hidden2', 'name': 'hidden2'}), ('09', {'description': 'sets Hidden3', 'name': 'hidden3'}), ('10', {'description': 'sets DVD', 'name': 'dvd'}), ('20', {'description': 'sets TAPE(1), TV/TAPE', 'name': ('tape-1', 'tv', 'tape')}), ('21', {'description': 'sets TAPE2', 'name': 'tape2'}), ('22', {'description': 'sets PHONO', 'name': 'phono'}), ('23', {'description': 'sets CD, TV/CD', 'name': ('cd', 'tv', 'cd')}), ('24', {'description': 'sets FM', 'name': 'fm'}), ('25', {'description': 'sets AM', 'name': 'am'}), ('26', {'description': 'sets TUNER', 'name': 'tuner'}), ('27', {'description': 'sets MUSIC SERVER, P4S, DLNA', 'name': ('music-server', 'p4s', 'dlna')}), ('28', {'description': 'sets INTERNET RADIO, iRadio Favorite', 'name': ('internet-radio', 'iradio-favorite')}), ('29', {'description': 'sets USB/USB(Front)', 'name': ('usb', 'usb')}), ('2A', {'description': 'sets USB(Rear)', 'name': 'usb'}), ('2B', {'description': 'sets NETWORK, NET', 'name': ('network', 'net')}), ('2C', {'description': 'sets USB(toggle)', 'name': 
'usb'}), ('40', {'description': 'sets Universal PORT', 'name': 'universal-port'}), ('30', {'description': 'sets MULTI CH', 'name': 'multi-ch'}), ('31', {'description': 'sets XM', 'name': 'xm'}), ('32', {'description': 'sets SIRIUS', 'name': 'sirius'}), ('80', {'description': 'sets SOURCE', 'name': 'source'}), ('UP', {'description': 'sets Selector Position Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Selector Position Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Selector Position', 'name': 'query'})])}), ('TUN', {'description': 'Tuning Command', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz)', 'name': None}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('TU4', {'description': 'Tuning Command', 'name': 'tuning', 'values': OrderedDict([('nnnnn', {'description': 'sets Directly Tuning Frequency (FM nnn.nn MHz / AM nnnnn kHz)', 'name': None}), ('DIRECT', {'description': 'starts/restarts Direct Tuning Mode', 'name': 'direct'}), ('0', {'description': 'sets 0 in Direct Tuning Mode', 'name': '0-in-direct-mode'}), ('1', {'description': 'sets 1 in Direct Tuning Mode', 'name': '1-in-direct-mode'}), ('2', {'description': 'sets 2 in Direct Tuning Mode', 'name': '2-in-direct-mode'}), ('3', {'description': 'sets 3 in Direct Tuning Mode', 'name': '3-in-direct-mode'}), ('4', {'description': 'sets 4 in Direct Tuning Mode', 'name': '4-in-direct-mode'}), ('5', {'description': 'sets 5 in Direct Tuning Mode', 'name': '5-in-direct-mode'}), ('6', {'description': 'sets 6 in Direct Tuning Mode', 'name': '6-in-direct-mode'}), ('7', {'description': 'sets 7 in Direct Tuning Mode', 'name': '7-in-direct-mode'}), ('8', {'description': 'sets 8 in Direct Tuning Mode', 'name': 
'8-in-direct-mode'}), ('9', {'description': 'sets 9 in Direct Tuning Mode', 'name': '9-in-direct-mode'}), ('UP', {'description': 'sets Tuning Frequency Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Tuning Frequency Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Tuning Frequency', 'name': 'query'})])}), ('PRS', {'description': 'Preset Command', 'name': 'preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('PR4', {'description': 'Preset Command', 'name': 'preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'}), ((1, 30), {'description': 'sets Preset No. 1 - 30 ( In hexadecimal representation)', 'name': 'no-1-30'}), ('UP', {'description': 'sets Preset No. Wrap-Around Up', 'name': 'up'}), ('DOWN', {'description': 'sets Preset No. 
Wrap-Around Down', 'name': 'down'}), ('QSTN', {'description': 'gets The Preset No.', 'name': 'query'})])}), ('NTC', {'description': 'Net-Tune/Network Operation Command(Net-Tune Model Only)', 'name': 'net-tune-network', 'values': OrderedDict([('PLAYz', {'description': 'PLAY KEY', 'name': 'playz'}), ('STOPz', {'description': 'STOP KEY', 'name': 'stopz'}), ('PAUSEz', {'description': 'PAUSE KEY', 'name': 'pausez'}), ('TRUPz', {'description': 'TRACK UP KEY', 'name': 'trupz'}), ('TRDNz', {'description': 'TRACK DOWN KEY', 'name': 'trdnz'})])}), ('NT4', {'description': 'Net-Tune/Network Operation Command(Network Model Only)', 'name': 'net-tune-network', 'values': OrderedDict([('PLAY', {'description': 'PLAY KEY', 'name': 'play'}), ('STOP', {'description': 'STOP KEY', 'name': 'stop'}), ('PAUSE', {'description': 'PAUSE KEY', 'name': 'pause'}), ('TRUP', {'description': 'TRACK UP KEY', 'name': 'trup'}), ('TRDN', {'description': 'TRACK DOWN KEY', 'name': 'trdn'}), ('FF', {'description': 'FF KEY (CONTINUOUS*) (for iPod 1wire)', 'name': 'ff'}), ('REW', {'description': 'REW KEY (CONTINUOUS*) (for iPod 1wire)', 'name': 'rew'}), ('REPEAT', {'description': 'REPEAT KEY(for iPod 1wire)', 'name': 'repeat'}), ('RANDOM', {'description': 'RANDOM KEY(for iPod 1wire)', 'name': 'random'}), ('DISPLAY', {'description': 'DISPLAY KEY(for iPod 1wire)', 'name': 'display'}), ('RIGHT', {'description': 'RIGHT KEY(for iPod 1wire)', 'name': 'right'}), ('LEFT', {'description': 'LEFT KEY(for iPod 1wire)', 'name': 'left'}), ('UP', {'description': 'UP KEY(for iPod 1wire)', 'name': 'up'}), ('DOWN', {'description': 'DOWN KEY(for iPod 1wire)', 'name': 'down'}), ('SELECT', {'description': 'SELECT KEY(for iPod 1wire)', 'name': 'select'}), ('RETURN', {'description': 'RETURN KEY(for iPod 1wire)', 'name': 'return'})])}), ('NP4', {'description': 'Internet Radio Preset Command (Network Model Only)', 'name': 'internet-radio-preset', 'values': OrderedDict([((1, 40), {'description': 'sets Preset No. 
1 - 40 ( In hexadecimal representation)', 'name': 'no-1-40'})])})])), ('dock', OrderedDict([('CDS', {'description': 'Command for Docking Station via RI', 'name': 'command-for-docking-station-via-ri', 'values': OrderedDict([('PWRON', {'description': 'sets Dock On', 'name': 'on'}), ('PWROFF', {'description': 'sets Dock Standby', 'name': 'standby'}), ('PLY/RES', {'description': 'PLAY/RESUME Key', 'name': 'ply-res'}), ('STOP', {'description': 'STOP Key', 'name': 'stop'}), ('SKIP.F', {'description': 'TRACK UP Key', 'name': 'skip-f'}), ('SKIP.R', {'description': 'TRACK DOWN Key', 'name': 'skip-r'}), ('PAUSE', {'description': 'PAUSE Key', 'name': 'pause'}), ('PLY/PAU', {'description': 'PLAY/PAUSE Key', 'name': 'ply-pau'}), ('FF', {'description': 'FF Key', 'name': 'ff'}), ('REW', {'description': 'FR Key', 'name': 'rew'}), ('ALBUM+', {'description': 'ALBUM UP Key', 'name': 'album'}), ('ALBUM-', {'description': 'ALBUM DONW Key', 'name': 'album'}), ('PLIST+', {'description': 'PLAYLIST UP Key', 'name': 'plist'}), ('PLIST-', {'description': 'PLAYLIST DOWN Key', 'name': 'plist'}), ('CHAPT+', {'description': 'CHAPTER UP Key', 'name': 'chapt'}), ('CHAPT-', {'description': 'CHAPTER DOWN Key', 'name': 'chapt'}), ('RANDOM', {'description': 'SHUFFLE Key', 'name': 'random'}), ('REPEAT', {'description': 'REPEAT Key', 'name': 'repeat'}), ('MUTE', {'description': 'MUTE Key', 'name': 'mute'}), ('BLIGHT', {'description': 'BACKLIGHT Key', 'name': 'blight'}), ('MENU', {'description': 'MENU Key', 'name': 'menu'}), ('ENTER', {'description': 'SELECT Key', 'name': 'enter'}), ('UP', {'description': 'CUSOR UP Key', 'name': 'up'}), ('DOWN', {'description': 'CURSOR DOWN Key', 'name': 'down'})])})]))]) ZONE_MAPPINGS = {None: 'main', '': 'main'} COMMAND_MAPPINGS = {'dock': {'command-for-docking-station-via-ri': 'CDS'}, 'main': {'12v-trigger-a': 'TGA', '12v-trigger-b': 'TGB', '12v-trigger-c': 'TGC', 'audio-infomation': 'IFA', 'audio-muting': 'AMT', 'audio-selector': 'SLA', 
'audyssey-2eq-multeq-multeq-xt': 'ADY', 'audyssey-dynamic-eq': 'ADQ', 'audyssey-dynamic-volume': 'ADV', 'cd-player': 'CCD', 'cd-r-recorder': 'CCR', 'center-temporary-level': 'CTL', 'cinema-filter': 'RAS', 'dab-display-info': 'UDD', 'dab-preset': 'UPR', 'dab-sation-name': 'UDS', 'dat-recorder': 'CDT', 'dimmer-level': 'DIM', 'display-mode': 'DIF', 'dolby-volume': 'DVL', 'dvd-player': 'CDV', 'graphics-equalizer': 'CEQ', 'hd-radio-artist-name-info': 'UHA', 'hd-radio-blend-mode': 'UHB', 'hd-radio-channel-name-info': 'UHC', 'hd-radio-channel-program': 'UHP', 'hd-radio-detail-info': 'UHD', 'hd-radio-title-info': 'UHT', 'hd-radio-tuner-status': 'UHS', 'hdmi-audio-out': 'HAO', 'hdmi-output-selector': 'HDO', 'input-selector': 'SLI', 'internet-radio-preset': 'NPR', 'ipod-album-name-info': 'IAL', 'ipod-artist-name-info': 'IAT', 'ipod-list-info': 'ILS', 'ipod-mode-change': 'IMD', 'ipod-play-status': 'IST', 'ipod-time-info': 'ITM', 'ipod-title-name': 'ITI', 'ipod-track-info': 'ITR', 'isf-mode': 'ISF', 'late-night': 'LTN', 'listening-mode': 'LMD', 'master-volume': 'MVL', 'md-recorder': 'CMD', 'memory-setup': 'MEM', 'monitor-out-resolution': 'RES', 'music-optimizer': 'MOT', 'net-keyboard': 'NKY', 'net-popup-message': 'NPU', 'net-service': 'NSV', 'net-usb-album-name-info': 'NAL', 'net-usb-artist-name-info': 'NAT', 'net-usb-jacket-art': 'NJA', 'net-usb-list-info': 'NLS', 'net-usb-play-status': 'NST', 'net-usb-time-info': 'NTM', 'net-usb-title-name': 'NTI', 'net-usb-track-info': 'NTR', 'network-usb': 'NTC', 'preset': 'PRS', 'preset-memory': 'UPM', 'pty-scan': 'PTS', 'rds-information': 'RDS', 'recout-selector': 'SLR', 'setup': 'OSD', 'sirius-artist-name-info': 'SAT', 'sirius-category': 'SCT', 'sirius-channel-name-info': 'SCN', 'sirius-channel-number': 'SCH', 'sirius-parental-lock': 'SLK', 'sirius-title-info': 'STI', 'sleep-set': 'SLP', 'speaker-a': 'SPA', 'speaker-b': 'SPB', 'speaker-layout': 'SPL', 'speaker-level-calibration': 'SLC', 'subwoofer-temporary-level': 'SWL', 
'system-power': 'PWR', 'tape1-a': 'CT1', 'tape2-b': 'CT2', 'tone-center': 'TCT', 'tone-front': 'TFR', 'tone-front-high': 'TFH', 'tone-front-wide': 'TFW', 'tone-subwoofer': 'TSW', 'tone-surround': 'TSR', 'tone-surround-back': 'TSB', 'tp-scan': 'TPS', 'tuning': 'UTN', 'universal-port': 'CPT', 'video-infomation': 'IFV', 'video-output-selector': 'VOS', 'video-picture-mode': 'VPM', 'video-wide-mode': 'VWM', 'volume': 'MVL', 'xm-artist-name-info': 'XAT', 'xm-category': 'XCT', 'xm-channel-name-info': 'XCN', 'xm-channel-number': 'XCH', 'xm-title-info': 'XTI'}, 'zone2': {'balance': 'ZBL', 'internet-radio-preset': 'NPZ', 'late-night': 'LTZ', 'listening-mode': 'LMZ', 'muting': 'ZMT', 'net-tune-network': 'NTZ', 'power': 'ZPW', 'preset': 'PRZ', 're-eq-academy-filter': 'RAZ', 'selector': 'SLZ', 'tone': 'ZTN', 'tuning': 'TUZ', 'volume': 'ZVL'}, 'zone3': {'balance': 'BL3', 'internet-radio-preset': 'NP3', 'muting': 'MT3', 'net-tune-network': 'NT3', 'power': 'PW3', 'preset': 'PR3', 'selector': 'SL3', 'tone': 'TN3', 'tuning': 'TU3', 'volume': 'VL3'}, 'zone4': {'internet-radio-preset': 'NP4', 'muting': 'MT4', 'net-tune-network': 'NT4', 'power': 'PW4', 'preset': 'PR4', 'selector': 'SL4', 'tuning': 'TU4', 'volume': 'VL4'}} VALUE_MAPPINGS = {'dock': {'CDS': {'album': 'ALBUM-', 'blight': 'BLIGHT', 'chapt': 'CHAPT-', 'down': 'DOWN', 'enter': 'ENTER', 'ff': 'FF', 'menu': 'MENU', 'mute': 'MUTE', 'on': 'PWRON', 'pause': 'PAUSE', 'plist': 'PLIST-', 'ply-pau': 'PLY/PAU', 'ply-res': 'PLY/RES', 'random': 'RANDOM', 'repeat': 'REPEAT', 'rew': 'REW', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'standby': 'PWROFF', 'stop': 'STOP', 'up': 'UP'}}, 'main': {'ADQ': {'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'ADV': {'heavy': '03', 'light': '01', 'medium': '02', 'off': '00', 'query': 'QSTN', 'up': 'UP'}, 'ADY': {'movie': '01', 'music': '02', 'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'AMT': {'off': '00', 'on': '01', 'query': 'QSTN', 'toggle': 'TG'}, 'CCD': {'0': '0', '1': '1', '10': 
'+10', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', 'clear': 'CLEAR', 'd-mode': 'D.MODE', 'd-skip': 'D.SKIP', 'disc-f': 'DISC.F', 'disc-r': 'DISC.R', 'disc1': 'DISC1', 'disc2': 'DISC2', 'disc3': 'DISC3', 'disc4': 'DISC4', 'disc5': 'DISC5', 'disc6': 'DISC6', 'disp': 'DISP', 'ff': 'FF', 'memory': 'MEMORY', 'op-cl': 'OP/CL', 'pause': 'PAUSE', 'play': 'PLAY', 'pon': 'PON', 'power': 'POWER', 'random': 'RANDOM', 'repeat': 'REPEAT', 'rew': 'REW', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'stby': 'STBY', 'stop': 'STOP', 'track': 'TRACK'}, 'CCR': {'1': '1', '10-0': '10/0', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', 'clear': 'CLEAR', 'disp': 'DISP', 'ff': 'FF', 'memory': 'MEMORY', 'op-cl': 'OP/CL', 'p-mode': 'P.MODE', 'pause': 'PAUSE', 'play': 'PLAY', 'power': 'POWER', 'random': 'RANDOM', 'rec': 'REC', 'repeat': 'REPEAT', 'rew': 'REW', 'scroll': 'SCROLL', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'stby': 'STBY', 'stop': 'STOP'}, 'CDT': {'ff': 'FF', 'play': 'PLAY', 'rc-pau': 'RC/PAU', 'rew': 'REW', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'stop': 'STOP'}, 'CDV': {'0': '0', '1': '1', '10': '10', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', 'abr': 'ABR', 'angle': 'ANGLE', 'asctg': 'ASCTG', 'audio': 'AUDIO', 'cdpcd': 'CDPCD', 'clear': 'CLEAR', 'conmem': 'CONMEM', 'disc-f': 'DISC.F', 'disc-r': 'DISC.R', 'disc1': 'DISC1', 'disc2': 'DISC2', 'disc3': 'DISC3', 'disc4': 'DISC4', 'disc5': 'DISC5', 'disc6': 'DISC6', 'disp': 'DISP', 'down': 'DOWN', 'enter': 'ENTER', 'ff': 'FF', 'folddn': 'FOLDDN', 'foldup': 'FOLDUP', 'funmem': 'FUNMEM', 'init': 'INIT', 'lastplay': 'LASTPLAY', 'left': 'LEFT', 'memory': 'MEMORY', 'menu': 'MENU', 'mspdn': 'MSPDN', 'mspup': 'MSPUP', 'op-cl': 'OP/CL', 'p-mode': 'P.MODE', 'pause': 'PAUSE', 'pct': 'PCT', 'play': 'PLAY', 'power': 'POWER', 'progre': 'PROGRE', 'pwroff': 'PWROFF', 'pwron': 'PWRON', 'random': 'RANDOM', 'repeat': 'REPEAT', 'return': 'RETURN', 'rew': 
'REW', 'right': 'RIGHT', 'rsctg': 'RSCTG', 'search': 'SEARCH', 'setup': 'SETUP', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'slow-f': 'SLOW.F', 'slow-r': 'SLOW.R', 'step-f': 'STEP.F', 'step-r': 'STEP.R', 'stop': 'STOP', 'subtitle': 'SUBTITLE', 'subton-off': 'SUBTON/OFF', 'topmenu': 'TOPMENU', 'up': 'UP', 'vdoff': 'VDOFF', 'zoomdn': 'ZOOMDN', 'zoomtg': 'ZOOMTG', 'zoomup': 'ZOOMUP'}, 'CEQ': {'power': 'POWER', 'preset': 'PRESET'}, 'CMD': {'1': '1', '10-0': '10/0', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', 'clear': 'CLEAR', 'disp': 'DISP', 'eject': 'EJECT', 'enter': 'ENTER', 'ff': 'FF', 'group': 'GROUP', 'm-scan': 'M.SCAN', 'memory': 'MEMORY', 'name': 'NAME', 'p-mode': 'P.MODE', 'pause': 'PAUSE', 'play': 'PLAY', 'power': 'POWER', 'random': 'RANDOM', 'rec': 'REC', 'repeat': 'REPEAT', 'rew': 'REW', 'scroll': 'SCROLL', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'stby': 'STBY', 'stop': 'STOP'}, 'CPT': {'0': '0', '1': '1', '10': '10', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', 'disp': 'DISP', 'down': 'DOWN', 'enter': 'ENTER', 'ff': 'FF', 'left': 'LEFT', 'mode': 'MODE', 'pause': 'PAUSE', 'play': 'PLAY', 'prsdn': 'PRSDN', 'prsup': 'PRSUP', 'repeat': 'REPEAT', 'return': 'RETURN', 'rew': 'REW', 'right': 'RIGHT', 'setup': 'SETUP', 'shuffle': 'SHUFFLE', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'stop': 'STOP', 'up': 'UP'}, 'CT1': {'ff': 'FF', 'play-f': 'PLAY.F', 'play-r': 'PLAY.R', 'rc-pau': 'RC/PAU', 'rew': 'REW', 'stop': 'STOP'}, 'CT2': {'ff': 'FF', 'op-cl': 'OP/CL', 'play-f': 'PLAY.F', 'play-r': 'PLAY.R', 'rc-pau': 'RC/PAU', 'rec': 'REC', 'rew': 'REW', 'skip-f': 'SKIP.F', 'skip-r': 'SKIP.R', 'stop': 'STOP'}, 'CTL': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(-12, 0, 12): (-12, 0, 12)}, 'DIF': {'02': '02', '03': '03', 'query': 'QSTN', 'selector-listening': '01', 'selector-volume': '00', 'toggle': 'TG'}, 'DIM': {'bright': '00', 'bright-led-off': '08', 'dark': '02', 'dim': 'DIM', 'query': 'QSTN', 
'shut-off': '03'}, 'DVL': {'high': '03', 'low': '01', 'mid': '02', 'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'HAO': {'auto': '02', 'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'HAT': {'query': 'QSTN'}, 'HBL': {'analog': '01', 'auto': '00', 'query': 'QSTN'}, 'HCN': {'query': 'QSTN'}, 'HDO': {'analog': '00', 'both': '05', 'no': '00', 'out': '01', 'out-sub': '02', 'query': 'QSTN', 'sub': '02', 'up': 'UP', 'yes': '01'}, 'HDS': {'query': 'QSTN'}, 'HPR': {'query': 'QSTN', xrange(1, 8): (1, 8)}, 'HTI': {'query': 'QSTN'}, 'HTS': {'mmnnoo': 'mmnnoo', 'query': 'QSTN'}, 'IAL': {'query': 'QSTN'}, 'IAT': {'query': 'QSTN'}, 'IFA': {'query': 'QSTN'}, 'IFV': {'query': 'QSTN'}, 'ILS': {}, 'IMD': {'ext': 'EXT', 'query': 'QSTN', 'std': 'STD', 'vdc': 'VDC'}, 'ISF': {'custom': '00', 'day': '01', 'night': '02', 'query': 'QSTN', 'up': 'UP'}, 'IST': {'prs': 'prs', 'query': 'QSTN'}, 'ITI': {'query': 'QSTN'}, 'ITM': {'mm-ss-mm-ss': 'mm:ss/mm:ss', 'query': 'QSTN'}, 'ITR': {'cccc-tttt': 'cccc/tttt', 'query': 'QSTN'}, 'LMD': {'action': '05', 'all-ch-stereo': '0C', 'audyssey-dsx': '16', 'cinema2': '50', 'direct': '01', 'dolby-ex': '41', 'dolby-ex-audyssey-dsx': 'A7', 'dolby-virtual': '14', 'down': 'DOWN', 'dts-surround-sensation': '15', 'enhance': '0E', 'enhanced-7': '0E', 'film': '03', 'full-mono': '13', 'game': 'GAME', 'game-action': '05', 'game-rock': '06', 'game-rpg': '03', 'game-sports': '0E', 'i': '52', 'mono': '0F', 'mono-movie': '07', 'movie': 'MOVIE', 'multiplex': '12', 'music': 'MUSIC', 'musical': '06', 'neo-6': '8C', 'neo-6-cinema': '82', 'neo-6-cinema-audyssey-dsx': 'A3', 'neo-6-cinema-dts-surround-sensation': '91', 'neo-6-music': '83', 'neo-6-music-audyssey-dsx': 'A4', 'neo-6-music-dts-surround-sensation': '92', 'neo-x-cinema': '82', 'neo-x-game': '9A', 'neo-x-music': '83', 'neo-x-thx-cinema': '85', 'neo-x-thx-games': '8A', 'neo-x-thx-music': '8C', 'neural-digital-music': '93', 'neural-digital-music-audyssey-dsx': 'A6', 'neural-surr': '87', 'neural-surround': 
'88', 'neural-surround-audyssey-dsx': 'A5', 'neural-thx': '88', 'neural-thx-cinema': '8D', 'neural-thx-games': '8F', 'neural-thx-music': '8E', 'orchestra': '08', 'plii': '8B', 'plii-game-audyssey-dsx': 'A2', 'plii-movie-audyssey-dsx': 'A0', 'plii-music-audyssey-dsx': 'A1', 'pliix': 'A2', 'pliix-game': '86', 'pliix-movie': '80', 'pliix-music': '81', 'pliix-thx-cinema': '84', 'pliix-thx-games': '89', 'pliix-thx-music': '8B', 'pliiz-height': '90', 'pliiz-height-thx-cinema': '94', 'pliiz-height-thx-games': '96', 'pliiz-height-thx-music': '95', 'pliiz-height-thx-u2': '99', 'pure-audio': '11', 'query': 'QSTN', 's-cinema': '50', 's-games': '52', 's-music': '51', 's2': '52', 's2-cinema': '97', 's2-games': '99', 's2-music': '98', 'stereo': '00', 'straight-decode': '40', 'studio-mix': '0A', 'surround': '02', 'theater-dimensional': '0D', 'thx': '04', 'thx-cinema': '42', 'thx-games': '52', 'thx-music': '44', 'thx-musicmode': '51', 'thx-surround-ex': '43', 'thx-u2': '52', 'tv-logic': '0B', 'unplugged': '09', 'up': 'UP', 'whole-house': '1F'}, 'LTN': {'auto-dolby-truehd': '03', 'high-dolbydigital': '02', 'low-dolbydigital': '01', 'off': '00', 'on-dolby-truehd': '01', 'query': 'QSTN', 'up': 'UP'}, 'MEM': {'lock': 'LOCK', 'rcl': 'RCL', 'str': 'STR', 'unlk': 'UNLK'}, 'MOT': {'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'MVL': {'level-down': 'DOWN', 'level-down-1db-step': 'DOWN1', 'level-up': 'UP', 'level-up-1db-step': 'UP1', 'query': 'QSTN', xrange(100): (0, 100), xrange(80): (0, 80)}, 'NAL': {'query': 'QSTN'}, 'NAT': {'query': 'QSTN'}, 'NJA': {'tp-xx-xx-xx-xx-xx-xx': 'tp{xx}{xx}{xx}{xx}{xx}{xx}'}, 'NKY': {'ll': 'll'}, 'NLS': {'ti': 'ti'}, 'NMD': {'ext': 'EXT', 'query': 'QSTN', 'std': 'STD', 'vdc': 'VDC'}, 'NPR': {'set': 'SET', xrange(1, 40): (1, 40)}, 'NPU': {}, 'NST': {'prs': 'prs', 'query': 'QSTN'}, 'NSV': {}, 'NTC': {'0': '0', '1': '1', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', 'album': 'ALBUM', 'artist': 'ARTIST', 'caps': 'CAPS', 
'chdn': 'CHDN', 'chup': 'CHUP', 'delete': 'DELETE', 'display': 'DISPLAY', 'down': 'DOWN', 'ff': 'FF', 'genre': 'GENRE', 'language': 'LANGUAGE', 'left': 'LEFT', 'list': 'LIST', 'location': 'LOCATION', 'menu': 'MENU', 'mode': 'MODE', 'pause': 'PAUSE', 'play': 'PLAY', 'playlist': 'PLAYLIST', 'random': 'RANDOM', 'repeat': 'REPEAT', 'return': 'RETURN', 'rew': 'REW', 'right': 'RIGHT', 'select': 'SELECT', 'setup': 'SETUP', 'stop': 'STOP', 'top': 'TOP', 'trdn': 'TRDN', 'trup': 'TRUP', 'up': 'UP'}, 'NTI': {'query': 'QSTN'}, 'NTM': {'mm-ss-mm-ss': 'mm:ss/mm:ss', 'query': 'QSTN'}, 'NTR': {'cccc-tttt': 'cccc/tttt', 'query': 'QSTN'}, 'OSD': {'audio': 'AUDIO', 'down': 'DOWN', 'enter': 'ENTER', 'exit': 'EXIT', 'home': 'HOME', 'left': 'LEFT', 'menu': 'MENU', 'right': 'RIGHT', 'up': 'UP', 'video': 'VIDEO'}, 'PRM': {xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'PRS': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'PTS': {'enter': 'ENTER', xrange(30): (0, 30)}, 'PWR': {'on': '01', 'query': 'QSTN', 'standby': '00'}, 'RAS': {'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'RDS': {'00': '00', '01': '01', '02': '02', 'up': 'UP'}, 'RES': {'1080i': '04', '1080p': '07', '24fs': '07', '480p': '02', '4k-upcaling': '08', '720p': '03', 'auto': '01', 'query': 'QSTN', 'source': '06', 'through': '00', 'up': 'UP'}, 'SAT': {'query': 'QSTN'}, 'SCH': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(597): (0, 597)}, 'SCN': {'query': 'QSTN'}, 'SCT': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'SLA': {'analog': '02', 'arc': '07', 'auto': '00', 'balance': '06', 'coax': '05', 'hdmi': '04', 'ilink': '03', 'multi-channel': '01', 'opt': '05', 'query': 'QSTN', 'up': 'UP'}, 'SLC': {'chsel': 'CHSEL', 'down': 'DOWN', 'test': 'TEST', 'up': 'UP'}, 'SLI': {'07': '07', '08': '08', '09': '09', 'am': '25', 'aux1': '03', 'aux2': '04', 'bd': '10', 'cbl': '01', 'cd': '23', 'dlna': '27', 'down': 'DOWN', 'dvd': '10', 'dvr': '00', 'fm': '24', 'game': '02', 
'internet-radio': '28', 'iradio-favorite': '28', 'multi-ch': '30', 'music-server': '27', 'net': '2B', 'network': '2B', 'p4s': '27', 'pc': '05', 'phono': '22', 'query': 'QSTN', 'sat': '01', 'sirius': '32', 'tape': '20', 'tape-1': '20', 'tape2': '21', 'tuner': '26', 'tv': '23', 'universal-port': '40', 'up': 'UP', 'usb': '2C', 'vcr': '00', 'video1': '00', 'video2': '01', 'video3': '02', 'video4': '03', 'video5': '04', 'video6': '05', 'video7': '06', 'xm': '31'}, 'SLK': {'input': 'INPUT', 'wrong': 'WRONG'}, 'SLP': {'query': 'QSTN', 'time-off': 'OFF', 'up': 'UP', xrange(1, 90): (1, 90)}, 'SLR': {'am': '25', 'cd': '23', 'dvd': '10', 'fm': '24', 'internet-radio': '28', 'multi-ch': '30', 'music-server': '27', 'off': '7F', 'phono': '22', 'query': 'QSTN', 'source': '80', 'tape': '20', 'tape2': '21', 'tuner': '26', 'video1': '00', 'video2': '01', 'video3': '02', 'video4': '03', 'video5': '04', 'video6': '05', 'video7': '06', 'xm': '31'}, 'SPA': {'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'SPB': {'off': '00', 'on': '01', 'query': 'QSTN', 'up': 'UP'}, 'SPL': {'front-high': 'FH', 'front-high-front-wide-speakers': 'HW', 'front-wide': 'FW', 'query': 'QSTN', 'surrback': 'SB', 'surrback-front-high-speakers': 'FH', 'surrback-front-wide-speakers': 'FW', 'up': 'UP'}, 'STI': {'query': 'QSTN'}, 'SWL': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(-15, 9, 12): (-15, 0, 12)}, 'TCT': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN', 't-xx': 'T{xx}', 'treble-down': 'TDOWN', 'treble-up': 'TUP'}, 'TFH': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN', 't-xx': 'T{xx}', 'treble-down': 'TDOWN', 'treble-up': 'TUP'}, 'TFR': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN', 't-xx': 'T{xx}', 'treble-down': 'TDOWN', 'treble-up': 'TUP'}, 'TFW': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN', 't-xx': 'T{xx}', 'treble-down': 'TDOWN', 'treble-up': 'TUP'}, 'TGA': {'off': '00', 'on': 
'01'}, 'TGB': {'off': '00', 'on': '01'}, 'TGC': {'off': '00', 'on': '01'}, 'TPS': {'enter': 'ENTER'}, 'TSB': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN', 't-xx': 'T{xx}', 'treble-down': 'TDOWN', 'treble-up': 'TUP'}, 'TSR': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN', 't-xx': 'T{xx}', 'treble-down': 'TDOWN', 'treble-up': 'TUP'}, 'TSW': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN'}, 'TUN': {'0-in-direct-mode': '0', '1-in-direct-mode': '1', '2-in-direct-mode': '2', '3-in-direct-mode': '3', '4-in-direct-mode': '4', '5-in-direct-mode': '5', '6-in-direct-mode': '6', '7-in-direct-mode': '7', '8-in-direct-mode': '8', '9-in-direct-mode': '9', 'direct': 'DIRECT', 'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'UDD': {'at': 'AT', 'mf': 'MF', 'mn': 'MN', 'pt': 'PT', 'up': 'UP'}, 'UDS': {'query': 'QSTN'}, 'UHA': {'query': 'QSTN'}, 'UHB': {'analog': '01', 'auto': '00', 'query': 'QSTN'}, 'UHC': {'query': 'QSTN'}, 'UHD': {'query': 'QSTN'}, 'UHP': {'query': 'QSTN', xrange(1, 8): (1, 8)}, 'UHS': {'mmnnoo': 'mmnnoo', 'query': 'QSTN'}, 'UHT': {'query': 'QSTN'}, 'UPM': {xrange(1, 40): (1, 40)}, 'UPR': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40)}, 'UTN': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'VOS': {'component': '01', 'd4': '00', 'query': 'QSTN'}, 'VPM': {'cinema': '02', 'custom': '01', 'direct': '08', 'game': '03', 'isf-day': '05', 'isf-night': '06', 'query': 'QSTN', 'streaming': '07', 'through': '00', 'up': 'UP'}, 'VWM': {'4-3': '01', 'auto': '00', 'full': '02', 'query': 'QSTN', 'smart-zoom': '05', 'up': 'UP', 'zoom': '04'}, 'XAT': {'query': 'QSTN'}, 'XCH': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(597): (0, 597)}, 'XCN': {'query': 'QSTN'}, 'XCT': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'XTI': {'query': 'QSTN'}}, 'zone2': {'LMZ': {'direct': '01', 'dvs': '88', 'mono': '0F', 'multiplex': '12', 'stereo': '00'}, 'LTZ': {'high': '02', 'low': '01', 
'off': '00', 'query': 'QSTN', 'up': 'UP'}, 'NPZ': {xrange(1, 40): (1, 40)}, 'NTC': {'pausez': 'PAUSEz', 'playz': 'PLAYz', 'stopz': 'STOPz', 'trdnz': 'TRDNz', 'trupz': 'TRUPz'}, 'NTZ': {'chdn': 'CHDN', 'chup': 'CHUP', 'display': 'DISPLAY', 'down': 'DOWN', 'ff': 'FF', 'left': 'LEFT', 'pause': 'PAUSE', 'play': 'PLAY', 'random': 'RANDOM', 'repeat': 'REPEAT', 'return': 'RETURN', 'rew': 'REW', 'right': 'RIGHT', 'select': 'SELECT', 'stop': 'STOP', 'trdn': 'TRDN', 'trup': 'TRUP', 'up': 'UP'}, 'PRS': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'PRZ': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'RAZ': {'both-off': '00', 'on': '02', 'query': 'QSTN', 'up': 'UP'}, 'SLZ': {'am': '25', 'aux1': '03', 'aux2': '04', 'bd': '10', 'cbl': '01', 'cd': '23', 'dlna': '27', 'down': 'DOWN', 'dvd': '10', 'dvr': '00', 'fm': '24', 'game': '02', 'hidden1': '07', 'hidden2': '08', 'hidden3': '09', 'internet-radio': '28', 'iradio-favorite': '28', 'multi-ch': '30', 'music-server': '27', 'net': '2B', 'network': '2B', 'off': '7F', 'p4s': '27', 'pc': '05', 'phono': '22', 'query': 'QSTN', 'sat': '01', 'sirius': '32', 'source': '80', 'tape': '20', 'tape2': '21', 'tuner': '26', 'tv': '23', 'universal-port': '40', 'up': 'UP', 'usb': '2C', 'vcr': '00', 'video1': '00', 'video2': '01', 'video3': '02', 'video4': '03', 'video5': '04', 'video6': '05', 'video7': '06', 'xm': '31'}, 'TUN': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'TUZ': {'0-in-direct-mode': '0', '1-in-direct-mode': '1', '2-in-direct-mode': '2', '3-in-direct-mode': '3', '4-in-direct-mode': '4', '5-in-direct-mode': '5', '6-in-direct-mode': '6', '7-in-direct-mode': '7', '8-in-direct-mode': '8', '9-in-direct-mode': '9', 'direct': 'DIRECT', 'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'ZBL': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', 'xx-is-a-00-a-l-10-0-r-10-2-step': '{xx}'}, 'ZMT': {'off': '00', 'on': '01', 'query': 'QSTN', 'toggle': 'TG'}, 
'ZPW': {'on': '01', 'query': 'QSTN', 'standby': '00'}, 'ZTN': {'bass-down': 'BDOWN', 'bass-up': 'BUP', 'bass-xx-is-a-00-a-10-0-10-2-step': 'B{xx}', 'query': 'QSTN', 'treble-down': 'TDOWN', 'treble-up': 'TUP', 'treble-xx-is-a-00-a-10-0-10-2-step': 'T{xx}'}, 'ZVL': {'level-down': 'DOWN', 'level-up': 'UP', 'query': 'QSTN', xrange(100): (0, 100), xrange(80): (0, 80)}}, 'zone3': {'BL3': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', 'xx': '{xx}'}, 'MT3': {'off': '00', 'on': '01', 'query': 'QSTN', 'toggle': 'TG'}, 'NP3': {xrange(1, 40): (1, 40)}, 'NT3': {'chdn': 'CHDN', 'chup': 'CHUP', 'display': 'DISPLAY', 'down': 'DOWN', 'ff': 'FF', 'left': 'LEFT', 'pause': 'PAUSE', 'play': 'PLAY', 'random': 'RANDOM', 'repeat': 'REPEAT', 'return': 'RETURN', 'rew': 'REW', 'right': 'RIGHT', 'select': 'SELECT', 'stop': 'STOP', 'trdn': 'TRDN', 'trup': 'TRUP', 'up': 'UP'}, 'NTC': {'pausez': 'PAUSEz', 'playz': 'PLAYz', 'stopz': 'STOPz', 'trdnz': 'TRDNz', 'trupz': 'TRUPz'}, 'PR3': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'PRS': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'PW3': {'on': '01', 'query': 'QSTN', 'standby': '00'}, 'SL3': {'am': '25', 'aux1': '03', 'aux2': '04', 'cbl': '01', 'cd': '23', 'dlna': '27', 'down': 'DOWN', 'dvd': '10', 'dvr': '00', 'fm': '24', 'game': '02', 'hidden1': '07', 'hidden2': '08', 'hidden3': '09', 'internet-radio': '28', 'iradio-favorite': '28', 'multi-ch': '30', 'music-server': '27', 'net': '2B', 'network': '2B', 'p4s': '27', 'pc': '05', 'phono': '22', 'query': 'QSTN', 'sat': '01', 'sirius': '32', 'source': '80', 'tape': '20', 'tape2': '21', 'tuner': '26', 'tv': '23', 'universal-port': '40', 'up': 'UP', 'usb': '2C', 'vcr': '00', 'video1': '00', 'video2': '01', 'video3': '02', 'video4': '03', 'video5': '04', 'video6': '05', 'video7': '06', 'xm': '31'}, 'TN3': {'b-xx': 'B{xx}', 'bass-down': 'BDOWN', 'bass-up': 'BUP', 'query': 'QSTN', 't-xx': 'T{xx}', 
'treble-down': 'TDOWN', 'treble-up': 'TUP'}, 'TU3': {'0-in-direct-mode': '0', '1-in-direct-mode': '1', '2-in-direct-mode': '2', '3-in-direct-mode': '3', '4-in-direct-mode': '4', '5-in-direct-mode': '5', '6-in-direct-mode': '6', '7-in-direct-mode': '7', '8-in-direct-mode': '8', '9-in-direct-mode': '9', 'direct': 'DIRECT', 'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'TUN': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'VL3': {'level-down': 'DOWN', 'level-up': 'UP', 'query': 'QSTN', xrange(100): (0, 100), xrange(80): (0, 80)}}, 'zone4': {'MT4': {'off': '00', 'on': '01', 'query': 'QSTN', 'toggle': 'TG'}, 'NP4': {xrange(1, 40): (1, 40)}, 'NT4': {'display': 'DISPLAY', 'down': 'DOWN', 'ff': 'FF', 'left': 'LEFT', 'pause': 'PAUSE', 'play': 'PLAY', 'random': 'RANDOM', 'repeat': 'REPEAT', 'return': 'RETURN', 'rew': 'REW', 'right': 'RIGHT', 'select': 'SELECT', 'stop': 'STOP', 'trdn': 'TRDN', 'trup': 'TRUP', 'up': 'UP'}, 'NTC': {'pausez': 'PAUSEz', 'playz': 'PLAYz', 'stopz': 'STOPz', 'trdnz': 'TRDNz', 'trupz': 'TRUPz'}, 'PR4': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'PRS': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP', xrange(1, 40): (1, 40), xrange(1, 30): (1, 30)}, 'PW4': {'on': '01', 'query': 'QSTN', 'standby': '00'}, 'SL4': {'am': '25', 'aux1': '03', 'aux2': '04', 'cbl': '01', 'cd': '23', 'dlna': '27', 'down': 'DOWN', 'dvd': '10', 'dvr': '00', 'fm': '24', 'game': '02', 'hidden1': '07', 'hidden2': '08', 'hidden3': '09', 'internet-radio': '28', 'iradio-favorite': '28', 'multi-ch': '30', 'music-server': '27', 'net': '2B', 'network': '2B', 'p4s': '27', 'phono': '22', 'query': 'QSTN', 'sat': '01', 'sirius': '32', 'source': '80', 'tape': '20', 'tape-1': '20', 'tape2': '21', 'tuner': '26', 'tv': '23', 'universal-port': '40', 'up': 'UP', 'usb': '2C', 'vcr': '00', 'video1': '00', 'video2': '01', 'video3': '02', 'video4': '03', 'video5': '04', 'video6': '05', 'video7': '06', 'xm': '31'}, 'TU4': {'0-in-direct-mode': '0', 
'1-in-direct-mode': '1', '2-in-direct-mode': '2', '3-in-direct-mode': '3', '4-in-direct-mode': '4', '5-in-direct-mode': '5', '6-in-direct-mode': '6', '7-in-direct-mode': '7', '8-in-direct-mode': '8', '9-in-direct-mode': '9', 'direct': 'DIRECT', 'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'TUN': {'down': 'DOWN', 'query': 'QSTN', 'up': 'UP'}, 'VL4': {'level-down': 'DOWN', 'level-up': 'UP', 'query': 'QSTN', xrange(100): (0, 100), xrange(80): (0, 80)}}}
mit
-8,840,003,991,964,150,000
43.196626
876
0.53899
false
javo100/plugin.video.PAQUETEDIVIERTAS2
servers/hulkshare.py
43
2308
# -*- coding: utf-8 -*- #------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Conector para hulkshare # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import urlparse,urllib2,urllib,re import os from core import scrapertools from core import logger from core import config def test_video_exists( page_url ): return True,"" def get_video_url( page_url , premium = False , user="" , password="", video_password="" ): logger.info("[hulkshare.py] get_video_url(page_url='%s')" % page_url) video_urls = [] location = scrapertools.get_header_from_response(page_url, header_to_get="location") extension = scrapertools.get_filename_from_url(location)[-4:] video_urls.append( [ "[hulkshare]",location ] ) return video_urls # Encuentra vídeos del servidor en el texto pasado def find_videos(data): encontrados = set() devuelve = [] #http://www.hulkshare.com/dl/bp62cf2510h8 #http://www.hulkshare.com/dl/e633tphub8jk patronvideos = '(hulkshare.com/dl/[a-z0-9]+)' logger.info("[hulkshare.py] find_videos #"+patronvideos+"#") matches = re.compile(patronvideos,re.DOTALL).findall(data) for match in matches: titulo = "[hulkshare]" url = "http://www."+match if url not in encontrados: logger.info(" url="+url) devuelve.append( [ titulo , url , 'hulkshare' ] ) encontrados.add(url) else: logger.info(" url duplicada="+url) #http://www.tusnovelas.com/hl.php?v=5ju6iuif5e68 patronvideos = 'tusnovelas.com/hl.php\?v\=([a-z0-9]+)' logger.info("[hulkshare.py] find_videos #"+patronvideos+"#") matches = re.compile(patronvideos,re.DOTALL).findall(data) for match in matches: titulo = "[hulkshare]" url = "http://www.hulkshare.com/dl/"+match if url not in encontrados: logger.info(" url="+url) devuelve.append( [ titulo , url , 'hulkshare' ] ) encontrados.add(url) else: logger.info(" url duplicada="+url) # return devuelve def test(): video_urls = get_video_url("http://www.hulkshare.com/dl/5ju6iuif5e68") 
return len(video_urls)>0
gpl-2.0
7,841,621,588,759,307,000
31.507042
91
0.595145
false
SmithsonianEnterprises/django-cms
cms/test_utils/project/placeholderapp_urls.py
45
1293
from cms.utils.compat.dj import is_installed from cms.utils.conf import get_cms_setting from django.conf import settings from django.conf.urls import include, url from django.conf.urls.i18n import i18n_patterns from django.contrib import admin admin.autodiscover() urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^jsi18n/(?P<packages>\S+?)/$', 'django.views.i18n.javascript_catalog'), url(r'^media/cms/(?P<path>.*)$', 'django.views.static.serve', {'document_root': get_cms_setting('MEDIA_ROOT'), 'show_indexes': True}), url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}), ] urlpatterns += i18n_patterns('', url(r'^detail/(?P<id>[0-9]+)/$', 'cms.test_utils.project.placeholderapp.views.detail_view', name="detail"), url(r'^detail/(?P<pk>[0-9]+)/$', 'cms.test_utils.project.placeholderapp.views.detail_view', name="example_detail"), url(r'^detail_multi/(?P<id>[0-9]+)/$', 'cms.test_utils.project.placeholderapp.views.detail_view_multi', name="detail_multi"), url(r'^', include('cms.urls')), ) if settings.DEBUG and is_installed('debug_toolbar'): import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
bsd-3-clause
-3,585,035,955,383,362,600
40.709677
129
0.66744
false
asalomatov/variants
variants/test_known.py
1
2504
#!/mnt/xfs1/home/asalomatov/miniconda2/bin/python from __future__ import print_function import sys #sys.path.insert(0, '/mnt/xfs1/home/asalomatov/projects/update_vars/variants/variants') import train import numpy import pandas import os #from ggplot import * from sklearn.externals import joblib from keras.models import model_from_json print(sys.argv) m = sys.argv[1] lvl = int(sys.argv[2]) n_extra = int(sys.argv[3]) prob_threshold = float(sys.argv[4]) known_vars = sys.argv[5] extra_vars = sys.argv[6] is_keras = bool(int(sys.argv[7])) m_pkl = joblib.load(m) list_of_features = m_pkl['features'] tst = train.TrainTest(known_vars, list_of_features, m_pkl['y_name'], m_pkl['extra_col_names']) # + # ['DP_offspring', 'DP_father', 'DP_mother']) tst.feature_list = list_of_features if is_keras: tst.is_keras = True tst.readDataSet() tst.addLabels(level=lvl) print('data_set shape is %s' % ' '.join(map(str, tst.data_set.shape))) if tst.data_set.empty: sys.exit('data set is empty') #n_extra = tst.data_set.shape[0] #roughly balanced classes print('adding %s extra negative examples' % n_extra) if n_extra > 0: tst.readExtraVars(extra_vars, n_extra=n_extra) tst.dropNA('label') print('data_set shape is %s' % ' '.join(map(str, tst.data_set.shape))) print('label balance is ') print(tst.data_set.label.value_counts()) if tst.is_keras: tst.model = model_from_json(m_pkl['model']) tst.model.load_weights(m_pkl['weights_file']) else: tst.model = m_pkl['model'] tst.stdize = m_pkl['stdize'] # bool(int(sys.argv[6])) tst.threshold = prob_threshold print('probability treshold is %s' % tst.threshold) tst.train_set_var_id = [] #m_pkl['train_var_id'] tst.data2Test() print('test_set_X shape is') print(tst.test_set_X.shape) tst.predictClass(tst.threshold) tst.getMetrics() tst.perf_mertics['method'] = tst.method tst.perf_mertics['prob_cutoff'] = tst.threshold # myplot = ggplot(tst.roc, aes(x='fpr', y='tpr')) +\ # geom_line() +\ # geom_abline(linetype='dashed') # myplot1 = ggplot(tst.roc, aes('threshold')) 
+\ # geom_line(aes(y='tpr')) +\ # geom_line(aes(y='fpr')) # ggsave(plot=myplot, filename='roc_curve_1.png') # ggsave(plot=myplot1, filename='roc_curve_2.png') #m_name = os.path.basename(m) #m_name = '.'.join(m_name.split('.')[:-1]) + '_tstlvl' + str(lvl) #res['method'] = m_name #res.to_csv(os.path.join(os.path.dirname(test_set_pat), m_name + '.csv'), index=False)
mit
7,369,483,448,799,165,000
29.536585
87
0.658147
false
ampotos/dynStruct
_dynStruct/access.py
1
10001
import binascii import _dynStruct import capstone unsigned_int_instr = [capstone.x86.X86_INS_ADCX, capstone.x86.X86_INS_ADOX, capstone.x86.X86_INS_DIV, capstone.x86.X86_INS_MUL, capstone.x86.X86_INS_MULX] xmm_regs = [xmm for xmm in range(capstone.x86.X86_REG_XMM0 - 1, capstone.x86.X86_REG_XMM31)] class Access: def __init__(self, access, orig, addr_start, block, t): self.block = block self.offset = access self.addr = addr_start + self.offset self.size = orig["size_access"] self.t = t if len(orig["opcode"]) % 2: orig["opcode"] = "0" + orig["opcode"] self.instr_op = orig["opcode"] if orig["ctx_opcode"] and len(orig["ctx_opcode"]) % 2: orig["ctx_opcode"] = "0" + orig["ctx_opcode"] self.ctx_opcode = orig["ctx_opcode"] json_attrib = ["nb_access", "pc", "func_pc", "func_sym", "func_module", "ctx_addr"] for k in json_attrib: setattr(self, k, (orig[k])) self.disass() self.instr_display = '<span class="text-success"><strong>%s</strong>\ </span><span class="text-info">%s</span>' % (self.instr.mnemonic, self.instr.op_str) self.instr_search = '%s %s' % (self.instr.mnemonic, self.instr.op_str) if self.ctx_opcode: if self.ctx_addr > self.pc: self.ctx_instr_display = "Next : " else: self.ctx_instr_display = "Prev : " self.ctx_instr_display += '<span class="text-success"><strong>%s</strong>\ </span><span class="text-info">%s</span>' % (self.ctx_instr.mnemonic, self.ctx_instr.op_str) self.ctx_instr_search = '%s %s' % (self.ctx_instr.mnemonic, self.ctx_instr.op_str) else: self.ctx_instr_search = 'No context' self.ctx_instr_display = '<span class="text-danger">No context</span>' def is_offset(self, offset): return self.offset == offset def is_in_range(self, start, end): if self.offset >= start and self.offset < end: return True if self.offset < start and self.offset + self.size >start: return True return False def in_member(self, member): if self.is_offset(member.offset): return True if self.offset >= member.offset and\ self.offset < member.offset + member.size: return True return 
False def disass(self): if not _dynStruct.disasm: _dynStruct.create_disasm() if not hasattr(self, 'instr'): self.instr = [instr for instr in _dynStruct.disasm.disasm(binascii.unhexlify(self.instr_op), self.pc)][0] if self.ctx_opcode: self.ctx_instr = [instr for instr in _dynStruct.disasm.disasm(binascii.unhexlify(self.ctx_opcode), self.ctx_addr)][0] def analyse_ctx(self, size): #TODO extend analyse to other instruction and # SSEX, AVX and other intel extension if not hasattr(self, 'instr'): self.disass() if self.t == 'write': # Detect if the written val is the result from a floating point register if self.instr.mnemonic.startswith('mov'): src_op = self.instr.operands[1] if src_op.type == capstone.x86.X86_OP_FP or\ (src_op.type == capstone.x86.X86_OP_REG and src_op.reg in xmm_regs): if size == 4: return _dynStruct.float_str elif size == 8: return _dynStruct.double_str else: return None elif self.ctx_opcode and self.ctx_instr.mnemonic.startswith('mov'): dest_ctx_reg = self.ctx_instr.operands[0].reg src_ctx_op = self.ctx_instr.operands[1] if self.instr.operands[1].reg == dest_ctx_reg and\ src_ctx_op.type == capstone.x86.X86_OP_REG and src_ctx_op.reg in xmm_regs: if size == 4: return _dynStruct.float_str elif size == 8: return _dynStruct.double_str else: return None # Next analysis need a ctx_instr if not self.ctx_opcode: return None # detect ptr if ctx = lea and instr = mov with the reg value # get from lea. 
If yes it's a ptr if self.ctx_instr.id == capstone.x86.X86_INS_LEA: dest_reg = self.ctx_instr.operands[0].reg if self.instr.mnemonic.startswith('mov') and\ self.instr.op_find(capstone.x86.X86_OP_REG, 1) and\ self.instr.op_find(capstone.x86.X86_OP_REG, 1).reg == dest_reg: # if ptr is on the same memory page than rip/eip it's a func ptr op_src = self.ctx_instr.operands[1] if op_src.type == capstone.x86.X86_OP_MEM: if op_src.mem.base in [capstone.x86.X86_REG_RIP, capstone.x86.X86_REG_EIP]: if op_src.mem.index == 0 and\ int((op_src.mem.disp + self.instr.address) / 4096)\ == int(self.instr.address / 4096): return _dynStruct.ptr_func_str # if not it's just a ptr because we cannot have more information return _dynStruct.ptr_str # when the mov is an imm value on the same page than rip => func_ptr if self.instr.mnemonic.startswith('mov') and\ self.instr.op_find(capstone.x86.X86_OP_IMM, 1) and\ size == _dynStruct.bits / 8: if int(self.instr.address / 4096) ==\ int(self.instr.operands[1].imm / 4096): return _dynStruct.ptr_func_str # detecting if signed or unsigned if self.instr.mnemonic.startswith('mov') and len(self.ctx_instr.operands) == 2: dest_ctx_op = self.ctx_instr.operands[0] src_op = self.instr.operands[1] if dest_ctx_op.type == capstone.x86.X86_OP_REG and\ src_op.type == capstone.x86.X86_OP_REG and\ src_op.reg == dest_ctx_op.reg: if self.instr.id in unsigned_int_instr: return _dynStruct.unsigned_str % (size) # For read access we can only detect ptr because a use of the value read # Basically member is pointer if the value read is dereferenced else: if self.instr.id == capstone.x86.X86_INS_CALL: return _dynStruct.ptr_func_str # For other instruction we need context to perform the analysis if not self.ctx_instr: return None if not self.instr.mnemonic.startswith('mov'): return None # usually if the value is used later (not just a copy) the value # is load into a register dest_op = self.instr.operands[0] if dest_op.type == capstone.x86.X86_OP_REG: # if the register is an 
xmm register, the value is a floating # point if dest_op.reg in xmm_regs: if size == 4: return _dynStruct.float_str elif size == 8: return _dynStruct.double_str else: return None # if the context instr is a call using the previously right # reg, the value is a ptr to func if self.ctx_instr.id == capstone.x86.X86_INS_CALL and\ self.ctx_instr.operands[0].type == capstone.x86.X86_INS_CALL and\ self.ctx_instr.operands[0].reg == dest_op.reg: return _dynStruct.ptr_func_str for ctx_src_op in self.ctx_instr.operands: # if it's a mov with just base + disp and base == written register # it's likely to be a ptr sur struct or array if ctx_src_op.type == capstone.x86.X86_OP_MEM and\ ctx_src_op.mem.base == dest_op.reg: # if disp != 0 it's certainly a struct ptr if ctx_src_op.mem.segment == 0 and ctx_src_op.mem.disp != 0: return _dynStruct.ptr_struct_str # if disp == 0 and index != 0 it's certainly an array if ctx_src_op.mem.segment == 0 and ctx_src_op.mem.index != 0: return _dynStruct.ptr_array_str # else it's a pointer with no more information return _dynStruct.ptr_str # if the context instr have 2 operand and the second one use # the written ptr as base, it's ptr if (self.ctx_instr.operands) == 2 and\ self.ctx_instr.operands[1].type == capstone.x86.X86_OP_MEM and\ self.ctx_instr.operands[1].reg == ctx_src_op: return _dynStruct.ptr_str return None @staticmethod def remove_instrs(access_list): for access in access_list: if hasattr(access, 'instr'): del access.instr if hasattr(access, 'ctx_instr'): del access.ctx_instr
mit
6,079,976,538,210,272,000
42.672489
97
0.50375
false
nickveenhof/sixpack
sixpack/test/experiment_model_test.py
2
14589
import unittest # from numbers import Number # from sixpack.db import _key from datetime import datetime import fakeredis from sixpack.models import Experiment, Alternative, Client class TestExperimentModel(unittest.TestCase): unit = True def setUp(self): self.redis = fakeredis.FakeStrictRedis() self.alternatives = ['yes', 'no'] self.exp_1 = Experiment('show-something-awesome', self.alternatives, redis=self.redis) self.exp_2 = Experiment('dales-lagunitas', ['dales', 'lagunitas'], redis=self.redis) self.exp_3 = Experiment('mgd-budheavy', ['mgd', 'bud-heavy'], redis=self.redis) self.exp_1.save() self.exp_2.save() self.exp_3.save() def tearDown(self): pipe = self.redis.pipeline() pipe.flushdb() pipe.execute() def test_constructor(self): with self.assertRaises(ValueError): Experiment('not-enough-args', ['1'], redis=self.redis) def test_save(self): pass def test_control(self): control = self.exp_1.control self.assertEqual(control.name, 'yes') def test_created_at(self): exp = Experiment('bench-press', ['joe', 'think'], redis=self.redis) date = exp.created_at self.assertIsNone(date) exp.save() date = exp.created_at self.assertTrue(isinstance(date, str)) def test_get_alternative_names(self): exp = Experiment('show-something', self.alternatives, redis=self.redis) names = exp.get_alternative_names() self.assertEqual(sorted(self.alternatives), sorted(names)) def test_is_new_record(self): exp = Experiment('show-something-is-new-record', self.alternatives, redis=self.redis) self.assertTrue(exp.is_new_record()) exp.save() self.assertFalse(exp.is_new_record()) # fakeredis does not currently support bitcount # todo, fix fakeredis and def _test_total_participants(self): pass def _test_total_conversions(self): pass def test_description(self): exp = Experiment.find_or_create('never-gonna', ['give', 'you', 'up'], redis=self.redis) self.assertEqual(exp.description, None) exp.update_description('hallo') self.assertEqual(exp.description, 'hallo') def test_change_alternatives(self): exp = 
Experiment.find_or_create('never-gonna-x', ['let', 'you', 'down'], redis=self.redis) with self.assertRaises(ValueError): Experiment.find_or_create('never-gonna-x', ['let', 'you', 'down', 'give', 'you', 'up'], redis=self.redis) exp.delete() Experiment.find_or_create('never-gonna-x', ['let', 'you', 'down', 'give', 'you', 'up'], redis=self.redis) def test_delete(self): exp = Experiment('delete-me', self.alternatives, redis=self.redis) exp.save() exp.delete() with self.assertRaises(ValueError): Experiment.find('delete-me', redis=self.redis) def test_leaky_delete(self): exp = Experiment('delete-me-1', self.alternatives, redis=self.redis) exp.save() exp2 = Experiment('delete', self.alternatives, redis=self.redis) exp2.save() exp2.delete() exp3 = Experiment.find('delete-me-1', redis=self.redis) self.assertEqual(exp3.get_alternative_names(), self.alternatives) def test_archive(self): self.assertFalse(self.exp_1.is_archived()) self.exp_1.archive() self.assertTrue(self.exp_1.is_archived()) self.exp_1.unarchive() self.assertFalse(self.exp_1.is_archived()) def test_unarchive(self): self.exp_1.archive() self.assertTrue(self.exp_1.is_archived()) self.exp_1.unarchive() self.assertFalse(self.exp_1.is_archived()) def test_set_winner(self): exp = Experiment('test-winner', ['1', '2'], redis=self.redis) exp.set_winner('1') self.assertTrue(exp.winner is not None) exp.set_winner('1') self.assertEqual(exp.winner.name, '1') def test_winner(self): exp = Experiment.find_or_create('test-get-winner', ['1', '2'], redis=self.redis) self.assertIsNone(exp.winner) exp.set_winner('1') self.assertEqual(exp.winner.name, '1') def test_reset_winner(self): exp = Experiment('show-something-reset-winner', self.alternatives, redis=self.redis) exp.save() exp.set_winner('yes') self.assertTrue(exp.winner is not None) self.assertEqual(exp.winner.name, 'yes') exp.reset_winner() self.assertIsNone(exp.winner) def test_winner_key(self): exp = Experiment.find_or_create('winner-key', ['win', 'lose'], 
redis=self.redis) self.assertEqual(exp._winner_key, "{0}:winner".format(exp.key())) def test_get_alternative(self): client = Client(10, redis=self.redis) exp = Experiment.find_or_create('archived-control', ['w', 'l'], redis=self.redis) exp.archive() # should return control on archived test with no winner alt = exp.get_alternative(client) self.assertEqual(alt.name, 'w') # should return current participation exp.unarchive() selected_for_client = exp.get_alternative(client) self.assertIn(selected_for_client.name, ['w', 'l']) # should check to see if client is participating and only return the same alt # unsure how to currently test since fakeredis obviously doesn't parse lua # most likely integration tests # See above note for the next 5 tests def _test_existing_alternative(self): pass def _test_has_converted_by_client(self): pass def _test_choose_alternative(self): pass def _test_random_choice(self): pass def test_find(self): exp = Experiment('crunches-situps', ['crunches', 'situps'], redis=self.redis) exp.save() with self.assertRaises(ValueError): Experiment.find('this-does-not-exist', redis=self.redis) try: Experiment.find('crunches-situps', redis=self.redis) except: self.fail('known exp not found') def test_find_or_create(self): # should throw a ValueError if alters are invalid with self.assertRaises(ValueError): Experiment.find_or_create('party-time', ['1'], redis=self.redis) with self.assertRaises(ValueError): Experiment.find_or_create('party-time', ['1', '*****'], redis=self.redis) # should create a -NEW- experiment if experiment has never been used with self.assertRaises(ValueError): Experiment.find('dance-dance', redis=self.redis) def test_all(self): # there are three created in setUp() all_of_them = Experiment.all(redis=self.redis) self.assertEqual(len(all_of_them), 3) exp_1 = Experiment('archive-this', ['archived', 'unarchive'], redis=self.redis) exp_1.save() all_again = Experiment.all(redis=self.redis) self.assertEqual(len(all_again), 4) exp_1.archive() 
all_archived = Experiment.all(redis=self.redis) self.assertEqual(len(all_archived), 3) all_with_archived = Experiment.all(exclude_archived=False, redis=self.redis) self.assertEqual(len(all_with_archived), 4) all_archived = Experiment.archived(redis=self.redis) self.assertEqual(len(all_archived), 1) def test_load_alternatives(self): exp = Experiment.find_or_create('load-alts-test', ['yes', 'no', 'call-me-maybe'], redis=self.redis) alts = Experiment.load_alternatives(exp.name, redis=self.redis) self.assertEqual(sorted(alts), sorted(['yes', 'no', 'call-me-maybe'])) def test_differing_alternatives_fails(self): exp = Experiment.find_or_create('load-differing-alts', ['yes', 'zack', 'PBR'], redis=self.redis) alts = Experiment.load_alternatives(exp.name, redis=self.redis) self.assertEqual(sorted(alts), sorted(['PBR', 'yes', 'zack'])) with self.assertRaises(ValueError): exp = Experiment.find_or_create('load-differing-alts', ['kyle', 'zack', 'PBR'], redis=self.redis) def _test_initialize_alternatives(self): # Should throw ValueError with self.assertRaises(ValueError): Experiment.initialize_alternatives('n', ['*'], redis=self.redis) # each item in list should be Alternative Instance alt_objs = Experiment.initialize_alternatives('n', ['1', '2', '3']) for alt in alt_objs: self.assertTrue(isinstance(alt, Alternative)) self.assertTrue(alt.name in ['1', '2', '3']) def test_is_not_valid(self): not_valid = Experiment.is_valid(1) self.assertFalse(not_valid) not_valid = Experiment.is_valid(':123:name') self.assertFalse(not_valid) not_valid = Experiment.is_valid('_123name') self.assertFalse(not_valid) not_valid = Experiment.is_valid('&123name') self.assertFalse(not_valid) def test_valid_options(self): Experiment.find_or_create('red-white', ['red', 'white'], traffic_fraction=1, redis=self.redis) Experiment.find_or_create('red-white-2', ['red', 'white'], traffic_fraction=0.4, redis=self.redis) def test_invalid_traffic_fraction(self): with self.assertRaises(ValueError): 
Experiment.find_or_create('dist-2', ['dist', '2'], traffic_fraction=2, redis=self.redis) with self.assertRaises(ValueError): Experiment.find_or_create('dist-100', ['dist', '100'], traffic_fraction=101, redis=self.redis) with self.assertRaises(ValueError): Experiment.find_or_create('dist-100', ['dist', '100'], traffic_fraction="x", redis=self.redis) def test_fail_when_changing_traffic(self): Experiment.find_or_create('red-white', ['red', 'white'], traffic_fraction=0.8, redis=self.redis) with self.assertRaises(ValueError): Experiment.find_or_create('red-white', ['red', 'white'], traffic_fraction=0.4, redis=self.redis) def test_dont_fail_when_participating_in_nondefault_traffic_experiment_without_traffic_param(self): Experiment.find_or_create('red-white', ['red', 'white'], traffic_fraction=0.5, redis=self.redis) Experiment.find_or_create('red-white', ['red', 'white'], redis=self.redis) def test_valid_traffic_fractions_save(self): # test the hidden prop gets set exp = Experiment.find_or_create('dist-02', ['dist', '100'], traffic_fraction=0.02, redis=self.redis) self.assertEqual(exp._traffic_fraction, 0.02) exp = Experiment.find_or_create('dist-100', ['dist', '100'], traffic_fraction=0.4, redis=self.redis) self.assertEqual(exp._traffic_fraction, 0.40) # test is set in redis def test_traffic_fraction(self): exp = Experiment.find_or_create('d-test-10', ['d', 'c'], traffic_fraction=0.1, redis=self.redis) exp.save() self.assertEqual(exp.traffic_fraction, 0.1) def test_valid_kpi(self): ret = Experiment.validate_kpi('hello-jose') self.assertTrue(ret) ret = Experiment.validate_kpi('123') self.assertTrue(ret) ret = Experiment.validate_kpi('foreigner') self.assertTrue(ret) ret = Experiment.validate_kpi('boston') self.assertTrue(ret) ret = Experiment.validate_kpi('1_not-two-times-two-times') self.assertTrue(ret) def test_invalid_kpi(self): ret = Experiment.validate_kpi('!hello-jose') self.assertFalse(ret) ret = Experiment.validate_kpi('thunder storm') self.assertFalse(ret) ret = 
Experiment.validate_kpi('&!&&!&') self.assertFalse(ret) def test_set_kpi(self): exp = Experiment.find_or_create('multi-kpi', ['kpi', '123'], redis=self.redis) # We shouldn't beable to manually set a KPI. Only via web request with self.assertRaises(ValueError): exp.set_kpi('bananza') # simulate conversion via webrequest client = Client(100, redis=self.redis) exp.get_alternative(client) exp.convert(client, None, 'bananza') exp2 = Experiment.find_or_create('multi-kpi', ['kpi', '123'], redis=self.redis) self.assertEqual(exp2.kpi, None) exp2.set_kpi('bananza') self.assertEqual(exp2.kpi, 'bananza') def test_add_kpi(self): exp = Experiment.find_or_create('multi-kpi-add', ['asdf', '999'], redis=self.redis) kpi = 'omg-pop' exp.add_kpi(kpi) key = "{0}:kpis".format(exp.key(include_kpi=False)) self.assertIn(kpi, self.redis.smembers(key)) exp.delete() def test_kpis(self): exp = Experiment.find_or_create('multi-kpi-add', ['asdf', '999'], redis=self.redis) kpis = ['omg-pop', 'zynga'] exp.add_kpi(kpis[0]) exp.add_kpi(kpis[1]) ekpi = exp.kpis self.assertIn(kpis[0], ekpi) self.assertIn(kpis[1], ekpi) exp.delete() def test_excluded_clients(self): e = Experiment.find_or_create('count-excluded-clients', ['red', 'blue'], redis=self.redis) for i in range(10): c = Client("c-%d" % i, self.redis) e.exclude_client(c) # there is a very small chance that a client was not excluded. 
self.assertEqual(e.excluded_clients(), i + 1) def test_excluded_client(self): # need proper redis to register the msetbit script import sixpack.db sixpack.db.REDIS.flushall() e = Experiment.find_or_create('excluded-client', ['option-a', 'option-b'], redis=sixpack.db.REDIS) self.assertEqual(e.control.participant_count(), 0) self.assertEqual(e.control.completed_count(), 0) # force participate 1 proper client on the control alternative cnil = Client("cnil", redis=sixpack.db.REDIS) e.control.record_participation(cnil) e.convert(cnil) # exclude client, gets control alternative & try to convert c = Client("c", redis=sixpack.db.REDIS) e.exclude_client(c) self.assertTrue(e.control == e.get_alternative(c)) self.assertTrue(None == e.existing_alternative(c)) with self.assertRaises(ValueError): e.convert(c) # participation & completed count should be 1 self.assertEqual(e.control.participant_count(), 1) self.assertEqual(e.control.completed_count(), 1)
bsd-2-clause
6,040,342,911,589,705,000
36.216837
117
0.62554
false
sometallgit/AutoUploader
Python27/Lib/distutils/tests/test_msvc9compiler.py
9
6049
"""Tests for distutils.msvc9compiler.""" import sys import unittest import os from distutils.errors import DistutilsPlatformError from distutils.tests import support from test.test_support import run_unittest # A manifest with the only assembly reference being the msvcrt assembly, so # should have the assembly completely stripped. Note that although the # assembly has a <security> reference the assembly is removed - that is # currently a "feature", not a bug :) _MANIFEST_WITH_ONLY_MSVC_REFERENCE = """\ <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> <security> <requestedPrivileges> <requestedExecutionLevel level="asInvoker" uiAccess="false"> </requestedExecutionLevel> </requestedPrivileges> </security> </trustInfo> <dependency> <dependentAssembly> <assemblyIdentity type="win32" name="Microsoft.VC90.CRT" version="9.0.21022.8" processorArchitecture="x86" publicKeyToken="XXXX"> </assemblyIdentity> </dependentAssembly> </dependency> </assembly> """ # A manifest with references to assemblies other than msvcrt. When processed, # this assembly should be returned with just the msvcrt part removed. 
_MANIFEST_WITH_MULTIPLE_REFERENCES = """\ <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> <security> <requestedPrivileges> <requestedExecutionLevel level="asInvoker" uiAccess="false"> </requestedExecutionLevel> </requestedPrivileges> </security> </trustInfo> <dependency> <dependentAssembly> <assemblyIdentity type="win32" name="Microsoft.VC90.CRT" version="9.0.21022.8" processorArchitecture="x86" publicKeyToken="XXXX"> </assemblyIdentity> </dependentAssembly> </dependency> <dependency> <dependentAssembly> <assemblyIdentity type="win32" name="Microsoft.VC90.MFC" version="9.0.21022.8" processorArchitecture="x86" publicKeyToken="XXXX"></assemblyIdentity> </dependentAssembly> </dependency> </assembly> """ _CLEANED_MANIFEST = """\ <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> <security> <requestedPrivileges> <requestedExecutionLevel level="asInvoker" uiAccess="false"> </requestedExecutionLevel> </requestedPrivileges> </security> </trustInfo> <dependency> </dependency> <dependency> <dependentAssembly> <assemblyIdentity type="win32" name="Microsoft.VC90.MFC" version="9.0.21022.8" processorArchitecture="x86" publicKeyToken="XXXX"></assemblyIdentity> </dependentAssembly> </dependency> </assembly>""" if sys.platform=="win32": from distutils.msvccompiler import get_build_version if get_build_version()>=8.0: SKIP_MESSAGE = None else: SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" else: SKIP_MESSAGE = "These tests are only for win32" @unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) class msvc9compilerTestCase(support.TempdirManager, unittest.TestCase): def test_no_compiler(self): # makes sure query_vcvarsall raises # a DistutilsPlatformError if the compiler # is not found from 
distutils.msvc9compiler import query_vcvarsall def _find_vcvarsall(version): return None from distutils import msvc9compiler old_find_vcvarsall = msvc9compiler.find_vcvarsall msvc9compiler.find_vcvarsall = _find_vcvarsall try: self.assertRaises(DistutilsPlatformError, query_vcvarsall, 'wont find this version') finally: msvc9compiler.find_vcvarsall = old_find_vcvarsall def test_reg_class(self): from distutils.msvc9compiler import Reg self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx') # looking for values that should exist on all # windows registry versions. path = r'Control Panel\Desktop' v = Reg.get_value(path, u'dragfullwindows') self.assertIn(v, (u'0', u'1', u'2')) import _winreg HKCU = _winreg.HKEY_CURRENT_USER keys = Reg.read_keys(HKCU, 'xxxx') self.assertEqual(keys, None) keys = Reg.read_keys(HKCU, r'Control Panel') self.assertIn('Desktop', keys) def test_remove_visual_c_ref(self): from distutils.msvc9compiler import MSVCCompiler tempdir = self.mkdtemp() manifest = os.path.join(tempdir, 'manifest') f = open(manifest, 'w') try: f.write(_MANIFEST_WITH_MULTIPLE_REFERENCES) finally: f.close() compiler = MSVCCompiler() compiler._remove_visual_c_ref(manifest) # see what we got f = open(manifest) try: # removing trailing spaces content = '\n'.join([line.rstrip() for line in f.readlines()]) finally: f.close() # makes sure the manifest was properly cleaned self.assertEqual(content, _CLEANED_MANIFEST) def test_remove_entire_manifest(self): from distutils.msvc9compiler import MSVCCompiler tempdir = self.mkdtemp() manifest = os.path.join(tempdir, 'manifest') f = open(manifest, 'w') try: f.write(_MANIFEST_WITH_ONLY_MSVC_REFERENCE) finally: f.close() compiler = MSVCCompiler() got = compiler._remove_visual_c_ref(manifest) self.assertIsNone(got) def test_suite(): return unittest.makeSuite(msvc9compilerTestCase) if __name__ == "__main__": run_unittest(test_suite())
mit
-3,585,948,453,612,352,000
31.875
78
0.653662
false
johnkeepmoving/oss-ftp
python27/win32/Lib/site-packages/setuptools/tests/test_develop.py
41
3496
"""develop tests """ import os import shutil import site import sys import tempfile import unittest from distutils.errors import DistutilsError from setuptools.command.develop import develop from setuptools.dist import Distribution SETUP_PY = """\ from setuptools import setup setup(name='foo', packages=['foo'], use_2to3=True, ) """ INIT_PY = """print "foo" """ class TestDevelopTest(unittest.TestCase): def setUp(self): if sys.version < "2.6" or hasattr(sys, 'real_prefix'): return # Directory structure self.dir = tempfile.mkdtemp() os.mkdir(os.path.join(self.dir, 'foo')) # setup.py setup = os.path.join(self.dir, 'setup.py') f = open(setup, 'w') f.write(SETUP_PY) f.close() self.old_cwd = os.getcwd() # foo/__init__.py init = os.path.join(self.dir, 'foo', '__init__.py') f = open(init, 'w') f.write(INIT_PY) f.close() os.chdir(self.dir) self.old_base = site.USER_BASE site.USER_BASE = tempfile.mkdtemp() self.old_site = site.USER_SITE site.USER_SITE = tempfile.mkdtemp() def tearDown(self): if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): return os.chdir(self.old_cwd) shutil.rmtree(self.dir) shutil.rmtree(site.USER_BASE) shutil.rmtree(site.USER_SITE) site.USER_BASE = self.old_base site.USER_SITE = self.old_site def test_develop(self): if sys.version < "2.6" or hasattr(sys, 'real_prefix'): return dist = Distribution( dict(name='foo', packages=['foo'], use_2to3=True, version='0.0', )) dist.script_name = 'setup.py' cmd = develop(dist) cmd.user = 1 cmd.ensure_finalized() cmd.install_dir = site.USER_SITE cmd.user = 1 old_stdout = sys.stdout #sys.stdout = StringIO() try: cmd.run() finally: sys.stdout = old_stdout # let's see if we got our egg link at the right place content = os.listdir(site.USER_SITE) content.sort() self.assertEqual(content, ['easy-install.pth', 'foo.egg-link']) # Check that we are using the right code. 
egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') try: path = egg_link_file.read().split()[0].strip() finally: egg_link_file.close() init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt') try: init = init_file.read().strip() finally: init_file.close() if sys.version < "3": self.assertEqual(init, 'print "foo"') else: self.assertEqual(init, 'print("foo")') def notest_develop_with_setup_requires(self): wanted = ("Could not find suitable distribution for " "Requirement.parse('I-DONT-EXIST')") old_dir = os.getcwd() os.chdir(self.dir) try: try: Distribution({'setup_requires': ['I_DONT_EXIST']}) except DistutilsError: e = sys.exc_info()[1] error = str(e) if error == wanted: pass finally: os.chdir(old_dir)
mit
8,935,290,389,563,014,000
27.655738
129
0.536327
false
hdmetor/scikit-learn
examples/cluster/plot_kmeans_digits.py
53
4524
""" =========================================================== A demo of K-Means clustering on the handwritten digits data =========================================================== In this example we compare the various initialization strategies for K-means in terms of runtime and quality of the results. As the ground truth is known here, we also apply different cluster quality metrics to judge the goodness of fit of the cluster labels to the ground truth. Cluster quality metrics evaluated (see :ref:`clustering_evaluation` for definitions and discussions of the metrics): =========== ======================================================== Shorthand full name =========== ======================================================== homo homogeneity score compl completeness score v-meas V measure ARI adjusted Rand index AMI adjusted mutual information silhouette silhouette coefficient =========== ======================================================== """ print(__doc__) from time import time import numpy as np import matplotlib.pyplot as plt from sklearn import metrics from sklearn.cluster import KMeans from sklearn.datasets import load_digits from sklearn.decomposition import PCA from sklearn.preprocessing import scale np.random.seed(42) digits = load_digits() data = scale(digits.data) n_samples, n_features = data.shape n_digits = len(np.unique(digits.target)) labels = digits.target sample_size = 300 print("n_digits: %d, \t n_samples %d, \t n_features %d" % (n_digits, n_samples, n_features)) print(79 * '_') print('% 9s' % 'init' ' time inertia homo compl v-meas ARI AMI silhouette') def bench_k_means(estimator, name, data): t0 = time() estimator.fit(data) print('% 9s %.2fs %i %.3f %.3f %.3f %.3f %.3f %.3f' % (name, (time() - t0), estimator.inertia_, metrics.homogeneity_score(labels, estimator.labels_), metrics.completeness_score(labels, estimator.labels_), metrics.v_measure_score(labels, estimator.labels_), metrics.adjusted_rand_score(labels, estimator.labels_), 
metrics.adjusted_mutual_info_score(labels, estimator.labels_), metrics.silhouette_score(data, estimator.labels_, metric='euclidean', sample_size=sample_size))) bench_k_means(KMeans(init='k-means++', n_clusters=n_digits, n_init=10), name="k-means++", data=data) bench_k_means(KMeans(init='random', n_clusters=n_digits, n_init=10), name="random", data=data) # in this case the seeding of the centers is deterministic, hence we run the # kmeans algorithm only once with n_init=1 pca = PCA(n_components=n_digits).fit(data) bench_k_means(KMeans(init=pca.components_, n_clusters=n_digits, n_init=1), name="PCA-based", data=data) print(79 * '_') ############################################################################### # Visualize the results on PCA-reduced data reduced_data = PCA(n_components=2).fit_transform(data) kmeans = KMeans(init='k-means++', n_clusters=n_digits, n_init=10) kmeans.fit(reduced_data) # Step size of the mesh. Decrease to increase the quality of the VQ. h = .02 # point in the mesh [x_min, m_max]x[y_min, y_max]. # Plot the decision boundary. For that, we will assign a color to each x_min, x_max = reduced_data[:, 0].min() + 1, reduced_data[:, 0].max() - 1 y_min, y_max = reduced_data[:, 1].min() + 1, reduced_data[:, 1].max() - 1 xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h)) # Obtain labels for each point in mesh. Use last trained model. 
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()]) # Put the result into a color plot Z = Z.reshape(xx.shape) plt.figure(1) plt.clf() plt.imshow(Z, interpolation='nearest', extent=(xx.min(), xx.max(), yy.min(), yy.max()), cmap=plt.cm.Paired, aspect='auto', origin='lower') plt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=2) # Plot the centroids as a white X centroids = kmeans.cluster_centers_ plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', s=169, linewidths=3, color='w', zorder=10) plt.title('K-means clustering on the digits dataset (PCA-reduced data)\n' 'Centroids are marked with white cross') plt.xlim(x_min, x_max) plt.ylim(y_min, y_max) plt.xticks(()) plt.yticks(()) plt.show()
bsd-3-clause
-8,434,303,465,154,016,000
34.622047
79
0.595933
false
zerobatu/edx-platform
lms/djangoapps/instructor/views/api.py
7
101880
""" Instructor Dashboard API views JSON views which the instructor dashboard requests. Many of these GETs may become PUTs in the future. """ import StringIO import json import logging import re import time import requests from django.conf import settings from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.http import require_POST from django.views.decorators.cache import cache_control from django.core.exceptions import ValidationError, PermissionDenied from django.core.mail.message import EmailMessage from django.db import IntegrityError from django.core.urlresolvers import reverse from django.core.validators import validate_email from django.utils.translation import ugettext as _ from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound from django.utils.html import strip_tags from django.shortcuts import redirect import string # pylint: disable=deprecated-module import random import unicodecsv import urllib import decimal from student import auth from student.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole from util.file import ( store_uploaded_file, course_and_time_based_filename_generator, FileValidationException, UniversalNewlineIterator ) from util.json_request import JsonResponse from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features from microsite_configuration import microsite from courseware.access import has_access from courseware.courses import get_course_with_access, get_course_by_id from django.contrib.auth.models import User from django_comment_client.utils import has_forum_access from django_comment_common.models import ( Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA, ) from edxmako.shortcuts import render_to_response, render_to_string from courseware.models import StudentModule from shoppingcart.models import ( Coupon, CourseRegistrationCode, 
RegistrationCodeRedemption, Invoice, CourseMode, CourseRegistrationCodeInvoiceItem, ) from student.models import ( CourseEnrollment, unique_id_for_user, anonymous_id_for_user, UserProfile, Registration, EntranceExamConfiguration, ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED, ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED, UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE ) import instructor_task.api from instructor_task.api_helper import AlreadyRunningError from instructor_task.models import ReportStore import instructor.enrollment as enrollment from instructor.enrollment import ( get_user_email_language, enroll_email, send_mail_to_student, get_email_params, send_beta_role_email, unenroll_email, ) from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role from instructor.offline_gradecalc import student_grades import instructor_analytics.basic import instructor_analytics.distributions import instructor_analytics.csvs import csv from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference from instructor.views import INVOICE_KEY from submissions import api as sub_api # installed from the edx-submissions repository from certificates import api as certs_api from bulk_email.models import CourseEmail from .tools import ( dump_student_extensions, dump_module_extensions, find_unit, get_student_from_identifier, require_student_from_identifier, handle_dashboard_error, parse_datetime, set_due_date_extension, strip_if_string, bulk_email_is_enabled_for_course, add_block_ids, ) from opaque_keys.edx.keys import CourseKey from opaque_keys.edx.locations import SlashSeparatedCourseKey from opaque_keys import InvalidKeyError from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted log = logging.getLogger(__name__) def common_exceptions_400(func): """ Catches common exceptions and renders 
matching 400 errors. (decorator without arguments)
    """
    def wrapped(request, *args, **kwargs):  # pylint: disable=missing-docstring
        # Prefer a JSON error body when the client asked for JSON (AJAX or Accept header).
        use_json = (request.is_ajax() or
                    request.META.get("HTTP_ACCEPT", "").startswith("application/json"))
        try:
            return func(request, *args, **kwargs)
        except User.DoesNotExist:
            message = _("User does not exist.")
            if use_json:
                return JsonResponse({"error": message}, 400)
            else:
                return HttpResponseBadRequest(message)
        except AlreadyRunningError:
            message = _("Task is already running.")
            if use_json:
                return JsonResponse({"error": message}, 400)
            else:
                return HttpResponseBadRequest(message)
    return wrapped


def require_query_params(*args, **kwargs):
    """
    Checks for required paremters or renders a 400 error.
    (decorator with arguments)

    `args` is a *list of required GET parameter names.
    `kwargs` is a **dict of required GET parameter names
        to string explanations of the parameter
    """
    required_params = []
    required_params += [(arg, None) for arg in args]
    required_params += [(key, kwargs[key]) for key in kwargs]
    # required_params = e.g. [('action', 'enroll or unenroll'), ['emails', None]]

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*args, **kwargs):  # pylint: disable=missing-docstring
            request = args[0]
            error_response_data = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }
            for (param, extra) in required_params:
                # Fresh object() is a sentinel: distinguishes "param absent" from
                # falsy-but-present values such as the empty string.
                default = object()
                if request.GET.get(param, default) == default:
                    error_response_data['parameters'].append(param)
                    error_response_data['info'][param] = extra
            if len(error_response_data['parameters']) > 0:
                return JsonResponse(error_response_data, status=400)
            else:
                return func(*args, **kwargs)
        return wrapped
    return decorator


def require_post_params(*args, **kwargs):
    """
    Checks for required parameters or renders a 400 error.
    (decorator with arguments)

    Functions like 'require_query_params', but checks for
    POST parameters rather than GET parameters.
    """
    required_params = []
    required_params += [(arg, None) for arg in args]
    required_params += [(key, kwargs[key]) for key in kwargs]
    # required_params = e.g. [('action', 'enroll or unenroll'), ['emails', None]]

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*args, **kwargs):  # pylint: disable=missing-docstring
            request = args[0]
            # NOTE(review): the error text says "query parameter(s)" although these are
            # POST parameters — looks like a copy-paste from require_query_params;
            # left unchanged because clients may match on this string.
            error_response_data = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }
            for (param, extra) in required_params:
                default = object()
                if request.POST.get(param, default) == default:
                    error_response_data['parameters'].append(param)
                    error_response_data['info'][param] = extra
            if len(error_response_data['parameters']) > 0:
                return JsonResponse(error_response_data, status=400)
            else:
                return func(*args, **kwargs)
        return wrapped
    return decorator


def require_level(level):
    """
    Decorator with argument that requires an access level of the requesting
    user. If the requirement is not satisfied, returns an
    HttpResponseForbidden (403).

    Assumes that request is in args[0].
    Assumes that course_id is in kwargs['course_id'].

    `level` is in ['instructor', 'staff']
    if `level` is 'staff', instructors will also be allowed, even
        if they are not in the staff group.
    """
    if level not in ['instructor', 'staff']:
        raise ValueError("unrecognized level '{}'".format(level))

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*args, **kwargs):  # pylint: disable=missing-docstring
            request = args[0]
            course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))

            if has_access(request.user, level, course):
                return func(*args, **kwargs)
            else:
                return HttpResponseForbidden()
        return wrapped
    return decorator


def require_global_staff(func):
    """View decorator that requires that the user have global staff permissions.
    """
    def wrapped(request, *args, **kwargs):  # pylint: disable=missing-docstring
        if GlobalStaff().has_user(request.user):
            return func(request, *args, **kwargs)
        else:
            return HttpResponseForbidden(
                u"Must be {platform_name} staff to perform this action.".format(
                    platform_name=settings.PLATFORM_NAME
                )
            )
    return wrapped


def require_sales_admin(func):
    """
    Decorator for checking sales administrator access before executing an HTTP endpoint. This decorator
    is designed to be used for a request based action on a course. It assumes that there will be a
    request object as well as a course_id attribute to leverage to check course level privileges.

    If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
    """
    def wrapped(request, course_id):  # pylint: disable=missing-docstring

        try:
            course_key = CourseKey.from_string(course_id)
        except InvalidKeyError:
            log.error(u"Unable to find course with course key %s", course_id)
            return HttpResponseNotFound()

        access = auth.user_has_role(request.user, CourseSalesAdminRole(course_key))

        if access:
            return func(request, course_id)
        else:
            return HttpResponseForbidden()
    return wrapped


def require_finance_admin(func):
    """
    Decorator for checking finance administrator access before executing an HTTP endpoint. This decorator
    is designed to be used for a request based action on a course. It assumes that there will be a
    request object as well as a course_id attribute to leverage to check course level privileges.

    If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
    """
    def wrapped(request, course_id):  # pylint: disable=missing-docstring

        try:
            course_key = CourseKey.from_string(course_id)
        except InvalidKeyError:
            log.error(u"Unable to find course with course key %s", course_id)
            return HttpResponseNotFound()

        access = auth.user_has_role(request.user, CourseFinanceAdminRole(course_key))

        if access:
            return func(request, course_id)
        else:
            return HttpResponseForbidden()
    return wrapped


# Column positions expected in the uploaded CSV for register_and_enroll_students.
EMAIL_INDEX = 0
USERNAME_INDEX = 1
NAME_INDEX = 2
COUNTRY_INDEX = 3


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def register_and_enroll_students(request, course_id):  # pylint: disable=too-many-statements
    """
    Create new account and Enroll students in this course.
    Passing a csv file that contains a list of students.
    Order in csv should be the following email = 0; username = 1; name = 2; country = 3.
    Requires staff access.

    -If the email address and username already exists and the user is enrolled in the course,
    do nothing (including no email gets sent out)

    -If the email address already exists, but the username is different,
    match on the email address only and continue to enroll the user in the course using the email address
    as the matching criteria. Note the change of username as a warning message (but not a failure). Send a standard enrollment email
    which is the same as the existing manual enrollment

    -If the username already exists (but not the email), assume it is a different user and fail to create the new account.
     The failure will be messaged in a response in the browser.
""" if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False)): return HttpResponseForbidden() course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) warnings = [] row_errors = [] general_errors = [] if 'students_list' in request.FILES: students = [] try: upload_file = request.FILES.get('students_list') if upload_file.name.endswith('.csv'): students = [row for row in csv.reader(upload_file.read().splitlines())] course = get_course_by_id(course_id) else: general_errors.append({ 'username': '', 'email': '', 'response': _('Make sure that the file you upload is in CSV format with no extraneous characters or rows.') }) except Exception: # pylint: disable=broad-except general_errors.append({ 'username': '', 'email': '', 'response': _('Could not read uploaded file.') }) finally: upload_file.close() generated_passwords = [] row_num = 0 for student in students: row_num = row_num + 1 # verify that we have exactly four columns in every row but allow for blank lines if len(student) != 4: if len(student) > 0: general_errors.append({ 'username': '', 'email': '', 'response': _('Data in row #{row_num} must have exactly four columns: email, username, full name, and country').format(row_num=row_num) }) continue # Iterate each student in the uploaded csv file. email = student[EMAIL_INDEX] username = student[USERNAME_INDEX] name = student[NAME_INDEX] country = student[COUNTRY_INDEX][:2] email_params = get_email_params(course, True, secure=request.is_secure()) try: validate_email(email) # Raises ValidationError if invalid except ValidationError: row_errors.append({ 'username': username, 'email': email, 'response': _('Invalid email {email_address}.').format(email_address=email)}) else: if User.objects.filter(email=email).exists(): # Email address already exists. assume it is the correct user # and just register the user in the course and send an enrollment email. 
user = User.objects.get(email=email) # see if it is an exact match with email and username # if it's not an exact match then just display a warning message, but continue onwards if not User.objects.filter(email=email, username=username).exists(): warning_message = _( 'An account with email {email} exists but the provided username {username} ' 'is different. Enrolling anyway with {email}.' ).format(email=email, username=username) warnings.append({ 'username': username, 'email': email, 'response': warning_message }) log.warning(u'email %s already exist', email) else: log.info( u"user already exists with username '%s' and email '%s'", username, email ) # make sure user is enrolled in course if not CourseEnrollment.is_enrolled(user, course_id): enrollment_obj = CourseEnrollment.enroll(user, course_id) reason = 'Enrolling via csv upload' ManualEnrollmentAudit.create_manual_enrollment_audit( request.user, email, UNENROLLED_TO_ENROLLED, reason, enrollment_obj ) log.info( u'user %s enrolled in the course %s', username, course.id, ) enroll_email(course_id=course_id, student_email=email, auto_enroll=True, email_students=True, email_params=email_params) else: # This email does not yet exist, so we need to create a new account # If username already exists in the database, then create_and_enroll_user # will raise an IntegrityError exception. 
password = generate_unique_password(generated_passwords) try: enrollment_obj = create_and_enroll_user(email, username, name, country, password, course_id) reason = 'Enrolling via csv upload' ManualEnrollmentAudit.create_manual_enrollment_audit( request.user, email, UNENROLLED_TO_ENROLLED, reason, enrollment_obj ) except IntegrityError: row_errors.append({ 'username': username, 'email': email, 'response': _('Username {user} already exists.').format(user=username)}) except Exception as ex: log.exception(type(ex).__name__) row_errors.append({ 'username': username, 'email': email, 'response': type(ex).__name__}) else: # It's a new user, an email will be sent to each newly created user. email_params['message'] = 'account_creation_and_enrollment' email_params['email_address'] = email email_params['password'] = password email_params['platform_name'] = microsite.get_value('platform_name', settings.PLATFORM_NAME) send_mail_to_student(email, email_params) log.info(u'email sent to new created user at %s', email) else: general_errors.append({ 'username': '', 'email': '', 'response': _('File is not attached.') }) results = { 'row_errors': row_errors, 'general_errors': general_errors, 'warnings': warnings } return JsonResponse(results) def generate_random_string(length): """ Create a string of random characters of specified length """ chars = [ char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase if char not in 'aAeEiIoOuU1l' ] return string.join((random.choice(chars) for __ in range(length)), '') def generate_unique_password(generated_passwords, password_length=12): """ generate a unique password for each student. 
""" password = generate_random_string(password_length) while password in generated_passwords: password = generate_random_string(password_length) generated_passwords.append(password) return password def create_and_enroll_user(email, username, name, country, password, course_id): """ Creates a user and enroll him/her in the course""" user = User.objects.create_user(username, email, password) reg = Registration() reg.register(user) profile = UserProfile(user=user) profile.name = name profile.country = country profile.save() # try to enroll the user in this course return CourseEnrollment.enroll(user, course_id) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_post_params(action="enroll or unenroll", identifiers="stringified list of emails and/or usernames") def students_update_enrollment(request, course_id): """ Enroll or unenroll students by email. Requires staff access. Query Parameters: - action in ['enroll', 'unenroll'] - identifiers is string containing a list of emails and/or usernames separated by anything split_input_list can handle. - auto_enroll is a boolean (defaults to false) If auto_enroll is false, students will be allowed to enroll. If auto_enroll is true, students will be enrolled as soon as they register. 
- email_students is a boolean (defaults to false) If email_students is true, students will be sent email notification If email_students is false, students will not be sent email notification Returns an analog to this JSON structure: { "action": "enroll", "auto_enroll": false, "results": [ { "email": "[email protected]", "before": { "enrollment": false, "auto_enroll": false, "user": true, "allowed": false }, "after": { "enrollment": true, "auto_enroll": false, "user": true, "allowed": false } } ] } """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) action = request.POST.get('action') identifiers_raw = request.POST.get('identifiers') identifiers = _split_input_list(identifiers_raw) auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True] email_students = request.POST.get('email_students') in ['true', 'True', True] is_white_label = CourseMode.is_white_label(course_id) reason = request.POST.get('reason') if is_white_label: if not reason: return JsonResponse( { 'action': action, 'results': [{'error': True}], 'auto_enroll': auto_enroll, }, status=400) enrollment_obj = None state_transition = DEFAULT_TRANSITION_STATE email_params = {} if email_students: course = get_course_by_id(course_id) email_params = get_email_params(course, auto_enroll, secure=request.is_secure()) results = [] for identifier in identifiers: # First try to get a user object from the identifer user = None email = None language = None try: user = get_student_from_identifier(identifier) except User.DoesNotExist: email = identifier else: email = user.email language = get_user_email_language(user) try: # Use django.core.validators.validate_email to check email address # validity (obviously, cannot check if email actually /exists/, # simply that it is plausibly valid) validate_email(email) # Raises ValidationError if invalid if action == 'enroll': before, after, enrollment_obj = enroll_email( course_id, email, auto_enroll, email_students, email_params, language=language 
) before_enrollment = before.to_dict()['enrollment'] before_user_registered = before.to_dict()['user'] before_allowed = before.to_dict()['allowed'] after_enrollment = after.to_dict()['enrollment'] after_allowed = after.to_dict()['allowed'] if before_user_registered: if after_enrollment: if before_enrollment: state_transition = ENROLLED_TO_ENROLLED else: if before_allowed: state_transition = ALLOWEDTOENROLL_TO_ENROLLED else: state_transition = UNENROLLED_TO_ENROLLED else: if after_allowed: state_transition = UNENROLLED_TO_ALLOWEDTOENROLL elif action == 'unenroll': before, after = unenroll_email( course_id, email, email_students, email_params, language=language ) before_enrollment = before.to_dict()['enrollment'] before_allowed = before.to_dict()['allowed'] if before_enrollment: state_transition = ENROLLED_TO_UNENROLLED else: if before_allowed: state_transition = ALLOWEDTOENROLL_TO_UNENROLLED else: state_transition = UNENROLLED_TO_UNENROLLED else: return HttpResponseBadRequest(strip_tags( "Unrecognized action '{}'".format(action) )) except ValidationError: # Flag this email as an error if invalid, but continue checking # the remaining in the list results.append({ 'identifier': identifier, 'invalidIdentifier': True, }) except Exception as exc: # pylint: disable=broad-except # catch and log any exceptions # so that one error doesn't cause a 500. 
log.exception(u"Error while #{}ing student") log.exception(exc) results.append({ 'identifier': identifier, 'error': True, }) else: ManualEnrollmentAudit.create_manual_enrollment_audit( request.user, email, state_transition, reason, enrollment_obj ) results.append({ 'identifier': identifier, 'before': before.to_dict(), 'after': after.to_dict(), }) response_payload = { 'action': action, 'results': results, 'auto_enroll': auto_enroll, } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('instructor') @common_exceptions_400 @require_post_params( identifiers="stringified list of emails and/or usernames", action="add or remove", ) def bulk_beta_modify_access(request, course_id): """ Enroll or unenroll users in beta testing program. Query parameters: - identifiers is string containing a list of emails and/or usernames separated by anything split_input_list can handle. - action is one of ['add', 'remove'] """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) action = request.POST.get('action') identifiers_raw = request.POST.get('identifiers') identifiers = _split_input_list(identifiers_raw) email_students = request.POST.get('email_students') in ['true', 'True', True] auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True] results = [] rolename = 'beta' course = get_course_by_id(course_id) email_params = {} if email_students: secure = request.is_secure() email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure) for identifier in identifiers: try: error = False user_does_not_exist = False user = get_student_from_identifier(identifier) if action == 'add': allow_access(course, user, rolename) elif action == 'remove': revoke_access(course, user, rolename) else: return HttpResponseBadRequest(strip_tags( "Unrecognized action '{}'".format(action) )) except User.DoesNotExist: error = True user_does_not_exist = True # catch and log any 
unexpected exceptions # so that one error doesn't cause a 500. except Exception as exc: # pylint: disable=broad-except log.exception(u"Error while #{}ing student") log.exception(exc) error = True else: # If no exception thrown, see if we should send an email if email_students: send_beta_role_email(action, user, email_params) # See if we should autoenroll the student if auto_enroll: # Check if student is already enrolled if not CourseEnrollment.is_enrolled(user, course_id): CourseEnrollment.enroll(user, course_id) finally: # Tabulate the action result of this email address results.append({ 'identifier': identifier, 'error': error, 'userDoesNotExist': user_does_not_exist }) response_payload = { 'action': action, 'results': results, } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('instructor') @common_exceptions_400 @require_query_params( unique_student_identifier="email or username of user to change access", rolename="'instructor', 'staff', 'beta', or 'ccx_coach'", action="'allow' or 'revoke'" ) def modify_access(request, course_id): """ Modify staff/instructor access of other user. Requires instructor access. NOTE: instructors cannot remove their own instructor access. 
    Query parameters:
    unique_student_identifer is the target user's username or email
    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
    action is one of ['allow', 'revoke']
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_id, depth=None
    )
    try:
        user = get_student_from_identifier(request.GET.get('unique_student_identifier'))
    except User.DoesNotExist:
        response_payload = {
            'unique_student_identifier': request.GET.get('unique_student_identifier'),
            'userDoesNotExist': True,
        }
        return JsonResponse(response_payload)

    # Check that user is active, because add_users
    # in common/djangoapps/student/roles.py fails
    # silently when we try to add an inactive user.
    if not user.is_active:
        response_payload = {
            'unique_student_identifier': user.username,
            'inactiveUser': True,
        }
        return JsonResponse(response_payload)

    rolename = request.GET.get('rolename')
    action = request.GET.get('action')

    if rolename not in ROLES:
        error = strip_tags("unknown rolename '{}'".format(rolename))
        log.error(error)
        return HttpResponseBadRequest(error)

    # disallow instructors from removing their own instructor access.
    if rolename == 'instructor' and user == request.user and action != 'allow':
        response_payload = {
            'unique_student_identifier': user.username,
            'rolename': rolename,
            'action': action,
            'removingSelfAsInstructor': True,
        }
        return JsonResponse(response_payload)

    if action == 'allow':
        allow_access(course, user, rolename)
    elif action == 'revoke':
        revoke_access(course, user, rolename)
    else:
        return HttpResponseBadRequest(strip_tags(
            "unrecognized action '{}'".format(action)
        ))

    response_payload = {
        'unique_student_identifier': user.username,
        'rolename': rolename,
        'action': action,
        'success': 'yes',
    }
    return JsonResponse(response_payload)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(rolename="'instructor', 'staff', or 'beta'")
def list_course_role_members(request, course_id):
    """
    List instructors and staff.
    Requires instructor access.

    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']

    Returns JSON of the form {
        "course_id": "some/course/id",
        "staff": [
            {
                "username": "staff1",
                "email": "[email protected]",
                "first_name": "Joe",
                "last_name": "Shmoe",
            }
        ]
    }
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_id, depth=None
    )

    rolename = request.GET.get('rolename')

    if rolename not in ROLES:
        return HttpResponseBadRequest()

    def extract_user_info(user):
        """ convert user into dicts for json view """
        return {
            'username': user.username,
            'email': user.email,
            'first_name': user.first_name,
            'last_name': user.last_name,
        }

    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        rolename: map(extract_user_info, list_with_level(
            course, rolename
        )),
    }
    return JsonResponse(response_payload)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_grading_config(request, course_id):
    """
    Respond with json which contains a html formatted
    grade summary.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )
    grading_config_summary = instructor_analytics.basic.dump_grading_context(course)

    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        'grading_config_summary': grading_config_summary,
    }
    return JsonResponse(response_payload)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_records(request, course_id, csv=False):  # pylint: disable=unused-argument, redefined-outer-name
    """
    return the summary of all sales records for a particular course
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    query_features = [
        'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
        'total_amount', 'created_at', 'customer_reference_number', 'recipient_name', 'recipient_email', 'created_by',
        'internal_reference', 'invoice_number', 'codes', 'course_id'
    ]

    sale_data = instructor_analytics.basic.sale_record_features(course_id, query_features)

    if not csv:
        # JSON output: replace the User object with its username for serializability.
        for item in sale_data:
            item['created_by'] = item['created_by'].username

        response_payload = {
            'course_id': course_id.to_deprecated_string(),
            'sale': sale_data,
            'queried_features': query_features
        }
        return JsonResponse(response_payload)
    else:
        header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, query_features)
        return instructor_analytics.csvs.create_csv_response("e-commerce_sale_invoice_records.csv", header, datarows)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_order_records(request, course_id):  # pylint: disable=unused-argument, redefined-outer-name
    """
    return the summary of all sales records for a particular course
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # (db_column_name, csv_header) pairs for the order-record CSV.
    query_features = [
        ('id', 'Order Id'),
        ('company_name', 'Company Name'),
        ('company_contact_name', 'Company Contact Name'),
        ('company_contact_email', 'Company Contact Email'),
        ('logged_in_username', 'Login Username'),
        ('logged_in_email', 'Login User Email'),
        ('purchase_time', 'Date of Sale'),
        ('customer_reference_number', 'Customer Reference Number'),
        ('recipient_name', 'Recipient Name'),
        ('recipient_email', 'Recipient Email'),
        ('bill_to_street1', 'Street 1'),
        ('bill_to_street2', 'Street 2'),
        ('bill_to_city', 'City'),
        ('bill_to_state', 'State'),
        ('bill_to_postalcode', 'Postal Code'),
        ('bill_to_country', 'Country'),
        ('order_type', 'Order Type'),
        ('status', 'Order Item Status'),
        ('coupon_code', 'Coupon Code'),
        ('list_price', 'List Price'),
        ('unit_cost', 'Unit Price'),
        ('quantity', 'Quantity'),
        ('total_discount', 'Total Discount'),
        ('total_amount', 'Total Amount Paid'),
    ]

    db_columns = [x[0] for x in query_features]
    csv_columns = [x[1] for x in query_features]
    sale_data = instructor_analytics.basic.sale_order_record_features(course_id, db_columns)
    header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, db_columns)  # pylint: disable=unused-variable
    return instructor_analytics.csvs.create_csv_response("e-commerce_sale_order_records.csv", csv_columns, datarows)


@require_level('staff')
@require_POST
def sale_validation(request, course_id):
    """
    This method either invalidate or re validate the sale against the invoice number depending upon the event type
    """
    try:
        invoice_number = request.POST["invoice_number"]
    except KeyError:
        return HttpResponseBadRequest("Missing required invoice_number parameter")
    try:
        invoice_number = int(invoice_number)
    except ValueError:
        return HttpResponseBadRequest(
            "invoice_number must be an integer, {value} provided".format(
                value=invoice_number
            )
        )
    try:
        event_type = request.POST["event_type"]
    except KeyError:
        return HttpResponseBadRequest("Missing required event_type parameter")

    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        obj_invoice =
CourseRegistrationCodeInvoiceItem.objects.select_related('invoice').get(
            invoice_id=invoice_number, course_id=course_id
        )
        obj_invoice = obj_invoice.invoice
    except CourseRegistrationCodeInvoiceItem.DoesNotExist:  # Check for old type invoices
        return HttpResponseNotFound(_("Invoice number '{num}' does not exist.").format(num=invoice_number))

    if event_type == "invalidate":
        return invalidate_invoice(obj_invoice)
    else:
        return re_validate_invoice(obj_invoice)


def invalidate_invoice(obj_invoice):
    """
    This method invalidate the sale against the invoice number
    """
    if not obj_invoice.is_valid:
        return HttpResponseBadRequest(_("The sale associated with this invoice has already been invalidated."))
    obj_invoice.is_valid = False
    obj_invoice.save()
    message = _('Invoice number {0} has been invalidated.').format(obj_invoice.id)
    return JsonResponse({'message': message})


def re_validate_invoice(obj_invoice):
    """
    This method re-validate the sale against the invoice number
    """
    if obj_invoice.is_valid:
        return HttpResponseBadRequest(_("This invoice is already active."))

    obj_invoice.is_valid = True
    obj_invoice.save()
    message = _('The registration codes for invoice {0} have been re-activated.').format(obj_invoice.id)
    return JsonResponse({'message': message})


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_features(request, course_id, csv=False):  # pylint: disable=redefined-outer-name
    """
    Respond with json which contains a summary of all enrolled students profile information.

    Responds with JSON
        {"students": [{-student-info-}, ...]}

    TO DO accept requests for different attribute sets.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)

    available_features = instructor_analytics.basic.AVAILABLE_FEATURES

    # Allow for microsites to be able to define additional columns (e.g.
)
    query_features = microsite.get_value('student_profile_download_fields')

    if not query_features:
        query_features = [
            'id', 'username', 'name', 'email', 'language', 'location',
            'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
            'goals'
        ]

    # Provide human-friendly and translatable names for these features. These names
    # will be displayed in the table generated in data_download.coffee. It is not (yet)
    # used as the header row in the CSV, but could be in the future.
    query_features_names = {
        'id': _('User ID'),
        'username': _('Username'),
        'name': _('Name'),
        'email': _('Email'),
        'language': _('Language'),
        'location': _('Location'),
        'year_of_birth': _('Birth Year'),
        'gender': _('Gender'),
        'level_of_education': _('Level of Education'),
        'mailing_address': _('Mailing Address'),
        'goals': _('Goals'),
    }

    if is_course_cohorted(course.id):
        # Translators: 'Cohort' refers to a group of students within a course.
        query_features.append('cohort')
        query_features_names['cohort'] = _('Cohort')

    if not csv:
        student_data = instructor_analytics.basic.enrolled_students_features(course_key, query_features)
        response_payload = {
            'course_id': unicode(course_key),
            'students': student_data,
            'students_count': len(student_data),
            'queried_features': query_features,
            'feature_names': query_features_names,
            'available_features': available_features,
        }
        return JsonResponse(response_payload)
    else:
        try:
            instructor_task.api.submit_calculate_students_features_csv(request, course_key, query_features)
            success_status = _("The enrolled learner profile report is being created."
                               " To view the status of the report, see Pending Instructor Tasks below.")
            return JsonResponse({"status": success_status})
        except AlreadyRunningError:
            already_running_status = _(
                "This enrollment report is currently being created."
                " To view the status of the report, see Pending Instructor Tasks below."
" You will be able to download the report when it is complete.") return JsonResponse({"status": already_running_status}) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def get_students_who_may_enroll(request, course_id): """ Initiate generation of a CSV file containing information about students who may enroll in a course. Responds with JSON {"status": "... status message ..."} """ course_key = CourseKey.from_string(course_id) query_features = ['email'] try: instructor_task.api.submit_calculate_may_enroll_csv(request, course_key, query_features) success_status = _( "The enrollment report is being created. This report contains" " information about learners who can enroll in the course." " To view the status of the report, see Pending Instructor Tasks below." ) return JsonResponse({"status": success_status}) except AlreadyRunningError: already_running_status = _( "This enrollment report is currently being created." " To view the status of the report, see Pending Instructor Tasks below." " You will be able to download the report when it is complete." ) return JsonResponse({"status": already_running_status}) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_POST @require_level('staff') def add_users_to_cohorts(request, course_id): """ View method that accepts an uploaded file (using key "uploaded-file") containing cohort assignments for users. This method spawns a celery task to do the assignments, and a CSV file with results is provided via data downloads. """ course_key = SlashSeparatedCourseKey.from_string(course_id) try: def validator(file_storage, file_to_validate): """ Verifies that the expected columns are present. 
""" with file_storage.open(file_to_validate) as f: reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8') try: fieldnames = next(reader) except StopIteration: fieldnames = [] msg = None if "cohort" not in fieldnames: msg = _("The file must contain a 'cohort' column containing cohort names.") elif "email" not in fieldnames and "username" not in fieldnames: msg = _("The file must contain a 'username' column, an 'email' column, or both.") if msg: raise FileValidationException(msg) __, filename = store_uploaded_file( request, 'uploaded-file', ['.csv'], course_and_time_based_filename_generator(course_key, "cohorts"), max_file_size=2000000, # limit to 2 MB validator=validator ) # The task will assume the default file storage. instructor_task.api.submit_cohort_students(request, course_key, filename) except (FileValidationException, PermissionDenied) as err: return JsonResponse({"error": unicode(err)}, status=400) return JsonResponse() @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def get_coupon_codes(request, course_id): # pylint: disable=unused-argument """ Respond with csv which contains a summary of all Active Coupons. 
""" course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) coupons = Coupon.objects.filter(course_id=course_id) query_features = [ ('code', _('Coupon Code')), ('course_id', _('Course Id')), ('percentage_discount', _('% Discount')), ('description', _('Description')), ('expiration_date', _('Expiration Date')), ('is_active', _('Is Active')), ('code_redeemed_count', _('Code Redeemed Count')), ('total_discounted_seats', _('Total Discounted Seats')), ('total_discounted_amount', _('Total Discounted Amount')), ] db_columns = [x[0] for x in query_features] csv_columns = [x[1] for x in query_features] coupons_list = instructor_analytics.basic.coupon_codes_features(db_columns, coupons, course_id) __, data_rows = instructor_analytics.csvs.format_dictlist(coupons_list, db_columns) return instructor_analytics.csvs.create_csv_response('Coupons.csv', csv_columns, data_rows) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_finance_admin def get_enrollment_report(request, course_id): """ get the enrollment report for the particular course. """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) try: instructor_task.api.submit_detailed_enrollment_features_csv(request, course_key) success_status = _("The detailed enrollment report is being created." " To view the status of the report, see Pending Instructor Tasks below.") return JsonResponse({"status": success_status}) except AlreadyRunningError: already_running_status = _("The detailed enrollment report is being created." " To view the status of the report, see Pending Instructor Tasks below." 
" You will be able to download the report when it is complete.") return JsonResponse({ "status": already_running_status }) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_finance_admin def get_exec_summary_report(request, course_id): """ get the executive summary report for the particular course. """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) try: instructor_task.api.submit_executive_summary_report(request, course_key) status_response = _("The executive summary report is being created." " To view the status of the report, see Pending Instructor Tasks below.") except AlreadyRunningError: status_response = _( "The executive summary report is currently being created." " To view the status of the report, see Pending Instructor Tasks below." " You will be able to download the report when it is complete." ) return JsonResponse({ "status": status_response }) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def get_proctored_exam_results(request, course_id): """ get the proctored exam resultsreport for the particular course. """ query_features = [ 'user_email', 'exam_name', 'allowed_time_limit_mins', 'is_sample_attempt', 'started_at', 'completed_at', 'status', ] course_key = CourseKey.from_string(course_id) try: instructor_task.api.submit_proctored_exam_results_report(request, course_key, query_features) status_response = _("The proctored exam results report is being created." " To view the status of the report, see Pending Instructor Tasks below.") except AlreadyRunningError: status_response = _( "The proctored exam results report is currently being created." " To view the status of the report, see Pending Instructor Tasks below." " You will be able to download the report when it is complete." 
        )

    return JsonResponse({
        "status": status_response
    })


def save_registration_code(user, course_id, mode_slug, invoice=None, order=None, invoice_item=None):
    """
    Recursive function that generates a new code every time and saves it in
    the Course Registration table if the validation check passes.

    Args:
        user (User): The user creating the course registration codes.
        course_id (str): The string representation of the course ID.
        mode_slug (str): The Course Mode Slug associated with any enrollment made by these codes.
        invoice (Invoice): (Optional) The associated invoice for this code.
        order (Order): (Optional) The associated order for this code.
        invoice_item (CourseRegistrationCodeInvoiceItem) : (Optional) The associated CourseRegistrationCodeInvoiceItem

    Returns:
        The newly created CourseRegistrationCode.
    """
    code = random_code_generator()

    # check if the generated code is in the Coupon Table; on a collision,
    # recurse to draw a fresh random code.
    matching_coupons = Coupon.objects.filter(code=code, is_active=True)
    if matching_coupons:
        return save_registration_code(
            user, course_id, mode_slug, invoice=invoice, order=order, invoice_item=invoice_item
        )
    course_registration = CourseRegistrationCode(
        code=code,
        course_id=unicode(course_id),
        created_by=user,
        invoice=invoice,
        order=order,
        mode_slug=mode_slug,
        invoice_item=invoice_item
    )
    try:
        course_registration.save()
        return course_registration
    except IntegrityError:
        # Presumably the randomly generated code collided with an existing row
        # (e.g. a concurrent request) -- retry with a new code.
        return save_registration_code(
            user, course_id, mode_slug, invoice=invoice, order=order, invoice_item=invoice_item
        )


def registration_codes_csv(file_name, codes_list, csv_type=None):
    """
    Respond with the csv headers and data rows given a dict of codes list.

    :param file_name: attachment filename for the CSV HTTP response
    :param codes_list: registration-code rows to export
    :param csv_type: optional export flavor, passed through to
        instructor_analytics.basic.course_registration_features
    """
    # csv headers
    query_features = [
        'code', 'redeem_code_url', 'course_id', 'company_name', 'created_by',
        'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference', 'is_valid'
    ]

    registration_codes = instructor_analytics.basic.course_registration_features(query_features, codes_list, csv_type)
    header, data_rows = instructor_analytics.csvs.format_dictlist(registration_codes, query_features)
    return instructor_analytics.csvs.create_csv_response(file_name, header, data_rows)


def random_code_generator():
    """
    Generate a random alphanumeric code of the length defined in the
    REGISTRATION_CODE_LENGTH setting (default 8).
    """
    code_length = getattr(settings, 'REGISTRATION_CODE_LENGTH', 8)
    return generate_random_string(code_length)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def get_registration_codes(request, course_id):  # pylint: disable=unused-argument
    """
    Respond with csv which contains a summary of all Registration Codes.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    # filter all the course registration codes
    registration_codes = CourseRegistrationCode.objects.filter(
        course_id=course_id
    ).order_by('invoice_item__invoice__company_name')

    # Optionally narrow the export to a single invoicing company.
    company_name = request.POST['download_company_name']
    if company_name:
        registration_codes = registration_codes.filter(invoice_item__invoice__company_name=company_name)

    csv_type = 'download'
    return registration_codes_csv("Registration_Codes.csv", registration_codes, csv_type)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_sales_admin
@require_POST
def generate_registration_codes(request, course_id):
    """
    Respond with csv which contains a summary of all Generated Codes.
    """
    course_id = CourseKey.from_string(course_id)
    invoice_copy = False

    # convert the course registration code number into an integer; tolerate
    # values posted as floats (e.g. "10.0").
    try:
        course_code_number = int(request.POST['total_registration_codes'])
    except ValueError:
        course_code_number = int(float(request.POST['total_registration_codes']))

    company_name = request.POST['company_name']
    company_contact_name = request.POST['company_contact_name']
    company_contact_email = request.POST['company_contact_email']
    unit_price = request.POST['unit_price']

    # Parse the unit price to two decimal places, truncating (ROUND_DOWN)
    # rather than rounding up; reject unparseable input with a 400.
    try:
        unit_price = (
            decimal.Decimal(unit_price)
        ).quantize(
            decimal.Decimal('.01'),
            rounding=decimal.ROUND_DOWN
        )
    except decimal.InvalidOperation:
        return HttpResponse(
            status=400,
            content=_(u"Could not parse amount as a decimal")
        )

    recipient_name = request.POST['recipient_name']
    recipient_email = request.POST['recipient_email']
    address_line_1 = request.POST['address_line_1']
    address_line_2 = request.POST['address_line_2']
    address_line_3 = request.POST['address_line_3']
    city = request.POST['city']
    state = request.POST['state']
    zip_code = request.POST['zip']
    country = request.POST['country']
    internal_reference = request.POST['internal_reference']
    customer_reference_number = request.POST['customer_reference_number']
    recipient_list = [recipient_email]
    # If an invoice copy was requested, also email the requesting user and
    # remember that preference.
    if request.POST.get('invoice', False):
        recipient_list.append(request.user.email)
        invoice_copy = True

    sale_price = unit_price * course_code_number
    set_user_preference(request.user, INVOICE_KEY, invoice_copy)
    sale_invoice = Invoice.objects.create(
        total_amount=sale_price,
        company_name=company_name,
        company_contact_email=company_contact_email,
        company_contact_name=company_contact_name,
        course_id=course_id,
        recipient_name=recipient_name,
        recipient_email=recipient_email,
        address_line_1=address_line_1,
        address_line_2=address_line_2,
        address_line_3=address_line_3,
        city=city,
        state=state,
        zip=zip_code,
        country=country,
        internal_reference=internal_reference,
        customer_reference_number=customer_reference_number
    )

    invoice_item = CourseRegistrationCodeInvoiceItem.objects.create(
        invoice=sale_invoice,
        qty=course_code_number,
        unit_price=unit_price,
        course_id=course_id
    )
    course = get_course_by_id(course_id, depth=0)
    paid_modes = CourseMode.paid_modes_for_course(course_id)

    # Codes can only be generated for a course with exactly one paid mode;
    # anything else is a configuration error, surfaced as a 500.
    if len(paid_modes) != 1:
        msg = (
            u"Generating Code Redeem Codes for Course '{course_id}', which must have a single paid course mode. "
            u"This is a configuration issue. Current course modes with payment options: {paid_modes}"
        ).format(course_id=course_id, paid_modes=paid_modes)
        log.error(msg)
        return HttpResponse(
            status=500,
            content=_(u"Unable to generate redeem codes because of course misconfiguration.")
        )

    course_mode = paid_modes[0]
    course_price = course_mode.min_price

    registration_codes = []
    for __ in range(course_code_number):  # pylint: disable=redefined-outer-name
        generated_registration_code = save_registration_code(
            request.user, course_id, course_mode.slug, invoice=sale_invoice, order=None, invoice_item=invoice_item
        )
        registration_codes.append(generated_registration_code)

    site_name = microsite.get_value('SITE_NAME', 'localhost')
    quantity = course_code_number
    # Difference between list price (qty * course price) and the negotiated sale price.
    discount = (float(quantity * course_price) - float(sale_price))
    course_url = '{base_url}{course_about}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        course_about=reverse('about_course', kwargs={'course_id': course_id.to_deprecated_string()})
    )
    dashboard_url = '{base_url}{dashboard}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        dashboard=reverse('dashboard')
    )
    # PDF generation is best-effort: on any failure we log and fall back to a
    # plain-text "unavailable" attachment below.
    try:
        pdf_file = sale_invoice.generate_pdf_invoice(course, course_price, int(quantity), float(sale_price))
    except Exception:  # pylint: disable=broad-except
        log.exception('Exception at creating pdf file.')
        pdf_file = None

    from_address = microsite.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
    # Template context shared by the email body and the invoice attachments.
    context = {
        'invoice': sale_invoice,
        'site_name': site_name,
        'course': course,
        'course_price': course_price,
        'sub_total': course_price * quantity,
        'discount': discount,
        'sale_price': sale_price,
        'quantity': quantity,
        'registration_codes': registration_codes,
        'currency_symbol': settings.PAID_COURSE_REGISTRATION_CURRENCY[1],
        'course_url': course_url,
        'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
        'dashboard_url': dashboard_url,
        'contact_email': from_address,
        'corp_address': microsite.get_value('invoice_corp_address', settings.INVOICE_CORP_ADDRESS),
        'payment_instructions': microsite.get_value('invoice_payment_instructions', settings.INVOICE_PAYMENT_INSTRUCTIONS),
        'date': time.strftime("%m/%d/%Y")
    }
    # composes registration codes invoice email
    subject = u'Confirmation and Invoice for {course_name}'.format(course_name=course.display_name)
    message = render_to_string('emails/registration_codes_sale_email.txt', context)

    invoice_attachment = render_to_string('emails/registration_codes_sale_invoice_attachment.txt', context)

    # NOTE: single bulk send is intentionally disabled; one email per recipient
    # is sent in the loop below instead.
    #send_mail(subject, message, from_address, recipient_list, fail_silently=False)
    csv_file = StringIO.StringIO()
    csv_writer = csv.writer(csv_file)
    for registration_code in registration_codes:
        full_redeem_code_url = 'http://{base_url}{redeem_code_url}'.format(
            base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
            redeem_code_url=reverse('register_code_redemption', kwargs={'registration_code': registration_code.code})
        )
        csv_writer.writerow([registration_code.code, full_redeem_code_url])

    finance_email = microsite.get_value('finance_email', settings.FINANCE_EMAIL)
    if finance_email:
        # append the finance email into the recipient_list
        recipient_list.append(finance_email)

    # send a unique email for each recipient, don't put all email addresses in a single email
    for recipient in recipient_list:
        email = EmailMessage()
        email.subject = subject
        email.body = message
        email.from_email = from_address
        email.to = [recipient]
        email.attach(u'RegistrationCodes.csv', csv_file.getvalue(), 'text/csv')
        email.attach(u'Invoice.txt', invoice_attachment, 'text/plain')
        if pdf_file is not None:
            email.attach(u'Invoice.pdf', pdf_file.getvalue(), 'application/pdf')
        else:
            file_buffer = StringIO.StringIO(_('pdf download unavailable right now, please contact support.'))
            email.attach(u'pdf_unavailable.txt', file_buffer.getvalue(), 'text/plain')
        email.send()

    return registration_codes_csv("Registration_Codes.csv", registration_codes)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def active_registration_codes(request, course_id):  # pylint: disable=unused-argument
    """
    Respond with csv which contains a summary of all Active Registration Codes.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    # find all the registration codes in this course
    registration_codes_list = CourseRegistrationCode.objects.filter(
        course_id=course_id
    ).order_by('invoice_item__invoice__company_name')

    company_name = request.POST['active_company_name']
    if company_name:
        registration_codes_list = registration_codes_list.filter(invoice_item__invoice__company_name=company_name)
    # find the redeemed registration codes if any exist in the db
    code_redemption_set = RegistrationCodeRedemption.objects.select_related(
        'registration_code', 'registration_code__invoice_item__invoice'
    ).filter(registration_code__course_id=course_id)
    if code_redemption_set.exists():
        redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
        # exclude the redeemed registration codes from the registration codes list and you will get
        # all the registration codes that are active
        registration_codes_list = registration_codes_list.exclude(code__in=redeemed_registration_codes)

    return registration_codes_csv("Active_Registration_Codes.csv", registration_codes_list)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def spent_registration_codes(request, course_id):  # pylint: disable=unused-argument
    """
    Respond with csv which contains
    a summary of all Spent(used) Registration Codes.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    # find the redeemed registration codes if any exist in the db
    code_redemption_set = RegistrationCodeRedemption.objects.select_related('registration_code').filter(
        registration_code__course_id=course_id
    )
    spent_codes_list = []
    if code_redemption_set.exists():
        redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
        # filter the Registration Codes by course id and the redeemed codes and
        # you will get a list of all the spent(Redeemed) Registration Codes
        spent_codes_list = CourseRegistrationCode.objects.filter(
            course_id=course_id, code__in=redeemed_registration_codes
        ).order_by('invoice_item__invoice__company_name').select_related('invoice_item__invoice')

        # Optionally narrow the export to a single invoicing company.
        company_name = request.POST['spent_company_name']
        if company_name:
            spent_codes_list = spent_codes_list.filter(invoice_item__invoice__company_name=company_name)  # pylint: disable=maybe-no-member

    csv_type = 'spent'
    return registration_codes_csv("Spent_Registration_Codes.csv", spent_codes_list, csv_type)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_anon_ids(request, course_id):  # pylint: disable=unused-argument
    """
    Respond with 2-column CSV output of user-id, anonymized-user-id
    """
    # TODO: the User.objects query and CSV generation here could be
    # centralized into instructor_analytics. Currently instructor_analytics
    # has similar functionality but not quite what's needed.
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    def csv_response(filename, header, rows):
        """Returns a CSV http response for the given header and rows (excel/utf-8)."""
        response = HttpResponse(mimetype='text/csv')
        response['Content-Disposition'] = 'attachment; filename={0}'.format(unicode(filename).encode('utf-8'))
        writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
        # In practice, there should not be non-ascii data in this query,
        # but trying to do the right thing anyway.
        encoded = [unicode(s).encode('utf-8') for s in header]
        writer.writerow(encoded)
        for row in rows:
            encoded = [unicode(s).encode('utf-8') for s in row]
            writer.writerow(encoded)
        return response

    students = User.objects.filter(
        courseenrollment__course_id=course_id,
    ).order_by('id')
    header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
    # save=False: presumably avoids persisting any newly generated anonymous
    # id mappings while building the report -- TODO confirm against the helpers.
    rows = [[s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, course_id, save=False)] for s in students]
    return csv_response(course_id.to_deprecated_string().replace('/', '-') + '-anon-ids.csv', header, rows)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@common_exceptions_400
@require_level('staff')
@require_query_params(
    unique_student_identifier="email or username of student for whom to get progress url"
)
def get_student_progress_url(request, course_id):
    """
    Get the progress url of a student.
    Limited to staff access.

    Takes query parameter unique_student_identifier and if the student exists
    returns e.g. {
        'progress_url': '/../...'
    }
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    user = get_student_from_identifier(request.GET.get('unique_student_identifier'))

    progress_url = reverse('student_progress', kwargs={'course_id': course_id.to_deprecated_string(), 'student_id': user.id})

    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        'progress_url': progress_url,
    }
    return JsonResponse(response_payload)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
    problem_to_reset="problem urlname to reset"
)
@common_exceptions_400
def reset_student_attempts(request, course_id):
    """
    Resets a students attempts counter or starts a task to reset all students
    attempts counters. Optionally deletes student state for a problem. Limited to
    staff access. Some sub-methods limited to instructor access.

    Takes some of the following query parameters
        - problem_to_reset is a urlname of a problem
        - unique_student_identifier is an email or username
        - all_students is a boolean
            requires instructor access
            mutually exclusive with delete_module
        - delete_module is a boolean
            requires instructor access
            mutually exclusive with all_students
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )

    problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
    student_identifier = request.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)

    # Booleans arrive as strings from the query string.
    all_students = request.GET.get('all_students', False) in ['true', 'True', True]
    delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]

    # parameter combinations
    if all_students and student:
        return HttpResponseBadRequest(
            "all_students and unique_student_identifier are mutually exclusive."
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            "all_students and delete_module are mutually exclusive."
        )

    # instructor authorization: course-wide or destructive operations require
    # more than staff access.
    if all_students or delete_module:
        if not has_access(request.user, 'instructor', course):
            return HttpResponseForbidden("Requires instructor access.")

    try:
        module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
    except InvalidKeyError:
        return HttpResponseBadRequest()

    response_payload = {}
    response_payload['problem_to_reset'] = problem_to_reset

    if student:
        # Single-student reset happens synchronously in this request.
        try:
            enrollment.reset_student_attempts(course_id, student, module_state_key, delete_module=delete_module)
        except StudentModule.DoesNotExist:
            return HttpResponseBadRequest(_("Module does not exist."))
        except sub_api.SubmissionError:
            # Trust the submissions API to log the error
            error_msg = _("An error occurred while deleting the score.")
            return HttpResponse(error_msg, status=500)
        response_payload['student'] = student_identifier
    elif all_students:
        # Course-wide reset is delegated to a background instructor task.
        instructor_task.api.submit_reset_problem_attempts_for_all_students(request, module_state_key)
        response_payload['task'] = 'created'
        response_payload['student'] = 'All Students'
    else:
        return HttpResponseBadRequest()

    return JsonResponse(response_payload)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@common_exceptions_400
def reset_student_attempts_for_entrance_exam(request, course_id):  # pylint: disable=invalid-name
    """
    Resets a students attempts counter or starts a task to reset all students
    attempts counters for entrance exam. Optionally deletes student state for
    entrance exam. Limited to staff access. Some sub-methods limited to instructor access.
    Following are possible query parameters
        - unique_student_identifier is an email or username
        - all_students is a boolean
            requires instructor access
            mutually exclusive with delete_module
        - delete_module is a boolean
            requires instructor access
            mutually exclusive with all_students
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )

    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam section.")
        )

    student_identifier = request.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    # Booleans arrive as strings from the query string.
    all_students = request.GET.get('all_students', False) in ['true', 'True', True]
    delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]

    # parameter combinations
    if all_students and student:
        return HttpResponseBadRequest(
            _("all_students and unique_student_identifier are mutually exclusive.")
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            _("all_students and delete_module are mutually exclusive.")
        )

    # instructor authorization: course-wide or destructive operations require
    # more than staff access.
    if all_students or delete_module:
        if not has_access(request.user, 'instructor', course):
            return HttpResponseForbidden(_("Requires instructor access."))

    # Both variants run as background instructor tasks; student may be None,
    # which targets all students.
    try:
        entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id)
        if delete_module:
            instructor_task.api.submit_delete_entrance_exam_state_for_student(request, entrance_exam_key, student)
        else:
            instructor_task.api.submit_reset_problem_attempts_in_entrance_exam(request, entrance_exam_key, student)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))

    response_payload = {'student': student_identifier or _('All Students'), 'task': 'created'}
    return JsonResponse(response_payload)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(problem_to_reset="problem urlname to reset")
@common_exceptions_400
def rescore_problem(request, course_id):
    """
    Starts a background process to rescore a problem, for one student or for
    all students. Limited to instructor access.

    Takes either of the following query parameters
        - problem_to_reset is a urlname of a problem
        - unique_student_identifier is an email or username
        - all_students is a boolean

    all_students and unique_student_identifier cannot both be present.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
    student_identifier = request.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)

    # Boolean arrives as a string from the query string.
    all_students = request.GET.get('all_students') in ['true', 'True', True]

    if not (problem_to_reset and (all_students or student)):
        return HttpResponseBadRequest("Missing query parameters.")

    if all_students and student:
        return HttpResponseBadRequest(
            "Cannot rescore with all_students and unique_student_identifier."
        )

    try:
        module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
    except InvalidKeyError:
        return HttpResponseBadRequest("Unable to parse problem id")

    response_payload = {}
    response_payload['problem_to_reset'] = problem_to_reset

    if student:
        response_payload['student'] = student_identifier
        instructor_task.api.submit_rescore_problem_for_student(request, module_state_key, student)
        response_payload['task'] = 'created'
    elif all_students:
        instructor_task.api.submit_rescore_problem_for_all_students(request, module_state_key)
        response_payload['task'] = 'created'
    else:
        return HttpResponseBadRequest()

    return JsonResponse(response_payload)


@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
def rescore_entrance_exam(request, course_id):
    """
    Starts a background process a students attempts counter for entrance exam.
    Optionally deletes student state for a problem.
    Limited to instructor access.

    Takes either of the following query parameters
        - unique_student_identifier is an email or username
        - all_students is a boolean

    all_students and unique_student_identifier cannot both be present.
""" course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_with_access( request.user, 'staff', course_id, depth=None ) student_identifier = request.GET.get('unique_student_identifier', None) student = None if student_identifier is not None: student = get_student_from_identifier(student_identifier) all_students = request.GET.get('all_students') in ['true', 'True', True] if not course.entrance_exam_id: return HttpResponseBadRequest( _("Course has no entrance exam section.") ) if all_students and student: return HttpResponseBadRequest( _("Cannot rescore with all_students and unique_student_identifier.") ) try: entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id) except InvalidKeyError: return HttpResponseBadRequest(_("Course has no valid entrance exam section.")) response_payload = {} if student: response_payload['student'] = student_identifier else: response_payload['student'] = _("All Students") instructor_task.api.submit_rescore_entrance_exam_for_student(request, entrance_exam_key, student) response_payload['task'] = 'created' return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def list_background_email_tasks(request, course_id): # pylint: disable=unused-argument """ List background email tasks. 
""" course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) task_type = 'bulk_course_email' # Specifying for the history of a single task type tasks = instructor_task.api.get_instructor_task_history(course_id, task_type=task_type) response_payload = { 'tasks': map(extract_task_features, tasks), } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def list_email_content(request, course_id): # pylint: disable=unused-argument """ List the content of bulk emails sent """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) task_type = 'bulk_course_email' # First get tasks list of bulk emails sent emails = instructor_task.api.get_instructor_task_history(course_id, task_type=task_type) response_payload = { 'emails': map(extract_email_features, emails), } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def list_instructor_tasks(request, course_id): """ List instructor tasks. Takes optional query paremeters. - With no arguments, lists running tasks. 
- `problem_location_str` lists task history for problem - `problem_location_str` and `unique_student_identifier` lists task history for problem AND student (intersection) """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) problem_location_str = strip_if_string(request.GET.get('problem_location_str', False)) student = request.GET.get('unique_student_identifier', None) if student is not None: student = get_student_from_identifier(student) if student and not problem_location_str: return HttpResponseBadRequest( "unique_student_identifier must accompany problem_location_str" ) if problem_location_str: try: module_state_key = course_id.make_usage_key_from_deprecated_string(problem_location_str) except InvalidKeyError: return HttpResponseBadRequest() if student: # Specifying for a single student's history on this problem tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key, student) else: # Specifying for single problem's history tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key) else: # If no problem or student, just get currently running tasks tasks = instructor_task.api.get_running_instructor_tasks(course_id) response_payload = { 'tasks': map(extract_task_features, tasks), } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def list_entrance_exam_instructor_tasks(request, course_id): # pylint: disable=invalid-name """ List entrance exam related instructor tasks. 
Takes either of the following query parameters - unique_student_identifier is an email or username - all_students is a boolean """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_by_id(course_id) student = request.GET.get('unique_student_identifier', None) if student is not None: student = get_student_from_identifier(student) try: entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id) except InvalidKeyError: return HttpResponseBadRequest(_("Course has no valid entrance exam section.")) if student: # Specifying for a single student's entrance exam history tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_id, entrance_exam_key, student) else: # Specifying for all student's entrance exam history tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_id, entrance_exam_key) response_payload = { 'tasks': map(extract_task_features, tasks), } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def list_report_downloads(_request, course_id): """ List grade CSV files that are available for download for this course. """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD') response_payload = { 'downloads': [ dict(name=name, url=url, link='<a href="{}">{}</a>'.format(url, name)) for name, url in report_store.links_for(course_id) ] } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_finance_admin def list_financial_report_downloads(_request, course_id): """ List grade CSV files that are available for download for this course. 
""" course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS') response_payload = { 'downloads': [ dict(name=name, url=url, link='<a href="{}">{}</a>'.format(url, name)) for name, url in report_store.links_for(course_id) ] } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def calculate_grades_csv(request, course_id): """ AlreadyRunningError is raised if the course's grades are already being updated. """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) try: instructor_task.api.submit_calculate_grades_csv(request, course_key) success_status = _("The grade report is being created." " To view the status of the report, see Pending Instructor Tasks below.") return JsonResponse({"status": success_status}) except AlreadyRunningError: already_running_status = _("The grade report is currently being created." " To view the status of the report, see Pending Instructor Tasks below." " You will be able to download the report when it is complete.") return JsonResponse({ "status": already_running_status }) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def problem_grade_report(request, course_id): """ Request a CSV showing students' grades for all problems in the course. AlreadyRunningError is raised if the course's grades are already being updated. """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) try: instructor_task.api.submit_problem_grade_report(request, course_key) success_status = _("The problem grade report is being created." " To view the status of the report, see Pending Instructor Tasks below.") return JsonResponse({"status": success_status}) except AlreadyRunningError: already_running_status = _("A problem grade report is already being generated." 
" To view the status of the report, see Pending Instructor Tasks below." " You will be able to download the report when it is complete.") return JsonResponse({ "status": already_running_status }) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_query_params('rolename') def list_forum_members(request, course_id): """ Lists forum members of a certain rolename. Limited to staff access. The requesting user must be at least staff. Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR which is limited to instructors. Takes query parameter `rolename`. """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_by_id(course_id) has_instructor_access = has_access(request.user, 'instructor', course) has_forum_admin = has_forum_access( request.user, course_id, FORUM_ROLE_ADMINISTRATOR ) rolename = request.GET.get('rolename') # default roles require either (staff & forum admin) or (instructor) if not (has_forum_admin or has_instructor_access): return HttpResponseBadRequest( "Operation requires staff & forum admin or instructor access" ) # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor) if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access: return HttpResponseBadRequest("Operation requires instructor access.") # filter out unsupported for roles if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]: return HttpResponseBadRequest(strip_tags( "Unrecognized rolename '{}'.".format(rolename) )) try: role = Role.objects.get(name=rolename, course_id=course_id) users = role.users.all().order_by('username') except Role.DoesNotExist: users = [] def extract_user_info(user): """ Convert user to dict for json rendering. 
""" return { 'username': user.username, 'email': user.email, 'first_name': user.first_name, 'last_name': user.last_name, } response_payload = { 'course_id': course_id.to_deprecated_string(), rolename: map(extract_user_info, users), } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_post_params(send_to="sending to whom", subject="subject line", message="message text") def send_email(request, course_id): """ Send an email to self, staff, or everyone involved in a course. Query Parameters: - 'send_to' specifies what group the email should be sent to Options are defined by the CourseEmail model in lms/djangoapps/bulk_email/models.py - 'subject' specifies email's subject - 'message' specifies email's content """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) if not bulk_email_is_enabled_for_course(course_id): return HttpResponseForbidden("Email is not enabled for this course.") send_to = request.POST.get("send_to") subject = request.POST.get("subject") message = request.POST.get("message") # allow two branding points to come from Microsites: which CourseEmailTemplate should be used # and what the 'from' field in the email should be # # If these are None (because we are not in a Microsite or they are undefined in Microsite config) than # the system will use normal system defaults template_name = microsite.get_value('course_email_template_name') from_addr = microsite.get_value('course_email_from_addr') # Create the CourseEmail object. This is saved immediately, so that # any transaction that has been pending up to this point will also be # committed. 
email = CourseEmail.create( course_id, request.user, send_to, subject, message, template_name=template_name, from_addr=from_addr ) # Submit the task, so that the correct InstructorTask object gets created (for monitoring purposes) instructor_task.api.submit_bulk_course_email(request, course_id, email.id) # pylint: disable=no-member response_payload = { 'course_id': course_id.to_deprecated_string(), 'success': True, } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_query_params( unique_student_identifier="email or username of user to change access", rolename="the forum role", action="'allow' or 'revoke'", ) @common_exceptions_400 def update_forum_role_membership(request, course_id): """ Modify user's forum role. The requesting user must be at least staff. Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR which is limited to instructors. No one can revoke an instructors FORUM_ROLE_ADMINISTRATOR status. 
Query parameters: - `email` is the target users email - `rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA] - `action` is one of ['allow', 'revoke'] """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_by_id(course_id) has_instructor_access = has_access(request.user, 'instructor', course) has_forum_admin = has_forum_access( request.user, course_id, FORUM_ROLE_ADMINISTRATOR ) unique_student_identifier = request.GET.get('unique_student_identifier') rolename = request.GET.get('rolename') action = request.GET.get('action') # default roles require either (staff & forum admin) or (instructor) if not (has_forum_admin or has_instructor_access): return HttpResponseBadRequest( "Operation requires staff & forum admin or instructor access" ) # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor) if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access: return HttpResponseBadRequest("Operation requires instructor access.") if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]: return HttpResponseBadRequest(strip_tags( "Unrecognized rolename '{}'.".format(rolename) )) user = get_student_from_identifier(unique_student_identifier) try: update_forum_role(course_id, user, rolename, action) except Role.DoesNotExist: return HttpResponseBadRequest("Role does not exist.") response_payload = { 'course_id': course_id.to_deprecated_string(), 'action': action, } return JsonResponse(response_payload) @require_POST def get_user_invoice_preference(request, course_id): # pylint: disable=unused-argument """ Gets invoice copy user's preferences. 
""" invoice_copy_preference = True invoice_preference_value = get_user_preference(request.user, INVOICE_KEY) if invoice_preference_value is not None: invoice_copy_preference = invoice_preference_value == 'True' return JsonResponse({ 'invoice_copy': invoice_copy_preference }) def _display_unit(unit): """ Gets string for displaying unit to user. """ name = getattr(unit, 'display_name', None) if name: return u'{0} ({1})'.format(name, unit.location.to_deprecated_string()) else: return unit.location.to_deprecated_string() @handle_dashboard_error @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_query_params('student', 'url', 'due_datetime') def change_due_date(request, course_id): """ Grants a due date extension to a student for a particular unit. """ course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id)) student = require_student_from_identifier(request.GET.get('student')) unit = find_unit(course, request.GET.get('url')) due_date = parse_datetime(request.GET.get('due_datetime')) set_due_date_extension(course, unit, student, due_date) return JsonResponse(_( 'Successfully changed due date for student {0} for {1} ' 'to {2}').format(student.profile.name, _display_unit(unit), due_date.strftime('%Y-%m-%d %H:%M'))) @handle_dashboard_error @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_query_params('student', 'url') def reset_due_date(request, course_id): """ Rescinds a due date extension for a student on a particular unit. 
""" course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id)) student = require_student_from_identifier(request.GET.get('student')) unit = find_unit(course, request.GET.get('url')) set_due_date_extension(course, unit, student, None) if not getattr(unit, "due", None): # It's possible the normal due date was deleted after an extension was granted: return JsonResponse( _("Successfully removed invalid due date extension (unit has no due date).") ) original_due_date_str = unit.due.strftime('%Y-%m-%d %H:%M') return JsonResponse(_( 'Successfully reset due date for student {0} for {1} ' 'to {2}').format(student.profile.name, _display_unit(unit), original_due_date_str)) @handle_dashboard_error @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_query_params('url') def show_unit_extensions(request, course_id): """ Shows all of the students which have due date extensions for the given unit. """ course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id)) unit = find_unit(course, request.GET.get('url')) return JsonResponse(dump_module_extensions(course, unit)) @handle_dashboard_error @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_query_params('student') def show_student_extensions(request, course_id): """ Shows all of the due date extensions granted to a particular student in a particular course. """ student = require_student_from_identifier(request.GET.get('student')) course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id)) return JsonResponse(dump_student_extensions(course, student)) def _split_input_list(str_list): """ Separate out individual student email from the comma, or space separated string. e.g. 
in: "[email protected], [email protected]\[email protected]\r [email protected]\r, [email protected]" out: ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]'] `str_list` is a string coming from an input text area returns a list of separated values """ new_list = re.split(r'[\n\r\s,]', str_list) new_list = [s.strip() for s in new_list] new_list = [s for s in new_list if s != ''] return new_list def _instructor_dash_url(course_key, section=None): """Return the URL for a section in the instructor dashboard. Arguments: course_key (CourseKey) Keyword Arguments: section (str): The name of the section to load. Returns: unicode: The URL of a section in the instructor dashboard. """ url = reverse('instructor_dashboard', kwargs={'course_id': unicode(course_key)}) if section is not None: url += u'#view-{section}'.format(section=section) return url @require_global_staff @require_POST def generate_example_certificates(request, course_id=None): # pylint: disable=unused-argument """Start generating a set of example certificates. Example certificates are used to verify that certificates have been configured correctly for the course. Redirects back to the intructor dashboard once certificate generation has begun. """ course_key = CourseKey.from_string(course_id) certs_api.generate_example_certificates(course_key) return redirect(_instructor_dash_url(course_key, section='certificates')) @require_global_staff @require_POST def enable_certificate_generation(request, course_id=None): """Enable/disable self-generated certificates for a course. Once self-generated certificates have been enabled, students who have passed the course will be able to generate certificates. Redirects back to the intructor dashboard once the setting has been updated. 
""" course_key = CourseKey.from_string(course_id) is_enabled = (request.POST.get('certificates-enabled', 'false') == 'true') certs_api.set_cert_generation_enabled(course_key, is_enabled) return redirect(_instructor_dash_url(course_key, section='certificates')) #---- Gradebook (shown to small courses only) ---- @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def spoc_gradebook(request, course_id): """ Show the gradebook for this course: - Only shown for courses with enrollment < settings.FEATURES.get("MAX_ENROLLMENT_INSTR_BUTTONS") - Only displayed to course staff """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_with_access(request.user, 'staff', course_key, depth=None) enrolled_students = User.objects.filter( courseenrollment__course_id=course_key, courseenrollment__is_active=1 ).order_by('username').select_related("profile") # possible extension: implement pagination to show to large courses student_info = [ { 'username': student.username, 'id': student.id, 'email': student.email, 'grade_summary': student_grades(student, request, course), 'realname': student.profile.name, } for student in enrolled_students ] return render_to_response('courseware/gradebook.html', { 'students': student_info, 'course': course, 'course_id': course_key, # Checked above 'staff_access': True, 'ordered_grades': sorted(course.grade_cutoffs.items(), key=lambda i: i[1], reverse=True), }) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_POST def mark_student_can_skip_entrance_exam(request, course_id): # pylint: disable=invalid-name """ Mark a student to skip entrance exam. Takes `unique_student_identifier` as required POST parameter. 
""" course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) student_identifier = request.POST.get('unique_student_identifier') student = get_student_from_identifier(student_identifier) __, created = EntranceExamConfiguration.objects.get_or_create(user=student, course_id=course_id) if created: message = _('This student (%s) will skip the entrance exam.') % student_identifier else: message = _('This student (%s) is already allowed to skip the entrance exam.') % student_identifier response_payload = { 'message': message, } return JsonResponse(response_payload) @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_global_staff @require_POST def start_certificate_generation(request, course_id): """ Start generating certificates for all students enrolled in given course. """ course_key = CourseKey.from_string(course_id) task = instructor_task.api.generate_certificates_for_all_students(request, course_key) message = _('Certificate generation task for all students of this course has been started. ' 'You can view the status of the generation task in the "Pending Tasks" section.') response_payload = { 'message': message, 'task_id': task.task_id } return JsonResponse(response_payload)
agpl-3.0
-9,094,045,273,673,961,000
38.275251
159
0.653121
false
hlzz/dotfiles
graphics/VTK-7.0.0/IO/GeoJSON/Testing/Python/TestGeoJSONReader.py
1
3943
from __future__ import print_function import sys import vtk from vtk.test import Testing def load_geojson(input_string, feature_properties={}): '''Parses input_string with vtkGeoJSONReader, returns vtkPolyData feature_properties is a dictionary of name-default_values to attach as cell data in the returned vtkPolyData. ''' reader = vtk.vtkGeoJSONReader() #reader.DebugOn() reader.StringInputModeOn() reader.SetStringInput(input_string) for name,default_value in feature_properties.items(): reader.AddFeatureProperty(name, default_value) reader.Update() return reader.GetOutput() if __name__ == '__main__' : # Use feature collection example taken from the geojson spec. # Coped from http://geojson.org/geojson-spec.html (October 2014). # Features are in/near the island of Summatra (in western Indonesia). input_string = \ """ { "type": "FeatureCollection", "features": [ { "type": "Feature", "geometry": {"type": "Point", "coordinates": [102.0, 0.5]}, "properties": {"prop0": "value0"} }, { "type": "Feature", "geometry": { "type": "LineString", "coordinates": [ [102.0, 0.0], [103.0, 1.0], [104.0, 0.0], [105.0, 1.0] ] }, "properties": { "prop0": "value0", "prop1": 0.0 } }, { "type": "Feature", "geometry": { "type": "Polygon", "coordinates": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] ] }, "properties": { "prop0": "value0", "prop1": {"this": "that"} } } ] } """ prop0_default = vtk.vtkVariant('default') feature_properties = {'prop0': prop0_default} polydata = load_geojson(input_string, feature_properties) if polydata is None: print('Failed to read input string and return vtkPolyData') sys.exit(1) num_errors = 0 # Check cell counts expected_verts = 1 expected_lines = 1 expected_polys = 1 num_verts = polydata.GetNumberOfVerts() if num_verts != expected_verts: print('Wrong number of verts: returned %s, should be %s' % \ (num_verts, expected_verts)) num_errors += 1 num_lines = polydata.GetNumberOfLines() if num_lines != expected_lines: print('Wrong number 
of lines: returned %s, should be %s' % \ (num_lines, expected_lines)) num_errors += 1 else: # Check number of points in the (first) polyline id_list = vtk.vtkIdList() polydata.GetLines().GetCell(0, id_list) if id_list.GetNumberOfIds() != 4: print('Wrong number of points in line 0: returned %s, should be %s' % \ (id_list.GetNumberOfIds(), 4)) num_errors += 1 num_polys = polydata.GetNumberOfPolys() if num_polys != expected_polys: print('Wrong number of polys: returned %s, should be %s' % \ (num_polys, expected_polys)) num_errors += 1 else: # Check number of points in the (first) polygon id_list = vtk.vtkIdList() polydata.GetPolys().GetCell(0, id_list) if id_list.GetNumberOfIds() != 4: print('Wrong number of points in poly 0: returned %s, should be %s' % \ (id_list.GetNumberOfIds(), 4)) num_errors += 1 # Check cell data cell_data = polydata.GetCellData() # All polydata generated from GeoJSON have feature-id array feature_id_array = cell_data.GetAbstractArray('feature-id') if feature_id_array is None: print('feature-id array missing') num_errors += 1 # Test case also specified a prop0 array prop0_array = cell_data.GetAbstractArray('prop0') if prop0_array is None: print('prop0 array missing') num_errors += 1 print('num_errors:', num_errors) sys.exit(num_errors)
bsd-3-clause
7,147,107,941,439,486,000
29.047244
77
0.585087
false
TNT-Samuel/Coding-Projects
DNS Server/Source - Copy/Lib/email/utils.py
22
13488
# Copyright (C) 2001-2010 Python Software Foundation # Author: Barry Warsaw # Contact: [email protected] """Miscellaneous utilities.""" __all__ = [ 'collapse_rfc2231_value', 'decode_params', 'decode_rfc2231', 'encode_rfc2231', 'formataddr', 'formatdate', 'format_datetime', 'getaddresses', 'make_msgid', 'mktime_tz', 'parseaddr', 'parsedate', 'parsedate_tz', 'parsedate_to_datetime', 'unquote', ] import os import re import time import random import socket import datetime import urllib.parse from email._parseaddr import quote from email._parseaddr import AddressList as _AddressList from email._parseaddr import mktime_tz from email._parseaddr import parsedate, parsedate_tz, _parsedate_tz # Intrapackage imports from email.charset import Charset COMMASPACE = ', ' EMPTYSTRING = '' UEMPTYSTRING = '' CRLF = '\r\n' TICK = "'" specialsre = re.compile(r'[][\\()<>@,:;".]') escapesre = re.compile(r'[\\"]') def _has_surrogates(s): """Return True if s contains surrogate-escaped binary data.""" # This check is based on the fact that unless there are surrogates, utf8 # (Python's default encoding) can encode any string. This is the fastest # way to check for surrogates, see issue 11454 for timings. try: s.encode() return False except UnicodeEncodeError: return True # How to deal with a string containing bytes before handing it to the # application through the 'normal' interface. def _sanitize(string): # Turn any escaped bytes into unicode 'unknown' char. If the escaped # bytes happen to be utf-8 they will instead get decoded, even if they # were invalid in the charset the source was supposed to be in. This # seems like it is not a bad thing; a defect was still registered. 
original_bytes = string.encode('utf-8', 'surrogateescape') return original_bytes.decode('utf-8', 'replace') # Helpers def formataddr(pair, charset='utf-8'): """The inverse of parseaddr(), this takes a 2-tuple of the form (realname, email_address) and returns the string value suitable for an RFC 2822 From, To or Cc header. If the first element of pair is false, then the second element is returned unmodified. Optional charset if given is the character set that is used to encode realname in case realname is not ASCII safe. Can be an instance of str or a Charset-like object which has a header_encode method. Default is 'utf-8'. """ name, address = pair # The address MUST (per RFC) be ascii, so raise a UnicodeError if it isn't. address.encode('ascii') if name: try: name.encode('ascii') except UnicodeEncodeError: if isinstance(charset, str): charset = Charset(charset) encoded_name = charset.header_encode(name) return "%s <%s>" % (encoded_name, address) else: quotes = '' if specialsre.search(name): quotes = '"' name = escapesre.sub(r'\\\g<0>', name) return '%s%s%s <%s>' % (quotes, name, quotes, address) return address def getaddresses(fieldvalues): """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" all = COMMASPACE.join(fieldvalues) a = _AddressList(all) return a.addresslist def _format_timetuple_and_zone(timetuple, zone): return '%s, %02d %s %04d %02d:%02d:%02d %s' % ( ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timetuple[6]], timetuple[2], ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][timetuple[1] - 1], timetuple[0], timetuple[3], timetuple[4], timetuple[5], zone) def formatdate(timeval=None, localtime=False, usegmt=False): """Returns a date string as specified by RFC 2822, e.g.: Fri, 09 Nov 2001 01:08:47 -0000 Optional timeval if given is a floating point time value as accepted by gmtime() and localtime(), otherwise the current time is used. 
Optional localtime is a flag that when True, interprets timeval, and returns a date relative to the local timezone instead of UTC, properly taking daylight savings time into account. Optional argument usegmt means that the timezone is written out as an ascii string, not numeric one (so "GMT" instead of "+0000"). This is needed for HTTP, and is only used when localtime==False. """ # Note: we cannot use strftime() because that honors the locale and RFC # 2822 requires that day and month names be the English abbreviations. if timeval is None: timeval = time.time() if localtime or usegmt: dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc) else: dt = datetime.datetime.utcfromtimestamp(timeval) if localtime: dt = dt.astimezone() usegmt = False return format_datetime(dt, usegmt) def format_datetime(dt, usegmt=False): """Turn a datetime into a date string as specified in RFC 2822. If usegmt is True, dt must be an aware datetime with an offset of zero. In this case 'GMT' will be rendered instead of the normal +0000 required by RFC2822. This is to support HTTP headers involving date stamps. """ now = dt.timetuple() if usegmt: if dt.tzinfo is None or dt.tzinfo != datetime.timezone.utc: raise ValueError("usegmt option requires a UTC datetime") zone = 'GMT' elif dt.tzinfo is None: zone = '-0000' else: zone = dt.strftime("%z") return _format_timetuple_and_zone(now, zone) def make_msgid(idstring=None, domain=None): """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: <142480216486.20800.16526388040877946887@nightshade.la.mastaler.com> Optional idstring if given is a string used to strengthen the uniqueness of the message id. Optional domain if given provides the portion of the message id after the '@'. It defaults to the locally defined hostname. """ timeval = int(time.time()*100) pid = os.getpid() randint = random.getrandbits(64) if idstring is None: idstring = '' else: idstring = '.' 
+ idstring if domain is None: domain = socket.getfqdn() msgid = '<%d.%d.%d%s@%s>' % (timeval, pid, randint, idstring, domain) return msgid def parsedate_to_datetime(data): *dtuple, tz = _parsedate_tz(data) if tz is None: return datetime.datetime(*dtuple[:6]) return datetime.datetime(*dtuple[:6], tzinfo=datetime.timezone(datetime.timedelta(seconds=tz))) def parseaddr(addr): """ Parse addr into its constituent realname and email address parts. Return a tuple of realname and email address, unless the parse fails, in which case return a 2-tuple of ('', ''). """ addrs = _AddressList(addr).addresslist if not addrs: return '', '' return addrs[0] # rfc822.unquote() doesn't properly de-backslash-ify in Python pre-2.3. def unquote(str): """Remove quotes from a string.""" if len(str) > 1: if str.startswith('"') and str.endswith('"'): return str[1:-1].replace('\\\\', '\\').replace('\\"', '"') if str.startswith('<') and str.endswith('>'): return str[1:-1] return str # RFC2231-related functions - parameter encoding and decoding def decode_rfc2231(s): """Decode string according to RFC 2231""" parts = s.split(TICK, 2) if len(parts) <= 2: return None, None, s return parts def encode_rfc2231(s, charset=None, language=None): """Encode string according to RFC 2231. If neither charset nor language is given, then s is returned as-is. If charset is given but not language, the string is encoded using the empty string for language. """ s = urllib.parse.quote(s, safe='', encoding=charset or 'ascii') if charset is None and language is None: return s if language is None: language = '' return "%s'%s'%s" % (charset, language, s) rfc2231_continuation = re.compile(r'^(?P<name>\w+)\*((?P<num>[0-9]+)\*?)?$', re.ASCII) def decode_params(params): """Decode parameters list according to RFC 2231. params is a sequence of 2-tuples containing (param name, string value). 
""" # Copy params so we don't mess with the original params = params[:] new_params = [] # Map parameter's name to a list of continuations. The values are a # 3-tuple of the continuation number, the string value, and a flag # specifying whether a particular segment is %-encoded. rfc2231_params = {} name, value = params.pop(0) new_params.append((name, value)) while params: name, value = params.pop(0) if name.endswith('*'): encoded = True else: encoded = False value = unquote(value) mo = rfc2231_continuation.match(name) if mo: name, num = mo.group('name', 'num') if num is not None: num = int(num) rfc2231_params.setdefault(name, []).append((num, value, encoded)) else: new_params.append((name, '"%s"' % quote(value))) if rfc2231_params: for name, continuations in rfc2231_params.items(): value = [] extended = False # Sort by number continuations.sort() # And now append all values in numerical order, converting # %-encodings for the encoded segments. If any of the # continuation names ends in a *, then the entire string, after # decoding segments and concatenating, must have the charset and # language specifiers at the beginning of the string. for num, s, encoded in continuations: if encoded: # Decode as "latin-1", so the characters in s directly # represent the percent-encoded octet values. # collapse_rfc2231_value treats this as an octet sequence. s = urllib.parse.unquote(s, encoding="latin-1") extended = True value.append(s) value = quote(EMPTYSTRING.join(value)) if extended: charset, language, value = decode_rfc2231(value) new_params.append((name, (charset, language, '"%s"' % value))) else: new_params.append((name, '"%s"' % value)) return new_params def collapse_rfc2231_value(value, errors='replace', fallback_charset='us-ascii'): if not isinstance(value, tuple) or len(value) != 3: return unquote(value) # While value comes to us as a unicode string, we need it to be a bytes # object. 
We do not want bytes() normal utf-8 decoder, we want a straight # interpretation of the string as character bytes. charset, language, text = value if charset is None: # Issue 17369: if charset/lang is None, decode_rfc2231 couldn't parse # the value, so use the fallback_charset. charset = fallback_charset rawbytes = bytes(text, 'raw-unicode-escape') try: return str(rawbytes, charset, errors) except LookupError: # charset is not a known codec. return unquote(text) # # datetime doesn't provide a localtime function yet, so provide one. Code # adapted from the patch in issue 9527. This may not be perfect, but it is # better than not having it. # def localtime(dt=None, isdst=-1): """Return local time as an aware datetime object. If called without arguments, return current time. Otherwise *dt* argument should be a datetime instance, and it is converted to the local time zone according to the system time zone database. If *dt* is naive (that is, dt.tzinfo is None), it is assumed to be in local time. In this case, a positive or zero value for *isdst* causes localtime to presume initially that summer time (for example, Daylight Saving Time) is or is not (respectively) in effect for the specified time. A negative value for *isdst* causes the localtime() function to attempt to divine whether summer time is in effect for the specified time. """ if dt is None: return datetime.datetime.now(datetime.timezone.utc).astimezone() if dt.tzinfo is not None: return dt.astimezone() # We have a naive datetime. Convert to a (localtime) timetuple and pass to # system mktime together with the isdst hint. System mktime will return # seconds since epoch. tm = dt.timetuple()[:-1] + (isdst,) seconds = time.mktime(tm) localtm = time.localtime(seconds) try: delta = datetime.timedelta(seconds=localtm.tm_gmtoff) tz = datetime.timezone(delta, localtm.tm_zone) except AttributeError: # Compute UTC offset and compare with the value implied by tm_isdst. 
# If the values match, use the zone name implied by tm_isdst. delta = dt - datetime.datetime(*time.gmtime(seconds)[:6]) dst = time.daylight and localtm.tm_isdst > 0 gmtoff = -(time.altzone if dst else time.timezone) if delta == datetime.timedelta(seconds=gmtoff): tz = datetime.timezone(delta, time.tzname[dst]) else: tz = datetime.timezone(delta) return dt.replace(tzinfo=tz)
gpl-3.0
-4,224,727,550,451,570,000
34.87234
79
0.637974
false
todaychi/hue
desktop/core/ext-py/Mako-0.8.1/test/test_lexer.py
36
28240
from mako.lexer import Lexer from mako import exceptions, util, compat from test.util import flatten_result from mako.template import Template import re from test import TemplateTest, eq_, assert_raises_message # create fake parsetree classes which are constructed # exactly as the repr() of a real parsetree object. # this allows us to use a Python construct as the source # of a comparable repr(), which is also hit by the 2to3 tool. def repr_arg(x): if isinstance(x, dict): return util.sorted_dict_repr(x) else: return repr(x) def _as_unicode(arg): if isinstance(arg, compat.string_types): return compat.text_type(arg) elif isinstance(arg, dict): return dict( (_as_unicode(k), _as_unicode(v)) for k, v in arg.items() ) else: return arg from mako import parsetree for cls in list(parsetree.__dict__.values()): if isinstance(cls, type) and \ issubclass(cls, parsetree.Node): clsname = cls.__name__ exec((""" class %s(object): def __init__(self, *args): self.args = [_as_unicode(arg) for arg in args] def __repr__(self): return "%%s(%%s)" %% ( self.__class__.__name__, ", ".join(repr_arg(x) for x in self.args) ) """ % clsname), locals()) # NOTE: most assertion expressions were generated, then formatted # by PyTidy, hence the dense formatting. class LexerTest(TemplateTest): def _compare(self, node, expected): eq_(repr(node), repr(expected)) def test_text_and_tag(self): template = """ <b>Hello world</b> <%def name="foo()"> this is a def. </%def> and some more text. 
""" node = Lexer(template).parse() self._compare(node, TemplateNode({}, [Text('''\n<b>Hello world</b>\n ''', (1, 1)), DefTag('def', {'name': 'foo()'}, (3, 9), [Text('''\n this is a def.\n ''', (3, 28))]), Text('''\n\n and some more text.\n''', (5, 16))])) def test_unclosed_tag(self): template = """ <%def name="foo()"> other text """ try: nodes = Lexer(template).parse() assert False except exceptions.SyntaxException: eq_( str(compat.exception_as()), "Unclosed tag: <%def> at line: 5 char: 9" ) def test_onlyclosed_tag(self): template = \ """ <%def name="foo()"> foo </%def> </%namespace> hi. """ self.assertRaises(exceptions.SyntaxException, Lexer(template).parse) def test_noexpr_allowed(self): template = \ """ <%namespace name="${foo}"/> """ self.assertRaises(exceptions.CompileException, Lexer(template).parse) def test_unmatched_tag(self): template = \ """ <%namespace name="bar"> <%def name="foo()"> foo </%namespace> </%def> hi. """ self.assertRaises(exceptions.SyntaxException, Lexer(template).parse) def test_nonexistent_tag(self): template = """ <%lala x="5"/> """ self.assertRaises(exceptions.CompileException, Lexer(template).parse) def test_wrongcase_tag(self): template = \ """ <%DEF name="foo()"> </%def> """ self.assertRaises(exceptions.CompileException, Lexer(template).parse) def test_percent_escape(self): template = \ """ %% some whatever. 
%% more some whatever % if foo: % endif """ node = Lexer(template).parse() self._compare(node, TemplateNode({}, [Text('''\n\n''', (1, 1)), Text('''% some whatever.\n\n''', (3, 2)), Text(' %% more some whatever\n', (5, 2)), ControlLine('if', 'if foo:', False, (6, 1)), ControlLine('if', 'endif', True, (7, 1)), Text(' ', (8, 1))])) def test_text_tag(self): template = \ """ ## comment % if foo: hi % endif <%text> # more code % more code <%illegal compionent>/></> <%def name="laal()">def</%def> </%text> <%def name="foo()">this is foo</%def> % if bar: code % endif """ node = Lexer(template).parse() self._compare(node, TemplateNode({}, [Text('\n', (1, 1)), Comment('comment', (2, 1)), ControlLine('if', 'if foo:', False, (3, 1)), Text(' hi\n', (4, 1)), ControlLine('if', 'endif', True, (5, 1)), Text(' ', (6, 1)), TextTag('text', {}, (6, 9), [Text('\n # more code\n\n ' ' % more code\n <%illegal compionent>/></>\n' ' <%def name="laal()">def</%def>\n\n\n ', (6, 16))]), Text('\n\n ', (14, 17)), DefTag('def', {'name': 'foo()'}, (16, 9), [Text('this is foo', (16, 28))]), Text('\n\n', (16, 46)), ControlLine('if', 'if bar:', False, (18, 1)), Text(' code\n', (19, 1)), ControlLine('if', 'endif', True, (20, 1)), Text(' ', (21, 1))]) ) def test_def_syntax(self): template = \ """ <%def lala> hi </%def> """ self.assertRaises(exceptions.CompileException, Lexer(template).parse) def test_def_syntax_2(self): template = \ """ <%def name="lala"> hi </%def> """ self.assertRaises(exceptions.CompileException, Lexer(template).parse) def test_whitespace_equals(self): template = \ """ <%def name = "adef()" > adef </%def> """ node = Lexer(template).parse() self._compare(node, TemplateNode({}, [Text('\n ', (1, 1)), DefTag('def', {'name': 'adef()'}, (2, 13), [Text('''\n adef\n ''', (2, 36))]), Text('\n ', (4, 20))])) def test_ns_tag_closed(self): template = \ """ <%self:go x="1" y="2" z="${'hi' + ' ' + 'there'}"/> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text(''' ''', 
(1, 1)), CallNamespaceTag('self:go', {'x': '1', 'y' : '2', 'z': "${'hi' + ' ' + 'there'}"}, (3, 13), []), Text('\n ', (3, 64))])) def test_ns_tag_empty(self): template = \ """ <%form:option value=""></%form:option> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n ', (1, 1)), CallNamespaceTag('form:option', {'value': ''}, (2, 13), []), Text('\n ' , (2, 51))])) def test_ns_tag_open(self): template = \ """ <%self:go x="1" y="${process()}"> this is the body </%self:go> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text(''' ''', (1, 1)), CallNamespaceTag('self:go', {'x': '1', 'y' : '${process()}'}, (3, 13), [Text(''' this is the body ''', (3, 46))]), Text('\n ', (5, 24))])) def test_expr_in_attribute(self): """test some slightly trickier expressions. you can still trip up the expression parsing, though, unless we integrated really deeply somehow with AST.""" template = \ """ <%call expr="foo>bar and 'lala' or 'hoho'"/> <%call expr='foo<bar and hoho>lala and "x" + "y"'/> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n ', (1, 1)), CallTag('call', {'expr' : "foo>bar and 'lala' or 'hoho'"}, (2, 13), []), Text('\n ', (2, 57)), CallTag('call' , {'expr': 'foo<bar and hoho>lala and "x" + "y"' }, (3, 13), []), Text('\n ', (3, 64))])) def test_pagetag(self): template = \ """ <%page cached="True", args="a, b"/> some template """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n ', (1, 1)), PageTag('page', {'args': 'a, b', 'cached': 'True'}, (2, 13), []), Text(''' some template ''', (2, 48))])) def test_nesting(self): template = \ """ <%namespace name="ns"> <%def name="lala(hi, there)"> <%call expr="something()"/> </%def> </%namespace> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text(''' ''', (1, 1)), NamespaceTag('namespace', {'name': 'ns'}, (3, 9), [Text('\n ', (3, 31)), DefTag('def', {'name': 'lala(hi, there)'}, (4, 13), 
[Text('\n ', (4, 42)), CallTag('call', {'expr': 'something()'}, (5, 17), []), Text('\n ', (5, 44))]), Text('\n ', (6, 20))]), Text(''' ''', (7, 22))])) if compat.py3k: def test_code(self): template = \ """text <% print("hi") for x in range(1,5): print(x) %> more text <%! import foo %> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [ Text('text\n ', (1, 1)), Code('\nprint("hi")\nfor x in range(1,5):\n ' 'print(x)\n \n', False, (2, 5)), Text('\nmore text\n ', (6, 7)), Code('\nimport foo\n \n', True, (8, 5)), Text('\n', (10, 7))]) ) else: def test_code(self): template = \ """text <% print "hi" for x in range(1,5): print x %> more text <%! import foo %> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [ Text('text\n ', (1, 1)), Code('\nprint "hi"\nfor x in range(1,5):\n ' 'print x\n \n', False, (2, 5)), Text('\nmore text\n ', (6, 7)), Code('\nimport foo\n \n', True, (8, 5)), Text('\n', (10, 7))]) ) def test_code_and_tags(self): template = \ """ <%namespace name="foo"> <%def name="x()"> this is x </%def> <%def name="y()"> this is y </%def> </%namespace> <% result = [] data = get_data() for x in data: result.append(x+7) %> result: <%call expr="foo.x(result)"/> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n', (1, 1)), NamespaceTag('namespace', {'name': 'foo'}, (2, 1), [Text('\n ', (2, 24)), DefTag('def', {'name': 'x()'}, (3, 5), [Text('''\n this is x\n ''', (3, 22))]), Text('\n ', (5, 12)), DefTag('def', {'name' : 'y()'}, (6, 5), [Text('''\n this is y\n ''', (6, 22))]), Text('\n', (8, 12))]), Text('''\n\n''', (9, 14)), Code('''\nresult = []\ndata = get_data()\n''' '''for x in data:\n result.append(x+7)\n\n''', False, (11, 1)), Text('''\n\n result: ''', (16, 3)), CallTag('call', {'expr': 'foo.x(result)' }, (18, 13), []), Text('\n', (18, 42))])) def test_expression(self): template = \ """ this is some ${text} and this is ${textwith | escapes, moreescapes} <%def name="hi()"> give me 
${foo()} and ${bar()} </%def> ${hi()} """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n this is some ', (1, 1)), Expression('text', [], (2, 22)), Text(' and this is ', (2, 29)), Expression('textwith ', ['escapes', 'moreescapes' ], (2, 42)), Text('\n ', (2, 76)), DefTag('def', {'name': 'hi()'}, (3, 9), [Text('\n give me ', (3, 27)), Expression('foo()', [], (4, 21)), Text(' and ', (4, 29)), Expression('bar()', [], (4, 34)), Text('\n ', (4, 42))]), Text('\n ' , (5, 16)), Expression('hi()', [], (6, 9)), Text('\n', (6, 16))])) def test_tricky_expression(self): template = """ ${x and "|" or "hi"} """ nodes = Lexer(template).parse() self._compare( nodes, TemplateNode({}, [ Text('\n\n ', (1, 1)), Expression('x and "|" or "hi"', [], (3, 13)), Text('\n ', (3, 33)) ]) ) template = """ ${hello + '''heres '{|}' text | | }''' | escape1} """ nodes = Lexer(template).parse() self._compare( nodes, TemplateNode({}, [ Text('\n\n ', (1, 1)), Expression("hello + '''heres '{|}' text | | }''' ", ['escape1'], (3, 13)), Text('\n ', (3, 62)) ]) ) def test_tricky_code(self): if compat.py3k: template = """<% print('hi %>') %>""" nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Code("print('hi %>') \n", False, (1, 1))])) else: template = """<% print 'hi %>' %>""" nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Code("print 'hi %>' \n", False, (1, 1))])) def test_tricky_code_2(self): template = \ """<% # someone's comment %> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Code(""" # someone's comment """, False, (1, 1)), Text('\n ', (3, 3))])) if compat.py3k: def test_tricky_code_3(self): template = \ """<% print('hi') # this is a comment # another comment x = 7 # someone's '''comment print(''' there ''') # someone else's comment %> '''and now some text '''""" nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Code(""" print('hi') # this is a comment # another comment x = 
7 # someone's '''comment print(''' there ''') # someone else's comment """, False, (1, 1)), Text(" '''and now some text '''", (10, 3))])) else: def test_tricky_code_3(self): template = \ """<% print 'hi' # this is a comment # another comment x = 7 # someone's '''comment print ''' there ''' # someone else's comment %> '''and now some text '''""" nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Code("""\nprint 'hi'\n# this is a comment\n""" """# another comment\nx = 7 """ """# someone's '''comment\nprint '''\n """ """there\n '''\n# someone else's """ """comment\n\n""", False, (1, 1)), Text(" '''and now some text '''", (10, 3))])) def test_tricky_code_4(self): template = \ """<% foo = "\\"\\\\" %>""" nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Code("""foo = "\\"\\\\" \n""", False, (1, 1))])) def test_tricky_code_5(self): template = \ """before ${ {'key': 'value'} } after""" nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('before ', (1, 1)), Expression(" {'key': 'value'} ", [], (1, 8)), Text(' after', (1, 29))])) def test_control_lines(self): template = \ """ text text la la % if foo(): mroe text la la blah blah % endif and osme more stuff % for l in range(1,5): tex tesl asdl l is ${l} kfmas d % endfor tetx text """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('''\ntext text la la\n''', (1, 1)), ControlLine('if', 'if foo():', False, (3, 1)), Text(' mroe text la la blah blah\n', (4, 1)), ControlLine('if', 'endif', True, (5, 1)), Text('''\n and osme more stuff\n''', (6, 1)), ControlLine('for', 'for l in range(1,5):', False, (8, 1)), Text(' tex tesl asdl l is ', (9, 1)), Expression('l', [], (9, 24)), Text(' kfmas d\n', (9, 28)), ControlLine('for', 'endfor', True, (10, 1)), Text(''' tetx text\n\n''', (11, 1))])) def test_control_lines_2(self): template = \ """% for file in requestattr['toc'].filenames: x % endfor """ nodes = Lexer(template).parse() 
self._compare(nodes, TemplateNode({}, [ControlLine('for', "for file in requestattr['toc'].filenames:", False, (1, 1)), Text(' x\n', (2, 1)), ControlLine('for', 'endfor', True, (3, 1))])) def test_long_control_lines(self): template = \ """ % for file in \\ requestattr['toc'].filenames: x % endfor """ nodes = Lexer(template).parse() self._compare( nodes, TemplateNode({}, [ Text('\n', (1, 1)), ControlLine('for', "for file in \\\n " "requestattr['toc'].filenames:", False, (2, 1)), Text(' x\n', (4, 1)), ControlLine('for', 'endfor', True, (5, 1)), Text(' ', (6, 1)) ]) ) def test_unmatched_control(self): template = """ % if foo: % for x in range(1,5): % endif """ assert_raises_message( exceptions.SyntaxException, "Keyword 'endif' doesn't match keyword 'for' at line: 5 char: 1", Lexer(template).parse ) def test_unmatched_control_2(self): template = """ % if foo: % for x in range(1,5): % endfor """ assert_raises_message( exceptions.SyntaxException, "Unterminated control keyword: 'if' at line: 3 char: 1", Lexer(template).parse ) def test_unmatched_control_3(self): template = """ % if foo: % for x in range(1,5): % endlala % endif """ assert_raises_message( exceptions.SyntaxException, "Keyword 'endlala' doesn't match keyword 'for' at line: 5 char: 1", Lexer(template).parse ) def test_ternary_control(self): template = \ """ % if x: hi % elif y+7==10: there % elif lala: lala % else: hi % endif """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n', (1, 1)), ControlLine('if', 'if x:', False, (2, 1)), Text(' hi\n', (3, 1)), ControlLine('elif', 'elif y+7==10:', False, (4, 1)), Text(' there\n', (5, 1)), ControlLine('elif', 'elif lala:', False, (6, 1)), Text(' lala\n', (7, 1)), ControlLine('else', 'else:', False, (8, 1)), Text(' hi\n', (9, 1)), ControlLine('if', 'endif', True, (10, 1))])) def test_integration(self): template = \ """<%namespace name="foo" file="somefile.html"/> ## inherit from foobar.html <%inherit file="foobar.html"/> <%def 
name="header()"> <div>header</div> </%def> <%def name="footer()"> <div> footer</div> </%def> <table> % for j in data(): <tr> % for x in j: <td>Hello ${x| h}</td> % endfor </tr> % endfor </table> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [NamespaceTag('namespace' , {'file': 'somefile.html', 'name': 'foo'}, (1, 1), []), Text('\n', (1, 46)), Comment('inherit from foobar.html', (2, 1)), InheritTag('inherit', {'file': 'foobar.html'}, (3, 1), []), Text('''\n\n''', (3, 31)), DefTag('def', {'name': 'header()'}, (5, 1), [Text('''\n <div>header</div>\n''', (5, 23))]), Text('\n', (7, 8)), DefTag('def', {'name': 'footer()'}, (8, 1), [Text('''\n <div> footer</div>\n''', (8, 23))]), Text('''\n\n<table>\n''', (10, 8)), ControlLine('for', 'for j in data():', False, (13, 1)), Text(' <tr>\n', (14, 1)), ControlLine('for', 'for x in j:', False, (15, 1)), Text(' <td>Hello ', (16, 1)), Expression('x', ['h'], (16, 23)), Text('</td>\n' , (16, 30)), ControlLine('for', 'endfor', True, (17, 1)), Text(' </tr>\n', (18, 1)), ControlLine('for', 'endfor', True, (19, 1)), Text('</table>\n', (20, 1))])) def test_comment_after_statement(self): template = \ """ % if x: #comment hi % else: #next hi % endif #end """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n', (1, 1)), ControlLine('if', 'if x: #comment', False, (2, 1)), Text(' hi\n', (3, 1)), ControlLine('else', 'else: #next', False, (4, 1)), Text(' hi\n', (5, 1)), ControlLine('if', 'endif #end', True, (6, 1))])) def test_crlf(self): template = util.read_file(self._file_path("crlf.html")) nodes = Lexer(template).parse() self._compare( nodes, TemplateNode({}, [ Text('<html>\r\n\r\n', (1, 1)), PageTag('page', { 'args': "a=['foo',\n 'bar']" }, (3, 1), []), Text('\r\n\r\nlike the name says.\r\n\r\n', (4, 26)), ControlLine('for', 'for x in [1,2,3]:', False, (8, 1)), Text(' ', (9, 1)), Expression('x', [], (9, 9)), ControlLine('for', 'endfor', True, (10, 1)), Text('\r\n', (11, 1)), 
Expression("trumpeter == 'Miles' and " "trumpeter or \\\n 'Dizzy'", [], (12, 1)), Text('\r\n\r\n', (13, 15)), DefTag('def', {'name': 'hi()'}, (15, 1), [ Text('\r\n hi!\r\n', (15, 19))]), Text('\r\n\r\n</html>\r\n', (17, 8)) ]) ) assert flatten_result(Template(template).render()) \ == """<html> like the name says. 1 2 3 Dizzy </html>""" def test_comments(self): template = \ """ <style> #someselector # other non comment stuff </style> ## a comment # also not a comment ## this is a comment this is ## not a comment <%doc> multiline comment </%doc> hi """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('''\n<style>\n #someselector\n # ''' '''other non comment stuff\n</style>\n''', (1, 1)), Comment('a comment', (6, 1)), Text('''\n# also not a comment\n\n''', (7, 1)), Comment('this is a comment', (10, 1)), Text('''\nthis is ## not a comment\n\n''', (11, 1)), Comment(''' multiline\ncomment\n''', (14, 1)), Text(''' hi ''', (16, 8))])) def test_docs(self): template = \ """ <%doc> this is a comment </%doc> <%def name="foo()"> <%doc> this is the foo func </%doc> </%def> """ nodes = Lexer(template).parse() self._compare(nodes, TemplateNode({}, [Text('\n ', (1, 1)), Comment('''\n this is a comment\n ''', (2, 9)), Text('\n ', (4, 16)), DefTag('def', {'name': 'foo()'}, (5, 9), [Text('\n ', (5, 28)), Comment('''\n this is the foo func\n''' ''' ''', (6, 13)), Text('\n ', (8, 20))]), Text('\n ', (9, 16))])) def test_preprocess(self): def preproc(text): return re.sub(r'(?<=\n)\s*#[^#]', '##', text) template = \ """ hi # old style comment # another comment """ nodes = Lexer(template, preprocessor=preproc).parse() self._compare(nodes, TemplateNode({}, [Text('''\n hi\n''', (1, 1)), Comment('old style comment', (3, 1)), Comment('another comment', (4, 1))]))
apache-2.0
4,229,367,894,190,315,000
31.164009
83
0.383994
false
ammarkhann/FinalSeniorCode
lib/python2.7/site-packages/pandas/tests/dtypes/test_generic.py
7
2098
# -*- coding: utf-8 -*- from warnings import catch_warnings import numpy as np import pandas as pd from pandas.core.dtypes import generic as gt class TestABCClasses(object): tuples = [[1, 2, 2], ['red', 'blue', 'red']] multi_index = pd.MultiIndex.from_arrays(tuples, names=('number', 'color')) datetime_index = pd.to_datetime(['2000/1/1', '2010/1/1']) timedelta_index = pd.to_timedelta(np.arange(5), unit='s') period_index = pd.period_range('2000/1/1', '2010/1/1/', freq='M') categorical = pd.Categorical([1, 2, 3], categories=[2, 3, 1]) categorical_df = pd.DataFrame({"values": [1, 2, 3]}, index=categorical) df = pd.DataFrame({'names': ['a', 'b', 'c']}, index=multi_index) sparse_series = pd.Series([1, 2, 3]).to_sparse() sparse_array = pd.SparseArray(np.random.randn(10)) def test_abc_types(self): assert isinstance(pd.Index(['a', 'b', 'c']), gt.ABCIndex) assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCInt64Index) assert isinstance(pd.UInt64Index([1, 2, 3]), gt.ABCUInt64Index) assert isinstance(pd.Float64Index([1, 2, 3]), gt.ABCFloat64Index) assert isinstance(self.multi_index, gt.ABCMultiIndex) assert isinstance(self.datetime_index, gt.ABCDatetimeIndex) assert isinstance(self.timedelta_index, gt.ABCTimedeltaIndex) assert isinstance(self.period_index, gt.ABCPeriodIndex) assert isinstance(self.categorical_df.index, gt.ABCCategoricalIndex) assert isinstance(pd.Index(['a', 'b', 'c']), gt.ABCIndexClass) assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCIndexClass) assert isinstance(pd.Series([1, 2, 3]), gt.ABCSeries) assert isinstance(self.df, gt.ABCDataFrame) with catch_warnings(record=True): assert isinstance(self.df.to_panel(), gt.ABCPanel) assert isinstance(self.sparse_series, gt.ABCSparseSeries) assert isinstance(self.sparse_array, gt.ABCSparseArray) assert isinstance(self.categorical, gt.ABCCategorical) assert isinstance(pd.Period('2012', freq='A-DEC'), gt.ABCPeriod)
mit
-7,111,623,542,017,201,000
51.45
78
0.663489
false
hamedhsn/incubator-airflow
tests/contrib/hooks/test_ftp_hook.py
35
3084
# -*- coding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import mock import unittest from airflow.contrib.hooks import ftp_hook as fh class TestFTPHook(unittest.TestCase): def setUp(self): super(TestFTPHook, self).setUp() self.path = '/some/path' self.conn_mock = mock.MagicMock(name='conn') self.get_conn_orig = fh.FTPHook.get_conn def _get_conn_mock(hook): hook.conn = self.conn_mock return self.conn_mock fh.FTPHook.get_conn = _get_conn_mock def tearDown(self): fh.FTPHook.get_conn = self.get_conn_orig super(TestFTPHook, self).tearDown() def test_close_conn(self): ftp_hook = fh.FTPHook() ftp_hook.get_conn() ftp_hook.close_conn() self.conn_mock.quit.assert_called_once_with() def test_list_directory(self): with fh.FTPHook() as ftp_hook: ftp_hook.list_directory(self.path) self.conn_mock.cwd.assert_called_once_with(self.path) self.conn_mock.nlst.assert_called_once_with() def test_create_directory(self): with fh.FTPHook() as ftp_hook: ftp_hook.create_directory(self.path) self.conn_mock.mkd.assert_called_once_with(self.path) def test_delete_directory(self): with fh.FTPHook() as ftp_hook: ftp_hook.delete_directory(self.path) self.conn_mock.rmd.assert_called_once_with(self.path) def test_delete_file(self): with fh.FTPHook() as ftp_hook: ftp_hook.delete_file(self.path) self.conn_mock.delete.assert_called_once_with(self.path) def test_rename(self): from_path = '/path/from' to_path = '/path/to' with fh.FTPHook() as ftp_hook: ftp_hook.rename(from_path, to_path) 
self.conn_mock.rename.assert_called_once_with(from_path, to_path) self.conn_mock.quit.assert_called_once_with() def test_mod_time(self): self.conn_mock.sendcmd.return_value = '213 20170428010138' path = '/path/file' with fh.FTPHook() as ftp_hook: ftp_hook.get_mod_time(path) self.conn_mock.sendcmd.assert_called_once_with('MDTM ' + path) def test_mod_time_micro(self): self.conn_mock.sendcmd.return_value = '213 20170428010138.003' path = '/path/file' with fh.FTPHook() as ftp_hook: ftp_hook.get_mod_time(path) self.conn_mock.sendcmd.assert_called_once_with('MDTM ' + path) if __name__ == '__main__': unittest.main()
apache-2.0
-7,293,907,567,266,367,000
29.534653
74
0.633268
false
gram526/VTK
Examples/Graphics/Python/SegmentAndBrokenLineSources.py
15
1389
#!/usr/bin/env python ############################################################ from vtk import * ############################################################ # Create sources line1 = vtkLineSource() line1.SetPoint1( 1, 0, 0 ) line1.SetPoint2( -1, 0, 0 ) line1.SetResolution( 32 ) points = vtkPoints() points.InsertNextPoint( 1, 0, 0 ) points.InsertNextPoint( -.5, 1, 0 ) points.InsertNextPoint( 0, 1, 2 ) points.InsertNextPoint( 2, 1, -1 ) points.InsertNextPoint( -1, 0, 0 ) line2 = vtkLineSource() line2.SetPoints( points ) line2.SetResolution( 16 ) # Create mappers mapper1 = vtkPolyDataMapper() mapper1.SetInputConnection( line1.GetOutputPort() ) mapper2 = vtkPolyDataMapper() mapper2.SetInputConnection( line2.GetOutputPort() ) # Create actors actor1 = vtkActor() actor1.SetMapper( mapper1 ) actor1.GetProperty().SetColor( 1., 0., 0. ) actor2 = vtkActor() actor2.SetMapper( mapper2 ) actor2.GetProperty().SetColor( 0., 0., 1. ) actor2.GetProperty().SetLineWidth( 2.5 ) # Create renderer renderer = vtkRenderer() renderer.AddViewProp( actor1 ) renderer.AddViewProp( actor2 ) renderer.SetBackground( .3, .4 ,.5 ) # Create render window window = vtkRenderWindow() window.AddRenderer( renderer ) window.SetSize( 500, 500 ) # Create interactor interactor = vtkRenderWindowInteractor() interactor.SetRenderWindow( window ) # Start interaction window.Render() interactor.Start()
bsd-3-clause
2,219,672,494,517,183,500
24.254545
60
0.676026
false
davidyezsetz/kuma
vendor/packages/ipython/IPython/FakeModule.py
12
2056
# -*- coding: utf-8 -*- """ Class which mimics a module. Needed to allow pickle to correctly resolve namespaces during IPython sessions. """ #***************************************************************************** # Copyright (C) 2002-2004 Fernando Perez. <[email protected]> # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #***************************************************************************** import types def init_fakemod_dict(fm,adict=None): """Initialize a FakeModule instance __dict__. Kept as a standalone function and not a method so the FakeModule API can remain basically empty. This should be considered for private IPython use, used in managing namespaces for %run. Parameters ---------- fm : FakeModule instance adict : dict, optional """ dct = {} # It seems pydoc (and perhaps others) needs any module instance to # implement a __nonzero__ method, so we add it if missing: dct.setdefault('__nonzero__',lambda : True) dct.setdefault('__file__',__file__) if adict is not None: dct.update(adict) # Hard assignment of the object's __dict__. This is nasty but deliberate. fm.__dict__.clear() fm.__dict__.update(dct) class FakeModule(types.ModuleType): """Simple class with attribute access to fake a module. This is not meant to replace a module, but to allow inserting a fake module in sys.modules so that systems which rely on run-time module importing (like shelve and pickle) work correctly in interactive IPython sessions. Do NOT use this code for anything other than this IPython private hack.""" def __init__(self,adict=None): # tmp to force __dict__ instance creation, else self.__dict__ fails self.__iptmp = None # cleanup our temp trick del self.__iptmp # Now, initialize the actual data in the instance dict. init_fakemod_dict(self,adict)
mpl-2.0
3,784,876,172,737,993,700
30.151515
78
0.615759
false
dgitz/icarus_jet_pc
net/nodes/createtrainingimages.py
2
5416
#!/usr/bin/python min_factor = 0.08 #Minimum scaling factor max_factor = 0.12 #Maximum scaling factor leftmax_rotate_angle = -30.0 #Left-most maximum rotation angle, in degrees rightmax_rotate_angle = 30.0 #Right-most maximum rotation angle, in degrees import cv2 import os from os import listdir from os.path import isfile,join import random import time import pp import math,sys import pdb import numpy as np from optparse import OptionParser parser = OptionParser("createtrainingimages.py [options]") parser.add_option("--Trials",dest="Trials",default="1") parser.add_option("--Patterns",dest="Patterns",default="5") parser.add_option("--UseParallel",dest="UseParallel",default='True') (opts,args) = parser.parse_args() Trials = int(opts.Trials) if opts.UseParallel=='False': UseParallel=False elif opts.UseParallel=='True': UseParallel=True Patterns=int(opts.Patterns) ppservers = () job_server = pp.Server(4,ppservers=ppservers) masterimage_dir = os.getcwd()+'/../media/MasterImages/' trainimage_dir = os.getcwd()+'/../media/TrainImages/' environmentimage_dir = os.getcwd()+'/../media/EnvironmentImages/' avg_seqtime = 0.0 avg_partime = 0.0 masterimage_names = [ f for f in listdir(masterimage_dir) if isfile(join(masterimage_dir,f)) ] masterimage_paths = [''] * len(masterimage_names) environmentimage_names = [f for f in listdir(environmentimage_dir) if isfile(join(environmentimage_dir,f))] environmentimage_paths = [''] * len(environmentimage_names) for i in range(len(environmentimage_names)): environmentimage_paths[i] = environmentimage_dir + environmentimage_names[i] env_image = cv2.imread(environmentimage_paths[0]) for i in range(len(environmentimage_names)): environmentimage_paths[i] = environmentimage_dir + environmentimage_names[i] environmentimage_names[i] = environmentimage_names[i][0:environmentimage_names[i].find('.')] for i in range(len(masterimage_names)): masterimage_paths[i] = masterimage_dir + masterimage_names[i] masterimage_names[i] = 
masterimage_names[i][0:masterimage_names[i].find('.')] def deleteimages(): files = listdir(trainimage_dir) for f in files: os.remove(trainimage_dir+f) def createtrainimages(masterimage_index,paths,names,patterns,minf,maxf,newdir,environmentdir,envpaths): master_images = [] env_image = cv2.imread(envpaths[0]) for i in range(len(names)): master_images.append(cv2.imread(paths[i])) #height,width = env_image.shape[:2] #master_images[i] = cv2.resize(master_images[i],(width,height)) for trainimage_index in range(patterns): #Load Environment Image pick = int(random.uniform(0,len(envpaths)-1)) env_image = cv2.imread(envpaths[pick]) #Drop/Scale Target Image into Environment Image '''Resize Target Image''' factor_x = random.uniform(minf,maxf) factor_y = factor_x#random.uniform(minf,maxf) size_x,size_y = int(master_images[masterimage_index].shape[1]*factor_x),int(master_images[masterimage_index].shape[0]*factor_y) tempimage = cv2.resize(master_images[masterimage_index],(size_x,size_y)) cur_size_x,cur_size_y = tempimage.shape[1],tempimage.shape[0] center_y = int(random.uniform(tempimage.shape[0]/2,env_image.shape[0]-tempimage.shape[0]/2)) center_x = int(random.uniform(tempimage.shape[1]/2,env_image.shape[1]-tempimage.shape[1]/2)) newimage = env_image y_min = center_y-math.floor(tempimage.shape[0]/2) y_max = center_y+math.floor(tempimage.shape[0]/2) x_min = center_x-math.floor(tempimage.shape[1]/2) x_max = center_x+math.floor(tempimage.shape[1]/2) tempsize = newimage[y_min:y_max,x_min:x_max,:] height,width = tempsize.shape[:2] tempimage = cv2.resize(tempimage,(width,height)) newimage[y_min:y_max,x_min:x_max,:] = tempimage #rotateangle = random.uniform(leftmax_rotate_angle,rightmax_rotate_angle) #rotatematrix = cv2.getRotationMatrix2D((0,0),rotateangle,.3) #tempimage = cv2.warpAffine(tempimage,rotatematrix,(cur_size_y*3,cur_size_x*3)) newpath = newdir+names[masterimage_index]+'_'+str(trainimage_index)+'_'+str(center_y)+'_'+str(center_x)+'.png' cv2.imwrite(newpath,newimage) for t 
in range(Trials): #Sequential Code if Trials > 1: print 'Trial: {}'.format(t+1) #Parallel Code if UseParallel: deleteimages() start_time = time.time() masterimage_range = range(len(masterimage_names)) jobs = [(index, job_server.submit(func=createtrainimages,args=(index,masterimage_paths,masterimage_names,Patterns,min_factor,max_factor,trainimage_dir,environmentimage_dir,environmentimage_paths), modules=("cv2","random","math",))) for index in masterimage_range] for index,job in jobs: job() elapsed_time = time.time() - start_time print 'Elapsed Time (Parallel): {:.4f} Seconds'.format(elapsed_time) job_server.print_stats() avg_partime = avg_partime + elapsed_time else: deleteimages() start_time = time.time() for index in range(len(masterimage_names)): createtrainimages(index,masterimage_paths,masterimage_names,Patterns,min_factor,max_factor,trainimage_dir,environmentimage_dir,environmentimage_paths) elapsed_time = time.time() - start_time print 'Elapsed Time (Sequential): {:.4f} Seconds'.format(elapsed_time) avg_seqtime = avg_seqtime+elapsed_time if UseParallel: print 'Average Elapsed Time for Parallel Execution: {:.4f} Seconds'.format(avg_partime/Trials) else: print 'Average Elapsed Time for Sequential Execution: {:.4f} Seconds'.format(avg_seqtime/Trials)
gpl-2.0
5,795,614,841,449,660,000
37.140845
265
0.736706
false
claudep/pootle
tests/commands/find_duplicate_emails.py
8
1602
# -*- coding: utf-8 -*- # # Copyright (C) Pootle contributors. # # This file is a part of the Pootle project. It is distributed under the GPL3 # or later license. See the LICENSE file for a copy of the license and the # AUTHORS file for copyright and authorship information. import pytest from django.core.management import call_command @pytest.mark.cmd @pytest.mark.django_db def test_find_duplicate_emails_nodups(capfd, no_extra_users): """No duplicates found. Standard users shouldn't flag any error. """ call_command('find_duplicate_emails') out, err = capfd.readouterr() assert "There are no accounts with duplicate emails" in out @pytest.mark.cmd @pytest.mark.django_db def test_find_duplicate_emails_noemails(capfd, member, member2): """User have no email set.""" call_command('find_duplicate_emails') out, err = capfd.readouterr() assert "The following users have no email set" in out assert "member " in out assert "member2" in out @pytest.mark.cmd @pytest.mark.django_db def test_find_duplicate_emails_withdups(capfd, member_with_email, member2_with_email): """Find duplicate emails for removal where we have dups. Standard users shouldn't flag any error. """ member2_with_email.email = member_with_email.email member2_with_email.save() call_command('find_duplicate_emails') out, err = capfd.readouterr() assert "The following users have the email: [email protected]" in out assert "member_with_email" in out assert "member2_with_email" in out
gpl-3.0
6,190,927,134,669,527,000
30.411765
83
0.698502
false
egabancho/invenio
invenio/modules/annotations/testsuite/test_api.py
3
3609
# -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2014 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. __revision__ = "$Id$" from datetime import datetime from invenio.base.wrappers import lazy_import from invenio.testsuite import make_test_suite, run_test_suite, nottest, \ InvenioTestCase CFG = lazy_import('invenio.base.globals.cfg') USER = lazy_import('invenio.modules.accounts.models.User') API = lazy_import('invenio.modules.annotations.api') NOTEUTILS = lazy_import('invenio.modules.annotations.noteutils') COMMENT = lazy_import('invenio.modules.comments.models.CmtRECORDCOMMENT') class AnnotationTestCase(InvenioTestCase): def setUp(self): self.app.config['ANNOTATIONS_ENGINE'] = \ "invenio.modules.jsonalchemy.jsonext.engines.memory:MemoryStorage" class TestAnnotation(AnnotationTestCase): def test_initialization(self): u = USER(id=1) a = API.Annotation.create({"who": u, "what": "lorem", "where": "/"}) self.assert_(len(a.validate()) == 0) self.assert_(type(a["when"]) == datetime) self.assert_(a["who"].get_id() == 1) # invalid annotation a = API.Annotation.create({"who": u, "what": "lorem", "where": "/", "perm": {"public": True, "groups": []}, "uuid": "1m"}) self.assert_(len(a.validate()) == 1) def test_jsonld(self): u = USER(id=1, nickname="johndoe") a = API.Annotation.create({"who": u, "what": 
"lorem", "where": "/", "perm": {"public": True, "groups": []}}) ld = a.get_jsonld("oaf") self.assert_(ld["hasTarget"]["@id"] == CFG["CFG_SITE_URL"] + "/") self.assert_(ld["hasBody"]["chars"] == "lorem") class TestJSONLD(AnnotationTestCase): @nottest def test(self): u = USER(id=1) data = {"who": u, "what": "lorem", "where": {"record": 1, "marker": "P.1_T.2a.2_L.100"}, "comment": 1} a = API.add_annotation(model='annotation_note', **data) # JSONAlchemy issue with overwriting fields self.assert_(len(a.validate()) == 0) ld = a.get_jsonld("oaf", new_context={"ST": "http://www.w3.org/ns/oa#" "FragmentSelector"}, format="compacted") self.assert_(ld["http://www.w3.org/ns/oa#hasTarget"] ["http://www.w3.org/ns/oa#hasSelector"] ["@type"] == "ST") self.assert_(ld["http://www.w3.org/ns/oa#hasTarget"] ["http://www.w3.org/ns/oa#hasSelector"] ["http://www.w3.org/1999/02/22-rdf-syntax-ns#value"] == "P.1_T.2a.2_L.100") TEST_SUITE = make_test_suite(TestAnnotation, TestJSONLD) if __name__ == "__main__": run_test_suite(TEST_SUITE)
gpl-2.0
2,065,326,644,972,854,300
36.59375
78
0.582156
false
cernops/nova
nova/tests/unit/virt/vmwareapi/test_read_write_util.py
10
1887
# Copyright 2013 IBM Corp. # Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import urllib import mock from nova import test from nova.virt.vmwareapi import read_write_util class ReadWriteUtilTestCase(test.NoDBTestCase): def test_ipv6_host_read(self): ipv6_host = 'fd8c:215d:178e:c51e:200:c9ff:fed1:584c' port = 7443 folder = 'tmp/fake.txt' # NOTE(sdague): the VMwareHTTPReadFile makes implicit http # call via requests during construction, block that from # happening here in the test. with mock.patch.object(read_write_util.VMwareHTTPReadFile, '_create_read_connection'): reader = read_write_util.VMwareHTTPReadFile(ipv6_host, port, 'fake_dc', 'fake_ds', dict(), folder) param_list = {"dcPath": 'fake_dc', "dsName": 'fake_ds'} base_url = 'https://[%s]:%s/folder/%s' % (ipv6_host, port, folder) base_url += '?' + urllib.urlencode(param_list) self.assertEqual(base_url, reader._base_url)
apache-2.0
9,035,297,542,819,451,000
41.886364
78
0.567568
false
luckydonald/python-utils
luckydonaldUtils/dependencies/install.py
2
5316
# -*- coding: utf-8 -*- from luckydonaldUtils.logger import logging try: from .pip_interface import pip_install except ImportError as e: logger = logging.getLogger(__name__) logger.warn("Could not apply logger workaround. Falling back to using pip directly.") from .pip_interface_fallback import pip_install # end try from setuptools import find_packages try: import importlib except ImportError: # pip install importlib pip_install(["importlib"]) import importlib # end try __author__ = 'luckydonald' logger = logging.getLogger(__name__) def import_or_install(package_name, pip_name=None): """ Tries to import an package. If that fails it tries to install it via pip, using the given `pip_name` or if not given, the `package_name`. :param package_name: Package name to import. (E.g. "PIL") :param pip_name: The name to install it like `$pip install <pip_name>` would do. (E.g. "Pillow") :return: """ # if pip name is given, just use that. if pip_name: return import_or_install_with_exact_pip_name(package_name, pip_name) # if pip name is not given, try posibilities, by splitting the dots. # # Example: # >> import_or_install("imgurpython.client.ImgurClient") # will try to install: # - "imgurpython" # - "imgurpython.client" # - "imgurpython.client.ImgurClient" # # This also allows package names containing dots like "ruamel.yaml". pip_name = "" err = None for part in package_name.split("."): pip_name = (pip_name + "." if pip_name else "") + part try: return import_or_install_with_exact_pip_name(package_name, pip_name) except ImportError as e: err = e logger.debug("Import failed.", exc_info=True) # end try # end for raise err # should store the last occurred error. # end def def import_or_install_with_exact_pip_name(package_name, pip_name): """ Just a helper for import_or_install() Also Littlepip is best pony. 
""" err = None for try_i in [1, 2, 3]: try: return import_only(package_name) except ImportError as e: err = e logger.debug("Import failed.", exc_info=True) upgrade = try_i >= 2 # import failed twice (one after doing a normal install) install_only(pip_name, upgrade) raise err # should store the last occurred error. # end def def import_only(package_name, module_list=None): # "pytz.timzone" -> from pytz import timezone -> package_name = "pytz", from_package = ["timezone"] if not module_list: if "." in package_name: package_name, module_list = package_name.rsplit('.', 1) else: module_list = None if module_list: logger.debug("Trying import: form \"{module_name}\" import \"{module_list}\".".format(module_name=package_name, module_list=module_list)) else: logger.debug("Trying to import module \"{module_name}\".".format(module_name=package_name)) try: imp = importlib.import_module(package_name) if module_list: if hasattr(imp, module_list): imp = getattr(imp, module_list) logger.debug("\"{module_list}\" is an attribute of \"{module_name}\".".format(module_name=package_name, module_list=module_list)) else: imp = importlib.import_module(package_name, package=module_list) logger.debug("\"{module_list}\" is an module in \"{module_name}\".".format(module_name=package_name, module_list=module_list)) else: imp = importlib.import_module(package_name, package=module_list) logger.debug("module \"{module_name}\".".format(module_name=package_name)) except ImportError: try: imp = importlib.import_module(package_name, package=module_list) except (SystemError, ValueError) as e: # https://github.com/luckydonald/luckydonald-utils/issues/2 # https://bugs.python.org/issue18018 raise ImportError(str(e)) return imp def install_only(pip_name, upgrade=False): logger.warn("{install_or_upgrade} package '{pip_name}'.\n" "If that fails, install it manually:\n" "pip install {pip_name}\n" "".format(pip_name=pip_name, install_or_upgrade="Upgrading" if upgrade else "Installing")) args = [pip_name, 
"--verbose"] if upgrade and "--upgrade" not in args: args.append("--upgrade") logger.debug("Trying to install \"{pip_name}\" with pip using the following arguments: {pip_args}...".format( pip_name=pip_name, pip_args=args)) return pip_install(args) def upgrade(pip_name): install_only(pip_name, upgrade=True) def find_submodules(main_package): packages = [main_package] for package in find_packages(where=main_package): packages.append(main_package + "." + package) return packages
gpl-2.0
-4,576,213,748,962,197,500
35.916667
119
0.592363
false
Phonebooth/depot_tools
third_party/logilab/astng/inspector.py
19
10260
# This program is free software; you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:[email protected] # copyright 2003-2010 Sylvain Thenault, all rights reserved. # contact mailto:[email protected] # # This file is part of logilab-astng. # # logilab-astng is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 2.1 of the License, or (at your # option) any later version. # # logilab-astng is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License # for more details. # # You should have received a copy of the GNU Lesser General Public License along # with logilab-astng. If not, see <http://www.gnu.org/licenses/>. """visitor doing some postprocessing on the astng tree. Try to resolve definitions (namespace) dictionary, relationship... 
This module has been imported from pyreverse """ __docformat__ = "restructuredtext en" from os.path import dirname from logilab.common.modutils import get_module_part, is_relative, \ is_standard_module from logilab import astng from logilab.astng.exceptions import InferenceError from logilab.astng.utils import LocalsVisitor class IdGeneratorMixIn: """ Mixin adding the ability to generate integer uid """ def __init__(self, start_value=0): self.id_count = start_value def init_counter(self, start_value=0): """init the id counter """ self.id_count = start_value def generate_id(self): """generate a new identifier """ self.id_count += 1 return self.id_count class Linker(IdGeneratorMixIn, LocalsVisitor): """ walk on the project tree and resolve relationships. According to options the following attributes may be added to visited nodes: * uid, a unique identifier for the node (on astng.Project, astng.Module, astng.Class and astng.locals_type). Only if the linker has been instantiated with tag=True parameter (False by default). * Function a mapping from locals names to their bounded value, which may be a constant like a string or an integer, or an astng node (on astng.Module, astng.Class and astng.Function). 
* instance_attrs_type as locals_type but for klass member attributes (only on astng.Class) * implements, list of implemented interface _objects_ (only on astng.Class nodes) """ def __init__(self, project, inherited_interfaces=0, tag=False): IdGeneratorMixIn.__init__(self) LocalsVisitor.__init__(self) # take inherited interface in consideration or not self.inherited_interfaces = inherited_interfaces # tag nodes or not self.tag = tag # visited project self.project = project def visit_project(self, node): """visit an astng.Project node * optionally tag the node with a unique id """ if self.tag: node.uid = self.generate_id() for module in node.modules: self.visit(module) def visit_package(self, node): """visit an astng.Package node * optionally tag the node with a unique id """ if self.tag: node.uid = self.generate_id() for subelmt in node.values(): self.visit(subelmt) def visit_module(self, node): """visit an astng.Module node * set the locals_type mapping * set the depends mapping * optionally tag the node with a unique id """ if hasattr(node, 'locals_type'): return node.locals_type = {} node.depends = [] if self.tag: node.uid = self.generate_id() def visit_class(self, node): """visit an astng.Class node * set the locals_type and instance_attrs_type mappings * set the implements list and build it * optionally tag the node with a unique id """ if hasattr(node, 'locals_type'): return node.locals_type = {} if self.tag: node.uid = self.generate_id() # resolve ancestors for baseobj in node.ancestors(recurs=False): specializations = getattr(baseobj, 'specializations', []) specializations.append(node) baseobj.specializations = specializations # resolve instance attributes node.instance_attrs_type = {} for assattrs in node.instance_attrs.values(): for assattr in assattrs: self.handle_assattr_type(assattr, node) # resolve implemented interface try: node.implements = list(node.interfaces(self.inherited_interfaces)) except InferenceError: node.implements = () def 
visit_function(self, node): """visit an astng.Function node * set the locals_type mapping * optionally tag the node with a unique id """ if hasattr(node, 'locals_type'): return node.locals_type = {} if self.tag: node.uid = self.generate_id() link_project = visit_project link_module = visit_module link_class = visit_class link_function = visit_function def visit_assname(self, node): """visit an astng.AssName node handle locals_type """ # avoid double parsing done by different Linkers.visit # running over the same project: if hasattr(node, '_handled'): return node._handled = True if node.name in node.frame(): frame = node.frame() else: # the name has been defined as 'global' in the frame and belongs # there. Btw the frame is not yet visited as the name is in the # root locals; the frame hence has no locals_type attribute frame = node.root() try: values = node.infered() try: already_infered = frame.locals_type[node.name] for valnode in values: if not valnode in already_infered: already_infered.append(valnode) except KeyError: frame.locals_type[node.name] = values except astng.InferenceError: pass def handle_assattr_type(self, node, parent): """handle an astng.AssAttr node handle instance_attrs_type """ try: values = list(node.infer()) try: already_infered = parent.instance_attrs_type[node.attrname] for valnode in values: if not valnode in already_infered: already_infered.append(valnode) except KeyError: parent.instance_attrs_type[node.attrname] = values except astng.InferenceError: pass def visit_import(self, node): """visit an astng.Import node resolve module dependencies """ context_file = node.root().file for name in node.names: relative = is_relative(name[0], context_file) self._imported_module(node, name[0], relative) def visit_from(self, node): """visit an astng.From node resolve module dependencies """ basename = node.modname context_file = node.root().file if context_file is not None: relative = is_relative(basename, context_file) else: relative = False for 
name in node.names: if name[0] == '*': continue # analyze dependencies fullname = '%s.%s' % (basename, name[0]) if fullname.find('.') > -1: try: # XXX: don't use get_module_part, missing package precedence fullname = get_module_part(fullname) except ImportError: continue if fullname != basename: self._imported_module(node, fullname, relative) def compute_module(self, context_name, mod_path): """return true if the module should be added to dependencies""" package_dir = dirname(self.project.path) if context_name == mod_path: return 0 elif is_standard_module(mod_path, (package_dir,)): return 1 return 0 # protected methods ######################################################## def _imported_module(self, node, mod_path, relative): """notify an imported module, used to analyze dependencies """ module = node.root() context_name = module.name if relative: mod_path = '%s.%s' % ('.'.join(context_name.split('.')[:-1]), mod_path) if self.compute_module(context_name, mod_path): # handle dependencies if not hasattr(module, 'depends'): module.depends = [] mod_paths = module.depends if not mod_path in mod_paths: mod_paths.append(mod_path)
bsd-3-clause
-4,001,310,612,407,806,500
34.50173
87
0.595419
false
Arno-Nymous/pyload
module/plugins/hoster/TenluaVn.py
7
2990
# -*- coding: utf-8 -*- import random import re from module.network.RequestFactory import getURL as get_url from ..internal.SimpleHoster import SimpleHoster from ..internal.misc import json def gen_r(): return "0." + "".join([random.choice("0123456789") for x in range(16)]) class TenluaVn(SimpleHoster): __name__ = "TenluaVn" __type__ = "hoster" __version__ = "0.03" __status__ = "testing" __pattern__ = r'https?://(?:www\.)?tenlua\.vn(?!/folder)/.+?/(?P<ID>[0-9a-f]+)/' __config__ = [("activated", "bool", "Activated", True), ("use_premium", "bool", "Use premium account if available", True), ("fallback", "bool", "Fallback to free download if premium fails", True), ("chk_filesize", "bool", "Check file size", True), ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10)] __description__ = """Tenlua.vn hoster plugin""" __license__ = "GPLv3" __authors__ = [("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")] API_URL = "https://api2.tenlua.vn/" @classmethod def api_response(cls, method, **kwargs): kwargs['a'] = method sid = kwargs.pop('sid', None) return json.loads(get_url(cls.API_URL, get={'sid': sid} if sid is not None else {}, post=json.dumps([kwargs]))) @classmethod def api_info(cls, url): file_id = re.match(cls.__pattern__, url).group('ID') file_info = cls.api_response("filemanager_builddownload_getinfo", n=file_id, r=gen_r())[0] if file_info['type'] == "none": return {'status': 1} else: return {'name': file_info['n'], 'size': file_info['real_size'], 'status': 2, 'tenlua': {'link': file_info['dlink'], 'password': bool(file_info['passwd'])}} def handle_free(self, pyfile): self.handle_download() def handle_premium(self, pyfile): sid = self.account.info['data']['sid'] self.handle_download(sid) def handle_download(self, sid=None): if self.info['tenlua']['password']: password = self.get_password() if password: file_id = self.info['pattern']['ID'] args = dict(n=file_id, p=password, r=gen_r()) if sid is not None: args['sid'] = sid password_status = 
self.api_response("filemanager_builddownload_checkpassword", **args) if password_status['status'] == "0": self.fail(_("Wrong password")) else: url = password_status['url'] else: self.fail(_("Download is password protected")) else: url = self.info['tenlua']['link'] if sid is None: self.wait(30) self.link = url
gpl-3.0
5,634,794,185,604,538,000
32.595506
102
0.515385
false
gugahoi/maraschino
modules/log.py
8
1033
from flask import render_template from maraschino import app, LOG_FILE from maraschino.noneditable import * from maraschino import logger from pastebin.pastebin import PastebinAPI from maraschino.tools import requires_auth import maraschino @app.route('/xhr/log') @requires_auth def xhr_log(): return render_template('dialogs/log_dialog.html', log=maraschino.LOG_LIST, ) @app.route('/xhr/log/pastebin') @requires_auth def xhr_log_pastebin(): file = open(LOG_FILE) log = [] log_str = '' for line in reversed(file.readlines()): log.append(line.rstrip()) log_str += line.rstrip() log_str += '\n' file.close() x = PastebinAPI() try: url = x.paste('feed610f82c2c948f430b43cc0048258', log_str) logger.log('LOG :: Log successfully uploaded to %s' % url, 'INFO') except Exception as e: logger.log('LOG :: Log failed to upload - %s' % e, 'INFO') return render_template('dialogs/log_dialog.html', log=log, url=url, )
mit
-478,173,583,844,203,500
24.825
74
0.647628
false
metal88888/F2E.im
model/favorite.py
9
2085
#!/usr/bin/env python # coding=utf-8 # # Copyright 2012 F2E.im # Do have a faith in what you're doing. # Make your life a story worth telling. import time from lib.query import Query class FavoriteModel(Query): def __init__(self, db): self.db = db self.table_name = "favorite" super(FavoriteModel, self).__init__() def add_new_favorite(self, favorite_info): return self.data(favorite_info).add() def get_favorite_by_topic_id_and_owner_user_id(self, topic_id, owner_user_id): where = "involved_topic_id = %s AND owner_user_id = %s" % (topic_id, owner_user_id) return self.where(where).find() def get_user_favorite_count(self, owner_user_id): where = "owner_user_id = %s" % owner_user_id return self.where(where).count() def get_user_all_favorites(self, uid, num = 16, current_page = 1): where = "favorite.owner_user_id = %s" % uid join = "LEFT JOIN topic ON favorite.involved_topic_id = topic.id \ LEFT JOIN user AS author_user ON topic.author_id = author_user.uid \ LEFT JOIN node ON topic.node_id = node.id \ LEFT JOIN user AS last_replied_user ON topic.last_replied_by = last_replied_user.uid" order = "favorite.id DESC" field = "topic.*, \ author_user.username as author_username, \ author_user.nickname as author_nickname, \ author_user.avatar as author_avatar, \ author_user.uid as author_uid, \ author_user.reputation as author_reputation, \ node.name as node_name, \ node.slug as node_slug, \ last_replied_user.username as last_replied_username, \ last_replied_user.nickname as last_replied_nickname" return self.where(where).order(order).join(join).field(field).pages(current_page = current_page, list_rows = num) def cancel_exist_favorite_by_id(self, favorite_id): where = "id = %s" % favorite_id return self.where(where).delete()
bsd-3-clause
-7,219,605,412,098,331,000
40.7
121
0.609592
false
rtucker/sycamore
Sycamore/support/pytz/zoneinfo/Asia/Beirut.py
9
6152
# NOTE(review): this module is pytz's auto-generated zoneinfo data for
# Asia/Beirut whose newlines were lost when it was pasted into this file;
# it is reformatted here for readability.  Only whitespace and comments
# differ from the original token stream.  The tables are machine-generated
# (IANA/Olson tz database) -- do not hand-edit individual entries.
'''tzinfo timezone information for Asia/Beirut.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i


class Beirut(DstTzInfo):
    '''Asia/Beirut timezone definition. See datetime.tzinfo for details'''

    zone = 'Asia/Beirut'

    # UTC instants at which the zone's offset changes.  Entry k gives the
    # moment from which _transition_info[k] applies, so the two lists are
    # kept in lockstep: one "beginning of time" sentinel plus two
    # transitions (DST on / DST off) per observed DST year.
    _utc_transition_times = [
        d(1,1,1,0,0,0),  # sentinel: beginning of time
        d(1920,3,27,22,0,0), d(1920,10,24,21,0,0),
        d(1921,4,2,22,0,0), d(1921,10,2,21,0,0),
        d(1922,3,25,22,0,0), d(1922,10,7,21,0,0),
        d(1923,4,21,22,0,0), d(1923,9,15,21,0,0),
        d(1957,4,30,22,0,0), d(1957,9,30,21,0,0),
        d(1958,4,30,22,0,0), d(1958,9,30,21,0,0),
        d(1959,4,30,22,0,0), d(1959,9,30,21,0,0),
        d(1960,4,30,22,0,0), d(1960,9,30,21,0,0),
        d(1961,4,30,22,0,0), d(1961,9,30,21,0,0),
        d(1972,6,21,22,0,0), d(1972,9,30,21,0,0),
        d(1973,4,30,22,0,0), d(1973,9,30,21,0,0),
        d(1974,4,30,22,0,0), d(1974,9,30,21,0,0),
        d(1975,4,30,22,0,0), d(1975,9,30,21,0,0),
        d(1976,4,30,22,0,0), d(1976,9,30,21,0,0),
        d(1977,4,30,22,0,0), d(1977,9,30,21,0,0),
        d(1978,4,29,22,0,0), d(1978,9,29,21,0,0),
        d(1984,4,30,22,0,0), d(1984,10,15,21,0,0),
        d(1985,4,30,22,0,0), d(1985,10,15,21,0,0),
        d(1986,4,30,22,0,0), d(1986,10,15,21,0,0),
        d(1987,4,30,22,0,0), d(1987,10,15,21,0,0),
        d(1988,5,31,22,0,0), d(1988,10,15,21,0,0),
        d(1989,5,9,22,0,0), d(1989,10,15,21,0,0),
        d(1990,4,30,22,0,0), d(1990,10,15,21,0,0),
        d(1991,4,30,22,0,0), d(1991,10,15,21,0,0),
        d(1992,4,30,22,0,0), d(1992,10,3,21,0,0),
        d(1993,3,27,22,0,0), d(1993,9,25,21,0,0),
        d(1994,3,26,22,0,0), d(1994,9,24,21,0,0),
        d(1995,3,25,22,0,0), d(1995,9,23,21,0,0),
        d(1996,3,30,22,0,0), d(1996,9,28,21,0,0),
        d(1997,3,29,22,0,0), d(1997,9,27,21,0,0),
        d(1998,3,28,22,0,0), d(1998,9,26,21,0,0),
        d(1999,3,27,22,0,0), d(1999,10,30,21,0,0),
        d(2000,3,25,22,0,0), d(2000,10,28,21,0,0),
        d(2001,3,24,22,0,0), d(2001,10,27,21,0,0),
        d(2002,3,30,22,0,0), d(2002,10,26,21,0,0),
        d(2003,3,29,22,0,0), d(2003,10,25,21,0,0),
        d(2004,3,27,22,0,0), d(2004,10,30,21,0,0),
        d(2005,3,26,22,0,0), d(2005,10,29,21,0,0),
        d(2006,3,25,22,0,0), d(2006,10,28,21,0,0),
        d(2007,3,24,22,0,0), d(2007,10,27,21,0,0),
        d(2008,3,29,22,0,0), d(2008,10,25,21,0,0),
        d(2009,3,28,22,0,0), d(2009,10,24,21,0,0),
        d(2010,3,27,22,0,0), d(2010,10,30,21,0,0),
        d(2011,3,26,22,0,0), d(2011,10,29,21,0,0),
        d(2012,3,24,22,0,0), d(2012,10,27,21,0,0),
        d(2013,3,30,22,0,0), d(2013,10,26,21,0,0),
        d(2014,3,29,22,0,0), d(2014,10,25,21,0,0),
        d(2015,3,28,22,0,0), d(2015,10,24,21,0,0),
        d(2016,3,26,22,0,0), d(2016,10,29,21,0,0),
        d(2017,3,25,22,0,0), d(2017,10,28,21,0,0),
        d(2018,3,24,22,0,0), d(2018,10,27,21,0,0),
        d(2019,3,30,22,0,0), d(2019,10,26,21,0,0),
        d(2020,3,28,22,0,0), d(2020,10,24,21,0,0),
        d(2021,3,27,22,0,0), d(2021,10,30,21,0,0),
        d(2022,3,26,22,0,0), d(2022,10,29,21,0,0),
        d(2023,3,25,22,0,0), d(2023,10,28,21,0,0),
        d(2024,3,30,22,0,0), d(2024,10,26,21,0,0),
        d(2025,3,29,22,0,0), d(2025,10,25,21,0,0),
        d(2026,3,28,22,0,0), d(2026,10,24,21,0,0),
        d(2027,3,27,22,0,0), d(2027,10,30,21,0,0),
        d(2028,3,25,22,0,0), d(2028,10,28,21,0,0),
        d(2029,3,24,22,0,0), d(2029,10,27,21,0,0),
        d(2030,3,30,22,0,0), d(2030,10,26,21,0,0),
        d(2031,3,29,22,0,0), d(2031,10,25,21,0,0),
        d(2032,3,27,22,0,0), d(2032,10,30,21,0,0),
        d(2033,3,26,22,0,0), d(2033,10,29,21,0,0),
        d(2034,3,25,22,0,0), d(2034,10,28,21,0,0),
        d(2035,3,24,22,0,0), d(2035,10,27,21,0,0),
        d(2036,3,29,22,0,0), d(2036,10,25,21,0,0),
        d(2037,3,28,22,0,0), d(2037,10,24,21,0,0),
        ]

    # Each entry is i(utcoffset_seconds, dst_seconds, tzname) -- presumably,
    # per the pytz ``memorized_ttinfo`` convention; confirm against
    # pytz.tzinfo.  From the literals: EET = UTC+2 (7200 s, no DST) and
    # EEST = UTC+3 (10800 s, of which 3600 s is DST).  Entries alternate
    # EEST/EET in step with the transition times above; year labels in the
    # comments mirror the corresponding pair of _utc_transition_times.
    _transition_info = [
        i(7200,0,'EET'),                        # in effect before 1920
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1920
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1921
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1922
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1923
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1957
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1958
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1959
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1960
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1961
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1972
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1973
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1974
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1975
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1976
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1977
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1978
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1984
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1985
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1986
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1987
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1988
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1989
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1990
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1991
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1992
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1993
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1994
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1995
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1996
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1997
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1998
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 1999
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2000
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2001
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2002
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2003
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2004
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2005
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2006
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2007
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2008
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2009
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2010
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2011
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2012
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2013
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2014
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2015
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2016
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2017
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2018
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2019
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2020
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2021
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2022
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2023
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2024
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2025
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2026
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2027
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2028
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2029
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2030
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2031
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2032
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2033
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2034
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2035
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2036
        i(10800,3600,'EEST'), i(7200,0,'EET'),  # 2037
        ]

# pytz convention: shadow the class name with its singleton instance so that
# importing ``Beirut`` from this module yields a ready-to-use tzinfo object.
Beirut = Beirut()
gpl-2.0
6,341,336,817,846,684,000
19.506667
74
0.590052
false
AZCompTox/AZOrange
orange/OrangeWidgets/Classify/OWParamOpt.py
2
42608
""" <name>Parameter Optimizer</name> <description>Automatic optimization of learners parameters</description> <icon>icons/Opt.png</icon> <contact>Pedro Rafael Almeida</contact> <priority>7</priority> """ import string from AZutilities import dataUtilities from OWWidget import * import OWGUI import orange import AZOrangeConfig as AZOC import os, types from AZutilities import paramOptUtilities from AZutilities import miscUtilities import time import AZLearnersParamsConfig #from qttable import * #from qt import QColor #from qt import Qt from copy import deepcopy #import qt version = 9 class OWParamOpt(OWWidget): def __init__(self, parent=None, signalManager = None, name='ParamOptimizer'): OWWidget.__init__(self, parent, signalManager, name, 1) # Define the input and output channels self.inputs = [("Classified Examples", ExampleTable, self.setData), ("Learner", orange.Learner, self.setLearner)] self.outputs = [("Learner - Tuned", orange.Learner), ("Examples - Optimization Steps", ExampleTable)] self.requiredParamVer = 12 self.name = name self.dataset = None self.learner = None self.optimizer = paramOptUtilities.Appspack() self.verbose = 0 self.tunedPars = None self.intRes = None self.paramsNames=["Name","Optimize","Lower Limit","Upper Limit","Distribution","Step","Default","Actual Learner parameter"] self.learnerType = None self.parameters = None self.nParameters = 0 self.nFolds = 5 self.SMethod = 1 self.execEnv = 0 # Serial self.CMethod = 0 self.RMethod = 0 self.GUIparams = {} self.OptimizeChBox = {} self.DistCombo = {} self.DefaultCombo = {} self.UseGridSearch = False self.nInnerPoints = 5 #Define Evaluation Methods: ["LabelName", "EvaluateFunction", "True=Best is the Min | False=Best is the Max"] self.CMethods = [("CA", "AZutilities.evalUtilities.CA",False)] self.RMethods = [("RMSE", "AZutilities.evalUtilities.RMSE", True)] # ("R^2", "AZutilities.evalUtilities.Rsqrt", False)] self.SMethods = [("Leave-One-Out", 0), ("Cross Validation", 1)] self.execEnvs = 
AZOC.OWParamOptExecEnvs self.defineGUI() def defineGUI(self): self.sBox = OWGUI.widgetBox(self.controlArea, "Execution environment") itms = [e[0] for e in self.execEnvs] OWGUI.radioButtonsInBox(self.sBox, self, "execEnv", btnLabels=itms) boxGrid = OWGUI.widgetBox(self.controlArea,'Initial Point for the Optimizer') OWGUI.checkBox(boxGrid, self, 'UseGridSearch','Use Grid-Search', tooltip='Use Grid-Search to find the best initial point to start the optimization.<br>If not checked, the midrange point will be used as optimization initial point.') OWGUI.spin(boxGrid, self, 'nInnerPoints', 1, 100, step=1, label=' Number of inner points:', tooltip='Number of points to break down each variable to evaluate the initial point.<br>It will evaluate nInnerPoints^nOptimizedVars') OWGUI.separator(self.controlArea) self.sBox = OWGUI.widgetBox(self.controlArea, "Sampling Method") itms = [e[0] for e in self.SMethods] self.comboDistItems = ["Continuous","Power2","By Step","Specific Values"] OWGUI.radioButtonsInBox(self.sBox, self, "SMethod", btnLabels=itms) hBox = OWGUI.widgetBox(OWGUI.indentedBox(self.sBox)) #QWidget(hBox).setFixedSize(19, 8) OWGUI.spin(hBox, self, 'nFolds', 2, 100, step=1, label='Number of Folds: ') OWGUI.separator(self.controlArea) box2 = OWGUI.widgetBox(self.controlArea,'Evaluation Method') width = 150 itms = [e[0] for e in self.CMethods] OWGUI.comboBox(box2, self, 'CMethod', items=itms, label='For Classifiers:', labelWidth=width, orientation='horizontal', tooltip='Method used for evaluation in case of classifiers.') itms = [e[0] for e in self.RMethods] OWGUI.comboBox(box2, self, 'RMethod', items=itms, label='For Regressors:', labelWidth=width, orientation='horizontal', tooltip='Method used for evaluation in case of regressors.') OWGUI.separator(self.controlArea) OWGUI.button(self.controlArea, self,"&Reload Defaults ", callback=self.reloadDefaults) OWGUI.separator(self.controlArea) #OWGUI.separator(self.controlArea, height=24) infoBox = 
OWGUI.widgetBox(self.controlArea, "Optimizer status") self.infoStatus = OWGUI.label(infoBox,self,'Waiting for inputs...') self.infoPars = OWGUI.label(infoBox,self,'') self.infoRes = OWGUI.label(infoBox,self,'') OWGUI.label(infoBox,self,'') self.infoErr = OWGUI.label(infoBox,self,'') OWGUI.separator(self.controlArea) OWGUI.button(self.controlArea, self,"&Apply Settings ", callback=self.optimizeParameters) # Main area GUI import sip sip.delete(self.mainArea.layout()) self.mainLayout = QGridLayout(self.mainArea) mainRight = OWGUI.widgetBox(self.mainArea, "Parameters Configuration") mainRight.setSizePolicy(QSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.MinimumExpanding)) self.paramsTable = OWGUI.table(self.mainArea, rows = 0, columns = 0, selectionMode = QTableWidget.MultiSelection, addToLayout = 0) #self.paramsTable.setLeftMargin(0) self.paramsTable.verticalHeader().hide() self.paramsTable.setSelectionMode(QTableWidget.NoSelection) self.paramsTable.setColumnCount(len(self.paramsNames)) #for i, m in enumerate(self.paramsNames): # self.paramsTable.setColumnStretchable(i, 0) # header.setLabel(i, m) self.paramsTable.setHorizontalHeaderLabels(self.paramsNames) #self.mainLayout.setColumnStretch(1, 100) #self.mainLayout.setRowStretch(2, 100) self.mainLayout.addWidget(self.paramsTable, 0, 0, 1,2) self.mainLayout.addWidget(OWGUI.label(mainRight,self,'Red - Parameter selected to be optimized\r\nGreen - Parameter optimized\r\nBlack - Parameter will not be optimized'),1,0) self.mainLayout.addWidget(OWGUI.label(mainRight,self,'N_EX - Number of Examples in dataset\r\nN_ATTR - Number of attributes in dataset'),1,1) self.adjustSize() self.create() def reloadDefaults(self): if self.learner: self.originalParameters = eval("AZLearnersParamsConfig." 
+ self.learnerType) self.parameters = deepcopy(self.originalParameters) self.updateTable() def clearTable(self): self.learnerType = None self.parameters = None for row in range(self.paramsTable.rowCount()): self.paramsTable.removeRow(row) self.paramsTable.setRowCount(0) def getRangeParsAndDistType(self, parametersOrig ,param): """Get from the parametersOrig the Distribution type and the Range for the parameter named as the input var 'param' """ N_EX = 5 #Just a number to test the avaluation of the expressions on atributes definition N_ATTR = 5 #Just a number to test the avaluation of the expressions on atributes definition if parametersOrig[param][1] == "interval": distType = 0 elif parametersOrig[param][1] == "values": if "power2Range" in parametersOrig[param][2]: distType = 1 elif "Range" in parametersOrig[param][2]: distType = 2 else: distType = 3 else: self.setErrors("Invalid keyword in Configuration File") return [None,None] if distType in (1,2): txt = parametersOrig[param][2] RangePars = txt[txt.find("Range")+6:txt.find(")",txt.find("Range"))].split(",") if len(RangePars)<3: RangePars.append("1") #the default step size try: if distType == 1 and miscUtilities.power2Range(eval(RangePars[0]),eval(RangePars[1]),eval(RangePars[2])) != eval(parametersOrig[param][2]): return [[],3] elif distType == 2 and miscUtilities.Range(eval(RangePars[0]),eval(RangePars[1]),eval(RangePars[2])) != eval(parametersOrig[param][2]): return [[],3] except: return [[],3] elif distType == 0: txt = parametersOrig[param][2] RangePars = [txt[1:-1].split(",")[0],txt[1:-1].split(",")[-1]] else:#distType==3 txt = parametersOrig[param][2] RangePars = [x.strip() for x in txt[1:-1].split(" , ")] if eval(parametersOrig[param][0]) in types.StringTypes or \ (type(eval(parametersOrig[param][0]))==types.ListType and (type(eval(parametersOrig[param][0])[0]) in types.StringTypes)): newPars = [] for par in RangePars: if par[0]==par[-1] and par[0] in ("'",'"'): newPars.append(par[1:-1]) else: 
newPars.append(par) RangePars = newPars RangePars=[x.strip() for x in RangePars] return [RangePars,distType] def setCellText(self,table,row,col,text): table.removeCellWidget( row, col) it = QTableWidgetItem() it.setFlags(Qt.ItemIsEnabled | (Qt.ItemIsSelectable or Qt.NoItemFlags)) it.setTextAlignment(Qt.AlignRight) it.setText(text) table.setItem(row, col, it) return it def setCellComboBox(self,table,row,col,items): table.removeCellWidget( row, col) #it = OWGUI.comboBox(None, self, None, items = items, tooltip = "") combo = QComboBox() combo.addItems([unicode(i) for i in items]) table.setCellWidget(row,col,combo) return combo def updateTable(self): """Updates the GUI table parameters with the ones specified on self.parameters variable Also updates the optimized parameters column if the self.optimizer is optimized with success """ if not self.parameters: return #self.parameters["NAME"]: [ParameterType, valuesRangeType,valuesRange,valuesAlias,Default,Optimize, EditableRange] #self.paramsNames: ["Name","Optimize","Lower Limit","Upper Limit","Distribution","Step","Default"] #self.comboDistItems: ["Continuous","Power2","By Step","Specific Values"] self.nParameters = len(self.parameters) self.paramsTable.setRowCount(len(self.parameters)) self.OptimizeChBox = {} self.DistCombo = {} self.DefaultCombo = {} #QcomboDistItems = QStringList() #[QcomboDistItems.append(str(x)) for x in self.comboDistItems] for row, param in enumerate(self.parameters): origRangePars,origDistType = self.getRangeParsAndDistType(self.originalParameters,param) RangePars,distType = self.getRangeParsAndDistType(self.parameters,param) if RangePars==None or distType==None: self.setErrors("Parameter "+str(param)+" has undefined distribution!","WARNING") return for col, m in enumerate(self.paramsNames): if col==0:#Name self.setCellText(self.paramsTable, row, col, str(param)) elif col==1:#Optimize Flag self.paramsTable.removeCellWidget( row, col) self.OptimizeChBox[str(param)] = QCheckBox() 
self.OptimizeChBox[str(param)].setTristate(False) self.OptimizeChBox[str(param)].setCheckState(int(self.parameters[param][5])*2) #0-Unchecked 1-Partially checked 2-Checked self.paramsTable.setCellWidget(row,col,self.OptimizeChBox[str(param)]) if not self.parameters[param][5]: self.nParameters -= 1 elif col==2:#Lower if distType != 3: self.setCellText(self.paramsTable, row, col, str(RangePars[0])) else: self.setCellText(self.paramsTable, row, col, "") elif col==3:#Upper if distType != 3: self.setCellText(self.paramsTable, row, col, str(RangePars[1])) else: self.setCellText(self.paramsTable, row, col, "") elif col==4:#Distribution Type self.paramsTable.removeCellWidget( row, col) if "Combo" in str(type(self.paramsTable.cellWidget(row,6))) and not self.parameters[param][6]: self.setCellText(self.paramsTable, row, col, self.comboDistItems[-1]) else: if distType == len(self.comboDistItems)-1: self.DistCombo[str(param)] = self.setCellComboBox(self.paramsTable,row,col,[str(x) for x in self.comboDistItems]) else: self.DistCombo[str(param)] = self.setCellComboBox(self.paramsTable,row,col,[str(x) for x in self.comboDistItems][:-1]) self.DistCombo[str(param)].setCurrentIndex(distType) #if distType==3: # if self.paramsTable.item(row,7) != None and hasattr(self.paramsTable.item(row,7),"setToolTip"): # print "Seted" # self.paramsTable.item(row,7).setToolTip("is This OK?") # self.paramsTable.updateCell(row, col) #QToolTip.add(self.paramsTable,self.paramsTable.cellGeometry(row+1, col-2),str(self.originalParameters[param][2])) elif col==5: #Step if distType in (0,3): cellItem = "" elif distType in (1,2): cellItem = str(RangePars[2]) else: cellItem = str(self.parameters[param][2]) self.setCellText(self.paramsTable, row, col, cellItem) if distType==3 and self.originalParameters[param][7]: if self.paramsTable.item(row,col) != None and hasattr(self.paramsTable.item(row,col),"setToolTip"): toolTipTXT = self.originalParameters[param][7] self.paramsTable.item(row,col).setToolTip("Specific 
Values from Config File:\n"+toolTipTXT) #self.paramsTable.updateCell(row, col) self.paramsTable.update() elif col ==6:#Default #QComboDef = QStringList() self.paramsTable.removeCellWidget( row, col) if distType == 3 and len(origRangePars) > 0 and len(self.parameters[param][3])==len(origRangePars) and not self.parameters[param][6]: if self.parameters[param][4].strip() in origRangePars: #[QComboDef.append(str(x)) for x in self.parameters[param][3]] self.DefaultCombo[str(param)] = self.setCellComboBox(self.paramsTable,row,col,[str(x) for x in self.parameters[param][3]]) self.DefaultCombo[str(param)].setCurrentIndex(origRangePars.index(self.parameters[param][4].strip())) else:#The alias are present, but could not find a match if self.verbose > 0: self.setErrors("No match found for the Default parameter of " + str(param) + " to resolve the alias","WARNING") self.DefaultCombo[str(param)] = self.setCellComboBox(self.paramsTable,row,col,[str(x) for x in self.parameters[param][3]]+[str(self.parameters[param][4].strip())]) self.DefaultCombo[str(param)].setCurrentIndex(self.DefaultCombo[str(param)].count-1) #self.setCellText(self.paramsTable, row, col, str(self.parameters[param][4]).strip()) elif len(origRangePars)>1 and distType == 3 and not self.parameters[param][6]: if self.parameters[param][4].strip() in origRangePars: self.DefaultCombo[str(param)] = self.setCellComboBox(self.paramsTable,row,col,[str(x) for x in origRangePars]) self.DefaultCombo[str(param)].setCurrentIndex(origRangePars.index(self.parameters[param][4].strip())) else: if self.verbose > 0: self.setErrors("The default parameter for " + str(param) + " is not listed in the values list","WARNING") self.DefaultCombo[str(param)] = self.setCellComboBox(self.paramsTable,row,col,[str(x) for x in origRangePars]+[str(self.parameters[param][4].strip())]) self.DefaultCombo[str(param)].setCurrentItem(self.DefaultCombo[str(param)].count-1) else: self.setCellText(self.paramsTable, row, col, 
str(self.parameters[param][4]).strip()) elif col==7: #Optimization self.paramsTable.removeCellWidget( row, col) if self.learner: if hasattr(self.learner,str(param)): aParam = str(eval("self.learner."+str(param))) # Check if an alias for this value exists if aParam in origRangePars: aPidx = origRangePars.index(aParam) if len(self.parameters[param][3]) > aPidx: aParam = self.parameters[param][3][aPidx] #cellItem = ColorTableItem(self.paramsTable,aParam ,QTableWidgetItem.Never) cellItem = self.setCellText(self.paramsTable, row, col, aParam) if hasattr(self.learner,"optimized") and self.learner.optimized and self.parameters[param][5]: cellItem.setForeground(QBrush(QColor("Green"))) elif not self.parameters[param][5]: cellItem.setForeground(QBrush(QColor("black"))) else: cellItem.setForeground(QBrush(QColor("red"))) else: self.setCellText(self.paramsTable, row, col, "N/A") #self.paramsTable.updateCell(row, col) self.paramsTable.update() else: pass #for i in range(len(self.paramsNames)): # self.paramsTable.adjustColumn(i) self.paramsTable.resizeColumnsToContents() self.paramsTable.setColumnWidth(5,30) def updateParametersFromTable(self): """Updates the parameters of the optimizer with the ones present on GUI table Returns True if all OK Returns False if Errors occurred """ #self.paramsNames: ["Name","Optimize","Lower Limit","Upper Limit","Distribution","Step","Default"] #self.comboDistItems: ["Continuous","Power2","By Step","Specific Values"] if self.paramsTable.columnCount() < 7: self.setErrors("Wrong number of columns in table!") return False RangePars,distType = [None,None] self.nParameters = len(self.parameters) for row in range(self.paramsTable.rowCount()): for col in range(7): if col == 0:#Name name=str(self.paramsTable.item(row,col).text()).strip() RangePars,distType = self.getRangeParsAndDistType(self.originalParameters,name) if RangePars==None or distType==None: self.setErrors("It was not possible to identify the range parameters") return False elif col == 
1:#Optimize if self.paramsTable.cellWidget(row,col).checkState()==2: optimize=True else: optimize = False self.nParameters -= 1 elif col == 2:#Lower Limit Llimit = str(self.paramsTable.item(row,col).text()).strip() elif col == 3:#Upper Limit Ulimit = str(self.paramsTable.item(row,col).text()).strip() elif col == 4:#Distribution if "Combo" in str(type(self.paramsTable.cellWidget(row,col))): dist = str(self.paramsTable.cellWidget(row,col).currentIndex()) else: dist = self.comboDistItems.index(self.paramsTable.item(row,col).text()) elif col == 5:#Step step = str(self.paramsTable.item(row,col).text()).strip() elif col == 6:#Default if "Combo" in str(type(self.paramsTable.cellWidget(row,col))): default = self.paramsTable.cellWidget(row,col).currentIndex() if len(self.parameters[name][3]) <= default: #the parameter did not have an alias, the text is the value default = str(self.paramsTable.cellWidget(row,col).currentText()).strip() else: default = RangePars[default] else:#is string default = str(self.paramsTable.item(row,col).text()).strip() #['types.StringType', 'values', "['kernel' , 'pls1' , 'simpls']", ['Kernel', 'PLS1', 'SimPLS'], "'simpls'", True, False] if optimize: self.parameters[name][5] = True if "Combo" in str(type(self.paramsTable.cellWidget(row,4))): comboDistType = self.paramsTable.cellWidget(row,4).currentIndex() else: comboDistType = self.comboDistItems.index(self.paramsTable.item(row,4).text()) if comboDistType==0: #Continuous if not miscUtilities.isNumber(Llimit): if "N_EX" not in Llimit and "N_ATTR" not in Llimit: QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has invalid Lower limit",QMessageBox.Ok) return False if not miscUtilities.isNumber(Ulimit): if "N_EX" not in Ulimit and "N_ATTR" not in Ulimit: QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has invalid Upper limit",QMessageBox.Ok) return False self.parameters[name][1] = "interval" self.parameters[name][2] = "[" + Llimit + " , " + Ulimit + "]" 
self.parameters[name][3] = "" self.parameters[name][4] = default elif comboDistType==1: #Power2 if not miscUtilities.isNumber(Llimit): if "N_EX" not in Llimit and "N_ATTR" not in Llimit: QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has invalid Lower limit",QMessageBox.Ok) return False if not miscUtilities.isNumber(Ulimit): if "N_EX" not in Ulimit and "N_ATTR" not in Ulimit: QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has invalid Upper limit",QMessageBox.Ok) return False if not miscUtilities.isNumber(step): QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has an invalid step value",QMessageBox.Ok) return False self.parameters[name][1] = "values" self.parameters[name][2] = "miscUtilities.power2Range(" + Llimit + "," + Ulimit + "," + step + ")" self.parameters[name][3] = "" self.parameters[name][4] = default elif comboDistType==2: #By Step if not miscUtilities.isNumber(Llimit): if "N_EX" not in Llimit and "N_ATTR" not in Llimit: QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has invalid Lower limit",QMessageBox.Ok) return False if not miscUtilities.isNumber(Ulimit): if "N_EX" not in Ulimit and "N_ATTR" not in Ulimit: QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has invalid Upper limit",QMessageBox.Ok) return False if not miscUtilities.isNumber(step): QMessageBox.warning(self,"Invalid parameter","Parameter "+name+" has an invalid step value",QMessageBox.Ok) return False self.parameters[name][1] = "values" self.parameters[name][2] = "miscUtilities.Range(" + Llimit + "," + Ulimit + "," + step + ")" self.parameters[name][3] = "" self.parameters[name][4] = default else: #Specific Values #The 'Specific Values' refere to the parameters specified in the original AZLearnersParamsConfig.py file self.parameters[name][1] = self.originalParameters[name][1] self.parameters[name][2] = self.originalParameters[name][2] self.parameters[name][3] = self.originalParameters[name][3] 
self.parameters[name][4] = default else: self.parameters[name][5] = False #self.parameters[name][1]='values' if "Combo" in str(type(self.paramsTable.cellWidget(row,6))): default = self.paramsTable.cellWidget(row,6).currentIndex() if len(RangePars) <= default: #the parameter did not have an alias, the text is the value #self.parameters[name][2]='[' + str(self.paramsTable.cellWidget(row,6).currentText()).strip() + ']' self.parameters[name][4]=str(self.paramsTable.cellWidget(row,6).currentText()).strip() else: #self.parameters[name][2]='[' + RangePars[default] + ']' self.parameters[name][4]= RangePars[default] else:#is string #self.parameters[name][2]='[' + str(self.paramsTable.item(row,6).text()).strip() + ']' self.parameters[name][4]=str(self.paramsTable.item(row,6).text()).strip() return True def showEvent(self, ev): self.updateInfo() def onDeleteWidget(self): #self.linksOut.clear() if self.dataset: del self.dataset if self.intRes: del self.intRes def updateInfo(self): self.infoStatus.setText('') self.infoPars.setText('') self.infoRes.setText('') if not self.learner and not self.dataset: self.infoStatus.setText('Waiting for inputs...') self.updateTable() self.adjustSize() return elif not self.learner: self.infoStatus.setText('Waiting for learner...') self.updateTable() self.adjustSize() return elif not self.dataset: self.infoStatus.setText('Waiting for data...') self.updateTable() self.adjustSize() return if type(self.tunedPars) != types.ListType or (not hasattr(self.learner,"optimized")): self.setErrors("Some error occurred!\nTuned parameters: \r\n "+str(self.tunedPars), "WARNING") self.infoPars.setText("Check the returned Parametrs\nin the output window") if self.verbose > 0: self.setErrors("Tuned parameters:" + str(self.tunedPars),"WARNING") else: if self.learner.optimized == True: if len(self.intRes) <= 2: self.setErrors("It was detected that only the default and initial points were evaluated.\nIt seems that there was no optimization!\nPlease check for any 
other printed errors/warnings.","WARNING") else: if self.verbose > 0: print "Learner Optimized!" self.infoStatus.setText('== Learner Optimized! ==') else: if self.verbose > 0: self.setErrors("optimization Flag = "+str(self.learner.optimized),"WARNING") self.setErrors('Optimization seems to be done, but repective flag is not checked ('+str(self.learner.optimized)+')',"WARNING") LPars = "" self.infoPars.setText("") if self.verbose > 0: for parName in self.tunedPars[1]: if hasattr(self.learner,parName): LPars += parName + " = " if self.parameters != None and self.parameters[str(parName)][1]=="values" and len(self.parameters[str(parName)])>=3 and self.parameters[str(parName)][3]!="": try: LPars += str(self.parameters[str(parName)][3][ eval(self.parameters[str(parName)][2]).index(eval("self.learner."+parName)) ]) except: LPars += str(eval("self.learner."+parName)) +" (Could not find parameter value alias)" print "Missing alias for: ",eval("self.learner."+parName) else: LPars += str(eval("self.learner."+parName)) LPars += " ("+str(self.tunedPars[1][parName])+")\r\n\t" else: LPars += parName+ " = NoParameter" + " ("+str(self.tunedPars[1][parName])+")\r\n\t" self.infoPars.setText("Check the returned Parametrs\nin the output window") print "Learner parameters (tuned):\r\n\t" + LPars self.infoRes.setText("Best optimization result = " + str(self.tunedPars[0])) self.adjustSize() self.updateTable() def setData(self, dataset): if dataset: self.dataset = dataset else: self.dataset = None self.optimizeParameters() def setLearner(self, learner): self.infoStatus.setText('') self.infoPars.setText('') self.infoRes.setText('') self.clearTable() if learner: self.learner = learner else: self.learner = None return # check if learner is defined in AZLearnersParamsConfig self.learnerType = str(self.learner).split()[0] if self.learnerType[0] == "<": self.learnerType = self.learnerType[1:] if self.learnerType.rfind('.') >=0: self.learnerType = self.learnerType[self.learnerType.rfind('.')+1:] 
if AZLearnersParamsConfig.version != self.requiredParamVer: self.setErrors("The version of AZLearnersParamsConfig.py is not the correct for this widget. Please use the version "+str(self.requiredParamVer)) self.learnerType = None self.parameters = None return if not hasattr(AZLearnersParamsConfig, self.learnerType): self.setErrors("The learner " + str(self.learnerType) +" is not compatible with the Optimizer!") self.learnerType = None self.parameters = None return else: self.originalParameters = eval("AZLearnersParamsConfig." + self.learnerType) self.parameters = deepcopy(self.originalParameters) for parameter in self.parameters: if len(self.parameters[parameter]) != 8: self.setErrors("The version of AZLearnersParamsConfig.py is not the correct for this widget") self.parameters = None return self.updateTable() self.optimizeParameters() def clearErrors(self): self.warning() self.infoErr.setText('') def setErrors(self, msg, errType = "ERROR"): self.warning(0,msg) self.infoErr.setText("=== "+errType+" ===\n Please Check the output window\nfor more details.") print errType," (",time.asctime(),"):" print " ",msg def optimizeParameters(self): """ Sets up the input learner with tuned parameters """ self.clearErrors() self.tunedPars = None if hasattr(self.learner,"optimized"): self.learner.optimized = False if not self.learner: self.send("Learner - Tuned", None) self.send("Examples - Optimization Steps", None) self.updateInfo() return # Apply the parameters var with values on configuration table of GUI (user could have changed them!) 
if not self.updateParametersFromTable(): return if not self.dataset: self.dataset = None self.send("Learner - Tuned", None) self.send("Examples - Optimization Steps", None) self.updateInfo() return # Progess Bar 1 optSteps = 3 progress1 = QProgressDialog("Gathering data and configuring the optimizer...", "Cancel", 0, optSteps, self,Qt.Dialog)#, "progress", True ) progress1.setWindowModality(Qt.WindowModal) bar1 = QProgressBar(progress1) bar1.show() progress1.setBar(bar1) #progress1.setTotalSteps(optSteps) progress1.setMinimumDuration(0) progress1.forceShow() progress1.setValue(0) time.sleep(0.1) progress1.setValue(0) # Create path for running the optimizer randNr = random.randint(0,10000) if self.execEnv == 0: scratchdir = miscUtilities.createScratchDir(desc = "OWParamOpt_Serial") else: scratchdir = miscUtilities.createScratchDir(desc ="OWParamOpt_MPI", baseDir = AZOC.NFS_SCRATCHDIR) # Save the dataset to the optimizer running path OrngFile = os.path.join(scratchdir,"OrngData.tab") orange.saveTabDelimited(OrngFile,self.dataset) # Advance Progress Bar progress1.setValue(1) # Define the evaluation method to use if self.dataset.domain.classVar.varType == orange.VarTypes.Continuous: fMin = self.RMethods[self.RMethod][2] evalM = self.RMethods[self.RMethod][1] else: fMin = self.CMethods[self.CMethod][2] evalM= self.CMethods[self.CMethod][1] try: if os.path.exists(os.path.join(scratchdir,"AZLearnersParamsConfig.py")): os.system("rm "+str(os.path.join(scratchdir,"AZLearnersParamsConfig.py"))) paramFile=file(os.path.join(scratchdir,"AZLearnersParamsConfig.py"),"w") paramFile.write(self.learnerType + "= " + str(self.parameters)+"\r\n") paramFile.close() progress1.setValue(2) # Run the optimizer which will configure the input learner and aditionaly return [<minimum of objective function found>, <optimized parameters>] # Serial print "ENV:",self.execEnv if self.execEnv == 0: print "Executing the optimizer in serial mode on local machine" optPID = 
self.optimizer(learner=self.learner, dataSet=OrngFile, evaluateMethod = evalM , findMin=fMin, nFolds = self.nFolds, samplingMethod = self.SMethods[self.SMethod][1], runPath = scratchdir, verbose = self.verbose, externalControl = 1,useParameters = self.parameters, useGridSearchFirst = self.UseGridSearch, gridSearchInnerPoints=self.nInnerPoints, np = None, machinefile = None, advancedMPIoptions = "",) # Local mpi elif self.execEnv == 1: print "Executing the optimizer in parallel mode on local machine" optPID = self.optimizer(learner=self.learner, dataSet=OrngFile, evaluateMethod = evalM , findMin=fMin, nFolds = self.nFolds, samplingMethod = self.SMethods[self.SMethod][1], runPath = scratchdir, verbose = self.verbose, externalControl = 1,useParameters = self.parameters, useGridSearchFirst = self.UseGridSearch, gridSearchInnerPoints=self.nInnerPoints, machinefile = 0) # Sge Molndal elif self.execEnv == 2: print "Executing the optimizer in parallel mode in the batch queue on the sge" print "*****************runPath*****************" optPID = self.optimizer(learner=self.learner, dataSet=OrngFile, evaluateMethod = evalM , findMin=fMin, nFolds = self.nFolds, samplingMethod = self.SMethods[self.SMethod][1], runPath = scratchdir, verbose = self.verbose, externalControl = 1,useParameters = self.parameters, useGridSearchFirst = self.UseGridSearch, gridSearchInnerPoints=self.nInnerPoints, np = 8,machinefile = "qsub")#, sgeEnv = "sge_seml") elif self.execEnv == 3: print "Executing the optimizer in parallel mode in the quick queue on the sge" print "*****************runPath*****************" optPID = self.optimizer(learner=self.learner, dataSet=OrngFile, evaluateMethod = evalM , findMin=fMin, nFolds = self.nFolds, samplingMethod = self.SMethods[self.SMethod][1], runPath = scratchdir, verbose = self.verbose, externalControl = 1,useParameters = self.parameters, useGridSearchFirst = self.UseGridSearch, gridSearchInnerPoints=self.nInnerPoints, np = 8,machinefile = "qsub",queueType = 
"quick.q")#, sgeEnv = "sge_seml") else: print "No SGE Env. selected. Nothing will happen." except: progress1.close() self.updateInfo() self.setErrors("Some error(s) occurred during the optimization.\nCheck the "+str(scratchdir)+" and the output terminal for more information") self.send("Learner - Tuned", None) self.send("Examples - Optimization Steps", None) return progress1.setValue(3) if type(optPID)!=types.IntType: progress1.close() self.updateInfo() self.setErrors("Some error(s) occurred during optimization:\n"+str(optPID)) self.send("Learner - Tuned", None) self.send("Examples - Optimization Steps", None) return progress1.close() # Progess Bar optSteps = (1+round((len(self.dataset)*len(self.dataset.domain.attributes)*self.nParameters)/1000))*8 print "Learner optimization started at "+time.asctime() print "Optimization steps = ",int(optSteps)," (estimated to aprox. ",optSteps/2," seconds)" progress = QProgressDialog("Learner optimization started at "+time.asctime()+" ,please wait...", "Abort Optimization", 0,optSteps ,self,Qt.Dialog)#, "progress", True ) progress.setWindowModality(Qt.WindowModal) bar = QProgressBar(progress) bar.show() progress.setBar(bar) #progress.setTotalSteps(optSteps) progress.setMinimumDuration(0) stepsDone = 0 progress.setValue(stepsDone) progress.forceShow() #Loop waiting for the optimizer to finish while 1: if stepsDone < (progress.maximum()-1): progress.setValue(stepsDone) stepsDone+=1 time.sleep(0.5) else: bar.setTextVisible(False) progress.setLabelText("The optimizer is taking longer than expected, please wait some more time...") stepsDone = 0 progress.setValue(stepsDone) time.sleep(0.5) if progress.wasCanceled(): if not self.optimizer.stop(): progress.setLabelText("Could not stop the optimizer! 
Please wait until it finish...") else: self.setErrors("Learner optimization stopped by user at "+time.asctime(),"WARNING") break if self.optimizer.isFinished(): print "Learner optimization finished at "+time.asctime() break progress.setValue(progress.maximum()-1) time.sleep(0.5) progress.setValue(progress.maximum()) self.tunedPars = self.optimizer.tunedParameters if self.verbose > 0: if self.optimizer.usedMPI: print "appspack version used in fact: MPI" else: print "appspack version used in fact: SERIAL" if type(self.tunedPars) != types.ListType or self.learner.optimized == False: self.send("Learner - Tuned", None) self.send("Examples - Optimization Steps", None) else: self.send("Learner - Tuned", self.learner) self.intRes = dataUtilities.DataTable(scratchdir+"/optimizationLog.txt") self.send("Examples - Optimization Steps", self.intRes) self.updateInfo() if self.verbose == 0: miscUtilities.removeDir(scratchdir) else: self.setErrors("The directory " + str(scratchdir) + " was not deleted because verbose flag is ON","DEBUG") class ProgressBar: def __init__(self, widget, iterations): self.iter = iterations self.widget = widget self.count = 0 self.widget.progressBarInit() def advance(self): self.count += 1 self.widget.progressBarSet(int(self.count*100/self.iter)) def finish(self): self.widget.progressBarFinished() if __name__ == "__main__": appl = QApplication(sys.argv) ow = OWParamOpt() appl.setMainWidget(ow) ow.show() dataset = dataUtilities.DataTable('iris.tab') ow.data(dataset) appl.exec_loop()
lgpl-3.0
-7,613,772,914,191,800,000
52.127182
445
0.577615
false
ychen820/microblog
y/google-cloud-sdk/lib/googlecloudsdk/compute/subcommands/backend_services/add_backend.py
4
3198
# Copyright 2014 Google Inc. All Rights Reserved. """Command for adding a backend to a backend service.""" import copy from googlecloudsdk.calliope import exceptions from googlecloudsdk.compute.lib import backend_services_utils from googlecloudsdk.compute.lib import base_classes class AddBackend(base_classes.ReadWriteCommand): """Add a backend to a backend service.""" @staticmethod def Args(parser): backend_services_utils.AddUpdatableBackendArgs(parser) parser.add_argument( 'name', help='The name of the backend service.') @property def service(self): return self.compute.backendServices @property def resource_type(self): return 'backendServices' def CreateReference(self, args): return self.CreateGlobalReference(args.name) def GetGetRequest(self, args): return (self.service, 'Get', self.messages.ComputeBackendServicesGetRequest( backendService=self.ref.Name(), project=self.project)) def GetSetRequest(self, args, replacement, existing): return (self.service, 'Update', self.messages.ComputeBackendServicesUpdateRequest( backendService=self.ref.Name(), backendServiceResource=replacement, project=self.project)) def Modify(self, args, existing): replacement = copy.deepcopy(existing) group_ref = self.CreateZonalReference( args.group, args.zone, resource_type='zoneViews') group_uri = group_ref.SelfLink() for backend in existing.backends: if group_uri == backend.group: raise exceptions.ToolException( 'Backend [{0}] in zone [{1}] already exists in backend service ' '[{2}].'.format(args.group, args.zone, args.name)) if args.balancing_mode: balancing_mode = self.messages.Backend.BalancingModeValueValuesEnum( args.balancing_mode) else: balancing_mode = None backend = self.messages.Backend( balancingMode=balancing_mode, capacityScaler=args.capacity_scaler, description=args.description, group=group_uri, maxRate=args.max_rate, maxRatePerInstance=args.max_rate_per_instance, maxUtilization=args.max_utilization) replacement.backends.append(backend) return replacement 
AddBackend.detailed_help = { 'brief': 'Add a backend to a backend service', 'DESCRIPTION': """ *{command}* is used to add a backend to a backend service. A backend is a group of tasks that can handle requests sent to a backend service. Currently, the group of tasks can be one or more Google Compute Engine virtual machine instances grouped together using a resource view. Traffic is first spread evenly across all virtual machines in the group. When the group is full, traffic is sent to the next nearest group(s) that still have remaining capacity. To modify the parameters of a backend after it has been added to the backend service, use 'gcloud compute backend-services update-backend' or 'gcloud compute backend-services edit'. """, }
bsd-3-clause
-3,025,253,970,019,805,000
32.663158
76
0.679487
false
EmanueleCannizzaro/scons
test/Libs/SharedLibrary-update-deps.py
1
2591
#!/usr/bin/env python # # Copyright (c) 2001 - 2016 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "test/Libs/SharedLibrary-update-deps.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog" """ Test that SharedLibrary() updates when a different lib is linked, even if it has the same md5. This is http://scons.tigris.org/issues/show_bug.cgi?id=2903 """ import sys import os.path import TestSCons test = TestSCons.TestSCons() test.dir_fixture( "bug2903" ) # Build the sub-libs (don't care about details of this) test.run(arguments='-f SConstruct-libs') # This should build the main lib, using libfoo.so test.run(arguments='libname=foo') # This should rebuild the main lib, using libbar.so; # it should NOT say it's already up to date. 
test.run(arguments='libname=bar') test.must_not_contain_any_line(test.stdout(), ["is up to date"]) # Try it again, in reverse, to make sure: test.run(arguments='libname=foo') test.must_not_contain_any_line(test.stdout(), ["is up to date"]) # Now try changing the link command line (in an innocuous way); should rebuild. if sys.platform == 'win32': extraflags='shlinkflags=/DEBUG' else: extraflags='shlinkflags=-g' test.run(arguments=['libname=foo', extraflags]) test.must_not_contain_any_line(test.stdout(), ["is up to date"]) test.run(arguments=['libname=foo', extraflags, '--debug=explain']) test.must_contain_all_lines(test.stdout(), ["is up to date"]) test.pass_test() # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
mit
-7,112,565,602,712,549,000
36.014286
112
0.745658
false
denisff/python-for-android
python3-alpha/python3-src/Lib/ctypes/test/test_bitfields.py
46
8643
from ctypes import * import unittest import os import ctypes import _ctypes_test class BITS(Structure): _fields_ = [("A", c_int, 1), ("B", c_int, 2), ("C", c_int, 3), ("D", c_int, 4), ("E", c_int, 5), ("F", c_int, 6), ("G", c_int, 7), ("H", c_int, 8), ("I", c_int, 9), ("M", c_short, 1), ("N", c_short, 2), ("O", c_short, 3), ("P", c_short, 4), ("Q", c_short, 5), ("R", c_short, 6), ("S", c_short, 7)] func = CDLL(_ctypes_test.__file__).unpack_bitfields func.argtypes = POINTER(BITS), c_char ##for n in "ABCDEFGHIMNOPQRS": ## print n, hex(getattr(BITS, n).size), getattr(BITS, n).offset class C_Test(unittest.TestCase): def test_ints(self): for i in range(512): for name in "ABCDEFGHI": b = BITS() setattr(b, name, i) self.assertEqual(getattr(b, name), func(byref(b), name.encode('ascii'))) def test_shorts(self): for i in range(256): for name in "MNOPQRS": b = BITS() setattr(b, name, i) self.assertEqual(getattr(b, name), func(byref(b), name.encode('ascii'))) signed_int_types = (c_byte, c_short, c_int, c_long, c_longlong) unsigned_int_types = (c_ubyte, c_ushort, c_uint, c_ulong, c_ulonglong) int_types = unsigned_int_types + signed_int_types class BitFieldTest(unittest.TestCase): def test_longlong(self): class X(Structure): _fields_ = [("a", c_longlong, 1), ("b", c_longlong, 62), ("c", c_longlong, 1)] self.assertEqual(sizeof(X), sizeof(c_longlong)) x = X() x.a, x.b, x.c = -1, 7, -1 self.assertEqual((x.a, x.b, x.c), (-1, 7, -1)) def test_ulonglong(self): class X(Structure): _fields_ = [("a", c_ulonglong, 1), ("b", c_ulonglong, 62), ("c", c_ulonglong, 1)] self.assertEqual(sizeof(X), sizeof(c_longlong)) x = X() self.assertEqual((x.a, x.b, x.c), (0, 0, 0)) x.a, x.b, x.c = 7, 7, 7 self.assertEqual((x.a, x.b, x.c), (1, 7, 1)) def test_signed(self): for c_typ in signed_int_types: class X(Structure): _fields_ = [("dummy", c_typ), ("a", c_typ, 3), ("b", c_typ, 3), ("c", c_typ, 1)] self.assertEqual(sizeof(X), sizeof(c_typ)*2) x = X() self.assertEqual((c_typ, x.a, x.b, x.c), (c_typ, 0, 0, 
0)) x.a = -1 self.assertEqual((c_typ, x.a, x.b, x.c), (c_typ, -1, 0, 0)) x.a, x.b = 0, -1 self.assertEqual((c_typ, x.a, x.b, x.c), (c_typ, 0, -1, 0)) def test_unsigned(self): for c_typ in unsigned_int_types: class X(Structure): _fields_ = [("a", c_typ, 3), ("b", c_typ, 3), ("c", c_typ, 1)] self.assertEqual(sizeof(X), sizeof(c_typ)) x = X() self.assertEqual((c_typ, x.a, x.b, x.c), (c_typ, 0, 0, 0)) x.a = -1 self.assertEqual((c_typ, x.a, x.b, x.c), (c_typ, 7, 0, 0)) x.a, x.b = 0, -1 self.assertEqual((c_typ, x.a, x.b, x.c), (c_typ, 0, 7, 0)) def fail_fields(self, *fields): return self.get_except(type(Structure), "X", (), {"_fields_": fields}) def test_nonint_types(self): # bit fields are not allowed on non-integer types. result = self.fail_fields(("a", c_char_p, 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_char_p')) result = self.fail_fields(("a", c_void_p, 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_void_p')) if c_int != c_long: result = self.fail_fields(("a", POINTER(c_int), 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type LP_c_int')) result = self.fail_fields(("a", c_char, 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_char')) try: c_wchar except NameError: pass else: result = self.fail_fields(("a", c_wchar, 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_wchar')) class Dummy(Structure): _fields_ = [] result = self.fail_fields(("a", Dummy, 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type Dummy')) def test_single_bitfield_size(self): for c_typ in int_types: result = self.fail_fields(("a", c_typ, -1)) self.assertEqual(result, (ValueError, 'number of bits invalid for bit field')) result = self.fail_fields(("a", c_typ, 0)) self.assertEqual(result, (ValueError, 'number of bits invalid for bit field')) class X(Structure): _fields_ = [("a", c_typ, 1)] self.assertEqual(sizeof(X), sizeof(c_typ)) class 
X(Structure): _fields_ = [("a", c_typ, sizeof(c_typ)*8)] self.assertEqual(sizeof(X), sizeof(c_typ)) result = self.fail_fields(("a", c_typ, sizeof(c_typ)*8 + 1)) self.assertEqual(result, (ValueError, 'number of bits invalid for bit field')) def test_multi_bitfields_size(self): class X(Structure): _fields_ = [("a", c_short, 1), ("b", c_short, 14), ("c", c_short, 1)] self.assertEqual(sizeof(X), sizeof(c_short)) class X(Structure): _fields_ = [("a", c_short, 1), ("a1", c_short), ("b", c_short, 14), ("c", c_short, 1)] self.assertEqual(sizeof(X), sizeof(c_short)*3) self.assertEqual(X.a.offset, 0) self.assertEqual(X.a1.offset, sizeof(c_short)) self.assertEqual(X.b.offset, sizeof(c_short)*2) self.assertEqual(X.c.offset, sizeof(c_short)*2) class X(Structure): _fields_ = [("a", c_short, 3), ("b", c_short, 14), ("c", c_short, 14)] self.assertEqual(sizeof(X), sizeof(c_short)*3) self.assertEqual(X.a.offset, sizeof(c_short)*0) self.assertEqual(X.b.offset, sizeof(c_short)*1) self.assertEqual(X.c.offset, sizeof(c_short)*2) def get_except(self, func, *args, **kw): try: func(*args, **kw) except Exception as detail: return detail.__class__, str(detail) def test_mixed_1(self): class X(Structure): _fields_ = [("a", c_byte, 4), ("b", c_int, 4)] if os.name in ("nt", "ce"): self.assertEqual(sizeof(X), sizeof(c_int)*2) else: self.assertEqual(sizeof(X), sizeof(c_int)) def test_mixed_2(self): class X(Structure): _fields_ = [("a", c_byte, 4), ("b", c_int, 32)] self.assertEqual(sizeof(X), sizeof(c_int)*2) def test_mixed_3(self): class X(Structure): _fields_ = [("a", c_byte, 4), ("b", c_ubyte, 4)] self.assertEqual(sizeof(X), sizeof(c_byte)) def test_mixed_4(self): class X(Structure): _fields_ = [("a", c_short, 4), ("b", c_short, 4), ("c", c_int, 24), ("d", c_short, 4), ("e", c_short, 4), ("f", c_int, 24)] # MSVC does NOT combine c_short and c_int into one field, GCC # does (unless GCC is run with '-mms-bitfields' which # produces code compatible with MSVC). 
if os.name in ("nt", "ce"): self.assertEqual(sizeof(X), sizeof(c_int) * 4) else: self.assertEqual(sizeof(X), sizeof(c_int) * 2) def test_anon_bitfields(self): # anonymous bit-fields gave a strange error message class X(Structure): _fields_ = [("a", c_byte, 4), ("b", c_ubyte, 4)] class Y(Structure): _anonymous_ = ["_"] _fields_ = [("_", X)] if __name__ == "__main__": unittest.main()
apache-2.0
393,931,840,046,793,100
34.422131
93
0.469629
false
eayunstack/horizon
openstack_dashboard/dashboards/admin/routers/tabs.py
50
1549
# Copyright 2012, Nachi Ueno, NTT MCL, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack_dashboard.dashboards.admin.routers.extensions.extraroutes\ import tables as ertbl from openstack_dashboard.dashboards.admin.routers.ports import tables as ptbl from openstack_dashboard.dashboards.project.routers.extensions.extraroutes\ import tabs as er_tabs from openstack_dashboard.dashboards.project.routers.extensions.routerrules\ import tabs as rr_tabs from openstack_dashboard.dashboards.project.routers import tabs as r_tabs class OverviewTab(r_tabs.OverviewTab): template_name = "project/routers/_detail_overview.html" class ExtraRoutesTab(er_tabs.ExtraRoutesTab): table_classes = (ertbl.AdminRouterRoutesTable,) class InterfacesTab(r_tabs.InterfacesTab): table_classes = (ptbl.PortsTable,) class RouterDetailTabs(r_tabs.RouterDetailTabs): tabs = (OverviewTab, InterfacesTab, ExtraRoutesTab, rr_tabs.RulesGridTab, rr_tabs.RouterRulesTab) sticky = True
apache-2.0
5,346,667,285,619,817,000
37.725
78
0.761136
false
rdhyee/cookiecutter_test
setup.py
1
1548
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup with open('README.rst') as readme_file: readme = readme_file.read() with open('HISTORY.rst') as history_file: history = history_file.read().replace('.. :changelog:', '') requirements = [ # TODO: put package requirements here ] test_requirements = [ # TODO: put package test requirements here ] setup( name='cookiecutter_test', version='0.1.0', description="Cookiecutter test", long_description=readme + '\n\n' + history, author="Raymond Yee", author_email='[email protected]', url='https://github.com/rdhyee/cookiecutter_test', packages=[ 'cookiecutter_test', ], package_dir={'cookiecutter_test': 'cookiecutter_test'}, include_package_data=True, install_requires=requirements, license="BSD", zip_safe=False, keywords='cookiecutter_test', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', "Programming Language :: Python :: 2", 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], test_suite='tests', tests_require=test_requirements )
bsd-3-clause
-6,297,815,846,297,164,000
26.157895
63
0.620155
false
TeamHG-Memex/autologin
tests/test_autologin.py
1
3571
import pytest import unittest from autologin import AutoLogin, AutoLoginException from tests.mockserver import MockServer, PORT, Login, LoginNoChangeCookie, \ LoginCheckProxy from tests.proxy import PROXY_PORT def test_login_request(): al = AutoLogin() html = ''' <form method="POST" action="."> <input type="email" name="login"> <input type="password" name="password"> <input type="submit" value="Login"> </form> ''' req = al.login_request(html, username='admin', password='secret') assert req == { 'body': 'login=admin&password=secret', 'headers': {b'Content-Type': b'application/x-www-form-urlencoded'}, 'method': 'POST', 'url': '.'} req = al.login_request(html, username='admin', password='secret', base_url='/login/') assert req == { 'body': 'login=admin&password=secret', 'headers': {b'Content-Type': b'application/x-www-form-urlencoded'}, 'method': 'POST', 'url': '/login/'} # These tests should be run last as it uses crochet, and normal scrapy spider # is not finalized correctly after a call to crochet.setup. 
@pytest.mark.last class TestAuthCookiesFromUrl(unittest.TestCase): base_url = 'http://127.0.0.1:{}'.format(PORT) url = base_url + Login.url url_no_change_cookie = base_url + LoginNoChangeCookie.url url_check_proxy = base_url + LoginCheckProxy.url def setUp(self): self.al = AutoLogin() self.mockserver = MockServer() self.mockserver.__enter__() def tearDown(self): self.mockserver.__exit__(None, None, None) def test_no_login_form(self): with pytest.raises(AutoLoginException) as e: self.al.auth_cookies_from_url( self.url + '?hide=', 'admin', 'secret') assert e.value.args[0] == 'nologinform' def test_wrong_password(self): with pytest.raises(AutoLoginException) as e: self.al.auth_cookies_from_url(self.url, 'admin', 'wrong') assert e.value.args[0] == 'badauth' def test_normal_auth(self): cookies = self.al.auth_cookies_from_url( self.url + '?foo=', 'admin', 'secret') assert {c.name: c.value for c in cookies} == {'_auth': 'yes'} def test_redirect_to_same_url(self): cookies = self.al.auth_cookies_from_url(self.url, 'admin', 'secret') assert {c.name: c.value for c in cookies} == {'_auth': 'yes'} def test_proxy(self): assert 'localhost' not in self.url, 'proxy_bypass bypasses localhost' with MockServer('tests.proxy'): with pytest.raises(AutoLoginException) as e: self.al.auth_cookies_from_url( self.url_check_proxy, 'admin', 'secret') cookies = self.al.auth_cookies_from_url( self.url_check_proxy, 'admin', 'secret', settings={ 'HTTP_PROXY': 'http://127.0.0.1:{}'.format(PROXY_PORT) }, ) assert {c.name: c.value for c in cookies} == {'_auth': 'yes'} def test_no_change_cookie(self): cookies = self.al.auth_cookies_from_url( self.url_no_change_cookie, 'admin', 'secret') assert {c.name: c.value for c in cookies} == {'session': '1'} def test_no_change_cookie_wrong_password(self): with pytest.raises(AutoLoginException) as e: self.al.auth_cookies_from_url( self.url_no_change_cookie, 'admin', 'wrong') assert e.value.args[0] == 'badauth'
apache-2.0
8,318,888,454,725,512,000
36.589474
77
0.593391
false
mcclurmc/xen
tools/python/xen/xend/server/ConsoleController.py
49
1310
from xen.xend.server.DevController import DevController from xen.xend.XendLogging import log from xen.xend.XendError import VmError class ConsoleController(DevController): """A dummy controller for us to represent serial and vnc console devices with persistent UUIDs. """ valid_cfg = ['location', 'uuid', 'protocol'] def __init__(self, vm): DevController.__init__(self, vm) self.hotplug = False def getDeviceDetails(self, config): back = dict([(k, config[k]) for k in self.valid_cfg if k in config]) return (self.allocateDeviceID(), back, {}) def getDeviceConfiguration(self, devid, transaction = None): result = DevController.getDeviceConfiguration(self, devid, transaction) if transaction is None: devinfo = self.readBackend(devid, *self.valid_cfg) else: devinfo = self.readBackendTxn(transaction, devid, *self.valid_cfg) config = dict(zip(self.valid_cfg, devinfo)) config = dict([(key, val) for key, val in config.items() if val != None]) return config def migrate(self, deviceConfig, network, dst, step, domName): return 0 def destroyDevice(self, devid, force): DevController.destroyDevice(self, devid, True)
gpl-2.0
-6,386,599,191,117,471,000
33.473684
79
0.647328
false
khchine5/xl
lino_xl/lib/events/fixtures/vor.py
1
10131
# -*- coding: UTF-8 -*- # Copyright 2013-2014 Luc Saffre # License: BSD (see file COPYING for details) """ """ from __future__ import unicode_literals from lino.api import dd, rt from lino.utils import i2d Country = dd.resolve_model("countries.Country") City = dd.resolve_model("countries.Place") Type = dd.resolve_model("events.Type") Event = dd.resolve_model("events.Event") Stage = dd.resolve_model("events.Stage") Place = dd.resolve_model("events.Place") Feature = dd.resolve_model("events.Feature") from lino_xl.lib.countries.models import PlaceTypes def get_city(name): flt = rt.lookup_filter('name', name) try: return City.objects.exclude( type__in=[PlaceTypes.county, PlaceTypes.province]).get(flt) except City.DoesNotExist: raise Exception("No city named %r" % name) def event(type, date, name, name_nl, name_fr, *features, **kw): #~ features = [f.pk for f in features] cities = kw.pop('cities', None) e = Event(type=type, date=i2d(date), name=name, name_nl=name_nl, name_fr=name_fr, **kw) e.full_clean() e.save() if features: e.features = features if cities: for name in cities: stage = Stage(event=e, city=get_city(name)) stage.full_clean() stage.save() #~ e.cities = [get_city(n) for n in cities] return e def objects(): BE = Country.objects.get(pk='BE') DE = Country.objects.get(pk='DE') #~ u = User(username='root') #~ yield u breitensport = Type( name="Breitensport", name_nl="Sport voor allen", name_fr="Sport pour tous") yield breitensport strasse = Type( name="Radrennen Straße", name_nl="Koersen op de weg", name_fr="Courses sur route", events_column_names="where:40 when:40") yield strasse mtb = Type( name="MTB Rennen ≥ 15-jährige", name_nl="MTB koersen ≥ 15 jaarige", name_fr="Courses Mountain Bike pour ≥ 15 ans") yield mtb trophy = Type( name="Mountainbike Rennsport -- Kids Trophy O2 Biker/V.O.R.-Lotto", name_nl="Mountainbike koersen -- Kids Trophy O2 Biker/V.O.R.-Lotto", name_fr="Courses Mountain Bike -- Kids Trophy O2 Biker/V.O.R.-Lotto", events_column_names="when:40 
where:40" ) yield trophy kelmis = City.objects.get(name="Kelmis") # raeren = City.objects.get(name="Raeren") eupen = City.objects.get(name="Eupen") # ottignies = City.objects.get(name="Ottignies") # ans = City.objects.get(name="Ans") bbach = City.objects.get(name="Bütgenbach") # bullingen = City.objects.get(name="Büllingen") stvith = City.objects.get(name="Sankt Vith") # monschau = City.objects.get(name="Montjoie") yield City(name="Lontzen", country=BE) yield City(name="Dinant", country=BE) yield City(name="Erezée", country=BE) #~ stvith = City(name="Sankt Vith",name_fr="Saint-Vith",country=BE) #~ yield stvith #~ monschau = City(name="Monschau",name_fr="Montjoie",country=DE) #~ yield monschau #~ bullingen = City(name="Büllingen",name_fr="Bullange",country=BE) #~ yield bullingen #~ bbach = City(name="Bütgenbach",name_fr="Butgenbach",country=BE) #~ yield bbach irmep = Place(name="IRMEP-Kaserne", name_fr="Caserne IRMEP", city=eupen) yield irmep domaine = Place(name="Zur Domäne", name_fr="«Zur Domäne»", city=bbach) yield domaine triangel = Place(name="Triangel", city=stvith) yield triangel galmei = Place(name="Galmeiplatz (Koul-Gelände)", name_fr="Place Galmei (domaine «Koul»)", city=kelmis) yield galmei f1 = Feature(name="Mountain-Bike-Ausfahrt", name_nl="Mountain Bike tocht", name_fr="Sortie Mountain Bike") yield f1 f2 = Feature(name="Volksradfahren", name_nl="Recreatiev fietsen", name_fr="Cyclisme récréatif") yield f2 f3 = Feature( name="Straße- und Mountain Bike Touren", name_nl="Straße- und Mountain Bike Touren", name_fr="Randonnées route et Mountain Bike") yield f3 f4 = Feature( name="Radtag der DG", name_nl="Fietsdag van de DG", name_fr="Journée vélo de la CG") yield f4 # 2013 yield event(breitensport, 20130324, "18\. Bike-Day IRMEP-RSK Eupen", "18\. Bike-Day IRMEP-RSK Eupen", "18e Bike-Day de l'IRMEP-RSK Eupen", f1, f2, place=irmep) yield event(breitensport, 20130505, "24\. Eifel-Biker event", "24\. 
Eifel-Biker event", "24e event des Eifel-Bikers", f1, f2, place=domaine) yield event(breitensport, 20130706, "Internationale 3 Länderfahrt", "Internationale 3 Länderfahrt", "Randonnée internationale des 3 frontières", f3, f2, place=triangel) yield event(breitensport, 20130901, "Radtag der DG", "Fietsdag van de DG", "Journée vélo de la CG", f3, f2, place=galmei, url="http://www.vclc.be") yield event(strasse, 20130510, "1\. Etappe des Triptyque Ardennais", "1\. etappe Triptyque Ardennais", "1e étape du Triptyque Ardennais", cities=["Raeren", "Büllingen"]) yield event(strasse, 20130511, "2\. Etappe des Triptyque Ardennais", "2\. etappe Triptyque Ardennais", "2e étape du Triptyque Ardennais", cities=["Monschau", "Eupen"]) yield event(strasse, 20130720, "Etappenankunft Tour de la Région Wallonne (TRW)", "Aankomst etappe Tour de la Région Wallonne (TRW)", "Arrivée d'étape du Tour de la Région Wallonne (TRW)", cities=["Ans", "Eupen"]) yield event(trophy, 20130316, '', '', '', cities=["Ottignies"]) yield event(trophy, 20130323, '', '', '', cities=["Thieusies"]) yield event(trophy, 20130427, '', '', '', cities=["Cuesmes"]) yield event(trophy, 20130505, '', '', '', cities=["Bütgenbach"]) yield event(trophy, 20130519, '', '', '', cities=["La Reid"]) yield event(trophy, 20130525, '', '', '', cities=["Eupen"]) yield event(trophy, 20130706, '', '', '', cities=["Sankt Vith"]) #~ yield event(trophy,20130713,'','','',cities=["Ouren"]) yield event(trophy, 20130824, '', '', '', cities=["Blégny"]) yield event(trophy, 20130901, '', '', '', cities=["Kelmis"], url="http://www.vclc.be") yield event(trophy, 20130914, '', '', '', cities=["Cerfontaine"]) yield event(trophy, 20130921, '', '', '', cities=["Burdinne"]) yield event(mtb, 20130526, "Merida Cup – 5\. 
Lauf", "Merida Cup – 5de manche", "Merida Cup – 5e manche", cities=["Eupen"]) yield event(mtb, 20130706, "UCI 2 MTB Rennen", "UCI 2 MTB koers", "Course MTB UCI 2", cities=["Sankt Vith"]) #~ yield event(mtb,20130714, #~ 'Merida Cup – 6\. Lauf', #~ "Merida Cup – 6de manche", #~ "Merida Cup – 6e manche", #~ cities=["Ouren"]) yield event(breitensport, 20140323, "19\. Bike-Day IRMEP-RSK Eupen", "19\. Bike-Day IRMEP-RSK Eupen", "19e Bike-Day de l'IRMEP-RSK Eupen", f1, f2, f4, place=irmep) yield event(breitensport, 20140504, "25\. Eifel-Biker event", "25\. Eifel-Biker event", "25e event des Eifel-Bikers", f1, f2, f4, place=domaine) # url="http://www.eifel-biker.be") yield event(breitensport, 20140605, "Internationale Dreiländerfahrt", "Internationale Drielandentocht", "Randonnée internationale des trois frontières", f3, f2, f4, place=triangel) # url="http://www.rsv.be/dreilanderfahrt-2") yield event(breitensport, 20140518, "Radtag der DG", "Fietsdag van de DG", "Journée vélo de la CG", f3, f2, place=galmei, url="") yield event(strasse, 20140523, "1\. Etappe des Triptyque Ardennais", "1\. etappe Triptyque Ardennais", "1e étape du Triptyque Ardennais", cities=["Kelmis", "Büllingen", "Raeren"]) # url="http://www.cchawy.be/") yield event(strasse, 20140524, "2\. Etappe des Triptyque Ardennais", "2\. etappe Triptyque Ardennais", "2e étape du Triptyque Ardennais", cities=["Bütgenbach", "Eupen", "Lontzen"]) # url="http://www.cchawy.be/") yield event(mtb, 20140705, "UCI 2 MTB Rennen", "UCI 2 MTB koers", "Course MTB UCI 2", cities=["Sankt Vith"]) yield event(mtb, 20140907, 'Wallonia Cup – 6\. 
Lauf', "Wallonia Cup – 6de manche", "Wallonia Cup – 6e manche", cities=["Eupen"]) yield event(trophy, 20140315, '', '', '', cities=["Dinant"]) yield event(trophy, 20140322, '', '', '', cities=["Thieusies"]) yield event(trophy, 20140426, '', '', '', cities=["Cuesmes"]) yield event(trophy, 20140518, '', '', '', cities=["Kelmis"]) yield event(trophy, 20140531, '', '', '', cities=["Erezée"]) yield event(trophy, 20140608, '', '', '', cities=["La Reid"]) yield event(trophy, 20140705, '', '', '', cities=["Sankt Vith"]) yield event(trophy, 20140823, '', '', '', cities=["Blégny"]) yield event(trophy, 20140906, '', '', '', cities=["Eupen"]) yield event(trophy, 20140913, '', '', '', cities=["Cerfontaine"]) yield event(trophy, 20140920, '', '', '', cities=["Burdinne"])
bsd-2-clause
-7,455,656,731,788,142,000
36.674157
77
0.5593
false
JTarball/docker-django-polymer-starter-kit
docker/app/app/backend/apps/_archive/content/admin.py
8
2614
from django.contrib import admin from content.models import Post, Language, Category, SubCategory, PostNode, PostCard, Update, Comment, CommentCard, Dependency, Troubleshooting, Issue class PostAdmin(admin.ModelAdmin): #date_hierarchy = "created_at" #field = ("published", "title", "slug", "updated_at", "author") #list_display = ["published", "title", "updated_at"] #list_display_links = ["title"] #list_editable = ["published"] #list_filter = ["published", "updated_at", "author"] prepopulated_fields = {"slug": ("title",)} #search_fields = ["title", "content"] class PostCardAdmin(admin.ModelAdmin): #date_hierarchy = "created_at" field = ("title", "slug", "updated_at", "author") list_display = ["title", "updated_at"] list_display_links = ["title"] #list_editable = ["published"] #list_filter = ["published", "updated_at", "author"] #prepopulated_fields = {"slug": ("title",)} #search_fields = ["title", "content"] class UpdateAdmin(admin.ModelAdmin): pass class IssueAdmin(admin.ModelAdmin): pass class TroubleshootingAdmin(admin.ModelAdmin): pass class DependencyAdmin(admin.ModelAdmin): pass class CommentAdmin(admin.ModelAdmin): pass class CommentCardAdmin(admin.ModelAdmin): pass class SubCategoryAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} #date_hierarchy = "title" class PostNodeAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ["slug"] #date_hierarchy = "title" class LanguageAdmin(admin.ModelAdmin): #date_hierarchy = "updated_at" #field = ("published", "title", "slug", "updated_at", "author") #list_display = ["published", "title", "updated_at"] #list_display_links = ["title"] #list_editable = ["published"] #list_filter = ["published", "updated_at", "author"] prepopulated_fields = {"slug": ("name",)} #search_fields = ["title", "content"] admin.site.register(Post, PostAdmin) admin.site.register(PostCard, PostCardAdmin) 
admin.site.register(Category, CategoryAdmin) admin.site.register(SubCategory, SubCategoryAdmin) admin.site.register(Language, LanguageAdmin) admin.site.register(PostNode, PostNodeAdmin) admin.site.register(Update, UpdateAdmin) admin.site.register(Comment, CommentAdmin) admin.site.register(CommentCard, CommentCardAdmin) admin.site.register(Dependency, DependencyAdmin) admin.site.register(Issue, IssueAdmin) admin.site.register(Troubleshooting, TroubleshootingAdmin)
isc
8,995,701,318,955,002,000
29.045977
150
0.694721
false
emsrc/hitaext
lib/htxt/gui.py
1
31920
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2007-2013 by # Erwin Marsi and TST-Centrale # # # This file is part of the Hitaext program. # # The Hitaext program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # The Hitaext program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. ''' Hitaext: hierarchical text aligment tool ''' __version__ = "1.0" __author__ = "Erwin Marsi" import wx from xml.parsers.expat import ExpatError from os import getcwd, getenv from os.path import basename, dirname, isabs, realpath, join as joinpaths from platform import system #from htxt.ielemtree import IndexElemTree from daeso.ptc.ielemtree import IndexElemTree from htxt.treectrl import HitaextTreeCtrl from htxt.helpframe import HelpViewFrame from daeso.ptc.document import HitaextDoc class HitaextException(Exception): pass class HitaextError(HitaextException): pass class HitaextWarning(HitaextException): pass class AlignFrame(wx.Frame): ''' a frame containing the two HitaextElemTreeCtrl's representing the ElementTrees for two XML documents ''' def __init__(self, parent, docTrees, alignTree): wx.Frame.__init__(self, parent, -1, title=basename(alignTree.filename), style=wx.MINIMIZE_BOX|wx.MAXIMIZE_BOX|wx.RESIZE_BORDER|wx.CAPTION) self.log = parent.log self.isChanged = False self.docTrees = docTrees self.sync = False self.blockSync = False sizer = wx.BoxSizer(wx.VERTICAL) self.makeTreeCtrls(sizer, alignTree) self.makeOptionCtrls(sizer) self.makeTextFrame() 
#self.Bind(wx.EVT_CHAR, self.onKeyDown) self.SetSizer(sizer) self.Show() def makeTreeCtrls(self, sizer, alignTree): treeSizer = wx.BoxSizer(wx.HORIZONTAL) self.treeCtrls = {} for side in ("from", "to"): self.treeCtrls[side] = HitaextTreeCtrl(self, self.docTrees[side]) self.treeCtrls[side].processElemTree(alignTree.get_pseudo_root(side), alignTree.get_tags(side, "ignore"), alignTree.get_tags(side, "skip")) treeSizer.Add(self.treeCtrls[side], 1, wx.EXPAND) self.Bind(wx.EVT_TREE_KEY_DOWN, self.onKeyDown, self.treeCtrls[side]) self.Bind(wx.EVT_TREE_SEL_CHANGED, self.onFromTreeSelChanged, self.treeCtrls["from"]) self.Bind(wx.EVT_TREE_SEL_CHANGED, self.onToTreeSelChanged, self.treeCtrls["to"]) sizer.Add(treeSizer, 1, wx.EXPAND) def makeOptionCtrls(self, sizer): optionSizer = wx.BoxSizer(wx.HORIZONTAL) syncCheckBox = wx.CheckBox(self, -1, "sync") optionSizer.Add(syncCheckBox, 0, wx.ALL, 8) self.Bind(wx.EVT_CHECKBOX, self.onSync, syncCheckBox) sizer.Add(optionSizer) def makeTextFrame(self): title = ( basename(self.docTrees['from'].filename) + ' | ' + basename(self.docTrees['to'].filename) ) self.textFrame = TextFrame(self, title) self.updateText('from') self.updateText('to') # binding slider events self.Bind(wx.EVT_SCROLL_THUMBRELEASE, self.onFromSliderChanged, self.textFrame.sliders["from"]) self.Bind(wx.EVT_SCROLL_THUMBRELEASE, self.onToSliderChanged, self.textFrame.sliders["to"]) # ------------------------------------------------------------------------ # event methods # ------------------------------------------------------------------------ # treeCtrl events def onFromTreeSelChanged(self, evt): self.updateText("from") self.updateFocus(self.treeCtrls["from"], self.treeCtrls["to"]) def onToTreeSelChanged(self, evt): self.updateText("to") self.updateFocus(self.treeCtrls["to"], self.treeCtrls["from"]) # slider events def onFromSliderChanged(self, evt): self.updateText("from") def onToSliderChanged(self, evt): self.updateText("to") # options def onSync(self, evt): checkBox 
= evt.GetEventObject() self.sync = checkBox.GetValue() # key events def onKeyDown(self, evt): keycode = evt.GetKeyCode() if keycode == wx.WXK_TAB: # if one of the TreeCtrls is selected, # evt is wxTree_Event, so get to the wxKey_Event first if evt.GetKeyEvent().ControlDown(): self.selectNextAlignedItem() else: self.changeTreeCtrlFocus() elif keycode == wx.WXK_SPACE: self.toggleAlignSelection() else: evt.Skip() # ------------------------------------------------------------------------ # GUI update methods # ------------------------------------------------------------------------ def updateText(self, side): elem = self.treeCtrls[side].getSelectedElem() start, end = elem.get("_start"), elem.get("_end") # prevent empty elements from triggering scrolling if start < end: text = self.docTrees[side].text context = self.textFrame.sliders[side].GetValue() # prevent out of bounds viewStart = max(0, start - context) viewEnd = min(len(text), end + context) textCtrl = self.textFrame.texts[side] textCtrl.Clear() textCtrl.SetDefaultStyle(wx.TextAttr("grey", "white")) textCtrl.AppendText(text[viewStart:start]) if elem.get("_alignments"): textCtrl.SetDefaultStyle(wx.TextAttr("forest green", "white")) else: textCtrl.SetDefaultStyle(wx.TextAttr("orange", "white")) # TODO: upper limit should come from setting in <render> section if end - start < 10000: textCtrl.AppendText(text[start:end]) else: textCtrl.AppendText(text[start:start + 5000]) textCtrl.AppendText("\n[...]\n") textCtrl.AppendText(text[end - 5000:end]) textCtrl.SetDefaultStyle(wx.TextAttr("grey", "white")) textCtrl.AppendText(text[end:viewEnd]) # this works on MSW, but it not convenient, # because you have to scroll upwards all the time # to see the right context #textStart = start - viewStart #textCtrl.ShowPosition(textStart) # instead, simply do this (works fine on MSW) textCtrl.ShowPosition(0) # no way to enforce the right positioning at OS X ... 
:-( def updateFocus(self, thisTree, otherTree): # Called after the selected item in one tree has changed # to focus the aliged item(s) in other tree otherTree.clearFocus() thisElem = thisTree.getSelectedElem() otherAlignElems = thisElem.get("_alignments") if otherAlignElems: for otherElem in otherAlignElems: item = otherElem.get("_item") otherTree.setItemFocus(item, focused=True) otherTree.EnsureVisible(item) if self.sync: # When "sync" is checked, the aligned item in the # other tree is automatically selected. However, on # MSW, this fires another EVT_TREE_SEL_CHANGED, which # causes selection of the aligned item in *this* # tree, etc, ad inifintum. Introduced a # self.blockSync boolean to break this endless loop. if system() == "Windows": if self.blockSync: self.blockSync = False else: self.blockSync = True otherTree.selectElem(otherAlignElems[0]) else: otherTree.selectElem(otherAlignElems[0]) def changeTreeCtrlFocus(self): if self.FindFocus() is self.treeCtrls['from']: self.treeCtrls['to'].SetFocus() else: self.treeCtrls['from'].SetFocus() def selectNextAlignedItem(self): if self.FindFocus() is self.treeCtrls['from']: thisTree = self.treeCtrls['from'] otherTree = self.treeCtrls['to'] elif self.FindFocus() is self.treeCtrls['to']: thisTree = self.treeCtrls['to'] otherTree = self.treeCtrls['from'] else: # another control has focus # should never happen return thisSelElem = thisTree.getSelectedElem() otherSelElem = otherTree.getSelectedElem() # alignments are assumed to be in text order otherAlignedElems = thisSelElem.get("_alignments") if not otherAlignedElems: # shortcut return try: i = otherAlignedElems.index(otherSelElem) otherSelElem = otherAlignedElems[i+1] except (IndexError, ValueError): # Incase of a ValueError, the selected element of the other tree # is not aligned to the selected element of this tree. # In case of an IndexError, the selected element of the other tree # is the last of aligned elements. 
# In both cases, we fall back to the first aligned element. otherSelElem = otherAlignedElems[0] otherTree.selectElem(otherSelElem) # ------------------------------------------------------------------------ # alignment methods # ------------------------------------------------------------------------ def toggleAlignSelection(self): fromElem = self.treeCtrls["from"].getSelectedElem() toElem = self.treeCtrls["to"].getSelectedElem() try: # disalign fromElem.get("_alignments").remove(toElem) self.treeCtrls["from"].setElemAlign(fromElem) self.treeCtrls["from"].setElemFocus(fromElem, False) toElem.get("_alignments").remove(fromElem) self.treeCtrls["to"].setElemAlign(toElem) self.treeCtrls["to"].setElemFocus(toElem, False) self.log("Disaligning <%s> #%s to <%s> #%s" % ( fromElem.tag, fromElem.get("_n"), toElem.tag, toElem.get("_n") )) except ValueError, inst: # align fromAlignments = fromElem.get("_alignments") toAlignments = toElem.get("_alignments") fromAlignments.append(toElem) toAlignments.append(fromElem) # keep alignments sorted in text order fromAlignments.sort(lambda e1, e2: int(e1.get("_start")) - int(e2.get("_start"))) toAlignments.sort(lambda e1, e2: int(e1.get("_start")) - int(e2.get("_start"))) self.treeCtrls["from"].setElemAlign(fromElem) self.treeCtrls["to"].setElemAlign(toElem) #if self.FindFocus() is self.treeCtrls["from"]: self.treeCtrls["to"].setElemFocus(toElem, True) #else: self.treeCtrls["from"].setElemFocus(fromElem, True) self.log("Aligning <%s> #%s to <%s> #%s" % ( fromElem.tag, fromElem.get("_n"), toElem.tag, toElem.get("_n") )) self.updateText('from') self.updateText('to') self.isChanged = True class TextFrame(wx.Frame): def __init__(self, parent, title='Hitaext Text'): wx.Frame.__init__(self, parent, size=(800,600), title=title, style=wx.MINIMIZE_BOX|wx.MAXIMIZE_BOX|wx.RESIZE_BORDER|wx.CAPTION) self.title = title panel = wx.Panel(self) self.texts = {} self.sliders = {} sizer = wx.BoxSizer(wx.HORIZONTAL) subSizers = {} for id in ("from", "to"): 
subSizers[id] = wx.BoxSizer(wx.VERTICAL) #self.texts[id] = wx.TextCtrl(self, style=wx.TE_MULTILINE|wx.TE_READONLY)#|wx.TE_NOHIDESEL|wx.TE_RICH) self.texts[id] = wx.TextCtrl(panel, style=wx.TE_MULTILINE|wx.TE_READONLY|wx.TE_RICH2) # this currently only works with develop version wxPython! self.texts[id].MacCheckSpelling(False) subSizers[id].Add(self.texts[id], 1, flag=wx.EXPAND) self.sliders[id] = wx.Slider(panel, -1, 0, 0, 1000, style=wx.SL_HORIZONTAL|wx.SL_AUTOTICKS|wx.SL_LABELS) subSizers[id].Add(self.sliders[id], 0, wx.EXPAND|wx.TOP|wx.BOTTOM, 4) sizer.Add(subSizers[id], 1, wx.EXPAND) panel.SetSizer(sizer) panel.Layout() self.Show() class HitaextFrame(wx.Frame): alignWildcard = "Parallel text Corpus (*.ptc)|*.ptc|Alignment (*.xml)|*.xml" docWildcard = "XML Document (*.xml)|*.xml|All (*.*)|*.*" def __init__(self): wx.Frame.__init__(self, parent=None, id=-1, title="Hitaext", size=(800,200)) self.makeLogText() self.log("Hitaext (%s)" % __version__) self.makeMenuBar() self.CreateStatusBar() self.Bind(wx.EVT_CLOSE, self.onClose) self.reset() def reset(self): # MSW requires explicit kill try: self.align.Destroy() except AttributeError: pass self.align = None self.alignTree = None self.docTrees = {'from': None, 'to': None} # ------------------------------------------------------------------------ # widget construction methods # ------------------------------------------------------------------------ def makeLogText(self): self.logText = wx.TextCtrl(self, style=wx.TE_MULTILINE|wx.TE_READONLY|wx.TE_RICH) def menuData(self): # Using the "\tKeyName" syntax automatically creates a # wx.AcceleratorTable for this frame and binds the keys to # the menu items. 
return ( ("&File", ("&New\tCtrl-N", "New alignment", self.onNew), ("&Open\tCtrl-O", "Open alignment", self.onOpen), #("&Reopen\tCtrl-R", "Reopen alignment", self.onReopen), ("&Save\tCtrl-S", "Save alignment", self.onSave), ("Save &As\tCtrl-A", "Save alignment as", self.onSaveAs), #("&Quit\tCtrl-Q", "Quit", self.onClose) ), ("&Help", ("&Hitaext Help\tCtrl-?", "Read online help", self.onHelp), ("&About Hitaext", "Information about Hitaext", self.onAbout) ) ) def makeMenuBar(self): menuBar = wx.MenuBar() for eachMenuData in self.menuData(): menuLabel = eachMenuData[0] menuItems = eachMenuData[1:] menuBar.Append(self.makeMenu(menuItems), menuLabel) self.SetMenuBar(menuBar) def makeMenu(self, menuData): menu = wx.Menu() for eachLabel, eachStatus, eachHandler in menuData: if not eachLabel: menu.AppendSeparator() continue # a hack to bind the About menu from application menu in OS X if eachLabel.startswith("&About"): id = wx.ID_ABOUT else: id = -1 menuItem = menu.Append(id, eachLabel, eachStatus) self.Bind(wx.EVT_MENU, eachHandler, menuItem) return menu def onClose(self, evt): self.saveChanges() if not self.isChanged(): self.Destroy() # ------------------------------------------------------------------------ # event methods # ------------------------------------------------------------------------ def onNew(self, evt): self.saveChanges() if not self.isChanged(): try: self.alignTree = HitaextDoc() for side in ('from','to'): self.docTrees[side] = self.openDocument(side) self.alignTree.set_filename(side, self.docTrees[side].filename) self.alignTree.init_elems(side, self.docTrees[side]) self.docTrees[side].update() self.alignTree.inject_alignments(self.docTrees["from"], self.docTrees["to"]) self.align = AlignFrame(self, self.docTrees, self.alignTree) except HitaextWarning, inst: # user canceled self.log(str(inst), level='WARNING') #except Exception, inst: # something else went wrong # self.log(str(inst), level='ERROR') # self.reset() def onOpen(self, evt): self.saveChanges() 
if not self.isChanged(): self.reset() try: self.alignTree = self.openAlignment() for side in ('from','to'): docFilename = self.alignTree.get_filename(side) self.docTrees[side] = self.smartReadDocument(side, docFilename) # update is not included in __init__, # because when starting a new alignment # we have to init the <render> section first, # which requires a document tree self.docTrees[side].update(self.alignTree.get_tags(side, "ignore"), self.alignTree.get_tags(side, "newline"), self.alignTree.get_tags(side, "blankline")) self.alignTree.inject_alignments(self.docTrees["from"], self.docTrees["to"]) # constructing other align and text frames self.align = AlignFrame(self, self.docTrees, self.alignTree) except HitaextWarning, inst: # user canceled self.log(str(inst), level='WARNING') except Exception, inst: # something went wrong self.log(str(inst), level='ERROR') self.reset() # uncomment while debugging ## raise def onReopen(self, evt): pass def onSave(self, evt=None): if self.alignTree.filename: self.saveAlignment(self.alignTree.filename) else: self.onSaveAs() def onSaveAs(self, evt=None): filename = wx.FileSelector("Save alignment as...", default_extension=".xml", wildcard=self.alignWildcard, flags=wx.SAVE|wx.OVERWRITE_PROMPT|wx.CHANGE_DIR) if filename: self.saveAlignment(filename) else: self.log('Save alignment canceled by user', level='WARNING') def onAbout(self, evt): info = wx.AboutDialogInfo() info.AddDeveloper("Erwin Marsi") info.SetName("Hitaext") info.SetDescription("Hierarchical text alignment tool") info.SetVersion("Version: " + __version__) info.SetCopyright("GNU Public License") info.SetWebSite("https://github.com/emsrc/hitaext") wx.AboutBox(info) def onHelp(self, evt): HelpViewFrame(self, title="Hitaext help") # ------------------------------------------------------------------------ # log methods # ------------------------------------------------------------------------ def log(self, text, level='INFO', newline=True): text = level + ": " + text if 
newline: text += '\n' self.logText.AppendText(text) # ------------------------------------------------------------------------ # file IO methods # ------------------------------------------------------------------------ def openAlignment(self): ''' select and read alignment file ''' filename = wx.FileSelector("Open alignment...", wildcard=self.alignWildcard) if filename: return self.readAlignment(filename) else: raise HitaextWarning('Open alignment canceled by user') def readAlignment(self, filename): ''' read alignment file ''' # TODO: use current dir or a search path env var, # instead of the full path self.log("Reading alignment from %s" % filename) # errors are reported but are still raised try: return HitaextDoc(file=filename) except IOError, inst: self.log('unable to open file, ' + str(inst), level='ERROR') raise except ExpatError, inst: self.log('cannot parse XML, ' + str(inst), level='ERROR') raise except AssertionError, inst: self.log('XML invalid, ' + str(inst), level='ERROR') raise except Exception, inst: self.log('unknown error, ' + str(inst), level='ERROR') raise def saveAlignment(self, filename): ''' save alignment file ''' self.log('Writing alignment to %s...' % filename) try: self.alignTree.extract_alignments(self.docTrees["from"], self.docTrees["to"]) self.alignTree.write(filename) except Exception, inst: # catching all exception here to be on the safe side self.log('unable to save alignment, ' + str(inst), level='ERROR') # uncomment whhen debuggging: raise else: # this function is also called from onSaveAs, so filename may have changed self.alignTree.filename= filename self.align.SetTitle(basename(filename)) self.align.isChanged = False def openDocument(self, side): ''' select and read source or target XML document ''' if side == 'from': title = 'Open source document...' else: title = 'Open target document...' 
filename = wx.FileSelector(title, wildcard=self.docWildcard, flags=wx.OPEN|wx.CHANGE_DIR) if filename: return self.readDocument(side, filename) else: raise HitaextWarning("Open document canceled by user") def smartReadDocument(self, side, filename): if not isabs(filename): # Unless absolute, document file paths are interpreted relative to # the directory containing the alignment file (rather than the # current working dir) filename = joinpaths( dirname(self.alignTree.filename), filename) try: return self.readDocument(side, filename) except IOError: try: self.log('Searching for document in directory of alignment file') filename = joinpaths(dirname(self.alignTree.filename), basename(filename)) # Document file path is *not* updated in alignment file. This # is good when shipping files for annotation, with alignment # and document files in the same directory, because it # preserves the original document file paths. return self.readDocument(side, filename) except IOError: # Not sure if this is really smart, because it allows # selection of a totally different document self.log('Asking user to locate document') docTree = self.openDocument(side) filename = docTree.filename # In this case, the document file path does get updated self.log('Updating document filename to ' + filename) self.alignTree.set_filename(side, filename) # FIXME: self.align is not there yet # self.align.isChanged = True return docTree def readDocument(self, side, filename): ''' read a source or target XML document ''' if side == 'from': self.log("Reading source document from %s" % filename) else: self.log("Reading target document from %s" % filename) # errors are reported but are still raised try: return IndexElemTree(filename) except IOError, inst: self.log('unable to open file, ' + str(inst), level='ERROR') raise except ExpatError, inst: self.log('cannot parse XML, ' + str(inst), level='ERROR') raise except Exception, inst: self.log('unknown error, ' + str(inst), level='ERROR') raise def 
saveChanges(self): if self.isChanged(): dlg = wx.MessageDialog(self, 'The alignment has been modified. Save changes?', 'Save', wx.YES_NO|wx.CANCEL) answer = dlg.ShowModal() if answer == wx.ID_YES: self.onSave() elif answer == wx.ID_NO: # pretend nothing has changed self.align.isChanged = False # in case on wx.CANCEL, self.isChanged remains True dlg.Destroy() ### ------------------------------------------------------------------------ ### alignment update methods ### ------------------------------------------------------------------------ ##def injectAlignment(self): ##''' ##Update the _alignments attributes in both document trees ##according to the links in the <alignments> section ##of the Hitaext XML document. ##''' ### Called when opening an Hitaext XML document. ### Assume "_alignments" attribute is initialized with empty list ### TODO: ### - error handling ##for elem in self.alignTree.get_alignments(): ##fromTag = elem.get('from_tag') ##fromN = int(elem.get('from_n')) ### fromN/toN counts from 1 -- tagCountTable counts from zero!!! ##fromElem = self.docTrees["from"].tagCountTable[fromTag][fromN - 1] ##toTag = elem.get('to_tag') ##toN = int(elem.get('to_n')) ### fromN/toN counts from 1 -- tagCountTable counts from zero!!! ##toElem = self.docTrees["to"].tagCountTable[toTag][toN - 1] ##fromElem.get("_alignments").append(toElem) ##toElem.get("_alignments").append(fromElem) ##def extractAlignment(self): ##''' ##Replace the links in the <alignments> section ##of the Hitaext XML document according to ##the _alignments attributes in both document trees. ##''' ###Called before saving an Hitaext XML document. 
### TODO: ### - saving with id ##self.alignTree.clear_alignments() ##for fromElem in self.docTrees["from"].getiterator(): ##for toElem in fromElem.get("_alignments", []): ##self.alignTree.add_n_alignment(fromElem.tag, fromElem.get("_n"), ##toElem.tag, toElem.get("_n")) def isChanged(self): return ( self.align and self.align.isChanged ) class Hitaext(wx.App): def __init__(self, cl_args=None, redirect=False, filename=None): self.cl_args = cl_args wx.App.__init__(self, redirect=redirect, filename=filename) def OnInit(self): self.frame = HitaextFrame() self.frame.Show() self.SetTopWindow(self.frame) if self.cl_args: self.handleCommandLineOptions() return True def handleCommandLineOptions(self): if self.cl_args.corpus_file: # This is a ugly hack. The current design should be rewritten to # seperate model actions (like reading a parallel text corpus) # from view actions (like open dialogues). try: frame = self.frame frame.alignTree = frame.readAlignment(self.cl_args.corpus_file) for side in ('from','to'): docFilename = frame.alignTree.get_filename(side) frame.docTrees[side] = frame.smartReadDocument(side, docFilename) # update is not included in __init__, # because when starting a new alignment # we have to init the <render> section first, # which requires a document tree frame.docTrees[side].update(frame.alignTree.get_tags(side, "ignore"), frame.alignTree.get_tags(side, "newline"), frame.alignTree.get_tags(side, "blankline")) frame.alignTree.inject_alignments(frame.docTrees["from"], frame.docTrees["to"]) # constructing other align and text frames frame.align = AlignFrame(frame, frame.docTrees, frame.alignTree) except HitaextWarning, inst: # user canceled frame.log(str(inst), level='WARNING') except Exception, inst: # something went wrong frame.log(str(inst), level='ERROR') frame.reset() # uncomment while debugging ## raise
gpl-3.0
-4,358,360,901,107,539,000
36.954816
114
0.509211
false
aoakeson/home-assistant
tests/util/test_dt.py
4
4802
"""Test Home Assistant date util methods.""" # pylint: disable=too-many-public-methods import unittest from datetime import datetime, timedelta import homeassistant.util.dt as dt_util TEST_TIME_ZONE = 'America/Los_Angeles' class TestDateUtil(unittest.TestCase): """Test util date methods.""" def setUp(self): """Setup the tests.""" self.orig_default_time_zone = dt_util.DEFAULT_TIME_ZONE def tearDown(self): """Stop everything that was started.""" dt_util.set_default_time_zone(self.orig_default_time_zone) def test_get_time_zone_retrieves_valid_time_zone(self): """Test getting a time zone.""" time_zone = dt_util.get_time_zone(TEST_TIME_ZONE) self.assertIsNotNone(time_zone) self.assertEqual(TEST_TIME_ZONE, time_zone.zone) def test_get_time_zone_returns_none_for_garbage_time_zone(self): """Test getting a non existing time zone.""" time_zone = dt_util.get_time_zone("Non existing time zone") self.assertIsNone(time_zone) def test_set_default_time_zone(self): """Test setting default time zone.""" time_zone = dt_util.get_time_zone(TEST_TIME_ZONE) dt_util.set_default_time_zone(time_zone) # We cannot compare the timezones directly because of DST self.assertEqual(time_zone.zone, dt_util.now().tzinfo.zone) def test_utcnow(self): """Test the UTC now method.""" self.assertAlmostEqual( dt_util.utcnow().replace(tzinfo=None), datetime.utcnow(), delta=timedelta(seconds=1)) def test_now(self): """Test the now method.""" dt_util.set_default_time_zone(dt_util.get_time_zone(TEST_TIME_ZONE)) self.assertAlmostEqual( dt_util.as_utc(dt_util.now()).replace(tzinfo=None), datetime.utcnow(), delta=timedelta(seconds=1)) def test_as_utc_with_naive_object(self): """Test the now method.""" utcnow = datetime.utcnow() self.assertEqual(utcnow, dt_util.as_utc(utcnow).replace(tzinfo=None)) def test_as_utc_with_utc_object(self): """Test UTC time with UTC object.""" utcnow = dt_util.utcnow() self.assertEqual(utcnow, dt_util.as_utc(utcnow)) def test_as_utc_with_local_object(self): """Test the UTC time with 
local object.""" dt_util.set_default_time_zone(dt_util.get_time_zone(TEST_TIME_ZONE)) localnow = dt_util.now() utcnow = dt_util.as_utc(localnow) self.assertEqual(localnow, utcnow) self.assertNotEqual(localnow.tzinfo, utcnow.tzinfo) def test_as_local_with_naive_object(self): """Test local time with native object.""" now = dt_util.now() self.assertAlmostEqual( now, dt_util.as_local(datetime.utcnow()), delta=timedelta(seconds=1)) def test_as_local_with_local_object(self): """Test local with local object.""" now = dt_util.now() self.assertEqual(now, now) def test_as_local_with_utc_object(self): """Test local time with UTC object.""" dt_util.set_default_time_zone(dt_util.get_time_zone(TEST_TIME_ZONE)) utcnow = dt_util.utcnow() localnow = dt_util.as_local(utcnow) self.assertEqual(localnow, utcnow) self.assertNotEqual(localnow.tzinfo, utcnow.tzinfo) def test_utc_from_timestamp(self): """Test utc_from_timestamp method.""" self.assertEqual( datetime(1986, 7, 9, tzinfo=dt_util.UTC), dt_util.utc_from_timestamp(521251200)) def test_datetime_to_str(self): """Test datetime_to_str.""" self.assertEqual( "12:00:00 09-07-1986", dt_util.datetime_to_str(datetime(1986, 7, 9, 12, 0, 0))) def test_datetime_to_local_str(self): """Test datetime_to_local_str.""" self.assertEqual( dt_util.datetime_to_str(dt_util.now()), dt_util.datetime_to_local_str(dt_util.utcnow())) def test_str_to_datetime_converts_correctly(self): """Test str_to_datetime converts strings.""" self.assertEqual( datetime(1986, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC), dt_util.str_to_datetime("12:00:00 09-07-1986")) def test_str_to_datetime_returns_none_for_incorrect_format(self): """Test str_to_datetime returns None if incorrect format.""" self.assertIsNone(dt_util.str_to_datetime("not a datetime string")) def test_strip_microseconds(self): """Test the now method.""" test_time = datetime(2015, 1, 1, microsecond=5000) self.assertNotEqual(0, test_time.microsecond) self.assertEqual(0, 
dt_util.strip_microseconds(test_time).microsecond)
mit
6,260,780,298,936,296,000
34.051095
78
0.622865
false
davidfather/TizenRT
external/iotjs/deps/jerry/tools/js2c.py
33
3950
#!/usr/bin/env python # Copyright JS Foundation and other contributors, http://js.foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # This file converts ./js/*.js to a C-array in ./source/jerry-targetjs.h file import argparse import glob import os import re from gen_c_source import LICENSE, format_code HEADER = '''#ifndef JERRY_TARGETJS_H #define JERRY_TARGETJS_H ''' FOOTER = ''' #endif ''' NATIVE_STRUCT = ''' struct js_source_all { const char* name; const char* source; const int length; }; #define DECLARE_JS_CODES \\ struct js_source_all js_codes[] = \\ { \\''' def extract_name(path): special_chars = re.compile(r'[-\\?\'".]') return special_chars.sub('_', os.path.splitext(os.path.basename(path))[0]) def reduce_code(code): code = re.sub(r"/\*.*?\*/", "", code, flags=re.DOTALL) # remove all occurance streamed comments code = re.sub(r"//.*?\n", "", code) # remove all occurance singleline comments code = re.sub('\n+', '\n', re.sub('\n +', '\n', code)) # remove white spaces return code def js_to_native_code(path, name, build_type): with open(path, 'r') as js_source: code = js_source.read() if build_type != 'debug': code = reduce_code(code) data = format_code(code, 1, 2) native_code = """const static char {0}_n[] = "{0}"; const static char {0}_s[] = {{ {1} }}; const static int {0}_l = {2}; """.format(name, data, len(code)) return native_code def main(): parser = argparse.ArgumentParser(description="js2c") parser.add_argument('--build-type', help='build type', 
default='release', choices=['release', 'debug']) parser.add_argument('--ignore', help='files to ignore', dest='ignore_files', default=[], action='append') parser.add_argument('--no-main', help="don't require a 'main.js' file", dest='main', action='store_false', default=True) parser.add_argument('--js-source', dest='js_source_path', default='./js', help='Source directory of JavaScript files" (default: %(default)s)') parser.add_argument('--dest', dest='output_path', default='./source', help="Destination directory of 'jerry-targetjs.h' (default: %(default)s)") script_args = parser.parse_args() gen_line = "/* This file is generated by %s. Please do not modify. */" % os.path.basename(__file__) gen_output = [LICENSE, "", gen_line, "", HEADER] gen_structs = [NATIVE_STRUCT] if script_args.main: gen_structs.append(' {{ {0}_n, {0}_s, {0}_l }}, \\'.format("main")) files = glob.glob(os.path.join(script_args.js_source_path, '*.js')) for path in files: if os.path.basename(path) not in script_args.ignore_files: name = extract_name(path) gen_output.append(js_to_native_code(path, name, script_args.build_type)) if name != 'main': gen_structs.append(' {{ {0}_n, {0}_s, {0}_l }}, \\'.format(name)) gen_structs.append(' { NULL, NULL, 0 } \\\n};') gen_output.append("\n".join(gen_structs)) gen_output.append(FOOTER) with open(os.path.join(script_args.output_path, 'jerry-targetjs.h'), 'w') as gen_file: gen_file.write("\n".join(gen_output)) if __name__ == "__main__": main()
apache-2.0
-7,376,069,163,883,445,000
30.349206
109
0.598987
false
turbokongen/home-assistant
homeassistant/components/ping/device_tracker.py
9
4625
"""Tracks devices by sending a ICMP echo request (ping).""" from datetime import timedelta import logging import subprocess import sys from icmplib import SocketPermissionError, ping as icmp_ping import voluptuous as vol from homeassistant import const, util from homeassistant.components.device_tracker import PLATFORM_SCHEMA from homeassistant.components.device_tracker.const import ( CONF_SCAN_INTERVAL, SCAN_INTERVAL, SOURCE_TYPE_ROUTER, ) import homeassistant.helpers.config_validation as cv from homeassistant.util.async_ import run_callback_threadsafe from homeassistant.util.process import kill_subprocess from . import async_get_next_ping_id from .const import PING_ATTEMPTS_COUNT, PING_TIMEOUT _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 CONF_PING_COUNT = "count" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(const.CONF_HOSTS): {cv.slug: cv.string}, vol.Optional(CONF_PING_COUNT, default=1): cv.positive_int, } ) class HostSubProcess: """Host object with ping detection.""" def __init__(self, ip_address, dev_id, hass, config): """Initialize the Host pinger.""" self.hass = hass self.ip_address = ip_address self.dev_id = dev_id self._count = config[CONF_PING_COUNT] if sys.platform == "win32": self._ping_cmd = ["ping", "-n", "1", "-w", "1000", self.ip_address] else: self._ping_cmd = ["ping", "-n", "-q", "-c1", "-W1", self.ip_address] def ping(self): """Send an ICMP echo request and return True if success.""" pinger = subprocess.Popen( self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL ) try: pinger.communicate(timeout=1 + PING_TIMEOUT) return pinger.returncode == 0 except subprocess.TimeoutExpired: kill_subprocess(pinger) return False except subprocess.CalledProcessError: return False def update(self, see): """Update device state by sending one or more ping messages.""" failed = 0 while failed < self._count: # check more times if host is unreachable if self.ping(): see(dev_id=self.dev_id, source_type=SOURCE_TYPE_ROUTER) return True 
failed += 1 _LOGGER.debug("No response from %s failed=%d", self.ip_address, failed) class HostICMPLib: """Host object with ping detection.""" def __init__(self, ip_address, dev_id, hass, config): """Initialize the Host pinger.""" self.hass = hass self.ip_address = ip_address self.dev_id = dev_id self._count = config[CONF_PING_COUNT] def ping(self): """Send an ICMP echo request and return True if success.""" next_id = run_callback_threadsafe( self.hass.loop, async_get_next_ping_id, self.hass ).result() return icmp_ping( self.ip_address, count=PING_ATTEMPTS_COUNT, timeout=1, id=next_id ).is_alive def update(self, see): """Update device state by sending one or more ping messages.""" if self.ping(): see(dev_id=self.dev_id, source_type=SOURCE_TYPE_ROUTER) return True _LOGGER.debug( "No response from %s (%s) failed=%d", self.ip_address, self.dev_id, PING_ATTEMPTS_COUNT, ) def setup_scanner(hass, config, see, discovery_info=None): """Set up the Host objects and return the update function.""" try: # Verify we can create a raw socket, or # fallback to using a subprocess icmp_ping("127.0.0.1", count=0, timeout=0) host_cls = HostICMPLib except SocketPermissionError: host_cls = HostSubProcess hosts = [ host_cls(ip, dev_id, hass, config) for (dev_id, ip) in config[const.CONF_HOSTS].items() ] interval = config.get( CONF_SCAN_INTERVAL, timedelta(seconds=len(hosts) * config[CONF_PING_COUNT]) + SCAN_INTERVAL, ) _LOGGER.debug( "Started ping tracker with interval=%s on hosts: %s", interval, ",".join([host.ip_address for host in hosts]), ) def update_interval(now): """Update all the hosts on every interval time.""" try: for host in hosts: host.update(see) finally: hass.helpers.event.track_point_in_utc_time( update_interval, util.dt.utcnow() + interval ) update_interval(None) return True
apache-2.0
-5,301,725,473,579,390,000
30.25
80
0.610595
false
LumPenPacK/NetworkExtractionFromImages
win_build/nefi2_win_amd64_msvc_2015/tools/pyqtdeploy/pyqtdeploy/gui/exception_handlers.py
1
1651
# Copyright (c) 2014, Riverbank Computing Limited # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from PyQt5.QtWidgets import QMessageBox def handle_user_exception(e, title, parent): """ Handle a UserException. """ msg_box = QMessageBox(QMessageBox.Warning, title, e.text, parent=parent) if e.detail != '': msg_box.setDetailedText(e.detail) msg_box.exec()
bsd-2-clause
6,005,242,608,227,723,000
42.447368
78
0.76378
false
wong2/gunicorn
examples/when_ready.conf.py
4
1067
import signal import commands import threading import time max_mem = 100000 class MemoryWatch(threading.Thread): def __init__(self, server, max_mem): super(MemoryWatch, self).__init__() self.daemon = True self.server = server self.max_mem = max_mem self.timeout = server.timeout / 2 def memory_usage(self, pid): try: out = commands.getoutput("ps -o rss -p %s" % pid) except IOError: return -1 used_mem = sum(int(x) for x in out.split('\n')[1:]) return used_mem def run(self): while True: for (pid, worker) in list(self.server.WORKERS.items()): if self.memory_usage(pid) > self.max_mem: self.server.log.info("Pid %s killed (memory usage > %s)", pid, self.max_mem) self.server.kill_worker(pid, signal.SIGTERM) time.sleep(self.timeout) def when_ready(server): mw = MemoryWatch(server, max_mem) mw.start()
mit
2,480,432,556,739,973,600
27.078947
78
0.540769
false
Laurawly/tvm-1
tests/python/unittest/test_te_verify_compute.py
5
2428
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import tvm from tvm import te def test_verify_compute(): n = te.size_var("n") m = te.size_var("m") A = te.placeholder((n, m), name="A") k = te.reduce_axis((0, m), "k") k_ = te.reduce_axis((0, m - 1), "k_") f1 = lambda i: te.sum(A[i, k], axis=k) f2 = lambda i: A[i, 0] + 1 f3 = lambda i: te.sum(A[i, k], axis=k) + 1 f4 = lambda i: A[i, 0] * (te.sum(A[i, k], axis=k) + 1) f5 = lambda i: (te.sum(A[i, k], axis=k), A[i, 0] + 1) f6 = lambda i: (te.sum(A[i, k], axis=k), te.sum(A[i, k_], axis=k_)) # # Valid compute try: B = te.compute((n,), f1, name="B") except tvm._ffi.base.TVMError as ex: assert False # # Valid compute try: B = te.compute((n,), f2, name="B") except tvm._ffi.base.TVMError as ex: assert False # # Invalid compute with non top level reduction try: B = te.compute((n,), f3, name="B") assert False except tvm._ffi.base.TVMError as ex: pass # # Invalid compute with non top level reduction try: B = te.compute((n,), f4, name="B") assert False except tvm._ffi.base.TVMError as ex: pass # # Invalid compute with reduction and non-reduction batch ops try: B0, B1 = te.compute((n,), f5, name="B") assert False except tvm._ffi.base.TVMError as ex: pass # # Invalid compute with unequal batch reduction ops try: B0, 
B1 = te.compute((n,), f6, name="B") assert False except tvm._ffi.base.TVMError as ex: pass if __name__ == "__main__": test_verify_compute()
apache-2.0
-2,562,394,316,911,468,000
28.609756
71
0.606672
false
yephper/django
tests/raw_query/models.py
1
1298
from django.db import models class Author(models.Model): first_name = models.CharField(max_length=255) last_name = models.CharField(max_length=255) dob = models.DateField() def __init__(self, *args, **kwargs): super(Author, self).__init__(*args, **kwargs) # Protect against annotations being passed to __init__ -- # this'll make the test suite get angry if annotations aren't # treated differently than fields. for k in kwargs: assert k in [f.attname for f in self._meta.fields], \ "Author.__init__ got an unexpected parameter: %s" % k class Book(models.Model): title = models.CharField(max_length=255) author = models.ForeignKey(Author, models.CASCADE) paperback = models.BooleanField(default=False) opening_line = models.TextField() class BookFkAsPk(models.Model): book = models.ForeignKey(Book, models.CASCADE, primary_key=True, db_column="not_the_default") class Coffee(models.Model): brand = models.CharField(max_length=255, db_column="name") price = models.DecimalField(max_digits=10, decimal_places=2, default=0) class Reviewer(models.Model): reviewed = models.ManyToManyField(Book) class FriendlyAuthor(Author): pass
bsd-3-clause
-5,808,809,182,959,689,000
30.45
97
0.656394
false
geosolutions-it/ckanext-geonetwork
ckanext/geonetwork/harvesters/utils.py
2
1941
# -*- coding: utf-8 -*- import logging #import re import urllib import urllib2 import zipfile from StringIO import StringIO from lxml import etree GEONETWORK_V26 = "2.6" GEONETWORK_V210 = "2.10" GEONETWORK_VERSIONS = [GEONETWORK_V26, GEONETWORK_V210] logger = logging.getLogger(__name__) class GeoNetworkClient(object): def __init__(self, base, version): if version is None: version = GEONETWORK_V210 assert version in GEONETWORK_VERSIONS self.version = version self.base = base def retrieveInfo(self, uuid): if self.version == GEONETWORK_V26: url = "%s/srv/en/mef.export" % self.base #headers = { #"Content-Type": "application/x-www-form-urlencoded", #"Accept": "text/plain" #} query = urllib.urlencode({ "uuid": uuid }) logger.info('Loading MEF for %s', uuid) request = urllib2.Request(url, query) opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(), urllib2.HTTPRedirectHandler()) response = opener.open(request) # will get a ZIP file content = response.read() #logger.info('----> %s', content) #print 'RESPONSE ', content zdata = StringIO(content) zfile = zipfile.ZipFile(zdata) xml = None for name in zfile.namelist(): #logger.info(' MEF entry: %s', name) #print ' MEF entry: ', name if name == 'info.xml': uncompressed = zfile.read(name) xml = etree.fromstring(uncompressed) return xml def retrieveMetadataCategories(self, uuid): xml = self.retrieveInfo(uuid) cats = [] for cat in xml.findall('categories/category'): cats.append(cat.get('name')) return cats
gpl-3.0
-8,594,300,413,167,136,000
25.958333
103
0.556414
false
kose-y/pylearn2
pylearn2/scripts/mlp/predict_csv.py
44
4691
#!/usr/bin/env python # coding: utf-8 """ Script to predict values using a pkl model file. This is a configurable script to make predictions. Basic usage: .. code-block:: none predict_csv.py pkl_file.pkl test.csv output.csv Optionally it is possible to specify if the prediction is regression or classification (default is classification). The predicted variables are integer by default. Based on this script: http://fastml.com/how-to-get-predictions-from-pylearn2/. This script doesn't use batches. If you run out of memory it could be resolved by implementing a batch version. """ from __future__ import print_function __authors__ = ["Zygmunt Zając", "Marco De Nadai"] __license__ = "GPL" import sys import os import argparse import numpy as np from pylearn2.utils import serial from theano import tensor as T from theano import function def make_argument_parser(): """ Creates an ArgumentParser to read the options for this script from sys.argv """ parser = argparse.ArgumentParser( description="Launch a prediction from a pkl file" ) parser.add_argument('model_filename', help='Specifies the pkl model file') parser.add_argument('test_filename', help='Specifies the csv file with the values to predict') parser.add_argument('output_filename', help='Specifies the predictions output file') parser.add_argument('--prediction_type', '-P', default="classification", help='Prediction type (classification/regression)') parser.add_argument('--output_type', '-T', default="int", help='Output variable type (int/float)') parser.add_argument('--has-headers', '-H', dest='has_headers', action='store_true', help='Indicates the first row in the input file is feature labels') parser.add_argument('--has-row-label', '-L', dest='has_row_label', action='store_true', help='Indicates the first column in the input file is row labels') parser.add_argument('--delimiter', '-D', default=',', help="Specifies the CSV delimiter for the test file. 
Usual values are \ comma (default) ',' semicolon ';' colon ':' tabulation '\\t' and space ' '") return parser def predict(model_path, test_path, output_path, predictionType="classification", outputType="int", headers=False, first_col_label=False, delimiter=","): """ Predict from a pkl file. Parameters ---------- modelFilename : str The file name of the model file. testFilename : str The file name of the file to test/predict. outputFilename : str The file name of the output file. predictionType : str, optional Type of prediction (classification/regression). outputType : str, optional Type of predicted variable (int/float). headers : bool, optional Indicates whether the first row in the input file is feature labels first_col_label : bool, optional Indicates whether the first column in the input file is row labels (e.g. row numbers) """ print("loading model...") try: model = serial.load(model_path) except Exception as e: print("error loading {}:".format(model_path)) print(e) return False print("setting up symbolic expressions...") X = model.get_input_space().make_theano_batch() Y = model.fprop(X) if predictionType == "classification": Y = T.argmax(Y, axis=1) f = function([X], Y, allow_input_downcast=True) print("loading data and predicting...") # x is a numpy array # x = pickle.load(open(test_path, 'rb')) skiprows = 1 if headers else 0 x = np.loadtxt(test_path, delimiter=delimiter, skiprows=skiprows) if first_col_label: x = x[:,1:] y = f(x) print("writing predictions...") variableType = "%d" if outputType != "int": variableType = "%f" np.savetxt(output_path, y, fmt=variableType) return True if __name__ == "__main__": """ See module-level docstring for a description of the script. """ parser = make_argument_parser() args = parser.parse_args() ret = predict(args.model_filename, args.test_filename, args.output_filename, args.prediction_type, args.output_type, args.has_headers, args.has_row_label, args.delimiter) if not ret: sys.exit(-1)
bsd-3-clause
-6,073,500,448,119,417,000
31.344828
105
0.615352
false
slimcoin-project/Slimcoin
contrib/bitrpc/bitrpc.py
4
7840
from jsonrpc import ServiceProxy import sys import string # ===== BEGIN USER SETTINGS ===== # if you do not set these you will be prompted for a password for every command rpcuser = "" rpcpass = "" # ====== END USER SETTINGS ====== if rpcpass == "": access = ServiceProxy("http://127.0.0.1:41683") else: access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:41683") cmd = sys.argv[1].lower() if cmd == "backupwallet": try: path = raw_input("Enter destination path/filename: ") print access.backupwallet(path) except: print "\n---An error occurred---\n" elif cmd == "getaccount": try: addr = raw_input("Enter a Slimcoin address: ") print access.getaccount(addr) except: print "\n---An error occurred---\n" elif cmd == "getaccountaddress": try: acct = raw_input("Enter an account name: ") print access.getaccountaddress(acct) except: print "\n---An error occurred---\n" elif cmd == "getaddressesbyaccount": try: acct = raw_input("Enter an account name: ") print access.getaddressesbyaccount(acct) except: print "\n---An error occurred---\n" elif cmd == "getbalance": try: acct = raw_input("Enter an account (optional): ") mc = raw_input("Minimum confirmations (optional): ") try: print access.getbalance(acct, mc) except: print access.getbalance() except: print "\n---An error occurred---\n" elif cmd == "getblockbycount": try: height = raw_input("Height: ") print access.getblockbycount(height) except: print "\n---An error occurred---\n" elif cmd == "getblockcount": try: print access.getblockcount() except: print "\n---An error occurred---\n" elif cmd == "getblocknumber": try: print access.getblocknumber() except: print "\n---An error occurred---\n" elif cmd == "getconnectioncount": try: print access.getconnectioncount() except: print "\n---An error occurred---\n" elif cmd == "getdifficulty": try: print access.getdifficulty() except: print "\n---An error occurred---\n" elif cmd == "getgenerate": try: print access.getgenerate() except: print "\n---An error occurred---\n" elif cmd 
== "gethashespersec": try: print access.gethashespersec() except: print "\n---An error occurred---\n" elif cmd == "getinfo": try: print access.getinfo() except: print "\n---An error occurred---\n" elif cmd == "getnewaddress": try: acct = raw_input("Enter an account name: ") try: print access.getnewaddress(acct) except: print access.getnewaddress() except: print "\n---An error occurred---\n" elif cmd == "getreceivedbyaccount": try: acct = raw_input("Enter an account (optional): ") mc = raw_input("Minimum confirmations (optional): ") try: print access.getreceivedbyaccount(acct, mc) except: print access.getreceivedbyaccount() except: print "\n---An error occurred---\n" elif cmd == "getreceivedbyaddress": try: addr = raw_input("Enter a Slimcoin address (optional): ") mc = raw_input("Minimum confirmations (optional): ") try: print access.getreceivedbyaddress(addr, mc) except: print access.getreceivedbyaddress() except: print "\n---An error occurred---\n" elif cmd == "gettransaction": try: txid = raw_input("Enter a transaction ID: ") print access.gettransaction(txid) except: print "\n---An error occurred---\n" elif cmd == "getwork": try: data = raw_input("Data (optional): ") try: print access.gettransaction(data) except: print access.gettransaction() except: print "\n---An error occurred---\n" elif cmd == "help": try: cmd = raw_input("Command (optional): ") try: print access.help(cmd) except: print access.help() except: print "\n---An error occurred---\n" elif cmd == "listaccounts": try: mc = raw_input("Minimum confirmations (optional): ") try: print access.listaccounts(mc) except: print access.listaccounts() except: print "\n---An error occurred---\n" elif cmd == "listreceivedbyaccount": try: mc = raw_input("Minimum confirmations (optional): ") incemp = raw_input("Include empty? 
(true/false, optional): ") try: print access.listreceivedbyaccount(mc, incemp) except: print access.listreceivedbyaccount() except: print "\n---An error occurred---\n" elif cmd == "listreceivedbyaddress": try: mc = raw_input("Minimum confirmations (optional): ") incemp = raw_input("Include empty? (true/false, optional): ") try: print access.listreceivedbyaddress(mc, incemp) except: print access.listreceivedbyaddress() except: print "\n---An error occurred---\n" elif cmd == "listtransactions": try: acct = raw_input("Account (optional): ") count = raw_input("Number of transactions (optional): ") frm = raw_input("Skip (optional):") try: print access.listtransactions(acct, count, frm) except: print access.listtransactions() except: print "\n---An error occurred---\n" elif cmd == "move": try: frm = raw_input("From: ") to = raw_input("To: ") amt = raw_input("Amount:") mc = raw_input("Minimum confirmations (optional): ") comment = raw_input("Comment (optional): ") try: print access.move(frm, to, amt, mc, comment) except: print access.move(frm, to, amt) except: print "\n---An error occurred---\n" elif cmd == "sendfrom": try: frm = raw_input("From: ") to = raw_input("To: ") amt = raw_input("Amount:") mc = raw_input("Minimum confirmations (optional): ") comment = raw_input("Comment (optional): ") commentto = raw_input("Comment-to (optional): ") try: print access.sendfrom(frm, to, amt, mc, comment, commentto) except: print access.sendfrom(frm, to, amt) except: print "\n---An error occurred---\n" elif cmd == "sendmany": try: frm = raw_input("From: ") to = raw_input("To (in format address1:amount1,address2:amount2,...): ") mc = raw_input("Minimum confirmations (optional): ") comment = raw_input("Comment (optional): ") try: print access.sendmany(frm,to,mc,comment) except: print access.sendmany(frm,to) except: print "\n---An error occurred---\n" elif cmd == "sendtoaddress": try: to = raw_input("To (in format address1:amount1,address2:amount2,...): ") amt = raw_input("Amount:") 
comment = raw_input("Comment (optional): ") commentto = raw_input("Comment-to (optional): ") try: print access.sendtoaddress(to,amt,comment,commentto) except: print access.sendtoaddress(to,amt) except: print "\n---An error occurred---\n" elif cmd == "setaccount": try: addr = raw_input("Address: ") acct = raw_input("Account:") print access.setaccount(addr,acct) except: print "\n---An error occurred---\n" elif cmd == "setgenerate": try: gen= raw_input("Generate? (true/false): ") cpus = raw_input("Max processors/cores (-1 for unlimited, optional):") try: print access.setgenerate(gen, cpus) except: print access.setgenerate(gen) except: print "\n---An error occurred---\n" elif cmd == "settxfee": try: amt = raw_input("Amount:") print access.settxfee(amt) except: print "\n---An error occurred---\n" elif cmd == "stop": try: print access.stop() except: print "\n---An error occurred---\n" elif cmd == "validateaddress": try: addr = raw_input("Address: ") print access.validateaddress(addr) except: print "\n---An error occurred---\n" elif cmd == "walletpassphrase": try: pwd = raw_input("Enter wallet passphrase: ") access.walletpassphrase(pwd, 60) print "\n---Wallet unlocked---\n" except: print "\n---An error occurred---\n" elif cmd == "walletpassphrasechange": try: pwd = raw_input("Enter old wallet passphrase: ") pwd2 = raw_input("Enter new wallet passphrase: ") access.walletpassphrasechange(pwd, pwd2) print print "\n---Passphrase changed---\n" except: print print "\n---An error occurred---\n" print else: print "Command not found or not supported"
mit
5,070,777,812,789,766,000
23.197531
79
0.661862
false
frederick-masterton/django
django/contrib/gis/tests/geoapp/test_feeds.py
12
4346
from __future__ import unicode_literals from unittest import skipUnless from xml.dom import minidom from django.conf import settings from django.contrib.sites.models import Site from django.contrib.gis.geos import HAS_GEOS from django.contrib.gis.tests.utils import HAS_SPATIAL_DB from django.test import TestCase, modify_settings, override_settings if HAS_GEOS: from .models import City @modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'}) @override_settings(ROOT_URLCONF='django.contrib.gis.tests.geoapp.urls') @skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.") class GeoFeedTest(TestCase): def setUp(self): Site(id=settings.SITE_ID, domain="example.com", name="example.com").save() def assertChildNodes(self, elem, expected): "Taken from syndication/tests.py." actual = set(n.nodeName for n in elem.childNodes) expected = set(expected) self.assertEqual(actual, expected) def test_geofeed_rss(self): "Tests geographic feeds using GeoRSS over RSSv2." # Uses `GEOSGeometry` in `item_geometry` doc1 = minidom.parseString(self.client.get('/feeds/rss1/').content) # Uses a 2-tuple in `item_geometry` doc2 = minidom.parseString(self.client.get('/feeds/rss2/').content) feed1, feed2 = doc1.firstChild, doc2.firstChild # Making sure the box got added to the second GeoRSS feed. self.assertChildNodes(feed2.getElementsByTagName('channel')[0], ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'georss:box', 'atom:link'] ) # Incrementing through the feeds. for feed in [feed1, feed2]: # Ensuring the georss namespace was added to the <rss> element. self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss') chan = feed.getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') self.assertEqual(len(items), City.objects.count()) # Ensuring the georss element was added to each item in the feed. 
for item in items: self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'georss:point']) def test_geofeed_atom(self): "Testing geographic feeds using GeoRSS over Atom." doc1 = minidom.parseString(self.client.get('/feeds/atom1/').content) doc2 = minidom.parseString(self.client.get('/feeds/atom2/').content) feed1, feed2 = doc1.firstChild, doc2.firstChild # Making sure the box got added to the second GeoRSS feed. self.assertChildNodes(feed2, ['title', 'link', 'id', 'updated', 'entry', 'georss:box']) for feed in [feed1, feed2]: # Ensuring the georsss namespace was added to the <feed> element. self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss') entries = feed.getElementsByTagName('entry') self.assertEqual(len(entries), City.objects.count()) # Ensuring the georss element was added to each entry in the feed. for entry in entries: self.assertChildNodes(entry, ['title', 'link', 'id', 'summary', 'georss:point']) def test_geofeed_w3c(self): "Testing geographic feeds using W3C Geo." doc = minidom.parseString(self.client.get('/feeds/w3cgeo1/').content) feed = doc.firstChild # Ensuring the geo namespace was added to the <feed> element. self.assertEqual(feed.getAttribute('xmlns:geo'), 'http://www.w3.org/2003/01/geo/wgs84_pos#') chan = feed.getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') self.assertEqual(len(items), City.objects.count()) # Ensuring the geo:lat and geo:lon element was added to each item in the feed. for item in items: self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'geo:lat', 'geo:lon']) # Boxes and Polygons aren't allowed in W3C Geo feeds. self.assertRaises(ValueError, self.client.get, '/feeds/w3cgeo2/') # Box in <channel> self.assertRaises(ValueError, self.client.get, '/feeds/w3cgeo3/') # Polygons in <entry>
bsd-3-clause
7,450,322,027,230,693,000
46.758242
103
0.650023
false
jordan-developer/pyOCNI
pyocni/suppliers/resourceSupplier.py
2
6610
# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*- # Copyright (C) 2011 Houssem Medhioub - Institut Mines-Telecom # # This library is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with this library. If not, see <http://www.gnu.org/licenses/>. """ Created on Oct 03, 2012 @author: Bilel Msekni @contact: [email protected] @author: Houssem Medhioub @contact: [email protected] @organization: Institut Mines-Telecom - Telecom SudParis @version: 0.3 @license: LGPL - Lesser General Public License """ import pyocni.pyocni_tools.config as config # getting the Logger logger = config.logger class ResourceSupplier(): """ Consults the database to get the data asked for by the dataBakers """ def __init__(self): self.database = config.prepare_PyOCNI_db() def get_my_resources(self,path_url): try: query = self.database.view('/db_views/my_resources',key=path_url) except Exception as e: logger.error("===== Get_resources : " + e.message + " ===== ") return None return query def get_for_update_entities(self,path_url): try: query = self.database.view('/db_views/for_update_entities',key=path_url) except Exception as e: logger.error("===== Get_old_occi_resource_description : " + e.message + " ===== ") return None return query def get_for_register_entities(self): try: query = self.database.view('/db_views/for_register_entities') except Exception as e: logger.error("===== Get_for_register_entities : " + e.message + " ===== ") return None return 
query def get_for_trigger_action(self, path_url): try: query = self.database.view('/db_views/for_trigger_action', key=path_url) except Exception as e: logger.error("===== Get_for_trigger_action : " + e.message + " ===== ") return None return query def get_actions_of_kind_mix(self, kind_id): try: query = self.database.view('/db_views/actions_of_kind_mix', key=kind_id) except Exception as e: logger.error("===== Get_actions_of_kind_mix : " + e.message + " ===== ") return None return query def get_my_mixins(self,url_path): try: query = self.database.view('/db_views/my_mixins',key = url_path) except Exception as e: logger.error("===== Get_my_mixins : " + e.message + " ===== ") return None return query def get_for_associate_mixin(self, item): try: query = self.database.view('/db_views/for_associate_mixin',key=[item]) except Exception as e: logger.error("===== Get_for_associate_mixin : " + e.message + " ===== ") return None return query def get_for_get_entities(self, req_path): try: query = self.database.view('/db_views/for_get_entities',key=req_path) except Exception as e: logger.error("===== Get_for_get_entities : " + e.message + " ===== ") return None return query def get_entities_of_kind(self, cat_id): try: query = self.database.view('/db_views/entities_of_kind',key = cat_id) except Exception as e: logger.error("===== Get_entities_of_kind : " + e.message + " ===== ") return None return query def get_entities_of_mixin(self, cat_id): try: query = self.database.view('/db_views/entities_of_mixin',key = cat_id) except Exception as e: logger.error("===== Get_entities_of_mixin : " + e.message + " ===== ") return None return query def get_my_occi_locations(self): try: query = self.database.view('/db_views/my_occi_locations') except Exception as e: logger.error("===== Get_my_occi_locations : " + e.message + " ===== ") return None return query def get_for_get_filtered(self, entity): try: query = self.database.view('/db_views/for_get_filtered',key=entity) except Exception as e: 
logger.error("===== Get_for_get_filtered : " + e.message + " ===== ") return None return query def get_default_attributes_from_kind(self, req_path): try: query = self.database.view('/db_views/get_default_attributes_from_kind',key=req_path) except Exception as e: logger.error("===== Get_for_get_filtered : " + e.message + " ===== ") return None return query def get_providers(self, kind_id): try: query = self.database.view('/db_views/my_providers',key=kind_id) except Exception as e: logger.error("===== Get_for_get_providers: " + e.message + " ===== ") return None return query def get_delete_on_path(self): try: query = self.database.view('/db_views/for_delete_entities') except Exception as e: logger.error("===== Get_delete_on_Path: " + e.message + " ===== ") return None return query #def recursive_for_attribute(attributes): # """ # # """ # # att_http = list() # for key in attributes.keys(): # if type(attributes[key]) is dict: # items = recursive_for_attribute(attributes[key]) # for item in items: # if not (item.find('{')): # att_http.append(key + item) # else: # att_http.append(key + "." + item) # else: # attributes = treat_attribute_members(attributes) # return attributes # final_att = list() # for item in att_http: # if item.endswith('.'): # final_att.append(item[:-1]) # else: # final_att.append(item) # return final_att
apache-2.0
4,743,949,110,524,539,000
26.890295
97
0.569592
false
sdague/home-assistant
homeassistant/auth/permissions/merge.py
19
1786
"""Merging of policies.""" from typing import Dict, List, Set, cast from .types import CategoryType, PolicyType def merge_policies(policies: List[PolicyType]) -> PolicyType: """Merge policies.""" new_policy: Dict[str, CategoryType] = {} seen: Set[str] = set() for policy in policies: for category in policy: if category in seen: continue seen.add(category) new_policy[category] = _merge_policies( [policy.get(category) for policy in policies] ) cast(PolicyType, new_policy) return new_policy def _merge_policies(sources: List[CategoryType]) -> CategoryType: """Merge a policy.""" # When merging policies, the most permissive wins. # This means we order it like this: # True > Dict > None # # True: allow everything # Dict: specify more granular permissions # None: no opinion # # If there are multiple sources with a dict as policy, we recursively # merge each key in the source. policy: CategoryType = None seen: Set[str] = set() for source in sources: if source is None: continue # A source that's True will always win. Shortcut return. if source is True: return True assert isinstance(source, dict) if policy is None: policy = cast(CategoryType, {}) assert isinstance(policy, dict) for key in source: if key in seen: continue seen.add(key) key_sources = [] for src in sources: if isinstance(src, dict): key_sources.append(src.get(key)) policy[key] = _merge_policies(key_sources) return policy
apache-2.0
737,417,408,198,421,800
26.476923
73
0.581747
false
HybridF5/jacket
jacket/tests/storage/unit/volume/drivers/emc/scaleio/test_manage_existing.py
1
5492
# Copyright (c) 2016 EMC Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from jacket import context from jacket.storage import exception from jacket.tests.storage.unit import fake_volume from jacket.tests.storage.unit.volume.drivers.emc import scaleio from jacket.tests.storage.unit.volume.drivers.emc.scaleio import mocks from jacket.storage.volume import volume_types from mock import patch from six.moves import urllib class TestManageExisting(scaleio.TestScaleIODriver): """Test cases for ``ScaleIODriver.manage_existing()``""" def setUp(self): """Setup a test case environment. Creates a fake volume object and sets up the required API responses. 
""" super(TestManageExisting, self).setUp() ctx = context.RequestContext('fake', 'fake', auth_token=True) self.volume = fake_volume.fake_volume_obj( ctx, **{'provider_id': 'pid_1'}) self.volume_attached = fake_volume.fake_volume_obj( ctx, **{'provider_id': 'pid_2'}) self.volume_no_provider_id = fake_volume.fake_volume_obj(ctx) self.volume_name_2x_enc = urllib.parse.quote( urllib.parse.quote(self.driver._id_to_base64(self.volume.id)) ) self.HTTPS_MOCK_RESPONSES = { self.RESPONSE_MODE.Valid: { 'instances/Volume::' + self.volume['provider_id']: mocks.MockHTTPSResponse({ 'id': 'pid_1', 'sizeInKb': 8388608, 'mappedSdcInfo': None }, 200) }, self.RESPONSE_MODE.BadStatus: { 'instances/Volume::' + self.volume['provider_id']: mocks.MockHTTPSResponse({ 'errorCode': 401, 'message': 'BadStatus Volume Test', }, 401), 'instances/Volume::' + self.volume_attached['provider_id']: mocks.MockHTTPSResponse({ 'id': 'pid_2', 'sizeInKb': 8388608, 'mappedSdcInfo': 'Mapped' }, 200) } } def test_no_source_id(self): existing_ref = {'source-name': 'scaleioVolName'} self.assertRaises(exception.ManageExistingInvalidReference, self.driver.manage_existing, self.volume, existing_ref) def test_no_type_id(self): self.volume['volume_type_id'] = None existing_ref = {'source-id': 'pid_1'} self.assertRaises(exception.ManageExistingVolumeTypeMismatch, self.driver.manage_existing, self.volume, existing_ref) @patch.object( volume_types, 'get_volume_type', return_value={'extra_specs': {'volume_backend_name': 'ScaleIO'}}) def test_volume_not_found(self, _mock_volume_type): self.volume['volume_type_id'] = 'ScaleIO' existing_ref = {'source-id': 'pid_1'} self.set_https_response_mode(self.RESPONSE_MODE.BadStatus) self.assertRaises(exception.ManageExistingInvalidReference, self.driver.manage_existing, self.volume, existing_ref) @patch.object( volume_types, 'get_volume_type', return_value={'extra_specs': {'volume_backend_name': 'ScaleIO'}}) def test_volume_attached(self, _mock_volume_type): 
self.volume_attached['volume_type_id'] = 'ScaleIO' existing_ref = {'source-id': 'pid_2'} self.set_https_response_mode(self.RESPONSE_MODE.BadStatus) self.assertRaises(exception.ManageExistingInvalidReference, self.driver.manage_existing, self.volume_attached, existing_ref) @patch.object( volume_types, 'get_volume_type', return_value={'extra_specs': {'volume_backend_name': 'ScaleIO'}}) def test_manage_get_size_calc(self, _mock_volume_type): self.volume['volume_type_id'] = 'ScaleIO' existing_ref = {'source-id': 'pid_1'} self.set_https_response_mode(self.RESPONSE_MODE.Valid) result = self.driver.manage_existing_get_size(self.volume, existing_ref) self.assertEqual(8, result) @patch.object( volume_types, 'get_volume_type', return_value={'extra_specs': {'volume_backend_name': 'ScaleIO'}}) def test_manage_existing_valid(self, _mock_volume_type): self.volume['volume_type_id'] = 'ScaleIO' existing_ref = {'source-id': 'pid_1'} result = self.driver.manage_existing(self.volume, existing_ref) self.assertEqual('pid_1', result['provider_id'])
apache-2.0
8,863,677,949,633,148,000
41.587302
78
0.576475
false
vit2/vit-e2
lib/python/Screens/InfoBarGenerics.py
1
100355
from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector from Components.ActionMap import ActionMap, HelpableActionMap from Components.ActionMap import NumberActionMap from Components.Harddisk import harddiskmanager from Components.Input import Input from Components.Label import Label from Components.MovieList import AUDIO_EXTENSIONS from Components.PluginComponent import plugins from Components.ServiceEventTracker import ServiceEventTracker from Components.Sources.Boolean import Boolean from Components.config import config, ConfigBoolean, ConfigClock from Components.SystemInfo import SystemInfo from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection from EpgSelection import EPGSelection from Plugins.Plugin import PluginDescriptor from Screen import Screen from Screens import ScreenSaver from Screens import Standby from Screens.ChoiceBox import ChoiceBox from Screens.Dish import Dish from Screens.EventView import EventViewEPGSelect, EventViewSimple from Screens.InputBox import InputBox from Screens.MessageBox import MessageBox from Screens.MinuteInput import MinuteInput from Screens.TimerSelection import TimerSelection from Screens.PictureInPicture import PictureInPicture import Screens.Standby from Screens.SubtitleDisplay import SubtitleDisplay from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive from Screens.TimeDateInput import TimeDateInput from Screens.UnhandledKey import UnhandledKey from ServiceReference import ServiceReference, isPlayableForCur from Tools import Notifications, ASCIItranslit from Tools.Directories import fileExists, getRecordingFilename, moveFiles from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, \ iPlayableService, eServiceReference, eEPGCache, eActionMap from time import time, localtime, strftime from os import stat as os_stat from os import rename as os_rename import os from bisect import insort from sys import maxint from 
RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath # hack alert! from Menu import MainMenu, mdom def isStandardInfoBar(self): return self.__class__.__name__ == "InfoBar" def setResumePoint(session): global resumePointCache, resumePointCacheLast service = session.nav.getCurrentService() ref = session.nav.getCurrentlyPlayingServiceOrGroup() if (service is not None) and (ref is not None): # and (ref.type != 1): # ref type 1 has its own memory... seek = service.seek() if seek: pos = seek.getPlayPosition() if not pos[0]: key = ref.toString() lru = int(time()) l = seek.getLength() if l: l = l[1] else: l = None resumePointCache[key] = [lru, pos[1], l] if len(resumePointCache) > 50: candidate = key for k,v in resumePointCache.items(): if v[0] < lru: candidate = k del resumePointCache[candidate] if lru - resumePointCacheLast > 3600: saveResumePoints() def delResumePoint(ref): global resumePointCache, resumePointCacheLast try: del resumePointCache[ref.toString()] except KeyError: pass if int(time()) - resumePointCacheLast > 3600: saveResumePoints() def getResumePoint(session): global resumePointCache ref = session.nav.getCurrentlyPlayingServiceOrGroup() if (ref is not None) and (ref.type != 1): try: entry = resumePointCache[ref.toString()] entry[0] = int(time()) # update LRU timestamp return entry[1] except KeyError: return None def saveResumePoints(): global resumePointCache, resumePointCacheLast import cPickle try: f = open('/home/root/resumepoints.pkl', 'wb') cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL) except Exception, ex: print "[InfoBar] Failed to write resumepoints:", ex resumePointCacheLast = int(time()) def loadResumePoints(): import cPickle try: return cPickle.load(open('/home/root/resumepoints.pkl', 'rb')) except Exception, ex: print "[InfoBar] Failed to load resumepoints:", ex return {} resumePointCache = loadResumePoints() resumePointCacheLast = int(time()) class InfoBarDish: def __init__(self): self.dishDialog = 
self.session.instantiateDialog(Dish) class InfoBarUnhandledKey: def __init__(self): self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey) self.hideUnhandledKeySymbolTimer = eTimer() self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide) self.checkUnusedTimer = eTimer() self.checkUnusedTimer.callback.append(self.checkUnused) self.onLayoutFinish.append(self.unhandledKeyDialog.hide) eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio self.flags = (1<<1) self.uflags = 0 #this function is called on every keypress! def actionA(self, key, flag): self.unhandledKeyDialog.hide() if flag != 4: if self.flags & (1<<1): self.flags = self.uflags = 0 self.flags |= (1<<flag) if flag == 1: # break self.checkUnusedTimer.start(0, True) return 0 #this function is only called when no other action has handled this key def actionB(self, key, flag): if flag != 4: self.uflags |= (1<<flag) def checkUnused(self): if self.flags == self.uflags: self.unhandledKeyDialog.show() self.hideUnhandledKeySymbolTimer.start(2000, True) class InfoBarScreenSaver: def __init__(self): self.onExecBegin.append(self.__onExecBegin) self.onExecEnd.append(self.__onExecEnd) self.screenSaverTimer = eTimer() self.screenSaverTimer.callback.append(self.screensaverTimeout) self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver) self.onLayoutFinish.append(self.__layoutFinished) def __layoutFinished(self): self.screensaver.hide() def __onExecBegin(self): self.ScreenSaverTimerStart() def __onExecEnd(self): if self.screensaver.shown: self.screensaver.hide() eActionMap.getInstance().unbindAction('', self.keypressScreenSaver) self.screenSaverTimer.stop() def ScreenSaverTimerStart(self): time = int(config.usage.screen_saver.value) flag = self.seekstate[0] if not flag: ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() if ref: ref = ref.toString().split(":") 
flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS if time and flag: self.screenSaverTimer.startLongTimer(time) else: self.screenSaverTimer.stop() def screensaverTimeout(self): if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop: self.hide() if hasattr(self, "pvrStateDialog"): self.pvrStateDialog.hide() self.screensaver.show() eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver) def keypressScreenSaver(self, key, flag): if flag: self.screensaver.hide() self.show() self.ScreenSaverTimerStart() eActionMap.getInstance().unbindAction('', self.keypressScreenSaver) class SecondInfoBar(Screen): def __init__(self, session): Screen.__init__(self, session) self.skin = None class InfoBarShowHide(InfoBarScreenSaver): """ InfoBar show/hide control, accepts toggleShow and hide actions, might start fancy animations. """ STATE_HIDDEN = 0 STATE_HIDING = 1 STATE_SHOWING = 2 STATE_SHOWN = 3 def __init__(self): self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] , { "toggleShow": self.toggleShow, "hide": self.keyHide, }, 1) # lower prio to make it possible to override ok and cancel.. 
self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evStart: self.serviceStarted, }) InfoBarScreenSaver.__init__(self) self.__state = self.STATE_SHOWN self.__locked = 0 self.hideTimer = eTimer() self.hideTimer.callback.append(self.doTimerHide) self.hideTimer.start(5000, True) self.onShow.append(self.__onShow) self.onHide.append(self.__onHide) self.onShowHideNotifiers = [] self.secondInfoBarScreen = "" if isStandardInfoBar(self): self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar) self.secondInfoBarScreen.show() self.onLayoutFinish.append(self.__layoutFinished) def __layoutFinished(self): if self.secondInfoBarScreen: self.secondInfoBarScreen.hide() def __onShow(self): self.__state = self.STATE_SHOWN for x in self.onShowHideNotifiers: x(True) self.startHideTimer() def __onHide(self): self.__state = self.STATE_HIDDEN if self.secondInfoBarScreen: self.secondInfoBarScreen.hide() for x in self.onShowHideNotifiers: x(False) def keyHide(self): if self.__state == self.STATE_SHOWN: self.hide() elif self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value: if config.usage.pip_hideOnExit.value == "popup": self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True) else: self.hidePipOnExitCallback(True) def hidePipOnExitCallback(self, answer): if answer == True: self.showPiP() def connectShowHideNotifier(self, fnc): if not fnc in self.onShowHideNotifiers: self.onShowHideNotifiers.append(fnc) def disconnectShowHideNotifier(self, fnc): if fnc in self.onShowHideNotifiers: self.onShowHideNotifiers.remove(fnc) def serviceStarted(self): if self.execing: if config.usage.show_infobar_on_zap.value: self.doShow() def startHideTimer(self): if self.__state == self.STATE_SHOWN and not self.__locked: self.hideTimer.stop() if self.secondInfoBarScreen and self.secondInfoBarScreen.shown: idx = config.usage.show_second_infobar.index - 1 else: idx = 
config.usage.infobar_timeout.index if idx: self.hideTimer.start(idx*1000, True) def doShow(self): self.show() self.startHideTimer() def doTimerHide(self): self.hideTimer.stop() if self.__state == self.STATE_SHOWN: self.hide() def toggleShow(self): if self.__state == self.STATE_HIDDEN: self.show() if self.secondInfoBarScreen: self.secondInfoBarScreen.hide() elif isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG": self.showDefaultEPG() elif self.secondInfoBarScreen and config.usage.show_second_infobar.value and not self.secondInfoBarScreen.shown: self.secondInfoBarScreen.show() self.startHideTimer() else: self.hide() self.hideTimer.stop() def lockShow(self): self.__locked = self.__locked + 1 if self.execing: self.show() self.hideTimer.stop() def unlockShow(self): self.__locked = self.__locked - 1 if self.execing: self.startHideTimer() # def startShow(self): # self.instance.m_animation.startMoveAnimation(ePoint(0, 600), ePoint(0, 380), 100) # self.__state = self.STATE_SHOWN # # def startHide(self): # self.instance.m_animation.startMoveAnimation(ePoint(0, 380), ePoint(0, 600), 100) # self.__state = self.STATE_HIDDEN class NumberZap(Screen): def quit(self): self.Timer.stop() self.close() def keyOK(self): self.Timer.stop() self.close(self.service, self.bouquet) def handleServiceName(self): if self.searchNumber: self.service, self.bouquet = self.searchNumber(int(self["number"].getText())) self ["servicename"].text = ServiceReference(self.service).getServiceName() if not self.startBouquet: self.startBouquet = self.bouquet def keyBlue(self): self.Timer.start(3000, True) if self.searchNumber: if self.startBouquet == self.bouquet: self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True) else: self.service, self.bouquet = self.searchNumber(int(self["number"].getText())) self ["servicename"].text = ServiceReference(self.service).getServiceName() def keyNumberGlobal(self, number): self.Timer.start(1000, True) 
self.field = self.field + str(number) self["number"].setText(self.field) self.handleServiceName() if len(self.field) >= 5: self.keyOK() def __init__(self, session, number, searchNumberFunction = None): Screen.__init__(self, session) self.field = str(number) self.searchNumber = searchNumberFunction self.startBouquet = None self["channel"] = Label(_("Channel:")) self["number"] = Label(self.field) self["servicename"] = Label() self.handleServiceName() self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ], { "cancel": self.quit, "ok": self.keyOK, "blue": self.keyBlue, "1": self.keyNumberGlobal, "2": self.keyNumberGlobal, "3": self.keyNumberGlobal, "4": self.keyNumberGlobal, "5": self.keyNumberGlobal, "6": self.keyNumberGlobal, "7": self.keyNumberGlobal, "8": self.keyNumberGlobal, "9": self.keyNumberGlobal, "0": self.keyNumberGlobal }) self.Timer = eTimer() self.Timer.callback.append(self.keyOK) self.Timer.start(3000, True) class InfoBarNumberZap: """ Handles an initial number for NumberZapping """ def __init__(self): self["NumberActions"] = NumberActionMap( [ "NumberActions"], { "1": self.keyNumberGlobal, "2": self.keyNumberGlobal, "3": self.keyNumberGlobal, "4": self.keyNumberGlobal, "5": self.keyNumberGlobal, "6": self.keyNumberGlobal, "7": self.keyNumberGlobal, "8": self.keyNumberGlobal, "9": self.keyNumberGlobal, "0": self.keyNumberGlobal, }) def keyNumberGlobal(self, number): if number == 0: if isinstance(self, InfoBarPiP) and self.pipHandles0Action(): self.pipDoHandle0Action() else: self.servicelist.recallPrevService() else: if self.has_key("TimeshiftActions") and self.timeshiftEnabled(): ts = self.getTimeshift() if ts and ts.isTimeshiftActive(): return self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber) def numberEntered(self, service = None, bouquet = None): if service: self.selectAndStartService(service, bouquet) def searchNumberHelper(self, serviceHandler, num, bouquet): servicelist = 
serviceHandler.list(bouquet) if servicelist: serviceIterator = servicelist.getNext() while serviceIterator.valid(): if num == serviceIterator.getChannelNum(): return serviceIterator serviceIterator = servicelist.getNext() return None def searchNumber(self, number, firstBouquetOnly = False): bouquet = self.servicelist.getRoot() service = None serviceHandler = eServiceCenter.getInstance() if not firstBouquetOnly: service = self.searchNumberHelper(serviceHandler, number, bouquet) if config.usage.multibouquet.value and not service: bouquet = self.servicelist.bouquet_root bouquetlist = serviceHandler.list(bouquet) if bouquetlist: bouquet = bouquetlist.getNext() while bouquet.valid(): if bouquet.flags & eServiceReference.isDirectory: service = self.searchNumberHelper(serviceHandler, number, bouquet) if service: playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker) if not playable: service = None break if config.usage.alternative_number_mode.value or firstBouquetOnly: break bouquet = bouquetlist.getNext() return service, bouquet def selectAndStartService(self, service, bouquet): if service: if self.servicelist.getRoot() != bouquet: #already in correct bouquet? 
self.servicelist.clearPath() if self.servicelist.bouquet_root != bouquet: self.servicelist.enterPath(self.servicelist.bouquet_root) self.servicelist.enterPath(bouquet) self.servicelist.setCurrentSelection(service) #select the service in servicelist self.servicelist.zap(enable_pipzap = True) self.servicelist.correctChannelNumber() self.servicelist.startRoot = None def zapToNumber(self, number): service, bouquet = self.searchNumber(number) self.selectAndStartService(service, bouquet) config.misc.initialchannelselection = ConfigBoolean(default = True) class InfoBarChannelSelection: """ ChannelSelection - handles the channelSelection dialog and the initial channelChange actions which open the channelSelection dialog """ def __init__(self): #instantiate forever self.servicelist = self.session.instantiateDialog(ChannelSelection) if config.misc.initialchannelselection.value: self.onShown.append(self.firstRun) self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection", { "switchChannelUp": (self.switchChannelUp, _("Open service list and select previous channel")), "switchChannelDown": (self.switchChannelDown, _("Open service list and select next channel")), "zapUp": (self.zapUp, _("Switch to previous channel")), "zapDown": (self.zapDown, _("Switch next channel")), "historyBack": (self.historyBack, _("Switch to previous channel in history")), "historyNext": (self.historyNext, _("Switch to next channel in history")), "openServiceList": (self.openServiceList, _("Open service list")), }) def showTvChannelList(self, zap=False): self.servicelist.setModeTv() if zap: self.servicelist.zap() def showRadioChannelList(self, zap=False): self.servicelist.setModeRadio() if zap: self.servicelist.zap() def firstRun(self): self.onShown.remove(self.firstRun) config.misc.initialchannelselection.value = False config.misc.initialchannelselection.save() self.switchChannelDown() def historyBack(self): self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback) def 
historyBackCheckTimeshiftCallback(self, answer): if answer: self.servicelist.historyBack() def historyNext(self): self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback) def historyNextCheckTimeshiftCallback(self, answer): if answer: self.servicelist.historyNext() def switchChannelUp(self): if "keep" not in config.usage.servicelist_cursor_behavior.value: self.servicelist.moveUp() self.session.execDialog(self.servicelist) def switchChannelDown(self): if "keep" not in config.usage.servicelist_cursor_behavior.value: self.servicelist.moveDown() self.session.execDialog(self.servicelist) def openServiceList(self): self.session.execDialog(self.servicelist) def zapUp(self): if self.servicelist.inBouquet(): prev = self.servicelist.getCurrentSelection() if prev: prev = prev.toString() while True: if config.usage.quickzap_bouquet_change.value: if self.servicelist.atBegin(): self.servicelist.prevBouquet() self.servicelist.moveUp() cur = self.servicelist.getCurrentSelection() if cur: if self.servicelist.dopipzap: isPlayable = self.session.pip.isPlayableForPipService(cur) else: isPlayable = isPlayableForCur(cur) if cur and (cur.toString() == prev or isPlayable): break else: self.servicelist.moveUp() self.servicelist.zap(enable_pipzap = True) def zapDown(self): if self.servicelist.inBouquet(): prev = self.servicelist.getCurrentSelection() if prev: prev = prev.toString() while True: if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd(): self.servicelist.nextBouquet() else: self.servicelist.moveDown() cur = self.servicelist.getCurrentSelection() if cur: if self.servicelist.dopipzap: isPlayable = self.session.pip.isPlayableForPipService(cur) else: isPlayable = isPlayableForCur(cur) if cur and (cur.toString() == prev or isPlayable): break else: self.servicelist.moveDown() self.servicelist.zap(enable_pipzap = True) class InfoBarMenu: """ Handles a menu action, to open the (main) menu """ def __init__(self): self["MenuActions"] = HelpableActionMap(self, 
"InfobarMenuActions", { "mainMenu": (self.mainMenu, _("Enter main menu...")), }) self.session.infobar = None def mainMenu(self): print "loading mainmenu XML..." menu = mdom.getroot() assert menu.tag == "menu", "root element in menu must be 'menu'!" self.session.infobar = self # so we can access the currently active infobar from screens opened from within the mainmenu # at the moment used from the SubserviceSelection self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu) def mainMenuClosed(self, *val): self.session.infobar = None class InfoBarSimpleEventView: """ Opens the Eventview for now/next """ def __init__(self): self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions", { "showEventInfo": (self.openEventView, _("Show event details")), "showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible, }) def showEventInfoWhenNotVisible(self): if self.shown: self.openEventView() else: self.toggleShow() return 1 def openEventView(self): epglist = [ ] self.epglist = epglist service = self.session.nav.getCurrentService() ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() info = service.info() ptr=info.getEvent(0) if ptr: epglist.append(ptr) ptr=info.getEvent(1) if ptr: epglist.append(ptr) if epglist: self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback) def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying epglist = self.epglist if len(epglist) > 1: tmp = epglist[0] epglist[0] = epglist[1] epglist[1] = tmp setEvent(epglist[0]) class SimpleServicelist: def __init__(self, services): self.services = services self.length = len(services) self.current = 0 def selectService(self, service): if not self.length: self.current = -1 return False else: self.current = 0 while self.services[self.current].ref != service: self.current += 1 if self.current >= self.length: return False return True def nextService(self): if not self.length: return if self.current+1 < 
self.length: self.current += 1 else: self.current = 0 def prevService(self): if not self.length: return if self.current-1 > -1: self.current -= 1 else: self.current = self.length - 1 def currentService(self): if not self.length or self.current >= self.length: return None return self.services[self.current] class InfoBarEPG: """ EPG - Opens an EPG list when the showEPGList action fires """ def __init__(self): self.is_now_next = False self.dlg_stack = [ ] self.bouquetSel = None self.eventView = None self.epglist = [] self.defaultEPGType = self.getDefaultEPGtype() self.defaultGuideType = self.getDefaultGuidetype() self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged, }) self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions", { "showEventInfo": (self.showDefaultEPG, _("Show EPG...")), "showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")), "showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")), "showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")), "showEventGuidePlugin": (self.showEventGuidePlugins, _("List EPG functions...")), "showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible, }) def getEPGPluginList(self): pluginlist = [(p.name, boundFunction(self.runPlugin, p)) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO)] if pluginlist: pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG)) pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG)) pluginlist.append((_("Current event EPG"), self.openEventView)) return pluginlist def getDefaultEPGtype(self): pluginlist = self.getEPGPluginList() config.usage.defaultEPGType=ConfigSelection(default = "None", choices = pluginlist) for plugin in pluginlist: if plugin[0] == config.usage.defaultEPGType.value: return plugin[1] return None def getDefaultGuidetype(self): pluginlist = self.getEPGPluginList() 
config.usage.defaultGuideType=ConfigSelection(default = "None", choices = pluginlist) for plugin in pluginlist: if plugin[0] == config.usage.defaultGuideType.value: return plugin[1] return None def showEventInfoWhenNotVisible(self): if self.shown: self.openEventView() else: self.toggleShow() return 1 def zapToService(self, service, preview = False, zapback = False): if self.servicelist.startServiceRef is None: self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup() if service is not None: if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet? self.servicelist.clearPath() if self.servicelist.bouquet_root != self.epg_bouquet: self.servicelist.enterPath(self.servicelist.bouquet_root) self.servicelist.enterPath(self.epg_bouquet) self.servicelist.setCurrentSelection(service) #select the service in servicelist if not zapback or preview: self.servicelist.zap(enable_pipzap = True) if (self.servicelist.dopipzap or zapback) and not preview: self.servicelist.zapBack() if not preview: self.servicelist.startServiceRef = None self.servicelist.startRoot = None def getBouquetServices(self, bouquet): services = [ ] servicelist = eServiceCenter.getInstance().list(bouquet) if not servicelist is None: while True: service = servicelist.getNext() if not service.valid(): #check if end of list break if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services continue services.append(ServiceReference(service)) return services def openBouquetEPG(self, bouquet, withCallback=True): services = self.getBouquetServices(bouquet) if services: self.epg_bouquet = bouquet if withCallback: self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB)) else: self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB) def changeBouquetCB(self, direction, epg): if self.bouquetSel: if direction > 
0: self.bouquetSel.down() else: self.bouquetSel.up() bouquet = self.bouquetSel.getCurrent() services = self.getBouquetServices(bouquet) if services: self.epg_bouquet = bouquet epg.setServices(services) def closed(self, ret=False): closedScreen = self.dlg_stack.pop() if self.bouquetSel and closedScreen == self.bouquetSel: self.bouquetSel = None elif self.eventView and closedScreen == self.eventView: self.eventView = None if ret: dlgs=len(self.dlg_stack) if dlgs > 0: self.dlg_stack[dlgs-1].close(dlgs > 1) def openMultiServiceEPG(self, withCallback=True): bouquets = self.servicelist.getBouquetList() if bouquets is None: cnt = 0 else: cnt = len(bouquets) if config.usage.multiepg_ask_bouquet.value: self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback) else: self.openMultiServiceEPGSilent(bouquets, cnt, withCallback) def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback): if cnt > 1: # show bouquet list if withCallback: self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True) self.dlg_stack.append(self.bouquetSel) else: self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True) elif cnt == 1: self.openBouquetEPG(bouquets[0][1], withCallback) def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback): root = self.servicelist.getRoot() rootstr = root.toCompareString() current = 0 for bouquet in bouquets: if bouquet[1].toCompareString() == rootstr: break current += 1 if current >= cnt: current = 0 if cnt > 1: # create bouquet list for bouq+/- self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root)) if cnt >= 1: self.openBouquetEPG(root, withCallback) def changeServiceCB(self, direction, epg): if self.serviceSel: if direction > 0: self.serviceSel.nextService() else: self.serviceSel.prevService() epg.setService(self.serviceSel.currentService()) def SingleServiceEPGClosed(self, 
ret=False): self.serviceSel = None def openSingleServiceEPG(self): ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() if ref: if self.servicelist.getMutableList() is not None: # bouquet in channellist current_path = self.servicelist.getRoot() services = self.getBouquetServices(current_path) self.serviceSel = SimpleServicelist(services) if self.serviceSel.selectService(ref): self.epg_bouquet = current_path self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB = self.changeServiceCB) else: self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref) else: self.session.open(EPGSelection, ref) def runPlugin(self, plugin): plugin(session = self.session, servicelist = self.servicelist) def showEventInfoPlugins(self): pluginlist = self.getEPGPluginList() if pluginlist: pluginlist.append((_("Select default EPG type..."), self.SelectDefaultInfoPlugin)) self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list = pluginlist, skin_name = "EPGExtensionsList") else: self.openSingleServiceEPG() def EventInfoPluginChosen(self, answer): if answer is not None: answer[1]() def SelectDefaultInfoPlugin(self): self.session.openWithCallback(self.DefaultInfoPluginChosen, ChoiceBox, title=_("Please select a default EPG type..."), list = self.getEPGPluginList(), skin_name = "EPGExtensionsList") def DefaultInfoPluginChosen(self, answer): if answer is not None: self.defaultEPGType = answer[1] config.usage.defaultEPGType.value = answer[0] config.usage.defaultEPGType.save() def showEventGuidePlugins(self): pluginlist = self.getEPGPluginList() if pluginlist: pluginlist.append((_("Select default EPG type..."), self.SelectDefaultGuidePlugin)) self.session.openWithCallback(self.EventGuidePluginChosen, ChoiceBox, title=_("Please choose an extension..."), list = pluginlist, skin_name = "EPGExtensionsList") else: self.openSingleServiceEPG() def 
EventGuidePluginChosen(self, answer): if answer is not None: answer[1]() def SelectDefaultGuidePlugin(self): self.session.openWithCallback(self.DefaultGuidePluginChosen, ChoiceBox, title=_("Please select a default EPG type..."), list = self.getEPGPluginList(), skin_name = "EPGExtensionsList") def DefaultGuidePluginChosen(self, answer): if answer is not None: self.defaultGuideType = answer[1] config.usage.defaultGuideType.value = answer[0] config.usage.defaultGuideType.save() def openSimilarList(self, eventid, refstr): self.session.open(EPGSelection, refstr, None, eventid) def getNowNext(self): epglist = [ ] service = self.session.nav.getCurrentService() info = service and service.info() ptr = info and info.getEvent(0) if ptr: epglist.append(ptr) ptr = info and info.getEvent(1) if ptr: epglist.append(ptr) self.epglist = epglist def __evEventInfoChanged(self): if self.is_now_next and len(self.dlg_stack) == 1: self.getNowNext() if self.eventView and self.epglist: self.eventView.setEvent(self.epglist[0]) def showDefaultEPG(self): if self.defaultEPGType is not None: self.defaultEPGType() return self.openEventView() def showSingleEPG(self): if self.defaultGuideType is not None: self.defaultGuideType() return pluginlist = self.getEPGPluginList() self.openSingleServiceEPG() def showMultiEPG(self): if self.defaultGuideType is not None: self.defaultGuideType() return pluginlist = self.getEPGPluginList() self.openMultiServiceEPG() def openEventView(self): ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() self.getNowNext() epglist = self.epglist if not epglist: self.is_now_next = False epg = eEPGCache.getInstance() ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1) if ptr: epglist.append(ptr) ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1) if ptr: epglist.append(ptr) else: self.is_now_next = True if epglist: self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, 
self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList) self.dlg_stack.append(self.eventView) else: print "no epg for the service avail.. so we show multiepg instead of eventinfo" self.openMultiServiceEPG(False) def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying epglist = self.epglist if len(epglist) > 1: tmp = epglist[0] epglist[0]=epglist[1] epglist[1]=tmp setEvent(epglist[0]) class InfoBarRdsDecoder: """provides RDS and Rass support/display""" def __init__(self): self.rds_display = self.session.instantiateDialog(RdsInfoDisplay) self.session.instantiateSummaryDialog(self.rds_display) self.rass_interactive = None self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evEnd: self.__serviceStopped, iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged }) self["RdsActions"] = ActionMap(["InfobarRdsActions"], { "startRassInteractive": self.startRassInteractive },-1) self["RdsActions"].setEnabled(False) self.onLayoutFinish.append(self.rds_display.show) self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged) def RassInteractivePossibilityChanged(self, state): self["RdsActions"].setEnabled(state) def RassSlidePicChanged(self): if not self.rass_interactive: service = self.session.nav.getCurrentService() decoder = service and service.rdsDecoder() if decoder: decoder.showRassSlidePicture() def __serviceStopped(self): if self.rass_interactive is not None: rass_interactive = self.rass_interactive self.rass_interactive = None rass_interactive.close() def startRassInteractive(self): self.rds_display.hide() self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive) def RassInteractiveClosed(self, *val): if self.rass_interactive is not None: self.rass_interactive = None self.RassSlidePicChanged() self.rds_display.show() class InfoBarSeek: """handles actions like seeking, pause""" SEEK_STATE_PLAY = (0, 0, 
0, ">") SEEK_STATE_PAUSE = (1, 0, 0, "||") SEEK_STATE_EOF = (1, 0, 0, "END") def __init__(self, actionmap = "InfobarSeekActions"): self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged, iPlayableService.evStart: self.__serviceStarted, iPlayableService.evEOF: self.__evEOF, iPlayableService.evSOF: self.__evSOF, }) self.fast_winding_hint_message_showed = False class InfoBarSeekActionMap(HelpableActionMap): def __init__(self, screen, *args, **kwargs): HelpableActionMap.__init__(self, screen, *args, **kwargs) self.screen = screen def action(self, contexts, action): print "action:", action if action[:5] == "seek:": time = int(action[5:]) self.screen.doSeekRelative(time * 90000) return 1 elif action[:8] == "seekdef:": key = int(action[8:]) time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value, -config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value, -config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1] self.screen.doSeekRelative(time * 90000) return 1 else: return HelpableActionMap.action(self, contexts, action) self["SeekActions"] = InfoBarSeekActionMap(self, actionmap, { "playpauseService": self.playpauseService, "pauseService": (self.pauseService, _("Pause playback")), "unPauseService": (self.unPauseService, _("Continue playback")), "seekFwd": (self.seekFwd, _("Seek forward")), "seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")), "seekBack": (self.seekBack, _("Seek backward")), "seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")), "jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")), "jumpNextMark": (self.seekNextMark, _("Jump to next marked position")), }, prio=-1) # give them a little more priority to win over color buttons self["SeekActions"].setEnabled(False) self.seekstate = self.SEEK_STATE_PLAY self.lastseekstate = self.SEEK_STATE_PLAY 
		# Callbacks invoked with the new seek-state tuple whenever playback
		# state changes (see setSeekState below).
		self.onPlayStateChanged = [ ]

		self.lockedBecauseOfSkipping = False

		self.__seekableStatusChanged()

	# A seek state is a 4-tuple: (paused, fast-forward multiplier,
	# slow-motion divisor, display string). See SEEK_STATE_PLAY/_PAUSE/_EOF.

	def makeStateForward(self, n):
		"""Return the seek-state tuple for fast-forward at n x speed."""
		return (0, n, 0, ">> %dx" % n)

	def makeStateBackward(self, n):
		"""Return the seek-state tuple for rewind at n x speed (negative multiplier)."""
		return (0, -n, 0, "<< %dx" % n)

	def makeStateSlowMotion(self, n):
		"""Return the seek-state tuple for slow motion at 1/n speed."""
		return (0, 0, n, "/%d" % n)

	def isStateForward(self, state):
		# Multiplier > 1 means fast forward (1 is normal play).
		return state[1] > 1

	def isStateBackward(self, state):
		# Negative multiplier means rewind.
		return state[1] < 0

	def isStateSlowMotion(self, state):
		# No ff/rewind multiplier but a slow-motion divisor set.
		return state[1] == 0 and state[2] > 1

	def getHigher(self, n, lst):
		"""Return the first element of lst greater than n, or False if none.

		Assumes lst is sorted ascending (speed lists from config are).
		"""
		for x in lst:
			if x > n:
				return x
		return False

	def getLower(self, n, lst):
		"""Return the largest element of lst smaller than n, or False if none.

		Works on a reversed copy so the caller's list is not mutated.
		"""
		lst = lst[:]
		lst.reverse()
		for x in lst:
			if x < n:
				return x
		return False

	def showAfterSeek(self):
		# Pop up the infobar after a skip, if this screen mixes in show/hide.
		if isinstance(self, InfoBarShowHide):
			self.doShow()

	def up(self):
		pass

	def down(self):
		pass

	def getSeek(self):
		"""Return the current service's seek interface, or None if not seekable."""
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		seek = service.seek()
		if seek is None or not seek.isCurrentlySeekable():
			return None
		return seek

	def isSeekable(self):
		# A standard (live TV) infobar is only seekable while timeshift runs.
		if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
			return False
		return True

	def __seekableStatusChanged(self):
#		print "seekable status changed!"
		if not self.isSeekable():
			self["SeekActions"].setEnabled(False)
#			print "not seekable, return to play"
			# Force normal playback when the service stops being seekable.
			self.setSeekState(self.SEEK_STATE_PLAY)
		else:
			self["SeekActions"].setEnabled(True)
#			print "seekable"

	def __serviceStarted(self):
		# New service: reset the winding hint and start in normal playback.
		self.fast_winding_hint_message_showed = False
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.__seekableStatusChanged()

	def setSeekState(self, state):
		"""Apply a seek-state tuple to the current service.

		Downgrades the request to plain play when the service is not
		seekable, then drives the service's pause/ff/slow-motion
		interface to match. Returns False if there is no service,
		True otherwise. (Continues below this chunk boundary.)
		"""
		service = self.session.nav.getCurrentService()

		if service is None:
			return False

		if not self.isSeekable():
			if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
				state = self.SEEK_STATE_PLAY

		pauseable = service.pause()

		if pauseable is None:
			print "not pauseable."
			# Service cannot pause at all: fall back to plain play.
			state = self.SEEK_STATE_PLAY

		self.seekstate = state

		if pauseable is not None:
			# Drive the service according to the state tuple:
			# (paused, ff-multiplier, slowmotion-divisor, label).
			if self.seekstate[0]:
				print "resolved to PAUSE"
				pauseable.pause()
			elif self.seekstate[1]:
				# setFastForward returns non-zero on failure -> revert to play.
				if not pauseable.setFastForward(self.seekstate[1]):
					print "resolved to FAST FORWARD"
				else:
					self.seekstate = self.SEEK_STATE_PLAY
					print "FAST FORWARD not possible: resolved to PLAY"
			elif self.seekstate[2]:
				# setSlowMotion returns non-zero on failure -> revert to pause.
				if not pauseable.setSlowMotion(self.seekstate[2]):
					print "resolved to SLOW MOTION"
				else:
					self.seekstate = self.SEEK_STATE_PAUSE
					print "SLOW MOTION not possible: resolved to PAUSE"
			else:
				print "resolved to PLAY"
				pauseable.unpause()

		# Notify listeners (e.g. the PVR state dialog) of the new state.
		for c in self.onPlayStateChanged:
			c(self.seekstate)

		self.checkSkipShowHideLock()
		if hasattr(self, "ScreenSaverTimerStart"):
			self.ScreenSaverTimerStart()
		return True

	def playpauseService(self):
		"""Toggle between pause and normal playback."""
		if self.seekstate != self.SEEK_STATE_PLAY:
			self.unPauseService()
		else:
			self.pauseService()

	def pauseService(self):
		"""Pause playback; when already paused, act per config.seek.on_pause."""
		if self.seekstate == self.SEEK_STATE_PAUSE:
			if config.seek.on_pause.value == "play":
				self.unPauseService()
			elif config.seek.on_pause.value == "step":
				# Single-frame step forward.
				self.doSeekRelative(1)
			elif config.seek.on_pause.value == "last":
				# Resume whatever trick-mode was active before pausing.
				self.setSeekState(self.lastseekstate)
				self.lastseekstate = self.SEEK_STATE_PLAY
		else:
			if self.seekstate != self.SEEK_STATE_EOF:
				self.lastseekstate = self.seekstate
			self.setSeekState(self.SEEK_STATE_PAUSE)

	def unPauseService(self):
		"""Resume normal playback; returns 0 (unhandled) if already playing."""
		print "unpause"
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		self.setSeekState(self.SEEK_STATE_PLAY)

	def doSeek(self, pts):
		"""Seek to an absolute position given in PTS ticks (90000 per second)."""
		seekable = self.getSeek()
		if seekable is None:
			return
		seekable.seekTo(pts)

	def doSeekRelative(self, pts):
		"""Seek relative by pts ticks (negative = backwards)."""
		seekable = self.getSeek()
		if seekable is None:
			return
		prevstate = self.seekstate

		if self.seekstate == self.SEEK_STATE_EOF:
			# NOTE(review): prevstate was captured from self.seekstate above,
			# so inside this branch prevstate == SEEK_STATE_EOF and the
			# PAUSE comparison can never be true — verify intended behavior.
			if prevstate == self.SEEK_STATE_PAUSE:
				self.setSeekState(self.SEEK_STATE_PAUSE)
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		# Direction is the sign of pts; magnitude passed separately.
		seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
		if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
			self.showAfterSeek()

	def seekFwd(self):
seek = self.getSeek() if seek and not (seek.isCurrentlySeekable() & 2): if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1): self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10) self.fast_winding_hint_message_showed = True return return 0 # trade as unhandled action if self.seekstate == self.SEEK_STATE_PLAY: self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value))) elif self.seekstate == self.SEEK_STATE_PAUSE: if len(config.seek.speeds_slowmotion.value): self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1])) else: self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value))) elif self.seekstate == self.SEEK_STATE_EOF: pass elif self.isStateForward(self.seekstate): speed = self.seekstate[1] if self.seekstate[2]: speed /= self.seekstate[2] speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1] self.setSeekState(self.makeStateForward(speed)) elif self.isStateBackward(self.seekstate): speed = -self.seekstate[1] if self.seekstate[2]: speed /= self.seekstate[2] speed = self.getLower(speed, config.seek.speeds_backward.value) if speed: self.setSeekState(self.makeStateBackward(speed)) else: self.setSeekState(self.SEEK_STATE_PLAY) elif self.isStateSlowMotion(self.seekstate): speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0] self.setSeekState(self.makeStateSlowMotion(speed)) def seekBack(self): seek = self.getSeek() if seek and not (seek.isCurrentlySeekable() & 2): if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1): self.session.open(MessageBox, _("No fast winding possible yet.. 
but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10) self.fast_winding_hint_message_showed = True return return 0 # trade as unhandled action seekstate = self.seekstate if seekstate == self.SEEK_STATE_PLAY: self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value))) elif seekstate == self.SEEK_STATE_EOF: self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value))) self.doSeekRelative(-6) elif seekstate == self.SEEK_STATE_PAUSE: self.doSeekRelative(-1) elif self.isStateForward(seekstate): speed = seekstate[1] if seekstate[2]: speed /= seekstate[2] speed = self.getLower(speed, config.seek.speeds_forward.value) if speed: self.setSeekState(self.makeStateForward(speed)) else: self.setSeekState(self.SEEK_STATE_PLAY) elif self.isStateBackward(seekstate): speed = -seekstate[1] if seekstate[2]: speed /= seekstate[2] speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1] self.setSeekState(self.makeStateBackward(speed)) elif self.isStateSlowMotion(seekstate): speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value) if speed: self.setSeekState(self.makeStateSlowMotion(speed)) else: self.setSeekState(self.SEEK_STATE_PAUSE) def seekFwdManual(self): self.session.openWithCallback(self.fwdSeekTo, MinuteInput) def fwdSeekTo(self, minutes): print "Seek", minutes, "minutes forward" self.doSeekRelative(minutes * 60 * 90000) def seekBackManual(self): self.session.openWithCallback(self.rwdSeekTo, MinuteInput) def rwdSeekTo(self, minutes): print "rwdSeekTo" self.doSeekRelative(-minutes * 60 * 90000) def checkSkipShowHideLock(self): wantlock = self.seekstate != self.SEEK_STATE_PLAY if config.usage.show_infobar_on_skip.value: if self.lockedBecauseOfSkipping and not wantlock: self.unlockShow() self.lockedBecauseOfSkipping = False if wantlock and not self.lockedBecauseOfSkipping: self.lockShow() self.lockedBecauseOfSkipping = True def 
calcRemainingTime(self): seekable = self.getSeek() if seekable is not None: len = seekable.getLength() try: tmp = self.cueGetEndCutPosition() if tmp: len = (False, tmp) except: pass pos = seekable.getPlayPosition() speednom = self.seekstate[1] or 1 speedden = self.seekstate[2] or 1 if not len[0] and not pos[0]: if len[1] <= pos[1]: return 0 time = (len[1] - pos[1])*speedden/(90*speednom) return time return False def __evEOF(self): if self.seekstate == self.SEEK_STATE_EOF: return # if we are seeking forward, we try to end up ~1s before the end, and pause there. seekstate = self.seekstate if self.seekstate != self.SEEK_STATE_PAUSE: self.setSeekState(self.SEEK_STATE_EOF) if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking seekable = self.getSeek() if seekable is not None: seekable.seekTo(-1) if seekstate == self.SEEK_STATE_PLAY: # regular EOF self.doEofInternal(True) else: self.doEofInternal(False) def doEofInternal(self, playing): pass # Defined in subclasses def __evSOF(self): self.setSeekState(self.SEEK_STATE_PLAY) self.doSeek(0) # This is needed, because some Mediaplayer use InfoBarSeek but not InfoBarCueSheetSupport def seekPreviousMark(self): if isinstance(self, InfoBarCueSheetSupport): self.jumpPreviousMark() def seekNextMark(self): if isinstance(self, InfoBarCueSheetSupport): self.jumpNextMark() from Screens.PVRState import PVRState, TimeshiftState class InfoBarPVRState: def __init__(self, screen=PVRState, force_show = False): self.onPlayStateChanged.append(self.__playStateChanged) self.pvrStateDialog = self.session.instantiateDialog(screen) self.onShow.append(self._mayShow) self.onHide.append(self.pvrStateDialog.hide) self.force_show = force_show def _mayShow(self): if self.shown and self.seekstate != self.SEEK_STATE_PLAY: self.pvrStateDialog.show() def __playStateChanged(self, state): playstateString = state[3] self.pvrStateDialog["state"].setText(playstateString) # if we return into "PLAY" state, ensure that the dialog 
gets hidden if there will be no infobar displayed if not config.usage.show_infobar_on_skip.value and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show: self.pvrStateDialog.hide() else: self._mayShow() class InfoBarTimeshiftState(InfoBarPVRState): def __init__(self): InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True) self.__hideTimer = eTimer() self.__hideTimer.callback.append(self.__hideTimeshiftState) def _mayShow(self): if self.shown and self.timeshiftEnabled(): self.pvrStateDialog.show() if self.seekstate == self.SEEK_STATE_PLAY and not self.shown: self.__hideTimer.start(5*1000, True) def __hideTimeshiftState(self): self.pvrStateDialog.hide() class InfoBarShowMovies: # i don't really like this class. # it calls a not further specified "movie list" on up/down/movieList, # so this is not more than an action map def __init__(self): self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions", { "movieList": (self.showMovies, _("Open the movie list")), "up": (self.up, _("Open the movie list")), "down": (self.down, _("Open the movie list")) }) # InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE! # Hrmf. # # Timeshift works the following way: # demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions" # - normal playback TUNER unused PLAY enable disable disable # - user presses "yellow" button. FILE record PAUSE enable disable enable # - user presess pause again FILE record PLAY enable disable enable # - user fast forwards FILE record FF enable disable enable # - end of timeshift buffer reached TUNER record PLAY enable enable disable # - user backwards FILE record BACK # !! 
enable disable enable # # in other words: # - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"), # freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift") # now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled. # - the user can now PVR around # - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled") # the service looses it's "seekable" state. It can still be paused, but just to activate timeshift right # after! # the seek actions will be disabled, but the timeshiftActivateActions will be enabled # - if the user rewinds, or press pause, timeshift will be activated again # note that a timeshift can be enabled ("recording") and # activated (currently time-shifting). class InfoBarTimeshift: def __init__(self): self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions", { "timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key" "timeshiftStop": (self.stopTimeshift, _("Stop timeshift")) # currently undefined :), probably 'TV' }, prio=1) self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"], { "timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key" "timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key" }, prio=-1) # priority over record self["TimeshiftActivateActions"].setEnabled(False) self.ts_rewind_timer = eTimer() self.ts_rewind_timer.callback.append(self.rewindService) self.ts_start_delay_timer = eTimer() self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause) self.save_timeshift_file = False self.timeshift_was_activated = False self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evStart: self.__serviceStarted, iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged, 
iPlayableService.evEnd: self.__serviceEnd }) def getTimeshift(self): service = self.session.nav.getCurrentService() return service and service.timeshift() def timeshiftEnabled(self): ts = self.getTimeshift() return ts and ts.isTimeshiftEnabled() def startTimeshift(self, pauseService = True): print "enable timeshift" ts = self.getTimeshift() if ts is None: if not pauseService and not int(config.usage.timeshift_start_delay.value): self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True) print "no ts interface" return 0 if ts.isTimeshiftEnabled(): print "hu, timeshift already enabled?" else: if not ts.startTimeshift(): # we remove the "relative time" for now. #self.pvrStateDialog["timeshift"].setRelative(time.time()) if pauseService: # PAUSE. #self.setSeekState(self.SEEK_STATE_PAUSE) self.activateTimeshiftEnd(False) # enable the "TimeshiftEnableActions", which will override # the startTimeshift actions self.__seekableStatusChanged() # get current timeshift filename and calculate new self.save_timeshift_file = False self.save_timeshift_in_movie_dir = False self.current_timeshift_filename = ts.getTimeshiftFilename() self.new_timeshift_filename = self.generateNewTimeshiftFileName() else: print "timeshift failed" def startTimeshiftWithoutPause(self): self.startTimeshift(False) def stopTimeshift(self): ts = self.getTimeshift() if ts and ts.isTimeshiftEnabled(): if int(config.usage.timeshift_start_delay.value): ts.switchToLive() else: self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback) else: return 0 def stopTimeshiftcheckTimeshiftRunningCallback(self, answer): ts = self.getTimeshift() if answer and ts: ts.stopTimeshift() self.pvrStateDialog.hide() # disable actions self.__seekableStatusChanged() # activates timeshift, and seeks to (almost) the end def activateTimeshiftEnd(self, back = True): ts = self.getTimeshift() print "activateTimeshiftEnd" if ts is None: return if ts.isTimeshiftActive(): print "!! 
activate timeshift called - but shouldn't this be a normal pause?" self.pauseService() else: print "play, ..." ts.activateTimeshift() # activate timeshift will automatically pause self.setSeekState(self.SEEK_STATE_PAUSE) seekable = self.getSeek() if seekable is not None: seekable.seekTo(-90000) # seek approx. 1 sec before end self.timeshift_was_activated = True if back: self.ts_rewind_timer.start(200, 1) def rewindService(self): self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value))) # generates only filename without path def generateNewTimeshiftFileName(self): name = "timeshift record" info = { } self.getProgramInfoAndEvent(info, name) serviceref = info["serviceref"] service_name = "" if isinstance(serviceref, eServiceReference): service_name = ServiceReference(serviceref).getServiceName() begin_date = strftime("%Y%m%d %H%M", localtime(time())) filename = begin_date + " - " + service_name if config.recording.filename_composition.value == "short": filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"] elif config.recording.filename_composition.value == "long": filename += " - " + info["name"] + " - " + info["description"] else: filename += " - " + info["name"] # standard if config.recording.ascii_filenames.value: filename = ASCIItranslit.legacyEncode(filename) print "New timeshift filename: ", filename return filename # same as activateTimeshiftEnd, but pauses afterwards. 
def activateTimeshiftEndAndPause(self): print "activateTimeshiftEndAndPause" #state = self.seekstate self.activateTimeshiftEnd(False) def __seekableStatusChanged(self): self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled()) state = self.getSeek() is not None and self.timeshiftEnabled() self["SeekActions"].setEnabled(state) if not state: self.setSeekState(self.SEEK_STATE_PLAY) self.restartSubtitle() def __serviceStarted(self): self.pvrStateDialog.hide() self.__seekableStatusChanged() if self.ts_start_delay_timer.isActive(): self.ts_start_delay_timer.stop() if int(config.usage.timeshift_start_delay.value): self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True) def checkTimeshiftRunning(self, returnFunction): if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated: message = _("Stop timeshift?") if not self.save_timeshift_file: choice = [(_("yes"), "stop"), (_("no"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")] else: choice = [(_("yes"), "stop"), (_("no"), "continue")] message += "\n" + _("Reminder, you have chosen to save timeshift file.") self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice) else: returnFunction(True) def checkTimeshiftRunningCallback(self, returnFunction, answer): if answer: if "movie" in answer: self.save_timeshift_in_movie_dir = True if "save" in answer: self.save_timeshift_file = True ts = self.getTimeshift() if ts: ts.saveTimeshiftFile() del ts if "continue" not in answer: self.saveTimeshiftFiles() returnFunction(answer and answer != "continue") # renames/moves timeshift files if requested def __serviceEnd(self): self.saveTimeshiftFiles() self.timeshift_was_activated = False def saveTimeshiftFiles(self): if self.save_timeshift_file and self.current_timeshift_filename and 
self.new_timeshift_filename: if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir: dirname = config.usage.timeshift_path.value else: dirname = defaultMoviePath() filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts" fileList = [] fileList.append((self.current_timeshift_filename, filename)) if fileExists(self.current_timeshift_filename + ".sc"): fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc")) if fileExists(self.current_timeshift_filename + ".cuts"): fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts")) moveFiles(fileList) self.save_timeshift_file = False from Screens.PiPSetup import PiPSetup class InfoBarExtensions: EXTENSION_SINGLE = 0 EXTENSION_LIST = 1 def __init__(self): self.list = [] self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions", { "extensions": (self.showExtensionSelection, _("Show extensions...")), }, 1) # lower priority def addExtension(self, extension, key = None, type = EXTENSION_SINGLE): self.list.append((type, extension, key)) def updateExtension(self, extension, key = None): self.extensionsList.append(extension) if key is not None: if self.extensionKeys.has_key(key): key = None if key is None: for x in self.availableKeys: if not self.extensionKeys.has_key(x): key = x break if key is not None: self.extensionKeys[key] = len(self.extensionsList) - 1 def updateExtensions(self): self.extensionsList = [] self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ] self.extensionKeys = {} for x in self.list: if x[0] == self.EXTENSION_SINGLE: self.updateExtension(x[1], x[2]) else: for y in x[1](): self.updateExtension(y[0], y[1]) def showExtensionSelection(self): self.updateExtensions() extensionsList = self.extensionsList[:] keys = [] list = [] for x in self.availableKeys: if self.extensionKeys.has_key(x): entry = self.extensionKeys[x] extension = self.extensionsList[entry] if 
extension[2]():
					# Extension is currently available: show it with its hotkey.
					# NOTE(review): 'name' is computed but never used — candidate
					# for removal.
					name = str(extension[0]())
					list.append((extension[0](), extension))
					keys.append(x)
					extensionsList.remove(extension)
				else:
					# Unavailable extension: drop it from the remaining list too.
					extensionsList.remove(extension)
		# Append the extensions without an assigned hotkey.
		list.extend([(x[0](), x) for x in extensionsList])

		keys += [""] * len(extensionsList)
		self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list = list, keys = keys, skin_name = "ExtensionsList")

	def extensionCallback(self, answer):
		# answer is (title, (name-fn, action-fn, available-fn)); invoke the action.
		if answer is not None:
			answer[1][1]()

from Tools.BoundFunction import boundFunction
import inspect

# depends on InfoBarExtensions

class InfoBarPlugins:
	"""Exposes EXTENSIONSMENU plugins through the extensions list."""

	def __init__(self):
		self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)

	def getPluginName(self, name):
		return name

	def getPluginList(self):
		"""Build the extension entries for all EXTENSIONSMENU plugins.

		Plugins taking a single argument always qualify; two-argument
		plugins (session + servicelist) only when this infobar has
		channel selection. Result is sorted by plugin name.
		"""
		l = []
		for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
			args = inspect.getargspec(p.__call__)[0]
			# Precedence: len(args) == 1 or (len(args) == 2 and isinstance(...)).
			if len(args) == 1 or len(args) == 2 and isinstance(self, InfoBarChannelSelection):
				l.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
		l.sort(key = lambda e: e[2]) # sort by name
		return l

	def runPlugin(self, plugin):
		# Pass the service list only when this screen actually has one.
		if isinstance(self, InfoBarChannelSelection):
			plugin(session = self.session, servicelist = self.servicelist)
		else:
			plugin(session = self.session)

from Components.Task import job_manager

class InfoBarJobman:
	"""Lists pending background jobs in the extensions menu."""

	def __init__(self):
		self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)

	def getJobList(self):
		return [((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None) for job in job_manager.getPendingJobs()]

	def getJobName(self, job):
		# e.g. "In progress: Backup (42%)" — float() avoids integer division.
		return "%s: %s (%d%%)" % (job.getStatustext(), job.name, int(100*job.progress/float(job.end)))

	def showJobView(self, job):
		"""Open the task view for a job; imported lazily to avoid a cycle."""
		from Screens.TaskView import JobView
		job_manager.in_background = False
		self.session.openWithCallback(self.JobViewCB, JobView, job)

	def JobViewCB(self, in_background):
		# restore whatever background state the JobView was closed with
		job_manager.in_background = in_background

# depends on InfoBarExtensions
class InfoBarPiP:
	"""Picture-in-Picture handling: show/hide, swap, move and pipzap focus."""
	def __init__(self):
		try:
			self.session.pipshown
		except:
			# first InfoBar in this session: initialize the shared flag
			self.session.pipshown = False
		if SystemInfo.get("NumVideoDecoders", 1) > 1:
			# PiP needs a second video decoder
			self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
				{
					"activatePiP": (self.showPiP, _("Activate PiP")),
				})
			if (self.allowPiP):
				self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
				self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
				self.addExtension((self.getTogglePipzapName, self.togglePipzap, self.pipShown), "red")
			else:
				self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")

	def pipShown(self):
		return self.session.pipshown

	def pipHandles0Action(self):
		return self.pipShown() and config.usage.pip_zero_button.value != "standard"

	def getShowHideName(self):
		if self.session.pipshown:
			return _("Disable Picture in Picture")
		else:
			return _("Activate Picture in Picture")

	def getSwapName(self):
		return _("Swap services")

	def getMoveName(self):
		return _("Move Picture in Picture")

	def getTogglePipzapName(self):
		slist = self.servicelist
		if slist and slist.dopipzap:
			return _("Zap focus to main screen")
		return _("Zap focus to Picture in Picture")

	def togglePipzap(self):
		# switch zap focus between main screen and PiP window
		if not self.session.pipshown:
			self.showPiP()
		slist = self.servicelist
		if slist and self.session.pipshown:
			slist.togglePipzap()
			if slist.dopipzap:
				# exchange the service paths so each window keeps its own bouquet position
				currentServicePath = self.servicelist.getCurrentServicePath()
				self.servicelist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.servicePath = currentServicePath

	def showPiP(self):
		# toggle: tear down an existing PiP, otherwise create one
		if self.session.pipshown:
			slist = self.servicelist
			if slist and slist.dopipzap:
				# give zap focus back to the main screen before closing PiP
				self.togglePipzap()
			if self.session.pipshown:
				del self.session.pip
				self.session.pipshown = False
		else:
			self.session.pip = self.session.instantiateDialog(PictureInPicture)
			self.session.pip.show()
			newservice = self.session.nav.getCurrentlyPlayingServiceReference() or self.servicelist.servicelist.getCurrent()
			if self.session.pip.playService(newservice):
				self.session.pipshown = True
				self.session.pip.servicePath = self.servicelist.getCurrentServicePath()
			else:
				# tuning the PiP service failed -> roll back
				self.session.pipshown = False
				del self.session.pip

	def swapPiP(self):
		# exchange the main and PiP services (and their bouquet positions)
		swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		pipref = self.session.pip.getCurrentService()
		if swapservice and pipref and pipref.toString() != swapservice.toString():
			currentServicePath = self.servicelist.getCurrentServicePath()
			self.servicelist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
			self.session.pip.playService(swapservice)
			self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
			self.session.pip.servicePath = currentServicePath
			if self.servicelist.dopipzap:
				# This unfortunately won't work with subservices
				self.servicelist.setCurrentSelection(self.session.pip.getCurrentService())

	def movePiP(self):
		self.session.open(PiPSetup, pip = self.session.pip)

	def pipDoHandle0Action(self):
		# behaviour of the "0" key while PiP is shown, per user configuration
		use = config.usage.pip_zero_button.value
		if "swap" == use:
			self.swapPiP()
		elif "swapstop" == use:
			self.swapPiP()
			self.showPiP()
		elif "stop" == use:
			self.showPiP()

from RecordTimer import parseEvent, RecordTimerEntry

class InfoBarInstantRecord:
	"""Instant Record - handles the instantRecord action in order to start/stop instant records"""
	def __init__(self):
		self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
			{
				"instantRecord": (self.instantRecord, _("Instant recording...")),
			})
		if isStandardInfoBar(self):
			self.recording = []
		else:
			# secondary infobars (e.g. movie player) share the main InfoBar's
			# recording list so instant records are visible everywhere
			from Screens.InfoBar import InfoBar
			InfoBarInstance = InfoBar.instance
			if InfoBarInstance:
				self.recording = InfoBarInstance.recording

	def stopCurrentRecording(self, entry = -1):
		# entry: index into self.recording (TimerSelection callback); -1/None = no-op
		if entry is not None and entry != -1:
			self.session.nav.RecordTimer.removeEntry(self.recording[entry])
			self.recording.remove(self.recording[entry])

	def getProgramInfoAndEvent(self, info, name):
		# fill the mutable dict 'info' with service reference and, when
		# available, current EPG event data
		info["serviceref"] = self.session.nav.getCurrentlyPlayingServiceOrGroup()

		# try to get event info
		event = None
		try:
			service = self.session.nav.getCurrentService()
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(info["serviceref"], -1, 0)
			if event is None:
				event = service.info().getEvent(0)
		except:
			pass

		info["event"] = event
		info["name"] = name
		info["description"] = ""
		info["eventid"] = None

		if event is not None:
			curEvent = parseEvent(event)
			info["name"] = curEvent[2]
			info["description"] = curEvent[3]
			info["eventid"] = curEvent[4]
			info["end"] = curEvent[1]

	def startInstantRecording(self, limitEvent = False):
		# limitEvent: stop at the end of the current EPG event instead of
		# recording open-ended (autoincrease)
		begin = int(time())
		end = begin + 3600	# dummy
		name = "instant record"
		info = { }

		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		event = info["event"]

		if event is not None:
			if limitEvent:
				end = info["end"]
		else:
			if limitEvent:
				self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)

		if isinstance(serviceref, eServiceReference):
			serviceref = ServiceReference(serviceref)

		recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
		recording.dontSave = True

		if event is None or limitEvent == False:
			# no fixed end: keep extending the timer while no conflict arises
			recording.autoincrease = True
			recording.setAutoincreaseEnd()

		simulTimerList = self.session.nav.RecordTimer.record(recording)

		if simulTimerList is None:	# no conflict
			recording.autoincrease = False
			self.recording.append(recording)
		else:
			if len(simulTimerList) > 1: # with other recording
				name = simulTimerList[1].name
				name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
				print "[TIMER] conflicts with", name_date
				recording.autoincrease = True	# start with max available length, then increment
				if recording.setAutoincreaseEnd():
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
					self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
				else:
					self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
			else:
				self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
			recording.autoincrease = False

	def isInstantRecordRunning(self):
		print "self.recording:", self.recording
		if self.recording:
			for x in self.recording:
				if x.isRunning():
					return True
		return False

	def recordQuestionCallback(self, answer):
		# answer: ChoiceBox result from instantRecord(); answer[1] is the action key
		print "pre:\n", self.recording

		if answer is None or answer[1] == "no":
			return
		list = []
		recording = self.recording[:]
		for x in recording:
			# prune entries that are no longer in the record timer
			if not x in self.session.nav.RecordTimer.timer_list:
				self.recording.remove(x)
			elif x.dontSave and x.isRunning():
				list.append((x, False))

		if answer[1] == "changeduration":
			if len(self.recording) == 1:
				self.changeDuration(0)
			else:
				self.session.openWithCallback(self.changeDuration, TimerSelection, list)
		elif answer[1] == "changeendtime":
			if len(self.recording) == 1:
				self.setEndtime(0)
			else:
				self.session.openWithCallback(self.setEndtime, TimerSelection, list)
		elif answer[1] == "timer":
			import TimerEdit
			self.session.open(TimerEdit.TimerEditList)
		elif answer[1] == "stop":
			self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
			if answer[1] == "manualduration":
				self.changeDuration(len(self.recording)-1)
			elif answer[1] == "manualendtime":
				self.setEndtime(len(self.recording)-1)
		elif "timeshift" in answer[1]:
			ts = self.getTimeshift()
			if ts:
				ts.saveTimeshiftFile()
				self.save_timeshift_file = True
				if "movie" in answer[1]:
					self.save_timeshift_in_movie_dir = True
		print "after:\n", self.recording
	def setEndtime(self, entry):
		# entry: index into self.recording (from TimerSelection or direct call)
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
			dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
			dlg.setTitle(_("Please change recording endtime"))

	def TimeDateInputClosed(self, ret):
		# ret: (confirmed, timestamp) from TimeDateInput
		if len(ret) > 1:
			if ret[0]:
				print "stopping recording at", strftime("%F %T", localtime(ret[1]))
				if self.recording[self.selectedEntry].end != ret[1]:
					# manual end time overrides automatic extension
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = ret[1]
				self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])

	def changeDuration(self, entry):
		# entry: index into self.recording; asks for a duration in minutes
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)

	def inputCallback(self, value):
		# value: minutes entered in the InputBox (string), or None on cancel
		if value is not None:
			print "stopping recording after", int(value), "minutes."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				# a concrete duration disables automatic extension
				entry.autoincrease = False
			entry.end = int(time()) + 60 * int(value)
			self.session.nav.RecordTimer.timeChanged(entry)

	def isTimerRecordRunning(self):
		# True if any running record timer is NOT one of our instant recordings
		identical = timers = 0
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.isRunning() and not timer.justplay:
				timers += 1
				if self.recording:
					for x in self.recording:
						if x.isRunning() and x == timer:
							identical += 1
		return timers > identical

	def instantRecord(self):
		# entry point for the instantRecord key: builds and shows the ChoiceBox
		pirr = preferredInstantRecordPath()
		if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
			if not pirr:
				pirr = ""
			self.session.open(MessageBox, _("Missing ") + "\n" + pirr + "\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
			return

		if isStandardInfoBar(self):
			common = ((_("Add recording (stop after current event)"), "event"),
				(_("Add recording (indefinitely)"), "indefinitely"),
				(_("Add recording (enter recording duration)"), "manualduration"),
				(_("Add recording (enter recording endtime)"), "manualendtime"),)
		else:
			common = ()
		if self.isInstantRecordRunning():
			title =_("A recording is currently running.\nWhat do you want to do?")
			list = ((_("Stop recording"), "stop"),) + common + \
				((_("Change recording (duration)"), "changeduration"),
				(_("Change recording (endtime)"), "changeendtime"),)
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			list += ((_("Do nothing"), "no"),)
		else:
			title=_("Start recording?")
			list = common
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			if isStandardInfoBar(self):
				list += ((_("Do not record"), "no"),)
		if isStandardInfoBar(self) and self.timeshiftEnabled():
			list = list + ((_("Save timeshift file"), "timeshift"),
				(_("Save timeshift file in movie directory"), "timeshift_movie"))
		if list:
			self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
		else:
			return 0

from Tools.ISO639 import LanguageCodes

class InfoBarAudioSelection:
	"""Opens the audio track selection screen."""
	def __init__(self):
		self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
			{
				"audioSelection": (self.audioSelection, _("Audio options...")),
			})

	def audioSelection(self):
		from Screens.AudioSelection import AudioSelection
		self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)

	def audioSelected(self, ret=None):
		print "[infobar::audioSelected]", ret

class InfoBarSubserviceSelection:
	"""Selection and quick-zap between the sub services of the current service."""
	def __init__(self):
		self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
			{
				"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
			})

		self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
			{
				"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
				"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
			}, -1)
		# quickzap is only enabled once a subservice has been selected
		self["SubserviceQuickzapAction"].setEnabled(False)

		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
			})
		self.onClose.append(self.__removeNotifications)

		self.bsel = None

	def __removeNotifications(self):
		self.session.nav.event.remove(self.checkSubservicesAvail)

	def checkSubservicesAvail(self):
		# disable quickzap when the current service has no subservices
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		if not subservices or subservices.getNumberOfSubservices() == 0:
			self["SubserviceQuickzapAction"].setEnabled(False)

	def nextSubservice(self):
		self.changeSubservice(+1)

	def prevSubservice(self):
		self.changeSubservice(-1)

	def changeSubservice(self, direction):
		# direction: +1 / -1; wraps around the subservice list
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		n = subservices and subservices.getNumberOfSubservices()
		if n and n > 0:
			selection = -1
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			idx = 0
			while idx < n:
				if subservices.getSubservice(idx).toString() == ref.toString():
					selection = idx
					break
				idx += 1
			if selection != -1:
				selection += direction
				if selection >= n:
					selection=0
				elif selection < 0:
					selection=n-1
				newservice = subservices.getSubservice(selection)
				if newservice.valid():
					del subservices
					del service
					self.session.nav.playService(newservice, False)

	def subserviceSelection(self):
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		self.bouquets = self.servicelist.getBouquetList()
		n = subservices and subservices.getNumberOfSubservices()
		selection = 0
		if n and n > 0:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			tlist = []
			idx = 0
			while idx < n:
				i = subservices.getSubservice(idx)
				if i.toString() == ref.toString():
					selection = idx
				tlist.append((i.getName(), i))
				idx += 1

			if self.bouquets and len(self.bouquets):
				keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				if config.usage.multibouquet.value:
					tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				else:
					tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				# account for the extra menu rows prepended above
				selection += 3
			else:
				tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
				keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				selection += 2

			self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")

	def subserviceSelected(self, service):
		del self.bouquets
		if not service is None:
			if isinstance(service[1], str):
				if service[1] == "quickzap":
					from Screens.SubservicesQuickzap import SubservicesQuickzap
					self.session.open(SubservicesQuickzap, service[2])
			else:
				self["SubserviceQuickzapAction"].setEnabled(True)
				self.session.nav.playService(service[1], False)

	def addSubserviceToBouquetCallback(self, service):
		if len(service) > 1 and isinstance(service[1], eServiceReference):
			self.selectedSubservice = service
			if self.bouquets is None:
				cnt = 0
			else:
				cnt = len(self.bouquets)
			if cnt > 1: # show bouquet list
				self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
			elif cnt == 1: # add to only one existing bouquet
				self.addSubserviceToBouquet(self.bouquets[0][1])
				self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)

	def bouquetSelClosed(self, confirmed):
		self.bsel = None
		del self.selectedSubservice
		if confirmed:
			self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)

	def addSubserviceToBouquet(self, dest):
		self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
		if self.bsel:
			self.bsel.close(True)
		else:
			del self.selectedSubservice

class InfoBarRedButton:
	"""Dispatches the red button to HbbTV (or future red-button services)."""
	def __init__(self):
		self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
			{
				"activateRedButton": (self.activateRedButton, _("Red button...")),
			})
		self.onHBBTVActivation = [ ]
		self.onRedButtonActivation = [ ]

	def activateRedButton(self):
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
			for x in self.onHBBTVActivation:
				x()
		elif False: # TODO: other red button services
			for x in self.onRedButtonActivation:
				x()

class InfoBarTimerButton:
	"""Opens the timer list."""
	def __init__(self):
		self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
			{
				"timerSelection": (self.timerSelection, _("Timer selection...")),
			})

	def timerSelection(self):
		from Screens.TimerEdit import TimerEditList
		self.session.open(TimerEditList)

class InfoBarVmodeButton:
	"""Opens the letterbox-zoom (aspect policy) cycler."""
	def __init__(self):
		self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions",
			{
				"vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
			})

	def vmodeSelection(self):
		self.session.open(VideoMode)

class VideoMode(Screen):
	"""Small screen that cycles through the aspect policies; auto-closes after 1s idle."""
	def __init__(self,session):
		Screen.__init__(self, session)
		self["videomode"] = Label()

		self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
			{
				"vmodeSelection": self.selectVMode
			})

		self.Timer = eTimer()
		self.Timer.callback.append(self.quit)
		self.selectVMode()

	def selectVMode(self):
		# cycle to the next policy value for the current aspect (4:3 vs 16:9)
		policy = config.av.policy_43
		if self.isWideScreen():
			policy = config.av.policy_169
		idx = policy.choices.index(policy.value)
		idx = (idx + 1) % len(policy.choices)
		policy.value = policy.choices[idx]
		self["videomode"].setText(policy.value)
		# restart the auto-close timeout on every key press
		self.Timer.start(1000, True)

	def isWideScreen(self):
		from Components.Converter.ServiceInfo import WIDESCREEN
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		return info.getInfo(iServiceInformation.sAspect) in WIDESCREEN

	def quit(self):
		self.Timer.stop()
		self.close()

class InfoBarAdditionalInfo:
	"""Publishes static capability flags for the skin."""
	def __init__(self):
		self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
		self["TimeshiftPossible"] = self["RecordingPossible"]
		self["ExtensionsAvailable"] = Boolean(fixed=1)
		# TODO: these properties should be queried from the input device keymap
		self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
		self["ShowAudioOnYellow"] = Boolean(fixed=0)
		self["ShowRecordOnRed"] = Boolean(fixed=0)

class InfoBarNotifications:
	"""Pops up queued Notifications whenever this screen executes."""
	def __init__(self):
		self.onExecBegin.append(self.checkNotifications)
		Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
		self.onClose.append(self.__removeNotification)

	def __removeNotification(self):
		Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)

	def checkNotificationsIfExecing(self):
		if self.execing:
			self.checkNotifications()

	def checkNotifications(self):
		# pop the first pending notification n = (callback, screen, args, kwargs, id)
		notifications = Notifications.notifications
		if notifications:
			n = notifications[0]
			del notifications[0]
			cb = n[0]
			if n[3].has_key("onSessionOpenCallback"):
				n[3]["onSessionOpenCallback"]()
				del n[3]["onSessionOpenCallback"]
			if cb:
				dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
			elif not Notifications.current_notifications and n[4] == "ZapError":
				# ZapError is shown as a passive overlay dismissed by any key
				if n[3].has_key("timeout"):
					del n[3]["timeout"]
				n[3]["enable_input"] = False
				dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
				self.hide()
				dlg.show()
				self.notificationDialog = dlg
				eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
			else:
				dlg = self.session.open(n[1], *n[2], **n[3])

			# remember that this notification is currently active
			d = (n[4], dlg)
			Notifications.current_notifications.append(d)
			dlg.onClose.append(boundFunction(self.__notificationClosed, d))

	def closeNotificationInstantiateDialog(self):
		if hasattr(self, "notificationDialog"):
			self.session.deleteDialog(self.notificationDialog)
			del self.notificationDialog
			eActionMap.getInstance().unbindAction('', self.keypressNotification)

	def keypressNotification(self, key, flag):
		if flag:
			self.closeNotificationInstantiateDialog()

	def __notificationClosed(self, d):
		Notifications.current_notifications.remove(d)

class InfoBarServiceNotifications:
	"""Reacts to end-of-service events from the played service."""
	def __init__(self):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.serviceHasEnded
			})

	def serviceHasEnded(self):
		print "service end!"
		# best effort: not every screen mixes in seek support
		try:
			self.setSeekState(self.SEEK_STATE_PLAY)
		except:
			pass

class InfoBarCueSheetSupport:
	"""Cut-list (cue sheet) handling: marks, jump to marks, resume position."""
	# cut point types as stored in the cue sheet
	CUT_TYPE_IN = 0
	CUT_TYPE_OUT = 1
	CUT_TYPE_MARK = 2
	CUT_TYPE_LAST = 3

	ENABLE_RESUME_SUPPORT = False

	def __init__(self, actionmap = "InfobarCueSheetActions"):
		self["CueSheetActions"] = HelpableActionMap(self, actionmap,
			{
				"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
				"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
			}, prio=1)

		self.cut_list = [ ]
		self.is_closing = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evCuesheetChanged: self.downloadCuesheet,
			})

	def __serviceStarted(self):
		if self.is_closing:
			return
		print "new service started! trying to download cuts!"
		self.downloadCuesheet()

		if self.ENABLE_RESUME_SUPPORT:
			for (pts, what) in self.cut_list:
				if what == self.CUT_TYPE_LAST:
					last = pts
					break
			else:
				last = getResumePoint(self.session)
			if last is None:
				return
			# only resume if at least 10 seconds ahead, or <10 seconds before the end.
			seekable = self.__getSeekable()
			if seekable is None:
				return # Should not happen?
			length = seekable.getLength() or (None,0)
			print "seekable.getLength() returns:", length
			# Hmm, this implies we don't resume if the length is unknown...
			if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
				self.resume_point = last
				l = last / 90000
				if config.usage.on_movie_start.value == "ask" or not length[1]:
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10)
				elif config.usage.on_movie_start.value == "resume":
					# TRANSLATORS: The string "Resuming playback" flashes for a moment
					# TRANSLATORS: at the start of a movie, when the user has selected
					# TRANSLATORS: "Resume from last position" as start behavior.
					# TRANSLATORS: The purpose is to notify the user that the movie starts
					# TRANSLATORS: in the middle somewhere and not from the beginning.
					# TRANSLATORS: (Some translators seem to have interpreted it as a
					# TRANSLATORS: question or a choice, but it is a statement.)
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)

	def playLastCB(self, answer):
		if answer == True:
			self.doSeek(self.resume_point)
		self.hideAfterResume()

	def hideAfterResume(self):
		if isinstance(self, InfoBarShowHide):
			self.hide()

	def __getSeekable(self):
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.seek()

	def cueGetCurrentPosition(self):
		# current play position in PTS (90kHz ticks), or None if not seekable
		seek = self.__getSeekable()
		if seek is None:
			return None
		r = seek.getPlayPosition()
		if r[0]:
			return None
		return long(r[1])

	def cueGetEndCutPosition(self):
		# returns the PTS of the cut-out position still in effect, or False
		ret = False
		isin = True
		for cp in self.cut_list:
			if cp[1] == self.CUT_TYPE_OUT:
				if isin:
					isin = False
					ret = cp[0]
			elif cp[1] == self.CUT_TYPE_IN:
				isin = True
		return ret

	def jumpPreviousNextMark(self, cmp, start=False):
		# cmp: signed-distance function selecting the direction of the jump
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			return False
		mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
		if mark is not None:
			pts = mark[0]
		else:
			return False
		self.doSeek(pts)
		return True

	def jumpPreviousMark(self):
		# we add 5 seconds, so if the play position is <5s after
		# the mark, the mark before will be used
		self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)

	def jumpNextMark(self):
		if not self.jumpPreviousNextMark(lambda x: x-90000):
			# no next mark: seek to the end
			self.doSeek(-1)

	def getNearestCutPoint(self, pts, cmp=abs, start=False):
		# can be optimized
		beforecut = True
		nearest = None
		bestdiff = -1
		instate = True
		if start:
			bestdiff = cmp(0 - pts)
			if bestdiff >= 0:
				nearest = [0, False]
		for cp in self.cut_list:
			if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
				beforecut = False
				if cp[1] == self.CUT_TYPE_IN: # Start is here, disregard previous marks
					diff = cmp(cp[0] - pts)
					if start and diff >= 0:
						nearest = cp
						bestdiff = diff
					else:
						nearest = None
						bestdiff = -1
			if cp[1] == self.CUT_TYPE_IN:
				instate = True
			elif cp[1] == self.CUT_TYPE_OUT:
				instate = False
			elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
				diff = cmp(cp[0] - pts)
				if instate and diff >= 0 and (nearest is None or bestdiff > diff):
					nearest = cp
					bestdiff = diff
		return nearest

	def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
		# tolerance: a mark within this PTS distance counts as "at" the position
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			print "not seekable"
			return

		nearest_cutpoint = self.getNearestCutPoint(current_pos)

		if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
			if onlyreturn:
				return nearest_cutpoint
			if not onlyadd:
				self.removeMark(nearest_cutpoint)
		elif not onlyremove and not onlyreturn:
			self.addMark((current_pos, self.CUT_TYPE_MARK))

		if onlyreturn:
			return None

	def addMark(self, point):
		# keep cut_list sorted by PTS
		insort(self.cut_list, point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()

	def removeMark(self, point):
		self.cut_list.remove(point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()

	def showAfterCuesheetOperation(self):
		if isinstance(self, InfoBarShowHide):
			self.doShow()

	def __getCuesheet(self):
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.cueSheet()

	def uploadCuesheet(self):
		cue = self.__getCuesheet()
		if cue is None:
			print "upload failed, no cuesheet interface"
			return
		cue.setCutList(self.cut_list)

	def downloadCuesheet(self):
		cue = self.__getCuesheet()
		if cue is None:
			print "download failed, no cuesheet interface"
			self.cut_list = [ ]
		else:
			self.cut_list = cue.getCutList()

class InfoBarSummary(Screen):
	# LCD/front display summary for the normal infobar
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
			<convert type="EventTime">Progress</convert>
		</widget>
	</screen>"""

# for picon: (path="piconlcd" will use LCD picons)
#		<widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
#			<convert type="ServiceName">Reference</convert>
#		</widget>

class InfoBarSummarySupport:
	def __init__(self):
		pass

	def createSummary(self):
		return InfoBarSummary

class InfoBarMoviePlayerSummary(Screen):
	# LCD/front display summary for the movie player
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
			<convert type="ServicePosition">Position</convert>
		</widget>
	</screen>"""

class InfoBarMoviePlayerSummarySupport:
	def __init__(self):
		pass

	def createSummary(self):
		return InfoBarMoviePlayerSummary

class InfoBarTeletextPlugin:
	"""Wires the teletext key to an installed teletext plugin, if any."""
	def __init__(self):
		self.teletext_plugin = None

		# pick the (last) installed teletext plugin
		for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
			self.teletext_plugin = p

		if self.teletext_plugin is not None:
			self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
				{
					"startTeletext": (self.startTeletext, _("View teletext..."))
				})
		else:
			print "no teletext plugin found!"
def startTeletext(self): self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService()) class InfoBarSubtitleSupport(object): def __init__(self): object.__init__(self) self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions", { "subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")), }) self.selected_subtitle = None if isStandardInfoBar(self): self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay) else: from Screens.InfoBar import InfoBar self.subtitle_window = InfoBar.instance.subtitle_window self.subtitle_window.hide() self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evStart: self.__serviceChanged, iPlayableService.evEnd: self.__serviceChanged, iPlayableService.evUpdatedInfo: self.__updatedInfo }) def getCurrentServiceSubtitle(self): service = self.session.nav.getCurrentService() return service and service.subtitle() def subtitleSelection(self): subtitle = self.getCurrentServiceSubtitle() subtitlelist = subtitle and subtitle.getSubtitleList() if self.selected_subtitle or subtitlelist and len(subtitlelist)>0: from Screens.AudioSelection import SubtitleSelection self.session.open(SubtitleSelection, self) else: return 0 def __serviceChanged(self): if self.selected_subtitle: self.selected_subtitle = None self.subtitle_window.hide() def __updatedInfo(self): if not self.selected_subtitle: subtitle = self.getCurrentServiceSubtitle() cachedsubtitle = subtitle.getCachedSubtitle() if cachedsubtitle: self.enableSubtitle(cachedsubtitle) def enableSubtitle(self, selectedSubtitle): subtitle = self.getCurrentServiceSubtitle() self.selected_subtitle = selectedSubtitle if subtitle and self.selected_subtitle: subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle) self.subtitle_window.show() else: if subtitle: subtitle.disableSubtitles(self.subtitle_window.instance) self.subtitle_window.hide() def restartSubtitle(self): if 
self.selected_subtitle: self.enableSubtitle(self.selected_subtitle) class InfoBarServiceErrorPopupSupport: def __init__(self): self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evTuneFailed: self.__tuneFailed, iPlayableService.evTunedIn: self.__serviceStarted, iPlayableService.evStart: self.__serviceStarted }) self.__serviceStarted() def __serviceStarted(self): self.closeNotificationInstantiateDialog() self.last_error = None Notifications.RemovePopup(id = "ZapError") def __tuneFailed(self): if not config.usage.hide_zap_errors.value: service = self.session.nav.getCurrentService() info = service and service.info() error = info and info.getInfo(iServiceInformation.sDVBState) if error == self.last_error: error = None else: self.last_error = error error = { eDVBServicePMTHandler.eventNoResources: _("No free tuner!"), eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"), eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"), eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"), eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"), eDVBServicePMTHandler.eventNewProgramInfo: None, eDVBServicePMTHandler.eventTuned: None, eDVBServicePMTHandler.eventSOF: None, eDVBServicePMTHandler.eventEOF: None, eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"), }.get(error) #this returns None when the key not exist in the dict if error: self.closeNotificationInstantiateDialog() if hasattr(self, "dishDialog") and not self.dishDialog.dishState(): Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError") class InfoBarPowersaver: def __init__(self): self.inactivityTimer = eTimer() self.inactivityTimer.callback.append(self.inactivityTimeout) self.restartInactiveTimer() self.sleepTimer = eTimer() self.sleepTimer.callback.append(self.sleepTimerTimeout) 
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress) def keypress(self, key, flag): if flag: self.restartInactiveTimer() def restartInactiveTimer(self): time = abs(int(config.usage.inactivity_timer.value)) if time: self.inactivityTimer.startLongTimer(time) else: self.inactivityTimer.stop() def inactivityTimeout(self): if config.usage.inactivity_timer_blocktime.value: curtime = localtime(time()) if curtime.tm_year != 1970: #check if the current time is valid curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec) begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value) endtime = tuple(config.usage.inactivity_timer_blocktime_end.value) if begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime): duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2]) if duration: if duration < 0: duration += 24*3600 self.inactivityTimer.startLongTimer(duration) return if Screens.Standby.inStandby: self.inactivityTimeoutCallback(True) else: if int(config.usage.inactivity_timer.value) < 0: message = _("Your receiver will shutdown due to inactivity.") else: message = _("Your receiver will got to standby due to inactivity.") message += "\n" + _("Do you want this?") self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple = True) def inactivityTimeoutCallback(self, answer): if answer: self.goShutdownOrStandby(int(config.usage.inactivity_timer.value)) else: print "[InfoBarPowersaver] abort" def setSleepTimer(self, time): print "[InfoBarPowersaver] set sleeptimer", time if time: if time < 0: message = _("And will shutdown your receiver over ") else: message = _("And will put your receiver in standby over ") m = abs(time / 60) message = _("The sleep timer has been activated.") + "\n" + message + ngettext("%d minute", "%d minutes", m) % m self.sleepTimer.startLongTimer(abs(time)) else: message = 
_("The sleep timer has been disabled.") self.sleepTimer.stop() Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5) self.sleepTimerSetting = time def sleepTimerTimeout(self): if Screens.Standby.inStandby: self.sleepTimerTimeoutCallback(True) else: list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ] if self.sleepTimerSetting < 0: message = _("Your receiver will shutdown due to the sleeptimer.") elif self.sleepTimerSetting > 0: message = _("Your receiver will got to stand by due to the sleeptimer.") message += "\n" + _("Do you want this?") self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple = True, list = list) def sleepTimerTimeoutCallback(self, answer): if answer == "extend": print "[InfoBarPowersaver] extend sleeptimer" if self.sleepTimerSetting < 0: self.setSleepTimer(-900) else: self.setSleepTimer(900) elif answer: self.goShutdownOrStandby(self.sleepTimerSetting) else: print "[InfoBarPowersaver] abort" self.setSleepTimer(0) def goShutdownOrStandby(self, value): if value < 0: if Screens.Standby.inStandby: print "[InfoBarPowersaver] already in standby now shut down" RecordTimerEntry.TryQuitMainloop() elif not Screens.Standby.inTryQuitMainloop: print "[InfoBarPowersaver] goto shutdown" self.session.open(Screens.Standby.TryQuitMainloop, 1) elif not Screens.Standby.inStandby: print "[InfoBarPowersaver] goto standby" self.session.open(Screens.Standby.Standby)
gpl-2.0
-2,611,762,494,794,469,000
32.653588
216
0.71834
false
salspaugh/splparser
splparser/lexers/toplexer.py
1
5030
#!/usr/bin/env python import ply.lex from ply.lex import TOKEN import re from splparser.regexes.searchregexes import * from splparser.exceptions import SPLSyntaxError tokens = [ 'COMMA', 'WILDCARD', 'EQ', 'IPV4ADDR', 'IPV6ADDR', 'EMAIL','HOSTNAME', 'URL', 'PATH', 'US_PHONE', 'WORD', 'INT', 'BIN', 'OCT', 'HEX', 'FLOAT', 'ID', 'NBSTR', # non-breaking string 'LITERAL', # in quotes 'COMMON_OPT', 'TOP_OPT', 'INTERNAL_FIELD', 'DEFAULT_FIELD', 'DEFAULT_DATETIME_FIELD' ] reserved = { 'by' : 'BYLC', 'BY' : 'BYUC', 'top' : 'TOP', 'sitop' : 'SITOP', } tokens = tokens + list(reserved.values()) precedence = ( ('right', 'EQ'), ) t_ignore = ' ' t_EQ = r'=' # !!! The order in which these functions are defined determine matchine. The # first to match is used. Take CARE when reordering. states = ( ('ipunchecked', 'inclusive'), ) def is_ipv4addr(addr): addr = addr.replace('*', '0') addr = addr.strip() addr = addr.strip('"') port = addr.find(':') if port > 0: addr = addr[:port] slash = addr.find('/') if slash > 0: addr = addr[:slash] addr = addr.strip() import socket try: socket.inet_pton(socket.AF_INET, addr) except socket.error: return False return True def is_ipv6addr(addr): addr = addr.replace('*', '0') addr = addr.strip() addr = addr.strip('"') addr = addr.strip('[') port = addr.find(']') if port > 0: addr = addr[:port] slash = addr.find('/') if slash > 0: addr = addr[:slash] addr = addr.strip() import socket try: socket.inet_pton(socket.AF_INET6, addr) except socket.error: return False return True def type_if_reserved(t, default): if re.match(common_opt, t.value): return 'COMMON_OPT' elif re.match(top_opt, t.value): return 'TOP_OPT' elif re.match(internal_field, t.value): return 'INTERNAL_FIELD' elif re.match(default_field, t.value): return 'DEFAULT_FIELD', elif re.match(default_datetime_field, t.value): return 'DEFAULT_DATETIME_FIELD' else: return reserved.get(t.value, default) @TOKEN(ipv4_addr) def t_ipunchecked_IPV4ADDR(t): if is_ipv4addr(t.value): return t t.lexer.lexpos -= 
len(t.value) t.lexer.begin('INITIAL') return @TOKEN(ipv6_addr) def t_ipunchecked_IPV6ADDR(t): if is_ipv6addr(t.value): return t t.lexer.lexpos -= len(t.value) t.lexer.begin('INITIAL') return @TOKEN(internal_field) def t_INTERNAL_FIELD(t): t.lexer.begin('ipunchecked') return(t) @TOKEN(default_field) def t_DEFAULT_FIELD(t): t.lexer.begin('ipunchecked') return(t) @TOKEN(default_datetime_field) def t_DEFAULT_DATETIME_FIELD(t): t.lexer.begin('ipunchecked') return(t) def t_COMMA(t): r'''(?:\,)|(?:"\,")|(?:'\,')''' t.lexer.begin('ipunchecked') return t @TOKEN(wildcard) def t_WILDCARD(t): t.lexer.begin('ipunchecked') return t @TOKEN(literal) def t_LITERAL(t): t.lexer.begin('ipunchecked') return(t) @TOKEN(bin) def t_BIN(t): t.lexer.begin('ipunchecked') return t @TOKEN(oct) def t_OCT(t): t.lexer.begin('ipunchecked') return t @TOKEN(hex) def t_HEX(t): t.lexer.begin('ipunchecked') return t @TOKEN(float) def t_FLOAT(t): t.lexer.begin('ipunchecked') return t @TOKEN(word) def t_WORD(t): t.type = type_if_reserved(t, 'WORD') t.lexer.begin('ipunchecked') return t @TOKEN(int) def t_INT(t): t.lexer.begin('ipunchecked') return t @TOKEN(id) def t_ID(t): t.type = type_if_reserved(t, 'ID') t.lexer.begin('ipunchecked') return t @TOKEN(email) def t_EMAIL(t): t.type = type_if_reserved(t, 'EMAIL') t.lexer.begin('ipunchecked') return t @TOKEN(hostname) def t_HOSTNAME(t): t.type = type_if_reserved(t, 'HOSTNAME') t.lexer.begin('ipunchecked') return(t) @TOKEN(path) def t_PATH(t): t.type = type_if_reserved(t, 'PATH') t.lexer.begin('ipunchecked') return(t) @TOKEN(url) def t_URL(t): t.type = type_if_reserved(t, 'URL') t.lexer.begin('ipunchecked') return(t) @TOKEN(us_phone) def t_US_PHONE(t): t.lexer.begin('ipunchecked') return(t) @TOKEN(nbstr) def t_NBSTR(t): # non-breaking string t.type = type_if_reserved(t, 'NBSTR') t.lexer.begin('ipunchecked') return t def t_error(t): badchar = t.value[0] t.lexer.skip(1) t.lexer.begin('ipunchecked') raise SPLSyntaxError("Illegal character in top lexer '%s'" % 
badchar) def lex(): return ply.lex.lex() def tokenize(data, debug=False, debuglog=None): lexer = ply.lex.lex(debug=debug, debuglog=debuglog) lexer.input(data) lexer.begin('ipunchecked') tokens = [] while True: tok = lexer.token() if not tok: break tokens.append(tok) return tokens if __name__ == "__main__": import sys print tokenize(' '.join(sys.argv[1:]))
bsd-3-clause
-6,684,632,810,067,020,000
19.699588
79
0.599602
false
shawnadelic/shuup
shuup/notify/admin_module/__init__.py
2
3876
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.http.response import JsonResponse
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.csrf import csrf_exempt

from shuup.admin.base import AdminModule, MenuEntry, Notification
from shuup.admin.menu import SETTINGS_MENU_CATEGORY
from shuup.admin.utils.permissions import get_default_model_permissions
from shuup.admin.utils.urls import (
    admin_url, derive_model_url, get_edit_and_list_urls
)
from shuup.notify.enums import Priority
from shuup.notify.models import Notification as NotificationModel
from shuup.notify.models import Script

# Maps notification priority to the CSS "kind" used by the admin feed;
# anything not listed falls back to "info".
_PRIORITY_KINDS = {
    Priority.HIGH: "warning",
    Priority.CRITICAL: "danger",
}


class NotifyAdminModule(AdminModule):
    """Admin module exposing notification scripts and the notification feed."""

    name = _(u"Notifications")
    breadcrumbs_menu_entry = MenuEntry(name, "shuup_admin:notify.script.list")

    def get_urls(self):
        """Return the admin URL patterns for notification script management."""
        model_permissions = get_default_model_permissions(NotificationModel)
        urls = [
            admin_url(
                "notify/script-item-editor/",
                "shuup.notify.admin_module.views.script_item_editor",
                name="notify.script-item-editor",
                permissions=model_permissions
            ),
            admin_url(
                "notify/script/content/(?P<pk>\d+)/",
                "shuup.notify.admin_module.views.EditScriptContentView",
                name="notify.script.edit-content",
                permissions=model_permissions
            ),
            admin_url(
                "notify/mark-read/(?P<pk>\d+)/$",
                self.mark_notification_read_view,
                name="notify.mark-read",
                permissions=model_permissions
            ),
        ]
        urls += get_edit_and_list_urls(
            url_prefix="^notify/script",
            view_template="shuup.notify.admin_module.views.Script%sView",
            name_template="notify.script.%s",
            permissions=model_permissions
        )
        return urls

    def get_menu_entries(self, request):
        """Return the single settings-menu entry for notification scripts."""
        return [
            MenuEntry(
                text=_("Notifications"),
                icon="fa fa-code",
                url="shuup_admin:notify.script.list",
                category=SETTINGS_MENU_CATEGORY,
                ordering=9,
                aliases=[_("Show notification scripts")]
            )
        ]

    def get_required_permissions(self):
        return get_default_model_permissions(NotificationModel)

    @csrf_exempt
    def mark_notification_read_view(self, request, pk):
        """Mark notification ``pk`` as read for the requesting user (POST only)."""
        if request.method != "POST":
            return JsonResponse({"error": "POST only"})
        try:
            notif = NotificationModel.objects.for_user(request.user).get(pk=pk)
        except ObjectDoesNotExist:
            return JsonResponse({"error": "no such notification"})
        notif.mark_read(request.user)
        return JsonResponse({"ok": True})

    def get_notifications(self, request):
        """Yield up to 15 unread notifications for the admin notification feed."""
        unread = NotificationModel.objects.unread_for_user(request.user).order_by("-id")[:15]
        for notif in unread:
            yield Notification(
                text=notif.message,
                url=notif.url,
                kind=_PRIORITY_KINDS.get(notif.priority, "info"),
                dismissal_url=reverse("shuup_admin:notify.mark-read", kwargs={"pk": notif.pk}),
                datetime=notif.created_on
            )

    def get_model_url(self, object, kind):
        return derive_model_url(Script, "shuup_admin:notify.script", object, kind)
agpl-3.0
46,661,231,146,315,830
37
101
0.614551
false
ababic/waddleadmin
waddleadmin/helpers/url.py
1
1589
from __future__ import unicode_literals

from django.contrib.admin.utils import quote
from django.core.urlresolvers import reverse
from django.utils.http import urlquote

from wagtail.contrib.modeladmin.helpers import (
    AdminURLHelper as WagtailAdminURLHelper,
    PageAdminURLHelper as WagtailPageAdminURLHelper
)

# Actions that are served by wagtail's own page views rather than modeladmin.
wagtailadmin_page_actions = (
    'add', 'edit', 'delete', 'copy', 'move', 'preview', 'view_draft',
    'unpublish', 'revisions_index', 'add_subpage'
)


class AdminURLHelper(WagtailAdminURLHelper):
    """URL helper that can also build object-specific action URLs."""

    def get_action_url_for_obj(self, action, obj, *args):
        """Return the URL for ``action`` on ``obj`` (or the plain action URL if obj is None)."""
        if obj is None:
            return self.get_action_url(action, *args)
        pk_args = (quote(getattr(obj, self.opts.pk.attname)),) + args
        return self.get_action_url(action, *pk_args)


class PageAdminURLHelper(WagtailPageAdminURLHelper, AdminURLHelper):
    """URL helper for page models, delegating page actions to wagtailadmin."""

    def get_action_url(self, action, *args, **kwargs):
        # Note: 'add' is used below, because that's the terminology used by
        # wagtail's page editing urls / views. For pages, if the action is
        # 'create', this method should supply the URL for `ChooseParentView`,
        # rather than going straight to 'wagtailadmin_pages:add'
        if action in wagtailadmin_page_actions:
            url_name = 'wagtailadmin_pages:%s' % action
            target_url = reverse(url_name, args=args, kwargs=kwargs)
            return '%s?next=%s' % (target_url, urlquote(self.index_url))
        return super(PageAdminURLHelper, self).get_action_url(action, *args, **kwargs)
mit
-6,192,402,966,888,174,000
39.74359
77
0.662681
false
pymagic-org/pymagic_driver
bmp180.py
1
5278
import pyb
from struct import unpack as unp

# BMP180 default I2C address
BMP180_I2CADDR = 0x77

# Operating modes (pressure oversampling setting)
BMP180_ULTRALOWPOWER = 0
BMP180_STANDARD = 1
BMP180_HIGHRES = 2
BMP180_ULTRAHIGHRES = 3

# BMP180 register map
BMP180_CAL_AC1 = 0xAA
BMP180_CAL_AC2 = 0xAC
BMP180_CAL_AC3 = 0xAE
BMP180_CAL_AC4 = 0xB0
BMP180_CAL_AC5 = 0xB2
BMP180_CAL_AC6 = 0xB4
BMP180_CAL_B1 = 0xB6
BMP180_CAL_B2 = 0xB8
BMP180_CAL_MB = 0xBA
BMP180_CAL_MC = 0xBC
BMP180_CAL_MD = 0xBE
BMP180_CONTROL = 0xF4
BMP180_TEMPDATA = 0xF6
BMP180_PRESSUREDATA = 0xF6

# Control-register commands
BMP180_READTEMPCMD = 0x2E
BMP180_READPRESSUREDCMD = 0x34


class BMP180():
    """Driver for the Bosch BMP180 barometric pressure/temperature sensor
    over I2C (MicroPython ``pyb`` port).
    """

    def __init__(self, bus=1, address=BMP180_I2CADDR, mode=BMP180_STANDARD):
        self._mode = mode
        self._address = address
        self._bus = pyb.I2C(bus, pyb.I2C.MASTER)
        # Factory calibration coefficients are needed by every conversion.
        self._load_calibration()

    def _read_byte(self, cmd):
        """Read one byte from register ``cmd``."""
        return self._bus.mem_read(1, self._address, cmd)[0]

    def _read_u16(self, cmd):
        """Read an unsigned big-endian 16-bit value starting at register ``cmd``."""
        data = self._bus.mem_read(2, self._address, cmd)
        return (data[0] << 8) + data[1]

    def _read_s16(self, cmd):
        """Read a signed big-endian 16-bit value starting at register ``cmd``."""
        value = self._read_u16(cmd)
        if value > 32767:
            value -= (1 << 16)
        return value

    def _read_u24(self, cmd):
        """Read an unsigned big-endian 24-bit value starting at register ``cmd``."""
        data = self._bus.mem_read(3, self._address, cmd)
        return (data[0] << 16) + (data[1] << 8) + data[2]

    def _write_byte(self, cmd, val):
        """Write one byte ``val`` to register ``cmd``."""
        self._bus.mem_write(val, self._address, cmd)

    def _load_calibration(self):
        """Load the factory calibration coefficients from the sensor EEPROM."""
        self.cal_AC1 = self._read_s16(BMP180_CAL_AC1)
        self.cal_AC2 = self._read_s16(BMP180_CAL_AC2)
        self.cal_AC3 = self._read_s16(BMP180_CAL_AC3)
        self.cal_AC4 = self._read_u16(BMP180_CAL_AC4)
        self.cal_AC5 = self._read_u16(BMP180_CAL_AC5)
        self.cal_AC6 = self._read_u16(BMP180_CAL_AC6)
        self.cal_B1 = self._read_s16(BMP180_CAL_B1)
        self.cal_B2 = self._read_s16(BMP180_CAL_B2)
        self.cal_MB = self._read_s16(BMP180_CAL_MB)
        self.cal_MC = self._read_s16(BMP180_CAL_MC)
        self.cal_MD = self._read_s16(BMP180_CAL_MD)

    def read_raw_temp(self):
        """Read the raw (uncompensated) temperature from the sensor."""
        self._write_byte(BMP180_CONTROL, BMP180_READTEMPCMD)
        pyb.udelay(4500)  # temperature conversion time per datasheet
        return self._read_s16(BMP180_TEMPDATA)

    def read_raw_pressure(self):
        """Read the raw (uncompensated) pressure level from the sensor."""
        # Conversion time in microseconds, indexed by oversampling mode.
        conversion_time = [5000, 8000, 14000, 26000]
        self._write_byte(BMP180_CONTROL, BMP180_READPRESSUREDCMD + (self._mode << 6))
        pyb.udelay(conversion_time[self._mode])
        return self._read_u24(BMP180_PRESSUREDATA) >> (8 - self._mode)

    def read_temperature(self):
        """Return the compensated temperature in degrees Celsius."""
        UT = self.read_raw_temp()
        X1 = ((UT - self.cal_AC6) * self.cal_AC5) >> 15
        X2 = (self.cal_MC << 11) / (X1 + self.cal_MD)
        B5 = X1 + X2
        return (int(B5 + 8) >> 4) / 10.0

    def read_pressure(self):
        """Return the compensated pressure in Pascals."""
        UT = self.read_raw_temp()
        UP = self.read_raw_pressure()
        # True-temperature intermediate (B5), as in read_temperature().
        X1 = ((UT - self.cal_AC6) * self.cal_AC5) >> 15
        X2 = (self.cal_MC << 11) / (X1 + self.cal_MD)
        B5 = X1 + X2
        # Pressure compensation (datasheet algorithm).
        B6 = int(B5 - 4000)
        X1 = (self.cal_B2 * (B6 * B6) >> 12) >> 11
        X2 = (self.cal_AC2 * B6) >> 11
        X3 = X1 + X2
        B3 = (((self.cal_AC1 * 4 + X3) << self._mode) + 2) / 4
        X1 = (self.cal_AC3 * B6) >> 13
        X2 = (self.cal_B1 * ((B6 * B6) >> 12)) >> 16
        X3 = ((X1 + X2) + 2) >> 2
        B4 = (self.cal_AC4 * (X3 + 32768)) >> 15
        B7 = (UP - B3) * (50000 >> self._mode)
        if B7 < 0x80000000:
            p = int((B7 * 2) / B4)
        else:
            p = int((B7 / B4) * 2)
        X1 = (p >> 8) * (p >> 8)
        X1 = (X1 * 3038) >> 16
        X2 = (-7357 * p) >> 16
        return p + ((X1 + X2 + 3791) >> 4)

    def read_altitude(self, sealevel_pa=101325.0):
        """Return the altitude in meters for the given sea-level pressure.

        Calculation taken straight from section 3.6 of the datasheet.
        """
        pressure = float(self.read_pressure())
        return 44330.0 * (1.0 - pow(pressure / sealevel_pa, (1.0 / 5.255)))

    def read_sealevel_pressure(self, altitude_m=0.0):
        """Return the equivalent sea-level pressure (Pascals) for a known
        altitude in meters.
        """
        pressure = float(self.read_pressure())
        return pressure / pow(1.0 - altitude_m / 4433.0, 5.255)
mit
7,641,198,691,529,508,000
37.246377
81
0.555324
false
gibiansky/tensorflow
tensorflow/contrib/deprecated/__init__.py
9
4576
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Non-core alias for the deprecated tf.X_summary ops. For TensorFlow 1.0, we have re-organized the TensorFlow summary ops into a submodule, and made some semantic tweaks. The first thing to note is that we moved the APIs around as follows: tf.scalar_summary -> tf.summary.scalar tf.histogram_summary -> tf.summary.histogram tf.audio_summary -> tf.summary.audio tf.image_summary -> tf.summary.image tf.merge_summary -> tf.summary.merge tf.merge_all_summaries -> tf.summary.merge_all We think this is a cleaner API and will improve long-term discoverability and clarity of the TensorFlow API. However, we also took the opportunity to make an important change to how summary "tags" work. The "tag" of a summary is the string that is associated with the output data, i.e. the key for organizing the generated protobufs. Previously, the tag was allowed to be any unique string, and had no relation to the summary op generating it, and no relation to the TensorFlow name system. This made it very difficult to write re-usable code that would add summary ops to the graph. If you had a function that would add summary ops, you would need to manually pass in a name scope to that function to create de-duplicated tags, otherwise your program would fail with a runtime error due to tag collision. 
The new summary APIs under tf.summary throw away the "tag" as an independent concept; instead, the first argument is the node name. This means that summary tags now automatically inherit the surrounding TF name scope, and automatically are deduplicated if there is a conflict. However, now the only allowed characters are alphanumerics, underscores, and forward slashes. To make migration easier, the new APIs automatically convert illegal characters to underscores. Just as an example, consider the following "before" and "after" code snippets: # Before def add_activation_summaries(v, scope): tf.scalar_summary("%s/fraction_of_zero" % scope, tf.nn.fraction_of_zero(v)) tf.histogram_summary("%s/activations" % scope, v) # After def add_activation_summaries(v): tf.summary.scalar("fraction_of_zero", tf.nn.fraction_of_zero(v)) tf.summary.histogram("activations", v) Now, so long as the add_activation_summaries function is called from within the right name scope, the behavior is the same. Because this change does modify the behavior and could break tests, we can't automatically migrate usage to the new APIs. That is why we are making the old APIs temporarily available here at tf.contrib.deprecated. In addition to the name change described above, there are two further changes to the new summary ops: - the "max_images" argument for tf.image_summary was renamed to "max_outputs for tf.summary.image - tf.scalar_summary accepted arbitrary tensors of tags and values. However, tf.summary.scalar requires a single scalar name and scalar value. In most cases, you can create tf.summary.scalars in a loop to get the same behavior As before, TensorBoard will group charts by the top-level name scope. This may be inconvenient, since in the new summary ops the summary will inherit that name scope without user control. We plan to add more grouping mechanisms to TensorBoard, so it will be possible to specify the TensorBoard group for each summary via the summary API. 
""" from __future__ import absolute_import from __future__ import division from __future__ import print_function # pylint: disable=unused-import,line-too-long from tensorflow.python.ops.logging_ops import audio_summary from tensorflow.python.ops.logging_ops import histogram_summary from tensorflow.python.ops.logging_ops import image_summary from tensorflow.python.ops.logging_ops import merge_all_summaries from tensorflow.python.ops.logging_ops import merge_summary from tensorflow.python.ops.logging_ops import scalar_summary # pylint: enable=unused-import,line-too-long
apache-2.0
-5,136,313,745,253,109,000
46.175258
80
0.769231
false
HaraldWeber/client
src/config/__init__.py
1
3081
__author__ = 'Sheeo'

import os
import sys
import version
import logging

from logging.handlers import RotatingFileHandler

from PyQt4 import QtCore

_settings = QtCore.QSettings("ForgedAllianceForever", "FA Lobby")

# One shared log format; previously this string was duplicated three times.
_LOG_FORMAT = '%(asctime)s %(levelname)-8s %(name)-30s %(message)s'


class Settings(object):
    """
    This wraps QSettings, fetching default values from the selected
    configuration module if the key isn't found.
    """
    @staticmethod
    def get(key, group=None):
        """Return the stored value for ``key`` (optionally inside ``group``),
        falling back to the configuration module's ``defaults``.
        """
        if group is None:
            value = _settings.value(key)
        else:
            _settings.beginGroup(group)
            value = _settings.value(key)
            _settings.endGroup()
        if value is None:
            if group is None:
                return defaults[key]
            else:
                return defaults[group][key]
        return value

    @staticmethod
    def set(key, value, group=None):
        """Persist ``value`` under ``key`` (optionally inside ``group``)."""
        if group is None:
            _settings.setValue(key, value)
        else:
            _settings.beginGroup(group)
            _settings.setValue(key, value)
            _settings.endGroup()


def make_dirs():
    """Create the log/mod/engine/map directories if they don't exist yet."""
    dirs = [
        Settings.get('DIR', 'LOG'),
        Settings.get('MODS_PATH', 'FA'),
        Settings.get('ENGINE_PATH', 'FA'),
        Settings.get('MAPS_PATH', 'FA')
    ]
    for d in dirs:
        if not os.path.isdir(d):
            os.makedirs(d)


def _setup_file_logging():
    """Attach a rotating file handler for frozen builds.

    Extracted helper: this block was previously duplicated verbatim in both
    frozen branches below. Must run after ``defaults`` is imported and
    ``make_dirs()`` has been called, since Settings.get reads ``defaults``.
    """
    rotate = RotatingFileHandler(filename=os.path.join(Settings.get('DIR', 'LOG'), 'forever.log'),
                                 maxBytes=Settings.get('MAX_SIZE', 'LOG'),
                                 backupCount=10)
    rotate.setFormatter(logging.Formatter(_LOG_FORMAT))
    logging.getLogger().addHandler(rotate)
    logging.getLogger().setLevel(Settings.get('LEVEL', 'LOG'))


v = version.get_git_version()

if getattr(sys, 'frozen', False):
    # Frozen (packaged) build: pick the config module by release type and
    # log to a rotating file.
    if not version.is_prerelease_version(v):
        logging.warning("FAF version: " + repr(version.get_git_version()))
        from production import defaults
        make_dirs()
        _setup_file_logging()
    else:
        logging.warning("FAF prerelease version: " + repr(version.get_git_version()))
        from develop import defaults
        make_dirs()
        _setup_file_logging()
else:
    # Development run: log to the console instead of a file.
    devh = logging.StreamHandler()
    devh.setFormatter(logging.Formatter(_LOG_FORMAT))
    logging.getLogger().addHandler(devh)
    logging.getLogger().setLevel(logging.INFO)

    for k in []:
        logging.getLogger(k).setLevel(logging.DEBUG)

    from develop import defaults
    make_dirs()
    logging.warning("FAF development version: " + repr(version.get_git_version()))
gpl-3.0
-4,364,117,400,433,639,000
33.617978
102
0.59656
false
RafaelRMachado/qtwebkit
Source/WTF/WTF.vcxproj/work-around-vs-dependency-tracking-bugs.py
13
2585
#!/usr/bin/env python import glob import os import re import sys # It's fragile to rely on the location of this script to find the top-level # source directory. TOP_LEVEL_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) WEBKIT_LIBRARIES = os.environ['WEBKIT_LIBRARIES']; def main(): react_to_vsprops_changes() react_to_webkit1_interface_changes() def react_to_vsprops_changes(): vsprops_directory = os.path.join(WEBKIT_LIBRARIES, 'tools', 'vsprops') newest_vsprops_time = mtime_of_newest_file_matching_glob(os.path.join(vsprops_directory, '*.props')) obj_directory = os.path.join(os.environ['CONFIGURATIONBUILDDIR'], 'obj32') # Visual Studio isn't smart enough to figure out it needs to rebuild these file types when # .vsprops files change (even if we touch wtf/Platform.h below), so we delete them to force them # to be rebuilt. for extension in ('dep', 'manifest', 'pch', 'res'): for filepath in glob.iglob(os.path.join(obj_directory, '*', '*.%s' % extension)): delete_if_older_than(filepath, newest_vsprops_time) # Touch wtf/Platform.h so all files will be recompiled. This is necessary # to pick up changes to preprocessor macros (e.g., ENABLE_*). wtf_platform_h = os.path.join(TOP_LEVEL_DIRECTORY, 'Source', 'WTF', 'wtf', 'Platform.h') touch_if_older_than(wtf_platform_h, newest_vsprops_time) def react_to_webkit1_interface_changes(): interfaces_directory = os.path.join(TOP_LEVEL_DIRECTORY, 'Source', 'WebKit', 'win', 'Interfaces') newest_idl_time = mtime_of_newest_file_matching_glob(os.path.join(interfaces_directory, '*.idl')) # WebKit.idl includes all the other IDL files, so needs to be rebuilt if any IDL file changes. # But Visual Studio isn't smart enough to figure this out, so we touch WebKit.idl to ensure that # it gets rebuilt. 
touch_if_older_than(os.path.join(interfaces_directory, 'WebKit.idl'), newest_idl_time) def mtime_of_newest_file_matching_glob(glob_pattern): files = glob.glob(glob_pattern) assert len(files), "Couldn't find any files matching glob %s" % glob_pattern return max(map(os.path.getmtime, files)) def delete_if_older_than(path, reference_time): if os.path.getmtime(path) < reference_time: print 'Deleting %s' % path os.remove(path) def touch_if_older_than(path, reference_time): if os.path.getmtime(path) < reference_time: print 'Touching %s' % path os.utime(path, None) if __name__ == '__main__': sys.exit(main())
gpl-2.0
8,825,537,818,848,532,000
38.166667
115
0.692456
false
infoINGenieria/Zweb
z_web/costos/migrations/0025_auto_20170910_1318.py
1
2595
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Adds the AvanceObra model plus its real/projection proxy models,
    and renames the verbose names of the existing cost models.
    """

    dependencies = [
        ('core', '0019_auto_20170813_1123'),
        ('parametros', '0005_auto_20160423_1019'),
        ('costos', '0024_auto_20170910_1209'),
    ]

    operations = [
        # Concrete model storing per-period work-progress records.
        migrations.CreateModel(
            name='AvanceObra',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
                ('created_at', models.DateTimeField(verbose_name='Fecha de creación', auto_now_add=True)),
                ('modified_at', models.DateTimeField(verbose_name='Fecha de modificación', auto_now=True)),
                ('avance', models.DecimalField(verbose_name='avance', max_digits=18, decimal_places=3)),
                ('observacion', models.CharField(verbose_name='observación', max_length=255, blank=True, null=True)),
                ('es_proyeccion', models.BooleanField(verbose_name='Es una proyección', default=False)),
                ('centro_costo', models.ForeignKey(verbose_name='centro de costo', null=True, related_name='mis_avances', to='core.Obras')),
                ('periodo', models.ForeignKey(verbose_name='periodo', to='parametros.Periodo')),
            ],
            options={
                'verbose_name': 'avance de obra',
                'verbose_name_plural': 'avances de obra',
                'ordering': ('periodo',),
            },
        ),
        migrations.AlterModelOptions(
            name='costoproyeccion',
            options={'verbose_name': 'proyección de costo', 'verbose_name_plural': 'proyecciones de costo'},
        ),
        migrations.AlterModelOptions(
            name='costoreal',
            options={'verbose_name': 'costo', 'verbose_name_plural': 'costos'},
        ),
        # Proxy models splitting projected vs. real progress records.
        migrations.CreateModel(
            name='AvanceObraProyeccion',
            fields=[
            ],
            options={
                'verbose_name': 'proyección de avance de obra',
                'verbose_name_plural': 'proyecciones de avance de obra',
                'proxy': True,
            },
            bases=('costos.avanceobra',),
        ),
        migrations.CreateModel(
            name='AvanceObraReal',
            fields=[
            ],
            options={
                'verbose_name': 'avance de obra',
                'verbose_name_plural': 'avances de obra',
                'proxy': True,
            },
            bases=('costos.avanceobra',),
        ),
    ]
gpl-2.0
-6,774,689,786,310,076,000
39.453125
140
0.541908
false
ZHAW-INES/rioxo-uClinux-dist
user/python/python-2.4.4/Mac/Tools/IDE/Wquicktime.py
4
3469
import os from Carbon import Qd from Carbon import Win from Carbon import Qt, QuickTime import W from Carbon import File from Carbon import Evt, Events _moviesinitialized = 0 def EnterMovies(): global _moviesinitialized if not _moviesinitialized: Qt.EnterMovies() _moviesinitialized = 1 class Movie(W.Widget): def __init__(self, possize): EnterMovies() self.movie = None self.running = 0 W.Widget.__init__(self, possize) def adjust(self, oldbounds): self.SetPort() self.GetWindow().InvalWindowRect(oldbounds) self.GetWindow().InvalWindowRect(self._bounds) self.calcmoviebox() def set(self, path_or_fss, start = 0): self.SetPort() if self.movie: #self.GetWindow().InvalWindowRect(self.movie.GetMovieBox()) Qd.PaintRect(self.movie.GetMovieBox()) path = File.pathname(path) self.movietitle = os.path.basename(path) movieResRef = Qt.OpenMovieFile(path_or_fss, 1) self.movie, dummy, dummy = Qt.NewMovieFromFile(movieResRef, 0, QuickTime.newMovieActive) self.moviebox = self.movie.GetMovieBox() self.calcmoviebox() Qd.ObscureCursor() # XXX does this work at all? 
self.movie.GoToBeginningOfMovie() if start: self.movie.StartMovie() self.running = 1 else: self.running = 0 self.movie.MoviesTask(0) def get(self): return self.movie def getmovietitle(self): return self.movietitle def start(self): if self.movie: Qd.ObscureCursor() self.movie.StartMovie() self.running = 1 def stop(self): if self.movie: self.movie.StopMovie() self.running = 0 def rewind(self): if self.movie: self.movie.GoToBeginningOfMovie() def calcmoviebox(self): if not self.movie: return ml, mt, mr, mb = self.moviebox wl, wt, wr, wb = widgetbox = self._bounds mheight = mb - mt mwidth = mr - ml wheight = wb - wt wwidth = wr - wl if (mheight * 2 < wheight) and (mwidth * 2 < wwidth): scale = 2 elif mheight > wheight or mwidth > wwidth: scale = min(float(wheight) / mheight, float(wwidth) / mwidth) else: scale = 1 mwidth, mheight = mwidth * scale, mheight * scale ml, mt = wl + (wwidth - mwidth) / 2, wt + (wheight - mheight) / 2 mr, mb = ml + mwidth, mt + mheight self.movie.SetMovieBox((ml, mt, mr, mb)) def idle(self, *args): if self.movie: if not self.movie.IsMovieDone() and self.running: Qd.ObscureCursor() while 1: self.movie.MoviesTask(0) gotone, event = Evt.EventAvail(Events.everyEvent) if gotone or self.movie.IsMovieDone(): break elif self.running: box = self.movie.GetMovieBox() self.SetPort() self.GetWindow().InvalWindowRect(box) self.movie = None self.running = 0 def draw(self, visRgn = None): if self._visible: Qd.PaintRect(self._bounds) if self.movie: self.movie.UpdateMovie() self.movie.MoviesTask(0)
gpl-2.0
-5,461,630,728,268,689,000
29.699115
96
0.555203
false
AnselCmy/ARPS
report_proposal.py
1
3359
#!/usr/bin/env python # -*- coding:utf-8 -*- from __future__ import print_function import sys import re import time reload(sys) sys.setdefaultencoding('utf8') import pymongo as pm cs_labels = ['计算机网络', '信息安全', '云计算&大数据', '机器学习&模式识别', '数据科学', '计算机图形学&图像处理', '计算机教学', '数据库', '计算机组成与结构', '人机交互', '软件技术', '计算机应用', '信息检索', '物联网', '多媒体技术'] regions = { u'华东': ['上海市', '江苏省', '浙江省', '安徽省', '江西省', '山东省', '福建省'], u'华北': ['北京市', '天津市', '山西省', '河北省'], u'华中': ['河南省', '湖北省', '湖南省'], u'华南': ['广东省', '广西壮族自治区', '海南省', '香港特别行政区', '澳门特别行政区'], u'西南': ['四川省', '贵州省', '云南省', '重庆市', '西藏自治区'], u'西北': ['陕西省', '甘肃省', '青海省', '宁夏回族自治区', '新疆维吾尔自治区'], u'东北': ['黑龙江省', '吉林省', '辽宁省'] } region_weight = [0.5, 0.3, 0.2] region_area_weight = [0.5, 0.5] def get_localtime(times): date = times.split('-') if len(date) == 3: year, month, day = date[0].strip(), date[1].strip(), date[2].strip() if len(year) != 4: year = time.strftime("%Y", time.localtime())[:2] + year else: month, day = date[0].strip(), date[1].strip() year = time.strftime("%Y", time.localtime()) time_number = int(year) * 10000 + int(month) * 100 + int(day) return time_number def connect_db(): conn = pm.MongoClient('localhost', 27017) db = conn.get_database('report_db') reports_col = db.get_collection('reports_with_label') users_col = db.get_collection('users') return reports_col, users_col def get_loc_list(loc): pattern = re.compile(u'([\u4e00-\u9fa5]+):([\u4e00-\u9fa5]+)-([\u4e00-\u9fa5]+)') if '-' not in loc: loc = loc+'-市辖区' return pattern.search(loc).groups() def get_region(province): for reg, prov in regions.items(): if province in prov: return reg else: raise ValueError('invalid province: %s' % province) if __name__ == '__main__': if len(sys.argv) > 1: query = {'username':sys.argv[1]} else: query = {} now_time = get_localtime(time.strftime("%Y-%m-%d", time.localtime())) reports_col, users_col = connect_db() for user in users_col.find(query): p = user['province'] c = user['city'] reports_score = {} # Get the location info of user 
user_loc_list = [get_region(p), p, c] for report in reports_col.find(): # The score of location report_loc_list = get_loc_list(report['location']) diff = list(map(lambda x: x[0]==x[1], zip(user_loc_list, report_loc_list))) if not diff[1]: diff[2] = False region_score = reduce(lambda x,y: x+y, map(lambda x:x[0]*int(x[1]), zip(region_weight, diff))) # The score of academic area area_score = 0 for a in user['area']: if cs_labels.index(a) in report['label']: area_score = 1 # Get the total score score = reduce(lambda x,y: x+y, map(lambda x:x[0]*x[1], zip(region_area_weight, [region_score, area_score]))) # Update the score reports_score = dict(reports_score, **{report['_id'].__str__(): score}) users_col.update({'_id': user['_id']}, {'$set': {'reports_score': reports_score}})
mit
-7,529,311,655,118,725,000
29.520833
97
0.601912
false
mitodl/lmod_proxy
lmod_proxy/web.py
1
2179
# -*- coding: utf-8 -*- """Root flask application for lmod_proxy""" import json import OpenSSL.crypto from .config import LMODP_CERT from datetime import datetime from flask import Flask, redirect, url_for from passlib.apache import HtpasswdFile from lmod_proxy import __project__ from lmod_proxy.auth import requires_auth from lmod_proxy.edx_grades import edx_grades def app_factory(): """Startup and application factory""" new_app = Flask(__project__) new_app.config.from_object('lmod_proxy.config'.format(__project__)) new_app.register_blueprint(edx_grades, url_prefix='/edx_grades') # Load up user database try: new_app.config['users'] = HtpasswdFile( new_app.config['LMODP_HTPASSWD_PATH'] ) except IOError: new_app.logger.critical( 'No htpasswd file loaded, please set `LMODP_HTPASSWD`' 'environment variable to a valid apache htpasswd file.' ) new_app.config['users'] = HtpasswdFile() new_app.logger.debug( 'Starting with configuration:\n %s', '\n'.join([ '{0}: {1}'.format(x, y) for x, y in sorted(dict(new_app.config).items()) ]) ) return new_app app = app_factory() @app.route('/', methods=['GET']) @requires_auth def index(user): """Welcome them to our amazing LMod Proxy Return: Flask.response """ return redirect(url_for('edx_grades.index')) @app.route('/status', methods=['GET']) def status(): """Route to get app cert expiration date Return: json object containing app_cert_expiration date and status """ app_cert_content = open(LMODP_CERT, 'rt').read() app_cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, app_cert_content) app_cert_expiration = datetime.strptime( app_cert.get_notAfter().decode('utf8'), '%Y%m%d%H%M%SZ') date_delta = app_cert_expiration - datetime.now() retval = { 'app_cert_expires': app_cert_expiration.strftime('%Y-%m-%dT%H:%M:%S'), 'status': 'ok' if date_delta.days > 30 else 'warn' } return json.dumps(retval)
agpl-3.0
-4,694,407,644,492,820,000
28.849315
78
0.623222
false
azavea/nyc-trees
src/nyc_trees/apps/home/training/routes.py
3
1471
# -*- coding: utf-8 -*- from __future__ import print_function from __future__ import unicode_literals from __future__ import division from django.contrib.auth.decorators import login_required from django_tinsel.decorators import route, render_template from django_tinsel.utils import decorate as do from apps.users.views.group import group_list_page from apps.core.decorators import user_must_have_online_training from apps.home.training.decorators import render_flatpage from apps.home.training import views as v from apps.home.training.decorators import mark_user groups_to_follow = route( GET=do(login_required, user_must_have_online_training, mark_user('training_finished_groups_to_follow'), render_template('home/groups_to_follow.html'), group_list_page)) training_list_page = route(GET=do(render_template('home/training.html'), v.training_list_page)) intro_quiz = do(login_required, route(GET=do(render_template('home/quiz_page.html'), v.intro_quiz), POST=do(render_template('home/quiz_complete_page.html'), v.complete_quiz))) training_instructions = route(GET=do( render_template('home/training_instructions.html'), v.training_instructions)) def make_flatpage_route(name): return route(GET=do( login_required, render_flatpage('/%s/' % name)))
agpl-3.0
159,455,125,249,875,520
30.978261
78
0.664174
false
jxta/cc
vendor/Twisted-10.0.0/twisted/internet/test/test_threads.py
4
6106
# Copyright (c) 2008 Twisted Matrix Laboratories. # See LICENSE for details. """ Tests for implementations of L{IReactorThreads}. """ __metaclass__ = type from weakref import ref import gc from twisted.internet.test.reactormixins import ReactorBuilder from twisted.python.threadpool import ThreadPool class ThreadTestsBuilder(ReactorBuilder): """ Builder for defining tests relating to L{IReactorThreads}. """ def test_getThreadPool(self): """ C{reactor.getThreadPool()} returns an instance of L{ThreadPool} which starts when C{reactor.run()} is called and stops before it returns. """ state = [] reactor = self.buildReactor() pool = reactor.getThreadPool() self.assertIsInstance(pool, ThreadPool) self.assertFalse( pool.started, "Pool should not start before reactor.run") def f(): # Record the state for later assertions state.append(pool.started) state.append(pool.joined) reactor.stop() reactor.callWhenRunning(f) self.runReactor(reactor, 2) self.assertTrue( state[0], "Pool should start after reactor.run") self.assertFalse( state[1], "Pool should not be joined before reactor.stop") self.assertTrue( pool.joined, "Pool should be stopped after reactor.run returns") def test_suggestThreadPoolSize(self): """ C{reactor.suggestThreadPoolSize()} sets the maximum size of the reactor threadpool. """ reactor = self.buildReactor() reactor.suggestThreadPoolSize(17) pool = reactor.getThreadPool() self.assertEqual(pool.max, 17) def test_delayedCallFromThread(self): """ A function scheduled with L{IReactorThreads.callFromThread} invoked from a delayed call is run immediately in the next reactor iteration. When invoked from the reactor thread, previous implementations of L{IReactorThreads.callFromThread} would skip the pipe/socket based wake up step, assuming the reactor would wake up on its own. However, this resulted in the reactor not noticing a insert into the thread queue at the right time (in this case, after the thread queue has been processed for that reactor iteration). 
""" reactor = self.buildReactor() def threadCall(): reactor.stop() # Set up the use of callFromThread being tested. reactor.callLater(0, reactor.callFromThread, threadCall) before = reactor.seconds() self.runReactor(reactor, 60) after = reactor.seconds() # We specified a timeout of 60 seconds. The timeout code in runReactor # probably won't actually work, though. If the reactor comes out of # the event notification API just a little bit early, say after 59.9999 # seconds instead of after 60 seconds, then the queued thread call will # get processed but the timeout delayed call runReactor sets up won't! # Then the reactor will stop and runReactor will return without the # timeout firing. As it turns out, select() and poll() are quite # likely to return *slightly* earlier than we ask them to, so the # timeout will rarely happen, even if callFromThread is broken. So, # instead we'll measure the elapsed time and make sure it's something # less than about half of the timeout we specified. This is heuristic. # It assumes that select() won't ever return after 30 seconds when we # asked it to timeout after 60 seconds. And of course like all # time-based tests, it's slightly non-deterministic. If the OS doesn't # schedule this process for 30 seconds, then the test might fail even # if callFromThread is working. self.assertTrue(after - before < 30) def test_stopThreadPool(self): """ When the reactor stops, L{ReactorBase._stopThreadPool} drops the reactor's direct reference to its internal threadpool and removes the associated startup and shutdown triggers. This is the case of the thread pool being created before the reactor is run. 
""" reactor = self.buildReactor() threadpool = ref(reactor.getThreadPool()) reactor.callWhenRunning(reactor.stop) self.runReactor(reactor) gc.collect() self.assertIdentical(threadpool(), None) def test_stopThreadPoolWhenStartedAfterReactorRan(self): """ We must handle the case of shutting down the thread pool when it was started after the reactor was run in a special way. Some implementation background: The thread pool is started with callWhenRunning, which only returns a system trigger ID when it is invoked before the reactor is started. This is the case of the thread pool being created after the reactor is started. """ reactor = self.buildReactor() threadPoolRefs = [] def acquireThreadPool(): threadPoolRefs.append(ref(reactor.getThreadPool())) reactor.stop() reactor.callWhenRunning(acquireThreadPool) self.runReactor(reactor) gc.collect() self.assertIdentical(threadPoolRefs[0](), None) def test_cleanUpThreadPoolEvenBeforeReactorIsRun(self): """ When the reactor has its shutdown event fired before it is run, the thread pool is completely destroyed. For what it's worth, the reason we support this behavior at all is because Trial does this. This is the case of the thread pool being created without the reactor being started at al. """ reactor = self.buildReactor() threadPoolRef = ref(reactor.getThreadPool()) reactor.fireSystemEvent("shutdown") self.assertIdentical(threadPoolRef(), None) globals().update(ThreadTestsBuilder.makeTestCaseClasses())
apache-2.0
-8,656,049,951,480,084,000
36.460123
79
0.660007
false
AnishShah/tensorflow
tensorflow/contrib/estimator/python/estimator/export.py
6
9460
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Wrapper for methods to export train/eval graphs from Estimator.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.estimator import model_fn as model_fn_lib def export_saved_model_for_mode( estimator, export_dir_base, input_receiver_fn, assets_extra=None, as_text=False, checkpoint_path=None, strip_default_attrs=False, mode=model_fn_lib.ModeKeys.PREDICT): # pylint: disable=line-too-long """Exports a single train/eval/predict graph as a SavedModel. For a detailed guide, see [Using SavedModel with Estimators]( https://tensorflow.org/guide/saved_model#using_savedmodel_with_estimators). 
Sample usage: ```python classifier = tf.estimator.LinearClassifier( feature_columns=[age, language]) classifier.train(input_fn=input_fn, steps=1000) feature_spec = { 'age': tf.placeholder(dtype=tf.int64), 'language': array_ops.placeholder(dtype=tf.string) } label_spec = tf.placeholder(dtype=dtypes.int64) train_rcvr_fn = tf.contrib.estimator.build_raw_supervised_input_receiver_fn( feature_spec, label_spec) export_dir = tf.contrib.estimator.export_saved_model_for_mode( classifier, export_dir_base='my_model/', input_receiver_fn=train_rcvr_fn, mode=model_fn_lib.ModeKeys.TRAIN) # export_dir is a timestamped directory with the SavedModel, which # can be used for serving, analysis with TFMA, or directly loaded in. with ops.Graph().as_default() as graph: with session.Session(graph=graph) as sess: loader.load(sess, [tag_constants.TRAINING], export_dir) weights = graph.get_tensor_by_name(''linear/linear_model/age/weights') ... ``` This method is a wrapper for _export_all_saved_models, and wraps a raw input_receiver_fn in a dictionary to pass in to that function. See _export_all_saved_models for full docs. See tf.contrib.estimator.export_saved_model_for_mode for the currently exposed version of this function. Args: estimator: an instance of tf.estimator.Estimator export_dir_base: A string containing a directory in which to create timestamped subdirectories containing exported SavedModels. input_receiver_fn: a function that takes no argument and returns the appropriate subclass of `InputReceiver`. assets_extra: A dict specifying how to populate the assets.extra directory within the exported SavedModel, or `None` if no extra assets are needed. as_text: whether to write the SavedModel proto in text format. checkpoint_path: The checkpoint path to export. If `None` (the default), the most recent checkpoint found within the model directory is chosen. strip_default_attrs: Boolean. If `True`, default-valued attributes will be removed from the NodeDefs. 
For a detailed guide, see [Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes). mode: tf.estimator.ModeKeys value indicating with mode will be exported. Returns: The string path to the exported directory. Raises: ValueError: if input_receiver_fn is None, no export_outputs are provided, or no checkpoint can be found. """ # pylint: enable=line-too-long # pylint: disable=protected-access return estimator._export_saved_model_for_mode( export_dir_base, input_receiver_fn, assets_extra=assets_extra, as_text=as_text, checkpoint_path=checkpoint_path, strip_default_attrs=strip_default_attrs, mode=mode) # pylint: enable=protected-access def export_all_saved_models( estimator, export_dir_base, input_receiver_fn_map, assets_extra=None, as_text=False, checkpoint_path=None, strip_default_attrs=False): # pylint: disable=line-too-long """Exports requested train/eval/predict graphs as separate SavedModels. See tf.contrib.estimator.export_all_saved_models for the currently exposed version of this function. For each mode passed in via the input_receiver_fn_map, this method builds a new graph by calling the input_receiver_fn to obtain feature and label `Tensor`s. Next, this method calls the `Estimator`'s model_fn in the passed mode to generate the model graph based on those features and labels, and restores the given checkpoint (or, lacking that, the most recent checkpoint) into the graph. Only one of the modes is used for saving variables to the SavedModel (order of preference: TRAIN, EVAL, then PREDICT), such that up to three MetaGraphDefs are saved with a single set of variables in a single SavedModel directory. For prediction, the exported `MetaGraphDef` will provide one `SignatureDef` for each element of the export_outputs dict returned from the model_fn, named using the same keys. 
One of these keys is always signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY, indicating which signature will be served when a serving request does not specify one. For each signature, the outputs are provided by the corresponding `ExportOutput`s, and the inputs are always the input receivers provided by the serving_input_receiver_fn. For training and evaluation, the train_op is stored in an extra collection, and loss, metrics, and predictions are included in a SignatureDef for the mode in question. Extra assets may be written into the SavedModel via the assets_extra argument. This should be a dict, where each key gives a destination path (including the filename) relative to the assets.extra directory. The corresponding value gives the full path of the source file to be copied. For example, the simple case of copying a single file without renaming it is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`. Sample usage: ```python classifier = tf.estimator.LinearClassifier( feature_columns=[age, language]) classifier.train(input_fn=input_fn) feature_spec = { 'age': tf.placeholder(dtype=tf.int64), 'language': array_ops.placeholder(dtype=tf.string) } label_spec = tf.placeholder(dtype=dtypes.int64) train_rcvr_fn = tf.contrib.estimator.build_raw_supervised_input_receiver_fn( feature_spec, label_spec) serve_rcvr_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn( feature_spec) rcvr_fn_map = { model_fn_lib.ModeKeys.TRAIN: train_rcvr_fn, model_fn_lib.ModeKeys.PREDICT: serve_rcvr_fn, } export_dir = tf.contrib.estimator.export_all_saved_models( classifier, export_dir_base='my_model/', input_receiver_fn_map=rcvr_fn_map) # export_dirs is a dict of directories with SavedModels, which # can be used for serving, analysis with TFMA, or directly loaded in. 
with ops.Graph().as_default() as graph: with session.Session(graph=graph) as sess: loader.load(sess, [tag_constants.TRAINING], export_dir) weights = graph.get_tensor_by_name('linear/linear_model/age/weights') ... ``` Args: estimator: an instance of tf.estimator.Estimator export_dir_base: A string containing a directory in which to create timestamped subdirectories containing exported SavedModels. input_receiver_fn_map: dict of tf.estimator.ModeKeys to input_receiver_fn mappings, where the input_receiver_fn is a function that takes no argument and returns the appropriate subclass of `InputReceiver`. assets_extra: A dict specifying how to populate the assets.extra directory within the exported SavedModel, or `None` if no extra assets are needed. as_text: whether to write the SavedModel proto in text format. checkpoint_path: The checkpoint path to export. If `None` (the default), the most recent checkpoint found within the model directory is chosen. strip_default_attrs: Boolean. If `True`, default-valued attributes will be removed from the NodeDefs. For a detailed guide, see [Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes). Returns: A dict of tf.estimator.ModeKeys value to string path for each exported directory. Raises: ValueError: if any input_receiver_fn is None, no export_outputs are provided, or no checkpoint can be found. """ # pylint: enable=line-too-long # pylint: disable=protected-access return estimator._export_all_saved_models( export_dir_base, input_receiver_fn_map, assets_extra=assets_extra, as_text=as_text, checkpoint_path=checkpoint_path, strip_default_attrs=strip_default_attrs) # pylint: enable=protected-access
apache-2.0
-2,163,381,448,625,918,000
41.421525
174
0.730338
false
xuru/pyvisdk
pyvisdk/do/license_source.py
1
1044
import logging from pyvisdk.exceptions import InvalidArgumentError ######################################## # Automatically generated, do not edit. ######################################## log = logging.getLogger(__name__) def LicenseSource(vim, *args, **kwargs): '''This data object type is used to communicate configuration about where to find licenses to use for this system.''' obj = vim.client.factory.create('ns0:LicenseSource') # do some validation checking... if (len(args) + len(kwargs)) < 0: raise IndexError('Expected at least 1 arguments got: %d' % len(args)) required = [ ] optional = [ 'dynamicProperty', 'dynamicType' ] for name, arg in zip(required+optional, args): setattr(obj, name, arg) for name, value in kwargs.items(): if name in required + optional: setattr(obj, name, value) else: raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional))) return obj
mit
-4,995,339,743,859,988,000
29.735294
124
0.597701
false
vityagi/azure-linux-extensions
VMEncryption/main/oscrypto/rhel_68/encryptstates/PatchBootSystemState.py
8
6049
#!/usr/bin/env python # # VM Backup extension # # Copyright 2015 Microsoft Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Requires Python 2.7+ # import inspect import re import os import sys from time import sleep from OSEncryptionState import * class PatchBootSystemState(OSEncryptionState): def __init__(self, context): super(PatchBootSystemState, self).__init__('PatchBootSystemState', context) def should_enter(self): self.context.logger.log("Verifying if machine should enter patch_boot_system state") if not super(PatchBootSystemState, self).should_enter(): return False self.context.logger.log("Performing enter checks for patch_boot_system state") self.command_executor.Execute('mount /dev/mapper/osencrypt /oldroot', True) self.command_executor.Execute('umount /oldroot', True) return True def enter(self): if not self.should_enter(): return self.context.logger.log("Entering patch_boot_system state") self.command_executor.Execute('mount /boot', False) self.command_executor.Execute('mount /dev/mapper/osencrypt /oldroot', True) self.command_executor.Execute('mount --make-rprivate /', True) self.command_executor.Execute('mkdir /oldroot/memroot', True) self.command_executor.Execute('pivot_root /oldroot /oldroot/memroot', True) self.command_executor.ExecuteInBash('for i in dev proc sys boot; do mount --move /memroot/$i /$i; done', True) try: self._modify_pivoted_oldroot() except Exception as e: self.command_executor.Execute('mount --make-rprivate /') 
self.command_executor.Execute('pivot_root /memroot /memroot/oldroot') self.command_executor.Execute('rmdir /oldroot/memroot') self.command_executor.ExecuteInBash('for i in dev proc sys boot; do mount --move /oldroot/$i /$i; done') raise else: self.command_executor.Execute('mount --make-rprivate /') self.command_executor.Execute('pivot_root /memroot /memroot/oldroot') self.command_executor.Execute('rmdir /oldroot/memroot') self.command_executor.ExecuteInBash('for i in dev proc sys boot; do mount --move /oldroot/$i /$i; done') extension_full_name = 'Microsoft.Azure.Security.' + CommonVariables.extension_name self.command_executor.Execute('cp -ax' + ' /var/log/azure/{0}'.format(extension_full_name) + ' /oldroot/var/log/azure/{0}.Stripdown'.format(extension_full_name), True) self.command_executor.Execute('umount /boot') self.command_executor.Execute('umount /oldroot') self.context.logger.log("Pivoted back into memroot successfully, restarting WALA") self.command_executor.Execute('service sshd restart') self.command_executor.Execute('service atd restart') with open("/restart-wala.sh", "w") as f: f.write("service waagent restart\n") with open("/delete-lock.sh", "w") as f: f.write("rm -f /var/lib/azure_disk_encryption_config/daemon_lock_file.lck\n") self.command_executor.Execute('at -f /delete-lock.sh now + 1 minutes', True) self.command_executor.Execute('at -f /restart-wala.sh now + 2 minutes', True) self.should_exit() self.command_executor.ExecuteInBash('pkill -f .*ForLinux.*handle.py.*daemon.*', True) def should_exit(self): self.context.logger.log("Verifying if machine should exit patch_boot_system state") return super(PatchBootSystemState, self).should_exit() def _append_contents_to_file(self, contents, path): with open(path, 'a') as f: f.write(contents) def _modify_pivoted_oldroot(self): self.context.logger.log("Pivoted into oldroot successfully") scriptdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) patchesdir = 
os.path.join(scriptdir, '../encryptpatches') patchpath = os.path.join(patchesdir, 'rhel_68_dracut.patch') if not os.path.exists(patchpath): message = "Patch not found at path: {0}".format(patchpath) self.context.logger.log(message) raise Exception(message) else: self.context.logger.log("Patch found at path: {0}".format(patchpath)) self.disk_util.remove_mount_info('/') self.disk_util.append_mount_info('/dev/mapper/osencrypt', '/') self.command_executor.ExecuteInBash('patch -b -d /usr/share/dracut/modules.d/90crypt -p1 <{0}'.format(patchpath), True) self._append_contents_to_file('\nadd_drivers+=" fuse vfat nls_cp437 nls_iso8859-1"\n', '/etc/dracut.conf') self._append_contents_to_file('\nadd_dracutmodules+=" crypt"\n', '/etc/dracut.conf') self.command_executor.Execute('/sbin/dracut -f -v', True) with open("/boot/grub/grub.conf", "r") as f: contents = f.read() contents = re.sub(r"rd_NO_LUKS ", r"", contents) contents = re.sub(r"root=(.*?)\s", r"root=/dev/mapper/osencrypt rd_LUKS_UUID=osencrypt rdinitdebug ", contents) with open("/boot/grub/grub.conf", "w") as f: f.write(contents)
apache-2.0
3,507,540,613,643,794,000
41.006944
127
0.631344
false
mosbasik/buzhug
javasrc/lib/Jython/Lib/test/test_random.py
9
21793
#!/usr/bin/env python import unittest import random import time import pickle import warnings from math import log, exp, sqrt, pi from test import test_support class TestBasicOps(unittest.TestCase): # Superclass with tests common to all generators. # Subclasses must arrange for self.gen to retrieve the Random instance # to be tested. def randomlist(self, n): """Helper function to make a list of random numbers""" return [self.gen.random() for i in xrange(n)] def test_autoseed(self): self.gen.seed() state1 = self.gen.getstate() time.sleep(0.1) self.gen.seed() # diffent seeds at different times state2 = self.gen.getstate() self.assertNotEqual(state1, state2) def test_saverestore(self): N = 1000 self.gen.seed() state = self.gen.getstate() randseq = self.randomlist(N) self.gen.setstate(state) # should regenerate the same sequence self.assertEqual(randseq, self.randomlist(N)) def test_seedargs(self): for arg in [None, 0, 0L, 1, 1L, -1, -1L, 10**20, -(10**20), 3.14, 1+2j, 'a', tuple('abc')]: self.gen.seed(arg) for arg in [range(3), dict(one=1)]: self.assertRaises(TypeError, self.gen.seed, arg) self.assertRaises(TypeError, self.gen.seed, 1, 2) self.assertRaises(TypeError, type(self.gen), []) def test_jumpahead(self): self.gen.seed() state1 = self.gen.getstate() self.gen.jumpahead(100) state2 = self.gen.getstate() # s/b distinct from state1 self.assertNotEqual(state1, state2) self.gen.jumpahead(100) state3 = self.gen.getstate() # s/b distinct from state2 self.assertNotEqual(state2, state3) self.assertRaises(TypeError, self.gen.jumpahead) # needs an arg self.assertRaises(TypeError, self.gen.jumpahead, "ick") # wrong type self.assertRaises(TypeError, self.gen.jumpahead, 2.3) # wrong type self.assertRaises(TypeError, self.gen.jumpahead, 2, 3) # too many def test_sample(self): # For the entire allowable range of 0 <= k <= N, validate that # the sample is of the correct length and contains only unique items N = 100 population = xrange(N) for k in xrange(N+1): s = 
self.gen.sample(population, k) self.assertEqual(len(s), k) uniq = set(s) self.assertEqual(len(uniq), k) self.failUnless(uniq <= set(population)) self.assertEqual(self.gen.sample([], 0), []) # test edge case N==k==0 def test_sample_distribution(self): # For the entire allowable range of 0 <= k <= N, validate that # sample generates all possible permutations n = 5 pop = range(n) trials = 10000 # large num prevents false negatives without slowing normal case def factorial(n): return reduce(int.__mul__, xrange(1, n), 1) for k in xrange(n): expected = factorial(n) // factorial(n-k) perms = {} for i in xrange(trials): perms[tuple(self.gen.sample(pop, k))] = None if len(perms) == expected: break else: self.fail() def test_sample_inputs(self): # SF bug #801342 -- population can be any iterable defining __len__() self.gen.sample(set(range(20)), 2) self.gen.sample(range(20), 2) self.gen.sample(xrange(20), 2) self.gen.sample(str('abcdefghijklmnopqrst'), 2) self.gen.sample(tuple('abcdefghijklmnopqrst'), 2) def test_sample_on_dicts(self): self.gen.sample(dict.fromkeys('abcdefghijklmnopqrst'), 2) # SF bug #1460340 -- random.sample can raise KeyError a = dict.fromkeys(range(10)+range(10,100,2)+range(100,110)) self.gen.sample(a, 3) # A followup to bug #1460340: sampling from a dict could return # a subset of its keys or of its values, depending on the size of # the subset requested. N = 30 d = dict((i, complex(i, i)) for i in xrange(N)) for k in xrange(N+1): samp = self.gen.sample(d, k) # Verify that we got ints back (keys); the values are complex. for x in samp: self.assert_(type(x) is int) samp.sort() self.assertEqual(samp, range(N)) def test_gauss(self): # Ensure that the seed() method initializes all the hidden state. In # particular, through 2.2.1 it failed to reset a piece of state used # by (and only by) the .gauss() method. 
for seed in 1, 12, 123, 1234, 12345, 123456, 654321: self.gen.seed(seed) x1 = self.gen.random() y1 = self.gen.gauss(0, 1) self.gen.seed(seed) x2 = self.gen.random() y2 = self.gen.gauss(0, 1) self.assertEqual(x1, x2) self.assertEqual(y1, y2) def test_pickling(self): state = pickle.dumps(self.gen) origseq = [self.gen.random() for i in xrange(10)] newgen = pickle.loads(state) restoredseq = [newgen.random() for i in xrange(10)] self.assertEqual(origseq, restoredseq) class WichmannHill_TestBasicOps(TestBasicOps): gen = random.WichmannHill() def test_setstate_first_arg(self): self.assertRaises(ValueError, self.gen.setstate, (2, None, None)) def test_strong_jumpahead(self): # tests that jumpahead(n) semantics correspond to n calls to random() N = 1000 s = self.gen.getstate() self.gen.jumpahead(N) r1 = self.gen.random() # now do it the slow way self.gen.setstate(s) for i in xrange(N): self.gen.random() r2 = self.gen.random() self.assertEqual(r1, r2) def test_gauss_with_whseed(self): # Ensure that the seed() method initializes all the hidden state. In # particular, through 2.2.1 it failed to reset a piece of state used # by (and only by) the .gauss() method. 
for seed in 1, 12, 123, 1234, 12345, 123456, 654321: self.gen.whseed(seed) x1 = self.gen.random() y1 = self.gen.gauss(0, 1) self.gen.whseed(seed) x2 = self.gen.random() y2 = self.gen.gauss(0, 1) self.assertEqual(x1, x2) self.assertEqual(y1, y2) def test_bigrand(self): # Verify warnings are raised when randrange is too large for random() oldfilters = warnings.filters[:] warnings.filterwarnings("error", "Underlying random") self.assertRaises(UserWarning, self.gen.randrange, 2**60) warnings.filters[:] = oldfilters class SystemRandom_TestBasicOps(TestBasicOps): gen = random.SystemRandom() def test_autoseed(self): # Doesn't need to do anything except not fail self.gen.seed() def test_saverestore(self): self.assertRaises(NotImplementedError, self.gen.getstate) self.assertRaises(NotImplementedError, self.gen.setstate, None) def test_seedargs(self): # Doesn't need to do anything except not fail self.gen.seed(100) def test_jumpahead(self): # Doesn't need to do anything except not fail self.gen.jumpahead(100) def test_gauss(self): self.gen.gauss_next = None self.gen.seed(100) self.assertEqual(self.gen.gauss_next, None) def test_pickling(self): self.assertRaises(NotImplementedError, pickle.dumps, self.gen) def test_53_bits_per_float(self): # This should pass whenever a C double has 53 bit precision. span = 2 ** 53 cum = 0 for i in xrange(100): cum |= int(self.gen.random() * span) self.assertEqual(cum, span-1) def test_bigrand(self): # The randrange routine should build-up the required number of bits # in stages so that all bit positions are active. 
span = 2 ** 500 cum = 0 for i in xrange(100): r = self.gen.randrange(span) self.assert_(0 <= r < span) cum |= r self.assertEqual(cum, span-1) def test_bigrand_ranges(self): for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: start = self.gen.randrange(2 ** i) stop = self.gen.randrange(2 ** (i-2)) if stop <= start: return self.assert_(start <= self.gen.randrange(start, stop) < stop) def test_rangelimits(self): for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: self.assertEqual(set(range(start,stop)), set([self.gen.randrange(start,stop) for i in xrange(100)])) def test_genrandbits(self): # Verify ranges for k in xrange(1, 1000): self.assert_(0 <= self.gen.getrandbits(k) < 2**k) # Verify all bits active getbits = self.gen.getrandbits for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: cum = 0 for i in xrange(100): cum |= getbits(span) self.assertEqual(cum, 2**span-1) # Verify argument checking self.assertRaises(TypeError, self.gen.getrandbits) self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) self.assertRaises(ValueError, self.gen.getrandbits, 0) self.assertRaises(ValueError, self.gen.getrandbits, -1) self.assertRaises(TypeError, self.gen.getrandbits, 10.1) def test_randbelow_logic(self, _log=log, int=int): # check bitcount transition points: 2**i and 2**(i+1)-1 # show that: k = int(1.001 + _log(n, 2)) # is equal to or one greater than the number of bits in n for i in xrange(1, 1000): n = 1L << i # check an exact power of two numbits = i+1 k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) self.assert_(n == 2**(k-1)) n += n - 1 # check 1 below the next power of two k = int(1.00001 + _log(n, 2)) self.assert_(k in [numbits, numbits+1]) self.assert_(2**k > n > 2**(k-2)) n -= n >> 15 # check a little farther below the next power of two k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) # note the stronger assertion self.assert_(2**k > n > 2**(k-1)) # note the stronger assertion class 
MersenneTwister_TestBasicOps(TestBasicOps): gen = random.Random() def test_setstate_first_arg(self): self.assertRaises(ValueError, self.gen.setstate, (1, None, None)) def test_setstate_middle_arg(self): # Wrong type, s/b tuple self.assertRaises(TypeError, self.gen.setstate, (2, None, None)) # Wrong length, s/b 625 self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None)) # Wrong type, s/b tuple of 625 ints self.assertRaises(TypeError, self.gen.setstate, (2, ('a',)*625, None)) # Last element s/b an int also self.assertRaises(TypeError, self.gen.setstate, (2, (0,)*624+('a',), None)) def test_referenceImplementation(self): # Compare the python implementation with results from the original # code. Create 2000 53-bit precision random floats. Compare only # the last ten entries to show that the independent implementations # are tracking. Here is the main() function needed to create the # list of expected random numbers: # void main(void){ # int i; # unsigned long init[4]={61731, 24903, 614, 42143}, length=4; # init_by_array(init, length); # for (i=0; i<2000; i++) { # printf("%.15f ", genrand_res53()); # if (i%5==4) printf("\n"); # } # } expected = [0.45839803073713259, 0.86057815201978782, 0.92848331726782152, 0.35932681119782461, 0.081823493762449573, 0.14332226470169329, 0.084297823823520024, 0.53814864671831453, 0.089215024911993401, 0.78486196105372907] self.gen.seed(61731L + (24903L<<32) + (614L<<64) + (42143L<<96)) actual = self.randomlist(2000)[-10:] for a, e in zip(actual, expected): self.assertAlmostEqual(a,e,places=14) def test_strong_reference_implementation(self): # Like test_referenceImplementation, but checks for exact bit-level # equality. This should pass on any box where C double contains # at least 53 bits of precision (the underlying algorithm suffers # no rounding errors -- all results are exact). 
from math import ldexp expected = [0x0eab3258d2231fL, 0x1b89db315277a5L, 0x1db622a5518016L, 0x0b7f9af0d575bfL, 0x029e4c4db82240L, 0x04961892f5d673L, 0x02b291598e4589L, 0x11388382c15694L, 0x02dad977c9e1feL, 0x191d96d4d334c6L] self.gen.seed(61731L + (24903L<<32) + (614L<<64) + (42143L<<96)) actual = self.randomlist(2000)[-10:] for a, e in zip(actual, expected): self.assertEqual(long(ldexp(a, 53)), e) def test_long_seed(self): # This is most interesting to run in debug mode, just to make sure # nothing blows up. Under the covers, a dynamically resized array # is allocated, consuming space proportional to the number of bits # in the seed. Unfortunately, that's a quadratic-time algorithm, # so don't make this horribly big. seed = (1L << (10000 * 8)) - 1 # about 10K bytes self.gen.seed(seed) def test_53_bits_per_float(self): # This should pass whenever a C double has 53 bit precision. span = 2 ** 53 cum = 0 for i in xrange(100): cum |= int(self.gen.random() * span) self.assertEqual(cum, span-1) def test_bigrand(self): # The randrange routine should build-up the required number of bits # in stages so that all bit positions are active. 
span = 2 ** 500 cum = 0 for i in xrange(100): r = self.gen.randrange(span) self.assert_(0 <= r < span) cum |= r self.assertEqual(cum, span-1) def test_bigrand_ranges(self): for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: start = self.gen.randrange(2 ** i) stop = self.gen.randrange(2 ** (i-2)) if stop <= start: return self.assert_(start <= self.gen.randrange(start, stop) < stop) def test_rangelimits(self): for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: self.assertEqual(set(range(start,stop)), set([self.gen.randrange(start,stop) for i in xrange(100)])) def test_genrandbits(self): # Verify cross-platform repeatability self.gen.seed(1234567) self.assertEqual(self.gen.getrandbits(100), 97904845777343510404718956115L) # Verify ranges for k in xrange(1, 1000): self.assert_(0 <= self.gen.getrandbits(k) < 2**k) # Verify all bits active getbits = self.gen.getrandbits for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: cum = 0 for i in xrange(100): cum |= getbits(span) self.assertEqual(cum, 2**span-1) # Verify argument checking self.assertRaises(TypeError, self.gen.getrandbits) self.assertRaises(TypeError, self.gen.getrandbits, 'a') self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) self.assertRaises(ValueError, self.gen.getrandbits, 0) self.assertRaises(ValueError, self.gen.getrandbits, -1) def test_randbelow_logic(self, _log=log, int=int): # check bitcount transition points: 2**i and 2**(i+1)-1 # show that: k = int(1.001 + _log(n, 2)) # is equal to or one greater than the number of bits in n for i in xrange(1, 1000): n = 1L << i # check an exact power of two numbits = i+1 k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) self.assert_(n == 2**(k-1)) n += n - 1 # check 1 below the next power of two k = int(1.00001 + _log(n, 2)) self.assert_(k in [numbits, numbits+1]) self.assert_(2**k > n > 2**(k-2)) n -= n >> 15 # check a little farther below the next power of two k = int(1.00001 + _log(n, 2)) self.assertEqual(k, 
numbits) # note the stronger assertion self.assert_(2**k > n > 2**(k-1)) # note the stronger assertion def test_randrange_bug_1590891(self): start = 1000000000000 stop = -100000000000000000000 step = -200 x = self.gen.randrange(start, stop, step) self.assert_(stop < x <= start) self.assertEqual((x+stop)%step, 0) _gammacoeff = (0.9999999999995183, 676.5203681218835, -1259.139216722289, 771.3234287757674, -176.6150291498386, 12.50734324009056, -0.1385710331296526, 0.9934937113930748e-05, 0.1659470187408462e-06) def gamma(z, cof=_gammacoeff, g=7): z -= 1.0 sum = cof[0] for i in xrange(1,len(cof)): sum += cof[i] / (z+i) z += 0.5 return (z+g)**z / exp(z+g) * sqrt(2*pi) * sum class TestDistributions(unittest.TestCase): def test_zeroinputs(self): # Verify that distributions can handle a series of zero inputs' g = random.Random() x = [g.random() for i in xrange(50)] + [0.0]*5 g.random = x[:].pop; g.uniform(1,10) g.random = x[:].pop; g.paretovariate(1.0) g.random = x[:].pop; g.expovariate(1.0) g.random = x[:].pop; g.weibullvariate(1.0, 1.0) g.random = x[:].pop; g.normalvariate(0.0, 1.0) g.random = x[:].pop; g.gauss(0.0, 1.0) g.random = x[:].pop; g.lognormvariate(0.0, 1.0) g.random = x[:].pop; g.vonmisesvariate(0.0, 1.0) g.random = x[:].pop; g.gammavariate(0.01, 1.0) g.random = x[:].pop; g.gammavariate(1.0, 1.0) g.random = x[:].pop; g.gammavariate(200.0, 1.0) g.random = x[:].pop; g.betavariate(3.0, 3.0) def test_avg_std(self): # Use integration to test distribution average and standard deviation. 
# Only works for distributions which do not consume variates in pairs g = random.Random() N = 5000 x = [i/float(N) for i in xrange(1,N)] for variate, args, mu, sigmasqrd in [ (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12), (g.expovariate, (1.5,), 1/1.5, 1/1.5**2), (g.paretovariate, (5.0,), 5.0/(5.0-1), 5.0/((5.0-1)**2*(5.0-2))), (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0), gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]: g.random = x[:].pop y = [] for i in xrange(len(x)): try: y.append(variate(*args)) except IndexError: pass s1 = s2 = 0 for e in y: s1 += e s2 += (e - mu) ** 2 N = len(y) self.assertAlmostEqual(s1/N, mu, 2) self.assertAlmostEqual(s2/(N-1), sigmasqrd, 2) class TestModule(unittest.TestCase): def testMagicConstants(self): self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141) self.assertAlmostEqual(random.TWOPI, 6.28318530718) self.assertAlmostEqual(random.LOG4, 1.38629436111989) self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627) def test__all__(self): # tests validity but not completeness of the __all__ list self.failUnless(set(random.__all__) <= set(dir(random))) def test_random_subclass_with_kwargs(self): # SF bug #1486663 -- this used to erroneously raise a TypeError class Subclass(random.Random): def __init__(self, newarg=None): random.Random.__init__(self) Subclass(newarg=1) def test_main(verbose=None): testclasses = [WichmannHill_TestBasicOps, MersenneTwister_TestBasicOps, TestDistributions, TestModule] if test_support.is_jython: del MersenneTwister_TestBasicOps.test_genrandbits del MersenneTwister_TestBasicOps.test_referenceImplementation del MersenneTwister_TestBasicOps.test_setstate_middle_arg del MersenneTwister_TestBasicOps.test_strong_reference_implementation try: random.SystemRandom().random() except NotImplementedError: pass else: testclasses.append(SystemRandom_TestBasicOps) test_support.run_unittest(*testclasses) # verify reference counting import sys if verbose and hasattr(sys, "gettotalrefcount"): counts = [None] * 
5 for i in xrange(len(counts)): test_support.run_unittest(*testclasses) counts[i] = sys.gettotalrefcount() print counts if __name__ == "__main__": test_main(verbose=True)
bsd-3-clause
-9,055,497,666,217,425,000
38.055556
88
0.569862
false
CompassionCH/compassion-modules
crm_request/models/holiday_closure.py
3
1153
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2019 Compassion CH (http://www.compassion.ch) # @author: Quentin Gigon <[email protected]> # # The licence is in the file __manifest__.py # ############################################################################## from odoo import models, fields, api, _ from odoo.exceptions import ValidationError class HolidayClosure(models.Model): _name = "holiday.closure" _description = "Holiday closure" _inherit = "translatable.model" start_date = fields.Date(string="Start of holiday", required=True) end_date = fields.Date(string="End of holiday", required=True) holiday_name = fields.Char(string="Name of holiday", required=True, translate=True) @api.constrains('end_date', 'start_date') def _validate_dates(self): for h in self: if h.start_date and h.end_date and (h.start_date >= h.end_date): raise ValidationError(_( "Please choose an end_date greater than the start_date" ))
agpl-3.0
-6,084,692,401,030,151,000
36.193548
78
0.53686
false
mark-adams/django-waffle
waffle/tests/test_middleware.py
14
1578
from django.http import HttpResponse from django.test import RequestFactory from waffle.middleware import WaffleMiddleware get = RequestFactory().get('/foo') def test_set_cookies(): get.waffles = {'foo': [True, False], 'bar': [False, False]} resp = HttpResponse() assert 'dwf_foo' not in resp.cookies assert 'dwf_bar' not in resp.cookies resp = WaffleMiddleware().process_response(get, resp) assert 'dwf_foo' in resp.cookies assert 'dwf_bar' in resp.cookies assert 'True' == resp.cookies['dwf_foo'].value assert 'False' == resp.cookies['dwf_bar'].value def test_rollout_cookies(): get.waffles = {'foo': [True, True], 'bar': [False, True], 'baz': [True, False], 'qux': [False, False]} resp = HttpResponse() resp = WaffleMiddleware().process_response(get, resp) for k in get.waffles: cookie = 'dwf_%s' % k assert cookie in resp.cookies assert str(get.waffles[k][0]) == resp.cookies[cookie].value if get.waffles[k][1]: assert bool(resp.cookies[cookie]['max-age']) == get.waffles[k][0] else: assert resp.cookies[cookie]['max-age'] def test_testing_cookies(): get.waffles = {} get.waffle_tests = {'foo': True, 'bar': False} resp = HttpResponse() resp = WaffleMiddleware().process_response(get, resp) for k in get.waffle_tests: cookie = 'dwft_%s' % k assert str(get.waffle_tests[k]) == resp.cookies[cookie].value assert not resp.cookies[cookie]['max-age']
bsd-3-clause
-1,903,850,581,952,319,200
31.204082
77
0.6109
false
markovmodel/adaptivemd
setup.py
2
1953
from pprint import pprint import sys, os, shutil try: from setuptools import setup, Command, find_packages except ImportError as e: print("Needs setuptools to install") sys.exit(1) try: import yaml except ImportError as e: print("Needs pyyaml to install") sys.exit(1) # +----------------------------------------------------------------------------- # | CONSTRUCT PARAMETERS FOR setuptools # +----------------------------------------------------------------------------- def trunc_lines(s): parts = s.split('\n') while len(parts[0]) == 0: parts = parts[1:] while len(parts[-1]) == 0: parts = parts[:-1] parts = [part for part in parts if len(part) > 0] return ''.join(parts) def build_keyword_dictionary(prefs): keywords = {} for key in [ 'name', 'license', 'url', 'download_url', 'packages', 'package_dir', 'platforms', 'description', 'install_requires', 'long_description', 'package_data', 'include_package_data', 'scripts']: if key in prefs: keywords[key] = prefs[key] keywords['author'] = \ ', '.join(prefs['authors'][:-1]) + ' and ' + \ prefs['authors'][-1] keywords['author_email'] = \ ', '.join(prefs['emails']) keywords["package_dir"] = \ {package: '/'.join(package.split('.')) for package in prefs['packages']} keywords['long_description'] = \ trunc_lines(keywords['long_description']) output = "" first_tab = 40 second_tab = 60 for key in sorted(keywords.keys()): value = keywords[key] output += key.rjust(first_tab) + str(value).rjust(second_tab) + "" return keywords # load settings from setup.yaml with open('setup.yaml') as f: yaml_string = ''.join(f.readlines()) preferences = yaml.load(yaml_string) setup_args = build_keyword_dictionary(preferences) setup (**setup_args) # TODO reintroduce versioneer
lgpl-2.1
-2,117,176,535,561,625,900
23.721519
80
0.554019
false
bstroebl/QGIS
python/plugins/sextante/saga/SplitRGBBands.py
1
3473
# -*- coding: utf-8 -*- """ *************************************************************************** SplitRGBBands.py --------------------- Date : August 2012 Copyright : (C) 2012 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ from sextante.core.SextanteUtils import SextanteUtils from sextante.core.QGisLayers import QGisLayers from sextante.saga.SagaUtils import SagaUtils __author__ = 'Victor Olaya' __date__ = 'August 2012' __copyright__ = '(C) 2012, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from PyQt4 import QtGui from sextante.core.GeoAlgorithm import GeoAlgorithm from sextante.parameters.ParameterRaster import ParameterRaster from sextante.outputs.OutputRaster import OutputRaster import os class SplitRGBBands(GeoAlgorithm): INPUT = "INPUT" R = "R" G = "G" B = "B" def getIcon(self): return QtGui.QIcon(os.path.dirname(__file__) + "/../images/saga.png") def defineCharacteristics(self): self.name = "Split RGB bands" self.group = "Grid - Tools" self.addParameter(ParameterRaster(SplitRGBBands.INPUT, "Input layer", False)) self.addOutput(OutputRaster(SplitRGBBands.R, "Output R band layer")) self.addOutput(OutputRaster(SplitRGBBands.G, "Output G band layer")) self.addOutput(OutputRaster(SplitRGBBands.B, "Output B band layer")) def processAlgorithm(self, progress): #TODO:check correct num of bands input = self.getParameterValue(SplitRGBBands.INPUT) temp = SextanteUtils.getTempFilename(); r = self.getOutputValue(SplitRGBBands.R) g = self.getOutputValue(SplitRGBBands.G) b = 
self.getOutputValue(SplitRGBBands.B) commands = [] if SextanteUtils.isWindows(): commands.append("io_gdal 0 -GRIDS \"" + temp + "\" -FILES \"" + input+"\"") commands.append("io_gdal 1 -GRIDS \"" + temp + "_0001.sgrd\" -FORMAT 1 -TYPE 0 -FILE \"" + r + "\""); commands.append("io_gdal 1 -GRIDS \"" + temp + "_0002.sgrd\" -FORMAT 1 -TYPE 0 -FILE \"" + g + "\""); commands.append("io_gdal 1 -GRIDS \"" + temp + "_0003.sgrd\" -FORMAT 1 -TYPE 0 -FILE \"" + b + "\""); else: commands.append("libio_gdal 0 -GRIDS \"" + temp + "\" -FILES \"" + input + "\"") commands.append("libio_gdal 1 -GRIDS \"" + temp + "_0001.sgrd\" -FORMAT 1 -TYPE 0 -FILE \"" + r + "\""); commands.append("libio_gdal 1 -GRIDS \"" + temp + "_0002.sgrd\" -FORMAT 1 -TYPE 0 -FILE \"" + g + "\""); commands.append("libio_gdal 1 -GRIDS \"" + temp + "_0003.sgrd\" -FORMAT 1 -TYPE 0 -FILE \"" + b + "\""); SagaUtils.createSagaBatchJobFileFromSagaCommands(commands) SagaUtils.executeSaga(progress);
gpl-2.0
3,618,545,700,103,078,400
46.575342
116
0.544486
false
bioinformatics-ua/catalogue
emif/datatable/tests.py
12
1154
# -*- coding: utf-8 -*- # Copyright (C) 2014 Universidade de Aveiro, DETI/IEETA, Bioinformatics Group - http://bioinformatics.ua.pt/ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """ This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase class SimpleTest(TestCase): def test_basic_addition(self): """ Tests that 1 + 1 always equals 2. """ self.assertEqual(1 + 1, 2)
gpl-3.0
-4,801,321,234,140,725,000
35.0625
108
0.727036
false
pquentin/django
tests/forms_tests/tests/test_fields.py
1
77332
# -*- coding: utf-8 -*- """ ########## # Fields # ########## Each Field class does some sort of validation. Each Field has a clean() method, which either raises django.forms.ValidationError or returns the "clean" data -- usually a Unicode object, but, in some rare cases, a list. Each Field's __init__() takes at least these parameters: required -- Boolean that specifies whether the field is required. True by default. widget -- A Widget class, or instance of a Widget class, that should be used for this Field when displaying it. Each Field has a default Widget that it'll use if you don't specify this. In most cases, the default widget is TextInput. label -- A verbose name for this field, for use in displaying this field in a form. By default, Django will use a "pretty" version of the form field name, if the Field is part of a Form. initial -- A value to use in this Field's initial display. This value is *not* used as a fallback if data isn't given. Other than that, the Field subclasses have class-specific options for __init__(). For example, CharField has a max_length option. 
""" from __future__ import unicode_literals import datetime import os import pickle import re import uuid from decimal import Decimal from unittest import skipIf from django.core.files.uploadedfile import SimpleUploadedFile from django.forms import ( BooleanField, CharField, ChoiceField, ComboField, DateField, DateTimeField, DecimalField, DurationField, EmailField, Field, FileField, FilePathField, FloatField, Form, HiddenInput, ImageField, IntegerField, MultipleChoiceField, NullBooleanField, NumberInput, PasswordInput, RadioSelect, RegexField, SplitDateTimeField, Textarea, TextInput, TimeField, TypedChoiceField, TypedMultipleChoiceField, URLField, UUIDField, ValidationError, Widget, forms, ) from django.test import SimpleTestCase, ignore_warnings from django.utils import formats, six, translation from django.utils._os import upath from django.utils.deprecation import RemovedInDjango20Warning from django.utils.duration import duration_string try: from PIL import Image except ImportError: Image = None def fix_os_paths(x): if isinstance(x, six.string_types): return x.replace('\\', '/') elif isinstance(x, tuple): return tuple(fix_os_paths(list(x))) elif isinstance(x, list): return [fix_os_paths(y) for y in x] else: return x class FieldsTests(SimpleTestCase): def assertWidgetRendersTo(self, field, to): class _Form(Form): f = field self.assertHTMLEqual(str(_Form()['f']), to) def test_field_sets_widget_is_required(self): self.assertTrue(Field(required=True).widget.is_required) self.assertFalse(Field(required=False).widget.is_required) def test_cooperative_multiple_inheritance(self): class A(object): def __init__(self): self.class_a_var = True super(A, self).__init__() class ComplexField(Field, A): def __init__(self): super(ComplexField, self).__init__() f = ComplexField() self.assertTrue(f.class_a_var) # CharField ################################################################### def test_charfield_1(self): f = CharField() self.assertEqual('1', f.clean(1)) 
self.assertEqual('hello', f.clean('hello')) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertEqual('[1, 2, 3]', f.clean([1, 2, 3])) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, None) def test_charfield_2(self): f = CharField(required=False) self.assertEqual('1', f.clean(1)) self.assertEqual('hello', f.clean('hello')) self.assertEqual('', f.clean(None)) self.assertEqual('', f.clean('')) self.assertEqual('[1, 2, 3]', f.clean([1, 2, 3])) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, None) def test_charfield_3(self): f = CharField(max_length=10, required=False) self.assertEqual('12345', f.clean('12345')) self.assertEqual('1234567890', f.clean('1234567890')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 10 characters (it has 11).'", f.clean, '1234567890a') self.assertEqual(f.max_length, 10) self.assertEqual(f.min_length, None) def test_charfield_4(self): f = CharField(min_length=10, required=False) self.assertEqual('', f.clean('')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 10 characters (it has 5).'", f.clean, '12345') self.assertEqual('1234567890', f.clean('1234567890')) self.assertEqual('1234567890a', f.clean('1234567890a')) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, 10) def test_charfield_5(self): f = CharField(min_length=10, required=True) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 10 characters (it has 5).'", f.clean, '12345') self.assertEqual('1234567890', f.clean('1234567890')) self.assertEqual('1234567890a', f.clean('1234567890a')) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, 10) def test_charfield_length_not_int(self): """ Ensure that setting min_length or max_length 
to something that is not a number returns an exception. """ self.assertRaises(ValueError, CharField, min_length='a') self.assertRaises(ValueError, CharField, max_length='a') self.assertRaises(ValueError, CharField, 'a') def test_charfield_widget_attrs(self): """ Ensure that CharField.widget_attrs() always returns a dictionary. Refs #15912 """ # Return an empty dictionary if max_length is None f = CharField() self.assertEqual(f.widget_attrs(TextInput()), {}) self.assertEqual(f.widget_attrs(Textarea()), {}) # Otherwise, return a maxlength attribute equal to max_length f = CharField(max_length=10) self.assertEqual(f.widget_attrs(TextInput()), {'maxlength': '10'}) self.assertEqual(f.widget_attrs(PasswordInput()), {'maxlength': '10'}) self.assertEqual(f.widget_attrs(Textarea()), {'maxlength': '10'}) # IntegerField ################################################################ def test_integerfield_1(self): f = IntegerField() self.assertWidgetRendersTo(f, '<input type="number" name="f" id="id_f" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(1, f.clean('1')) self.assertIsInstance(f.clean('1'), int) self.assertEqual(23, f.clean('23')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 'a') self.assertEqual(42, f.clean(42)) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 3.14) self.assertEqual(1, f.clean('1 ')) self.assertEqual(1, f.clean(' 1')) self.assertEqual(1, f.clean(' 1 ')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1a') self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) def test_integerfield_2(self): f = IntegerField(required=False) self.assertIsNone(f.clean('')) self.assertEqual('None', repr(f.clean(''))) self.assertIsNone(f.clean(None)) self.assertEqual('None', repr(f.clean(None))) self.assertEqual(1, 
f.clean('1')) self.assertIsInstance(f.clean('1'), int) self.assertEqual(23, f.clean('23')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 'a') self.assertEqual(1, f.clean('1 ')) self.assertEqual(1, f.clean(' 1')) self.assertEqual(1, f.clean(' 1 ')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1a') self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) def test_integerfield_3(self): f = IntegerField(max_value=10) self.assertWidgetRendersTo(f, '<input max="10" type="number" name="f" id="id_f" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(1, f.clean(1)) self.assertEqual(10, f.clean(10)) self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 10.'", f.clean, 11) self.assertEqual(10, f.clean('10')) self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 10.'", f.clean, '11') self.assertEqual(f.max_value, 10) self.assertEqual(f.min_value, None) def test_integerfield_4(self): f = IntegerField(min_value=10) self.assertWidgetRendersTo(f, '<input id="id_f" type="number" name="f" min="10" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 10.'", f.clean, 1) self.assertEqual(10, f.clean(10)) self.assertEqual(11, f.clean(11)) self.assertEqual(10, f.clean('10')) self.assertEqual(11, f.clean('11')) self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, 10) def test_integerfield_5(self): f = IntegerField(min_value=10, max_value=20) self.assertWidgetRendersTo(f, '<input id="id_f" max="20" type="number" name="f" min="10" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 10.'", f.clean, 1) self.assertEqual(10, 
f.clean(10)) self.assertEqual(11, f.clean(11)) self.assertEqual(10, f.clean('10')) self.assertEqual(11, f.clean('11')) self.assertEqual(20, f.clean(20)) self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 20.'", f.clean, 21) self.assertEqual(f.max_value, 20) self.assertEqual(f.min_value, 10) def test_integerfield_localized(self): """ Make sure localized IntegerField's widget renders to a text input with no number input specific attributes. """ f1 = IntegerField(localize=True) self.assertWidgetRendersTo(f1, '<input id="id_f" name="f" type="text" />') def test_integerfield_subclass(self): """ Test that class-defined widget is not overwritten by __init__ (#22245). """ class MyIntegerField(IntegerField): widget = Textarea f = MyIntegerField() self.assertEqual(f.widget.__class__, Textarea) f = MyIntegerField(localize=True) self.assertEqual(f.widget.__class__, Textarea) # FloatField ################################################################## def test_floatfield_1(self): f = FloatField() self.assertWidgetRendersTo(f, '<input step="any" type="number" name="f" id="id_f" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(1.0, f.clean('1')) self.assertIsInstance(f.clean('1'), float) self.assertEqual(23.0, f.clean('23')) self.assertEqual(3.1400000000000001, f.clean('3.14')) self.assertEqual(3.1400000000000001, f.clean(3.14)) self.assertEqual(42.0, f.clean(42)) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'a') self.assertEqual(1.0, f.clean('1.0 ')) self.assertEqual(1.0, f.clean(' 1.0')) self.assertEqual(1.0, f.clean(' 1.0 ')) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '1.0a') self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'Infinity') 
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'NaN')
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '-Inf')

    def test_floatfield_2(self):
        # Optional field: empty string and None both clean to None.
        f = FloatField(required=False)
        self.assertIsNone(f.clean(''))
        self.assertIsNone(f.clean(None))
        self.assertEqual(1.0, f.clean('1'))
        self.assertEqual(f.max_value, None)
        self.assertEqual(f.min_value, None)

    def test_floatfield_3(self):
        # Bounds appear as min/max widget attributes and are enforced by clean().
        f = FloatField(max_value=1.5, min_value=0.5)
        self.assertWidgetRendersTo(f, '<input step="any" name="f" min="0.5" max="1.5" type="number" id="id_f" />')
        self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 1.5.'", f.clean, '1.6')
        self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 0.5.'", f.clean, '0.4')
        self.assertEqual(1.5, f.clean('1.5'))
        self.assertEqual(0.5, f.clean('0.5'))
        self.assertEqual(f.max_value, 1.5)
        self.assertEqual(f.min_value, 0.5)

    def test_floatfield_widget_attrs(self):
        # Explicit widget attrs override the field-derived step/min/max.
        f = FloatField(widget=NumberInput(attrs={'step': 0.01, 'max': 1.0, 'min': 0.0}))
        self.assertWidgetRendersTo(f, '<input step="0.01" name="f" min="0.0" max="1.0" type="number" id="id_f" />')

    def test_floatfield_localized(self):
        """
        Make sure localized FloatField's widget renders to a text input with
        no number input specific attributes.
        """
        f = FloatField(localize=True)
        self.assertWidgetRendersTo(f, '<input id="id_f" name="f" type="text" />')

    def test_floatfield_changed(self):
        f = FloatField()
        n = 4.35
        self.assertFalse(f.has_changed(n, '4.3500'))

        with translation.override('fr'), self.settings(USE_L10N=True):
            f = FloatField(localize=True)
            localized_n = formats.localize_input(n)  # -> '4,35' in French
            self.assertFalse(f.has_changed(n, localized_n))

    # DecimalField ################################################################

    def test_decimalfield_1(self):
        f = DecimalField(max_digits=4, decimal_places=2)
        # decimal_places=2 yields step="0.01" on the rendered number input.
        self.assertWidgetRendersTo(f, '<input id="id_f" step="0.01" type="number" name="f" />')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual(f.clean('1'), Decimal("1"))
        self.assertIsInstance(f.clean('1'), Decimal)
        self.assertEqual(f.clean('23'), Decimal("23"))
        self.assertEqual(f.clean('3.14'), Decimal("3.14"))
        self.assertEqual(f.clean(3.14), Decimal("3.14"))
        self.assertEqual(f.clean(Decimal('3.14')), Decimal("3.14"))
        # NaN/Inf and non-numeric text (including non-ASCII letters) are rejected.
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'NaN')
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'Inf')
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '-Inf')
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'a')
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'łąść')
        self.assertEqual(f.clean('1.0 '), Decimal("1.0"))
        self.assertEqual(f.clean(' 1.0'), Decimal("1.0"))
        self.assertEqual(f.clean(' 1.0 '), Decimal("1.0"))
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '1.0a')
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 4 digits in total.'", f.clean, '123.45')
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 decimal places.'", f.clean, '1.234')
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 digits before the decimal point.'", f.clean, '123.4')
        # Negative sign does not count towards max_digits.
        self.assertEqual(f.clean('-12.34'), Decimal("-12.34"))
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 4 digits in total.'", f.clean, '-123.45')
        self.assertEqual(f.clean('-.12'), Decimal("-0.12"))
        self.assertEqual(f.clean('-00.12'), Decimal("-0.12"))
        self.assertEqual(f.clean('-000.12'), Decimal("-0.12"))
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 decimal places.'", f.clean, '-000.123')
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 4 digits in total.'", f.clean, '-000.12345')
        self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '--0.12')
        self.assertEqual(f.max_digits, 4)
        self.assertEqual(f.decimal_places, 2)
        self.assertEqual(f.max_value, None)
        self.assertEqual(f.min_value, None)

    def test_decimalfield_2(self):
        # Optional field: empty values clean to None.
        f = DecimalField(max_digits=4, decimal_places=2, required=False)
        self.assertIsNone(f.clean(''))
        self.assertIsNone(f.clean(None))
        self.assertEqual(f.clean('1'), Decimal("1"))
        self.assertEqual(f.max_digits, 4)
        self.assertEqual(f.decimal_places, 2)
        self.assertEqual(f.max_value, None)
        self.assertEqual(f.min_value, None)

    def test_decimalfield_3(self):
        # Digit limits combined with min_value/max_value bounds.
        f = DecimalField(max_digits=4, decimal_places=2, max_value=Decimal('1.5'), min_value=Decimal('0.5'))
        self.assertWidgetRendersTo(f, '<input step="0.01" name="f" min="0.5" max="1.5" type="number" id="id_f" />')
        self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 1.5.'", f.clean, '1.6')
        self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 0.5.'", f.clean, '0.4')
        self.assertEqual(f.clean('1.5'), Decimal("1.5"))
        self.assertEqual(f.clean('0.5'), Decimal("0.5"))
        self.assertEqual(f.clean('.5'), Decimal("0.5"))
        self.assertEqual(f.clean('00.50'), Decimal("0.50"))
        self.assertEqual(f.max_digits, 4)
        self.assertEqual(f.decimal_places, 2)
        self.assertEqual(f.max_value, Decimal('1.5'))
        self.assertEqual(f.min_value, Decimal('0.5'))

    def test_decimalfield_4(self):
        # decimal_places can be constrained without constraining max_digits.
        f = DecimalField(decimal_places=2)
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 decimal places.'", f.clean, '0.00000001')

    def test_decimalfield_5(self):
        f = DecimalField(max_digits=3)
        # Leading whole zeros "collapse" to one digit.
        self.assertEqual(f.clean('0000000.10'), Decimal("0.1"))
        # But a leading 0 before the . doesn't count towards max_digits
        self.assertEqual(f.clean('0000000.100'), Decimal("0.100"))
        # Only leading whole zeros "collapse" to one digit.
        self.assertEqual(f.clean('000000.02'), Decimal('0.02'))
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 3 digits in total.'", f.clean, '000000.0002')
        self.assertEqual(f.clean('.002'), Decimal("0.002"))

    def test_decimalfield_6(self):
        # max_digits == decimal_places leaves zero digits before the point.
        f = DecimalField(max_digits=2, decimal_places=2)
        self.assertEqual(f.clean('.01'), Decimal(".01"))
        self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 0 digits before the decimal point.'", f.clean, '1.1')

    def test_decimalfield_widget_attrs(self):
        # The NumberInput step attribute is derived from decimal_places.
        f = DecimalField(max_digits=6, decimal_places=2)
        self.assertEqual(f.widget_attrs(Widget()), {})
        self.assertEqual(f.widget_attrs(NumberInput()), {'step': '0.01'})
        f = DecimalField(max_digits=10, decimal_places=0)
        self.assertEqual(f.widget_attrs(NumberInput()), {'step': '1'})
        f = DecimalField(max_digits=19, decimal_places=19)
        self.assertEqual(f.widget_attrs(NumberInput()), {'step': '1e-19'})
        # Without decimal_places the step is unconstrained.
        f = DecimalField(max_digits=20)
        self.assertEqual(f.widget_attrs(NumberInput()), {'step': 'any'})
        f = DecimalField(max_digits=6, widget=NumberInput(attrs={'step': '0.01'}))
        self.assertWidgetRendersTo(f, '<input step="0.01" name="f" type="number" id="id_f" />')

    def test_decimalfield_localized(self):
        """
        Make sure localized DecimalField's widget renders to a text input with
        no number input specific attributes.
""" f = DecimalField(localize=True) self.assertWidgetRendersTo(f, '<input id="id_f" name="f" type="text" />') def test_decimalfield_changed(self): f = DecimalField(max_digits=2, decimal_places=2) d = Decimal("0.1") self.assertFalse(f.has_changed(d, '0.10')) self.assertTrue(f.has_changed(d, '0.101')) with translation.override('fr'), self.settings(USE_L10N=True): f = DecimalField(max_digits=2, decimal_places=2, localize=True) localized_d = formats.localize_input(d) # -> '0,1' in French self.assertFalse(f.has_changed(d, localized_d)) # DateField ################################################################### def test_datefield_1(self): f = DateField() self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.date(2006, 10, 25))) self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30))) self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59))) self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200))) self.assertEqual(datetime.date(2006, 10, 25), f.clean('2006-10-25')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('10/25/2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('10/25/06')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('Oct 25 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('October 25 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('October 25, 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('25 October 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('25 October, 2006')) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '2006-4-31') self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '200a-10-25') self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '25/10/06') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) def test_datefield_2(self): 
        f = DateField(required=False)
        self.assertIsNone(f.clean(None))
        self.assertEqual('None', repr(f.clean(None)))
        self.assertIsNone(f.clean(''))
        self.assertEqual('None', repr(f.clean('')))

    def test_datefield_3(self):
        # Custom input_formats replace (not extend) the defaults.
        f = DateField(input_formats=['%Y %m %d'])
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.date(2006, 10, 25)))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean('2006 10 25'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '2006-10-25')
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '10/25/2006')
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '10/25/06')

    def test_datefield_4(self):
        # Test whitespace stripping behavior (#5714)
        f = DateField()
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 10/25/2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 10/25/06 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' Oct 25 2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' October 25 2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' October 25, 2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 25 October 2006 '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ' ')

    def test_datefield_5(self):
        # Test null bytes (#18982)
        f = DateField()
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, 'a\x00b')

    @ignore_warnings(category=RemovedInDjango20Warning)  # for _has_changed
    def test_datefield_changed(self):
        # NOTE(review): "format" shadows the builtin of the same name; harmless
        # in this local scope.
        format = '%d/%m/%Y'
        f = DateField(input_formats=[format])
        d = datetime.date(2007, 9, 17)
        self.assertFalse(f.has_changed(d, '17/09/2007'))
        # Test for deprecated behavior _has_changed
        self.assertFalse(f._has_changed(d, '17/09/2007'))

    def test_datefield_strptime(self):
        """Test that field.strptime doesn't raise an UnicodeEncodeError (#16123)"""
        f = DateField()
        try:
            # Non-ASCII month name ('мая') with a non-matching format string.
            f.strptime('31 мая 2011', '%d-%b-%y')
        except Exception as e:
            # assertIsInstance or assertRaises cannot be used because UnicodeEncodeError
            # is a subclass of ValueError
            self.assertEqual(e.__class__, ValueError)

    # TimeField ###################################################################

    def test_timefield_1(self):
        f = TimeField()
        self.assertEqual(datetime.time(14, 25), f.clean(datetime.time(14, 25)))
        self.assertEqual(datetime.time(14, 25, 59), f.clean(datetime.time(14, 25, 59)))
        self.assertEqual(datetime.time(14, 25), f.clean('14:25'))
        self.assertEqual(datetime.time(14, 25, 59), f.clean('14:25:59'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, 'hello')
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, '1:24 p.m.')

    def test_timefield_2(self):
        # 12-hour custom format: AM/PM determines the cleaned 24-hour value.
        f = TimeField(input_formats=['%I:%M %p'])
        self.assertEqual(datetime.time(14, 25), f.clean(datetime.time(14, 25)))
        self.assertEqual(datetime.time(14, 25, 59), f.clean(datetime.time(14, 25, 59)))
        self.assertEqual(datetime.time(4, 25), f.clean('4:25 AM'))
        self.assertEqual(datetime.time(16, 25), f.clean('4:25 PM'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, '14:30:45')

    def test_timefield_3(self):
        f = TimeField()
        # Test whitespace stripping behavior (#5714)
        self.assertEqual(datetime.time(14, 25), f.clean(' 14:25 '))
        self.assertEqual(datetime.time(14, 25, 59), f.clean(' 14:25:59 '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ' ')

    def test_timefield_changed(self):
        t1 = datetime.time(12, 51, 34, 482548)
        t2 = datetime.time(12, 51)
        f = TimeField(input_formats=['%H:%M', '%H:%M %p'])
        self.assertTrue(f.has_changed(t1, '12:51'))
        self.assertFalse(f.has_changed(t2, '12:51'))
        self.assertFalse(f.has_changed(t2, '12:51 PM'))

    # DateTimeField ###############################################################

    def test_datetimefield_1(self):
        # Default formats: ISO and US spellings, with optional seconds and
        # fractional seconds; bare dates clean to midnight.
        f = DateTimeField()
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(datetime.date(2006, 10, 25)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59, 200), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006-10-25 14:30:45.000200'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006-10-25 14:30:45.0002'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('2006-10-25 14:30:45'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006-10-25 14:30:00'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006-10-25 14:30'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('2006-10-25'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('10/25/2006 14:30:45.000200'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('10/25/2006 14:30:45'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/2006 14:30:00'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/2006 14:30'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('10/25/2006'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('10/25/06 14:30:45.000200'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('10/25/06 14:30:45'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/06 14:30:00'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/06 14:30'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('10/25/06'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, 'hello')
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, '2006-10-25 4:30 p.m.')

    def test_datetimefield_2(self):
        # Custom input_formats replace the defaults entirely.
        f = DateTimeField(input_formats=['%Y %m %d %I:%M %p'])
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(datetime.date(2006, 10, 25)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59, 200), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006 10 25 2:30 PM'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, '2006-10-25 14:30:45')

    def test_datetimefield_3(self):
        f = DateTimeField(required=False)
        self.assertIsNone(f.clean(None))
        self.assertEqual('None', repr(f.clean(None)))
        self.assertIsNone(f.clean(''))
        self.assertEqual('None', repr(f.clean('')))

    def test_datetimefield_4(self):
        f = DateTimeField()
        # Test whitespace stripping behavior (#5714)
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 2006-10-25 14:30:45 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 2006-10-25 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 10/25/2006 14:30:45 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(' 10/25/2006 14:30 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 10/25/2006 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 10/25/06 14:30:45 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 10/25/06 '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, ' ')

    def test_datetimefield_5(self):
        f = DateTimeField(input_formats=['%Y.%m.%d %H:%M:%S.%f'])
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006.10.25 14:30:45.0002'))

    def test_datetimefield_changed(self):
        # NOTE(review): "format" shadows the builtin of the same name; harmless
        # in this local scope.
        format = '%Y %m %d %I:%M %p'
        f = DateTimeField(input_formats=[format])
        d = datetime.datetime(2006, 9, 17, 14, 30, 0)
        self.assertFalse(f.has_changed(d, '2006 09 17 2:30 PM'))

    # DurationField ###########################################################

    def test_durationfield_1(self):
        # Accepted spellings: seconds, MM:SS, HH:MM:SS and "D HH:MM:SS.f".
        f = DurationField()
        self.assertEqual(datetime.timedelta(seconds=30), f.clean('30'))
        self.assertEqual(
            datetime.timedelta(minutes=15, seconds=30),
            f.clean('15:30')
        )
        self.assertEqual(
            datetime.timedelta(hours=1, minutes=15, seconds=30),
            f.clean('1:15:30')
        )
        self.assertEqual(
            datetime.timedelta(
                days=1, hours=1, minutes=15, seconds=30, milliseconds=300),
            f.clean('1 1:15:30.3')
        )

    def test_durationfield_2(self):
        # Initial timedelta values render in HH:MM:SS form.
        class DurationForm(Form):
            duration = DurationField(initial=datetime.timedelta(hours=1))

        f = DurationForm()
        self.assertHTMLEqual(
            '<input id="id_duration" type="text" name="duration" value="01:00:00">',
            str(f['duration'])
        )

    def test_durationfield_prepare_value(self):
        # prepare_value() serializes timedeltas and passes other values through.
        field = DurationField()
        td = datetime.timedelta(minutes=15, seconds=30)
        self.assertEqual(field.prepare_value(td), duration_string(td))
        self.assertEqual(field.prepare_value('arbitrary'), 'arbitrary')
        self.assertIsNone(field.prepare_value(None))

    # RegexField ##################################################################

    def test_regexfield_1(self):
        # Whole-string pattern: surrounding whitespace makes the value invalid.
        f = RegexField('^[0-9][A-F][0-9]$')
        self.assertEqual('2A2', f.clean('2A2'))
        self.assertEqual('3F3', f.clean('3F3'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, ' 2A2')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '2A2 ')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')

    def test_regexfield_2(self):
        f = RegexField('^[0-9][A-F][0-9]$', required=False)
        self.assertEqual('2A2', f.clean('2A2'))
        self.assertEqual('3F3', f.clean('3F3'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
        self.assertEqual('', f.clean(''))

    def test_regexfield_3(self):
        # A pre-compiled pattern object is accepted as well as a string.
        f = RegexField(re.compile('^[0-9][A-F][0-9]$'))
        self.assertEqual('2A2', f.clean('2A2'))
        self.assertEqual('3F3', f.clean('3F3'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, ' 2A2')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '2A2 ')

    @ignore_warnings(category=RemovedInDjango20Warning)  # error_message deprecation
    def test_regexfield_4(self):
        # Deprecated error_message kwarg still overrides the default message.
        f = RegexField('^[0-9][0-9][0-9][0-9]$', error_message='Enter a four-digit number.')
        self.assertEqual('1234', f.clean('1234'))
        self.assertRaisesMessage(ValidationError, "'Enter a four-digit number.'", f.clean, '123')
        self.assertRaisesMessage(ValidationError, "'Enter a four-digit number.'", f.clean, 'abcd')

    def test_regexfield_5(self):
        # min_length/max_length validators run alongside the regex validator;
        # a too-short non-match raises both messages at once.
        f = RegexField('^[0-9]+$', min_length=5, max_length=10)
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 5 characters (it has 3).'", f.clean, '123')
        six.assertRaisesRegex(self, ValidationError, "'Ensure this value has at least 5 characters \(it has 3\)\.', u?'Enter a valid value\.'", f.clean, 'abc')
        self.assertEqual('12345', f.clean('12345'))
        self.assertEqual('1234567890', f.clean('1234567890'))
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 10 characters (it has 11).'", f.clean, '12345678901')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '12345a')

    def test_regexfield_6(self):
        """
        Ensure that it works with unicode characters.
        Refs #.
        """
        # NOTE(review): non-raw pattern string; '\w' happens to survive, but
        # r'^\w+$' would be the conventional spelling.
        f = RegexField('^\w+$')
        self.assertEqual('éèøçÎÎ你好', f.clean('éèøçÎÎ你好'))

    def test_change_regex_after_init(self):
        # Assigning to .regex after construction swaps the active pattern.
        f = RegexField('^[a-z]+$')
        f.regex = '^[0-9]+$'
        self.assertEqual('1234', f.clean('1234'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, 'abcd')

    # EmailField ##################################################################
    # See also validators tests for validate_email specific tests

    def test_emailfield_1(self):
        # NOTE(review): the '[email protected]' literals below look like an
        # extraction-time redaction of the original addresses — verify against
        # the upstream source before relying on these exact strings.
        f = EmailField()
        self.assertWidgetRendersTo(f, '<input type="email" name="f" id="id_f" />')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'foo')
        self.assertEqual('[email protected]\xe4\xf6\xfc\xdfabc.part.com', f.clean('[email protected]äöüßabc.part.com'))

    def test_email_regexp_for_performance(self):
        f = EmailField()
        # Check for runaway regex security problem. This will take for-freeking-ever
        # if the security fix isn't in place.
        addr = '[email protected]'
        self.assertEqual(addr, f.clean(addr))

    def test_emailfield_not_required(self):
        # NOTE(review): '[email protected]' literals appear redacted by the
        # extraction — verify against the upstream source.
        f = EmailField(required=False)
        self.assertEqual('', f.clean(''))
        self.assertEqual('', f.clean(None))
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        # Surrounding whitespace (including tabs) is stripped.
        self.assertEqual('[email protected]', f.clean(' [email protected] \t \t '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'foo')

    def test_emailfield_min_max_length(self):
        f = EmailField(min_length=10, max_length=15)
        self.assertWidgetRendersTo(f, '<input id="id_f" type="email" name="f" maxlength="15" />')
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 10 characters (it has 9).'", f.clean, '[email protected]')
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 15 characters (it has 20).'", f.clean, '[email protected]')

    # FileField ##################################################################

    def test_filefield_1(self):
        # clean(data, initial): empty data falls back to the initial filename;
        # non-file data and empty uploads are rejected.
        f = FileField()
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '', '')
        self.assertEqual('files/test1.pdf', f.clean('', 'files/test1.pdf'))
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None, '')
        self.assertEqual('files/test2.pdf', f.clean(None, 'files/test2.pdf'))
        self.assertRaisesMessage(ValidationError, "'No file was submitted. Check the encoding type on the form.'", f.clean, SimpleUploadedFile('', b''))
        self.assertRaisesMessage(ValidationError, "'No file was submitted. Check the encoding type on the form.'", f.clean, SimpleUploadedFile('', b''), '')
        self.assertEqual('files/test3.pdf', f.clean(None, 'files/test3.pdf'))
        self.assertRaisesMessage(ValidationError, "'No file was submitted. Check the encoding type on the form.'", f.clean, 'some content that is not a file')
        self.assertRaisesMessage(ValidationError, "'The submitted file is empty.'", f.clean, SimpleUploadedFile('name', None))
        self.assertRaisesMessage(ValidationError, "'The submitted file is empty.'", f.clean, SimpleUploadedFile('name', b''))
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'))))
        # Non-ASCII filename and content are accepted.
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह'.encode('utf-8')))))
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'), 'files/test4.pdf')))

    def test_filefield_2(self):
        # max_length limits the uploaded *filename* length.
        f = FileField(max_length=5)
        self.assertRaisesMessage(ValidationError, "'Ensure this filename has at most 5 characters (it has 18).'", f.clean, SimpleUploadedFile('test_maxlength.txt', b'hello world'))
        self.assertEqual('files/test1.pdf', f.clean('', 'files/test1.pdf'))
        self.assertEqual('files/test2.pdf', f.clean(None, 'files/test2.pdf'))
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'))))

    def test_filefield_3(self):
        # allow_empty_file=True lets a zero-byte upload through.
        f = FileField(allow_empty_file=True)
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b''))))

    def test_filefield_changed(self):
        '''
        Test for the behavior of has_changed for FileField. The value of data will
        more than likely come from request.FILES. The value of initial data will
        likely be a filename stored in the database. Since its value is of no use to
        a FileField it is ignored.
        '''
        f = FileField()

        # No file was uploaded and no initial data.
        self.assertFalse(f.has_changed('', None))

        # A file was uploaded and no initial data.
        self.assertTrue(f.has_changed('', {'filename': 'resume.txt', 'content': 'My resume'}))

        # A file was not uploaded, but there is initial data
        self.assertFalse(f.has_changed('resume.txt', None))

        # A file was uploaded and there is initial data (file identity is not dealt
        # with here)
        self.assertTrue(f.has_changed('resume.txt', {'filename': 'resume.txt', 'content': 'My resume'}))

    # ImageField ##################################################################

    @skipIf(Image is None, "Pillow is required to test ImageField")
    def test_imagefield_annotate_with_image_after_clean(self):
        # clean() should sniff the real image format and correct a bogus
        # client-supplied content_type.
        f = ImageField()

        img_path = os.path.dirname(upath(__file__)) + '/filepath_test_files/1x1.png'
        with open(img_path, 'rb') as img_file:
            img_data = img_file.read()

        img_file = SimpleUploadedFile('1x1.png', img_data)
        img_file.content_type = 'text/plain'

        uploaded_file = f.clean(img_file)

        self.assertEqual('PNG', uploaded_file.image.format)
        self.assertEqual('image/png', uploaded_file.content_type)

    @skipIf(Image is None, "Pillow is required to test ImageField")
    def test_imagefield_annotate_with_bitmap_image_after_clean(self):
        """
        This also tests the situation when Pillow doesn't detect the MIME type
        of the image (#24948).
        """
        f = ImageField()

        img_path = os.path.dirname(upath(__file__)) + '/filepath_test_files/1x1.bmp'
        with open(img_path, 'rb') as img_file:
            img_data = img_file.read()

        img_file = SimpleUploadedFile('1x1.bmp', img_data)
        img_file.content_type = 'text/plain'

        uploaded_file = f.clean(img_file)

        # BMP is detected, but no MIME type is available for it.
        self.assertEqual('BMP', uploaded_file.image.format)
        self.assertIsNone(uploaded_file.content_type)

    # URLField ##################################################################

    def test_urlfield_1(self):
        # Scheme-less input is normalized with an http:// prefix; malformed
        # hosts/labels are rejected.
        f = URLField()
        self.assertWidgetRendersTo(f, '<input type="url" name="f" id="id_f" />')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual('http://localhost', f.clean('http://localhost'))
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertEqual('http://example.com.', f.clean('http://example.com.'))
        self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
        self.assertEqual('http://www.example.com:8000/test', f.clean('http://www.example.com:8000/test'))
        self.assertEqual('http://valid-with-hyphens.com', f.clean('valid-with-hyphens.com'))
        self.assertEqual('http://subdomain.domain.com', f.clean('subdomain.domain.com'))
        self.assertEqual('http://200.8.9.10', f.clean('http://200.8.9.10'))
        self.assertEqual('http://200.8.9.10:8000/test', f.clean('http://200.8.9.10:8000/test'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'foo')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'com.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, '.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://.com')
        # Labels may not start or end with a hyphen.
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://invalid-.com')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://-invalid.com')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://inv-.alid-.com')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://inv-.-alid.com')
        self.assertEqual('http://valid-----hyphens.com', f.clean('http://valid-----hyphens.com'))
        self.assertEqual('http://some.idn.xyz\xe4\xf6\xfc\xdfabc.domain.com:123/blah', f.clean('http://some.idn.xyzäöüßabc.domain.com:123/blah'))
        self.assertEqual('http://www.example.com/s/http://code.djangoproject.com/ticket/13804', f.clean('www.example.com/s/http://code.djangoproject.com/ticket/13804'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, '[a')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://[a')

    def test_url_regex_ticket11198(self):
        f = URLField()
        # hangs "forever" if catastrophic backtracking in ticket:#11198 not fixed
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 200,))

        # a second test, to make sure the problem is really addressed, even on
        # domains that don't fail the domain label length check in the regex
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 60,))

    def test_urlfield_2(self):
        f = URLField(required=False)
        self.assertEqual('', f.clean(''))
        self.assertEqual('', f.clean(None))
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'foo')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://.com')

    def test_urlfield_5(self):
        # Length validators apply to the URL string itself.
        f = URLField(min_length=15, max_length=20)
        self.assertWidgetRendersTo(f, '<input id="id_f" type="url" name="f" maxlength="20" />')
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 15 characters (it has 12).'", f.clean, 'http://f.com')
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 37).'", f.clean, 'http://abcdefghijklmnopqrstuvwxyz.com')

    def test_urlfield_6(self):
        # Scheme normalization: bare domains get http://, explicit schemes kept.
        f = URLField(required=False)
        self.assertEqual('http://example.com', f.clean('example.com'))
        self.assertEqual('', f.clean(''))
        self.assertEqual('https://example.com', f.clean('https://example.com'))

    def test_urlfield_7(self):
        f = URLField()
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertEqual('http://example.com/test', f.clean('http://example.com/test'))
        self.assertEqual('http://example.com?some_param=some_value', f.clean('http://example.com?some_param=some_value'))

    def test_urlfield_9(self):
        f = URLField()
        urls = (
            'http://עברית.idn.icann.org/',
            'http://sãopaulo.com/',
            'http://sãopaulo.com.br/',
            'http://пример.испытание/',
            'http://مثال.إختبار/',
            'http://例子.测试/',
            'http://例子.測試/',
            'http://उदाहरण.परीक्षा/',
            'http://例え.テスト/',
            'http://مثال.آزمایشی/',
            'http://실례.테스트/',
            'http://العربية.idn.icann.org/',
        )
        for url in urls:
            # Valid IDN
            self.assertEqual(url, f.clean(url))

    def test_urlfield_10(self):
        """Test URLField correctly validates IPv6 (#18779)."""
        f = URLField()
        urls = (
            'http://[12:34::3a53]/',
            'http://[a34:9238::]:8080/',
        )
        for url in urls:
            self.assertEqual(url, f.clean(url))

    def test_urlfield_not_string(self):
        f = URLField(required=False)
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 23) # BooleanField ################################################################ def test_booleanfield_1(self): f = BooleanField() self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(True, f.clean(True)) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, False) self.assertEqual(True, f.clean(1)) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, 0) self.assertEqual(True, f.clean('Django rocks')) self.assertEqual(True, f.clean('True')) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, 'False') def test_booleanfield_2(self): f = BooleanField(required=False) self.assertEqual(False, f.clean('')) self.assertEqual(False, f.clean(None)) self.assertEqual(True, f.clean(True)) self.assertEqual(False, f.clean(False)) self.assertEqual(True, f.clean(1)) self.assertEqual(False, f.clean(0)) self.assertEqual(True, f.clean('1')) self.assertEqual(False, f.clean('0')) self.assertEqual(True, f.clean('Django rocks')) self.assertEqual(False, f.clean('False')) self.assertEqual(False, f.clean('false')) self.assertEqual(False, f.clean('FaLsE')) def test_boolean_picklable(self): self.assertIsInstance(pickle.loads(pickle.dumps(BooleanField())), BooleanField) def test_booleanfield_changed(self): f = BooleanField() self.assertFalse(f.has_changed(None, None)) self.assertFalse(f.has_changed(None, '')) self.assertFalse(f.has_changed('', None)) self.assertFalse(f.has_changed('', '')) self.assertTrue(f.has_changed(False, 'on')) self.assertFalse(f.has_changed(True, 'on')) self.assertTrue(f.has_changed(True, '')) # Initial value may have mutated to a string due to show_hidden_initial (#19537) self.assertTrue(f.has_changed('False', 'on')) # ChoiceField 
################################################################# def test_choicefield_1(self): f = ChoiceField(choices=[('1', 'One'), ('2', 'Two')]) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual('1', f.clean(1)) self.assertEqual('1', f.clean('1')) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, '3') def test_choicefield_2(self): f = ChoiceField(choices=[('1', 'One'), ('2', 'Two')], required=False) self.assertEqual('', f.clean('')) self.assertEqual('', f.clean(None)) self.assertEqual('1', f.clean(1)) self.assertEqual('1', f.clean('1')) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, '3') def test_choicefield_3(self): f = ChoiceField(choices=[('J', 'John'), ('P', 'Paul')]) self.assertEqual('J', f.clean('J')) self.assertRaisesMessage(ValidationError, "'Select a valid choice. John is not one of the available choices.'", f.clean, 'John') def test_choicefield_4(self): f = ChoiceField(choices=[('Numbers', (('1', 'One'), ('2', 'Two'))), ('Letters', (('3', 'A'), ('4', 'B'))), ('5', 'Other')]) self.assertEqual('1', f.clean(1)) self.assertEqual('1', f.clean('1')) self.assertEqual('3', f.clean(3)) self.assertEqual('3', f.clean('3')) self.assertEqual('5', f.clean(5)) self.assertEqual('5', f.clean('5')) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
6 is not one of the available choices.'", f.clean, '6') def test_choicefield_callable(self): choices = lambda: [('J', 'John'), ('P', 'Paul')] f = ChoiceField(choices=choices) self.assertEqual('J', f.clean('J')) def test_choicefield_callable_may_evaluate_to_different_values(self): choices = [] def choices_as_callable(): return choices class ChoiceFieldForm(Form): choicefield = ChoiceField(choices=choices_as_callable) choices = [('J', 'John')] form = ChoiceFieldForm() self.assertEqual([('J', 'John')], list(form.fields['choicefield'].choices)) choices = [('P', 'Paul')] form = ChoiceFieldForm() self.assertEqual([('P', 'Paul')], list(form.fields['choicefield'].choices)) # TypedChoiceField ############################################################ # TypedChoiceField is just like ChoiceField, except that coerced types will # be returned: def test_typedchoicefield_1(self): f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int) self.assertEqual(1, f.clean('1')) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 2 is not one of the available choices.'", f.clean, '2') def test_typedchoicefield_2(self): # Different coercion, same validation. f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=float) self.assertEqual(1.0, f.clean('1')) def test_typedchoicefield_3(self): # This can also cause weirdness: be careful (bool(-1) == True, remember) f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=bool) self.assertEqual(True, f.clean('-1')) def test_typedchoicefield_4(self): # Even more weirdness: if you have a valid choice but your coercion function # can't coerce, you'll still get a validation error. Don't do this! f = TypedChoiceField(choices=[('A', 'A'), ('B', 'B')], coerce=int) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
B is not one of the available choices.'", f.clean, 'B') # Required fields require values self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') def test_typedchoicefield_5(self): # Non-required fields aren't required f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False) self.assertEqual('', f.clean('')) # If you want cleaning an empty value to return a different type, tell the field def test_typedchoicefield_6(self): f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False, empty_value=None) self.assertIsNone(f.clean('')) def test_typedchoicefield_has_changed(self): # has_changed should not trigger required validation f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=True) self.assertFalse(f.has_changed(None, '')) self.assertFalse(f.has_changed(1, '1')) def test_typedchoicefield_special_coerce(self): """ Test a coerce function which results in a value not present in choices. Refs #21397. """ def coerce_func(val): return Decimal('1.%s' % val) f = TypedChoiceField(choices=[(1, "1"), (2, "2")], coerce=coerce_func, required=True) self.assertEqual(Decimal('1.2'), f.clean('2')) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
3 is not one of the available choices.'", f.clean, '3') # NullBooleanField ############################################################ def test_nullbooleanfield_1(self): f = NullBooleanField() self.assertIsNone(f.clean('')) self.assertEqual(True, f.clean(True)) self.assertEqual(False, f.clean(False)) self.assertIsNone(f.clean(None)) self.assertEqual(False, f.clean('0')) self.assertEqual(True, f.clean('1')) self.assertIsNone(f.clean('2')) self.assertIsNone(f.clean('3')) self.assertIsNone(f.clean('hello')) self.assertEqual(True, f.clean('true')) self.assertEqual(False, f.clean('false')) def test_nullbooleanfield_2(self): # Make sure that the internal value is preserved if using HiddenInput (#7753) class HiddenNullBooleanForm(Form): hidden_nullbool1 = NullBooleanField(widget=HiddenInput, initial=True) hidden_nullbool2 = NullBooleanField(widget=HiddenInput, initial=False) f = HiddenNullBooleanForm() self.assertHTMLEqual('<input type="hidden" name="hidden_nullbool1" value="True" id="id_hidden_nullbool1" /><input type="hidden" name="hidden_nullbool2" value="False" id="id_hidden_nullbool2" />', str(f)) def test_nullbooleanfield_3(self): class HiddenNullBooleanForm(Form): hidden_nullbool1 = NullBooleanField(widget=HiddenInput, initial=True) hidden_nullbool2 = NullBooleanField(widget=HiddenInput, initial=False) f = HiddenNullBooleanForm({'hidden_nullbool1': 'True', 'hidden_nullbool2': 'False'}) self.assertIsNone(f.full_clean()) self.assertEqual(True, f.cleaned_data['hidden_nullbool1']) self.assertEqual(False, f.cleaned_data['hidden_nullbool2']) def test_nullbooleanfield_4(self): # Make sure we're compatible with MySQL, which uses 0 and 1 for its boolean # values. 
(#9609) NULLBOOL_CHOICES = (('1', 'Yes'), ('0', 'No'), ('', 'Unknown')) class MySQLNullBooleanForm(Form): nullbool0 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES)) nullbool1 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES)) nullbool2 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES)) f = MySQLNullBooleanForm({'nullbool0': '1', 'nullbool1': '0', 'nullbool2': ''}) self.assertIsNone(f.full_clean()) self.assertEqual(True, f.cleaned_data['nullbool0']) self.assertEqual(False, f.cleaned_data['nullbool1']) self.assertIsNone(f.cleaned_data['nullbool2']) def test_nullbooleanfield_changed(self): f = NullBooleanField() self.assertTrue(f.has_changed(False, None)) self.assertTrue(f.has_changed(None, False)) self.assertFalse(f.has_changed(None, None)) self.assertFalse(f.has_changed(False, False)) self.assertTrue(f.has_changed(True, False)) self.assertTrue(f.has_changed(True, None)) self.assertTrue(f.has_changed(True, False)) # MultipleChoiceField ######################################################### def test_multiplechoicefield_1(self): f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two')]) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(['1'], f.clean([1])) self.assertEqual(['1'], f.clean(['1'])) self.assertEqual(['1', '2'], f.clean(['1', '2'])) self.assertEqual(['1', '2'], f.clean([1, '2'])) self.assertEqual(['1', '2'], f.clean((1, '2'))) self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, []) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, ()) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
3 is not one of the available choices.'", f.clean, ['3']) def test_multiplechoicefield_2(self): f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two')], required=False) self.assertEqual([], f.clean('')) self.assertEqual([], f.clean(None)) self.assertEqual(['1'], f.clean([1])) self.assertEqual(['1'], f.clean(['1'])) self.assertEqual(['1', '2'], f.clean(['1', '2'])) self.assertEqual(['1', '2'], f.clean([1, '2'])) self.assertEqual(['1', '2'], f.clean((1, '2'))) self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello') self.assertEqual([], f.clean([])) self.assertEqual([], f.clean(())) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, ['3']) def test_multiplechoicefield_3(self): f = MultipleChoiceField(choices=[('Numbers', (('1', 'One'), ('2', 'Two'))), ('Letters', (('3', 'A'), ('4', 'B'))), ('5', 'Other')]) self.assertEqual(['1'], f.clean([1])) self.assertEqual(['1'], f.clean(['1'])) self.assertEqual(['1', '5'], f.clean([1, 5])) self.assertEqual(['1', '5'], f.clean([1, '5'])) self.assertEqual(['1', '5'], f.clean(['1', 5])) self.assertEqual(['1', '5'], f.clean(['1', '5'])) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 6 is not one of the available choices.'", f.clean, ['6']) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
6 is not one of the available choices.'", f.clean, ['1', '6']) def test_multiplechoicefield_changed(self): f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two'), ('3', 'Three')]) self.assertFalse(f.has_changed(None, None)) self.assertFalse(f.has_changed([], None)) self.assertTrue(f.has_changed(None, ['1'])) self.assertFalse(f.has_changed([1, 2], ['1', '2'])) self.assertFalse(f.has_changed([2, 1], ['1', '2'])) self.assertTrue(f.has_changed([1, 2], ['1'])) self.assertTrue(f.has_changed([1, 2], ['1', '3'])) # TypedMultipleChoiceField ############################################################ # TypedMultipleChoiceField is just like MultipleChoiceField, except that coerced types # will be returned: def test_typedmultiplechoicefield_1(self): f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int) self.assertEqual([1], f.clean(['1'])) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 2 is not one of the available choices.'", f.clean, ['2']) def test_typedmultiplechoicefield_2(self): # Different coercion, same validation. f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=float) self.assertEqual([1.0], f.clean(['1'])) def test_typedmultiplechoicefield_3(self): # This can also cause weirdness: be careful (bool(-1) == True, remember) f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=bool) self.assertEqual([True], f.clean(['-1'])) def test_typedmultiplechoicefield_4(self): f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int) self.assertEqual([1, -1], f.clean(['1', '-1'])) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 2 is not one of the available choices.'", f.clean, ['1', '2']) def test_typedmultiplechoicefield_5(self): # Even more weirdness: if you have a valid choice but your coercion function # can't coerce, you'll still get a validation error. Don't do this! 
f = TypedMultipleChoiceField(choices=[('A', 'A'), ('B', 'B')], coerce=int) self.assertRaisesMessage(ValidationError, "'Select a valid choice. B is not one of the available choices.'", f.clean, ['B']) # Required fields require values self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, []) def test_typedmultiplechoicefield_6(self): # Non-required fields aren't required f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False) self.assertEqual([], f.clean([])) def test_typedmultiplechoicefield_7(self): # If you want cleaning an empty value to return a different type, tell the field f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False, empty_value=None) self.assertIsNone(f.clean([])) def test_typedmultiplechoicefield_has_changed(self): # has_changed should not trigger required validation f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=True) self.assertFalse(f.has_changed(None, '')) def test_typedmultiplechoicefield_special_coerce(self): """ Test a coerce function which results in a value not present in choices. Refs #21397. """ def coerce_func(val): return Decimal('1.%s' % val) f = TypedMultipleChoiceField( choices=[(1, "1"), (2, "2")], coerce=coerce_func, required=True) self.assertEqual([Decimal('1.2')], f.clean(['2'])) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, []) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
3 is not one of the available choices.'", f.clean, ['3']) # ComboField ################################################################## def test_combofield_1(self): f = ComboField(fields=[CharField(max_length=20), EmailField()]) self.assertEqual('[email protected]', f.clean('[email protected]')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 28).'", f.clean, '[email protected]') self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'not an email') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) def test_combofield_2(self): f = ComboField(fields=[CharField(max_length=20), EmailField()], required=False) self.assertEqual('[email protected]', f.clean('[email protected]')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 28).'", f.clean, '[email protected]') self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'not an email') self.assertEqual('', f.clean('')) self.assertEqual('', f.clean(None)) # FilePathField ############################################################### def test_filepathfield_1(self): path = os.path.abspath(upath(forms.__file__)) path = os.path.dirname(path) + '/' self.assertTrue(fix_os_paths(path).endswith('/django/forms/')) def test_filepathfield_2(self): path = upath(forms.__file__) path = os.path.dirname(os.path.abspath(path)) + '/' f = FilePathField(path=path) f.choices = [p for p in f.choices if p[0].endswith('.py')] f.choices.sort() expected = [ ('/django/forms/__init__.py', '__init__.py'), ('/django/forms/fields.py', 'fields.py'), ('/django/forms/forms.py', 'forms.py'), ('/django/forms/formsets.py', 'formsets.py'), ('/django/forms/models.py', 'models.py'), ('/django/forms/util.py', 'util.py'), ('/django/forms/utils.py', 'utils.py'), ('/django/forms/widgets.py', 
'widgets.py') ] for exp, got in zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) self.assertRaisesMessage(ValidationError, "'Select a valid choice. fields.py is not one of the available choices.'", f.clean, 'fields.py') assert fix_os_paths(f.clean(path + 'fields.py')).endswith('/django/forms/fields.py') def test_filepathfield_3(self): path = upath(forms.__file__) path = os.path.dirname(os.path.abspath(path)) + '/' f = FilePathField(path=path, match='^.*?\.py$') f.choices.sort() expected = [ ('/django/forms/__init__.py', '__init__.py'), ('/django/forms/fields.py', 'fields.py'), ('/django/forms/forms.py', 'forms.py'), ('/django/forms/formsets.py', 'formsets.py'), ('/django/forms/models.py', 'models.py'), ('/django/forms/util.py', 'util.py'), ('/django/forms/utils.py', 'utils.py'), ('/django/forms/widgets.py', 'widgets.py') ] for exp, got in zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) def test_filepathfield_4(self): path = os.path.abspath(upath(forms.__file__)) path = os.path.dirname(path) + '/' f = FilePathField(path=path, recursive=True, match='^.*?\.py$') f.choices.sort() expected = [ ('/django/forms/__init__.py', '__init__.py'), ('/django/forms/extras/__init__.py', 'extras/__init__.py'), ('/django/forms/extras/widgets.py', 'extras/widgets.py'), ('/django/forms/fields.py', 'fields.py'), ('/django/forms/forms.py', 'forms.py'), ('/django/forms/formsets.py', 'formsets.py'), ('/django/forms/models.py', 'models.py'), ('/django/forms/util.py', 'util.py'), ('/django/forms/utils.py', 'utils.py'), ('/django/forms/widgets.py', 'widgets.py') ] for exp, got in zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) def test_filepathfield_folders(self): path = os.path.dirname(upath(__file__)) + '/filepath_test_files/' f = FilePathField(path=path, allow_folders=True, allow_files=False) 
f.choices.sort() expected = [ ('/tests/forms_tests/tests/filepath_test_files/directory', 'directory'), ] for exp, got in zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) f = FilePathField(path=path, allow_folders=True, allow_files=True) f.choices.sort() expected = [ ('/tests/forms_tests/tests/filepath_test_files/.dot-file', '.dot-file'), ('/tests/forms_tests/tests/filepath_test_files/1x1.bmp', '1x1.bmp'), ('/tests/forms_tests/tests/filepath_test_files/1x1.png', '1x1.png'), ('/tests/forms_tests/tests/filepath_test_files/directory', 'directory'), ('/tests/forms_tests/tests/filepath_test_files/fake-image.jpg', 'fake-image.jpg'), ('/tests/forms_tests/tests/filepath_test_files/real-text-file.txt', 'real-text-file.txt'), ] actual = fix_os_paths(f.choices) self.assertEqual(len(expected), len(actual)) for exp, got in zip(expected, actual): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) # SplitDateTimeField ########################################################## def test_splitdatetimefield_1(self): from django.forms.widgets import SplitDateTimeWidget f = SplitDateTimeField() self.assertIsInstance(f.widget, SplitDateTimeWidget) self.assertEqual(datetime.datetime(2006, 1, 10, 7, 30), f.clean([datetime.date(2006, 1, 10), datetime.time(7, 30)])) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello') six.assertRaisesRegex(self, ValidationError, "'Enter a valid date\.', u?'Enter a valid time\.'", f.clean, ['hello', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['hello', '07:30']) def test_splitdatetimefield_2(self): f = 
SplitDateTimeField(required=False) self.assertEqual(datetime.datetime(2006, 1, 10, 7, 30), f.clean([datetime.date(2006, 1, 10), datetime.time(7, 30)])) self.assertEqual(datetime.datetime(2006, 1, 10, 7, 30), f.clean(['2006-01-10', '07:30'])) self.assertIsNone(f.clean(None)) self.assertIsNone(f.clean('')) self.assertIsNone(f.clean([''])) self.assertIsNone(f.clean(['', ''])) self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello') six.assertRaisesRegex(self, ValidationError, "'Enter a valid date\.', u?'Enter a valid time\.'", f.clean, ['hello', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['hello', '07:30']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', '']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10']) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['', '07:30']) def test_splitdatetimefield_changed(self): f = SplitDateTimeField(input_date_formats=['%d/%m/%Y']) self.assertFalse(f.has_changed(['11/01/2012', '09:18:15'], ['11/01/2012', '09:18:15'])) self.assertTrue(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['2008-05-06', '12:40:00'])) self.assertFalse(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['06/05/2008', '12:40'])) self.assertTrue(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['06/05/2008', '12:41'])) def test_uuidfield_1(self): field = UUIDField() value = field.clean('550e8400e29b41d4a716446655440000') self.assertEqual(value, uuid.UUID('550e8400e29b41d4a716446655440000')) def test_uuidfield_2(self): field = UUIDField(required=False) value = field.clean('') self.assertEqual(value, None) def test_uuidfield_3(self): field = UUIDField() with self.assertRaises(ValidationError) as cm: field.clean('550e8400') 
self.assertEqual(cm.exception.messages[0], 'Enter a valid UUID.') def test_uuidfield_4(self): field = UUIDField() value = field.prepare_value(uuid.UUID('550e8400e29b41d4a716446655440000')) self.assertEqual(value, '550e8400e29b41d4a716446655440000')
bsd-3-clause
2,720,080,960,476,381,700
52.579569
211
0.618176
false
rstens/gwells
gwells/models/ScreenIntakeMethodCode.py
1
1289
""" Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from .AuditModel import AuditModel from django.db import models class ScreenIntakeMethodCode(AuditModel): """ Refers to the type of intake mechanism for a well screen, i.e. Screen, Open Bottom, Uncased Hole. """ screen_intake_code = models.CharField(primary_key=True, max_length=10, editable=False) description = models.CharField(max_length=100) display_order = models.PositiveIntegerField() effective_date = models.DateTimeField(blank=True, null=True) expiry_date = models.DateTimeField(blank=True, null=True) class Meta: db_table = 'screen_intake_method_code' ordering = ['display_order', 'description'] def __str__(self): return self.description
apache-2.0
6,329,418,084,396,265
38.060606
102
0.715283
false
superchilli/webapp
config.py
1
1439
import os

# Absolute path of the directory containing this config module; used to build
# the SQLite database paths for the per-environment configs below.
basedir = os.path.abspath(os.path.dirname(__file__))


class Config:
    """Base configuration shared by every environment.

    Secrets and deployment-specific values are read from environment
    variables where possible so they never live in the source tree.
    Subclasses override or extend these settings per environment, and the
    module-level ``config`` dict maps environment names to the classes.
    """
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
    SSL_DISABLE = False
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    # Bug fix: Flask-SQLAlchemy reads SQLALCHEMY_TRACK_MODIFICATIONS
    # (plural).  The singular spelling below was silently ignored, so the
    # correctly spelled setting is added; the old attribute is kept for
    # backward compatibility with any code that referenced it directly.
    SQLALCHEMY_TRACK_MODIFICATION = False
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_RECORD_QUERIES = True
    MAIL_SERVER = 'smtp.live.com'
    MAIL_PORT = 587
    MAIL_USE_TLS = True
    MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
    MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
    MAIL_SUBJECT_PREFIX = '[SUPERCHILLI]'
    MAIL_SENDER = 'Superchilli Admin<[email protected]>'
    # NOTE(review): 'ADMAIN' looks like a typo for 'ADMIN', but both the
    # attribute and the environment-variable name are part of the existing
    # interface, so they are left unchanged.
    ADMAIN = os.environ.get('ADMAIN')
    POSTS_PER_PAGE = 20
    FOLLOWERS_PER_PAGE = 50
    COMMENTS_PER_PAGE = 30
    # Threshold (seconds) above which recorded queries count as "slow";
    # meaningful together with SQLALCHEMY_RECORD_QUERIES above.
    SLOW_DB_QUERY_TIME = 0.5
    UPLOAD_FOLDER_ROOT = os.path.dirname(os.path.abspath(__file__))
    UPLOAD_FOLDER_PATH = 'app/static/uploads'
    UPLOAD_FOLDER = os.path.join(UPLOAD_FOLDER_ROOT, UPLOAD_FOLDER_PATH)

    @staticmethod
    def init_app(app):
        """Hook for environment-specific app initialisation; no-op by default."""
        pass


class DevelopmentConfig(Config):
    """Development settings: debug mode and a local SQLite database."""
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')


class ProductionConfig(Config):
    """Production settings: database URL taken from the environment."""
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'data.sqlite')


config = {
    'development': DevelopmentConfig,
    # 'prodution' (sic) is kept so existing lookups keep working; the
    # correctly spelled 'production' key is the one new code should use.
    'prodution': ProductionConfig,
    'production': ProductionConfig,
    'default': DevelopmentConfig,
}
mit
-3,404,573,651,877,395,000
27.78
72
0.669215
false
waseem18/oh-mainline
vendor/packages/twill/twill/other_packages/_mechanize_dist/_clientcookie.py
20
62742
"""HTTP cookie handling for web clients. This module originally developed from my port of Gisle Aas' Perl module HTTP::Cookies, from the libwww-perl library. Docstrings, comments and debug strings in this code refer to the attributes of the HTTP cookie system as cookie-attributes, to distinguish them clearly from Python attributes. CookieJar____ / \ \ FileCookieJar \ \ / | \ \ \ MozillaCookieJar | LWPCookieJar \ \ | | \ | ---MSIEBase | \ | / | | \ | / MSIEDBCookieJar BSDDBCookieJar |/ MSIECookieJar Comments to John J Lee <[email protected]>. Copyright 2002-2006 John J Lee <[email protected]> Copyright 1997-1999 Gisle Aas (original libwww-perl code) Copyright 2002-2003 Johnny Lee (original MSIE Perl code) This code is free software; you can redistribute it and/or modify it under the terms of the BSD or ZPL 2.1 licenses (see the file COPYING.txt included with the distribution). """ import sys, re, copy, time, struct, urllib, types, logging try: import threading _threading = threading; del threading except ImportError: import dummy_threading _threading = dummy_threading; del dummy_threading import httplib # only for the default HTTP port MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar " "instance initialised with one)") DEFAULT_HTTP_PORT = str(httplib.HTTP_PORT) from _headersutil import split_header_words, parse_ns_headers from _util import isstringlike import _rfc3986 debug = logging.getLogger("mechanize.cookies").debug def reraise_unmasked_exceptions(unmasked=()): # There are a few catch-all except: statements in this module, for # catching input that's bad in unexpected ways. # This function re-raises some exceptions we don't want to trap. 
import mechanize, warnings if not mechanize.USE_BARE_EXCEPT: raise unmasked = unmasked + (KeyboardInterrupt, SystemExit, MemoryError) etype = sys.exc_info()[0] if issubclass(etype, unmasked): raise # swallowed an exception import traceback, StringIO f = StringIO.StringIO() traceback.print_exc(None, f) msg = f.getvalue() warnings.warn("mechanize bug!\n%s" % msg, stacklevel=2) IPV4_RE = re.compile(r"\.\d+$") def is_HDN(text): """Return True if text is a host domain name.""" # XXX # This may well be wrong. Which RFC is HDN defined in, if any (for # the purposes of RFC 2965)? # For the current implementation, what about IPv6? Remember to look # at other uses of IPV4_RE also, if change this. return not (IPV4_RE.search(text) or text == "" or text[0] == "." or text[-1] == ".") def domain_match(A, B): """Return True if domain A domain-matches domain B, according to RFC 2965. A and B may be host domain names or IP addresses. RFC 2965, section 1: Host names can be specified either as an IP address or a HDN string. Sometimes we compare one host name with another. (Such comparisons SHALL be case-insensitive.) Host A's name domain-matches host B's if * their host name strings string-compare equal; or * A is a HDN string and has the form NB, where N is a non-empty name string, B has the form .B', and B' is a HDN string. (So, x.y.com domain-matches .Y.com but not Y.com.) Note that domain-match is not a commutative operation: a.b.c.com domain-matches .c.com, but not the reverse. """ # Note that, if A or B are IP addresses, the only relevant part of the # definition of the domain-match algorithm is the direct string-compare. A = A.lower() B = B.lower() if A == B: return True if not is_HDN(A): return False i = A.rfind(B) has_form_nb = not (i == -1 or i == 0) return ( has_form_nb and B.startswith(".") and is_HDN(B[1:]) ) def liberal_is_HDN(text): """Return True if text is a sort-of-like a host domain name. For accepting/blocking domains. 
""" return not IPV4_RE.search(text) def user_domain_match(A, B): """For blocking/accepting domains. A and B may be host domain names or IP addresses. """ A = A.lower() B = B.lower() if not (liberal_is_HDN(A) and liberal_is_HDN(B)): if A == B: # equal IP addresses return True return False initial_dot = B.startswith(".") if initial_dot and A.endswith(B): return True if not initial_dot and A == B: return True return False cut_port_re = re.compile(r":\d+$") def request_host(request): """Return request-host, as defined by RFC 2965. Variation from RFC: returned value is lowercased, for convenient comparison. """ url = request.get_full_url() host = _rfc3986.urlsplit(url)[1] if host is None: host = request.get_header("Host", "") # remove port, if present host = cut_port_re.sub("", host, 1) return host.lower() def eff_request_host(request): """Return a tuple (request-host, effective request-host name). As defined by RFC 2965, except both are lowercased. """ erhn = req_host = request_host(request) if req_host.find(".") == -1 and not IPV4_RE.search(req_host): erhn = req_host + ".local" return req_host, erhn def request_path(request): """request-URI, as defined by RFC 2965.""" url = request.get_full_url() path, query, frag = _rfc3986.urlsplit(url)[2:] path = escape_path(path) req_path = _rfc3986.urlunsplit((None, None, path, query, frag)) if not req_path.startswith("/"): req_path = "/"+req_path return req_path def request_port(request): host = request.get_host() i = host.find(':') if i >= 0: port = host[i+1:] try: int(port) except ValueError: debug("nonnumeric port: '%s'", port) return None else: port = DEFAULT_HTTP_PORT return port # Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't # need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738). 
HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()" ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])") def uppercase_escaped_char(match): return "%%%s" % match.group(1).upper() def escape_path(path): """Escape any invalid characters in HTTP URL, and uppercase all escapes.""" # There's no knowing what character encoding was used to create URLs # containing %-escapes, but since we have to pick one to escape invalid # path characters, we pick UTF-8, as recommended in the HTML 4.0 # specification: # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1 # And here, kind of: draft-fielding-uri-rfc2396bis-03 # (And in draft IRI specification: draft-duerst-iri-05) # (And here, for new URI schemes: RFC 2718) if isinstance(path, types.UnicodeType): path = path.encode("utf-8") path = urllib.quote(path, HTTP_PATH_SAFE) path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path) return path def reach(h): """Return reach of host h, as defined by RFC 2965, section 1. The reach R of a host name H is defined as follows: * If - H is the host domain name of a host; and, - H has the form A.B; and - A has no embedded (that is, interior) dots; and - B has at least one embedded dot, or B is the string "local". then the reach of H is .B. * Otherwise, the reach of H is H. >>> reach("www.acme.com") '.acme.com' >>> reach("acme.com") 'acme.com' >>> reach("acme.local") '.local' """ i = h.find(".") if i >= 0: #a = h[:i] # this line is only here to show what a is b = h[i+1:] i = b.find(".") if is_HDN(h) and (i >= 0 or b == "local"): return "."+b return h def is_third_party(request): """ RFC 2965, section 3.3.6: An unverifiable transaction is to a third-party host if its request- host U does not domain-match the reach R of the request-host O in the origin transaction. """ req_host = request_host(request) # the origin request's request-host was stuffed into request by # _urllib2_support.AbstractHTTPHandler return not domain_match(req_host, reach(request.origin_req_host)) class Cookie: """HTTP Cookie. 
    This class represents both Netscape and RFC 2965 cookies.

    This is deliberately a very simple class.  It just holds attributes.  It's
    possible to construct Cookie instances that don't comply with the cookie
    standards.  CookieJar.make_cookies is the factory function for Cookie
    objects -- it deals with cookie parsing, supplying defaults, and
    normalising to the representation used in this class.  CookiePolicy is
    responsible for checking them to see whether they should be accepted from
    and returned to the server.

    version: integer;
    name: string;
    value: string (may be None);
    port: string; None indicates no attribute was supplied (eg. "Port", rather
     than eg. "Port=80"); otherwise, a port string (eg. "80") or a port list
     string (eg. "80,8080")
    port_specified: boolean; true if a value was supplied with the Port
     cookie-attribute
    domain: string;
    domain_specified: boolean; true if Domain was explicitly set
    domain_initial_dot: boolean; true if Domain as set in HTTP header by server
     started with a dot (yes, this really is necessary!)
    path: string;
    path_specified: boolean; true if Path was explicitly set
    secure:  boolean; true if should only be returned over secure connection
    expires: integer; seconds since epoch (RFC 2965 cookies should calculate
     this value from the Max-Age attribute)
    discard: boolean, true if this is a session cookie; (if no expires value,
     this should be true)
    comment: string;
    comment_url: string;
    rfc2109: boolean; true if cookie arrived in a Set-Cookie: (not
     Set-Cookie2:) header, but had a version cookie-attribute of 1
    rest: mapping of other cookie-attributes

    Note that the port may be present in the headers, but unspecified ("Port"
    rather than "Port=80", for example); if this is the case, port is None.

    """

    def __init__(self, version, name, value,
                 port, port_specified,
                 domain, domain_specified, domain_initial_dot,
                 path, path_specified,
                 secure,
                 expires,
                 discard,
                 comment,
                 comment_url,
                 rest,
                 rfc2109=False,
                 ):
        # version and expires may arrive as strings; coerce to int early so
        # comparisons elsewhere behave numerically.
        if version is not None: version = int(version)
        if expires is not None: expires = int(expires)
        if port is None and port_specified is True:
            raise ValueError("if port is None, port_specified must be false")

        self.version = version
        self.name = name
        self.value = value
        self.port = port
        self.port_specified = port_specified
        # normalise case, as per RFC 2965 section 3.3.3
        self.domain = domain.lower()
        self.domain_specified = domain_specified
        # Sigh.  We need to know whether the domain given in the
        # cookie-attribute had an initial dot, in order to follow RFC 2965
        # (as clarified in draft errata).  Needed for the returned $Domain
        # value.
        self.domain_initial_dot = domain_initial_dot
        self.path = path
        self.path_specified = path_specified
        self.secure = secure
        self.expires = expires
        self.discard = discard
        self.comment = comment
        self.comment_url = comment_url
        self.rfc2109 = rfc2109

        # Defensive copy: callers may mutate the mapping they passed in.
        self._rest = copy.copy(rest)

    def has_nonstandard_attr(self, name):
        # True if a non-standard cookie-attribute with this name was seen.
        return self._rest.has_key(name)
    def get_nonstandard_attr(self, name, default=None):
        return self._rest.get(name, default)
    def set_nonstandard_attr(self, name, value):
        self._rest[name] = value
    def nonstandard_attr_keys(self):
        return self._rest.keys()

    def is_expired(self, now=None):
        # now: seconds since epoch; defaults to the current time.
        if now is None: now = time.time()
        return (self.expires is not None) and (self.expires <= now)

    def __str__(self):
        if self.port is None: p = ""
        else: p = ":"+self.port
        limit = self.domain + p + self.path
        if self.value is not None:
            namevalue = "%s=%s" % (self.name, self.value)
        else:
            namevalue = self.name
        return "<Cookie %s for %s>" % (namevalue, limit)

    def __repr__(self):
        args = []
        for name in ["version", "name", "value",
                     "port", "port_specified",
                     "domain", "domain_specified", "domain_initial_dot",
                     "path", "path_specified",
                     "secure", "expires", "discard",
                     "comment", "comment_url",
                     ]:
            attr = getattr(self, name)
            args.append("%s=%s" % (name, repr(attr)))
        args.append("rest=%s" % repr(self._rest))
        args.append("rfc2109=%s" % repr(self.rfc2109))
        return "Cookie(%s)" % ", ".join(args)


class CookiePolicy:
    """Defines which cookies get accepted from and returned to server.

    May also modify cookies.

    The subclass DefaultCookiePolicy defines the standard rules for Netscape
    and RFC 2965 cookies -- override that if you want a customised policy.

    As well as implementing set_ok and return_ok, implementations of this
    interface must also supply the following attributes, indicating which
    protocols should be used, and how.  These can be read and set at any time,
    though whether that makes complete sense from the protocol point of view is
    doubtful.

    Public attributes:

    netscape: implement netscape protocol
    rfc2965: implement RFC 2965 protocol
    rfc2109_as_netscape:
       WARNING: This argument will change or go away if is not accepted into
                the Python standard library in this form!
     If true, treat RFC 2109 cookies as though they were Netscape cookies.  The
     default is for this attribute to be None, which means treat 2109 cookies
     as RFC 2965 cookies unless RFC 2965 handling is switched off (which it is,
     by default), and as Netscape cookies otherwise.
    hide_cookie2: don't add Cookie2 header to requests (the presence of
     this header indicates to the server that we understand RFC 2965
     cookies)

    """
    def set_ok(self, cookie, request):
        """Return true if (and only if) cookie should be accepted from server.

        Currently, pre-expired cookies never get this far -- the CookieJar
        class deletes such cookies itself.

        cookie: mechanize.Cookie object
        request: object implementing the interface defined by
         CookieJar.extract_cookies.__doc__

        """
        raise NotImplementedError()

    def return_ok(self, cookie, request):
        """Return true if (and only if) cookie should be returned to server.

        cookie: mechanize.Cookie object
        request: object implementing the interface defined by
         CookieJar.add_cookie_header.__doc__

        """
        raise NotImplementedError()

    def domain_return_ok(self, domain, request):
        """Return false if cookies should not be returned, given cookie domain.

        This is here as an optimization, to remove the need for checking every
        cookie with a particular domain (which may involve reading many files).
        The default implementations of domain_return_ok and path_return_ok
        (return True) leave all the work to return_ok.

        If domain_return_ok returns true for the cookie domain, path_return_ok
        is called for the cookie path.  Otherwise, path_return_ok and return_ok
        are never called for that cookie domain.  If path_return_ok returns
        true, return_ok is called with the Cookie object itself for a full
        check.  Otherwise, return_ok is never called for that cookie path.

        Note that domain_return_ok is called for every *cookie* domain, not
        just for the *request* domain.  For example, the function might be
        called with both ".acme.com" and "www.acme.com" if the request domain
        is "www.acme.com".  The same goes for path_return_ok.

        For argument documentation, see the docstring for return_ok.

        """
        return True

    def path_return_ok(self, path, request):
        """Return false if cookies should not be returned, given cookie path.

        See the docstring for domain_return_ok.

        """
        return True


class DefaultCookiePolicy(CookiePolicy):
    """Implements the standard rules for accepting and returning cookies.

    Both RFC 2965 and Netscape cookies are covered.  RFC 2965 handling is
    switched off by default.

    The easiest way to provide your own policy is to override this class and
    call its methods in your overriden implementations before adding your own
    additional checks.
import mechanize class MyCookiePolicy(mechanize.DefaultCookiePolicy): def set_ok(self, cookie, request): if not mechanize.DefaultCookiePolicy.set_ok( self, cookie, request): return False if i_dont_want_to_store_this_cookie(): return False return True In addition to the features required to implement the CookiePolicy interface, this class allows you to block and allow domains from setting and receiving cookies. There are also some strictness switches that allow you to tighten up the rather loose Netscape protocol rules a little bit (at the cost of blocking some benign cookies). A domain blacklist and whitelist is provided (both off by default). Only domains not in the blacklist and present in the whitelist (if the whitelist is active) participate in cookie setting and returning. Use the blocked_domains constructor argument, and blocked_domains and set_blocked_domains methods (and the corresponding argument and methods for allowed_domains). If you set a whitelist, you can turn it off again by setting it to None. Domains in block or allow lists that do not start with a dot must string-compare equal. For example, "acme.com" matches a blacklist entry of "acme.com", but "www.acme.com" does not. Domains that do start with a dot are matched by more specific domains too. For example, both "www.acme.com" and "www.munitions.acme.com" match ".acme.com" (but "acme.com" itself does not). IP addresses are an exception, and must match exactly. For example, if blocked_domains contains "192.168.1.2" and ".168.1.2" 192.168.1.2 is blocked, but 193.168.1.2 is not. Additional Public Attributes: General strictness switches strict_domain: don't allow sites to set two-component domains with country-code top-level domains like .co.uk, .gov.uk, .co.nz. etc. This is far from perfect and isn't guaranteed to work! 
RFC 2965 protocol strictness switches strict_rfc2965_unverifiable: follow RFC 2965 rules on unverifiable transactions (usually, an unverifiable transaction is one resulting from a redirect or an image hosted on another site); if this is false, cookies are NEVER blocked on the basis of verifiability Netscape protocol strictness switches strict_ns_unverifiable: apply RFC 2965 rules on unverifiable transactions even to Netscape cookies strict_ns_domain: flags indicating how strict to be with domain-matching rules for Netscape cookies: DomainStrictNoDots: when setting cookies, host prefix must not contain a dot (eg. www.foo.bar.com can't set a cookie for .bar.com, because www.foo contains a dot) DomainStrictNonDomain: cookies that did not explicitly specify a Domain cookie-attribute can only be returned to a domain that string-compares equal to the domain that set the cookie (eg. rockets.acme.com won't be returned cookies from acme.com that had no Domain cookie-attribute) DomainRFC2965Match: when setting cookies, require a full RFC 2965 domain-match DomainLiberal and DomainStrict are the most useful combinations of the above flags, for convenience strict_ns_set_initial_dollar: ignore cookies in Set-Cookie: headers that have names starting with '$' strict_ns_set_path: don't allow setting cookies whose path doesn't path-match request URI """ DomainStrictNoDots = 1 DomainStrictNonDomain = 2 DomainRFC2965Match = 4 DomainLiberal = 0 DomainStrict = DomainStrictNoDots|DomainStrictNonDomain def __init__(self, blocked_domains=None, allowed_domains=None, netscape=True, rfc2965=False, # WARNING: this argument will change or go away if is not # accepted into the Python standard library in this form! # default, ie. 
treat 2109 as netscape iff not rfc2965 rfc2109_as_netscape=None, hide_cookie2=False, strict_domain=False, strict_rfc2965_unverifiable=True, strict_ns_unverifiable=False, strict_ns_domain=DomainLiberal, strict_ns_set_initial_dollar=False, strict_ns_set_path=False, ): """ Constructor arguments should be used as keyword arguments only. blocked_domains: sequence of domain names that we never accept cookies from, nor return cookies to allowed_domains: if not None, this is a sequence of the only domains for which we accept and return cookies For other arguments, see CookiePolicy.__doc__ and DefaultCookiePolicy.__doc__.. """ self.netscape = netscape self.rfc2965 = rfc2965 self.rfc2109_as_netscape = rfc2109_as_netscape self.hide_cookie2 = hide_cookie2 self.strict_domain = strict_domain self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable self.strict_ns_unverifiable = strict_ns_unverifiable self.strict_ns_domain = strict_ns_domain self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar self.strict_ns_set_path = strict_ns_set_path if blocked_domains is not None: self._blocked_domains = tuple(blocked_domains) else: self._blocked_domains = () if allowed_domains is not None: allowed_domains = tuple(allowed_domains) self._allowed_domains = allowed_domains def blocked_domains(self): """Return the sequence of blocked domains (as a tuple).""" return self._blocked_domains def set_blocked_domains(self, blocked_domains): """Set the sequence of blocked domains.""" self._blocked_domains = tuple(blocked_domains) def is_blocked(self, domain): for blocked_domain in self._blocked_domains: if user_domain_match(domain, blocked_domain): return True return False def allowed_domains(self): """Return None, or the sequence of allowed domains (as a tuple).""" return self._allowed_domains def set_allowed_domains(self, allowed_domains): """Set the sequence of allowed domains, or None.""" if allowed_domains is not None: allowed_domains = tuple(allowed_domains) self._allowed_domains 
= allowed_domains def is_not_allowed(self, domain): if self._allowed_domains is None: return False for allowed_domain in self._allowed_domains: if user_domain_match(domain, allowed_domain): return False return True def set_ok(self, cookie, request): """ If you override set_ok, be sure to call this method. If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to accept). """ debug(" - checking cookie %s", cookie) assert cookie.name is not None for n in "version", "verifiability", "name", "path", "domain", "port": fn_name = "set_ok_"+n fn = getattr(self, fn_name) if not fn(cookie, request): return False return True def set_ok_version(self, cookie, request): if cookie.version is None: # Version is always set to 0 by parse_ns_headers if it's a Netscape # cookie, so this must be an invalid RFC 2965 cookie. debug(" Set-Cookie2 without version attribute (%s)", cookie) return False if cookie.version > 0 and not self.rfc2965: debug(" RFC 2965 cookies are switched off") return False elif cookie.version == 0 and not self.netscape: debug(" Netscape cookies are switched off") return False return True def set_ok_verifiability(self, cookie, request): if request.unverifiable and is_third_party(request): if cookie.version > 0 and self.strict_rfc2965_unverifiable: debug(" third-party RFC 2965 cookie during " "unverifiable transaction") return False elif cookie.version == 0 and self.strict_ns_unverifiable: debug(" third-party Netscape cookie during " "unverifiable transaction") return False return True def set_ok_name(self, cookie, request): # Try and stop servers setting V0 cookies designed to hack other # servers that know both V0 and V1 protocols. 
if (cookie.version == 0 and self.strict_ns_set_initial_dollar and cookie.name.startswith("$")): debug(" illegal name (starts with '$'): '%s'", cookie.name) return False return True def set_ok_path(self, cookie, request): if cookie.path_specified: req_path = request_path(request) if ((cookie.version > 0 or (cookie.version == 0 and self.strict_ns_set_path)) and not req_path.startswith(cookie.path)): debug(" path attribute %s is not a prefix of request " "path %s", cookie.path, req_path) return False return True def set_ok_countrycode_domain(self, cookie, request): """Return False if explicit cookie domain is not acceptable. Called by set_ok_domain, for convenience of overriding by subclasses. """ if cookie.domain_specified and self.strict_domain: domain = cookie.domain # since domain was specified, we know that: assert domain.startswith(".") if domain.count(".") == 2: # domain like .foo.bar i = domain.rfind(".") tld = domain[i+1:] sld = domain[1:i] if (sld.lower() in [ "co", "ac", "com", "edu", "org", "net", "gov", "mil", "int", "aero", "biz", "cat", "coop", "info", "jobs", "mobi", "museum", "name", "pro", "travel", ] and len(tld) == 2): # domain like .co.uk return False return True def set_ok_domain(self, cookie, request): if self.is_blocked(cookie.domain): debug(" domain %s is in user block-list", cookie.domain) return False if self.is_not_allowed(cookie.domain): debug(" domain %s is not in user allow-list", cookie.domain) return False if not self.set_ok_countrycode_domain(cookie, request): debug(" country-code second level domain %s", cookie.domain) return False if cookie.domain_specified: req_host, erhn = eff_request_host(request) domain = cookie.domain if domain.startswith("."): undotted_domain = domain[1:] else: undotted_domain = domain embedded_dots = (undotted_domain.find(".") >= 0) if not embedded_dots and domain != ".local": debug(" non-local domain %s contains no embedded dot", domain) return False if cookie.version == 0: if (not erhn.endswith(domain) and 
(not erhn.startswith(".") and not ("."+erhn).endswith(domain))): debug(" effective request-host %s (even with added " "initial dot) does not end end with %s", erhn, domain) return False if (cookie.version > 0 or (self.strict_ns_domain & self.DomainRFC2965Match)): if not domain_match(erhn, domain): debug(" effective request-host %s does not domain-match " "%s", erhn, domain) return False if (cookie.version > 0 or (self.strict_ns_domain & self.DomainStrictNoDots)): host_prefix = req_host[:-len(domain)] if (host_prefix.find(".") >= 0 and not IPV4_RE.search(req_host)): debug(" host prefix %s for domain %s contains a dot", host_prefix, domain) return False return True def set_ok_port(self, cookie, request): if cookie.port_specified: req_port = request_port(request) if req_port is None: req_port = "80" else: req_port = str(req_port) for p in cookie.port.split(","): try: int(p) except ValueError: debug(" bad port %s (not numeric)", p) return False if p == req_port: break else: debug(" request port (%s) not found in %s", req_port, cookie.port) return False return True def return_ok(self, cookie, request): """ If you override return_ok, be sure to call this method. If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to return). """ # Path has already been checked by path_return_ok, and domain blocking # done by domain_return_ok. 
debug(" - checking cookie %s", cookie) for n in "version", "verifiability", "secure", "expires", "port", "domain": fn_name = "return_ok_"+n fn = getattr(self, fn_name) if not fn(cookie, request): return False return True def return_ok_version(self, cookie, request): if cookie.version > 0 and not self.rfc2965: debug(" RFC 2965 cookies are switched off") return False elif cookie.version == 0 and not self.netscape: debug(" Netscape cookies are switched off") return False return True def return_ok_verifiability(self, cookie, request): if request.unverifiable and is_third_party(request): if cookie.version > 0 and self.strict_rfc2965_unverifiable: debug(" third-party RFC 2965 cookie during unverifiable " "transaction") return False elif cookie.version == 0 and self.strict_ns_unverifiable: debug(" third-party Netscape cookie during unverifiable " "transaction") return False return True def return_ok_secure(self, cookie, request): if cookie.secure and request.get_type() != "https": debug(" secure cookie with non-secure request") return False return True def return_ok_expires(self, cookie, request): if cookie.is_expired(self._now): debug(" cookie expired") return False return True def return_ok_port(self, cookie, request): if cookie.port: req_port = request_port(request) if req_port is None: req_port = "80" for p in cookie.port.split(","): if p == req_port: break else: debug(" request port %s does not match cookie port %s", req_port, cookie.port) return False return True def return_ok_domain(self, cookie, request): req_host, erhn = eff_request_host(request) domain = cookie.domain # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't if (cookie.version == 0 and (self.strict_ns_domain & self.DomainStrictNonDomain) and not cookie.domain_specified and domain != erhn): debug(" cookie with unspecified domain does not string-compare " "equal to request domain") return False if cookie.version > 0 and not domain_match(erhn, domain): debug(" effective request-host 
name %s does not domain-match "
                  "RFC 2965 cookie domain %s", erhn, domain)
            return False
        if cookie.version == 0 and not ("."+erhn).endswith(domain):
            debug("   request-host %s does not match Netscape cookie domain "
                  "%s", req_host, domain)
            return False
        return True

    def domain_return_ok(self, domain, request):
        # Liberal check of domain.  This is here as an optimization to avoid
        # having to load lots of MSIE cookie files unless necessary.

        # Munge req_host and erhn to always start with a dot, so as to err on
        # the side of letting cookies through.
        dotted_req_host, dotted_erhn = eff_request_host(request)
        if not dotted_req_host.startswith("."):
            dotted_req_host = "."+dotted_req_host
        if not dotted_erhn.startswith("."):
            dotted_erhn = "."+dotted_erhn
        if not (dotted_req_host.endswith(domain) or
                dotted_erhn.endswith(domain)):
            #debug("   request domain %s does not match cookie domain %s",
            #      req_host, domain)
            return False

        # Check the user's block/allow lists here as well as in return_ok,
        # so blocked domains are rejected before any per-cookie work is done.
        if self.is_blocked(domain):
            debug("   domain %s is in user block-list", domain)
            return False
        if self.is_not_allowed(domain):
            debug("   domain %s is not in user allow-list", domain)
            return False

        return True

    def path_return_ok(self, path, request):
        # RFC 2965 path-match: the cookie path must be a prefix of the
        # request path.
        debug("- checking cookie path=%s", path)
        req_path = request_path(request)
        if not req_path.startswith(path):
            debug("  %s does not path-match %s", req_path, path)
            return False
        return True


def vals_sorted_by_key(adict):
    # Return adict's values as a list, ordered by sorted key (Python 2:
    # dict.keys() and map() both return lists here).
    keys = adict.keys()
    keys.sort()
    return map(adict.get, keys)

class MappingIterator:
    """Iterates over nested mapping, depth-first, in sorted order by key."""
    def __init__(self, mapping):
        self._s = [(vals_sorted_by_key(mapping), 0, None)]  # LIFO stack

    def __iter__(self): return self

    def next(self):
        # this is hairy because of lack of generators
        while 1:
            try:
                vals, i, prev_item = self._s.pop()
            except IndexError:
                raise StopIteration()
            if i < len(vals):
                item = vals[i]
                i = i + 1
                self._s.append((vals, i, prev_item))
                try:
                    # duck-type test: anything with an .items attribute is
                    # treated as a nested mapping and descended into
                    item.items
                except AttributeError:
                    # non-mapping
                    break
                else:
                    # mapping
                    self._s.append((vals_sorted_by_key(item), 0, item))
                    continue
            return item


# Used as second parameter to dict.get method, to distinguish absent
# dict key from one with a None value.
class Absent: pass

class CookieJar:
    """Collection of HTTP cookies.

    You may not need to know about this class: try mechanize.urlopen().

    The major methods are extract_cookies and add_cookie_header; these are all
    you are likely to need.

    CookieJar supports the iterator protocol:

    for cookie in cookiejar:
        # do something with cookie

    Methods:

    add_cookie_header(request)
    extract_cookies(response, request)
    make_cookies(response, request)
    set_cookie_if_ok(cookie, request)
    set_cookie(cookie)
    clear_session_cookies()
    clear_expired_cookies()
    clear(domain=None, path=None, name=None)

    Public attributes

    policy: CookiePolicy object

    """

    # used by _cookie_attrs when deciding whether/how to quote cookie values
    non_word_re = re.compile(r"\W")
    quote_re = re.compile(r"([\"\\])")
    # NOTE(review): the three regexps below are not referenced in this part
    # of the file; presumably used by subclasses -- confirm before touching
    strict_domain_re = re.compile(r"\.?[^.]*")
    domain_re = re.compile(r"[^.]*")
    dots_re = re.compile(r"^\.+")

    def __init__(self, policy=None):
        """
        See CookieJar.__doc__ for argument documentation.
""" if policy is None: policy = DefaultCookiePolicy() self._policy = policy self._cookies = {} # for __getitem__ iteration in pre-2.2 Pythons self._prev_getitem_index = 0 def set_policy(self, policy): self._policy = policy def _cookies_for_domain(self, domain, request): cookies = [] if not self._policy.domain_return_ok(domain, request): return [] debug("Checking %s for cookies to return", domain) cookies_by_path = self._cookies[domain] for path in cookies_by_path.keys(): if not self._policy.path_return_ok(path, request): continue cookies_by_name = cookies_by_path[path] for cookie in cookies_by_name.values(): if not self._policy.return_ok(cookie, request): debug(" not returning cookie") continue debug(" it's a match") cookies.append(cookie) return cookies def _cookies_for_request(self, request): """Return a list of cookies to be returned to server.""" cookies = [] for domain in self._cookies.keys(): cookies.extend(self._cookies_for_domain(domain, request)) return cookies def _cookie_attrs(self, cookies): """Return a list of cookie-attributes to be returned to server. like ['foo="bar"; $Path="/"', ...] The $Version attribute is also added when appropriate (currently only once per request). """ # add cookies in order of most specific (ie. longest) path first def decreasing_size(a, b): return cmp(len(b.path), len(a.path)) cookies.sort(decreasing_size) version_set = False attrs = [] for cookie in cookies: # set version of Cookie header # XXX # What should it be if multiple matching Set-Cookie headers have # different versions themselves? # Answer: there is no answer; was supposed to be settled by # RFC 2965 errata, but that may never appear... 
version = cookie.version if not version_set: version_set = True if version > 0: attrs.append("$Version=%s" % version) # quote cookie value if necessary # (not for Netscape protocol, which already has any quotes # intact, due to the poorly-specified Netscape Cookie: syntax) if ((cookie.value is not None) and self.non_word_re.search(cookie.value) and version > 0): value = self.quote_re.sub(r"\\\1", cookie.value) else: value = cookie.value # add cookie-attributes to be returned in Cookie header if cookie.value is None: attrs.append(cookie.name) else: attrs.append("%s=%s" % (cookie.name, value)) if version > 0: if cookie.path_specified: attrs.append('$Path="%s"' % cookie.path) if cookie.domain.startswith("."): domain = cookie.domain if (not cookie.domain_initial_dot and domain.startswith(".")): domain = domain[1:] attrs.append('$Domain="%s"' % domain) if cookie.port is not None: p = "$Port" if cookie.port_specified: p = p + ('="%s"' % cookie.port) attrs.append(p) return attrs def add_cookie_header(self, request): """Add correct Cookie: header to request (urllib2.Request object). The Cookie2 header is also added unless policy.hide_cookie2 is true. The request object (usually a urllib2.Request instance) must support the methods get_full_url, get_host, get_type, has_header, get_header, header_items and add_unredirected_header, as documented by urllib2, and the port attribute (the port number). Actually, RequestUpgradeProcessor will automatically upgrade your Request object to one with has_header, get_header, header_items and add_unredirected_header, if it lacks those methods, for compatibility with pre-2.4 versions of urllib2. 
""" debug("add_cookie_header") self._policy._now = self._now = int(time.time()) req_host, erhn = eff_request_host(request) strict_non_domain = ( self._policy.strict_ns_domain & self._policy.DomainStrictNonDomain) cookies = self._cookies_for_request(request) attrs = self._cookie_attrs(cookies) if attrs: if not request.has_header("Cookie"): request.add_unredirected_header("Cookie", "; ".join(attrs)) # if necessary, advertise that we know RFC 2965 if self._policy.rfc2965 and not self._policy.hide_cookie2: for cookie in cookies: if cookie.version != 1 and not request.has_header("Cookie2"): request.add_unredirected_header("Cookie2", '$Version="1"') break self.clear_expired_cookies() def _normalized_cookie_tuples(self, attrs_set): """Return list of tuples containing normalised cookie information. attrs_set is the list of lists of key,value pairs extracted from the Set-Cookie or Set-Cookie2 headers. Tuples are name, value, standard, rest, where name and value are the cookie name and value, standard is a dictionary containing the standard cookie-attributes (discard, secure, version, expires or max-age, domain, path and port) and rest is a dictionary containing the rest of the cookie-attributes. """ cookie_tuples = [] boolean_attrs = "discard", "secure" value_attrs = ("version", "expires", "max-age", "domain", "path", "port", "comment", "commenturl") for cookie_attrs in attrs_set: name, value = cookie_attrs[0] # Build dictionary of standard cookie-attributes (standard) and # dictionary of other cookie-attributes (rest). # Note: expiry time is normalised to seconds since epoch. V0 # cookies should have the Expires cookie-attribute, and V1 cookies # should have Max-Age, but since V1 includes RFC 2109 cookies (and # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we # accept either (but prefer Max-Age). 
max_age_set = False bad_cookie = False standard = {} rest = {} for k, v in cookie_attrs[1:]: lc = k.lower() # don't lose case distinction for unknown fields if lc in value_attrs or lc in boolean_attrs: k = lc if k in boolean_attrs and v is None: # boolean cookie-attribute is present, but has no value # (like "discard", rather than "port=80") v = True if standard.has_key(k): # only first value is significant continue if k == "domain": if v is None: debug(" missing value for domain attribute") bad_cookie = True break # RFC 2965 section 3.3.3 v = v.lower() if k == "expires": if max_age_set: # Prefer max-age to expires (like Mozilla) continue if v is None: debug(" missing or invalid value for expires " "attribute: treating as session cookie") continue if k == "max-age": max_age_set = True try: v = int(v) except ValueError: debug(" missing or invalid (non-numeric) value for " "max-age attribute") bad_cookie = True break # convert RFC 2965 Max-Age to seconds since epoch # XXX Strictly you're supposed to follow RFC 2616 # age-calculation rules. Remember that zero Max-Age is a # is a request to discard (old and new) cookie, though. 
k = "expires" v = self._now + v if (k in value_attrs) or (k in boolean_attrs): if (v is None and k not in ["port", "comment", "commenturl"]): debug(" missing value for %s attribute" % k) bad_cookie = True break standard[k] = v else: rest[k] = v if bad_cookie: continue cookie_tuples.append((name, value, standard, rest)) return cookie_tuples def _cookie_from_cookie_tuple(self, tup, request): # standard is dict of standard cookie-attributes, rest is dict of the # rest of them name, value, standard, rest = tup domain = standard.get("domain", Absent) path = standard.get("path", Absent) port = standard.get("port", Absent) expires = standard.get("expires", Absent) # set the easy defaults version = standard.get("version", None) if version is not None: version = int(version) secure = standard.get("secure", False) # (discard is also set if expires is Absent) discard = standard.get("discard", False) comment = standard.get("comment", None) comment_url = standard.get("commenturl", None) # set default path if path is not Absent and path != "": path_specified = True path = escape_path(path) else: path_specified = False path = request_path(request) i = path.rfind("/") if i != -1: if version == 0: # Netscape spec parts company from reality here path = path[:i] else: path = path[:i+1] if len(path) == 0: path = "/" # set default domain domain_specified = domain is not Absent # but first we have to remember whether it starts with a dot domain_initial_dot = False if domain_specified: domain_initial_dot = bool(domain.startswith(".")) if domain is Absent: req_host, erhn = eff_request_host(request) domain = erhn elif not domain.startswith("."): domain = "."+domain # set default port port_specified = False if port is not Absent: if port is None: # Port attr present, but has no value: default to request port. # Cookie should then only be sent back on that port. port = request_port(request) else: port_specified = True port = re.sub(r"\s+", "", port) else: # No port attr present. 
Cookie can be sent back on any port.
            port = None

        # set default expires and discard
        if expires is Absent:
            expires = None
            discard = True
        elif expires <= self._now:
            # Expiry date in past is request to delete cookie.  This can't be
            # in DefaultCookiePolicy, because can't delete cookies there.
            try:
                self.clear(domain, path, name)
            except KeyError:
                pass
            debug("Expiring cookie, domain='%s', path='%s', name='%s'",
                  domain, path, name)
            return None

        return Cookie(version,
                      name, value,
                      port, port_specified,
                      domain, domain_specified, domain_initial_dot,
                      path, path_specified,
                      secure,
                      expires,
                      discard,
                      comment,
                      comment_url,
                      rest)

    def _cookies_from_attrs_set(self, attrs_set, request):
        # Turn parsed Set-Cookie(2) attribute lists into Cookie objects,
        # dropping any that _cookie_from_cookie_tuple rejected (None).
        cookie_tuples = self._normalized_cookie_tuples(attrs_set)

        cookies = []
        for tup in cookie_tuples:
            cookie = self._cookie_from_cookie_tuple(tup, request)
            if cookie: cookies.append(cookie)
        return cookies

    def _process_rfc2109_cookies(self, cookies):
        # Flag version-1 cookies as RFC 2109 and, depending on policy
        # (rfc2109_as_netscape, defaulting to "netscape iff rfc2965 is off"),
        # downgrade them to version-0 Netscape cookies in place.
        if self._policy.rfc2109_as_netscape is None:
            rfc2109_as_netscape = not self._policy.rfc2965
        else:
            rfc2109_as_netscape = self._policy.rfc2109_as_netscape
        for cookie in cookies:
            if cookie.version == 1:
                cookie.rfc2109 = True
                if rfc2109_as_netscape:
                    # treat 2109 cookies as Netscape cookies rather than
                    # as RFC2965 cookies
                    cookie.version = 0

    def make_cookies(self, response, request):
        """Return sequence of Cookie objects extracted from response object.

        See extract_cookies.__doc__ for the interfaces required of the
        response and request arguments.
""" # get cookie-attributes for RFC 2965 and Netscape protocols headers = response.info() rfc2965_hdrs = headers.getheaders("Set-Cookie2") ns_hdrs = headers.getheaders("Set-Cookie") rfc2965 = self._policy.rfc2965 netscape = self._policy.netscape if ((not rfc2965_hdrs and not ns_hdrs) or (not ns_hdrs and not rfc2965) or (not rfc2965_hdrs and not netscape) or (not netscape and not rfc2965)): return [] # no relevant cookie headers: quick exit try: cookies = self._cookies_from_attrs_set( split_header_words(rfc2965_hdrs), request) except: reraise_unmasked_exceptions() cookies = [] if ns_hdrs and netscape: try: # RFC 2109 and Netscape cookies ns_cookies = self._cookies_from_attrs_set( parse_ns_headers(ns_hdrs), request) except: reraise_unmasked_exceptions() ns_cookies = [] self._process_rfc2109_cookies(ns_cookies) # Look for Netscape cookies (from Set-Cookie headers) that match # corresponding RFC 2965 cookies (from Set-Cookie2 headers). # For each match, keep the RFC 2965 cookie and ignore the Netscape # cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are # bundled in with the Netscape cookies for this purpose, which is # reasonable behaviour. if rfc2965: lookup = {} for cookie in cookies: lookup[(cookie.domain, cookie.path, cookie.name)] = None def no_matching_rfc2965(ns_cookie, lookup=lookup): key = ns_cookie.domain, ns_cookie.path, ns_cookie.name return not lookup.has_key(key) ns_cookies = filter(no_matching_rfc2965, ns_cookies) if ns_cookies: cookies.extend(ns_cookies) return cookies def set_cookie_if_ok(self, cookie, request): """Set a cookie if policy says it's OK to do so. cookie: mechanize.Cookie instance request: see extract_cookies.__doc__ for the required interface """ self._policy._now = self._now = int(time.time()) if self._policy.set_ok(cookie, request): self.set_cookie(cookie) def set_cookie(self, cookie): """Set a cookie, without checking whether or not it should be set. 
cookie: mechanize.Cookie instance """ c = self._cookies if not c.has_key(cookie.domain): c[cookie.domain] = {} c2 = c[cookie.domain] if not c2.has_key(cookie.path): c2[cookie.path] = {} c3 = c2[cookie.path] c3[cookie.name] = cookie def extract_cookies(self, response, request): """Extract cookies from response, where allowable given the request. Look for allowable Set-Cookie: and Set-Cookie2: headers in the response object passed as argument. Any of these headers that are found are used to update the state of the object (subject to the policy.set_ok method's approval). The response object (usually be the result of a call to mechanize.urlopen, or similar) should support an info method, which returns a mimetools.Message object (in fact, the 'mimetools.Message object' may be any object that provides a getallmatchingheaders method). The request object (usually a urllib2.Request instance) must support the methods get_full_url and get_host, as documented by urllib2, and the port attribute (the port number). The request is used to set default values for cookie-attributes as well as for checking that the cookie is OK to be set. """ debug("extract_cookies: %s", response.info()) self._policy._now = self._now = int(time.time()) for cookie in self.make_cookies(response, request): if self._policy.set_ok(cookie, request): debug(" setting cookie: %s", cookie) self.set_cookie(cookie) def clear(self, domain=None, path=None, name=None): """Clear some cookies. Invoking this method without arguments will clear all cookies. If given a single argument, only cookies belonging to that domain will be removed. If given two arguments, cookies belonging to the specified path within that domain are removed. If given three arguments, then the cookie with the specified name, path and domain is removed. Raises KeyError if no matching cookie exists. 
""" if name is not None: if (domain is None) or (path is None): raise ValueError( "domain and path must be given to remove a cookie by name") del self._cookies[domain][path][name] elif path is not None: if domain is None: raise ValueError( "domain must be given to remove cookies by path") del self._cookies[domain][path] elif domain is not None: del self._cookies[domain] else: self._cookies = {} def clear_session_cookies(self): """Discard all session cookies. Discards all cookies held by object which had either no Max-Age or Expires cookie-attribute or an explicit Discard cookie-attribute, or which otherwise have ended up with a true discard attribute. For interactive browsers, the end of a session usually corresponds to closing the browser window. Note that the save method won't save session cookies anyway, unless you ask otherwise by passing a true ignore_discard argument. """ for cookie in self: if cookie.discard: self.clear(cookie.domain, cookie.path, cookie.name) def clear_expired_cookies(self): """Discard all expired cookies. You probably don't need to call this method: expired cookies are never sent back to the server (provided you're using DefaultCookiePolicy), this method is called by CookieJar itself every so often, and the save method won't save expired cookies anyway (unless you ask otherwise by passing a true ignore_expires argument). 
""" now = time.time() for cookie in self: if cookie.is_expired(now): self.clear(cookie.domain, cookie.path, cookie.name) def __getitem__(self, i): if i == 0: self._getitem_iterator = self.__iter__() elif self._prev_getitem_index != i-1: raise IndexError( "CookieJar.__getitem__ only supports sequential iteration") self._prev_getitem_index = i try: return self._getitem_iterator.next() except StopIteration: raise IndexError() def __iter__(self): return MappingIterator(self._cookies) def __len__(self): """Return number of contained cookies.""" i = 0 for cookie in self: i = i + 1 return i def __repr__(self): r = [] for cookie in self: r.append(repr(cookie)) return "<%s[%s]>" % (self.__class__, ", ".join(r)) def __str__(self): r = [] for cookie in self: r.append(str(cookie)) return "<%s[%s]>" % (self.__class__, ", ".join(r)) class LoadError(Exception): pass class FileCookieJar(CookieJar): """CookieJar that can be loaded from and saved to a file. Additional methods save(filename=None, ignore_discard=False, ignore_expires=False) load(filename=None, ignore_discard=False, ignore_expires=False) revert(filename=None, ignore_discard=False, ignore_expires=False) Additional public attributes filename: filename for loading and saving cookies Additional public readable attributes delayload: request that cookies are lazily loaded from disk; this is only a hint since this only affects performance, not behaviour (unless the cookies on disk are changing); a CookieJar object may ignore it (in fact, only MSIECookieJar lazily loads cookies at the moment) """ def __init__(self, filename=None, delayload=False, policy=None): """ See FileCookieJar.__doc__ for argument documentation. Cookies are NOT loaded from the named file until either the load or revert method is called. 
""" CookieJar.__init__(self, policy) if filename is not None and not isstringlike(filename): raise ValueError("filename must be string-like") self.filename = filename self.delayload = bool(delayload) def save(self, filename=None, ignore_discard=False, ignore_expires=False): """Save cookies to a file. filename: name of file in which to save cookies ignore_discard: save even cookies set to be discarded ignore_expires: save even cookies that have expired The file is overwritten if it already exists, thus wiping all its cookies. Saved cookies can be restored later using the load or revert methods. If filename is not specified, self.filename is used; if self.filename is None, ValueError is raised. """ raise NotImplementedError() def load(self, filename=None, ignore_discard=False, ignore_expires=False): """Load cookies from a file. Old cookies are kept unless overwritten by newly loaded ones. Arguments are as for .save(). If filename is not specified, self.filename is used; if self.filename is None, ValueError is raised. The named file must be in the format understood by the class, or LoadError will be raised. This format will be identical to that written by the save method, unless the load format is not sufficiently well understood (as is the case for MSIECookieJar). """ if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) f = open(filename) try: self._really_load(f, filename, ignore_discard, ignore_expires) finally: f.close() def revert(self, filename=None, ignore_discard=False, ignore_expires=False): """Clear all cookies and reload cookies from a saved file. Raises LoadError (or IOError) if reversion is not successful; the object's state will not be altered if this happens. 
""" if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) old_state = copy.deepcopy(self._cookies) self._cookies = {} try: self.load(filename, ignore_discard, ignore_expires) except (LoadError, IOError): self._cookies = old_state raise
agpl-3.0
5,814,518,179,863,201,000
37.002423
83
0.588123
false
OriHoch/Open-Knesset
mks/migrations/0021_add_member_backlinks_enabled.py
14
11000
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Member.backlinks_enabled' db.add_column('mks_member', 'backlinks_enabled', self.gf('django.db.models.fields.BooleanField')(default=True, blank=True), keep_default=True) def backwards(self, orm): # Deleting field 'Member.backlinks_enabled' db.delete_column('mks_member', 'backlinks_enabled') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': 
('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'mks.correlation': { 'Meta': {'object_name': 'Correlation'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'm1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'m1'", 'to': "orm['mks.Member']"}), 'm2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'m2'", 'to': "orm['mks.Member']"}), 'normalized_score': ('django.db.models.fields.FloatField', [], {'null': 'True'}), 'not_same_party': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}), 'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}) }, 'mks.member': { 'Meta': {'object_name': 'Member'}, 'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'average_monthly_committee_presence': 
('django.db.models.fields.FloatField', [], {'null': 'True'}), 'average_weekly_presence_hours': ('django.db.models.fields.FloatField', [], {'null': 'True'}), 'backlinks_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'bills_stats_approved': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'bills_stats_first': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'bills_stats_pre': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'bills_stats_proposed': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'blog': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['planet.Blog']", 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}), 'current_role_descriptions': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}), 'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), 'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), 'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), 'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}), 'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 
'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'parties': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'all_members'", 'symmetrical': 'False', 'through': "orm['mks.Membership']", 'to': "orm['mks.Party']"}), 'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), 'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'place_of_residence_lat': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}), 'place_of_residence_lon': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}), 'residence_centrality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'residence_economy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) }, 'mks.membership': { 'Meta': {'object_name': 'Membership'}, 'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}), 'party': ('django.db.models.fields.related.ForeignKey', [], {'to': 
"orm['mks.Party']"}), 'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}) }, 'mks.party': { 'Meta': {'object_name': 'Party'}, 'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}) }, 'mks.weeklypresence': { 'Meta': {'object_name': 'WeeklyPresence'}, 'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'hours': ('django.db.models.fields.FloatField', [], {'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}) }, 'planet.blog': { 'Meta': {'object_name': 'Blog'}, 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '1024', 'db_index': 'True'}) } } complete_apps = ['mks']
bsd-3-clause
7,459,421,677,559,605,000
78.136691
200
0.546182
false
shiquanwang/pylearn2
pylearn2/cross_validation/train_cv_extensions.py
19
4594
""" Cross-validation training extensions. """ __author__ = "Steven Kearnes" __copyright__ = "Copyright 2014, Stanford University" __license__ = "3-clause BSD" __maintainer__ = "Steven Kearnes" import numpy as np import os from pylearn2.train import SerializationGuard from pylearn2.train_extensions.best_params import MonitorBasedSaveBest from pylearn2.utils import serial class TrainCVExtension(object): """ TrainCV extension class. This class operates on the Train objects corresponding to each fold of cross-validation, and therefore does not implement an on_monitor method. """ def setup(self, trainers): """ Set up training extension. Parameters ---------- trainers : list List of Train objects belonging to the parent TrainCV object. """ def on_save(self, trainers): """ Called by TrainCV just before saving models. Parameters ---------- trainers : list List of Train objects belonging to the parent TrainCV object. """ class MonitorBasedSaveBestCV(TrainCVExtension): """ Save best model for each cross-validation fold. Based on train_extensions.best_params.MonitorBasedSaveBest. Parameters ---------- channel_name : str Channel to monitor. save_path : str or None, optional Output filename. If None (the default), store_best_model must be true. store_best_model : bool, optional Whether to store the best model in memory. If False (the default), save_path must be defined. Note that the best model from each child trainer must be accessed through the extensions for that trainer. higher_is_better : bool, optional Whether a higher channel value indicates a better model. tag_key : str, optional Unique key to associate with the best model. If provided, this key will be modified to have a unique value for each child model. save_folds : bool Whether to write individual files for each cross-validation fold. Only used if save_path is not None. 
""" def __init__(self, channel_name, save_path=None, store_best_model=False, higher_is_better=False, tag_key=None, save_folds=False): self.channel_name = channel_name assert save_path is not None or store_best_model, ( "Either save_path must be defined or store_best_model must be " + "True. (Or both.)") self.save_path = save_path self.store_best_model = store_best_model self.higher_is_better = higher_is_better self.best_cost = np.inf self.best_model = None self.tag_key = tag_key self.save_folds = save_folds def setup(self, trainers): """ Add tracking to all trainers. Parameters ---------- trainers : list List of Train objects belonging to the parent TrainCV object. """ for k, trainer in enumerate(trainers): if self.save_path is not None and self.save_folds: path, ext = os.path.splitext(self.save_path) save_path = path + '-{}'.format(k) + ext else: save_path = None if self.tag_key is not None: tag_key = '{}-{}'.format(self.tag_key, k) else: tag_key = None extension = MonitorBasedSaveBest( self.channel_name, save_path=save_path, store_best_model=True, higher_is_better=self.higher_is_better, tag_key=tag_key) trainer.extensions.append(extension) def on_save(self, trainers): """ Save best model from each cross-validation fold. Parameters ---------- trainers : list List of Train objects belonging to the parent TrainCV object. """ if self.save_path is None: return models = [] for trainer in trainers: for extension in trainer.extensions: if isinstance(extension, MonitorBasedSaveBest): models.append(extension.best_model) break assert len(models) == len(trainers) try: for trainer in trainers: trainer.dataset._serialization_guard = SerializationGuard() serial.save(self.save_path, models, on_overwrite='backup') finally: for trainer in trainers: trainer.dataset._serialization_guard = None
bsd-3-clause
-8,686,569,234,582,203,000
33.80303
78
0.603396
false
JConwayAWT/PGSS14CC
lib/python/multimetallics/ase/neb.py
2
10714
from math import sqrt import numpy as np import ase.parallel as mpi from ase.calculators.singlepoint import SinglePointCalculator from ase.io import read class NEB: def __init__(self, images, k=0.1, climb=False, parallel=False, world=None): """Nudged elastic band. images: list of Atoms objects Images defining path from initial to final state. k: float or list of floats Spring constant(s). One number or one for each spring. climb: bool Use a climbing image (default is no climbing image). parallel: bool Distribute images over processors. """ self.images = images self.climb = climb self.parallel = parallel self.natoms = len(images[0]) self.nimages = len(images) self.emax = np.nan if isinstance(k, (float, int)): k = [k] * (self.nimages - 1) self.k = list(k) if world is None: world = mpi.world self.world = world assert not parallel or world.size % (self.nimages - 2) == 0 def interpolate(self): pos1 = self.images[0].get_positions() pos2 = self.images[-1].get_positions() d = (pos2 - pos1) / (self.nimages - 1.0) for i in range(1, self.nimages - 1): self.images[i].set_positions(pos1 + i * d) # Parallel NEB with Jacapo needs this: try: self.images[i].get_calculator().set_atoms(self.images[i]) except AttributeError: pass def get_positions(self): positions = np.empty(((self.nimages - 2) * self.natoms, 3)) n1 = 0 for image in self.images[1:-1]: n2 = n1 + self.natoms positions[n1:n2] = image.get_positions() n1 = n2 return positions def set_positions(self, positions): n1 = 0 for image in self.images[1:-1]: n2 = n1 + self.natoms image.set_positions(positions[n1:n2]) n1 = n2 # Parallel NEB with Jacapo needs this: try: image.get_calculator().set_atoms(image) except AttributeError: pass def get_forces(self): """Evaluate and return the forces.""" images = self.images forces = np.empty(((self.nimages - 2), self.natoms, 3)) energies = np.empty(self.nimages - 2) if not self.parallel: # Do all images - one at a time: for i in range(1, self.nimages - 1): energies[i - 1] = 
images[i].get_potential_energy() forces[i - 1] = images[i].get_forces() else: # Parallelize over images: i = self.world.rank * (self.nimages - 2) // self.world.size + 1 try: energies[i - 1] = images[i].get_potential_energy() forces[i - 1] = images[i].get_forces() except: # Make sure other images also fail: error = self.world.sum(1.0) raise else: error = self.world.sum(0.0) if error: raise RuntimeError('Parallel NEB failed!') for i in range(1, self.nimages - 1): root = (i - 1) * self.world.size // (self.nimages - 2) self.world.broadcast(energies[i - 1:i], root) self.world.broadcast(forces[i - 1], root) imax = 1 + np.argsort(energies)[-1] self.emax = energies[imax - 1] tangent1 = images[1].get_positions() - images[0].get_positions() for i in range(1, self.nimages - 1): tangent2 = (images[i + 1].get_positions() - images[i].get_positions()) if i < imax: tangent = tangent2 elif i > imax: tangent = tangent1 else: tangent = tangent1 + tangent2 tt = np.vdot(tangent, tangent) f = forces[i - 1] ft = np.vdot(f, tangent) if i == imax and self.climb: f -= 2 * ft / tt * tangent else: f -= ft / tt * tangent f -= np.vdot(tangent1 * self.k[i - 1] - tangent2 * self.k[i], tangent) / tt * tangent tangent1 = tangent2 return forces.reshape((-1, 3)) def get_potential_energy(self): return self.emax def __len__(self): return (self.nimages - 2) * self.natoms class SingleCalculatorNEB(NEB): def __init__(self, images, k=0.1, climb=False): if isinstance(images, str): # this is a filename traj = read(images, '0:') images = [] for atoms in traj: images.append(atoms) NEB.__init__(self, images, k, climb, False) self.calculators = [None] * self.nimages self.energies_ok = False def interpolate(self, initial=0, final=-1): """Interpolate linearly between initial and final images.""" if final < 0: final = self.nimages + final n = final - initial pos1 = self.images[initial].get_positions() pos2 = self.images[final].get_positions() d = (pos2 - pos1) / n for i in range(1, n): self.images[initial + 
i].set_positions(pos1 + i * d) def refine(self, steps=1, begin=0, end=-1): """Refine the NEB trajectory.""" if end < 0: end = self.nimages + end j = begin n = end - begin for i in range(n): for k in range(steps): self.images.insert(j + 1, self.images[j].copy()) self.calculators.insert(j + 1, None) self.k[j:j + 1] = [self.k[j] * (steps + 1)] * (steps + 1) self.nimages = len(self.images) self.interpolate(j, j + steps + 1) j += steps + 1 def set_positions(self, positions): # new positions -> new forces if self.energies_ok: # restore calculators self.set_calculators(self.calculators[1:-1]) NEB.set_positions(self, positions) def get_calculators(self): """Return the original calculators.""" calculators = [] for i, image in enumerate(self.images): if self.calculators[i] is None: calculators.append(image.get_calculator()) else: calculators.append(self.calculators[i]) return calculators def set_calculators(self, calculators): """Set new calculators to the images.""" self.energies_ok = False if not isinstance(calculators, list): calculators = [calculators] * self.nimages n = len(calculators) if n == self.nimages: for i in range(self.nimages): self.images[i].set_calculator(calculators[i]) elif n == self.nimages - 2: for i in range(1, self.nimages - 1): self.images[i].set_calculator(calculators[i - 1]) else: raise RuntimeError( 'len(calculators)=%d does not fit to len(images)=%d' % (n, self.nimages)) def get_energies_and_forces(self, all=False): """Evaluate energies and forces and hide the calculators""" if self.energies_ok: return self.emax = -1.e32 def calculate_and_hide(i): image = self.images[i] calc = image.get_calculator() if self.calculators[i] is None: self.calculators[i] = calc if calc is not None: if not isinstance(calc, SinglePointCalculator): self.images[i].set_calculator( SinglePointCalculator(image.get_potential_energy(), image.get_forces(), None, None, image)) self.emax = min(self.emax, image.get_potential_energy()) if all and self.calculators[0] is None: 
calculate_and_hide(0) # Do all images - one at a time: for i in range(1, self.nimages - 1): calculate_and_hide(i) if all and self.calculators[-1] is None: calculate_and_hide(-1) self.energies_ok = True def get_forces(self): self.get_energies_and_forces() return NEB.get_forces(self) def n(self): return self.nimages def write(self, filename): from ase.io.trajectory import PickleTrajectory traj = PickleTrajectory(filename, 'w', self) traj.write() traj.close() def __add__(self, other): for image in other: self.images.append(image) return self def fit(images): E = [i.get_potential_energy() for i in images] F = [i.get_forces() for i in images] R = [i.get_positions() for i in images] return fit0(E, F, R) def fit0(E, F, R): E = np.array(E) - E[0] n = len(E) Efit = np.empty((n - 1) * 20 + 1) Sfit = np.empty((n - 1) * 20 + 1) s = [0] for i in range(n - 1): s.append(s[-1] + sqrt(((R[i + 1] - R[i])**2).sum())) lines = [] for i in range(n): if i == 0: d = R[1] - R[0] ds = 0.5 * s[1] elif i == n - 1: d = R[-1] - R[-2] ds = 0.5 * (s[-1] - s[-2]) else: d = R[i + 1] - R[i - 1] ds = 0.25 * (s[i + 1] - s[i - 1]) d = d / sqrt((d**2).sum()) dEds = -(F[i] * d).sum() x = np.linspace(s[i] - ds, s[i] + ds, 3) y = E[i] + dEds * (x - s[i]) lines.append((x, y)) if i > 0: s0 = s[i - 1] s1 = s[i] x = np.linspace(s0, s1, 20, endpoint=False) c = np.linalg.solve(np.array([(1, s0, s0**2, s0**3), (1, s1, s1**2, s1**3), (0, 1, 2 * s0, 3 * s0**2), (0, 1, 2 * s1, 3 * s1**2)]), np.array([E[i - 1], E[i], dEds0, dEds])) y = c[0] + x * (c[1] + x * (c[2] + x * c[3])) Sfit[(i - 1) * 20:i * 20] = x Efit[(i - 1) * 20:i * 20] = y dEds0 = dEds Sfit[-1] = s[-1] Efit[-1] = E[-1] return s, E, Sfit, Efit, lines
gpl-2.0
-3,329,476,169,308,717,000
32.376947
75
0.482733
false
hzlf/openbroadcast
website/lib/util/merge.py
1
4599
from django.db import transaction from django.db.models import get_models, Model from django.contrib.contenttypes.generic import GenericForeignKey import logging logger = logging.getLogger(__name__) @transaction.commit_on_success def merge_model_objects(primary_object, alias_objects=[], keep_old=False): log = logging.getLogger('lib.util.merge.merge_model_objects') """ Use this function to merge model objects (i.e. Users, Organizations, Polls, etc.) and migrate all of the related fields from the alias objects to the primary object. Usage: from django.contrib.auth.models import User primary_user = User.objects.get(email='[email protected]') duplicate_user = User.objects.get(email='[email protected]') merge_model_objects(primary_user, duplicate_user) """ if not isinstance(alias_objects, list): alias_objects = [alias_objects] # check that all aliases are the same class as primary one and that # they are subclass of model primary_class = primary_object.__class__ if not issubclass(primary_class, Model): raise TypeError('Only django.db.models.Model subclasses can be merged') for alias_object in alias_objects: if not isinstance(alias_object, primary_class): print 'hm what strange error??' #raise TypeError('Only models of same class can be merged') # Get a list of all GenericForeignKeys in all models # TODO: this is a bit of a hack, since the generics framework should provide a similar # method to the ForeignKey field for accessing the generic related fields. generic_fields = [] for model in get_models(): for field_name, field in filter(lambda x: isinstance(x[1], GenericForeignKey), model.__dict__.iteritems()): generic_fields.append(field) blank_local_fields = set([field.attname for field in primary_object._meta.local_fields if getattr(primary_object, field.attname) in [None, '']]) # Loop through all alias objects and migrate their data to the primary object. for alias_object in alias_objects: # Migrate all foreign key references from alias object to primary object. 
for related_object in alias_object._meta.get_all_related_objects(): # The variable name on the alias_object model. alias_varname = related_object.get_accessor_name() # The variable name on the related model. obj_varname = related_object.field.name related_objects = getattr(alias_object, alias_varname) for obj in related_objects.all(): setattr(obj, obj_varname, primary_object) obj.save() # Migrate all many to many references from alias object to primary object. for related_many_object in alias_object._meta.get_all_related_many_to_many_objects(): alias_varname = related_many_object.get_accessor_name() obj_varname = related_many_object.field.name if alias_varname is not None: # standard case related_many_objects = getattr(alias_object, alias_varname).all() else: # special case, symmetrical relation, no reverse accessor related_many_objects = getattr(alias_object, obj_varname).all() for obj in related_many_objects.all(): getattr(obj, obj_varname).remove(alias_object) getattr(obj, obj_varname).add(primary_object) # Migrate all generic foreign key references from alias object to primary object. for field in generic_fields: filter_kwargs = {} filter_kwargs[field.fk_field] = alias_object._get_pk_val() filter_kwargs[field.ct_field] = field.get_content_type(alias_object) for generic_related_object in field.model.objects.filter(**filter_kwargs): setattr(generic_related_object, field.name, primary_object) try: generic_related_object.save() except: pass # Try to fill all missing values in primary object by values of duplicates filled_up = set() for field_name in blank_local_fields: val = getattr(alias_object, field_name) if val not in [None, '']: setattr(primary_object, field_name, val) filled_up.add(field_name) blank_local_fields -= filled_up if not keep_old: alias_object.delete() primary_object.save() return primary_object
gpl-3.0
1,839,782,211,920,915,000
44.544554
148
0.651446
false
oskar456/youtube-dl
youtube_dl/extractor/noovo.py
13
3504
# coding: utf-8 from __future__ import unicode_literals from .brightcove import BrightcoveNewIE from .common import InfoExtractor from ..compat import compat_str from ..utils import ( int_or_none, js_to_json, smuggle_url, try_get, ) class NoovoIE(InfoExtractor): _VALID_URL = r'https?://(?:[^/]+\.)?noovo\.ca/videos/(?P<id>[^/]+/[^/?#&]+)' _TESTS = [{ # clip 'url': 'http://noovo.ca/videos/rpm-plus/chrysler-imperial', 'info_dict': { 'id': '5386045029001', 'ext': 'mp4', 'title': 'Chrysler Imperial', 'description': 'md5:de3c898d1eb810f3e6243e08c8b4a056', 'timestamp': 1491399228, 'upload_date': '20170405', 'uploader_id': '618566855001', 'series': 'RPM+', }, 'params': { 'skip_download': True, }, }, { # episode 'url': 'http://noovo.ca/videos/l-amour-est-dans-le-pre/episode-13-8', 'info_dict': { 'id': '5395865725001', 'title': 'Épisode 13 : Les retrouvailles', 'description': 'md5:888c3330f0c1b4476c5bc99a1c040473', 'ext': 'mp4', 'timestamp': 1492019320, 'upload_date': '20170412', 'uploader_id': '618566855001', 'series': "L'amour est dans le pré", 'season_number': 5, 'episode': 'Épisode 13', 'episode_number': 13, }, 'params': { 'skip_download': True, }, }] BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/618566855001/default_default/index.html?videoId=%s' def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) bc_url = BrightcoveNewIE._extract_url(self, webpage) data = self._parse_json( self._search_regex( r'(?s)dataLayer\.push\(\s*({.+?})\s*\);', webpage, 'data', default='{}'), video_id, transform_source=js_to_json, fatal=False) title = try_get( data, lambda x: x['video']['nom'], compat_str) or self._html_search_meta( 'dcterms.Title', webpage, 'title', fatal=True) description = self._html_search_meta( ('dcterms.Description', 'description'), webpage, 'description') series = try_get( data, lambda x: x['emission']['nom']) or self._search_regex( r'<div[^>]+class="banner-card__subtitle h4"[^>]*>([^<]+)', webpage, 
'series', default=None) season_el = try_get(data, lambda x: x['emission']['saison'], dict) or {} season = try_get(season_el, lambda x: x['nom'], compat_str) season_number = int_or_none(try_get(season_el, lambda x: x['numero'])) episode_el = try_get(season_el, lambda x: x['episode'], dict) or {} episode = try_get(episode_el, lambda x: x['nom'], compat_str) episode_number = int_or_none(try_get(episode_el, lambda x: x['numero'])) return { '_type': 'url_transparent', 'ie_key': BrightcoveNewIE.ie_key(), 'url': smuggle_url(bc_url, {'geo_countries': ['CA']}), 'title': title, 'description': description, 'series': series, 'season': season, 'season_number': season_number, 'episode': episode, 'episode_number': episode_number, }
unlicense
4,679,593,537,187,388,000
34.01
112
0.52585
false
nemesisdesign/django
tests/model_fields/test_foreignkey.py
42
3522
from decimal import Decimal from django.apps import apps from django.core import checks from django.db import models from django.test import TestCase, skipIfDBFeature from django.test.utils import isolate_apps from django.utils import six from .models import Bar, FkToChar, Foo, PrimaryKeyCharModel class ForeignKeyTests(TestCase): def test_callable_default(self): """A lazy callable may be used for ForeignKey.default.""" a = Foo.objects.create(id=1, a='abc', d=Decimal('12.34')) b = Bar.objects.create(b='bcd') self.assertEqual(b.a, a) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_empty_string_fk(self): """ Empty strings foreign key values don't get converted to None (#19299). """ char_model_empty = PrimaryKeyCharModel.objects.create(string='') fk_model_empty = FkToChar.objects.create(out=char_model_empty) fk_model_empty = FkToChar.objects.select_related('out').get(id=fk_model_empty.pk) self.assertEqual(fk_model_empty.out, char_model_empty) @isolate_apps('model_fields') def test_warning_when_unique_true_on_fk(self): class Foo(models.Model): pass class FKUniqueTrue(models.Model): fk_field = models.ForeignKey(Foo, models.CASCADE, unique=True) model = FKUniqueTrue() expected_warnings = [ checks.Warning( 'Setting unique=True on a ForeignKey has the same effect as using a OneToOneField.', hint='ForeignKey(unique=True) is usually better served by a OneToOneField.', obj=FKUniqueTrue.fk_field.field, id='fields.W342', ) ] warnings = model.check() self.assertEqual(warnings, expected_warnings) def test_related_name_converted_to_text(self): rel_name = Bar._meta.get_field('a').remote_field.related_name self.assertIsInstance(rel_name, six.text_type) def test_abstract_model_pending_operations(self): """ Foreign key fields declared on abstract models should not add lazy relations to resolve relationship declared as string (#24215). 
""" pending_ops_before = list(apps._pending_operations.items()) class AbstractForeignKeyModel(models.Model): fk = models.ForeignKey('missing.FK', models.CASCADE) class Meta: abstract = True self.assertIs(AbstractForeignKeyModel._meta.apps, apps) self.assertEqual( pending_ops_before, list(apps._pending_operations.items()), 'Pending lookup added for a foreign key on an abstract model' ) @isolate_apps('model_fields', 'model_fields.tests') def test_abstract_model_app_relative_foreign_key(self): class AbstractReferent(models.Model): reference = models.ForeignKey('Refered', on_delete=models.CASCADE) class Meta: app_label = 'model_fields' abstract = True def assert_app_model_resolved(label): class Refered(models.Model): class Meta: app_label = label class ConcreteReferent(AbstractReferent): class Meta: app_label = label self.assertEqual(ConcreteReferent._meta.get_field('reference').related_model, Refered) assert_app_model_resolved('model_fields') assert_app_model_resolved('tests')
bsd-3-clause
-7,693,355,165,790,673,000
35.6875
100
0.631459
false
jeanmask/opps
tests/core/test_cache.py
5
1194
#!/usr/bin/env python # -*- coding: utf-8 -*- from unittest import TestCase from django.http import HttpRequest from opps.core.cache import cache_page class DecoratorsTest(TestCase): def test_cache_page_new_style(self): """ Test that we can call cache_page the new way """ def my_view(request): return "response" my_view_cached = cache_page(123)(my_view) self.assertEqual(my_view_cached(HttpRequest()), "response") my_view_cached2 = cache_page(123, key_prefix="test")(my_view) self.assertEqual(my_view_cached2(HttpRequest()), "response") def test_cache_page_is_mobile(self): """ Test that we can call cache_page in mobile page """ def is_mobile_view(request): return request.is_mobile def mobile_view(request): return 'response' mobile_view_cached = cache_page(123)(mobile_view) is_mobile_view_cached = cache_page(123)(is_mobile_view) request = HttpRequest() request.is_mobile = True self.assertEqual(mobile_view_cached(request), "response") self.assertTrue(is_mobile_view_cached(request))
mit
1,234,804,438,560,658,200
30.421053
69
0.624791
false
jkonecki/autorest
AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/Http/autoresthttpinfrastructuretestservice/operations/multiple_responses.py
5
54689
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.pipeline import ClientRawResponse from msrest.exceptions import HttpOperationError from .. import models class MultipleResponses(object): """MultipleResponses operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An objec model deserializer. """ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self.config = config def get200_model204_no_model_default_error200_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with valid payload: {'statusCode': '200'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/204/none/default/Error/response/200/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 204]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model204_no_model_default_error204_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 204 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/204/none/default/Error/response/204/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 204]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model204_no_model_default_error201_invalid( self, custom_headers={}, raw=False, **operation_config): """ Send a 201 response with valid payload: {'statusCode': '201'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/204/none/default/Error/response/201/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 204]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model204_no_model_default_error202_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 202 response with no payload: :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/204/none/default/Error/response/202/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 204]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model204_no_model_default_error400_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with valid error payload: {'status': 400, 'message': 'client error'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/204/none/default/Error/response/400/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 204]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model201_model_default_error200_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with valid payload: {'statusCode': '200'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/201/B/default/Error/response/200/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 201]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if response.status_code == 201: deserialized = self._deserialize('B', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model201_model_default_error201_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 201 response with valid payload: {'statusCode': '201', 'textStatusCode': 'Created'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/201/B/default/Error/response/201/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 201]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if response.status_code == 201: deserialized = self._deserialize('B', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model201_model_default_error400_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with valid payload: {'code': '400', 'message': 'client error'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/201/B/default/Error/response/400/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 201]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if response.status_code == 201: deserialized = self._deserialize('B', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a201_model_c404_model_ddefault_error200_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with valid payload: {'statusCode': '200'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: object :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/201/C/404/D/default/Error/response/200/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 201, 404]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if response.status_code == 201: deserialized = self._deserialize('C', response) if response.status_code == 404: deserialized = self._deserialize('D', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a201_model_c404_model_ddefault_error201_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with valid payload: {'httpCode': '201'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: object :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/201/C/404/D/default/Error/response/201/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 201, 404]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if response.status_code == 201: deserialized = self._deserialize('C', response) if response.status_code == 404: deserialized = self._deserialize('D', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a201_model_c404_model_ddefault_error404_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with valid payload: {'httpStatusCode': '404'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: object :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/201/C/404/D/default/Error/response/404/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 201, 404]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if response.status_code == 201: deserialized = self._deserialize('C', response) if response.status_code == 404: deserialized = self._deserialize('D', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a201_model_c404_model_ddefault_error400_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with valid payload: {'code': '400', 'message': 'client error'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: object :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/201/C/404/D/default/Error/response/400/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200, 201, 404]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if response.status_code == 201: deserialized = self._deserialize('C', response) if response.status_code == 404: deserialized = self._deserialize('D', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get202_none204_none_default_error202_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 202 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/202/none/204/none/default/Error/response/202/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [202, 204]: raise models.ErrorException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get202_none204_none_default_error204_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 204 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/202/none/204/none/default/Error/response/204/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [202, 204]: raise models.ErrorException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get202_none204_none_default_error400_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with valid payload: {'code': '400', 'message': 'client error'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/202/none/204/none/default/Error/response/400/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [202, 204]: raise models.ErrorException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get202_none204_none_default_none202_invalid( self, custom_headers={}, raw=False, **operation_config): """ Send a 202 response with an unexpected payload {'property': 'value'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/202/none/204/none/default/none/response/202/invalid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [202, 204]: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get202_none204_none_default_none204_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 204 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/202/none/204/none/default/none/response/204/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [202, 204]: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get202_none204_none_default_none400_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/202/none/204/none/default/none/response/400/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [202, 204]: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get202_none204_none_default_none400_invalid( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with an unexpected payload {'property': 'value'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/202/none/204/none/default/none/response/400/invalid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [202, 204]: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_model_a200_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with valid payload: {'statusCode': '200'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/A/response/200/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise models.MyException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_model_a200_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/A/response/200/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise models.MyException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_model_a400_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with valid payload: {'statusCode': '400'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/A/response/400/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise models.MyException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_model_a400_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/A/response/400/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise models.MyException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_none200_invalid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with invalid payload: {'statusCode': '200'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/none/response/200/invalid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_none200_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/none/response/200/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_none400_invalid( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with valid payload: {'statusCode': '400'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/none/response/400/invalid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get_default_none400_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with no payload :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/default/none/response/400/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code < 200 or response.status_code >= 300: raise HttpOperationError(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def get200_model_a200_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with no payload, when a payload is expected - client should return a null object of thde type for model A :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/response/200/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a200_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with payload {'statusCode': '200'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/response/200/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a200_invalid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with invalid payload {'statusCodeInvalid': '200'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/response/200/invalid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a400_none( self, custom_headers={}, raw=False, **operation_config): """ Send a 400 response with no payload client should treat as an http error with no error model :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/response/400/none' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a400_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with payload {'statusCode': '400'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/response/400/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a400_invalid( self, custom_headers={}, raw=False, **operation_config): """ Send a 200 response with invalid payload {'statusCodeInvalid': '400'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/response/400/invalid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def get200_model_a202_valid( self, custom_headers={}, raw=False, **operation_config): """ Send a 202 response with payload {'statusCode': '202'} :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`A <fixtures.acceptancetestshttp.models.A>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/http/payloads/200/A/response/202/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise HttpOperationError(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('A', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
mit
755,333,431,665,381,200
36.638679
84
0.634954
false
ohsailey/MAD-IS
mad_interface_server/views/communicate.py
1
10093
''' Copyright (c) 2014 OpenISDM Project Name: OpenISDM MAD-IS Version: 1.0 File Name: communicate.py Abstract: communicate.py is a module of Interface Server (IS) of Mobile Assistance for Disasters (MAD) in the OpenISDM Virtual Repository project. Establish communication with client side, and server will do some actions according different request. Authors: Bai Shan-Wei, [email protected] License: GPL 3.0 This file is subject to the terms and conditions defined in file 'COPYING.txt', which is part of this source code package. Major Revision History: 2014/6/5: complete version 1.0 ''' from flask import Flask, request, jsonify, render_template from flask import Blueprint, after_this_request, make_response from mad_interface_server.database import db, POS from mad_interface_server import app demand = Blueprint('demand', __name__) import json import uuid import requests import time postInfo = "Null" need_validate = False @demand.teardown_request def teardown_request(exception): global postInfo postData = postInfo if postData != "Null": time.sleep(2) validate_topic_url(postData) postInfo = 'Null' pass else: return 'thx' @demand.route('/fetch/', methods=['POST', 'GET']) def receive_req(): ''' Receive a request from client side and response according different demands ''' from mad_interface_server import information response = information.answer(request.data) return jsonify(response) @demand.route('/send/', methods=['GET', 'POST']) def create_info(): ''' Receive the information from client side and store it. ''' from mad_interface_server import information if request.method == 'POST': data = json.loads(request.data) information.build_info(data) return 'ok' @demand.route('/hub/', methods=['GET', 'HEAD']) def discovery(): ''' If subscribers access '/hub' path by using HTTP GET or HEAD method, IS will response HTTP Link Header and response body to subscribers. 
This route can make a Link Header of response to subscribers Default Link Header can be "Null" ''' pos_id = request.args.get('posId') pos_type = request.args.get('posType') dt = determine_topic_and_hub(pos_id, pos_type) print 'return response' print dt # print >> sys.stderr, "/hub GET HEAD..." resp = make_response(render_template('ok.html'), 200) resp.headers['link'] = '<' + dt['hub_url'] + '>; rel="hub", <' \ + dt['topic_url'] + '>; rel="self"' print resp return resp # # To Do judgement function for response # # result = determineTopic(request.query_string) # @demand.route('/subscribe/', methods=['POST']) def hub(): print 'hello' # # If subscribers access '/hub' path using HTTP POST method, # IS will be a Hub to deal with subscribe/unsubcribe action. # # This request has a Content-Type of application/x-www-form-urlencoded and # the following parameters in its body: # # hub.callback # # hub.mode # # hub.topic # # hub.lease_seconds(Optional) - # The hub-determined number of seconds that the subscription will # stay active before expiring, measured from the time the verification # request was made from the hub to the subscriber. # # hub.secret(Optional) - # A subscriber-provided secret string that will be used to compute an # HMAC digest for authorized content distribution. 
# global postInfo postData = request.form print postData if postData['hub.mode'] and postData['hub.topic'] \ and postData['hub.callback']: if postData['hub.mode'] == 'subscribe': # # solve postData to Global context # # g.postData = postData # is_find_url = info.match_url(postInfo['hub.topic']) postInfo = postData resp = make_response(render_template('Accepted.html'), 202) return resp elif postData['hub.mode'] == 'unsubscribe': # # To Do a function to clear record in list of subscribers # resp = make_response(render_template('Accepted.html'), 202) return resp else: resp = make_response(render_template('Unknown.html'), 406) return resp else: resp = make_response(render_template('Unknown.html'), 406) return resp # # To Do publish for other publisher if we need to be a public hub # # elif postData['hub.mode'] == 'publish': # return 'publish' # def validate_topic_url(postData): # # Subscriptions MAY be validated by the Hubs who may require more details # to accept or refuse a subscription.The Hub MAY also check with the # publisher whether the subscription should be accepted.Hubs MUST preserve # the query string during subscription verification by appending new # parameters to the end of the list using the & (ampersand) character # to join. # # If topic URL is correct from publisher, the hub MUST perform verification # of intent of the subscirber if denied, hub must infrom GET request to # subscriber's callback URL [] # # print >> sys.stderr, 'validate_topic_url' # answer = fromDb(postData['hub.topic']) answer_reason = 'No this topic' print postInfo['hub.topic'] is_find_url = match_url(postInfo['hub.topic']) # if answer.judge: if is_find_url is True: # # Verifie Intent of the Subscribers # This request has the following query string arguments appended: # # hub.mode # hub.topic # hub.challage - A hub-generated, random string that MUST be echoed # by the subscriber to verify the subscription. 
# hub.lease_seconds(Optional) # randomKey = uuid.uuid4() payload = {'hub.mode': postInfo['hub.mode'], 'hub.topic': postInfo['hub.topic'], 'hub.challenge': randomKey} req = requests.get(postInfo['hub.callback'], params=payload) print payload print postInfo['hub.callback'] print str(req.status_code) if ( str(req.status_code)[:1] == '2' and str(req.content) == str(randomKey)): store_subscriber(postInfo['hub.topic'], postInfo['hub.callback']) content_distribution(postInfo['hub.callback']) print 'success' else: print 'fail' # 'verification to have failed.' # storefailedSubscritions(g.postData['hub.callback']) # print >> sys.stderr, 'storefailedSlubscritions: %s' % g.postData[ # 'hub.callback'] else: # # return 'send reason to subscribers' # This request has the following query string arguments appended: # # hub.mode # hub.topic # hub.reason(Optional) -The hub may include a reason for which the # subscription has been denied. # payload = {'hub.mode': postInfo['hub.mode'], 'hub.topic': postInfo['hub.topic'], 'hub.reason': answer_reason} req = requests.get(postInfo['hub.callback'], params=payload) @demand.route('/textView/') def show_text(): ''' Display the topic content with text. ''' return render_template('text_view.html') @demand.route('/imgView/') def show_img(): """ Display the topic content with image. """ print 'ff' return render_template('image_view.html') def determine_topic_and_hub(pos_id, pos_type): """ Decide which topic address and hub address will be assigned the subscriber and return the json object that include their values. 
pos_id : The POS server ID pos_type : The fix type or mobile type of POS server """ reply = { 'hub_url': app.config['WEB_URL'] + '/subscribe/', } reply['topic_url'] = 'Not found' if pos_type == 'fix': for p in db.session.query(POS): if p.id == pos_id: reply['topic_url'] = p.topic_dir elif pos_type == 'mobile': print 'testing' return reply def match_url(topic_url): is_find = False for p in db.session.query(POS): if p.topic_dir == topic_url: is_find = True return is_find def store_subscriber(topic_url, callback_url): for p in db.session.query(POS): if p.topic_dir == topic_url: p.callback_url = callback_url p.is_subscribe = True db.session.commit() print 'have stored the subscription' def content_distribution(sub_url): """ Prepare file that will send the subscriber, then publish to the corresponding subscriber. """ # search the corresponding POS ID according the subscriber url if sub_url is not None: for p in db.session.query(POS): if p.callback_url == sub_url: pos_id = p.id topic_dictionary = { 'png': app.config['TOPIC_DIR'] + pos_id + '/' + pos_id + '.png', 'rdf': app.config['TOPIC_DIR'] + pos_id + '/' + pos_id + '.rdf' } for x in topic_dictionary: files = {'file': open(topic_dictionary[x], 'rb')} r = requests.post(sub_url, files=files) else: print "This POS server have not subscribed"
gpl-3.0
-2,992,886,536,795,352,000
27.51462
79
0.579213
false
anbangleo/NlsdeWeb
Python-3.6.0/Lib/json/encoder.py
4
16020
"""Implementation of JSONEncoder """ import re try: from _json import encode_basestring_ascii as c_encode_basestring_ascii except ImportError: c_encode_basestring_ascii = None try: from _json import encode_basestring as c_encode_basestring except ImportError: c_encode_basestring = None try: from _json import make_encoder as c_make_encoder except ImportError: c_make_encoder = None ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]') ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') HAS_UTF8 = re.compile(b'[\x80-\xff]') ESCAPE_DCT = { '\\': '\\\\', '"': '\\"', '\b': '\\b', '\f': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', } for i in range(0x20): ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i)) #ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) INFINITY = float('inf') def py_encode_basestring(s): """Return a JSON representation of a Python string """ def replace(match): return ESCAPE_DCT[match.group(0)] return '"' + ESCAPE.sub(replace, s) + '"' encode_basestring = (c_encode_basestring or py_encode_basestring) def py_encode_basestring_ascii(s): """Return an ASCII-only JSON representation of a Python string """ def replace(match): s = match.group(0) try: return ESCAPE_DCT[s] except KeyError: n = ord(s) if n < 0x10000: return '\\u{0:04x}'.format(n) #return '\\u%04x' % (n,) else: # surrogate pair n -= 0x10000 s1 = 0xd800 | ((n >> 10) & 0x3ff) s2 = 0xdc00 | (n & 0x3ff) return '\\u{0:04x}\\u{1:04x}'.format(s1, s2) return '"' + ESCAPE_ASCII.sub(replace, s) + '"' encode_basestring_ascii = ( c_encode_basestring_ascii or py_encode_basestring_ascii) class JSONEncoder(object): """Extensible JSON <http://json.org> encoder for Python data structures. 
Supports the following objects and types by default: +-------------------+---------------+ | Python | JSON | +===================+===============+ | dict | object | +-------------------+---------------+ | list, tuple | array | +-------------------+---------------+ | str | string | +-------------------+---------------+ | int, float | number | +-------------------+---------------+ | True | true | +-------------------+---------------+ | False | false | +-------------------+---------------+ | None | null | +-------------------+---------------+ To extend this to recognize other objects, subclass and implement a ``.default()`` method with another method that returns a serializable object for ``o`` if possible, otherwise it should call the superclass implementation (to raise ``TypeError``). """ item_separator = ', ' key_separator = ': ' def __init__(self, *, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False, indent=None, separators=None, default=None): """Constructor for JSONEncoder, with sensible defaults. If skipkeys is false, then it is a TypeError to attempt encoding of keys that are not str, int, float or None. If skipkeys is True, such items are simply skipped. If ensure_ascii is true, the output is guaranteed to be str objects with all incoming non-ASCII characters escaped. If ensure_ascii is false, the output can contain non-ASCII characters. If check_circular is true, then lists, dicts, and custom encoded objects will be checked for circular references during encoding to prevent an infinite recursion (which would cause an OverflowError). Otherwise, no such check takes place. If allow_nan is true, then NaN, Infinity, and -Infinity will be encoded as such. This behavior is not JSON specification compliant, but is consistent with most JavaScript based encoders and decoders. Otherwise, it will be a ValueError to encode such floats. 
If sort_keys is true, then the output of dictionaries will be sorted by key; this is useful for regression tests to ensure that JSON serializations can be compared on a day-to-day basis. If indent is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. None is the most compact representation. If specified, separators should be an (item_separator, key_separator) tuple. The default is (', ', ': ') if *indent* is ``None`` and (',', ': ') otherwise. To get the most compact JSON representation, you should specify (',', ':') to eliminate whitespace. If specified, default is a function that gets called for objects that can't otherwise be serialized. It should return a JSON encodable version of the object or raise a ``TypeError``. """ self.skipkeys = skipkeys self.ensure_ascii = ensure_ascii self.check_circular = check_circular self.allow_nan = allow_nan self.sort_keys = sort_keys self.indent = indent if separators is not None: self.item_separator, self.key_separator = separators elif indent is not None: self.item_separator = ',' if default is not None: self.default = default def default(self, o): """Implement this method in a subclass such that it returns a serializable object for ``o``, or calls the base implementation (to raise a ``TypeError``). For example, to support arbitrary iterators, you could implement default like this:: def default(self, o): try: iterable = iter(o) except TypeError: pass else: return list(iterable) # Let the base class default method raise the TypeError return JSONEncoder.default(self, o) """ raise TypeError("Object of type '%s' is not JSON serializable" % o.__class__.__name__) def encode(self, o): """Return a JSON string representation of a Python data structure. >>> from json.encoder import JSONEncoder >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) '{"foo": ["bar", "baz"]}' """ # This is for extremely simple cases and benchmarks. 
if isinstance(o, str): if self.ensure_ascii: return encode_basestring_ascii(o) else: return encode_basestring(o) # This doesn't pass the iterator directly to ''.join() because the # exceptions aren't as detailed. The list call should be roughly # equivalent to the PySequence_Fast that ''.join() would do. chunks = self.iterencode(o, _one_shot=True) if not isinstance(chunks, (list, tuple)): chunks = list(chunks) return ''.join(chunks) def iterencode(self, o, _one_shot=False): """Encode the given object and yield each string representation as available. For example:: for chunk in JSONEncoder().iterencode(bigobject): mysocket.write(chunk) """ if self.check_circular: markers = {} else: markers = None if self.ensure_ascii: _encoder = encode_basestring_ascii else: _encoder = encode_basestring def floatstr(o, allow_nan=self.allow_nan, _repr=float.__repr__, _inf=INFINITY, _neginf=-INFINITY): # Check for specials. Note that this type of test is processor # and/or platform-specific, so do tests which don't depend on the # internals. 
if o != o: text = 'NaN' elif o == _inf: text = 'Infinity' elif o == _neginf: text = '-Infinity' else: return _repr(o) if not allow_nan: raise ValueError( "Out of range float values are not JSON compliant: " + repr(o)) return text if (_one_shot and c_make_encoder is not None and self.indent is None): _iterencode = c_make_encoder( markers, self.default, _encoder, self.indent, self.key_separator, self.item_separator, self.sort_keys, self.skipkeys, self.allow_nan) else: _iterencode = _make_iterencode( markers, self.default, _encoder, self.indent, floatstr, self.key_separator, self.item_separator, self.sort_keys, self.skipkeys, _one_shot) return _iterencode(o, 0) def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, ## HACK: hand-optimized bytecode; turn globals into locals ValueError=ValueError, dict=dict, float=float, id=id, int=int, isinstance=isinstance, list=list, str=str, tuple=tuple, _intstr=int.__str__, ): if _indent is not None and not isinstance(_indent, str): _indent = ' ' * _indent def _iterencode_list(lst, _current_indent_level): if not lst: yield '[]' return if markers is not None: markerid = id(lst) if markerid in markers: raise ValueError("Circular reference detected") markers[markerid] = lst buf = '[' if _indent is not None: _current_indent_level += 1 newline_indent = '\n' + _indent * _current_indent_level separator = _item_separator + newline_indent buf += newline_indent else: newline_indent = None separator = _item_separator first = True for value in lst: if first: first = False else: buf = separator if isinstance(value, str): yield buf + _encoder(value) elif value is None: yield buf + 'null' elif value is True: yield buf + 'true' elif value is False: yield buf + 'false' elif isinstance(value, int): # Subclasses of int/float may override __str__, but we still # want to encode them as integers/floats in JSON. One example # within the standard library is IntEnum. 
yield buf + _intstr(value) elif isinstance(value, float): # see comment above for int yield buf + _floatstr(value) else: yield buf if isinstance(value, (list, tuple)): chunks = _iterencode_list(value, _current_indent_level) elif isinstance(value, dict): chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) yield from chunks if newline_indent is not None: _current_indent_level -= 1 yield '\n' + _indent * _current_indent_level yield ']' if markers is not None: del markers[markerid] def _iterencode_dict(dct, _current_indent_level): if not dct: yield '{}' return if markers is not None: markerid = id(dct) if markerid in markers: raise ValueError("Circular reference detected") markers[markerid] = dct yield '{' if _indent is not None: _current_indent_level += 1 newline_indent = '\n' + _indent * _current_indent_level item_separator = _item_separator + newline_indent yield newline_indent else: newline_indent = None item_separator = _item_separator first = True if _sort_keys: items = sorted(dct.items(), key=lambda kv: kv[0]) else: items = dct.items() for key, value in items: if isinstance(key, str): pass # JavaScript is weakly typed for these, so it makes sense to # also allow them. Many encoders seem to do something like this. 
elif isinstance(key, float): # see comment for int/float in _make_iterencode key = _floatstr(key) elif key is True: key = 'true' elif key is False: key = 'false' elif key is None: key = 'null' elif isinstance(key, int): # see comment for int/float in _make_iterencode key = _intstr(key) elif _skipkeys: continue else: raise TypeError("key " + repr(key) + " is not a string") if first: first = False else: yield item_separator yield _encoder(key) yield _key_separator if isinstance(value, str): yield _encoder(value) elif value is None: yield 'null' elif value is True: yield 'true' elif value is False: yield 'false' elif isinstance(value, int): # see comment for int/float in _make_iterencode yield _intstr(value) elif isinstance(value, float): # see comment for int/float in _make_iterencode yield _floatstr(value) else: if isinstance(value, (list, tuple)): chunks = _iterencode_list(value, _current_indent_level) elif isinstance(value, dict): chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) yield from chunks if newline_indent is not None: _current_indent_level -= 1 yield '\n' + _indent * _current_indent_level yield '}' if markers is not None: del markers[markerid] def _iterencode(o, _current_indent_level): if isinstance(o, str): yield _encoder(o) elif o is None: yield 'null' elif o is True: yield 'true' elif o is False: yield 'false' elif isinstance(o, int): # see comment for int/float in _make_iterencode yield _intstr(o) elif isinstance(o, float): # see comment for int/float in _make_iterencode yield _floatstr(o) elif isinstance(o, (list, tuple)): yield from _iterencode_list(o, _current_indent_level) elif isinstance(o, dict): yield from _iterencode_dict(o, _current_indent_level) else: if markers is not None: markerid = id(o) if markerid in markers: raise ValueError("Circular reference detected") markers[markerid] = o o = _default(o) yield from _iterencode(o, _current_indent_level) if markers is not None: del 
markers[markerid] return _iterencode
mit
-111,930,662,550,374,030
35.326531
78
0.528589
false
tradej/pykickstart-old
pykickstart/handlers/rhel3.py
10
3894
# # Chris Lumens <[email protected]> # # Copyright 2007 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, modify, # copy, or redistribute it subject to the terms and conditions of the GNU # General Public License v.2. This program is distributed in the hope that it # will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the # implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat # trademarks that are incorporated in the source code or documentation are not # subject to the GNU General Public License and may only be used or replicated # with the express permission of Red Hat, Inc. # __all__ = ["RHEL3Handler"] from pykickstart import commands from pykickstart.base import BaseHandler from pykickstart.version import RHEL3 class RHEL3Handler(BaseHandler): version = RHEL3 commandMap = { "auth": commands.authconfig.FC3_Authconfig, "authconfig": commands.authconfig.FC3_Authconfig, "autopart": commands.autopart.FC3_AutoPart, "autostep": commands.autostep.FC3_AutoStep, "bootloader": commands.bootloader.FC3_Bootloader, "cdrom": commands.cdrom.FC3_Cdrom, "clearpart": commands.clearpart.FC3_ClearPart, "cmdline": commands.displaymode.FC3_DisplayMode, "device": commands.device.FC3_Device, "deviceprobe": commands.deviceprobe.FC3_DeviceProbe, "driverdisk": commands.driverdisk.FC3_DriverDisk, "firewall": commands.firewall.FC3_Firewall, "firstboot": commands.firstboot.FC3_Firstboot, "graphical": commands.displaymode.FC3_DisplayMode, "halt": commands.reboot.FC3_Reboot, "harddrive": commands.harddrive.FC3_HardDrive, "ignoredisk": commands.ignoredisk.FC3_IgnoreDisk, "install": commands.upgrade.FC3_Upgrade, "interactive": 
commands.interactive.FC3_Interactive, "keyboard": commands.keyboard.FC3_Keyboard, "lang": commands.lang.FC3_Lang, "langsupport": commands.langsupport.FC3_LangSupport, "lilo": commands.bootloader.FC3_Bootloader, "lilocheck": commands.lilocheck.FC3_LiloCheck, "logvol": commands.logvol.FC3_LogVol, "method": commands.method.FC3_Method, "monitor": commands.monitor.FC3_Monitor, "mouse": commands.mouse.RHEL3_Mouse, "network": commands.network.FC3_Network, "nfs": commands.nfs.FC3_NFS, "part": commands.partition.FC3_Partition, "partition": commands.partition.FC3_Partition, "poweroff": commands.reboot.FC3_Reboot, "raid": commands.raid.FC3_Raid, "reboot": commands.reboot.FC3_Reboot, "rootpw": commands.rootpw.FC3_RootPw, "shutdown": commands.reboot.FC3_Reboot, "skipx": commands.skipx.FC3_SkipX, "text": commands.displaymode.FC3_DisplayMode, "timezone": commands.timezone.FC3_Timezone, "upgrade": commands.upgrade.FC3_Upgrade, "url": commands.url.FC3_Url, "vnc": commands.vnc.FC3_Vnc, "volgroup": commands.volgroup.FC3_VolGroup, "xconfig": commands.xconfig.FC3_XConfig, "zerombr": commands.zerombr.FC3_ZeroMbr, } dataMap = { "DriverDiskData": commands.driverdisk.FC3_DriverDiskData, "LogVolData": commands.logvol.FC3_LogVolData, "NetworkData": commands.network.RHEL4_NetworkData, "PartData": commands.partition.FC3_PartData, "RaidData": commands.raid.FC3_RaidData, "VolGroupData": commands.volgroup.FC3_VolGroupData, "ZFCPData": commands.zfcp.FC3_ZFCPData, }
gpl-2.0
4,120,978,417,579,551,000
44.27907
79
0.694145
false
mfazekas/safaridriver
selenium/src/py/lib/docutils/core.py
5
27509
# Authors: David Goodger # Contact: [email protected] # Revision: $Revision: 4361 $ # Date: $Date: 2006-02-28 23:33:41 +0100 (Tue, 28 Feb 2006) $ # Copyright: This module has been placed in the public domain. """ Calling the ``publish_*`` convenience functions (or instantiating a `Publisher` object) with component names will result in default behavior. For custom behavior (setting component options), create custom component objects first, and pass *them* to ``publish_*``/`Publisher`. See `The Docutils Publisher`_. .. _The Docutils Publisher: http://docutils.sf.net/docs/api/publisher.html """ __docformat__ = 'reStructuredText' import sys import pprint from types import StringType from docutils import __version__, __version_details__, SettingsSpec from docutils import frontend, io, utils, readers, writers from docutils.frontend import OptionParser from docutils.transforms import Transformer import docutils.readers.doctree class Publisher: """ A facade encapsulating the high-level logic of a Docutils system. """ def __init__(self, reader=None, parser=None, writer=None, source=None, source_class=io.FileInput, destination=None, destination_class=io.FileOutput, settings=None): """ Initial setup. If any of `reader`, `parser`, or `writer` are not specified, the corresponding ``set_...`` method should be called with a component name (`set_reader` sets the parser as well). 
""" self.document = None """The document tree (`docutils.nodes` objects).""" self.reader = reader """A `docutils.readers.Reader` instance.""" self.parser = parser """A `docutils.parsers.Parser` instance.""" self.writer = writer """A `docutils.writers.Writer` instance.""" for component in 'reader', 'parser', 'writer': assert not isinstance(getattr(self, component), StringType), \ ('passed string as "%s" parameter; use "%s_name" instead' % (getattr(self, component), component, component)) self.source = source """The source of input data, a `docutils.io.Input` instance.""" self.source_class = source_class """The class for dynamically created source objects.""" self.destination = destination """The destination for docutils output, a `docutils.io.Output` instance.""" self.destination_class = destination_class """The class for dynamically created destination objects.""" self.settings = settings """An object containing Docutils settings as instance attributes. Set by `self.process_command_line()` or `self.get_settings()`.""" def set_reader(self, reader_name, parser, parser_name): """Set `self.reader` by name.""" reader_class = readers.get_reader_class(reader_name) self.reader = reader_class(parser, parser_name) self.parser = self.reader.parser def set_writer(self, writer_name): """Set `self.writer` by name.""" writer_class = writers.get_writer_class(writer_name) self.writer = writer_class() def set_components(self, reader_name, parser_name, writer_name): if self.reader is None: self.set_reader(reader_name, self.parser, parser_name) if self.parser is None: if self.reader.parser is None: self.reader.set_parser(parser_name) self.parser = self.reader.parser if self.writer is None: self.set_writer(writer_name) def setup_option_parser(self, usage=None, description=None, settings_spec=None, config_section=None, **defaults): if config_section: if not settings_spec: settings_spec = SettingsSpec() settings_spec.config_section = config_section parts = config_section.split() if 
len(parts) > 1 and parts[-1] == 'application': settings_spec.config_section_dependencies = ['applications'] #@@@ Add self.source & self.destination to components in future? option_parser = OptionParser( components=(self.parser, self.reader, self.writer, settings_spec), defaults=defaults, read_config_files=1, usage=usage, description=description) return option_parser def get_settings(self, usage=None, description=None, settings_spec=None, config_section=None, **defaults): """ Set and return default settings (overrides in `defaults` dict). Set components first (`self.set_reader` & `self.set_writer`). Explicitly setting `self.settings` disables command line option processing from `self.publish()`. """ option_parser = self.setup_option_parser( usage, description, settings_spec, config_section, **defaults) self.settings = option_parser.get_default_values() return self.settings def process_programmatic_settings(self, settings_spec, settings_overrides, config_section): if self.settings is None: defaults = (settings_overrides or {}).copy() # Propagate exceptions by default when used programmatically: defaults.setdefault('traceback', 1) self.get_settings(settings_spec=settings_spec, config_section=config_section, **defaults) def process_command_line(self, argv=None, usage=None, description=None, settings_spec=None, config_section=None, **defaults): """ Pass an empty list to `argv` to avoid reading `sys.argv` (the default). Set components first (`self.set_reader` & `self.set_writer`). 
""" option_parser = self.setup_option_parser( usage, description, settings_spec, config_section, **defaults) if argv is None: argv = sys.argv[1:] self.settings = option_parser.parse_args(argv) def set_io(self, source_path=None, destination_path=None): if self.source is None: self.set_source(source_path=source_path) if self.destination is None: self.set_destination(destination_path=destination_path) def set_source(self, source=None, source_path=None): if source_path is None: source_path = self.settings._source else: self.settings._source = source_path self.source = self.source_class( source=source, source_path=source_path, encoding=self.settings.input_encoding) def set_destination(self, destination=None, destination_path=None): if destination_path is None: destination_path = self.settings._destination else: self.settings._destination = destination_path self.destination = self.destination_class( destination=destination, destination_path=destination_path, encoding=self.settings.output_encoding, error_handler=self.settings.output_encoding_error_handler) def apply_transforms(self): self.document.transformer.populate_from_components( (self.source, self.reader, self.reader.parser, self.writer, self.destination)) self.document.transformer.apply_transforms() def publish(self, argv=None, usage=None, description=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=None): """ Process command line options and arguments (if `self.settings` not already set), run `self.reader` and then `self.writer`. Return `self.writer`'s output. 
""" exit = None try: if self.settings is None: self.process_command_line( argv, usage, description, settings_spec, config_section, **(settings_overrides or {})) self.set_io() self.document = self.reader.read(self.source, self.parser, self.settings) self.apply_transforms() output = self.writer.write(self.document, self.destination) self.writer.assemble_parts() except SystemExit, error: exit = 1 exit_status = error.code except Exception, error: if not self.settings: # exception too early to report nicely raise if self.settings.traceback: # Propagate exceptions? self.debugging_dumps() raise self.report_Exception(error) exit = 1 exit_status = 1 self.debugging_dumps() if (enable_exit_status and self.document and (self.document.reporter.max_level >= self.settings.exit_status_level)): sys.exit(self.document.reporter.max_level + 10) elif exit: sys.exit(exit_status) return output def debugging_dumps(self): if not self.document: return if self.settings.dump_settings: print >>sys.stderr, '\n::: Runtime settings:' print >>sys.stderr, pprint.pformat(self.settings.__dict__) if self.settings.dump_internals: print >>sys.stderr, '\n::: Document internals:' print >>sys.stderr, pprint.pformat(self.document.__dict__) if self.settings.dump_transforms: print >>sys.stderr, '\n::: Transforms applied:' print >>sys.stderr, (' (priority, transform class, ' 'pending node details, keyword args)') print >>sys.stderr, pprint.pformat( [(priority, '%s.%s' % (xclass.__module__, xclass.__name__), pending and pending.details, kwargs) for priority, xclass, pending, kwargs in self.document.transformer.applied]) if self.settings.dump_pseudo_xml: print >>sys.stderr, '\n::: Pseudo-XML:' print >>sys.stderr, self.document.pformat().encode( 'raw_unicode_escape') def report_Exception(self, error): if isinstance(error, utils.SystemMessage): self.report_SystemMessage(error) elif isinstance(error, UnicodeError): self.report_UnicodeError(error) else: print >>sys.stderr, '%s: %s' % (error.__class__.__name__, 
error) print >>sys.stderr, ("""\ Exiting due to error. Use "--traceback" to diagnose. Please report errors to <[email protected]>. Include "--traceback" output, Docutils version (%s [%s]), Python version (%s), your OS type & version, and the command line used.""" % (__version__, __version_details__, sys.version.split()[0])) def report_SystemMessage(self, error): print >>sys.stderr, ('Exiting due to level-%s (%s) system message.' % (error.level, utils.Reporter.levels[error.level])) def report_UnicodeError(self, error): sys.stderr.write( '%s: %s\n' '\n' 'The specified output encoding (%s) cannot\n' 'handle all of the output.\n' 'Try setting "--output-encoding-error-handler" to\n' '\n' '* "xmlcharrefreplace" (for HTML & XML output);\n' % (error.__class__.__name__, error, self.settings.output_encoding)) try: data = error.object[error.start:error.end] sys.stderr.write( ' the output will contain "%s" and should be usable.\n' '* "backslashreplace" (for other output formats, Python 2.3+);\n' ' look for "%s" in the output.\n' % (data.encode('ascii', 'xmlcharrefreplace'), data.encode('ascii', 'backslashreplace'))) except AttributeError: sys.stderr.write(' the output should be usable as-is.\n') sys.stderr.write( '* "replace"; look for "?" in the output.\n' '\n' '"--output-encoding-error-handler" is currently set to "%s".\n' '\n' 'Exiting due to error. Use "--traceback" to diagnose.\n' 'If the advice above doesn\'t eliminate the error,\n' 'please report it to <[email protected]>.\n' 'Include "--traceback" output, Docutils version (%s),\n' 'Python version (%s), your OS type & version, and the\n' 'command line used.\n' % (self.settings.output_encoding_error_handler, __version__, sys.version.split()[0])) default_usage = '%prog [options] [<source> [<destination>]]' default_description = ('Reads from <source> (default is stdin) and writes to ' '<destination> (default is stdout). 
See ' '<http://docutils.sf.net/docs/user/config.html> for ' 'the full reference.') def publish_cmdline(reader=None, reader_name='standalone', parser=None, parser_name='restructuredtext', writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=1, argv=None, usage=default_usage, description=default_description): """ Set up & run a `Publisher` for command-line-based file I/O (input and output file paths taken automatically from the command line). Return the encoded string output also. Parameters: see `publish_programmatically` for the remainder. - `argv`: Command-line argument list to use instead of ``sys.argv[1:]``. - `usage`: Usage string, output if there's a problem parsing the command line. - `description`: Program description, output for the "--help" option (along with command-line option descriptions). """ pub = Publisher(reader, parser, writer, settings=settings) pub.set_components(reader_name, parser_name, writer_name) output = pub.publish( argv, usage, description, settings_spec, settings_overrides, config_section=config_section, enable_exit_status=enable_exit_status) return output def publish_file(source=None, source_path=None, destination=None, destination_path=None, reader=None, reader_name='standalone', parser=None, parser_name='restructuredtext', writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=None): """ Set up & run a `Publisher` for programmatic use with file-like I/O. Return the encoded string output also. Parameters: see `publish_programmatically`. 
""" output, pub = publish_programmatically( source_class=io.FileInput, source=source, source_path=source_path, destination_class=io.FileOutput, destination=destination, destination_path=destination_path, reader=reader, reader_name=reader_name, parser=parser, parser_name=parser_name, writer=writer, writer_name=writer_name, settings=settings, settings_spec=settings_spec, settings_overrides=settings_overrides, config_section=config_section, enable_exit_status=enable_exit_status) return output def publish_string(source, source_path=None, destination_path=None, reader=None, reader_name='standalone', parser=None, parser_name='restructuredtext', writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=None): """ Set up & run a `Publisher` for programmatic use with string I/O. Return the encoded string or Unicode string output. For encoded string output, be sure to set the 'output_encoding' setting to the desired encoding. Set it to 'unicode' for unencoded Unicode string output. Here's one way:: publish_string(..., settings_overrides={'output_encoding': 'unicode'}) Similarly for Unicode string input (`source`):: publish_string(..., settings_overrides={'input_encoding': 'unicode'}) Parameters: see `publish_programmatically`. 
""" output, pub = publish_programmatically( source_class=io.StringInput, source=source, source_path=source_path, destination_class=io.StringOutput, destination=None, destination_path=destination_path, reader=reader, reader_name=reader_name, parser=parser, parser_name=parser_name, writer=writer, writer_name=writer_name, settings=settings, settings_spec=settings_spec, settings_overrides=settings_overrides, config_section=config_section, enable_exit_status=enable_exit_status) return output def publish_parts(source, source_path=None, source_class=io.StringInput, destination_path=None, reader=None, reader_name='standalone', parser=None, parser_name='restructuredtext', writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=None): """ Set up & run a `Publisher`, and return a dictionary of document parts. Dictionary keys are the names of parts, and values are Unicode strings; encoding is up to the client. For programmatic use with string I/O. For encoded string input, be sure to set the 'input_encoding' setting to the desired encoding. Set it to 'unicode' for unencoded Unicode string input. Here's how:: publish_parts(..., settings_overrides={'input_encoding': 'unicode'}) Parameters: see `publish_programmatically`. 
""" output, pub = publish_programmatically( source=source, source_path=source_path, source_class=source_class, destination_class=io.StringOutput, destination=None, destination_path=destination_path, reader=reader, reader_name=reader_name, parser=parser, parser_name=parser_name, writer=writer, writer_name=writer_name, settings=settings, settings_spec=settings_spec, settings_overrides=settings_overrides, config_section=config_section, enable_exit_status=enable_exit_status) return pub.writer.parts def publish_doctree(source, source_path=None, source_class=io.StringInput, reader=None, reader_name='standalone', parser=None, parser_name='restructuredtext', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=None): """ Set up & run a `Publisher` for programmatic use with string I/O. Return the document tree. For encoded string input, be sure to set the 'input_encoding' setting to the desired encoding. Set it to 'unicode' for unencoded Unicode string input. Here's one way:: publish_doctree(..., settings_overrides={'input_encoding': 'unicode'}) Parameters: see `publish_programmatically`. """ pub = Publisher(reader=reader, parser=parser, writer=None, settings=settings, source_class=source_class, destination_class=io.NullOutput) pub.set_components(reader_name, parser_name, 'null') pub.process_programmatic_settings( settings_spec, settings_overrides, config_section) pub.set_source(source, source_path) pub.set_destination(None, None) output = pub.publish(enable_exit_status=enable_exit_status) return pub.document def publish_from_doctree(document, destination_path=None, writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=None): """ Set up & run a `Publisher` to render from an existing document tree data structure, for programmatic use with string I/O. Return a pair of encoded string output and document parts. 
Note that document.settings is overridden; if you want to use the settings of the original `document`, pass settings=document.settings. Also, new document.transformer and document.reporter objects are generated. For encoded string output, be sure to set the 'output_encoding' setting to the desired encoding. Set it to 'unicode' for unencoded Unicode string output. Here's one way:: publish_from_doctree( ..., settings_overrides={'output_encoding': 'unicode'}) Parameters: `document` is a `docutils.nodes.document` object, an existing document tree. Other parameters: see `publish_programmatically`. """ reader = docutils.readers.doctree.Reader(parser_name='null') pub = Publisher(reader, None, writer, source=io.DocTreeInput(document), destination_class=io.StringOutput, settings=settings) if not writer and writer_name: pub.set_writer(writer_name) pub.process_programmatic_settings( settings_spec, settings_overrides, config_section) pub.set_destination(None, destination_path) return pub.publish(enable_exit_status=enable_exit_status) def publish_programmatically(source_class, source, source_path, destination_class, destination, destination_path, reader, reader_name, parser, parser_name, writer, writer_name, settings, settings_spec, settings_overrides, config_section, enable_exit_status): """ Set up & run a `Publisher` for custom programmatic use. Return the encoded string output and the Publisher object. Applications should not need to call this function directly. If it does seem to be necessary to call this function directly, please write to the Docutils-develop mailing list <http://docutils.sf.net/docs/user/mailing-lists.html#docutils-develop>. Parameters: * `source_class` **required**: The class for dynamically created source objects. Typically `io.FileInput` or `io.StringInput`. * `source`: Type depends on `source_class`: - If `source_class` is `io.FileInput`: Either a file-like object (must have 'read' and 'close' methods), or ``None`` (`source_path` is opened). 
If neither `source` nor `source_path` are supplied, `sys.stdin` is used. - If `source_class` is `io.StringInput` **required**: The input string, either an encoded 8-bit string (set the 'input_encoding' setting to the correct encoding) or a Unicode string (set the 'input_encoding' setting to 'unicode'). * `source_path`: Type depends on `source_class`: - `io.FileInput`: Path to the input file, opened if no `source` supplied. - `io.StringInput`: Optional. Path to the file or object that produced `source`. Only used for diagnostic output. * `destination_class` **required**: The class for dynamically created destination objects. Typically `io.FileOutput` or `io.StringOutput`. * `destination`: Type depends on `destination_class`: - `io.FileOutput`: Either a file-like object (must have 'write' and 'close' methods), or ``None`` (`destination_path` is opened). If neither `destination` nor `destination_path` are supplied, `sys.stdout` is used. - `io.StringOutput`: Not used; pass ``None``. * `destination_path`: Type depends on `destination_class`: - `io.FileOutput`: Path to the output file. Opened if no `destination` supplied. - `io.StringOutput`: Path to the file or object which will receive the output; optional. Used for determining relative paths (stylesheets, source links, etc.). * `reader`: A `docutils.readers.Reader` object. * `reader_name`: Name or alias of the Reader class to be instantiated if no `reader` supplied. * `parser`: A `docutils.parsers.Parser` object. * `parser_name`: Name or alias of the Parser class to be instantiated if no `parser` supplied. * `writer`: A `docutils.writers.Writer` object. * `writer_name`: Name or alias of the Writer class to be instantiated if no `writer` supplied. * `settings`: A runtime settings (`docutils.frontend.Values`) object, for dotted-attribute access to runtime settings. It's the end result of the `SettingsSpec`, config file, and option processing. 
If `settings` is passed, it's assumed to be complete and no further setting/config/option processing is done. * `settings_spec`: A `docutils.SettingsSpec` subclass or object. Provides extra application-specific settings definitions independently of components. In other words, the application becomes a component, and its settings data is processed along with that of the other components. Used only if no `settings` specified. * `settings_overrides`: A dictionary containing application-specific settings defaults that override the defaults of other components. Used only if no `settings` specified. * `config_section`: A string, the name of the configuration file section for this application. Overrides the ``config_section`` attribute defined by `settings_spec`. Used only if no `settings` specified. * `enable_exit_status`: Boolean; enable exit status at end of processing? """ pub = Publisher(reader, parser, writer, settings=settings, source_class=source_class, destination_class=destination_class) pub.set_components(reader_name, parser_name, writer_name) pub.process_programmatic_settings( settings_spec, settings_overrides, config_section) pub.set_source(source, source_path) pub.set_destination(destination, destination_path) output = pub.publish(enable_exit_status=enable_exit_status) return output, pub
apache-2.0
1,154,422,403,144,568,600
42.730081
81
0.608019
false
timbennett/twitter-tools
get_followers.py
1
2031
''' Get the user IDs of everyone who follows a certain account, written into username.txt. Usage: python get_followers.py username Twitter users have a screen name, which is changeable, and a user ID, which is permanent. This script returns the latter, because Twitter returns 5000 followers per query by this method, but many fewer if you want usernames. With an API limit of 15 requests per 15 minutes, this works out to 5000 followers a minute over the long term. ''' import sys import tweepy import csv import twitter_auth # make sure twitter_auth.py exists with contents: # # access_key = "" # access_secret = "" # consumer_key = "" # consumer_secret = "" # # pick user to investigate try: username = sys.argv[1] except IndexError: print "Error: No username provided." raise except: e = sys.exc_info()[0] print "Error: General error {}".format(e) raise print "Downloading followers for {}".format(username) # set up authentication & api auth = tweepy.OAuthHandler(twitter_auth.consumer_key, twitter_auth.consumer_secret) auth.set_access_token(twitter_auth.access_key, twitter_auth.access_secret) api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True) # zero out current followers file ids_file = open('{}.txt'.format(username),'w') # let's grab followers: ids = [] for page in tweepy.Cursor(api.followers_ids, screen_name=username).pages(): ids.extend(page) print "Downloaded {} followers".format(len(page)) # write follower ids to file; check x = 0 ids_file = open('{}.txt'.format(username),'a') for user in page: ids_file.write("%s\n" % user) x += 1 ids_file.close() print "Wrote {} followers to {}.txt".format(x, username) if len(page) != x: # Check previous issue, now thought fixed, where not all IDs would be written to file: print "Warning: some followers may have been missed.\nIDs expected: {}. IDs written:{}".format(len(ids),x) print "Finished downloading."
mit
3,929,431,864,385,541,600
29.313433
114
0.6903
false
ragupta-git/ImcSdk
imcsdk/mometa/mgmt/MgmtController.py
1
5087
"""This module contains the general information for MgmtController ManagedObject.""" from ...imcmo import ManagedObject from ...imccoremeta import MoPropertyMeta, MoMeta from ...imcmeta import VersionMeta class MgmtControllerConsts: SUBJECT_SAS_EXPANDER = "SAS Expander" SUBJECT_ADAPTOR = "adaptor" SUBJECT_BLADE = "blade" SUBJECT_BOARD_CONTROLLER = "board-controller" SUBJECT_SYSTEM = "system" SUBJECT_UNKNOWN = "unknown" SUBJECT_SIOC = "sioc" class MgmtController(ManagedObject): """This is MgmtController class.""" consts = MgmtControllerConsts() naming_props = set([]) mo_meta = { "classic": MoMeta("MgmtController", "mgmtController", "mgmt", VersionMeta.Version151f, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'adaptorUnit', u'computeRackUnit', u'storageSasExpander'], [u'firmwareBootDefinition', u'firmwareRunning', u'firmwareUpdatable', u'mgmtIf', u'sysdebugMEpLog'], ["Get"]), "modular": MoMeta("MgmtController", "mgmtController", "mgmt", VersionMeta.Version2013e, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'adaptorUnit', u'computeServerNode', u'equipmentSharedIOModule', u'equipmentSystemIOController', u'storageSasExpander'], [u'firmwareBootDefinition', u'firmwareRunning', u'firmwareUpdatable', u'mgmtIf', u'sysdebugMEpLog'], ["Get"]) } prop_meta = { "classic": { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []), "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []), "serial": MoPropertyMeta("serial", "serial", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "status": 
MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "subject": MoPropertyMeta("subject", "subject", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, None, ["SAS Expander", "adaptor", "blade", "board-controller", "system", "unknown"], []), "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, "modular": { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []), "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []), "serial": MoPropertyMeta("serial", "serial", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "subject": MoPropertyMeta("subject", "subject", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["SAS Expander", "adaptor", "blade", "board-controller", "sioc", "system", "unknown"], []), "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, } prop_map = { "classic": { "childAction": "child_action", "dn": "dn", "model": "model", "rn": "rn", "serial": "serial", "status": "status", "subject": 
"subject", "vendor": "vendor", }, "modular": { "childAction": "child_action", "dn": "dn", "model": "model", "rn": "rn", "serial": "serial", "status": "status", "subject": "subject", "vendor": "vendor", }, } def __init__(self, parent_mo_or_dn, **kwargs): self._dirty_mask = 0 self.child_action = None self.model = None self.serial = None self.status = None self.subject = None self.vendor = None ManagedObject.__init__(self, "MgmtController", parent_mo_or_dn, **kwargs)
apache-2.0
982,515,583,627,515,400
54.293478
383
0.610183
false
CiscoSystems/nova
nova/tests/integrated/v3/test_flavor_manage.py
20
1578
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from nova.tests.integrated.v3 import api_sample_base


class FlavorManageSampleJsonTests(api_sample_base.ApiSampleTestBaseV3):
    """API-sample tests for the v3 flavor-manage extension."""

    extension_name = 'flavor-manage'

    def _create_flavor(self):
        """Create a flavor."""
        substitutions = {
            'flavor_id': 10,
            'flavor_name': "test_flavor"
        }
        response = self._do_post("flavors", "flavor-create-post-req",
                                 substitutions)
        # Merge in the common regex placeholders before matching the sample.
        substitutions.update(self._get_regexes())
        self._verify_response("flavor-create-post-resp", substitutions,
                              response, 201)

    def test_create_flavor(self):
        """Get api sample to create a flavor."""
        self._create_flavor()

    def test_delete_flavor(self):
        """Get api sample to delete a flavor."""
        self._create_flavor()
        response = self._do_delete("flavors/10")
        # A successful delete returns 204 with an empty body.
        self.assertEqual(response.status, 204)
        self.assertEqual(response.read(), '')
apache-2.0
258,590,737,368,351,400
35.697674
78
0.634981
false
AdrianRibao/notifintime
docs/source/conf.py
2
8083
# -*- coding: utf-8 -*-
#
# Sphinx build configuration for the django-users documentation.
#
# Originally created by sphinx-quickstart; Sphinx execfile()s this module
# with the documentation source directory as the current directory.  The
# generated template's commented-out defaults have been condensed — see the
# Sphinx "Build configuration file" reference for every available option.

import os
import sys

# If autodoc must import modules from outside this directory, extend the
# path here, e.g.:
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# Sphinx extension modules enabled for this project.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.pngmath',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
]

# Paths (relative to this directory) searched for page templates.
templates_path = ['_templates']

# Source file suffix and the root document of the toctree.
source_suffix = '.rst'
master_doc = 'index'

# General project information.
project = u'django-users'
copyright = u'2012, Adrián Ribao'

# version: short X.Y; release: full version string including tags.
version = '0.1'
release = '0.1'

# Patterns (relative to the source dir) to ignore when looking for sources.
exclude_patterns = []

# Pygments syntax-highlighting style.
pygments_style = 'sphinx'

# -- Options for HTML output ---------------------------------------------------

# Built-in theme used for HTML and HTML Help pages.
html_theme = 'default'

# Custom static files (e.g. style sheets), copied after the builtin ones.
html_static_path = ['_static']

# Output file base name for the HTML help builder.
htmlhelp_basename = 'django-usersdoc'

# -- Options for LaTeX output --------------------------------------------------

# No LaTeX overrides (paper size, point size, preamble) are customized.
latex_elements = {}

# (source start file, target name, title, author, documentclass).
latex_documents = [
    ('index', 'django-users.tex', u'django-users Documentation',
     u'Adrián Ribao', 'manual'),
]

# -- Options for manual page output --------------------------------------------

# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'django-users', u'django-users Documentation',
     [u'Adrián Ribao'], 1)
]

# -- Options for Texinfo output ------------------------------------------------

# (source start file, target name, title, author, dir menu entry,
#  description, category).
texinfo_documents = [
    ('index', 'django-users', u'django-users Documentation',
     u'Adrián Ribao', 'django-users', 'One line description of project.',
     'Miscellaneous'),
]

# Cross-reference the Python standard library via intersphinx.
intersphinx_mapping = {'http://docs.python.org/': None}
bsd-3-clause
-8,799,851,572,573,176,000
31.841463
193
0.706028
false
openstack/vitrage
vitrage/evaluator/template_functions/v2/__init__.py
1
1691
# Copyright 2019 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log

from vitrage.evaluator.template_functions import function_resolver
from vitrage.evaluator.template_functions import GET_PARAM
from vitrage.evaluator.template_validation.base import get_custom_fault_result
from vitrage.evaluator.template_validation.base import ValidationError
from vitrage.evaluator.template_validation.content.base import \
    get_content_correct_result
from vitrage.evaluator.template_validation.content.base import \
    get_template_schema

LOG = log.getLogger(__name__)


def resolve_parameters(template_def, params=None):
    """Resolve get_param() references in *template_def* using *params*.

    Looks up the template's schema, then lets the function resolver
    substitute every GET_PARAM occurrence with the matching value from
    *params*.  Returns a validation result object: the schema lookup's own
    failure result if the template config is invalid, a custom fault result
    if parameter resolution raises ValidationError, or the "content
    correct" result on success.
    """
    schema_result, template_schema = get_template_schema(template_def)
    if not schema_result.is_valid_config:
        # Cannot resolve parameters against an invalid template schema.
        return schema_result

    func_info = function_resolver.FuncInfo(
        name=GET_PARAM,
        func=template_schema.functions.get(GET_PARAM),
        error_code=0)

    try:
        function_resolver.resolve_function(
            func_info=func_info,
            template=template_def,
            actual_params=params)
    except ValidationError as e:
        return get_custom_fault_result(e.code, e.details)

    return get_content_correct_result()
apache-2.0
-1,409,433,809,184,499,700
36.577778
78
0.745121
false