repo_name (string, 5-100 chars) | path (string, 4-375 chars) | copies (class, 991 values) | size (string, 4-7 chars) | content (string, 666 chars-1M) | license (class, 15 values)
---|---|---|---|---|---
dwadler/QGIS
|
python/plugins/processing/algs/gdal/ClipRasterByExtent.py
|
5
|
6353
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ClipRasterByExtent.py
---------------------
Date : September 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessingException,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterEnum,
QgsProcessingParameterExtent,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class ClipRasterByExtent(GdalAlgorithm):
INPUT = 'INPUT'
EXTENT = 'PROJWIN'
NODATA = 'NODATA'
OPTIONS = 'OPTIONS'
DATA_TYPE = 'DATA_TYPE'
OUTPUT = 'OUTPUT'
TYPES = ['Use input layer data type', 'Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT,
self.tr('Input layer')))
self.addParameter(QgsProcessingParameterExtent(self.EXTENT,
self.tr('Clipping extent')))
self.addParameter(QgsProcessingParameterNumber(self.NODATA,
self.tr('Assign a specified nodata value to output bands'),
type=QgsProcessingParameterNumber.Double,
defaultValue=None,
optional=True))
options_param = QgsProcessingParameterString(self.OPTIONS,
self.tr('Additional creation options'),
defaultValue='',
optional=True)
options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
options_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
self.addParameter(options_param)
dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
self.tr('Output data type'),
self.TYPES,
allowMultiple=False,
defaultValue=0)
dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(dataType_param)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
self.tr('Clipped (extent)')))
def name(self):
return 'cliprasterbyextent'
def displayName(self):
return self.tr('Clip raster by extent')
def group(self):
return self.tr('Raster extraction')
def groupId(self):
return 'rasterextraction'
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'raster-clip.png'))
def commandName(self):
return "gdal_translate"
def getConsoleCommands(self, parameters, context, feedback, executing=True):
inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
if inLayer is None:
raise QgsProcessingException('Invalid input layer {}'.format(parameters[self.INPUT] if self.INPUT in parameters else 'INPUT'))
bbox = self.parameterAsExtent(parameters, self.EXTENT, context, inLayer.crs())
if self.NODATA in parameters and parameters[self.NODATA] is not None:
nodata = self.parameterAsDouble(parameters, self.NODATA, context)
else:
nodata = None
options = self.parameterAsString(parameters, self.OPTIONS, context)
out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
arguments = []
arguments.append('-projwin')
arguments.append(str(bbox.xMinimum()))
arguments.append(str(bbox.yMaximum()))
arguments.append(str(bbox.xMaximum()))
arguments.append(str(bbox.yMinimum()))
if nodata is not None:
arguments.append('-a_nodata {}'.format(nodata))
data_type = self.parameterAsEnum(parameters, self.DATA_TYPE, context)
if data_type:
arguments.append('-ot ' + self.TYPES[data_type])
arguments.append('-of')
arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))
if options:
arguments.extend(GdalUtils.parseCreationOptions(options))
arguments.append(inLayer.source())
arguments.append(out)
return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
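# --- Editor's note: a hedged usage sketch, not part of the original file. ---
# Once this provider is registered with the QGIS Processing framework, the
# algorithm can typically be invoked by its id; the parameter keys mirror the
# class constants above. Paths and the extent string below are illustrative.
#
#   import processing  # assumes a running QGIS environment
#   processing.run('gdal:cliprasterbyextent', {
#       'INPUT': '/path/to/input.tif',
#       'PROJWIN': '18.67,18.70,45.77,45.81',  # illustrative xmin,xmax,ymin,ymax
#       'OUTPUT': '/path/to/clipped.tif'})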
|
gpl-2.0
|
mbiebl/rsyslog-doc
|
source/_ext/edit_on_github.py
|
16
|
1177
|
"""
Sphinx extension to add ReadTheDocs-style "Edit on GitHub" links to the
sidebar.
Loosely based on https://github.com/astropy/astropy/pull/347
"""
import os
import warnings
__licence__ = 'BSD (3 clause)'
def get_github_url(app, view, path):
return 'https://github.com/{project}/{view}/{branch}/source/{path}'.format(
project=app.config.edit_on_github_project,
view=view,
branch=app.config.edit_on_github_branch,
path=path)
def html_page_context(app, pagename, templatename, context, doctree):
if templatename != 'page.html':
return
if not app.config.edit_on_github_project:
warnings.warn("edit_on_github_project not specified")
return
path = os.path.relpath(doctree.get('source'), app.builder.srcdir)
show_url = get_github_url(app, 'blob', path)
edit_url = get_github_url(app, 'edit', path)
context['show_on_github_url'] = show_url
context['edit_on_github_url'] = edit_url
def setup(app):
app.add_config_value('edit_on_github_project', '', True)
app.add_config_value('edit_on_github_branch', 'master', True)
app.connect('html-page-context', html_page_context)
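# --- Editor's note: a hedged conf.py sketch, not part of the original file. ---
# setup() registers the two config values used above; a Sphinx project would
# typically enable the extension like this (slug and branch are illustrative):
#
#   extensions = ['edit_on_github']
#   edit_on_github_project = 'rsyslog/rsyslog-doc'
#   edit_on_github_branch = 'master'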
|
apache-2.0
|
BellScurry/gem5-fault-injection
|
src/cpu/simple/TimingSimpleCPU.py
|
69
|
1886
|
# Copyright (c) 2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from m5.params import *
from BaseSimpleCPU import BaseSimpleCPU
class TimingSimpleCPU(BaseSimpleCPU):
type = 'TimingSimpleCPU'
cxx_header = "cpu/simple/timing.hh"
@classmethod
def memory_mode(cls):
return 'timing'
@classmethod
def support_take_over(cls):
return True
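# --- Editor's note: a hedged usage sketch, not part of the original file. ---
# In a gem5 configuration script this class is typically instantiated on a
# system whose memory mode matches memory_mode() above:
#
#   system.cpu = TimingSimpleCPU()
#   system.mem_mode = 'timing'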
|
bsd-3-clause
|
devGregA/code
|
build/lib.linux-x86_64-2.7/scrapy/contrib/spiders/sitemap.py
|
30
|
2670
|
import re
from scrapy.spider import Spider
from scrapy.http import Request, XmlResponse
from scrapy.utils.sitemap import Sitemap, sitemap_urls_from_robots
from scrapy.utils.gz import gunzip, is_gzipped
from scrapy import log
class SitemapSpider(Spider):
sitemap_urls = ()
sitemap_rules = [('', 'parse')]
sitemap_follow = ['']
sitemap_alternate_links = False
def __init__(self, *a, **kw):
super(SitemapSpider, self).__init__(*a, **kw)
self._cbs = []
for r, c in self.sitemap_rules:
if isinstance(c, basestring):
c = getattr(self, c)
self._cbs.append((regex(r), c))
self._follow = [regex(x) for x in self.sitemap_follow]
def start_requests(self):
return (Request(x, callback=self._parse_sitemap) for x in self.sitemap_urls)
def _parse_sitemap(self, response):
if response.url.endswith('/robots.txt'):
for url in sitemap_urls_from_robots(response.body):
yield Request(url, callback=self._parse_sitemap)
else:
body = self._get_sitemap_body(response)
if body is None:
log.msg(format="Ignoring invalid sitemap: %(response)s",
level=log.WARNING, spider=self, response=response)
return
s = Sitemap(body)
if s.type == 'sitemapindex':
for loc in iterloc(s, self.sitemap_alternate_links):
if any(x.search(loc) for x in self._follow):
yield Request(loc, callback=self._parse_sitemap)
elif s.type == 'urlset':
for loc in iterloc(s):
for r, c in self._cbs:
if r.search(loc):
yield Request(loc, callback=c)
break
def _get_sitemap_body(self, response):
"""Return the sitemap body contained in the given response, or None if the
response is not a sitemap.
"""
if isinstance(response, XmlResponse):
return response.body
elif is_gzipped(response):
return gunzip(response.body)
elif response.url.endswith('.xml'):
return response.body
elif response.url.endswith('.xml.gz'):
return gunzip(response.body)
def regex(x):
if isinstance(x, basestring):
return re.compile(x)
return x
def iterloc(it, alt=False):
for d in it:
yield d['loc']
# Also consider alternate URLs (xhtml:link rel="alternate")
if alt and 'alternate' in d:
for l in d['alternate']:
yield l
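# --- Editor's note: a hedged usage sketch, not part of the original module. ---
# A typical subclass points sitemap_urls at a sitemap or robots.txt and maps
# URL patterns to callbacks via sitemap_rules (names below are illustrative):
#
#   class ProductSpider(SitemapSpider):
#       name = 'products'
#       sitemap_urls = ['http://www.example.com/robots.txt']
#       sitemap_rules = [('/product/', 'parse_product')]
#
#       def parse_product(self, response):
#           pass  # extract item fields from the product page here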
|
bsd-3-clause
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/boto/ec2/elb/attributes.py
|
153
|
5103
|
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Created by Chris Huegle for TellApart, Inc.
class ConnectionSettingAttribute(object):
"""
Represents the ConnectionSetting segment of ELB Attributes.
"""
def __init__(self, connection=None):
self.idle_timeout = None
def __repr__(self):
return 'ConnectionSettingAttribute(%s)' % (
self.idle_timeout)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'IdleTimeout':
self.idle_timeout = int(value)
class CrossZoneLoadBalancingAttribute(object):
"""
Represents the CrossZoneLoadBalancing segment of ELB Attributes.
"""
def __init__(self, connection=None):
self.enabled = None
def __repr__(self):
return 'CrossZoneLoadBalancingAttribute(%s)' % (
self.enabled)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Enabled':
if value.lower() == 'true':
self.enabled = True
else:
self.enabled = False
class AccessLogAttribute(object):
"""
Represents the AccessLog segment of ELB attributes.
"""
def __init__(self, connection=None):
self.enabled = None
self.s3_bucket_name = None
self.s3_bucket_prefix = None
self.emit_interval = None
def __repr__(self):
return 'AccessLog(%s, %s, %s, %s)' % (
self.enabled,
self.s3_bucket_name,
self.s3_bucket_prefix,
self.emit_interval
)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Enabled':
if value.lower() == 'true':
self.enabled = True
else:
self.enabled = False
elif name == 'S3BucketName':
self.s3_bucket_name = value
elif name == 'S3BucketPrefix':
self.s3_bucket_prefix = value
elif name == 'EmitInterval':
self.emit_interval = int(value)
class ConnectionDrainingAttribute(object):
"""
Represents the ConnectionDraining segment of ELB attributes.
"""
def __init__(self, connection=None):
self.enabled = None
self.timeout = None
def __repr__(self):
return 'ConnectionDraining(%s, %s)' % (
self.enabled,
self.timeout
)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Enabled':
if value.lower() == 'true':
self.enabled = True
else:
self.enabled = False
elif name == 'Timeout':
self.timeout = int(value)
class LbAttributes(object):
"""
Represents the Attributes of an Elastic Load Balancer.
"""
def __init__(self, connection=None):
self.connection = connection
self.cross_zone_load_balancing = CrossZoneLoadBalancingAttribute(
self.connection)
self.access_log = AccessLogAttribute(self.connection)
self.connection_draining = ConnectionDrainingAttribute(self.connection)
self.connecting_settings = ConnectionSettingAttribute(self.connection)
def __repr__(self):
return 'LbAttributes(%s, %s, %s, %s)' % (
repr(self.cross_zone_load_balancing),
repr(self.access_log),
repr(self.connection_draining),
repr(self.connecting_settings))
def startElement(self, name, attrs, connection):
if name == 'CrossZoneLoadBalancing':
return self.cross_zone_load_balancing
if name == 'AccessLog':
return self.access_log
if name == 'ConnectionDraining':
return self.connection_draining
if name == 'ConnectionSettings':
return self.connecting_settings
def endElement(self, name, value, connection):
pass
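# --- Editor's note: a hedged usage sketch, not part of the original file. ---
# These classes are populated by boto's XML parser; an LbAttributes instance
# is typically obtained from an ELB connection (region and name illustrative):
#
#   import boto.ec2.elb
#   conn = boto.ec2.elb.connect_to_region('us-east-1')
#   attrs = conn.get_all_lb_attributes('my-load-balancer')
#   print attrs.connection_draining.enabled, attrs.connection_draining.timeout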
|
agpl-3.0
|
kpkhxlgy0/SublimeText3
|
Packages/SublimeCodeIntel/libs/SilverCity/ScintillaConstants.py
|
10
|
39525
|
# The file was automatically generated by write_scintilla.py
# from Scintilla.iface
#
# Do not manually edit!
# LexerModule ids (used in find_lexer_module_by_id)
SCLEX_CONTAINER = 0
SCLEX_NULL = 1
SCLEX_PYTHON = 2
SCLEX_CPP = 3
SCLEX_HTML = 4
SCLEX_XML = 5
SCLEX_PERL = 6
SCLEX_SQL = 7
SCLEX_VB = 8
SCLEX_PROPERTIES = 9
SCLEX_ERRORLIST = 10
SCLEX_MAKEFILE = 11
SCLEX_BATCH = 12
SCLEX_XCODE = 13
SCLEX_LATEX = 14
SCLEX_LUA = 15
SCLEX_DIFF = 16
SCLEX_CONF = 17
SCLEX_PASCAL = 18
SCLEX_AVE = 19
SCLEX_ADA = 20
SCLEX_LISP = 21
SCLEX_RUBY = 22
SCLEX_EIFFEL = 23
SCLEX_EIFFELKW = 24
SCLEX_TCL = 25
SCLEX_NNCRONTAB = 26
SCLEX_BULLANT = 27
SCLEX_VBSCRIPT = 28
SCLEX_BAAN = 31
SCLEX_MATLAB = 32
SCLEX_SCRIPTOL = 33
SCLEX_ASM = 34
SCLEX_CPPNOCASE = 35
SCLEX_FORTRAN = 36
SCLEX_F77 = 37
SCLEX_CSS = 38
SCLEX_POV = 39
SCLEX_LOUT = 40
SCLEX_ESCRIPT = 41
SCLEX_PS = 42
SCLEX_NSIS = 43
SCLEX_MMIXAL = 44
SCLEX_CLW = 45
SCLEX_CLWNOCASE = 46
SCLEX_LOT = 47
SCLEX_YAML = 48
SCLEX_TEX = 49
SCLEX_METAPOST = 50
SCLEX_POWERBASIC = 51
SCLEX_FORTH = 52
SCLEX_ERLANG = 53
SCLEX_OCTAVE = 54
SCLEX_MSSQL = 55
SCLEX_VERILOG = 56
SCLEX_KIX = 57
SCLEX_GUI4CLI = 58
SCLEX_SPECMAN = 59
SCLEX_AU3 = 60
SCLEX_APDL = 61
SCLEX_BASH = 62
SCLEX_ASN1 = 63
SCLEX_VHDL = 64
SCLEX_CAML = 65
SCLEX_BLITZBASIC = 66
SCLEX_PUREBASIC = 67
SCLEX_HASKELL = 68
SCLEX_PHPSCRIPT = 69
SCLEX_TADS3 = 70
SCLEX_REBOL = 71
SCLEX_SMALLTALK = 72
SCLEX_FLAGSHIP = 73
SCLEX_CSOUND = 74
SCLEX_FREEBASIC = 75
SCLEX_INNOSETUP = 76
SCLEX_OPAL = 77
SCLEX_SPICE = 78
SCLEX_D = 79
SCLEX_CMAKE = 80
SCLEX_GAP = 81
SCLEX_PLM = 82
SCLEX_PROGRESS = 83
SCLEX_ABAQUS = 84
SCLEX_ASYMPTOTE = 85
SCLEX_R = 86
SCLEX_MAGIK = 87
SCLEX_POWERSHELL = 88
SCLEX_MYSQL = 89
SCLEX_PO = 90
SCLEX_TAL = 91
SCLEX_COBOL = 92
SCLEX_TACL = 93
SCLEX_SORCUS = 94
SCLEX_POWERPRO = 95
SCLEX_NIMROD = 96
SCLEX_SML = 97
SCLEX_MARKDOWN = 98
SCLEX_TXT2TAGS = 99
SCLEX_A68K = 100
SCLEX_MODULA = 101
SCLEX_COFFEESCRIPT = 102
SCLEX_TCMD = 103
SCLEX_AVS = 104
SCLEX_ECL = 105
SCLEX_OSCRIPT = 106
SCLEX_VISUALPROLOG = 107
SCLEX_LITERATEHASKELL = 108
SCLEX_STTXT = 109
SCLEX_XSLT = 110
SCLEX_UDL = 111
SCLEX_AUTOMATIC = 1000
# Lexical states (style constants returned by tokenize_by_style)
SCE_P_DEFAULT = 0
SCE_P_COMMENTLINE = 1
SCE_P_NUMBER = 2
SCE_P_STRING = 3
SCE_P_CHARACTER = 4
SCE_P_WORD = 5
SCE_P_TRIPLE = 6
SCE_P_TRIPLEDOUBLE = 7
SCE_P_CLASSNAME = 8
SCE_P_DEFNAME = 9
SCE_P_OPERATOR = 10
SCE_P_IDENTIFIER = 11
SCE_P_COMMENTBLOCK = 12
SCE_P_STRINGEOL = 13
SCE_P_WORD2 = 14
SCE_P_DECORATOR = 15
SCE_P_STDIN = 16
SCE_P_STDOUT = 17
SCE_P_STDERR = 18
SCE_P_UPPER_BOUND = 19
SCE_C_DEFAULT = 0
SCE_C_COMMENT = 1
SCE_C_COMMENTLINE = 2
SCE_C_COMMENTDOC = 3
SCE_C_NUMBER = 4
SCE_C_WORD = 5
SCE_C_STRING = 6
SCE_C_CHARACTER = 7
SCE_C_UUID = 8
SCE_C_PREPROCESSOR = 9
SCE_C_OPERATOR = 10
SCE_C_IDENTIFIER = 11
SCE_C_STRINGEOL = 12
SCE_C_VERBATIM = 13
SCE_C_REGEX = 14
SCE_C_COMMENTLINEDOC = 15
SCE_C_WORD2 = 16
SCE_C_COMMENTDOCKEYWORD = 17
SCE_C_COMMENTDOCKEYWORDERROR = 18
SCE_C_GLOBALCLASS = 19
SCE_C_STRINGRAW = 20
SCE_C_TRIPLEVERBATIM = 21
SCE_C_HASHQUOTEDSTRING = 22
SCE_C_PREPROCESSORCOMMENT = 23
SCE_C_PREPROCESSORCOMMENTDOC = 24
SCE_C_STDIN = 25
SCE_C_STDOUT = 26
SCE_C_STDERR = 27
SCE_D_DEFAULT = 0
SCE_D_COMMENT = 1
SCE_D_COMMENTLINE = 2
SCE_D_COMMENTDOC = 3
SCE_D_COMMENTNESTED = 4
SCE_D_NUMBER = 5
SCE_D_WORD = 6
SCE_D_WORD2 = 7
SCE_D_WORD3 = 8
SCE_D_TYPEDEF = 9
SCE_D_STRING = 10
SCE_D_STRINGEOL = 11
SCE_D_CHARACTER = 12
SCE_D_OPERATOR = 13
SCE_D_IDENTIFIER = 14
SCE_D_COMMENTLINEDOC = 15
SCE_D_COMMENTDOCKEYWORD = 16
SCE_D_COMMENTDOCKEYWORDERROR = 17
SCE_D_STRINGB = 18
SCE_D_STRINGR = 19
SCE_D_WORD5 = 20
SCE_D_WORD6 = 21
SCE_D_WORD7 = 22
SCE_TCL_DEFAULT = 0
SCE_TCL_COMMENT = 1
SCE_TCL_VARIABLE = 2
SCE_TCL_ARRAY = 3
SCE_TCL_NUMBER = 4
SCE_TCL_WORD = 5
SCE_TCL_STRING = 6
SCE_TCL_CHARACTER = 7
SCE_TCL_LITERAL = 8
SCE_TCL_IDENTIFIER = 9
SCE_TCL_OPERATOR = 10
SCE_TCL_EOL = 11
SCE_TCL_STDIN = 12
SCE_TCL_STDOUT = 13
SCE_TCL_STDERR = 14
SCE_TCL_UPPER_BOUND = 15
SCE_H_DEFAULT = 0
SCE_H_TAG = 1
SCE_H_TAGUNKNOWN = 2
SCE_H_ATTRIBUTE = 3
SCE_H_ATTRIBUTEUNKNOWN = 4
SCE_H_NUMBER = 5
SCE_H_DOUBLESTRING = 6
SCE_H_SINGLESTRING = 7
SCE_H_OTHER = 8
SCE_H_COMMENT = 9
SCE_H_ENTITY = 10
SCE_H_TAGEND = 11
SCE_H_XMLSTART = 12
SCE_H_XMLEND = 13
SCE_H_SCRIPT = 14
SCE_H_ASP = 15
SCE_H_ASPAT = 16
SCE_H_CDATA = 17
SCE_H_QUESTION = 18
SCE_H_VALUE = 19
SCE_H_XCCOMMENT = 20
SCE_H_SGML_DEFAULT = 21
SCE_H_SGML_COMMAND = 22
SCE_H_SGML_1ST_PARAM = 23
SCE_H_SGML_DOUBLESTRING = 24
SCE_H_SGML_SIMPLESTRING = 25
SCE_H_SGML_ERROR = 26
SCE_H_SGML_SPECIAL = 27
SCE_H_SGML_ENTITY = 28
SCE_H_SGML_COMMENT = 29
SCE_H_SGML_1ST_PARAM_COMMENT = 30
SCE_H_SGML_BLOCK_DEFAULT = 31
SCE_HJ_START = 40
SCE_HJ_DEFAULT = 41
SCE_HJ_COMMENT = 42
SCE_HJ_COMMENTLINE = 43
SCE_HJ_COMMENTDOC = 44
SCE_HJ_NUMBER = 45
SCE_HJ_WORD = 46
SCE_HJ_KEYWORD = 47
SCE_HJ_DOUBLESTRING = 48
SCE_HJ_SINGLESTRING = 49
SCE_HJ_SYMBOLS = 50
SCE_HJ_STRINGEOL = 51
SCE_HJ_REGEX = 52
SCE_HJA_START = 55
SCE_HJA_DEFAULT = 56
SCE_HJA_COMMENT = 57
SCE_HJA_COMMENTLINE = 58
SCE_HJA_COMMENTDOC = 59
SCE_HJA_NUMBER = 60
SCE_HJA_WORD = 61
SCE_HJA_KEYWORD = 62
SCE_HJA_DOUBLESTRING = 63
SCE_HJA_SINGLESTRING = 64
SCE_HJA_SYMBOLS = 65
SCE_HJA_STRINGEOL = 66
SCE_HJA_REGEX = 67
SCE_HB_START = 70
SCE_HB_DEFAULT = 71
SCE_HB_COMMENTLINE = 72
SCE_HB_NUMBER = 73
SCE_HB_WORD = 74
SCE_HB_STRING = 75
SCE_HB_IDENTIFIER = 76
SCE_HB_STRINGEOL = 77
SCE_HBA_START = 80
SCE_HBA_DEFAULT = 81
SCE_HBA_COMMENTLINE = 82
SCE_HBA_NUMBER = 83
SCE_HBA_WORD = 84
SCE_HBA_STRING = 85
SCE_HBA_IDENTIFIER = 86
SCE_HBA_STRINGEOL = 87
SCE_HP_START = 90
SCE_HP_DEFAULT = 91
SCE_HP_COMMENTLINE = 92
SCE_HP_NUMBER = 93
SCE_HP_STRING = 94
SCE_HP_CHARACTER = 95
SCE_HP_WORD = 96
SCE_HP_TRIPLE = 97
SCE_HP_TRIPLEDOUBLE = 98
SCE_HP_CLASSNAME = 99
SCE_HP_DEFNAME = 100
SCE_HP_OPERATOR = 101
SCE_HP_IDENTIFIER = 102
SCE_HPHP_COMPLEX_VARIABLE = 104
SCE_HPA_START = 105
SCE_HPA_DEFAULT = 106
SCE_HPA_COMMENTLINE = 107
SCE_HPA_NUMBER = 108
SCE_HPA_STRING = 109
SCE_HPA_CHARACTER = 110
SCE_HPA_WORD = 111
SCE_HPA_TRIPLE = 112
SCE_HPA_TRIPLEDOUBLE = 113
SCE_HPA_CLASSNAME = 114
SCE_HPA_DEFNAME = 115
SCE_HPA_OPERATOR = 116
SCE_HPA_IDENTIFIER = 117
SCE_HPHP_DEFAULT = 118
SCE_HPHP_HSTRING = 119
SCE_HPHP_SIMPLESTRING = 120
SCE_HPHP_WORD = 121
SCE_HPHP_NUMBER = 122
SCE_HPHP_VARIABLE = 123
SCE_HPHP_COMMENT = 124
SCE_HPHP_COMMENTLINE = 125
SCE_HPHP_HSTRING_VARIABLE = 126
SCE_HPHP_OPERATOR = 127
SCE_PL_DEFAULT = 0
SCE_PL_ERROR = 1
SCE_PL_COMMENTLINE = 2
SCE_PL_POD = 3
SCE_PL_NUMBER = 4
SCE_PL_WORD = 5
SCE_PL_STRING = 6
SCE_PL_CHARACTER = 7
SCE_PL_PUNCTUATION = 8
SCE_PL_PREPROCESSOR = 9
SCE_PL_OPERATOR = 10
SCE_PL_IDENTIFIER = 11
SCE_PL_SCALAR = 12
SCE_PL_ARRAY = 13
SCE_PL_HASH = 14
SCE_PL_SYMBOLTABLE = 15
SCE_PL_VARIABLE_INDEXER = 16
SCE_PL_REGEX = 17
SCE_PL_REGSUBST = 18
SCE_PL_LONGQUOTE = 19
SCE_PL_BACKTICKS = 20
SCE_PL_DATASECTION = 21
SCE_PL_HERE_DELIM = 22
SCE_PL_HERE_Q = 23
SCE_PL_HERE_QQ = 24
SCE_PL_HERE_QX = 25
SCE_PL_STRING_Q = 26
SCE_PL_STRING_QQ = 27
SCE_PL_STRING_QX = 28
SCE_PL_STRING_QR = 29
SCE_PL_STRING_QW = 30
SCE_PL_POD_VERB = 31
SCE_PL_SUB_PROTOTYPE = 40
SCE_PL_FORMAT_IDENT = 41
SCE_PL_FORMAT = 42
SCE_PL_SUB = 43
SCE_PL_SUB_ARGS = 44
SCE_PL_UNKNOWN_FIELD = 45
SCE_PL_STDIN = 46
SCE_PL_STDOUT = 47
SCE_PL_STDERR = 48
SCE_PL_UPPER_BOUND = 49
SCE_RB_DEFAULT = 0
SCE_RB_ERROR = 1
SCE_RB_COMMENTLINE = 2
SCE_RB_POD = 3
SCE_RB_NUMBER = 4
SCE_RB_WORD = 5
SCE_RB_STRING = 6
SCE_RB_CHARACTER = 7
SCE_RB_CLASSNAME = 8
SCE_RB_DEFNAME = 9
SCE_RB_OPERATOR = 10
SCE_RB_IDENTIFIER = 11
SCE_RB_REGEX = 12
SCE_RB_GLOBAL = 13
SCE_RB_SYMBOL = 14
SCE_RB_MODULE_NAME = 15
SCE_RB_INSTANCE_VAR = 16
SCE_RB_CLASS_VAR = 17
SCE_RB_BACKTICKS = 18
SCE_RB_DATASECTION = 19
SCE_RB_HERE_DELIM = 20
SCE_RB_HERE_Q = 21
SCE_RB_HERE_QQ = 22
SCE_RB_HERE_QX = 23
SCE_RB_STRING_Q = 24
SCE_RB_STRING_QQ = 25
SCE_RB_STRING_QX = 26
SCE_RB_STRING_QR = 27
SCE_RB_STRING_QW = 28
SCE_RB_STRING_QI = 29
SCE_RB_WORD_DEMOTED = 30
SCE_RB_STDIN = 31
SCE_RB_STDOUT = 40
SCE_RB_STDERR = 41
SCE_RB_UPPER_BOUND = 42
SCE_B_DEFAULT = 0
SCE_B_COMMENT = 1
SCE_B_NUMBER = 2
SCE_B_KEYWORD = 3
SCE_B_STRING = 4
SCE_B_PREPROCESSOR = 5
SCE_B_OPERATOR = 6
SCE_B_IDENTIFIER = 7
SCE_B_DATE = 8
SCE_B_STRINGEOL = 9
SCE_B_KEYWORD2 = 10
SCE_B_KEYWORD3 = 11
SCE_B_KEYWORD4 = 12
SCE_B_CONSTANT = 13
SCE_B_ASM = 14
SCE_B_LABEL = 15
SCE_B_ERROR = 16
SCE_B_HEXNUMBER = 17
SCE_B_BINNUMBER = 18
SCE_PROPS_DEFAULT = 0
SCE_PROPS_COMMENT = 1
SCE_PROPS_SECTION = 2
SCE_PROPS_ASSIGNMENT = 3
SCE_PROPS_DEFVAL = 4
SCE_PROPS_KEY = 5
SCE_L_DEFAULT = 0
SCE_L_COMMAND = 1
SCE_L_TAG = 2
SCE_L_MATH = 3
SCE_L_COMMENT = 4
SCE_L_TAG2 = 5
SCE_L_MATH2 = 6
SCE_L_COMMENT2 = 7
SCE_L_VERBATIM = 8
SCE_L_SHORTCMD = 9
SCE_L_SPECIAL = 10
SCE_L_CMDOPT = 11
SCE_L_ERROR = 12
SCE_LUA_DEFAULT = 0
SCE_LUA_COMMENT = 1
SCE_LUA_COMMENTLINE = 2
SCE_LUA_COMMENTDOC = 3
SCE_LUA_NUMBER = 4
SCE_LUA_WORD = 5
SCE_LUA_STRING = 6
SCE_LUA_CHARACTER = 7
SCE_LUA_LITERALSTRING = 8
SCE_LUA_PREPROCESSOR = 9
SCE_LUA_OPERATOR = 10
SCE_LUA_IDENTIFIER = 11
SCE_LUA_STRINGEOL = 12
SCE_LUA_WORD2 = 13
SCE_LUA_WORD3 = 14
SCE_LUA_WORD4 = 15
SCE_LUA_WORD5 = 16
SCE_LUA_WORD6 = 17
SCE_LUA_WORD7 = 18
SCE_LUA_WORD8 = 19
SCE_LUA_LABEL = 20
SCE_ERR_DEFAULT = 0
SCE_ERR_PYTHON = 1
SCE_ERR_GCC = 2
SCE_ERR_MS = 3
SCE_ERR_CMD = 4
SCE_ERR_BORLAND = 5
SCE_ERR_PERL = 6
SCE_ERR_NET = 7
SCE_ERR_LUA = 8
SCE_ERR_CTAG = 9
SCE_ERR_DIFF_CHANGED = 10
SCE_ERR_DIFF_ADDITION = 11
SCE_ERR_DIFF_DELETION = 12
SCE_ERR_DIFF_MESSAGE = 13
SCE_ERR_PHP = 14
SCE_ERR_ELF = 15
SCE_ERR_IFC = 16
SCE_ERR_IFORT = 17
SCE_ERR_ABSF = 18
SCE_ERR_TIDY = 19
SCE_ERR_JAVA_STACK = 20
SCE_ERR_VALUE = 21
SCE_ERR_GCC_INCLUDED_FROM = 22
SCE_BAT_DEFAULT = 0
SCE_BAT_COMMENT = 1
SCE_BAT_WORD = 2
SCE_BAT_LABEL = 3
SCE_BAT_HIDE = 4
SCE_BAT_COMMAND = 5
SCE_BAT_IDENTIFIER = 6
SCE_BAT_OPERATOR = 7
SCE_TCMD_DEFAULT = 0
SCE_TCMD_COMMENT = 1
SCE_TCMD_WORD = 2
SCE_TCMD_LABEL = 3
SCE_TCMD_HIDE = 4
SCE_TCMD_COMMAND = 5
SCE_TCMD_IDENTIFIER = 6
SCE_TCMD_OPERATOR = 7
SCE_TCMD_ENVIRONMENT = 8
SCE_TCMD_EXPANSION = 9
SCE_TCMD_CLABEL = 10
SCE_MAKE_DEFAULT = 0
SCE_MAKE_COMMENT = 1
SCE_MAKE_PREPROCESSOR = 2
SCE_MAKE_IDENTIFIER = 3
SCE_MAKE_OPERATOR = 4
SCE_MAKE_TARGET = 5
SCE_MAKE_IDEOL = 9
SCE_DIFF_DEFAULT = 0
SCE_DIFF_COMMENT = 1
SCE_DIFF_COMMAND = 2
SCE_DIFF_HEADER = 3
SCE_DIFF_POSITION = 4
SCE_DIFF_DELETED = 5
SCE_DIFF_ADDED = 6
SCE_DIFF_CHANGED = 7
SCE_CONF_DEFAULT = 0
SCE_CONF_COMMENT = 1
SCE_CONF_NUMBER = 2
SCE_CONF_IDENTIFIER = 3
SCE_CONF_EXTENSION = 4
SCE_CONF_PARAMETER = 5
SCE_CONF_STRING = 6
SCE_CONF_OPERATOR = 7
SCE_CONF_IP = 8
SCE_CONF_DIRECTIVE = 9
SCE_AVE_DEFAULT = 0
SCE_AVE_COMMENT = 1
SCE_AVE_NUMBER = 2
SCE_AVE_WORD = 3
SCE_AVE_STRING = 6
SCE_AVE_ENUM = 7
SCE_AVE_STRINGEOL = 8
SCE_AVE_IDENTIFIER = 9
SCE_AVE_OPERATOR = 10
SCE_AVE_WORD1 = 11
SCE_AVE_WORD2 = 12
SCE_AVE_WORD3 = 13
SCE_AVE_WORD4 = 14
SCE_AVE_WORD5 = 15
SCE_AVE_WORD6 = 16
SCE_ADA_DEFAULT = 0
SCE_ADA_WORD = 1
SCE_ADA_IDENTIFIER = 2
SCE_ADA_NUMBER = 3
SCE_ADA_DELIMITER = 4
SCE_ADA_CHARACTER = 5
SCE_ADA_CHARACTEREOL = 6
SCE_ADA_STRING = 7
SCE_ADA_STRINGEOL = 8
SCE_ADA_LABEL = 9
SCE_ADA_COMMENTLINE = 10
SCE_ADA_ILLEGAL = 11
SCE_BAAN_DEFAULT = 0
SCE_BAAN_COMMENT = 1
SCE_BAAN_COMMENTDOC = 2
SCE_BAAN_NUMBER = 3
SCE_BAAN_WORD = 4
SCE_BAAN_STRING = 5
SCE_BAAN_PREPROCESSOR = 6
SCE_BAAN_OPERATOR = 7
SCE_BAAN_IDENTIFIER = 8
SCE_BAAN_STRINGEOL = 9
SCE_BAAN_WORD2 = 10
SCE_LISP_DEFAULT = 0
SCE_LISP_COMMENT = 1
SCE_LISP_NUMBER = 2
SCE_LISP_KEYWORD = 3
SCE_LISP_KEYWORD_KW = 4
SCE_LISP_SYMBOL = 5
SCE_LISP_STRING = 6
SCE_LISP_STRINGEOL = 8
SCE_LISP_IDENTIFIER = 9
SCE_LISP_OPERATOR = 10
SCE_LISP_SPECIAL = 11
SCE_LISP_MULTI_COMMENT = 12
SCE_EIFFEL_DEFAULT = 0
SCE_EIFFEL_COMMENTLINE = 1
SCE_EIFFEL_NUMBER = 2
SCE_EIFFEL_WORD = 3
SCE_EIFFEL_STRING = 4
SCE_EIFFEL_CHARACTER = 5
SCE_EIFFEL_OPERATOR = 6
SCE_EIFFEL_IDENTIFIER = 7
SCE_EIFFEL_STRINGEOL = 8
SCE_NNCRONTAB_DEFAULT = 0
SCE_NNCRONTAB_COMMENT = 1
SCE_NNCRONTAB_TASK = 2
SCE_NNCRONTAB_SECTION = 3
SCE_NNCRONTAB_KEYWORD = 4
SCE_NNCRONTAB_MODIFIER = 5
SCE_NNCRONTAB_ASTERISK = 6
SCE_NNCRONTAB_NUMBER = 7
SCE_NNCRONTAB_STRING = 8
SCE_NNCRONTAB_ENVIRONMENT = 9
SCE_NNCRONTAB_IDENTIFIER = 10
SCE_FORTH_DEFAULT = 0
SCE_FORTH_COMMENT = 1
SCE_FORTH_COMMENT_ML = 2
SCE_FORTH_IDENTIFIER = 3
SCE_FORTH_CONTROL = 4
SCE_FORTH_KEYWORD = 5
SCE_FORTH_DEFWORD = 6
SCE_FORTH_PREWORD1 = 7
SCE_FORTH_PREWORD2 = 8
SCE_FORTH_NUMBER = 9
SCE_FORTH_STRING = 10
SCE_FORTH_LOCALE = 11
SCE_MATLAB_DEFAULT = 0
SCE_MATLAB_COMMENT = 1
SCE_MATLAB_COMMAND = 2
SCE_MATLAB_NUMBER = 3
SCE_MATLAB_KEYWORD = 4
SCE_MATLAB_STRING = 5
SCE_MATLAB_OPERATOR = 6
SCE_MATLAB_IDENTIFIER = 7
SCE_MATLAB_DOUBLEQUOTESTRING = 8
SCE_SCRIPTOL_DEFAULT = 0
SCE_SCRIPTOL_WHITE = 1
SCE_SCRIPTOL_COMMENTLINE = 2
SCE_SCRIPTOL_PERSISTENT = 3
SCE_SCRIPTOL_CSTYLE = 4
SCE_SCRIPTOL_COMMENTBLOCK = 5
SCE_SCRIPTOL_NUMBER = 6
SCE_SCRIPTOL_STRING = 7
SCE_SCRIPTOL_CHARACTER = 8
SCE_SCRIPTOL_STRINGEOL = 9
SCE_SCRIPTOL_KEYWORD = 10
SCE_SCRIPTOL_OPERATOR = 11
SCE_SCRIPTOL_IDENTIFIER = 12
SCE_SCRIPTOL_TRIPLE = 13
SCE_SCRIPTOL_CLASSNAME = 14
SCE_SCRIPTOL_PREPROCESSOR = 15
SCE_ASM_DEFAULT = 0
SCE_ASM_COMMENT = 1
SCE_ASM_NUMBER = 2
SCE_ASM_STRING = 3
SCE_ASM_OPERATOR = 4
SCE_ASM_IDENTIFIER = 5
SCE_ASM_CPUINSTRUCTION = 6
SCE_ASM_MATHINSTRUCTION = 7
SCE_ASM_REGISTER = 8
SCE_ASM_DIRECTIVE = 9
SCE_ASM_DIRECTIVEOPERAND = 10
SCE_ASM_COMMENTBLOCK = 11
SCE_ASM_CHARACTER = 12
SCE_ASM_STRINGEOL = 13
SCE_ASM_EXTINSTRUCTION = 14
SCE_ASM_COMMENTDIRECTIVE = 15
SCE_F_DEFAULT = 0
SCE_F_COMMENT = 1
SCE_F_NUMBER = 2
SCE_F_STRING1 = 3
SCE_F_STRING2 = 4
SCE_F_STRINGEOL = 5
SCE_F_OPERATOR = 6
SCE_F_IDENTIFIER = 7
SCE_F_WORD = 8
SCE_F_WORD2 = 9
SCE_F_WORD3 = 10
SCE_F_PREPROCESSOR = 11
SCE_F_OPERATOR2 = 12
SCE_F_LABEL = 13
SCE_F_CONTINUATION = 14
SCE_CSS_DEFAULT = 0
SCE_CSS_TAG = 1
SCE_CSS_CLASS = 2
SCE_CSS_PSEUDOCLASS = 3
SCE_CSS_UNKNOWN_PSEUDOCLASS = 4
SCE_CSS_OPERATOR = 5
SCE_CSS_IDENTIFIER = 6
SCE_CSS_UNKNOWN_IDENTIFIER = 7
SCE_CSS_VALUE = 8
SCE_CSS_COMMENT = 9
SCE_CSS_ID = 10
SCE_CSS_IMPORTANT = 11
SCE_CSS_DIRECTIVE = 12
SCE_CSS_DOUBLESTRING = 13
SCE_CSS_SINGLESTRING = 14
SCE_CSS_IDENTIFIER2 = 15
SCE_CSS_ATTRIBUTE = 16
SCE_CSS_IDENTIFIER3 = 17
SCE_CSS_PSEUDOELEMENT = 18
SCE_CSS_EXTENDED_IDENTIFIER = 19
SCE_CSS_EXTENDED_PSEUDOCLASS = 20
SCE_CSS_EXTENDED_PSEUDOELEMENT = 21
SCE_CSS_MEDIA = 22
SCE_CSS_VARIABLE = 23
SCE_CSS_NUMBER = 24
SCE_CSS_STRINGEOL = 25
SCE_CSS_MIXIN = 26
SCE_POV_DEFAULT = 0
SCE_POV_COMMENT = 1
SCE_POV_COMMENTLINE = 2
SCE_POV_NUMBER = 3
SCE_POV_OPERATOR = 4
SCE_POV_IDENTIFIER = 5
SCE_POV_STRING = 6
SCE_POV_STRINGEOL = 7
SCE_POV_DIRECTIVE = 8
SCE_POV_BADDIRECTIVE = 9
SCE_POV_WORD2 = 10
SCE_POV_WORD3 = 11
SCE_POV_WORD4 = 12
SCE_POV_WORD5 = 13
SCE_POV_WORD6 = 14
SCE_POV_WORD7 = 15
SCE_POV_WORD8 = 16
SCE_LOUT_DEFAULT = 0
SCE_LOUT_COMMENT = 1
SCE_LOUT_NUMBER = 2
SCE_LOUT_WORD = 3
SCE_LOUT_WORD2 = 4
SCE_LOUT_WORD3 = 5
SCE_LOUT_WORD4 = 6
SCE_LOUT_STRING = 7
SCE_LOUT_OPERATOR = 8
SCE_LOUT_IDENTIFIER = 9
SCE_LOUT_STRINGEOL = 10
SCE_ESCRIPT_DEFAULT = 0
SCE_ESCRIPT_COMMENT = 1
SCE_ESCRIPT_COMMENTLINE = 2
SCE_ESCRIPT_COMMENTDOC = 3
SCE_ESCRIPT_NUMBER = 4
SCE_ESCRIPT_WORD = 5
SCE_ESCRIPT_STRING = 6
SCE_ESCRIPT_OPERATOR = 7
SCE_ESCRIPT_IDENTIFIER = 8
SCE_ESCRIPT_BRACE = 9
SCE_ESCRIPT_WORD2 = 10
SCE_ESCRIPT_WORD3 = 11
SCE_PS_DEFAULT = 0
SCE_PS_COMMENT = 1
SCE_PS_DSC_COMMENT = 2
SCE_PS_DSC_VALUE = 3
SCE_PS_NUMBER = 4
SCE_PS_NAME = 5
SCE_PS_KEYWORD = 6
SCE_PS_LITERAL = 7
SCE_PS_IMMEVAL = 8
SCE_PS_PAREN_ARRAY = 9
SCE_PS_PAREN_DICT = 10
SCE_PS_PAREN_PROC = 11
SCE_PS_TEXT = 12
SCE_PS_HEXSTRING = 13
SCE_PS_BASE85STRING = 14
SCE_PS_BADSTRINGCHAR = 15
SCE_NSIS_DEFAULT = 0
SCE_NSIS_COMMENT = 1
SCE_NSIS_STRINGDQ = 2
SCE_NSIS_STRINGLQ = 3
SCE_NSIS_STRINGRQ = 4
SCE_NSIS_FUNCTION = 5
SCE_NSIS_VARIABLE = 6
SCE_NSIS_LABEL = 7
SCE_NSIS_USERDEFINED = 8
SCE_NSIS_SECTIONDEF = 9
SCE_NSIS_SUBSECTIONDEF = 10
SCE_NSIS_IFDEFINEDEF = 11
SCE_NSIS_MACRODEF = 12
SCE_NSIS_STRINGVAR = 13
SCE_NSIS_NUMBER = 14
SCE_NSIS_SECTIONGROUP = 15
SCE_NSIS_PAGEEX = 16
SCE_NSIS_FUNCTIONDEF = 17
SCE_NSIS_COMMENTBOX = 18
SCE_MMIXAL_LEADWS = 0
SCE_MMIXAL_COMMENT = 1
SCE_MMIXAL_LABEL = 2
SCE_MMIXAL_OPCODE = 3
SCE_MMIXAL_OPCODE_PRE = 4
SCE_MMIXAL_OPCODE_VALID = 5
SCE_MMIXAL_OPCODE_UNKNOWN = 6
SCE_MMIXAL_OPCODE_POST = 7
SCE_MMIXAL_OPERANDS = 8
SCE_MMIXAL_NUMBER = 9
SCE_MMIXAL_REF = 10
SCE_MMIXAL_CHAR = 11
SCE_MMIXAL_STRING = 12
SCE_MMIXAL_REGISTER = 13
SCE_MMIXAL_HEX = 14
SCE_MMIXAL_OPERATOR = 15
SCE_MMIXAL_SYMBOL = 16
SCE_MMIXAL_INCLUDE = 17
SCE_CLW_DEFAULT = 0
SCE_CLW_LABEL = 1
SCE_CLW_COMMENT = 2
SCE_CLW_STRING = 3
SCE_CLW_USER_IDENTIFIER = 4
SCE_CLW_INTEGER_CONSTANT = 5
SCE_CLW_REAL_CONSTANT = 6
SCE_CLW_PICTURE_STRING = 7
SCE_CLW_KEYWORD = 8
SCE_CLW_COMPILER_DIRECTIVE = 9
SCE_CLW_RUNTIME_EXPRESSIONS = 10
SCE_CLW_BUILTIN_PROCEDURES_FUNCTION = 11
SCE_CLW_STRUCTURE_DATA_TYPE = 12
SCE_CLW_ATTRIBUTE = 13
SCE_CLW_STANDARD_EQUATE = 14
SCE_CLW_ERROR = 15
SCE_CLW_DEPRECATED = 16
SCE_LOT_DEFAULT = 0
SCE_LOT_HEADER = 1
SCE_LOT_BREAK = 2
SCE_LOT_SET = 3
SCE_LOT_PASS = 4
SCE_LOT_FAIL = 5
SCE_LOT_ABORT = 6
SCE_YAML_DEFAULT = 0
SCE_YAML_COMMENT = 1
SCE_YAML_IDENTIFIER = 2
SCE_YAML_KEYWORD = 3
SCE_YAML_NUMBER = 4
SCE_YAML_REFERENCE = 5
SCE_YAML_DOCUMENT = 6
SCE_YAML_TEXT = 7
SCE_YAML_ERROR = 8
SCE_YAML_OPERATOR = 9
SCE_TEX_DEFAULT = 0
SCE_TEX_SPECIAL = 1
SCE_TEX_GROUP = 2
SCE_TEX_SYMBOL = 3
SCE_TEX_COMMAND = 4
SCE_TEX_TEXT = 5
SCE_METAPOST_DEFAULT = 0
SCE_METAPOST_SPECIAL = 1
SCE_METAPOST_GROUP = 2
SCE_METAPOST_SYMBOL = 3
SCE_METAPOST_COMMAND = 4
SCE_METAPOST_TEXT = 5
SCE_METAPOST_EXTRA = 6
SCE_ERLANG_DEFAULT = 0
SCE_ERLANG_COMMENT = 1
SCE_ERLANG_VARIABLE = 2
SCE_ERLANG_NUMBER = 3
SCE_ERLANG_KEYWORD = 4
SCE_ERLANG_STRING = 5
SCE_ERLANG_OPERATOR = 6
SCE_ERLANG_ATOM = 7
SCE_ERLANG_FUNCTION_NAME = 8
SCE_ERLANG_CHARACTER = 9
SCE_ERLANG_MACRO = 10
SCE_ERLANG_RECORD = 11
SCE_ERLANG_PREPROC = 12
SCE_ERLANG_NODE_NAME = 13
SCE_ERLANG_COMMENT_FUNCTION = 14
SCE_ERLANG_COMMENT_MODULE = 15
SCE_ERLANG_COMMENT_DOC = 16
SCE_ERLANG_COMMENT_DOC_MACRO = 17
SCE_ERLANG_ATOM_QUOTED = 18
SCE_ERLANG_MACRO_QUOTED = 19
SCE_ERLANG_RECORD_QUOTED = 20
SCE_ERLANG_NODE_NAME_QUOTED = 21
SCE_ERLANG_BIFS = 22
SCE_ERLANG_MODULES = 23
SCE_ERLANG_MODULES_ATT = 24
SCE_ERLANG_UNKNOWN = 31
SCE_MSSQL_DEFAULT = 0
SCE_MSSQL_COMMENT = 1
SCE_MSSQL_LINE_COMMENT = 2
SCE_MSSQL_NUMBER = 3
SCE_MSSQL_STRING = 4
SCE_MSSQL_OPERATOR = 5
SCE_MSSQL_IDENTIFIER = 6
SCE_MSSQL_VARIABLE = 7
SCE_MSSQL_COLUMN_NAME = 8
SCE_MSSQL_STATEMENT = 9
SCE_MSSQL_DATATYPE = 10
SCE_MSSQL_SYSTABLE = 11
SCE_MSSQL_GLOBAL_VARIABLE = 12
SCE_MSSQL_FUNCTION = 13
SCE_MSSQL_STORED_PROCEDURE = 14
SCE_MSSQL_DEFAULT_PREF_DATATYPE = 15
SCE_MSSQL_COLUMN_NAME_2 = 16
SCE_V_DEFAULT = 0
SCE_V_COMMENT = 1
SCE_V_COMMENTLINE = 2
SCE_V_COMMENTLINEBANG = 3
SCE_V_NUMBER = 4
SCE_V_WORD = 5
SCE_V_STRING = 6
SCE_V_WORD2 = 7
SCE_V_WORD3 = 8
SCE_V_PREPROCESSOR = 9
SCE_V_OPERATOR = 10
SCE_V_IDENTIFIER = 11
SCE_V_STRINGEOL = 12
SCE_V_USER = 19
SCE_KIX_DEFAULT = 0
SCE_KIX_COMMENT = 1
SCE_KIX_STRING1 = 2
SCE_KIX_STRING2 = 3
SCE_KIX_NUMBER = 4
SCE_KIX_VAR = 5
SCE_KIX_MACRO = 6
SCE_KIX_KEYWORD = 7
SCE_KIX_FUNCTIONS = 8
SCE_KIX_OPERATOR = 9
SCE_KIX_IDENTIFIER = 31
SCE_GC_DEFAULT = 0
SCE_GC_COMMENTLINE = 1
SCE_GC_COMMENTBLOCK = 2
SCE_GC_GLOBAL = 3
SCE_GC_EVENT = 4
SCE_GC_ATTRIBUTE = 5
SCE_GC_CONTROL = 6
SCE_GC_COMMAND = 7
SCE_GC_STRING = 8
SCE_GC_OPERATOR = 9
SCE_SN_DEFAULT = 0
SCE_SN_CODE = 1
SCE_SN_COMMENTLINE = 2
SCE_SN_COMMENTLINEBANG = 3
SCE_SN_NUMBER = 4
SCE_SN_WORD = 5
SCE_SN_STRING = 6
SCE_SN_WORD2 = 7
SCE_SN_WORD3 = 8
SCE_SN_PREPROCESSOR = 9
SCE_SN_OPERATOR = 10
SCE_SN_IDENTIFIER = 11
SCE_SN_STRINGEOL = 12
SCE_SN_REGEXTAG = 13
SCE_SN_SIGNAL = 14
SCE_SN_USER = 19
SCE_AU3_DEFAULT = 0
SCE_AU3_COMMENT = 1
SCE_AU3_COMMENTBLOCK = 2
SCE_AU3_NUMBER = 3
SCE_AU3_FUNCTION = 4
SCE_AU3_KEYWORD = 5
SCE_AU3_MACRO = 6
SCE_AU3_STRING = 7
SCE_AU3_OPERATOR = 8
SCE_AU3_VARIABLE = 9
SCE_AU3_SENT = 10
SCE_AU3_PREPROCESSOR = 11
SCE_AU3_SPECIAL = 12
SCE_AU3_EXPAND = 13
SCE_AU3_COMOBJ = 14
SCE_AU3_UDF = 15
SCE_APDL_DEFAULT = 0
SCE_APDL_COMMENT = 1
SCE_APDL_COMMENTBLOCK = 2
SCE_APDL_NUMBER = 3
SCE_APDL_STRING = 4
SCE_APDL_OPERATOR = 5
SCE_APDL_WORD = 6
SCE_APDL_PROCESSOR = 7
SCE_APDL_COMMAND = 8
SCE_APDL_SLASHCOMMAND = 9
SCE_APDL_STARCOMMAND = 10
SCE_APDL_ARGUMENT = 11
SCE_APDL_FUNCTION = 12
SCE_SH_DEFAULT = 0
SCE_SH_ERROR = 1
SCE_SH_COMMENTLINE = 2
SCE_SH_NUMBER = 3
SCE_SH_WORD = 4
SCE_SH_STRING = 5
SCE_SH_CHARACTER = 6
SCE_SH_OPERATOR = 7
SCE_SH_IDENTIFIER = 8
SCE_SH_SCALAR = 9
SCE_SH_PARAM = 10
SCE_SH_BACKTICKS = 11
SCE_SH_HERE_DELIM = 12
SCE_SH_HERE_Q = 13
SCE_ASN1_DEFAULT = 0
SCE_ASN1_COMMENT = 1
SCE_ASN1_IDENTIFIER = 2
SCE_ASN1_STRING = 3
SCE_ASN1_OID = 4
SCE_ASN1_SCALAR = 5
SCE_ASN1_KEYWORD = 6
SCE_ASN1_ATTRIBUTE = 7
SCE_ASN1_DESCRIPTOR = 8
SCE_ASN1_TYPE = 9
SCE_ASN1_OPERATOR = 10
SCE_VHDL_DEFAULT = 0
SCE_VHDL_COMMENT = 1
SCE_VHDL_COMMENTLINEBANG = 2
SCE_VHDL_NUMBER = 3
SCE_VHDL_STRING = 4
SCE_VHDL_OPERATOR = 5
SCE_VHDL_IDENTIFIER = 6
SCE_VHDL_STRINGEOL = 7
SCE_VHDL_KEYWORD = 8
SCE_VHDL_STDOPERATOR = 9
SCE_VHDL_ATTRIBUTE = 10
SCE_VHDL_STDFUNCTION = 11
SCE_VHDL_STDPACKAGE = 12
SCE_VHDL_STDTYPE = 13
SCE_VHDL_USERWORD = 14
SCE_CAML_DEFAULT = 0
SCE_CAML_IDENTIFIER = 1
SCE_CAML_TAGNAME = 2
SCE_CAML_KEYWORD = 3
SCE_CAML_KEYWORD2 = 4
SCE_CAML_KEYWORD3 = 5
SCE_CAML_LINENUM = 6
SCE_CAML_OPERATOR = 7
SCE_CAML_NUMBER = 8
SCE_CAML_CHAR = 9
SCE_CAML_WHITE = 10
SCE_CAML_STRING = 11
SCE_CAML_COMMENT = 12
SCE_CAML_COMMENT1 = 13
SCE_CAML_COMMENT2 = 14
SCE_CAML_COMMENT3 = 15
SCE_HA_DEFAULT = 0
SCE_HA_IDENTIFIER = 1
SCE_HA_KEYWORD = 2
SCE_HA_NUMBER = 3
SCE_HA_STRING = 4
SCE_HA_CHARACTER = 5
SCE_HA_CLASS = 6
SCE_HA_MODULE = 7
SCE_HA_CAPITAL = 8
SCE_HA_DATA = 9
SCE_HA_IMPORT = 10
SCE_HA_OPERATOR = 11
SCE_HA_INSTANCE = 12
SCE_HA_COMMENTLINE = 13
SCE_HA_COMMENTBLOCK = 14
SCE_HA_COMMENTBLOCK2 = 15
SCE_HA_COMMENTBLOCK3 = 16
SCE_HA_PRAGMA = 17
SCE_HA_PREPROCESSOR = 18
SCE_HA_STRINGEOL = 19
SCE_HA_RESERVED_OPERATOR = 20
SCE_HA_LITERATE_COMMENT = 21
SCE_HA_LITERATE_CODEDELIM = 22
SCE_T3_DEFAULT = 0
SCE_T3_X_DEFAULT = 1
SCE_T3_PREPROCESSOR = 2
SCE_T3_BLOCK_COMMENT = 3
SCE_T3_LINE_COMMENT = 4
SCE_T3_OPERATOR = 5
SCE_T3_KEYWORD = 6
SCE_T3_NUMBER = 7
SCE_T3_IDENTIFIER = 8
SCE_T3_S_STRING = 9
SCE_T3_D_STRING = 10
SCE_T3_X_STRING = 11
SCE_T3_LIB_DIRECTIVE = 12
SCE_T3_MSG_PARAM = 13
SCE_T3_HTML_TAG = 14
SCE_T3_HTML_DEFAULT = 15
SCE_T3_HTML_STRING = 16
SCE_T3_USER1 = 17
SCE_T3_USER2 = 18
SCE_T3_USER3 = 19
SCE_T3_BRACE = 20
SCE_REBOL_DEFAULT = 0
SCE_REBOL_COMMENTLINE = 1
SCE_REBOL_COMMENTBLOCK = 2
SCE_REBOL_PREFACE = 3
SCE_REBOL_OPERATOR = 4
SCE_REBOL_CHARACTER = 5
SCE_REBOL_QUOTEDSTRING = 6
SCE_REBOL_BRACEDSTRING = 7
SCE_REBOL_NUMBER = 8
SCE_REBOL_PAIR = 9
SCE_REBOL_TUPLE = 10
SCE_REBOL_BINARY = 11
SCE_REBOL_MONEY = 12
SCE_REBOL_ISSUE = 13
SCE_REBOL_TAG = 14
SCE_REBOL_FILE = 15
SCE_REBOL_EMAIL = 16
SCE_REBOL_URL = 17
SCE_REBOL_DATE = 18
SCE_REBOL_TIME = 19
SCE_REBOL_IDENTIFIER = 20
SCE_REBOL_WORD = 21
SCE_REBOL_WORD2 = 22
SCE_REBOL_WORD3 = 23
SCE_REBOL_WORD4 = 24
SCE_REBOL_WORD5 = 25
SCE_REBOL_WORD6 = 26
SCE_REBOL_WORD7 = 27
SCE_REBOL_WORD8 = 28
SCE_SQL_DEFAULT = 0
SCE_SQL_COMMENT = 1
SCE_SQL_COMMENTLINE = 2
SCE_SQL_COMMENTDOC = 3
SCE_SQL_NUMBER = 4
SCE_SQL_WORD = 5
SCE_SQL_STRING = 6
SCE_SQL_CHARACTER = 7
SCE_SQL_SQLPLUS = 8
SCE_SQL_SQLPLUS_PROMPT = 9
SCE_SQL_OPERATOR = 10
SCE_SQL_IDENTIFIER = 11
SCE_SQL_SQLPLUS_COMMENT = 13
SCE_SQL_COMMENTLINEDOC = 15
SCE_SQL_WORD2 = 16
SCE_SQL_COMMENTDOCKEYWORD = 17
SCE_SQL_COMMENTDOCKEYWORDERROR = 18
SCE_SQL_USER1 = 19
SCE_SQL_USER2 = 20
SCE_SQL_USER3 = 21
SCE_SQL_USER4 = 22
SCE_SQL_QUOTEDIDENTIFIER = 23
SCE_ST_DEFAULT = 0
SCE_ST_STRING = 1
SCE_ST_NUMBER = 2
SCE_ST_COMMENT = 3
SCE_ST_SYMBOL = 4
SCE_ST_BINARY = 5
SCE_ST_BOOL = 6
SCE_ST_SELF = 7
SCE_ST_SUPER = 8
SCE_ST_NIL = 9
SCE_ST_GLOBAL = 10
SCE_ST_RETURN = 11
SCE_ST_SPECIAL = 12
SCE_ST_KWSEND = 13
SCE_ST_ASSIGN = 14
SCE_ST_CHARACTER = 15
SCE_ST_SPEC_SEL = 16
SCE_FS_DEFAULT = 0
SCE_FS_COMMENT = 1
SCE_FS_COMMENTLINE = 2
SCE_FS_COMMENTDOC = 3
SCE_FS_COMMENTLINEDOC = 4
SCE_FS_COMMENTDOCKEYWORD = 5
SCE_FS_COMMENTDOCKEYWORDERROR = 6
SCE_FS_KEYWORD = 7
SCE_FS_KEYWORD2 = 8
SCE_FS_KEYWORD3 = 9
SCE_FS_KEYWORD4 = 10
SCE_FS_NUMBER = 11
SCE_FS_STRING = 12
SCE_FS_PREPROCESSOR = 13
SCE_FS_OPERATOR = 14
SCE_FS_IDENTIFIER = 15
SCE_FS_DATE = 16
SCE_FS_STRINGEOL = 17
SCE_FS_CONSTANT = 18
SCE_FS_WORDOPERATOR = 19
SCE_FS_DISABLEDCODE = 20
SCE_FS_DEFAULT_C = 21
SCE_FS_COMMENTDOC_C = 22
SCE_FS_COMMENTLINEDOC_C = 23
SCE_FS_KEYWORD_C = 24
SCE_FS_KEYWORD2_C = 25
SCE_FS_NUMBER_C = 26
SCE_FS_STRING_C = 27
SCE_FS_PREPROCESSOR_C = 28
SCE_FS_OPERATOR_C = 29
SCE_FS_IDENTIFIER_C = 30
SCE_FS_STRINGEOL_C = 31
SCE_CSOUND_DEFAULT = 0
SCE_CSOUND_COMMENT = 1
SCE_CSOUND_NUMBER = 2
SCE_CSOUND_OPERATOR = 3
SCE_CSOUND_INSTR = 4
SCE_CSOUND_IDENTIFIER = 5
SCE_CSOUND_OPCODE = 6
SCE_CSOUND_HEADERSTMT = 7
SCE_CSOUND_USERKEYWORD = 8
SCE_CSOUND_COMMENTBLOCK = 9
SCE_CSOUND_PARAM = 10
SCE_CSOUND_ARATE_VAR = 11
SCE_CSOUND_KRATE_VAR = 12
SCE_CSOUND_IRATE_VAR = 13
SCE_CSOUND_GLOBAL_VAR = 14
SCE_CSOUND_STRINGEOL = 15
SCE_INNO_DEFAULT = 0
SCE_INNO_COMMENT = 1
SCE_INNO_KEYWORD = 2
SCE_INNO_PARAMETER = 3
SCE_INNO_SECTION = 4
SCE_INNO_PREPROC = 5
SCE_INNO_INLINE_EXPANSION = 6
SCE_INNO_COMMENT_PASCAL = 7
SCE_INNO_KEYWORD_PASCAL = 8
SCE_INNO_KEYWORD_USER = 9
SCE_INNO_STRING_DOUBLE = 10
SCE_INNO_STRING_SINGLE = 11
SCE_INNO_IDENTIFIER = 12
SCE_OPAL_SPACE = 0
SCE_OPAL_COMMENT_BLOCK = 1
SCE_OPAL_COMMENT_LINE = 2
SCE_OPAL_INTEGER = 3
SCE_OPAL_KEYWORD = 4
SCE_OPAL_SORT = 5
SCE_OPAL_STRING = 6
SCE_OPAL_PAR = 7
SCE_OPAL_BOOL_CONST = 8
SCE_OPAL_DEFAULT = 32
SCE_SPICE_DEFAULT = 0
SCE_SPICE_IDENTIFIER = 1
SCE_SPICE_KEYWORD = 2
SCE_SPICE_KEYWORD2 = 3
SCE_SPICE_KEYWORD3 = 4
SCE_SPICE_NUMBER = 5
SCE_SPICE_DELIMITER = 6
SCE_SPICE_VALUE = 7
SCE_SPICE_COMMENTLINE = 8
SCE_CMAKE_DEFAULT = 0
SCE_CMAKE_COMMENT = 1
SCE_CMAKE_STRINGDQ = 2
SCE_CMAKE_STRINGLQ = 3
SCE_CMAKE_STRINGRQ = 4
SCE_CMAKE_COMMANDS = 5
SCE_CMAKE_PARAMETERS = 6
SCE_CMAKE_VARIABLE = 7
SCE_CMAKE_USERDEFINED = 8
SCE_CMAKE_WHILEDEF = 9
SCE_CMAKE_FOREACHDEF = 10
SCE_CMAKE_IFDEFINEDEF = 11
SCE_CMAKE_MACRODEF = 12
SCE_CMAKE_STRINGVAR = 13
SCE_CMAKE_NUMBER = 14
SCE_GAP_DEFAULT = 0
SCE_GAP_IDENTIFIER = 1
SCE_GAP_KEYWORD = 2
SCE_GAP_KEYWORD2 = 3
SCE_GAP_KEYWORD3 = 4
SCE_GAP_KEYWORD4 = 5
SCE_GAP_STRING = 6
SCE_GAP_CHAR = 7
SCE_GAP_OPERATOR = 8
SCE_GAP_COMMENT = 9
SCE_GAP_NUMBER = 10
SCE_GAP_STRINGEOL = 11
SCE_PLM_DEFAULT = 0
SCE_PLM_COMMENT = 1
SCE_PLM_STRING = 2
SCE_PLM_NUMBER = 3
SCE_PLM_IDENTIFIER = 4
SCE_PLM_OPERATOR = 5
SCE_PLM_CONTROL = 6
SCE_PLM_KEYWORD = 7
SCE_4GL_DEFAULT = 0
SCE_4GL_NUMBER = 1
SCE_4GL_WORD = 2
SCE_4GL_STRING = 3
SCE_4GL_CHARACTER = 4
SCE_4GL_PREPROCESSOR = 5
SCE_4GL_OPERATOR = 6
SCE_4GL_IDENTIFIER = 7
SCE_4GL_BLOCK = 8
SCE_4GL_END = 9
SCE_4GL_COMMENT1 = 10
SCE_4GL_COMMENT2 = 11
SCE_4GL_COMMENT3 = 12
SCE_4GL_COMMENT4 = 13
SCE_4GL_COMMENT5 = 14
SCE_4GL_COMMENT6 = 15
SCE_4GL_DEFAULT_ = 16
SCE_4GL_NUMBER_ = 17
SCE_4GL_WORD_ = 18
SCE_4GL_STRING_ = 19
SCE_4GL_CHARACTER_ = 20
SCE_4GL_PREPROCESSOR_ = 21
SCE_4GL_OPERATOR_ = 22
SCE_4GL_IDENTIFIER_ = 23
SCE_4GL_BLOCK_ = 24
SCE_4GL_END_ = 25
SCE_4GL_COMMENT1_ = 26
SCE_4GL_COMMENT2_ = 27
SCE_4GL_COMMENT3_ = 28
SCE_4GL_COMMENT4_ = 29
SCE_4GL_COMMENT5_ = 30
SCE_4GL_COMMENT6_ = 31
SCE_ABAQUS_DEFAULT = 0
SCE_ABAQUS_COMMENT = 1
SCE_ABAQUS_COMMENTBLOCK = 2
SCE_ABAQUS_NUMBER = 3
SCE_ABAQUS_STRING = 4
SCE_ABAQUS_OPERATOR = 5
SCE_ABAQUS_WORD = 6
SCE_ABAQUS_PROCESSOR = 7
SCE_ABAQUS_COMMAND = 8
SCE_ABAQUS_SLASHCOMMAND = 9
SCE_ABAQUS_STARCOMMAND = 10
SCE_ABAQUS_ARGUMENT = 11
SCE_ABAQUS_FUNCTION = 12
SCE_ASY_DEFAULT = 0
SCE_ASY_COMMENT = 1
SCE_ASY_COMMENTLINE = 2
SCE_ASY_NUMBER = 3
SCE_ASY_WORD = 4
SCE_ASY_STRING = 5
SCE_ASY_CHARACTER = 6
SCE_ASY_OPERATOR = 7
SCE_ASY_IDENTIFIER = 8
SCE_ASY_STRINGEOL = 9
SCE_ASY_COMMENTLINEDOC = 10
SCE_ASY_WORD2 = 11
SCE_R_DEFAULT = 0
SCE_R_COMMENT = 1
SCE_R_KWORD = 2
SCE_R_BASEKWORD = 3
SCE_R_OTHERKWORD = 4
SCE_R_NUMBER = 5
SCE_R_STRING = 6
SCE_R_STRING2 = 7
SCE_R_OPERATOR = 8
SCE_R_IDENTIFIER = 9
SCE_R_INFIX = 10
SCE_R_INFIXEOL = 11
SCE_MAGIK_DEFAULT = 0
SCE_MAGIK_COMMENT = 1
SCE_MAGIK_HYPER_COMMENT = 16
SCE_MAGIK_STRING = 2
SCE_MAGIK_CHARACTER = 3
SCE_MAGIK_NUMBER = 4
SCE_MAGIK_IDENTIFIER = 5
SCE_MAGIK_OPERATOR = 6
SCE_MAGIK_FLOW = 7
SCE_MAGIK_CONTAINER = 8
SCE_MAGIK_BRACKET_BLOCK = 9
SCE_MAGIK_BRACE_BLOCK = 10
SCE_MAGIK_SQBRACKET_BLOCK = 11
SCE_MAGIK_UNKNOWN_KEYWORD = 12
SCE_MAGIK_KEYWORD = 13
SCE_MAGIK_PRAGMA = 14
SCE_MAGIK_SYMBOL = 15
SCE_POWERSHELL_DEFAULT = 0
SCE_POWERSHELL_COMMENT = 1
SCE_POWERSHELL_STRING = 2
SCE_POWERSHELL_CHARACTER = 3
SCE_POWERSHELL_NUMBER = 4
SCE_POWERSHELL_VARIABLE = 5
SCE_POWERSHELL_OPERATOR = 6
SCE_POWERSHELL_IDENTIFIER = 7
SCE_POWERSHELL_KEYWORD = 8
SCE_POWERSHELL_CMDLET = 9
SCE_POWERSHELL_ALIAS = 10
SCE_POWERSHELL_FUNCTION = 11
SCE_POWERSHELL_USER1 = 12
SCE_POWERSHELL_COMMENTSTREAM = 13
SCE_POWERSHELL_HERE_STRING = 14
SCE_POWERSHELL_HERE_CHARACTER = 15
SCE_POWERSHELL_COMMENTDOCKEYWORD = 16
SCE_MYSQL_DEFAULT = 0
SCE_MYSQL_COMMENT = 1
SCE_MYSQL_COMMENTLINE = 2
SCE_MYSQL_VARIABLE = 3
SCE_MYSQL_SYSTEMVARIABLE = 4
SCE_MYSQL_KNOWNSYSTEMVARIABLE = 5
SCE_MYSQL_NUMBER = 6
SCE_MYSQL_MAJORKEYWORD = 7
SCE_MYSQL_KEYWORD = 8
SCE_MYSQL_DATABASEOBJECT = 9
SCE_MYSQL_PROCEDUREKEYWORD = 10
SCE_MYSQL_STRING = 11
SCE_MYSQL_SQSTRING = 12
SCE_MYSQL_DQSTRING = 13
SCE_MYSQL_OPERATOR = 14
SCE_MYSQL_FUNCTION = 15
SCE_MYSQL_IDENTIFIER = 16
SCE_MYSQL_QUOTEDIDENTIFIER = 17
SCE_MYSQL_USER1 = 18
SCE_MYSQL_USER2 = 19
SCE_MYSQL_USER3 = 20
SCE_MYSQL_HIDDENCOMMAND = 21
SCE_MYSQL_PLACEHOLDER = 22
SCE_PO_DEFAULT = 0
SCE_PO_COMMENT = 1
SCE_PO_MSGID = 2
SCE_PO_MSGID_TEXT = 3
SCE_PO_MSGSTR = 4
SCE_PO_MSGSTR_TEXT = 5
SCE_PO_MSGCTXT = 6
SCE_PO_MSGCTXT_TEXT = 7
SCE_PO_FUZZY = 8
SCE_PO_PROGRAMMER_COMMENT = 9
SCE_PO_REFERENCE = 10
SCE_PO_FLAGS = 11
SCE_PO_MSGID_TEXT_EOL = 12
SCE_PO_MSGSTR_TEXT_EOL = 13
SCE_PO_MSGCTXT_TEXT_EOL = 14
SCE_PO_ERROR = 15
SCE_PAS_DEFAULT = 0
SCE_PAS_IDENTIFIER = 1
SCE_PAS_COMMENT = 2
SCE_PAS_COMMENT2 = 3
SCE_PAS_COMMENTLINE = 4
SCE_PAS_PREPROCESSOR = 5
SCE_PAS_PREPROCESSOR2 = 6
SCE_PAS_NUMBER = 7
SCE_PAS_HEXNUMBER = 8
SCE_PAS_WORD = 9
SCE_PAS_STRING = 10
SCE_PAS_STRINGEOL = 11
SCE_PAS_CHARACTER = 12
SCE_PAS_OPERATOR = 13
SCE_PAS_ASM = 14
SCE_SORCUS_DEFAULT = 0
SCE_SORCUS_COMMAND = 1
SCE_SORCUS_PARAMETER = 2
SCE_SORCUS_COMMENTLINE = 3
SCE_SORCUS_STRING = 4
SCE_SORCUS_STRINGEOL = 5
SCE_SORCUS_IDENTIFIER = 6
SCE_SORCUS_OPERATOR = 7
SCE_SORCUS_NUMBER = 8
SCE_SORCUS_CONSTANT = 9
SCE_POWERPRO_DEFAULT = 0
SCE_POWERPRO_COMMENTBLOCK = 1
SCE_POWERPRO_COMMENTLINE = 2
SCE_POWERPRO_NUMBER = 3
SCE_POWERPRO_WORD = 4
SCE_POWERPRO_WORD2 = 5
SCE_POWERPRO_WORD3 = 6
SCE_POWERPRO_WORD4 = 7
SCE_POWERPRO_DOUBLEQUOTEDSTRING = 8
SCE_POWERPRO_SINGLEQUOTEDSTRING = 9
SCE_POWERPRO_LINECONTINUE = 10
SCE_POWERPRO_OPERATOR = 11
SCE_POWERPRO_IDENTIFIER = 12
SCE_POWERPRO_STRINGEOL = 13
SCE_POWERPRO_VERBATIM = 14
SCE_POWERPRO_ALTQUOTE = 15
SCE_POWERPRO_FUNCTION = 16
SCE_SML_DEFAULT = 0
SCE_SML_IDENTIFIER = 1
SCE_SML_TAGNAME = 2
SCE_SML_KEYWORD = 3
SCE_SML_KEYWORD2 = 4
SCE_SML_KEYWORD3 = 5
SCE_SML_LINENUM = 6
SCE_SML_OPERATOR = 7
SCE_SML_NUMBER = 8
SCE_SML_CHAR = 9
SCE_SML_STRING = 11
SCE_SML_COMMENT = 12
SCE_SML_COMMENT1 = 13
SCE_SML_COMMENT2 = 14
SCE_SML_COMMENT3 = 15
SCE_MARKDOWN_DEFAULT = 0
SCE_MARKDOWN_LINE_BEGIN = 1
SCE_MARKDOWN_STRONG1 = 2
SCE_MARKDOWN_STRONG2 = 3
SCE_MARKDOWN_EM1 = 4
SCE_MARKDOWN_EM2 = 5
SCE_MARKDOWN_HEADER1 = 6
SCE_MARKDOWN_HEADER2 = 7
SCE_MARKDOWN_HEADER3 = 8
SCE_MARKDOWN_HEADER4 = 9
SCE_MARKDOWN_HEADER5 = 10
SCE_MARKDOWN_HEADER6 = 11
SCE_MARKDOWN_PRECHAR = 12
SCE_MARKDOWN_ULIST_ITEM = 13
SCE_MARKDOWN_OLIST_ITEM = 14
SCE_MARKDOWN_BLOCKQUOTE = 15
SCE_MARKDOWN_STRIKEOUT = 16
SCE_MARKDOWN_HRULE = 17
SCE_MARKDOWN_LINK = 18
SCE_MARKDOWN_CODE = 19
SCE_MARKDOWN_CODE2 = 20
SCE_MARKDOWN_CODEBK = 21
SCE_TXT2TAGS_DEFAULT = 0
SCE_TXT2TAGS_LINE_BEGIN = 1
SCE_TXT2TAGS_STRONG1 = 2
SCE_TXT2TAGS_STRONG2 = 3
SCE_TXT2TAGS_EM1 = 4
SCE_TXT2TAGS_EM2 = 5
SCE_TXT2TAGS_HEADER1 = 6
SCE_TXT2TAGS_HEADER2 = 7
SCE_TXT2TAGS_HEADER3 = 8
SCE_TXT2TAGS_HEADER4 = 9
SCE_TXT2TAGS_HEADER5 = 10
SCE_TXT2TAGS_HEADER6 = 11
SCE_TXT2TAGS_PRECHAR = 12
SCE_TXT2TAGS_ULIST_ITEM = 13
SCE_TXT2TAGS_OLIST_ITEM = 14
SCE_TXT2TAGS_BLOCKQUOTE = 15
SCE_TXT2TAGS_STRIKEOUT = 16
SCE_TXT2TAGS_HRULE = 17
SCE_TXT2TAGS_LINK = 18
SCE_TXT2TAGS_CODE = 19
SCE_TXT2TAGS_CODE2 = 20
SCE_TXT2TAGS_CODEBK = 21
SCE_TXT2TAGS_COMMENT = 22
SCE_TXT2TAGS_OPTION = 23
SCE_TXT2TAGS_PREPROC = 24
SCE_TXT2TAGS_POSTPROC = 25
SCE_A68K_DEFAULT = 0
SCE_A68K_COMMENT = 1
SCE_A68K_NUMBER_DEC = 2
SCE_A68K_NUMBER_BIN = 3
SCE_A68K_NUMBER_HEX = 4
SCE_A68K_STRING1 = 5
SCE_A68K_OPERATOR = 6
SCE_A68K_CPUINSTRUCTION = 7
SCE_A68K_EXTINSTRUCTION = 8
SCE_A68K_REGISTER = 9
SCE_A68K_DIRECTIVE = 10
SCE_A68K_MACRO_ARG = 11
SCE_A68K_LABEL = 12
SCE_A68K_STRING2 = 13
SCE_A68K_IDENTIFIER = 14
SCE_A68K_MACRO_DECLARATION = 15
SCE_A68K_COMMENT_WORD = 16
SCE_A68K_COMMENT_SPECIAL = 17
SCE_A68K_COMMENT_DOXYGEN = 18
SCE_MODULA_DEFAULT = 0
SCE_MODULA_COMMENT = 1
SCE_MODULA_DOXYCOMM = 2
SCE_MODULA_DOXYKEY = 3
SCE_MODULA_KEYWORD = 4
SCE_MODULA_RESERVED = 5
SCE_MODULA_NUMBER = 6
SCE_MODULA_BASENUM = 7
SCE_MODULA_FLOAT = 8
SCE_MODULA_STRING = 9
SCE_MODULA_STRSPEC = 10
SCE_MODULA_CHAR = 11
SCE_MODULA_CHARSPEC = 12
SCE_MODULA_PROC = 13
SCE_MODULA_PRAGMA = 14
SCE_MODULA_PRGKEY = 15
SCE_MODULA_OPERATOR = 16
SCE_MODULA_BADSTR = 17
SCE_COFFEESCRIPT_DEFAULT = 0
SCE_COFFEESCRIPT_COMMENT = 1
SCE_COFFEESCRIPT_COMMENTLINE = 2
SCE_COFFEESCRIPT_COMMENTDOC = 3
SCE_COFFEESCRIPT_NUMBER = 4
SCE_COFFEESCRIPT_WORD = 5
SCE_COFFEESCRIPT_STRING = 6
SCE_COFFEESCRIPT_CHARACTER = 7
SCE_COFFEESCRIPT_UUID = 8
SCE_COFFEESCRIPT_PREPROCESSOR = 9
SCE_COFFEESCRIPT_OPERATOR = 10
SCE_COFFEESCRIPT_IDENTIFIER = 11
SCE_COFFEESCRIPT_STRINGEOL = 12
SCE_COFFEESCRIPT_VERBATIM = 13
SCE_COFFEESCRIPT_REGEX = 14
SCE_COFFEESCRIPT_COMMENTLINEDOC = 15
SCE_COFFEESCRIPT_WORD2 = 16
SCE_COFFEESCRIPT_COMMENTDOCKEYWORD = 17
SCE_COFFEESCRIPT_COMMENTDOCKEYWORDERROR = 18
SCE_COFFEESCRIPT_GLOBALCLASS = 19
SCE_COFFEESCRIPT_STRINGRAW = 20
SCE_COFFEESCRIPT_TRIPLEVERBATIM = 21
SCE_COFFEESCRIPT_HASHQUOTEDSTRING = 22
SCE_COFFEESCRIPT_VERBOSE_REGEX = 23
SCE_COFFEESCRIPT_VERBOSE_REGEX_COMMENT = 24
SCE_COFFEESCRIPT_COMMENTBLOCK = 25
SCE_AVS_DEFAULT = 0
SCE_AVS_COMMENTBLOCK = 1
SCE_AVS_COMMENTBLOCKN = 2
SCE_AVS_COMMENTLINE = 3
SCE_AVS_NUMBER = 4
SCE_AVS_OPERATOR = 5
SCE_AVS_IDENTIFIER = 6
SCE_AVS_STRING = 7
SCE_AVS_TRIPLESTRING = 8
SCE_AVS_KEYWORD = 9
SCE_AVS_FILTER = 10
SCE_AVS_PLUGIN = 11
SCE_AVS_FUNCTION = 12
SCE_AVS_CLIPPROP = 13
SCE_AVS_USERDFN = 14
SCE_ECL_DEFAULT = 0
SCE_ECL_COMMENT = 1
SCE_ECL_COMMENTLINE = 2
SCE_ECL_NUMBER = 3
SCE_ECL_STRING = 4
SCE_ECL_WORD0 = 5
SCE_ECL_OPERATOR = 6
SCE_ECL_CHARACTER = 7
SCE_ECL_UUID = 8
SCE_ECL_PREPROCESSOR = 9
SCE_ECL_UNKNOWN = 10
SCE_ECL_IDENTIFIER = 11
SCE_ECL_STRINGEOL = 12
SCE_ECL_VERBATIM = 13
SCE_ECL_REGEX = 14
SCE_ECL_COMMENTLINEDOC = 15
SCE_ECL_WORD1 = 16
SCE_ECL_COMMENTDOCKEYWORD = 17
SCE_ECL_COMMENTDOCKEYWORDERROR = 18
SCE_ECL_WORD2 = 19
SCE_ECL_WORD3 = 20
SCE_ECL_WORD4 = 21
SCE_ECL_WORD5 = 22
SCE_ECL_COMMENTDOC = 23
SCE_ECL_ADDED = 24
SCE_ECL_DELETED = 25
SCE_ECL_CHANGED = 26
SCE_ECL_MOVED = 27
SCE_OSCRIPT_DEFAULT = 0
SCE_OSCRIPT_LINE_COMMENT = 1
SCE_OSCRIPT_BLOCK_COMMENT = 2
SCE_OSCRIPT_DOC_COMMENT = 3
SCE_OSCRIPT_PREPROCESSOR = 4
SCE_OSCRIPT_NUMBER = 5
SCE_OSCRIPT_SINGLEQUOTE_STRING = 6
SCE_OSCRIPT_DOUBLEQUOTE_STRING = 7
SCE_OSCRIPT_CONSTANT = 8
SCE_OSCRIPT_IDENTIFIER = 9
SCE_OSCRIPT_GLOBAL = 10
SCE_OSCRIPT_KEYWORD = 11
SCE_OSCRIPT_OPERATOR = 12
SCE_OSCRIPT_LABEL = 13
SCE_OSCRIPT_TYPE = 14
SCE_OSCRIPT_FUNCTION = 15
SCE_OSCRIPT_OBJECT = 16
SCE_OSCRIPT_PROPERTY = 17
SCE_OSCRIPT_METHOD = 18
SCE_VISUALPROLOG_DEFAULT = 0
SCE_VISUALPROLOG_KEY_MAJOR = 1
SCE_VISUALPROLOG_KEY_MINOR = 2
SCE_VISUALPROLOG_KEY_DIRECTIVE = 3
SCE_VISUALPROLOG_COMMENT_BLOCK = 4
SCE_VISUALPROLOG_COMMENT_LINE = 5
SCE_VISUALPROLOG_COMMENT_KEY = 6
SCE_VISUALPROLOG_COMMENT_KEY_ERROR = 7
SCE_VISUALPROLOG_IDENTIFIER = 8
SCE_VISUALPROLOG_VARIABLE = 9
SCE_VISUALPROLOG_ANONYMOUS = 10
SCE_VISUALPROLOG_NUMBER = 11
SCE_VISUALPROLOG_OPERATOR = 12
SCE_VISUALPROLOG_CHARACTER = 13
SCE_VISUALPROLOG_CHARACTER_TOO_MANY = 14
SCE_VISUALPROLOG_CHARACTER_ESCAPE_ERROR = 15
SCE_VISUALPROLOG_STRING = 16
SCE_VISUALPROLOG_STRING_ESCAPE = 17
SCE_VISUALPROLOG_STRING_ESCAPE_ERROR = 18
SCE_VISUALPROLOG_STRING_EOL_OPEN = 19
SCE_VISUALPROLOG_STRING_VERBATIM = 20
SCE_VISUALPROLOG_STRING_VERBATIM_SPECIAL = 21
SCE_VISUALPROLOG_STRING_VERBATIM_EOL = 22
SCE_STTXT_DEFAULT = 0
SCE_STTXT_COMMENT = 1
SCE_STTXT_COMMENTLINE = 2
SCE_STTXT_KEYWORD = 3
SCE_STTXT_TYPE = 4
SCE_STTXT_FUNCTION = 5
SCE_STTXT_FB = 6
SCE_STTXT_NUMBER = 7
SCE_STTXT_HEXNUMBER = 8
SCE_STTXT_PRAGMA = 9
SCE_STTXT_OPERATOR = 10
SCE_STTXT_CHARACTER = 11
SCE_STTXT_STRING1 = 12
SCE_STTXT_STRING2 = 13
SCE_STTXT_STRINGEOL = 14
SCE_STTXT_IDENTIFIER = 15
SCE_STTXT_DATETIME = 16
SCE_STTXT_VARS = 17
SCE_STTXT_PRAGMAS = 18
SCE_XML_DEFAULT = 0
SCE_XML_PROLOG = 0
SCE_XML_START_TAG_OPEN = 1
SCE_XML_START_TAG_NAME = 2
SCE_XML_START_TAG_CLOSE = 3
SCE_XML_START_TAG_EMPTY_CLOSE = 4
SCE_XML_START_TAG_ATTR_NAME = 5
SCE_XML_START_TAG_ATTR_EQUALS = 6
SCE_XML_START_TAG_ATTR_QUOT_OPEN = 7
SCE_XML_START_TAG_ATTR_QUOT_CONTENT = 8
SCE_XML_START_TAG_ATTR_QUOT_CLOSE = 9
SCE_XML_START_TAG_ATTR_APOS_OPEN = 10
SCE_XML_START_TAG_ATTR_APOS_CONTENT = 11
SCE_XML_START_TAG_ATTR_APOS_CLOSE = 12
SCE_XML_END_TAG_OPEN = 13
SCE_XML_END_TAG_NAME = 14
SCE_XML_END_TAG_CLOSE = 15
SCE_XML_START_TAG_ATTR_NUMBER = 16
SCE_XML_ENTITY_REF = 17
SCE_XML_CHAR_REF = 18
SCE_XML_DATA_NEWLINE = 19
SCE_XML_DATA_CHARS = 20
SCE_XML_CDATA_SECT_OPEN = 21
SCE_XML_CDATA_SECT_CONTENT = 22
SCE_XML_CDATA_SECT_CLOSE = 23
SCE_XML_COMMENT_OPEN = 24
SCE_XML_COMMENT_CONTENT = 25
SCE_XML_COMMENT_CLOSE = 26
SCE_XML_PI_OPEN = 27
SCE_XML_PI_CONTENT = 28
SCE_XML_PI_CLOSE = 29
SCE_XML_XML_DECL_OPEN = 30
SCE_XML_XML_DECL_CONTENT = 31
SCE_XML_XML_DECL_CLOSE = 40
SCE_XML_BOM = 41
SCE_XPATH_TAG_NAME = 42
SCE_XPATH_ATTR_NAME = 43
SCE_XPATH_OPEN = 44
SCE_XPATH_CONTENT_QUOT = 45
SCE_XPATH_CONTENT_APOS = 46
SCE_XPATH_CLOSE = 47
SCE_XML_START_TAG_WHITE_SPACE = 48
SCE_XML_START_TAG_ATTR_UNQUOTED = 49
SCE_XML_END_TAG_WHITE_SPACE = 50
SCE_XML_DECLARATION_OPEN = 51
SCE_XML_DECLARATION_TYPE = 52
SCE_XML_DECLN_WHITE_SPACE = 53
SCE_XML_DECLN_NAME = 54
SCE_XML_DECLN_CLOSE = 55
SCE_XML_DECLN_QUOT_CONTENT = 56
SCE_XML_DECLN_APOS_CONTENT = 57
SCE_XML_DECLN_DATA_CHARS = 58
SCE_XML_UPPER_BOUND = 59
SCE_UDL_M_DEFAULT = 0
SCE_UDL_M_STAGO = 1
SCE_UDL_M_TAGNAME = 2
SCE_UDL_M_TAGSPACE = 3
SCE_UDL_M_ATTRNAME = 4
SCE_UDL_M_OPERATOR = 5
SCE_UDL_M_STAGC = 6
SCE_UDL_M_EMP_TAGC = 7
SCE_UDL_M_STRING = 8
SCE_UDL_M_ETAGO = 9
SCE_UDL_M_ETAGC = 10
SCE_UDL_M_ENTITY = 11
SCE_UDL_M_PI = 12
SCE_UDL_M_CDATA = 13
SCE_UDL_M_COMMENT = 14
SCE_UDL_CSS_DEFAULT = 15
SCE_UDL_CSS_COMMENT = 16
SCE_UDL_CSS_NUMBER = 17
SCE_UDL_CSS_STRING = 18
SCE_UDL_CSS_WORD = 19
SCE_UDL_CSS_IDENTIFIER = 20
SCE_UDL_CSS_OPERATOR = 21
SCE_UDL_CSL_DEFAULT = 22
SCE_UDL_CSL_COMMENT = 23
SCE_UDL_CSL_COMMENTBLOCK = 24
SCE_UDL_CSL_NUMBER = 25
SCE_UDL_CSL_STRING = 26
SCE_UDL_CSL_WORD = 27
SCE_UDL_CSL_IDENTIFIER = 28
SCE_UDL_CSL_OPERATOR = 29
SCE_UDL_CSL_REGEX = 30
SCE_UDL_SSL_DEFAULT = 31
SCE_UDL_SSL_COMMENT = 40
SCE_UDL_SSL_COMMENTBLOCK = 41
SCE_UDL_SSL_NUMBER = 42
SCE_UDL_SSL_STRING = 43
SCE_UDL_SSL_WORD = 44
SCE_UDL_SSL_IDENTIFIER = 45
SCE_UDL_SSL_OPERATOR = 46
SCE_UDL_SSL_REGEX = 47
SCE_UDL_SSL_VARIABLE = 48
SCE_UDL_TPL_DEFAULT = 49
SCE_UDL_TPL_COMMENT = 50
SCE_UDL_TPL_COMMENTBLOCK = 51
SCE_UDL_TPL_NUMBER = 52
SCE_UDL_TPL_STRING = 53
SCE_UDL_TPL_WORD = 54
SCE_UDL_TPL_IDENTIFIER = 55
SCE_UDL_TPL_OPERATOR = 56
SCE_UDL_TPL_VARIABLE = 57
SCE_UDL_UPPER_BOUND = 57
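# --- Editor's note: a hedged usage sketch, not part of the generated file. ---
# Per the file's own comments, the SCLEX_* ids feed find_lexer_module_by_id
# and the SCE_* values are the style constants returned by tokenize_by_style;
# the exact SilverCity call below is an assumption:
#
#   import SilverCity
#   lexer = SilverCity.find_lexer_module_by_id(SCLEX_PYTHON)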
|
mit
|
codenote/chromium-test
|
chrome/test/webdriver/test/util.py
|
49
|
3101
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generic utilities for all python scripts."""
import atexit
import os
import signal
import stat
import subprocess
import sys
import tempfile
import urllib
def GetFileURLForPath(path):
"""Get file:// url for the given path.
Also quotes the url using urllib.quote().
"""
abs_path = os.path.abspath(path)
if sys.platform == 'win32':
# Don't quote the ':' in the drive letter (say, C:) on Windows.
# Also, replace '\' with '/' as expected in a file:/// url.
drive, rest = os.path.splitdrive(abs_path)
quoted_path = drive.upper() + urllib.quote((rest.replace('\\', '/')))
return 'file:///' + quoted_path
else:
quoted_path = urllib.quote(abs_path)
return 'file://' + quoted_path
def MakeTempDir(parent_dir=None):
"""Creates a temporary directory and returns an absolute path to it.
The temporary directory is automatically deleted when the python interpreter
exits normally.
Args:
parent_dir: the directory to create the temp dir in. If None, the system
temp dir is used.
Returns:
The absolute path to the temporary directory.
"""
path = tempfile.mkdtemp(dir=parent_dir)
def DeleteDir():
# Don't use shutil.rmtree because it can't delete read-only files on Win.
for root, dirs, files in os.walk(path, topdown=False):
for name in files:
filename = os.path.join(root, name)
os.chmod(filename, stat.S_IWRITE)
os.remove(filename)
for name in dirs:
os.rmdir(os.path.join(root, name))
atexit.register(DeleteDir)
return path
def IsWin():
return sys.platform == 'cygwin' or sys.platform.startswith('win')
def IsLinux():
return sys.platform.startswith('linux')
def IsMac():
return sys.platform.startswith('darwin')
def Kill(pid):
"""Terminate the given pid."""
if IsWin():
subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
else:
os.kill(pid, signal.SIGTERM)
def RunCommand(cmd, cwd=None):
"""Runs the given command and returns the exit code.
Args:
cmd: list of command arguments.
cwd: working directory to execute the command, or None if the current
working directory should be used.
Returns:
The exit code of the command.
"""
process = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, cwd=cwd)
process.wait()
return process.returncode
def Unzip(zip_path, output_dir):
"""Unzips the given zip file using a system installed unzip tool.
Args:
zip_path: zip file to unzip.
output_dir: directory to unzip the contents of the zip file. The directory
must exist.
Raises:
RuntimeError if the unzip operation fails.
"""
if IsWin():
unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
else:
unzip_cmd = ['unzip', '-o']
unzip_cmd += [zip_path]
if RunCommand(unzip_cmd, output_dir) != 0:
raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
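# --- Editor's note: a hedged usage sketch, not part of the original file. ---
# Typical use of the helpers above (the POSIX command is illustrative):
#
#   tmp_dir = MakeTempDir()                  # removed again at interpreter exit
#   print GetFileURLForPath(tmp_dir)         # file:// URL for the directory
#   exit_code = RunCommand(['ls', tmp_dir])  # runs, waits, returns exit code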
|
bsd-3-clause
|
codeAshu/cgt
|
cgt/api.py
|
3
|
19725
|
import operator
import numpy as np
import sys
if sys.argv[0] != "gen_py.py":
from api_autogen import *
import cgt
from . import core, utils
# Every non-underscored function in this file should have a docstring, and it should enforce that the input data is valid
# ================================================================
# Variable Constructors
# ================================================================
_tensor_doc_template = """
Creates a symbolic variable representing a %s argument (i.e., a tensor of rank %s).
Inputs
------
name: (optional) string name of this variable, which will be displayed by printing functions.
dtype: string (e.g., 'float','int', 'float32') or numpy dtype object. Note that float precision
will be ignored and cgt.floatX will be used.
fixed_shape: a tuple of either int or None, e.g., (None, 3, 10), representing the known
shape components of this argument. This argument allows CGT to infer the shape of
variables depending on this one, and also apply optimization that depend on the shape
being known.
"""
def scalar(name=None, dtype=None, fixed_shape=None):
return core.Argument(core.TensorType(cgt.floatX if dtype is None else dtype, 0), name, fixed_shape=fixed_shape)
scalar.__doc__ = _tensor_doc_template%("scalar",0)
def vector(name=None, dtype=None, fixed_shape=None):
return core.Argument(core.TensorType(cgt.floatX if dtype is None else dtype, 1), name, fixed_shape=fixed_shape)
vector.__doc__ = _tensor_doc_template%("vector",1)
def matrix(name=None, dtype=None, fixed_shape=None):
return core.Argument(core.TensorType(cgt.floatX if dtype is None else dtype, 2), name, fixed_shape=fixed_shape)
matrix.__doc__ = _tensor_doc_template%("matrix",2)
def tensor3(name=None, dtype=None, fixed_shape=None):
return core.Argument(core.TensorType(cgt.floatX if dtype is None else dtype, 3), name, fixed_shape=fixed_shape)
tensor3.__doc__ = _tensor_doc_template%("3-tensor",3)
def tensor4(name=None, dtype=None, fixed_shape=None):
return core.Argument(core.TensorType(cgt.floatX if dtype is None else dtype, 4), name, fixed_shape=fixed_shape)
tensor4.__doc__ = _tensor_doc_template%("4-tensor",4)
def tensor(dtype, ndim, name=None, fixed_shape=None):
return core.Argument(core.TensorType(cgt.floatX if dtype is None else dtype, ndim), name, fixed_shape=fixed_shape)
tensor.__doc__ = _tensor_doc_template%("k-tensor","k")
# ================================================================
# Symbolic functions
# ================================================================
def add_multi(xs):
"""
xs -> xs[0] + xs[1] + ... + xs[len(xs)-1]
"""
return reduce(operator.add, xs)
def arange(start, stop=None, step=1, dtype=None):
"""
Like numpy.arange, but arguments can be symbolic
"""
if (stop is None):
(start, stop) = (0, start)
if (dtype is None):
dtype = 'i8'
return core.Result(core.Arange(dtype), [start, stop, step])
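# Usage sketch: the endpoints may themselves be symbolic, e.g.
#   n = cgt.scalar(dtype='i8')
#   r = cgt.arange(n)          # symbolic [0, n) with dtype 'i8'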
def argmax(x, axis=None, keepdims=False):
"""
Like numpy.argmax, but arguments can be symbolic
"""
if (axis is None):
out = flatten(x).argmax(axis=0)
else:
assert isinstance(axis, int)
out = core.Result(core.Argmax(axis), [x])
if (not keepdims):
out = _dropdims(out, [axis])
return out
def batched_matmul(x, y):
r"""
Given two 3-tensors x_nij, and y_njk, loop over 'n' and contract along 'j'
x_nij, y_njk -> z_nik := \sum_n x_nij y_njk
A variety of useful tensor contraction operations can be written in this form
after permuting axes and reshaping.
"""
return core.Result(core.BatchedMul22(False,False), [x,y])
def broadcast(opname, a, b, bcpat):
"""
Perform elementwise binary operation such as addition or multiplication, and expand
singleton dimensions when appropriate.
opname: string name of operation: *,+,-,/,<,>,<=,>=,**,==,!=
a, b: variables
bcpat: a string of x,1 specifying which dimensions are singletons in both a and b. Here are some examples:
"x1,1x": a.shape[1] == 1 and b.shape[0] == 1
"xx1,xxx": a.shape[2] == 1, but we should have a.shape[0]==b.shape[0] and a.shape[1]==b.shape[1]
E.g., here's an example of using this function
a = np.zeros((2,3))
b = np.zeros((2,1))
z = cgt.broadcast("+", a, b, "xx,x1")
"""
x,y = a,b # switched x,y -> a,b so 'x' in bcpat would be less confusing
(xpat, ypat) = bcpat.split(',')
(xbcaxes, ybcaxes) = [[i for (i, letter) in enumerate(pat) if (letter == '1')] for pat in (xpat, ypat)]
assert (x.ndim == y.ndim)
if xbcaxes:
# for i in xbcaxes: core.assertequal1(size(x,i), 1, "you mislabeled axis %i as singleton"%i) # @SHAPE_CHECK
x = core.Result(core.Repeat(xbcaxes), [x] + [size(y, ax) for ax in xbcaxes])
if ybcaxes:
# for i in ybcaxes: core.assertequal1(size(y,i), 1, "you mislabeled axis %i as singleton"%i) # @SHAPE_CHECK
y = core.Result(core.Repeat(ybcaxes), [y] + [size(x, ax) for ax in ybcaxes])
return core.elwise_binary(opname, x, y)
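# Usage sketch mirroring the docstring example above, with symbolic inputs:
#   a = cgt.matrix("a")                      # shape (m, n)
#   b = cgt.matrix("b")                      # shape (m, 1)
#   z = cgt.broadcast("+", a, b, "xx,x1")    # b's singleton axis 1 is repeated to n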
def _get_nu_cast(dtype):
castfunc = np.cast[dtype]
def _nu_cast(x, out=None):
if out is None:
return castfunc(x)
else:
out[...] = castfunc(x)
return _nu_cast
def cast(x, dtype):
"""
Convert variable x to the desired datatype
"""
x = core.as_node(x)
if (x.dtype == dtype):
return x
else:
diff = (core.dtype_kind(dtype) in 'cf')
opname = 'cast_to_%s' % dtype
ui = core.UnaryInfo(opname, _get_nu_cast(dtype), diff, dtype,
lambda x,y,gy : (cast(gy, x.dtype) if diff else core._nondiff()), 'x')
return core.Result(core.ElwiseUnary(opname, ui), [x])
def ceil_divide(x, y):
return iceil(x / y)
def concatenate(xs, axis=0):
"""
Like np.concatenate
xs: a list of variables with the same shape along all axes other than `axis`
"""
return core.Result(core.Concatenate(axis), xs)
def constant(val):
"""
creates a symbolic expression with constant value
val: numpy array or python scalar
"""
if isinstance(val, tuple):
val = core.as_valid_tuple(val)
op = core.ConstantTuple(val)
else:
val = core.as_valid_array(val)
op = core.ConstantTensor(val)
return core.Result(op, [])
def dot(x, y):
"""
Like numpy.dot
x,y: variables
"""
x = core.as_node(x)
y = core.as_node(y)
xdim = x.ndim
ydim = y.ndim
if (xdim == 1):
if (ydim == 1):
return core.Result(core.Dot(), [x, y])
elif (ydim == 2):
return core.Result(core.Mul21(True), [y, x])
else:
raise NotImplementedError
elif (xdim == 2):
if (ydim == 1):
return core.Result(core.Mul21(False), [x, y])
elif (ydim == 2):
return core.Result(core.Mul22(False, False), [x, y])
else:
raise NotImplementedError
else:
raise NotImplementedError
def einsum(desc, x, y):
"""
Like numpy.einsum except x and y are symbolic variables
desc: string like "nij,njk->nik"
x,y: symbolic variables
"""
import re
pat = '(\\w+),(\\w+)->(\\w+)'
match = re.match(pat, desc)
if (match is None):
raise ValueError('einsum error: desc should match regexp %s' % pat)
(xdesc, ydesc, zdesc) = match.groups()
    if not (_is_unique(xdesc) and _is_unique(ydesc) and _is_unique(zdesc)):
        raise ValueError('Invalid tensor description %s passed into einsum. Tensor indices should be unique' % desc)
    if not _is_subset_of(zdesc, xdesc + ydesc):
        raise ValueError('Invalid tensor description %s passed into einsum. Unrecognized index in output.' % desc)
loop = []
justx = []
contr = []
justy = []
for c in xdesc:
if (c in ydesc):
if (c in zdesc):
loop.append(c)
else:
contr.append(c)
else:
justx.append(c)
for c in ydesc:
if (not (c in xdesc)):
justy.append(c)
(ixloop, ijustx, ixcontr) = [[xdesc.index(c) for c in chars] for chars in [loop, justx, contr]]
(iyloop, ijusty, iycontr) = [[ydesc.index(c) for c in chars] for chars in [loop, justy, contr]]
xshp = shape(x)
yshp = shape(y)
xt = transpose(x, ixloop + ijustx + ixcontr).reshape([mul_multi([xshp[i] for i in icol]) for icol in [ixloop, ijustx, ixcontr]])
yt = transpose(y, iyloop + iycontr + ijusty).reshape([mul_multi([yshp[i] for i in icol]) for icol in [iyloop, iycontr, ijusty]])
zt = batched_matmul(xt, yt)
return transpose(zt.reshape([size(x, xdesc.index(c)) for c in loop] + [size(x, xdesc.index(c)) for c in justx] + [size(y, ydesc.index(c)) for c in justy]), utils.invert_perm([zdesc.index(c) for c in loop + justx + justy]))
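# Usage sketch: a batched matrix product expressed via einsum, equivalent
# to batched_matmul(x, y) defined above:
#   x = cgt.tensor3("x"); y = cgt.tensor3("y")
#   z = cgt.einsum("nij,njk->nik", x, y)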
def fill(val, shape):
"""
Create an array of shape `shape` filled with scalar `val`
"""
assert isinstance(shape, list)
val = core.as_node(val)
    # if val is a constant, use a Fill Op, which includes the value as an attribute
if isinstance(val.op, core.Constant):
return core.Result(core.Fill(val.op.value), shape)
# if val is a non-constant variable, we can use a Repeat Op
else:
singleton = reshape(val, [1]*len(shape))
return core.Result(core.Repeat(range(len(shape))), [singleton] + shape)
def flatten(x):
"""
Like numpy.flatten
"""
return reshape(x, [mul_multi(shape(x))])
def flip(x, axes):
"""
Reverse array along specified axes
e.g.
flip(x,0) == x[::-1],
flip(x, 1) == x[:,::-1]
"""
x = core.as_node(x)
assert isinstance(axes, list)
return core.Result(core.Flip(axes), [x])
def floor_divide(x, y):
"""
returns floor(x/y), with integer dtype
"""
return ifloor(x / y)
def getitem(arr, slis):
"""
    Used internally for array indexing/slicing, though we will specify its behavior later
"""
arr = core.as_node(arr)
if isinstance(arr.typ, core.TupleType):
assert isinstance(slis, int)
return tuple_index(arr, slis)
if (not _is_list_or_tuple(slis)):
slis = [slis]
if all(isinstance(sli, (int, slice, type(None))) for sli in slis):
return getitem_nonfancy(arr, slis)
elif all((isinstance(sli, (np.ndarray, core.Node)) for sli in slis)):
return getitem_fancy(arr, slis)
else:
raise ValueError('Tried to index with slices %s. Either all should be in {slice,int,colon} or all must be a ndarray of ints' % str(slis))
def getitem_fancy(arr, indarrs):
"""
Used internally for fancy indexing
"""
assert all(((indarr.ndim == 1) for indarr in indarrs))
indarrs = map(core.as_node, indarrs)
flatinds = sub2ind(indarrs, shape(arr))
return core.Result(core.GetFlatIndices(), [arr, flatinds])
def getitem_nonfancy(arr, slis):
"""
Used internally for slicing
"""
out = arr
ax = 0
shapedesc = []
if (not _is_list_or_tuple(slis)):
slis = [slis]
for sli in slis:
if isinstance(sli, slice) and all(x is None for x in (sli.start, sli.stop, sli.step)):
shapedesc.append(ax)
elif (sli is None):
shapedesc.append('+')
ax -= 1
elif isinstance(sli, bool):
raise ValueError('tried to index with a bool')
else:
if isinstance(sli, slice):
shapedesc.append('k')
elif isinstance(sli, int):
sli = slice(sli, sli + 1, 1)
shapedesc.append('-')
else:
raise NotImplementedError
start = (0 if sli.start is None else sli.start)
stop = size(arr, ax) if (sli.stop is None) else sli.stop
step = (1 if sli.step is None else sli.step)
            if (isinstance(stop, int) and (stop < 0)):
                stop = size(arr, ax) + stop
if isinstance(step, int):
assert step != 0
if step < 0:
raise NotImplementedError("negative `step parameter is not implemented. use flip(x,0) instead of x[::-1]")
out = core.Result(core.GetSli(ax), [out, start, stop, step])
ax += 1
if all(((x == 'k') for x in shapedesc)):
return out
else:
axidx = 0
newshape = []
for d in shapedesc:
if (d == '+'):
newshape.append(1)
elif (d == '-'):
axidx += 1
else:
newshape.append(size(out, axidx))
axidx += 1
for axidx in xrange(axidx, out.ndim):
newshape.append(size(out, axidx))
out = reshape(out, newshape)
return out
def irfft(x, axes):
"""
like np.fft.irfft
"""
return core.Result(core.IRFFT(axes),[x])
def make_tuple(*xs):
"""
Create a symbolic tuple variable out of a collection of symbolic variables
"""
return core.Result(core.MakeTuple(), list(xs))
def max(x, axis=None, keepdims=False): #pylint: disable=W0622
"""
Like numpy.max
"""
axes = _red_axes(axis, x.ndim)
out = core.Result(core.Max(axes), [x])
if (not keepdims):
out = _dropdims(out, axes)
return out
def mean(x, axis=None, keepdims=False):
"""
Like numpy.mean
"""
if x.dtype == 'i1': x = cgt.cast(x, cgt.floatX)
axes = _red_axes(axis, x.ndim)
return sum(x, axis=axes, keepdims=keepdims) / mul_multi([size(x, ax) for ax in axes])
def mul_multi(xs):
"""
returns xs[0] * xs[1] * ... * xs[len(xs)-1]
"""
return reduce(operator.mul, xs) if (len(xs) > 0) else constant(np.array(1, dtype='i8'))
def norm(x, axis=None, p=2, keepdims=False):
"""
Computes p-norm of vectors formed by varying `axis`
"""
if p==2:
return sqrt(square(x).sum(axis=axis,keepdims=keepdims))
else:
return pow(pow(x, p).sum(axis=axis,keepdims=keepdims), 1.0 / p)
def ones(shape, dtype=None): #pylint: disable=W0621
"""
Like numpy.ones
"""
if (dtype is None):
dtype = cgt.floatX
return core.Result(core.Fill(np.array(1, dtype)), shape)
def ones_like(x):
"""
Like numpy.ones_like
"""
return ones(shape(x), x.dtype)
def outer(x, y):
"""
Like numpy.outer
"""
assert (x.ndim == y.ndim == 1)
return core.Result(core.Outer(), [x, y])
def _validate_shape(shp,funcname):
if len(shp)==1 and isinstance(shp[0],tuple):
raise ValueError("you called %s(x) where x is a tuple. You should call %s(a,b,c...) instead."%(funcname,funcname))
def rand(*shp):
"""
Like numpy.random.rand
"""
_validate_shape(shp,"rand")
shp = map(core.as_node, shp)
return core.Result(core.ScalarRng('uniform'), shp)
def randn(*shp):
"""
Like numpy.random.randn
"""
_validate_shape(shp,"randn")
shp = map(core.as_node, shp)
return core.Result(core.ScalarRng('gaussian'), shp)
def real(x):
"""
Like numpy.real
"""
assert (core.dtype_kind(x.dtype) == 'c')
return cast(x, cgt.floatX)
def repeat(x, repeats, axis):
"""
Like numpy.repeat
"""
return core.Result(core.Repeat([axis]), [x, core.as_node(repeats)])
def reshape(x, shp):
"""
Like numpy.reshape
"""
shp = map(core.as_node, shp)
assert all(s.ndim==0 and core.dtype_kind(s.dtype)=='i' for s in shp)
return core.Result(core.Reshape(), [x] + list(shp))
def rfft(x, periods, axes):
"""
    like np.fft.rfft
"""
return core.Result(core.RFFT(axes),[x]+list(periods))
def set_precision(prec):
"""
prec in {"single", "double"}
globally set floating point precision for float and complex types
"""
assert prec in ("half","single", "double","quad")
if prec == "half":
cgt.floatX = 'f2'
cgt.complexX = None
utils.warn("half precision not yet supported")
elif prec == "single":
cgt.floatX = 'f4'
cgt.complexX = 'c8'
elif prec == "double":
cgt.floatX = 'f8'
cgt.complexX = 'c16'
elif prec == "quad":
cgt.floatX = 'f16'
cgt.complexX = 'c32'
def get_precision():
"""
Returns the current global precision, "half","single","double","quad"
"""
return {"f2":"half","f4":"single","f8":"double","f16":"quad"}[cgt.floatX]
def shape(x):
"""
Like numpy.shape
"""
x = core.as_node(x)
if isinstance(x.typ, core.TensorType):
return [size(x, i) for i in xrange(x.ndim)]
else:
return tuple(map(shape, x.parents))
def shared(val, name=None, device=None, fixed_shape_mask=None):
"""
Creates a variable that has an underlying data value, which can be changed externally
"""
op = core.InMemoryData(val, device=device,fixed_shape_mask=fixed_shape_mask)
return core.Result(op, [], name=name)
def size(x, axis):
"""
size(x, axis) == x.shape[axis]
"""
return core.Result(core.Size(axis), [x])
def stack(scalars):
"""
scalars : a list of scalar variables
stack([a,b,c]) builds a vector with a,b,c as its elements
"""
assert (len(scalars) > 0) and all(s.ndim == 0 for s in scalars)
return core.Result(core.Stack(), scalars)
def sub2ind(subs, shp):
"""
Like matlab sub2ind
"""
ndim = len(shp)
assert ndim >= 1
strides = [None]*(ndim-1) + [1]
for i in xrange(ndim-2, -1, -1):
strides[i] = shp[i+1] * strides[i+1]
return add_multi([stride*sub for (stride,sub) in utils.safezip(strides, subs)])
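# Worked example with plain ints (symbolic in practice): for shp = (4, 5)
# the strides are [5, 1], so subs = (2, 3) maps to 2*5 + 3*1 = 13, matching
# np.ravel_multi_index((2, 3), (4, 5)).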
def prod(x, axis=None, keepdims=False):
"""
Like numpy.prod
"""
return cgt.exp(cgt.sum(cgt.log(x), axis=axis, keepdims=keepdims))
def sum(x, axis=None, keepdims=False): #pylint: disable=W0622
"""
Like numpy.sum
"""
if x.dtype == 'i1':
utils.warn("Called sum() on a one-byte integer, so you risk overflow. Might want to cast to float.")
axes = _red_axes(axis, x.ndim)
if (len(axes) == 0):
return x
out = core.Result(core.Sum(axes), [x])
if (not keepdims):
out = _dropdims(out, axes)
return out
# def transport(x):
# return core.Result(core.Transport(), [x])
def transpose(arr, axes=None):
"""
Like numpy.transpose
"""
if axes is None:
assert arr.ndim == 2
axes = [1,0]
else:
assert _is_list_or_tuple(axes) and len(axes) == arr.ndim
axes = list(axes)
if axes == range(arr.ndim):
return arr
else:
return core.Result(core.Transpose(axes), [arr])
def tuple_index(x, i):
"""
If x is a symbolic variable with isinstance(x.typ, TupleType), return x[i]
"""
return core.Result(core.TupleIndex(i), [x])
def zeros(shape, dtype=None): #pylint: disable=W0621
"""
Like numpy.zeros
"""
if (dtype is None):
dtype = cgt.floatX
return core.Result(core.Fill(np.array(0, dtype)), shape)
def zeros_like(x):
"""
Like numpy.zeros_like
"""
return zeros(shape(x), x.dtype)
def _dropdims(x, axes):
return reshape(x, [size(x, i) for i in xrange(x.ndim) if (i not in axes)])
def _is_list_or_tuple(xs):
return isinstance(xs, (list, tuple))
def _is_subset_of(maybesub, bigset):
    return set(maybesub).issubset(set(bigset))
def _is_unique(col):
return (len(set(col)) == len(col))
def _red_axes(axis, ndim):
if (axis is None):
return range(ndim)
elif isinstance(axis, int):
return [axis]
elif isinstance(axis, (list, tuple)):
return list(axis)
else:
raise ValueError("invalid argument 'axis'=%s" % axis)
|
mit
|
MarllonSoares/PYTHON-ESTUDO
|
tipos-de-variaveis/02-atribuicao-multipla/atribuicao-multipla.py
|
1
|
1038
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 18 01:47:28 2017
@author: Marllon Soares
@site: www.marllonsoares.com.br
@assunto: Atribuição múltipla
@linguagem: Python3
@doc:
O Python permite que você atribua um único valor a
várias variáveis simultaneamente. Por exemplo -
a = b = c = 1
Aqui, um objeto inteiro é criado com o valor 1,
e todas as três variáveis são atribuídas ao mesmo
local de memória. Você também pode atribuir vários
objetos a várias variáveis. Por exemplo -
a,b,c = 1,2,"john"
Aqui, dois objetos inteiros com valores 1 e 2 são atribuídos
às variáveis a e b, respectivamente, e um objeto string
com o valor "john" é atribuído à variável c.
"""
# Assigning a single value to several variables simultaneously
a = b = c = 1
print("--- Example 1 ---")
print(a)
print(b)
print(c)
# Assigning multiple values to multiple variables
a, b, c = 1, 2, "Multiple assignment"
print("--- Example 2 ---")
print(a)
print(b)
print(c)
|
mit
|
s0enke/boto
|
tests/integration/ec2/autoscale/test_cert_verification.py
|
126
|
1575
|
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Check that all of the certs on all service endpoints validate.
"""
import unittest
from tests.integration import ServiceCertVerificationTest
import boto.ec2.autoscale
class AutoscaleCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
autoscale = True
regions = boto.ec2.autoscale.regions()
def sample_service_call(self, conn):
conn.get_all_groups()
|
mit
|
RadioFreeAsia/RDacity
|
lib-src/lv2/serd/waflib/Tools/ldc2.py
|
330
|
1029
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import sys
from waflib.Tools import ar,d
from waflib.Configure import conf
@conf
def find_ldc2(conf):
conf.find_program(['ldc2'],var='D')
out=conf.cmd_and_log([conf.env.D,'-version'])
if out.find("based on DMD v2.")==-1:
conf.fatal("detected compiler is not ldc2")
@conf
def common_flags_ldc2(conf):
v=conf.env
v['D_SRC_F']=['-c']
v['D_TGT_F']='-of%s'
v['D_LINKER']=v['D']
v['DLNK_SRC_F']=''
v['DLNK_TGT_F']='-of%s'
v['DINC_ST']='-I%s'
v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
v['LINKFLAGS_dshlib']=['-L-shared']
v['DHEADER_ext']='.di'
v['DFLAGS_d_with_header']=['-H','-Hf']
v['D_HDR_F']='%s'
v['LINKFLAGS']=[]
v['DFLAGS_dshlib']=['-relocation-model=pic']
def configure(conf):
conf.find_ldc2()
conf.load('ar')
conf.load('d')
conf.common_flags_ldc2()
conf.d_platform_flags()
|
gpl-2.0
|
Kazade/NeHe-Website
|
google_appengine/lib/django-1.5/tests/regressiontests/utils/functional.py
|
93
|
1084
|
from django.utils import unittest
from django.utils.functional import lazy, lazy_property
class FunctionalTestCase(unittest.TestCase):
def test_lazy(self):
t = lazy(lambda: tuple(range(3)), list, tuple)
for a, b in zip(t(), range(3)):
self.assertEqual(a, b)
def test_lazy_base_class(self):
"""Test that lazy also finds base class methods in the proxy object"""
class Base(object):
def base_method(self):
pass
class Klazz(Base):
pass
t = lazy(lambda: Klazz(), Klazz)()
self.assertTrue('base_method' in dir(t))
def test_lazy_property(self):
class A(object):
def _get_do(self):
raise NotImplementedError
def _set_do(self, value):
raise NotImplementedError
do = lazy_property(_get_do, _set_do)
class B(A):
def _get_do(self):
return "DO IT"
self.assertRaises(NotImplementedError, lambda: A().do)
self.assertEqual(B().do, 'DO IT')
|
bsd-3-clause
|
40223125/40223125-2
|
static/Brython3.1.1-20150328-091302/Lib/_thread.py
|
740
|
4879
|
"""Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
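# Usage sketch: with this dummy module the "thread" runs synchronously,
# so the call below only returns after the function has finished.
#   start_new_thread(print, ("hello from the main thread",))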
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
    Since this module should only be used when the _thread module is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return -1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
# Brython-specific to avoid circular references between threading and _threading_local
class _local:
pass
|
gpl-3.0
|
glenn-edgar/local_scda
|
flask_web/werkzeug-master/werkzeug/contrib/cache.py
|
72
|
23068
|
# -*- coding: utf-8 -*-
"""
werkzeug.contrib.cache
~~~~~~~~~~~~~~~~~~~~~~
    The main problem with dynamic Web sites is, well, they're dynamic.  Each
    time a user requests a page, the webserver executes a lot of code, queries
    the database, and renders templates until the visitor gets the page they see.
    This is a lot more expensive than just loading a file from the file system
    and sending it to the visitor.
    For most Web applications this overhead isn't a big deal, but once it
    becomes one, you will be glad to have a cache system in place.
How Caching Works
=================
Caching is pretty simple. Basically you have a cache object lurking around
somewhere that is connected to a remote cache or the file system or
something else. When the request comes in you check if the current page
is already in the cache and if so, you're returning it from the cache.
Otherwise you generate the page and put it into the cache. (Or a fragment
of the page, you don't have to cache the full thing)
Here is a simple example of how to cache a sidebar for a template::
def get_sidebar(user):
identifier = 'sidebar_for/user%d' % user.id
value = cache.get(identifier)
if value is not None:
return value
value = generate_sidebar_for(user=user)
cache.set(identifier, value, timeout=60 * 5)
return value
Creating a Cache Object
=======================
To create a cache object you just import the cache system of your choice
from the cache module and instantiate it. Then you can start working
with that object:
>>> from werkzeug.contrib.cache import SimpleCache
>>> c = SimpleCache()
>>> c.set("foo", "value")
>>> c.get("foo")
'value'
>>> c.get("missing") is None
True
Please keep in mind that you have to create the cache and put it somewhere
you have access to it (either as a module global you can import or you just
put it into your WSGI application).
:copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import re
import tempfile
try:
from hashlib import md5
except ImportError:
from md5 import new as md5
from itertools import izip
from time import time
from werkzeug.posixemulation import rename
try:
import cPickle as pickle
except ImportError:
import pickle
def _items(mappingorseq):
"""Wrapper for efficient iteration over mappings represented by dicts
or sequences::
>>> for k, v in _items((i, i*i) for i in xrange(5)):
... assert k*k == v
>>> for k, v in _items(dict((i, i*i) for i in xrange(5))):
... assert k*k == v
"""
return mappingorseq.iteritems() if hasattr(mappingorseq, 'iteritems') \
else mappingorseq
class BaseCache(object):
"""Baseclass for the cache systems. All the cache systems implement this
API or a superset of it.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`set`.
"""
def __init__(self, default_timeout=300):
self.default_timeout = default_timeout
def get(self, key):
"""Looks up key in the cache and returns the value for it.
If the key does not exist `None` is returned instead.
:param key: the key to be looked up.
"""
return None
def delete(self, key):
"""Deletes `key` from the cache. If it does not exist in the cache
nothing happens.
:param key: the key to delete.
"""
pass
def get_many(self, *keys):
"""Returns a list of values for the given keys.
For each key a item in the list is created. Example::
foo, bar = cache.get_many("foo", "bar")
If a key can't be looked up `None` is returned for that key
instead.
:param keys: The function accepts multiple keys as positional
arguments.
"""
return map(self.get, keys)
def get_dict(self, *keys):
"""Works like :meth:`get_many` but returns a dict::
d = cache.get_dict("foo", "bar")
foo = d["foo"]
bar = d["bar"]
:param keys: The function accepts multiple keys as positional
arguments.
"""
return dict(izip(keys, self.get_many(*keys)))
def set(self, key, value, timeout=None):
"""Adds a new key/value to the cache (overwrites value, if key already
exists in the cache).
:param key: the key to set
:param value: the value for the key
:param timeout: the cache timeout for the key (if not specified,
it uses the default timeout).
"""
pass
def add(self, key, value, timeout=None):
"""Works like :meth:`set` but does not overwrite the values of already
existing keys.
:param key: the key to set
:param value: the value for the key
:param timeout: the cache timeout for the key or the default
timeout if not specified.
"""
pass
def set_many(self, mapping, timeout=None):
"""Sets multiple keys and values from a mapping.
:param mapping: a mapping with the keys/values to set.
:param timeout: the cache timeout for the key (if not specified,
it uses the default timeout).
"""
for key, value in _items(mapping):
self.set(key, value, timeout)
def delete_many(self, *keys):
"""Deletes multiple keys at once.
:param keys: The function accepts multiple keys as positional
arguments.
"""
for key in keys:
self.delete(key)
def clear(self):
"""Clears the cache. Keep in mind that not all caches support
completely clearing the cache.
"""
pass
def inc(self, key, delta=1):
"""Increments the value of a key by `delta`. If the key does
not yet exist it is initialized with `delta`.
For supporting caches this is an atomic operation.
:param key: the key to increment.
:param delta: the delta to add.
"""
self.set(key, (self.get(key) or 0) + delta)
def dec(self, key, delta=1):
"""Decrements the value of a key by `delta`. If the key does
not yet exist it is initialized with `-delta`.
For supporting caches this is an atomic operation.
:param key: the key to increment.
:param delta: the delta to subtract.
"""
self.set(key, (self.get(key) or 0) - delta)
class NullCache(BaseCache):
"""A cache that doesn't cache. This can be useful for unit testing.
:param default_timeout: a dummy parameter that is ignored but exists
for API compatibility with other caches.
"""
class SimpleCache(BaseCache):
"""Simple memory cache for single process environments. This class exists
mainly for the development server and is not 100% thread safe. It tries
to use as many atomic operations as possible and no locks for simplicity
but it could happen under heavy load that keys are added multiple times.
:param threshold: the maximum number of items the cache stores before
it starts deleting some.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
"""
def __init__(self, threshold=500, default_timeout=300):
BaseCache.__init__(self, default_timeout)
self._cache = {}
self.clear = self._cache.clear
self._threshold = threshold
def _prune(self):
if len(self._cache) > self._threshold:
now = time()
for idx, (key, (expires, _)) in enumerate(self._cache.items()):
if expires <= now or idx % 3 == 0:
self._cache.pop(key, None)
    def get(self, key):
        now = time()
        expires, value = self._cache.get(key, (0, None))
        if expires > now:
            return pickle.loads(value)
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
self._prune()
self._cache[key] = (time() + timeout, pickle.dumps(value,
pickle.HIGHEST_PROTOCOL))
def add(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
if len(self._cache) > self._threshold:
self._prune()
item = (time() + timeout, pickle.dumps(value,
pickle.HIGHEST_PROTOCOL))
self._cache.setdefault(key, item)
def delete(self, key):
self._cache.pop(key, None)
_test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match
class MemcachedCache(BaseCache):
"""A cache that uses memcached as backend.
The first argument can either be an object that resembles the API of a
:class:`memcache.Client` or a tuple/list of server addresses. In the
event that a tuple/list is passed, Werkzeug tries to import the best
available memcache library.
Implementation notes: This cache backend works around some limitations in
memcached to simplify the interface. For example unicode keys are encoded
to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return
the keys in the same format as passed. Furthermore all get methods
silently ignore key errors to not cause problems when untrusted user data
is passed to the get methods which is often the case in web applications.
:param servers: a list or tuple of server addresses or alternatively
a :class:`memcache.Client` or a compatible client.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
:param key_prefix: a prefix that is added before all keys. This makes it
possible to use the same memcached server for different
applications. Keep in mind that
:meth:`~BaseCache.clear` will also clear keys with a
different prefix.
"""
def __init__(self, servers=None, default_timeout=300, key_prefix=None):
BaseCache.__init__(self, default_timeout)
if servers is None or isinstance(servers, (list, tuple)):
if servers is None:
servers = ['127.0.0.1:11211']
self._client = self.import_preferred_memcache_lib(servers)
if self._client is None:
raise RuntimeError('no memcache module found')
else:
# NOTE: servers is actually an already initialized memcache
# client.
self._client = servers
self.key_prefix = key_prefix
def get(self, key):
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
        # memcached doesn't support keys longer than that.  Checks for
        # overly long keys often occur because they are built from user
        # submitted data etc., so we fail silently when getting.
if _test_memcached_key(key):
return self._client.get(key)
def get_dict(self, *keys):
key_mapping = {}
have_encoded_keys = False
for key in keys:
if isinstance(key, unicode):
encoded_key = key.encode('utf-8')
have_encoded_keys = True
else:
encoded_key = key
if self.key_prefix:
encoded_key = self.key_prefix + encoded_key
if _test_memcached_key(key):
key_mapping[encoded_key] = key
d = rv = self._client.get_multi(key_mapping.keys())
if have_encoded_keys or self.key_prefix:
rv = {}
for key, value in d.iteritems():
rv[key_mapping[key]] = value
if len(rv) < len(keys):
for key in keys:
if key not in rv:
rv[key] = None
return rv
def add(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.add(key, value, timeout)
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.set(key, value, timeout)
def get_many(self, *keys):
d = self.get_dict(*keys)
return [d[key] for key in keys]
def set_many(self, mapping, timeout=None):
if timeout is None:
timeout = self.default_timeout
new_mapping = {}
for key, value in _items(mapping):
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
new_mapping[key] = value
self._client.set_multi(new_mapping, timeout)
def delete(self, key):
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
if _test_memcached_key(key):
self._client.delete(key)
def delete_many(self, *keys):
new_keys = []
for key in keys:
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
if _test_memcached_key(key):
new_keys.append(key)
self._client.delete_multi(new_keys)
def clear(self):
self._client.flush_all()
def inc(self, key, delta=1):
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.incr(key, delta)
def dec(self, key, delta=1):
if isinstance(key, unicode):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.decr(key, delta)
def import_preferred_memcache_lib(self, servers):
"""Returns an initialized memcache client. Used by the constructor."""
try:
import pylibmc
except ImportError:
pass
else:
return pylibmc.Client(servers)
try:
from google.appengine.api import memcache
except ImportError:
pass
else:
return memcache.Client()
try:
import memcache
except ImportError:
pass
else:
return memcache.Client(servers)
# backwards compatibility
GAEMemcachedCache = MemcachedCache
class RedisCache(BaseCache):
"""Uses the Redis key-value store as a cache backend.
The first argument can be either a string denoting address of the Redis
server or an object resembling an instance of a redis.Redis class.
Note: Python Redis API already takes care of encoding unicode strings on
the fly.
.. versionadded:: 0.7
.. versionadded:: 0.8
`key_prefix` was added.
.. versionchanged:: 0.8
This cache backend now properly serializes objects.
:param host: address of the Redis server or an object which API is
compatible with the official Python Redis client (redis-py).
:param port: port number on which Redis server listens for connections
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
:param key_prefix: A prefix that should be added to all keys.
"""
def __init__(self, host='localhost', port=6379, password=None,
default_timeout=300, key_prefix=None):
BaseCache.__init__(self, default_timeout)
if isinstance(host, basestring):
try:
import redis
except ImportError:
raise RuntimeError('no redis module found')
self._client = redis.Redis(host=host, port=port, password=password)
else:
self._client = host
self.key_prefix = key_prefix or ''
def dump_object(self, value):
"""Dumps an object into a string for redis. By default it serializes
integers as regular string and pickle dumps everything else.
"""
t = type(value)
if t is int or t is long:
return str(value)
return '!' + pickle.dumps(value)
def load_object(self, value):
"""The reversal of :meth:`dump_object`. This might be callde with
None.
"""
if value is None:
return None
if value.startswith('!'):
return pickle.loads(value[1:])
try:
return int(value)
except ValueError:
# before 0.8 we did not have serialization. Still support that.
return value
def get(self, key):
return self.load_object(self._client.get(self.key_prefix + key))
def get_many(self, *keys):
if self.key_prefix:
keys = [self.key_prefix + key for key in keys]
return [self.load_object(x) for x in self._client.mget(keys)]
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
dump = self.dump_object(value)
self._client.setex(self.key_prefix + key, dump, timeout)
def add(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
dump = self.dump_object(value)
added = self._client.setnx(self.key_prefix + key, dump)
if added:
self._client.expire(self.key_prefix + key, timeout)
def set_many(self, mapping, timeout=None):
if timeout is None:
timeout = self.default_timeout
pipe = self._client.pipeline()
for key, value in _items(mapping):
dump = self.dump_object(value)
pipe.setex(self.key_prefix + key, dump, timeout)
pipe.execute()
def delete(self, key):
self._client.delete(self.key_prefix + key)
def delete_many(self, *keys):
if not keys:
return
if self.key_prefix:
keys = [self.key_prefix + key for key in keys]
self._client.delete(*keys)
def clear(self):
if self.key_prefix:
keys = self._client.keys(self.key_prefix + '*')
if keys:
self._client.delete(*keys)
else:
self._client.flushdb()
def inc(self, key, delta=1):
return self._client.incr(self.key_prefix + key, delta)
def dec(self, key, delta=1):
return self._client.decr(self.key_prefix + key, delta)
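# Usage sketch (assumes a Redis server on localhost:6379 and the redis
# package installed; the key names are hypothetical):
#   cache = RedisCache(key_prefix='myapp/')
#   cache.set('answer', 42, timeout=60 * 5)
#   assert cache.get('answer') == 42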
class FileSystemCache(BaseCache):
"""A cache that stores the items on the file system. This cache depends
on being the only user of the `cache_dir`. Make absolutely sure that
nobody but this cache stores files there or otherwise the cache will
randomly delete files therein.
:param cache_dir: the directory where cache files are stored.
:param threshold: the maximum number of items the cache stores before
it starts deleting some.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
:param mode: the file mode wanted for the cache files, default 0600
"""
#: used for temporary files by the FileSystemCache
_fs_transaction_suffix = '.__wz_cache'
def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0600):
BaseCache.__init__(self, default_timeout)
self._path = cache_dir
self._threshold = threshold
self._mode = mode
if not os.path.exists(self._path):
os.makedirs(self._path)
def _list_dir(self):
"""return a list of (fully qualified) cache filenames
"""
return [os.path.join(self._path, fn) for fn in os.listdir(self._path)
if not fn.endswith(self._fs_transaction_suffix)]
def _prune(self):
entries = self._list_dir()
if len(entries) > self._threshold:
now = time()
for idx, fname in enumerate(entries):
remove = False
f = None
try:
try:
f = open(fname, 'rb')
expires = pickle.load(f)
remove = expires <= now or idx % 3 == 0
finally:
if f is not None:
f.close()
except Exception:
pass
if remove:
try:
os.remove(fname)
except (IOError, OSError):
pass
def clear(self):
for fname in self._list_dir():
try:
os.remove(fname)
except (IOError, OSError):
pass
def _get_filename(self, key):
hash = md5(key).hexdigest()
return os.path.join(self._path, hash)
def get(self, key):
filename = self._get_filename(key)
try:
f = open(filename, 'rb')
try:
if pickle.load(f) >= time():
return pickle.load(f)
finally:
f.close()
os.remove(filename)
except Exception:
return None
def add(self, key, value, timeout=None):
filename = self._get_filename(key)
if not os.path.exists(filename):
self.set(key, value, timeout)
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
filename = self._get_filename(key)
self._prune()
try:
fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
dir=self._path)
f = os.fdopen(fd, 'wb')
try:
pickle.dump(int(time() + timeout), f, 1)
pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
finally:
f.close()
rename(tmp, filename)
os.chmod(filename, self._mode)
except (IOError, OSError):
pass
def delete(self, key):
try:
os.remove(self._get_filename(key))
except (IOError, OSError):
pass
|
mit
|
bmravec/DownMan
|
downman/downloaders/hosters/rapidshare.py
|
1
|
7541
|
#
# rapidshare.py
#
# Copyright 2010 Brett Mravec <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
from threading import Thread
import re, os, os.path
import pycurl
from downman.downloaders.tempfile import TempFile
from downman.downloaders.writefile import WriteFile
from downman.downloaders.timeout import Timeout
from generichost import *
RAPIDSHARE_MATCH = 'http:\/\/rapidshare\.com'
class Rapidshare (GenericHost):
def __init__ (self, url, downman):
GenericHost.__init__ (self, url, downman)
self.case_handlers = [
('<form id="ff" action="([^"]*)" method="post">', self.handle_start_page),
('You have reached the download limit for free\-users', self.handle_download_limit),
('Your IP address (\w+\.\w+\.\w+\.\w+) is already downloading a file', self.handle_already_downloading),
('we have no more download slots', self.handle_no_slots),
('Please try again in (\w+) minutes', self.handle_no_slots_count),
('var c=(\d+)', self.handle_download),
]
def handle_start_page (self, match):
self.aurl = match.group (1)
m = re.search ('<p class="downloadlink">[^<]*?<font [^>]*?>\| (\d+) (KB|MB|GB)</font></p>', self.tfile.contents)
self.total = int (m.group (1))
if m.group (2) == 'KB':
self.total = self.total * 1000
if m.group (2) == 'MB':
self.total = self.total * 1000000
if m.group (2) == 'GB':
self.total = self.total * 1000000000
self.name = re.search ('([^\/]*)$', self.url).group (1)
if self.state != STATE_INFO:
self.tfile = TempFile (self.aurl, "dl.start=Free", self.url)
self.tfile.completed_cb = self.stage_download_completed
self.tfile.start ()
else:
self.set_state (STATE_INFO_COMPLETED)
def handle_download_limit (self, match):
m2 = re.search ('Or try again in about (\d+) minutes', self.tfile.contents)
self.status = 'Download limit, try again in %s minutes' % (m2.group (1))
self.set_state (STATE_HOLDING)
def handle_already_downloading (self, match):
self.status = 'You are already downloading %s' % (match.group (1))
self.set_state (STATE_HOLDING)
def handle_no_slots (self, match):
self.status = 'No download slots available'
self.set_state (STATE_HOLDING)
def handle_no_slots_count (self, match):
self.status = 'No download slots available, try again in %s minutes' % (match.group (1))
self.set_state (STATE_HOLDING)
def handle_download (self, match):
m = re.search ('var c=(\d+)', self.tfile.contents)
m2 = re.search ('<input checked type="radio" name="mirror" onclick="document\.dlf\.action=\\\\\'(.*?)\\\\\';" \/> (.*?)<br \/>', self.tfile.contents)
# print 'Mirror %s: %s' % (m2.group (2), m2.group (1))
self.furl = m2.group (1)
num = int (m.group (1))
self.timeout = Timeout (num, self.handle_start_download, self.print_progress)
self.set_state (STATE_WAITING)
def handle_start_download (self):
dpath = self.downman.config.get_property ('DefaultDownloadDirectory')
self.location = os.path.join (dpath, re.search ('([^\/]*)$', self.furl).group (1))
"""
resume = None
if os.path.exists (self.location):
s = os.stat (self.location)
if s.st_size == self.total:
self.set_state (STATE_COMPLETED)
return
if s.st_size == self.downloaded:
self.furl = self.furl + '?start=' + str (int (self.downloaded))
resume = self.downloaded
"""
self.tfile = RapidshareWriteFile (self.furl, self.location, self.url)
self.tfile.completed_cb = self.download_completed
self.tfile.progress_cb = self.download_progress
self.tfile.start ()
self.status = 'Downloading...'
self.set_state (STATE_DOWNLOADING)
def startup (self, data):
self.name = data['name']
self.url = data['url']
self.downloaded = float (data['downloaded'])
self.total = float (data['total'])
self.state = int (data['state'])
if data.has_key ('location'):
self.location = data['location']
def shutdown (self):
data = {}
if self.state == STATE_CONNECTING:
self.tfile.close ()
self.state = STATE_QUEUED
elif self.state == STATE_DOWNLOADING:
self.tfile.close ()
self.state = STATE_QUEUED
elif self.state == STATE_WAITING:
self.timeout.cancel ()
self.state = STATE_QUEUED
elif self.state == STATE_INFO or self.state == STATE_INFO_COMPLETED:
self.tfile.close ()
return
data['name'] = self.name
data['url'] = self.url
data['downloaded'] = str (self.downloaded)
data['total'] = str (self.total)
data['state'] = str (self.state)
data['match'] = RAPIDSHARE_MATCH
if hasattr (self, 'location'):
data['location'] = self.location
return data
class RapidshareWriteFile (Thread):
completed_cb = None
progress_cb = None
drun = False
def __init__ (self, url, filename, referer=None, resume=None):
Thread.__init__ (self)
self.url = url
self.filename = filename
self.referer = referer
self.resume = resume
def run (self):
self.c = pycurl.Curl ()
self.c.setopt (pycurl.URL, self.url)
self.c.setopt (pycurl.VERBOSE, 1)
if self.resume != None:
f = open (self.filename, 'ab')
else:
f = open (self.filename, 'wb')
self.c.setopt (pycurl.WRITEFUNCTION, f.write)
if self.referer != None:
self.c.setopt (pycurl.REFERER, self.referer)
self.c.setopt (pycurl.NOPROGRESS, 0)
self.c.setopt (pycurl.PROGRESSFUNCTION, self.download_progress)
self.drun = True
try:
try:
self.c.perform ()
if self.completed_cb != None:
self.completed_cb (self)
except:
pass
finally:
self.c.close ()
f.close ()
def download_progress (self, dt, dd, ut, ud):
if self.resume:
dd += self.resume
if self.progress_cb:
self.progress_cb (dt, dd, ut, ud)
if not self.drun:
return 1
def close (self):
self.drun = False
self.join ()
self.c.close ()
from downman.downloaders.hosters import factory
factory.add_hoster (Rapidshare, RAPIDSHARE_MATCH)
|
gpl-2.0
|
rockyzhang/zhangyanhit-python-for-android-mips
|
sl4atools/fullscreenwrapper2/fullscreenwrapper2.py
|
44
|
25662
|
'''
@copyright: Hariharan Srinath, 2012
@license: This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/
'''
import abc
#import android
import cPickle
import json
import sys
import time
import os
import hashlib
class BaseDict(dict):
'''
implements a dictionary that can be accessed by BaseDict[key] as well as by BaseDict.key to allow more pythonic access
credits: BaseDict pattern at http://code.activestate.com/recipes/473790/ under PSF license
'''
def __init__(self, data=None):
if data:
dict.__init__(self, data)
else:
dict.__init__(self)
def __setattr__(self, name, val):
if name in self.__dict__:
self.__dict__[name]= val
else:
self[name] = val
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
else:
return self[name]
def setDict(self, name, val):
'''
setDict(name, val): Assign *val* to the key *name* of __dict__.
>>> bd.setDict('height', 160)
{}
>>> bd.getDict()['height']
160
'''
self.__dict__[name] = val
return self
def getDict(self):
'''
Return the internal __dict__.
>>> bd.setDict('height', 160)
{}
>>> bd.getDict()['height']
160
'''
return self.__dict__
def setItem(self, name, val):
'''
Set the value of dict key *name* to *val*. Note this dict
is not the __dict__.
'''
self[name] = val
return self
def __getstate__(self):
''' Needed for cPickle in .copy() '''
return self.__dict__.copy()
def __setstate__(self,dict):
''' Needed for cPickle in .copy() '''
self.__dict__.update(dict)
def copy(self):
'''
Return a copy.
'''
return cPickle.loads(cPickle.dumps(self))
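# Usage sketch: keys are reachable both as items and as attributes.
#   bd = BaseDict({'name': 'joe'})
#   bd.age = 30                  # stored via __setattr__ as bd['age']
#   assert bd['age'] == 30 and bd.name == 'joe'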
class EventHandler(object):
'''
Defines an SL4A event handler and provides a matching function to compare vs. Android.eventPoll().result
SL4A eventdata returned by Android.eventWait() or Android.eventPoll().result in general take the form of a dict:
{"data":{"attribute1":value,"attribute2":value}, "name":"event_name", "time":eventtime}
The EventHandler object consists of an event_name, a compare_attribute to look for within the "data" dict & a
compare_value which the compare_attribute will get matched against. It also has optionally an event_handler_fn
which stores a reference to the method to be called and the reference to the view referred to by the event.
fullscreenwrapper2 module pre-defines click_EventHandler, itemclick_EventHandler and key_EventHandler which are
commonly used with Layout views for your convenience
When the FullScreenWrapper2App class which handles events finds a match, it will call the function defined in the
EventHandler passing the view & a copy of the eventdata. The event handler method signature should therefore be:
def event_handler_function(self, view, eventdata):
'''
def __init__(self,event_name, compare_attribute,compare_value,view = None, handler_function=None):
'''
creates an SL4A event handler
SL4A eventdata returned by Android.eventWait() or Android.eventPoll().result in general take the form of a dict:
{"data":{"attribute1":value,"attribute2":value}, "name":"event_name", "time":eventtime}
The EventHandler object consists of an event_name, a compare_attribute to look for within the "data" dict & a
compare_value which the compare_attribute will get matched against. It also has optionally an event_handler_fn
which stores a reference to the method to be called and the reference to the view referred to by the event.
The compare_attribute can be None. if this is the case, then the event_name alone is matched. You can use this feature
to catch other SL4A API events like sensor events
'''
self.view = view
self.event_name = event_name
self.compare_attribute = compare_attribute
self.compare_value = compare_value
self.event_handler_fn = handler_function
def match_event_data(self, event_data):
'''
Provides a matching function to compare event handler vs. data returned by Android.eventPoll().result or Android.eventWait()
SL4A eventdata returned by Android.eventWait() or Android.eventPoll().result in general take the form of a dict:
{"data":{"attribute1":value,"attribute2":value}, "name":"event_name", "time":eventtime}
The function first matches event_data[event_name] and then tries to match event_data["data"][compare_attribute] to compare_value
returns True on match, False on no-match or event not found
The compare_attribute can be None. if this is the case, then the event_name alone is matched. You can use this feature
to catch other SL4A API events like sensor events
'''
try:
if event_data["name"]==self.event_name:
if self.compare_attribute != None:
if event_data["data"][self.compare_attribute]==self.compare_value:
return True
else:
return True
except:
return False
else:
return False
def __str__(self):
'''
convenience function for debugging
'''
return str(self.event_name)+":"+str(self.compare_attribute)+"="+str(self.compare_value)
class click_EventHandler(EventHandler):
'''
predefined click event handler for use with Views
This is the event handler to typically associate with TextView, Button, ImageView etc. You only need to pass the view to
link the click event to & the handler function & rest of event handler initialization is handled automatically
'''
EVENT_NAME = "click"
COMPARE_ATTRIBUTE = "id"
def __init__(self,view, handler_function=None):
'''
predefined click event handler for use with Views
This is the event handler to typically associate with TextView, Button, ImageView etc. You only need to pass the view to
link the click event to & the handler function & rest of event handler initialization is handled automatically
'''
super(click_EventHandler,self).__init__(self.EVENT_NAME,self.COMPARE_ATTRIBUTE,view.view_id,view,handler_function)
class itemclick_EventHandler(EventHandler):
'''
predefined itemclick event handler for use with Views
This is the event handler to typically associate with ListView. You only need to pass the ListView to link the itemclick event
to & the handler function & rest of event handler initialization is handled automatically
'''
EVENT_NAME = "itemclick"
COMPARE_ATTRIBUTE = "id"
def __init__(self,view,handler_function=None):
super(itemclick_EventHandler,self).__init__(self.EVENT_NAME,self.COMPARE_ATTRIBUTE,view.view_id,view,handler_function)
class key_EventHandler(EventHandler):
'''
Predefined key event handler for use with Layouts. Defaults to the Back key with key_id = "4".
This is the event handler typically associated with a layout. You need to pass the key_id to associate with (defaults to
the back key = "4") and the handler function; the rest of the event handler initialization is handled automatically.
'''
EVENT_NAME = "key"
COMPARE_ATTRIBUTE = "key"
def __init__(self,key_match_id="4",view=None,handler_function=None):
super(key_EventHandler,self).__init__(self.EVENT_NAME,self.COMPARE_ATTRIBUTE,key_match_id,view,handler_function)
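# Example (illustrative sketch): catching the Android back key on a layout so that
# pressing back closes the current screen. The handler name is hypothetical.
#
#   def on_back_key(view, event_data):
#       FullScreenWrapper2App.close_layout()
#   my_layout.add_event(key_EventHandler("4", None, on_back_key))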
class _internal_exit_signal():
'''
Internal to fullscreenwrapper2 - do not use in your programs. Used by FullScreenWrapper2App to signal the eventloop to stop.
'''
EVENT_NAME = "fullscreenwrapper2_internal_exit_signal"
COMPARE_ATTRIBUTE = "id"
'''
The event handler in the exit signal checks that it is receiving the exit signal from the same process ID.
'''
eventhandler = EventHandler(EVENT_NAME,COMPARE_ATTRIBUTE,str(os.getpid()),None,None)
'''
Used to post the exit signal to the event loop.
'''
@classmethod
def post_internal_exit_signal(cls):
data = {cls.COMPARE_ATTRIBUTE:str(os.getpid())}
FullScreenWrapper2App.get_android_instance().eventPost(cls.EVENT_NAME, json.dumps(data),True)
class View(object):
'''
Defines a View and provides pythonic access to its properties and a mechanism to define events.
You don't create views yourself. They are created by FullScreenWrapper2App.show_layout() after showing the xml
and are populated in Layout.views, which is a BaseDict, i.e. a dict that allows access both by [key] and by .key.
You can get and set a view's properties simply as Layout.views.viewname.property. Doing this
calls the appropriate SL4A API function such as fullSetProperty().
To add and remove events, use the View.add_event() and View.remove_event() methods. To set the contents of
a ListView, use the View.set_listitems() method.
'''
def __init__(self,view_id, view_type):
'''
View constructor called with view_id and view_type. DO NOT create a view yourself.
Views are created by FullScreenWrapper2App.show_layout() after showing the xml and are populated
in Layout.views, which is a BaseDict, i.e. a dict that allows access both by [key] and by .key.
'''
self.view_type = view_type
self.view_id = view_id
self._events = {}
def add_event(self, eventhandler):
'''
Used to add an EventHandler to the view.
You would typically add one of click_EventHandler or itemclick_EventHandler (for List Views)
to a view
'''
self._events[eventhandler.event_name]=eventhandler
def remove_event(self,event_name):
'''
removes an event added previously by matching the event_name. Use this to temporarily disable a view's click event
'''
self._events.pop(event_name)
def set_listitems(self,listitems):
'''
sets a list for a ListView. Takes a list of str as input
'''
FullScreenWrapper2App.set_list_contents(self.view_id, listitems)
def __setattr__(self, name, value):
'''
This allows pythonic access to setting a View's properties by calling SL4A api
For eg: Layout.views.viewname.color = "#FFFFFFFF"
'''
if name in ("view_type","view_id","_events"):
object.__setattr__(self,name,value)
else:
#sys.stderr.write("calling sl4a to set name:"+str(name)+" value:"+value+"\n")
return FullScreenWrapper2App.set_property_value(self.view_id, name, value)
def __getattr__(self, name):
'''
This allows pythonic access to getting a View's properties by calling SL4A api
For eg: buttontext = Layout.views.buttonname.text
'''
#sys.stderr.write("calling sl4a to get name:"+str(name)+"\n")
return FullScreenWrapper2App.get_property_value(self.view_id, name)
def __str__(self):
    '''
    str(View) returns the View.text, or an empty string if the view has no text
    '''
    try:
        # __str__ must return a string, so fall back to '' when text is missing
        return self.text or ''
    except AttributeError:
        return ''
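# Example (illustrative sketch): pythonic property access and event wiring, as it
# would appear inside a Layout.on_show(). The view ids and handler are hypothetical.
#
#   def on_show(self):
#       self.views.btn_ok.text = "OK"        # calls fullSetProperty() behind the scenes
#       status = self.views.lbl_status.text  # calls fullQueryDetail() behind the scenes
#       self.views.btn_ok.add_event(click_EventHandler(self.views.btn_ok, self.on_ok_click))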
class Layout(object):
'''
Defines a "screen" with an xml layout that contains views. Layout is a abstract class - you MUST derive your own Layout class.
To use a Layout, you need to first derive a class with Layout as a base class: MyLayout(Layout) and define the functions on_show(self)
and on_close(self). in your MyLayout.__init__(), you MUST include a call to super(MyLayout,self).__init__(xml, title)
the xml property stores the xml text. This is used by FullScreenWrapper2App.show_layout() to actually display the layout.
The layout contains importantly a BaseDict called views. A BaseDict is a dict that allows access to members by either [key] or .key
IMPORTANT: The views BaseDict is populated by FullScreenWrapper2App.show_layout() once the xml is displayed on the screen. Your layout's Views that have an id
only become accessible once the xml is displayed & the Layout.on_show() function is caled by the framework. DO NOT try to access views in the __init__() and
put all your view initialization code & event handler attachment in the on_show() function.
The views BaseDict allows you to access & modify properties of your views & allows event based interaction. You would typically access view
properties as Layout.views.view_id.property with the FullScreenWrapper2 framework making the appropriate SL4A api calls
to access the property. To set events for the views, use Layout.views.view_id.add_event(EventHandler)
The FullScreenWrapper2App actually stores layout objects in a stack allowing you to seamlessly the right parent layout on closing a child layout. This
lets you build a natural interaction using the "back" key. Note however that every time a layout is shown, its views are created afresh & the Layout.views
BaseDict is cleared & re-populated and the Layout.on_show() function is called. This is why you should put all your view initialization & event handler setup
code in Layout.on_show()
Layout.on_close method MUST also be defined - though it can simply be a 1 line function containing pass. This is called when a layout is either closed
or a child layout is opened. This method to save state.
Layouts also allow you to set "Layout" events through Layout.add_event() - you would typically use this for things like "back key press" or even for
other events which are accessible through the SL4A EventFacade's event system like sensor data. For catching these events, you would typically set
EventHandler.compare_attribute to None.
Layout events are internally handled by adding a special "layout" view to the views collection identified by a hashtag. You should not yourself
access this special view.
'''
__metaclass__ = abc.ABCMeta
def __init__(self,xml,title):
'''
Creates a layout, sets its xml and title, and initializes the views collection.
NOTE that this DOES NOT display the layout, and the layout's Views are also not populated. The special "layout" view for handling Layout
events, however, is created here.
'''
self.uid = hashlib.md5(str(title)+str(os.getpid())+str(time.time())).hexdigest()
self.title = title
self.xml = xml
self.views = BaseDict()
self._reset()
def _reset(self):
'''
This function will clear the views collection & add the special "Layout" view
which is used to handle layout events internally
'''
self.views.clear()
#adds a dummy view representing the layout for event management
self.views[self.uid]= View(self.uid,"Layout")
def add_event(self, eventhandler):
'''
This function adds a Layout event. This event is added to the special "layout" view in the views collection
'''
self.views[self.uid].add_event(eventhandler)
def remove_event(self,event_name):
'''
This function removes a Layout event by event name. This event is actually stored in the special "layout" view in the views collection
'''
self.views[self.uid].remove_event(event_name)
@abc.abstractmethod
def on_show(self):
'''
The on_show method is called after your layout is displayed to allow you to initialize your layout's views' attributes and set up event handlers.
on_show is an abstract method which MUST be defined in your layout class. FullScreenWrapper2App.show_layout() displays the layout, populates the views BaseDict collection
and then calls Layout.on_show(), letting you do your view initializations and set up event handlers. This function is called every time a layout is displayed - e.g. after a child
layout is closed and the parent layout is shown again on screen.
If you have saved state in Layout.on_close(), be sure to read back state and populate data in your layout's views in on_show().
'''
pass
@abc.abstractmethod
def on_close(self):
'''
The on_close method MUST be defined and is called both when your layout is closed and before displaying a child layout, to let you save state.
If you save state here, you can read it back in the on_show() method.
'''
pass
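# Example (illustrative sketch): a minimal Layout subclass. The xml snippet, view
# ids and handlers are hypothetical; note that views are only touched in on_show(),
# never in __init__().
#
#   class MyLayout(Layout):
#       def __init__(self):
#           xml = '<LinearLayout ...><Button android:id="@+id/btn_ok" .../></LinearLayout>'
#           super(MyLayout, self).__init__(xml, "My Screen")
#       def on_show(self):
#           self.views.btn_ok.add_event(click_EventHandler(self.views.btn_ok, self.on_ok))
#       def on_close(self):
#           pass  # save any state here
#       def on_ok(self, view, event_data):
#           FullScreenWrapper2App.close_layout()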
class FullScreenWrapper2App(object):
'''
FullScreenWrapper2App implements the "App" incorporating an eventloop & a layout stack with methods to display & close layouts and access SL4A FullScreenUI API functions
You SHOULD NOT instantiate a FullScreenWrapper2App but rather, simply call its class methods. To use the app, you first need to call FullScreenWrapper2App.initialize(android_instance) with the droid = android.Android()
object that you have created in you program. This is always subsequently accessible by FullScreenWrapper2App.get_android_instance()
You can then call FullScreenWrapper2App.show_layout() and FullScreenWrapper2App.close_layout() to show and close layouts respectively. FullScreenWrapper2App places layouts in an internal stack. This lets the framework seamlessly handle
parent->show child->close child->show parent type of transitions simplifying your code. It also gives you a method to exit the app by calling FullScreenWrapper2App.exit_FullScreenWrapper2App() at any time. This internally works by signalling
the eventloop to terminate the loop by posting an internal event
the internal function called by FullScreenWrapper2App.show_layout() and close_layout() actually populates the layout's views once it is shown & also calls the Layout.on_show() function
Once you have called show_layout() and your first layout is displayed on screen with its view properties & event handlers set, you should call FullScreenWrapper2App.eventloop() to start the event loop. The event loop will keep polling for
the event queue and dispatch events to the appropriate handler functions
The FullScreenWrapper also defines a few "convenience" functions which are used to set and access fullscreen properties via SL4A api calls
'''
_android_instance = None
_layouts = []
SHOW_LAYOUT_PUSH_OVER_CURRENT = 0
SHOW_LAYOUT_REPLACING_CURRENT = 1
_SHOW_LAYOUT_POP_CURRENT = 2
@classmethod
def initialize(cls, android_instance):
'''
You MUST call this first with your droid = android.Android() instance before calling any other function
'''
cls._android_instance = android_instance
@classmethod
def get_android_instance(cls):
'''
this allows you to access the android.Android() instance set in FullScreenWrapper2App.initialize() at any time
'''
if cls._android_instance!=None:
return cls._android_instance
else:
raise RuntimeError("You need to call FullScreenWrapper2App.initialize(android_instance) first")
@classmethod
def show_layout(cls, layout, show_mode = SHOW_LAYOUT_PUSH_OVER_CURRENT):
'''
This will show the layout, set the title, clear and re-populate the layout's views BaseDict collection and call Layout.on_show().
It will also push the layout onto the top of the FullScreenWrapper2App._layouts[] stack. If a parent layout is already showing,
its on_close() function is called first to let it save state.
'''
if show_mode == cls.SHOW_LAYOUT_PUSH_OVER_CURRENT or show_mode == cls.SHOW_LAYOUT_REPLACING_CURRENT or show_mode == cls._SHOW_LAYOUT_POP_CURRENT:
curlayoutidx = len(cls._layouts)-1
if(curlayoutidx > -1):
cls._layouts[curlayoutidx].on_close()
cls.get_android_instance().fullShow(layout.xml)
cls.get_android_instance().fullSetTitle(layout.title)
viewsdict = cls.get_android_instance().fullQuery().result
layout._reset()
for viewname in iter(viewsdict):
layout.views[viewname] = View(viewname, viewsdict[viewname]["type"])
if show_mode == cls.SHOW_LAYOUT_PUSH_OVER_CURRENT:
cls._layouts.append(layout)
elif show_mode == cls.SHOW_LAYOUT_REPLACING_CURRENT:
if(curlayoutidx > -1):
cls._layouts.pop()
cls._layouts.append(layout)
elif show_mode == cls._SHOW_LAYOUT_POP_CURRENT:
if(curlayoutidx > -1):
cls._layouts.pop()
layout.on_show()
@classmethod
def close_layout(cls):
'''
This will first call a layout's on_close() function to help save state & then close the active layout.
If the layout being closed is a child layout, then this will pop the child layout from the FullScreenWrapper2App._layouts[] stack
and show the parent immediately below the child layout in the stack.
'''
curlayoutidx = len(cls._layouts)-1
if curlayoutidx >0:
cls.show_layout(cls._layouts[curlayoutidx-1], cls._SHOW_LAYOUT_POP_CURRENT)
elif curlayoutidx == 0:
cls.get_android_instance().fullDismiss()
cls.exit_FullScreenWrapper2App()
@classmethod
def exit_FullScreenWrapper2App(cls):
'''
Convenience function to exit the app. This works by signalling the event loop to stop.
'''
cls.get_android_instance().fullDismiss()
#curlayout = cls._layouts[len(cls._layouts)-1]
#curlayout._reset()
_internal_exit_signal.post_internal_exit_signal()
@classmethod
def eventloop(cls):
'''
The main event loop to catch and dispatch events in the active/topmost layout in the _layouts[] stack and its views.
Call this from your main() program once your first layout's event handlers are set up. Events are caught and dispatched
by matching them against EventHandlers in the ACTIVE/TOPMOST layout in the _layouts[] stack and its views.
Note that only the active layout and its views are matched with events. This function also looks for the "exit" signal,
which can be raised by calling exit_FullScreenWrapper2App(), to terminate the event loop.
'''
if len(cls._layouts)<1:
raise RuntimeError("Trying to start eventloop without a layout visible")
while(True):
evt=cls.get_android_instance().eventPoll()
if(len(evt.result)>0):
eventdata=evt.result[0]
#this corrects an eventpost issue where an extra "" wraps the json
try:
    if not isinstance(eventdata["data"], dict):
        eventdata["data"] = json.loads(eventdata["data"])
except (KeyError, ValueError, TypeError):
    # the event has no "data" key or the data is not valid json - leave it as-is
    pass
#sys.stderr.write("in event loop-got an event\n")
#sys.stderr.write(str(eventdata)+"\n")
if _internal_exit_signal.eventhandler.match_event_data(eventdata):
break
curlayout = cls._layouts[len(cls._layouts)-1]
for viewname in iter(curlayout.views):
view = curlayout.views[viewname]
#sys.stderr.write("Checking with"+ view.view_id+"\n")
for eventname in iter(view._events):
event = view._events[eventname]
if event.match_event_data(eventdata):
if event.event_handler_fn != None:
event.event_handler_fn(event.view, eventdata)
#sys.stderr.write("found a match in view "+str(event.view.view_id))
break
#Functions to manipulate contents of the FullScreen - these provide a thin cover over droid.fullset/query
@classmethod
def set_list_contents(cls, view_id, items):
    '''Set the contents of the ListView identified by view_id to the given list of strings'''
    return cls.get_android_instance().fullSetList(view_id, items)
@classmethod
def set_property_value(cls,id, property, value):
'''Set the value of an XML view's property'''
return cls.get_android_instance().fullSetProperty(id, property,value)
@classmethod
def get_property_value(cls,id, property):
'''Get the value of a given XML view's property'''
ret = cls.get_android_instance().fullQueryDetail(id).result
try:
    return ret[property]
except (KeyError, TypeError):
    #sys.stderr.write("The property "+property+" for the view "+id+" was not found\n")
    return None
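# Example (illustrative sketch): the typical application flow for a script using
# fullscreenwrapper2, as described in the FullScreenWrapper2App docstring. The
# android import and MyLayout class are assumptions - substitute your own layout.
#
#   import android
#   droid = android.Android()
#   FullScreenWrapper2App.initialize(droid)
#   FullScreenWrapper2App.show_layout(MyLayout())
#   FullScreenWrapper2App.eventloop()  # blocks until exit_FullScreenWrapper2App() is called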
|
apache-2.0
|
grlee77/scipy
|
scipy/io/matlab/tests/test_mio_utils.py
|
15
|
1593
|
""" Testing
"""
import numpy as np
from numpy.testing import assert_array_equal, assert_
from scipy.io.matlab.mio_utils import squeeze_element, chars_to_strings
def test_squeeze_element():
a = np.zeros((1,3))
assert_array_equal(np.squeeze(a), squeeze_element(a))
# 0-D output from squeeze gives scalar
sq_int = squeeze_element(np.zeros((1,1), dtype=float))
assert_(isinstance(sq_int, float))
# Unless it's a structured array
sq_sa = squeeze_element(np.zeros((1,1),dtype=[('f1', 'f')]))
assert_(isinstance(sq_sa, np.ndarray))
# Squeezing empty arrays maintains their dtypes.
sq_empty = squeeze_element(np.empty(0, np.uint8))
assert sq_empty.dtype == np.uint8
def test_chars_strings():
# chars as strings
strings = ['learn ', 'python', 'fast ', 'here ']
str_arr = np.array(strings, dtype='U6') # shape (4,)
chars = [list(s) for s in strings]
char_arr = np.array(chars, dtype='U1') # shape (4,6)
assert_array_equal(chars_to_strings(char_arr), str_arr)
ca2d = char_arr.reshape((2,2,6))
sa2d = str_arr.reshape((2,2))
assert_array_equal(chars_to_strings(ca2d), sa2d)
ca3d = char_arr.reshape((1,2,2,6))
sa3d = str_arr.reshape((1,2,2))
assert_array_equal(chars_to_strings(ca3d), sa3d)
# Fortran ordered arrays
char_arrf = np.array(chars, dtype='U1', order='F') # shape (4,6)
assert_array_equal(chars_to_strings(char_arrf), str_arr)
# empty array
arr = np.array([['']], dtype='U1')
out_arr = np.array([''], dtype='U1')
assert_array_equal(chars_to_strings(arr), out_arr)
|
bsd-3-clause
|
thinkopensolutions/tkobr-addons
|
unported/tko_project_task_type/project_task.py
|
2
|
2555
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, fields
class task_type(models.Model):
_name = 'task.type'
name = fields.Char(string='Name', required=True)
color = fields.Integer('Color Index', size=1)
task_id = fields.Many2one('project.task', string='Task')
class project_task(models.Model):
_inherit = 'project.task'
type_name = fields.Char(
compute='_get_type_name',
store=True,
string='Name')
task_type_id = fields.Many2one('task.type', string='Type')
color = fields.Integer(compute='_get_color', string='Color', store=False)
@api.multi
def name_get(self):
result = []
for task in self:
task_type = task.task_type_id and task.task_type_id.name or ''
result.append(
(task.id, "%s %s" %
('[' + str(task_type) + ']', task.name or ' ')))
return result
@api.depends('task_type_id.name')
def _get_type_name(self):
for record in self:
if record.task_type_id:
record.type_name = record.task_type_id.name
@api.depends('task_type_id.color')
def _get_color(self):
for record in self:
if record.task_type_id:
record.color = record.task_type_id.color
@api.onchange('task_type_id')
def _change_task_type(self):
if self.task_type_id:
self.color = str(self.task_type_id.color)[-1]
self.type_name = self.task_type_id.name
|
agpl-3.0
|
taedla01/MissionPlanner
|
Lib/site-packages/scipy/signal/tests/test_fir_filter_design.py
|
55
|
13131
|
import numpy as np
from numpy.testing import TestCase, run_module_suite, assert_raises, \
assert_array_almost_equal, assert_
from scipy.signal import firwin, firwin2, kaiserord, freqz, remez
class TestFirwin(TestCase):
def check_response(self, h, expected_response, tol=.05):
N = len(h)
alpha = 0.5 * (N-1)
m = np.arange(0,N) - alpha # time indices of taps
for freq, expected in expected_response:
actual = abs(np.sum(h*np.exp(-1.j*np.pi*m*freq)))
mse = abs(actual-expected)**2
self.assertTrue(mse < tol, 'response not as expected, mse=%g > %g'\
%(mse, tol))
def test_response(self):
N = 51
f = .5
# increase length just to try even/odd
h = firwin(N, f) # low-pass from 0 to f
self.check_response(h, [(.25,1), (.75,0)])
h = firwin(N+1, f, window='nuttall') # specific window
self.check_response(h, [(.25,1), (.75,0)])
h = firwin(N+2, f, pass_zero=False) # stop from 0 to f --> high-pass
self.check_response(h, [(.25,0), (.75,1)])
f1, f2, f3, f4 = .2, .4, .6, .8
h = firwin(N+3, [f1, f2], pass_zero=False) # band-pass filter
self.check_response(h, [(.1,0), (.3,1), (.5,0)])
h = firwin(N+4, [f1, f2]) # band-stop filter
self.check_response(h, [(.1,1), (.3,0), (.5,1)])
h = firwin(N+5, [f1, f2, f3, f4], pass_zero=False, scale=False)
self.check_response(h, [(.1,0), (.3,1), (.5,0), (.7,1), (.9,0)])
h = firwin(N+6, [f1, f2, f3, f4]) # multiband filter
self.check_response(h, [(.1,1), (.3,0), (.5,1), (.7,0), (.9,1)])
h = firwin(N+7, 0.1, width=.03) # low-pass
self.check_response(h, [(.05,1), (.75,0)])
h = firwin(N+8, 0.1, pass_zero=False) # high-pass
self.check_response(h, [(.05,0), (.75,1)])
def mse(self, h, bands):
"""Compute mean squared error versus ideal response across frequency
band.
h -- coefficients
bands -- list of (left, right) tuples relative to 1==Nyquist of
passbands
"""
w, H = freqz(h, worN=1024)
f = w/np.pi
passIndicator = np.zeros(len(w), bool)
for left, right in bands:
passIndicator |= (f>=left) & (f<right)
Hideal = np.where(passIndicator, 1, 0)
mse = np.mean(abs(abs(H)-Hideal)**2)
return mse
def test_scaling(self):
"""
For one lowpass, bandpass, and highpass example filter, this test
checks two things:
- the mean squared error over the frequency domain of the unscaled
filter is smaller than the scaled filter (true for rectangular
window)
- the response of the scaled filter is exactly unity at the center
of the first passband
"""
N = 11
cases = [
([.5], True, (0, 1)),
([0.2, .6], False, (.4, 1)),
([.5], False, (1, 1)),
]
for cutoff, pass_zero, expected_response in cases:
h = firwin(N, cutoff, scale=False, pass_zero=pass_zero, window='ones')
hs = firwin(N, cutoff, scale=True, pass_zero=pass_zero, window='ones')
if len(cutoff) == 1:
if pass_zero:
cutoff = [0] + cutoff
else:
cutoff = cutoff + [1]
self.assertTrue(self.mse(h, [cutoff]) < self.mse(hs, [cutoff]),
'least squares violation')
self.check_response(hs, [expected_response], 1e-12)
class TestFirWinMore(TestCase):
"""Different author, different style, different tests..."""
def test_lowpass(self):
width = 0.04
ntaps, beta = kaiserord(120, width)
taps = firwin(ntaps, cutoff=0.5, window=('kaiser', beta), scale=False)
# Check the symmetry of taps.
assert_array_almost_equal(taps[:ntaps//2], taps[ntaps:ntaps-ntaps//2-1:-1])
# Check the gain at a few samples where we know it should be approximately 0 or 1.
freq_samples = np.array([0.0, 0.25, 0.5-width/2, 0.5+width/2, 0.75, 1.0])
freqs, response = freqz(taps, worN=np.pi*freq_samples)
assert_array_almost_equal(np.abs(response),
[1.0, 1.0, 1.0, 0.0, 0.0, 0.0], decimal=5)
def test_highpass(self):
width = 0.04
ntaps, beta = kaiserord(120, width)
# Ensure that ntaps is odd.
ntaps |= 1
taps = firwin(ntaps, cutoff=0.5, window=('kaiser', beta),
pass_zero=False, scale=False)
# Check the symmetry of taps.
assert_array_almost_equal(taps[:ntaps//2], taps[ntaps:ntaps-ntaps//2-1:-1])
# Check the gain at a few samples where we know it should be approximately 0 or 1.
freq_samples = np.array([0.0, 0.25, 0.5-width/2, 0.5+width/2, 0.75, 1.0])
freqs, response = freqz(taps, worN=np.pi*freq_samples)
assert_array_almost_equal(np.abs(response),
[0.0, 0.0, 0.0, 1.0, 1.0, 1.0], decimal=5)
def test_bandpass(self):
width = 0.04
ntaps, beta = kaiserord(120, width)
taps = firwin(ntaps, cutoff=[0.3, 0.7], window=('kaiser', beta),
pass_zero=False, scale=False)
# Check the symmetry of taps.
assert_array_almost_equal(taps[:ntaps//2], taps[ntaps:ntaps-ntaps//2-1:-1])
# Check the gain at a few samples where we know it should be approximately 0 or 1.
freq_samples = np.array([0.0, 0.2, 0.3-width/2, 0.3+width/2, 0.5,
0.7-width/2, 0.7+width/2, 0.8, 1.0])
freqs, response = freqz(taps, worN=np.pi*freq_samples)
assert_array_almost_equal(np.abs(response),
[0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0], decimal=5)
def test_multi(self):
width = 0.04
ntaps, beta = kaiserord(120, width)
taps = firwin(ntaps, cutoff=[0.2, 0.5, 0.8], window=('kaiser', beta),
pass_zero=True, scale=False)
# Check the symmetry of taps.
assert_array_almost_equal(taps[:ntaps//2], taps[ntaps:ntaps-ntaps//2-1:-1])
# Check the gain at a few samples where we know it should be approximately 0 or 1.
freq_samples = np.array([0.0, 0.1, 0.2-width/2, 0.2+width/2, 0.35,
0.5-width/2, 0.5+width/2, 0.65,
0.8-width/2, 0.8+width/2, 0.9, 1.0])
freqs, response = freqz(taps, worN=np.pi*freq_samples)
assert_array_almost_equal(np.abs(response),
[1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0],
decimal=5)
def test_nyq(self):
"""Test the nyq keyword."""
nyquist = 1000
width = 40.0
relative_width = width/nyquist
ntaps, beta = kaiserord(120, relative_width)
taps = firwin(ntaps, cutoff=[300, 700], window=('kaiser', beta),
pass_zero=False, scale=False, nyq=nyquist)
# Check the symmetry of taps.
assert_array_almost_equal(taps[:ntaps//2], taps[ntaps:ntaps-ntaps//2-1:-1])
# Check the gain at a few samples where we know it should be approximately 0 or 1.
freq_samples = np.array([0.0, 200, 300-width/2, 300+width/2, 500,
700-width/2, 700+width/2, 800, 1000])
freqs, response = freqz(taps, worN=np.pi*freq_samples/nyquist)
assert_array_almost_equal(np.abs(response),
[0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0], decimal=5)
def test_bad_cutoff(self):
"""Test that invalid cutoff argument raises ValueError."""
# cutoff values must be greater than 0 and less than 1.
assert_raises(ValueError, firwin, 99, -0.5)
assert_raises(ValueError, firwin, 99, 1.5)
# Don't allow 0 or 1 in cutoff.
assert_raises(ValueError, firwin, 99, [0, 0.5])
assert_raises(ValueError, firwin, 99, [0.5, 1])
# cutoff values must be strictly increasing.
assert_raises(ValueError, firwin, 99, [0.1, 0.5, 0.2])
assert_raises(ValueError, firwin, 99, [0.1, 0.5, 0.5])
# Must have at least one cutoff value.
assert_raises(ValueError, firwin, 99, [])
# 2D array not allowed.
assert_raises(ValueError, firwin, 99, [[0.1, 0.2],[0.3, 0.4]])
# cutoff values must be less than nyq.
assert_raises(ValueError, firwin, 99, 50.0, nyq=40)
assert_raises(ValueError, firwin, 99, [10, 20, 30], nyq=25)
def test_even_highpass_raises_value_error(self):
"""Test that attempt to create a highpass filter with an even number
of taps raises a ValueError exception."""
assert_raises(ValueError, firwin, 40, 0.5, pass_zero=False)
assert_raises(ValueError, firwin, 40, [.25, 0.5])
class TestFirwin2(TestCase):
def test_invalid_args(self):
# `freq` and `gain` have different lengths.
assert_raises(ValueError, firwin2, 50, [0, 0.5, 1], [0.0, 1.0])
# `nfreqs` is less than `ntaps`.
assert_raises(ValueError, firwin2, 50, [0, 0.5, 1], [0.0, 1.0, 1.0], nfreqs=33)
# Decreasing value in `freq`
assert_raises(ValueError, firwin2, 50, [0, 0.5, 0.4, 1.0], [0, .25, .5, 1.0])
# Value in `freq` repeated more than once.
assert_raises(ValueError, firwin2, 50, [ 0, .1, .1, .1, 1.0],
[0.0, 0.5, 0.75, 1.0, 1.0])
# `freq` does not start at 0.0.
assert_raises(ValueError, firwin2, 50, [0.5, 1.0], [0.0, 1.0])
def test01(self):
width = 0.04
beta = 12.0
ntaps = 400
# Filter is 1 from w=0 to w=0.5, then decreases linearly from 1 to 0 as w
# increases from w=0.5 to w=1 (w=1 is the Nyquist frequency).
freq = [0.0, 0.5, 1.0]
gain = [1.0, 1.0, 0.0]
taps = firwin2(ntaps, freq, gain, window=('kaiser', beta))
freq_samples = np.array([0.0, 0.25, 0.5-width/2, 0.5+width/2,
0.75, 1.0-width/2])
freqs, response = freqz(taps, worN=np.pi*freq_samples)
assert_array_almost_equal(np.abs(response),
[1.0, 1.0, 1.0, 1.0-width, 0.5, width], decimal=5)
def test02(self):
width = 0.04
beta = 12.0
# ntaps must be odd for positive gain at Nyquist.
ntaps = 401
# An ideal highpass filter.
freq = [0.0, 0.5, 0.5, 1.0]
gain = [0.0, 0.0, 1.0, 1.0]
taps = firwin2(ntaps, freq, gain, window=('kaiser', beta))
freq_samples = np.array([0.0, 0.25, 0.5-width, 0.5+width, 0.75, 1.0])
freqs, response = freqz(taps, worN=np.pi*freq_samples)
assert_array_almost_equal(np.abs(response),
[0.0, 0.0, 0.0, 1.0, 1.0, 1.0], decimal=5)
def test03(self):
width = 0.02
ntaps, beta = kaiserord(120, width)
# ntaps must be odd for positive gain at Nyquist.
ntaps = int(ntaps) | 1
freq = [0.0, 0.4, 0.4, 0.5, 0.5, 1.0]
gain = [1.0, 1.0, 0.0, 0.0, 1.0, 1.0]
taps = firwin2(ntaps, freq, gain, window=('kaiser', beta))
freq_samples = np.array([0.0, 0.4-width, 0.4+width, 0.45,
0.5-width, 0.5+width, 0.75, 1.0])
freqs, response = freqz(taps, worN=np.pi*freq_samples)
assert_array_almost_equal(np.abs(response),
[1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0], decimal=5)
def test_nyq(self):
taps1 = firwin2(80, [0.0, 0.5, 1.0], [1.0, 1.0, 0.0])
taps2 = firwin2(80, [0.0, 30.0, 60.0], [1.0, 1.0, 0.0], nyq=60.0)
assert_array_almost_equal(taps1, taps2)
class TestRemez(TestCase):
def test_hilbert(self):
N = 11 # number of taps in the filter
a = 0.1 # width of the transition band
# design a unity gain hilbert bandpass filter from w to 0.5-w
h = remez(N, [ a, 0.5-a ], [ 1 ], type='hilbert')
# make sure the filter has correct # of taps
assert_(len(h) == N, "Number of Taps")
# make sure it is type III (anti-symmetric tap coefficients)
assert_array_almost_equal(h[:(N-1)//2], -h[:-(N-1)//2-1:-1])
# Since the requested response is symmetric, all even coefficients
# should be zero (or in this case really small)
assert_((abs(h[1::2]) < 1e-15).all(), "Even Coefficients Equal Zero")
# now check the frequency response
w, H = freqz(h, 1)
f = w/2/np.pi
Hmag = abs(H)
# should have a zero at 0 and pi (in this case close to zero)
assert_((Hmag[ [0,-1] ] < 0.02).all(), "Zero at zero and pi")
# check that the pass band is close to unity
idx = (f > a) * (f < 0.5-a)
assert_((abs(Hmag[idx] - 1) < 0.015).all(), "Pass Band Close To Unity")
if __name__ == "__main__":
run_module_suite()
|
gpl-3.0
|
DeercoderResearch/theano_exercises
|
02_advanced/02_debugging/ex_02_detect_negative_soln.py
|
13
|
2042
|
import numpy as np
from theano import function
from theano import tensor as T
import theano
from theano.compile import Mode
class NegativeVariableError(Exception):
pass
def get_neg_detection_mode():
"""
Returns a theano Mode that detects if any negative value occurs in the
evaluation of a theano function.
"""
class NegDetectionMode(Mode):
def __init__(self):
def flatten(l):
    # recursively flatten nested lists/tuples; a non-sequence
    # input is returned as a single-element list
    if not isinstance(l, (list, tuple)):
        return [l]
    rval = []
    for elem in l:
        if isinstance(elem, (list, tuple)):
            rval.extend(flatten(elem))
        else:
            rval.append(elem)
    return rval
def do_check_on(var, nd, f):
if var.min() < 0:
raise NegativeVariableError()
def neg_check(i, node, fn):
inputs = fn.inputs
for x in flatten(inputs):
do_check_on(x, node, fn)
fn()
outputs = fn.outputs
for j, x in enumerate(flatten(outputs)):
do_check_on(x, node, fn)
wrap_linker = theano.gof.WrapLinkerMany(
[theano.gof.OpWiseCLinker()],
[neg_check])
super(NegDetectionMode, self).__init__(wrap_linker,
optimizer='fast_run')
return NegDetectionMode()
if __name__ == "__main__":
x = T.scalar()
x.name = 'x'
y = T.nnet.sigmoid(x)
y.name = 'y'
z = - y
z.name = 'z'
mode = get_neg_detection_mode()
f = function([x], z, mode=mode)
caught = False
try:
f(0.)
except NegativeVariableError:
caught = True
if not caught:
print "You failed to catch a negative value."
quit(-1)
f = function([x], y, mode=mode)
y1 = f(0.)
f = function([x], y)
assert np.allclose(f(0.), y1)
print "SUCCESS!"
|
bsd-3-clause
|
nagyistoce/koalacloud
|
boto/sdb/item.py
|
19
|
6974
|
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import base64
class Item(dict):
"""
A ``dict`` sub-class that serves as an object representation of a
SimpleDB item. An item in SDB is similar to a row in a relational
database. Items belong to a :py:class:`Domain <boto.sdb.domain.Domain>`,
which is similar to a table in a relational database.
The keys on instances of this object correspond to attributes that are
stored on the SDB item.
.. tip:: While it is possible to instantiate this class directly, you may
want to use the convenience methods on :py:class:`boto.sdb.domain.Domain`
for that purpose. For example, :py:meth:`boto.sdb.domain.Domain.get_item`.
"""
def __init__(self, domain, name='', active=False):
"""
:type domain: :py:class:`boto.sdb.domain.Domain`
:param domain: The domain that this item belongs to.
:param str name: The name of this item. This name will be used when
querying for items using methods like
:py:meth:`boto.sdb.domain.Domain.get_item`
"""
dict.__init__(self)
self.domain = domain
self.name = name
self.active = active
self.request_id = None
self.encoding = None
self.in_attribute = False
self.converter = self.domain.connection.converter
def startElement(self, name, attrs, connection):
if name == 'Attribute':
self.in_attribute = True
self.encoding = attrs.get('encoding', None)
return None
def decode_value(self, value):
if self.encoding == 'base64':
self.encoding = None
return base64.decodestring(value)
else:
return value
def endElement(self, name, value, connection):
if name == 'ItemName':
self.name = self.decode_value(value)
elif name == 'Name':
if self.in_attribute:
self.last_key = self.decode_value(value)
else:
self.name = self.decode_value(value)
elif name == 'Value':
if self.last_key in self:
if not isinstance(self[self.last_key], list):
self[self.last_key] = [self[self.last_key]]
value = self.decode_value(value)
if self.converter:
value = self.converter.decode(value)
self[self.last_key].append(value)
else:
value = self.decode_value(value)
if self.converter:
value = self.converter.decode(value)
self[self.last_key] = value
elif name == 'BoxUsage':
try:
    connection.box_usage += float(value)
except (AttributeError, ValueError):
    # connection may not track box usage, or value may not parse as float
    pass
elif name == 'RequestId':
self.request_id = value
elif name == 'Attribute':
self.in_attribute = False
else:
setattr(self, name, value)
def load(self):
"""
Loads or re-loads this item's attributes from SDB.
.. warning::
If you have changed attribute values on an Item instance,
this method will over-write the values if they are different in
SDB. Any local attributes that don't yet exist in SDB
are left untouched.
"""
self.domain.get_attributes(self.name, item=self)
def save(self, replace=True):
"""
Saves this item to SDB.
:param bool replace: If ``True``, delete any attributes on the remote
SDB item that have a ``None`` value on this object.
"""
self.domain.put_attributes(self.name, self, replace)
# Delete any attributes set to "None"
if replace:
del_attrs = []
for name in self:
if self[name] == None:
del_attrs.append(name)
if len(del_attrs) > 0:
self.domain.delete_attributes(self.name, del_attrs)
def add_value(self, key, value):
"""
Helps set or add to attributes on this item. If you are adding a new
attribute that has yet to be set, it will simply create an attribute
named ``key`` with your given ``value`` as its value. If you are
adding a value to an existing attribute, this method will convert the
attribute to a list (if it isn't already) and append your new value
to said list.
For clarification, consider the following interactive session:
.. code-block:: python
>>> item = some_domain.get_item('some_item')
>>> item.has_key('some_attr')
False
>>> item.add_value('some_attr', 1)
>>> item['some_attr']
1
>>> item.add_value('some_attr', 2)
>>> item['some_attr']
[1, 2]
:param str key: The attribute to add a value to.
:param object value: The value to set or append to the attribute.
"""
if key in self:
# We already have this key on the item.
if not isinstance(self[key], list):
# The key isn't already a list, take its current value and
# convert it to a list with the only member being the
# current value.
self[key] = [self[key]]
# Add the new value to the list.
self[key].append(value)
else:
# This is a new attribute, just set it.
self[key] = value
def delete(self):
"""
Deletes this item in SDB.
.. note:: This local Python object remains in its current state
after deletion, this only deletes the remote item in SDB.
"""
self.domain.delete_item(self)
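# Example (illustrative sketch): a typical item round-trip. The domain, item and
# attribute names below are hypothetical.
#
#   item = some_domain.get_item('some_item')
#   item.add_value('tags', 'new-tag')  # becomes a list once a second value is added
#   item['owner'] = 'mitch'
#   item.save()  # with replace=True, attributes set to None are deleted remotely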
|
apache-2.0
|
imjonsnooow/vivisect
|
vivisect/qt/views.py
|
4
|
11441
|
import vqt.tree as vq_tree
import vivisect.base as viv_base
import envi.qt.memory as e_q_memory
import visgraph.pathcore as vg_path
import envi.qt.memcanvas as e_q_memcanvas
import vivisect.qt.ctxmenu as v_q_ctxmenu
from PyQt4 import QtGui,QtCore
from vqt.main import *
from vqt.common import *
from vivisect.const import *
class VivNavModel(e_q_memory.EnviNavModel):
pass
class VivView(VqtView, viv_base.VivEventCore):
'''
In any vivisect list/tree view, the first column will be
an address expression. Other than that, all bets are off.
'''
def __init__(self, vw, parent=None):
VqtView.__init__(self, parent=parent)
viv_base.VivEventCore.__init__(self, vw)
self.vw = vw
self.vivgui = vw.getVivGui()
self.vivgui.addEventCore(self)
class VivLocModel(VqtModel):
columns = ('Address','Location')
class VivLocView(VivView):
def __init__(self, vw, loctypes, parent=None):
VivView.__init__(self, vw, parent=parent)
self.loctypes = loctypes
locs = []
for ltype in self.loctypes:
locs.extend( vw.getLocations(ltype) )
rows = [ ('0x%.8x' % loc[0], vw.reprLocation(loc), loc) for loc in locs ]
model = VivLocModel(rows=rows)
self.setModel(model)
def VWE_ADDLOCATION(self, vw, event, loc):
lva, lsize, ltype, linfo = loc
if ltype in self.loctypes:
self.model().sourceModel().append( ('0x%.8x' % lva, self.vw.reprLocation(loc), loc) )
def VWE_DELLOCATION(self, vw, event, einfo):
lva, lsize, ltype, linfo = einfo
if ltype in self.loctypes:
# TODO: removing the row from the model is not implemented yet
pass
def getLocView(vw, loctypes, title, parent=None):
view = VivLocView( vw, loctypes, parent=parent)
view.setWindowTitle(title)
return vw.getVivGui().vqDockWidget( view, floating=True )
class VQVivTreeView(vq_tree.VQTreeView, viv_base.VivEventCore):
window_title = "VivTreeView"
_viv_navcol = 0
def __init__(self, vw, vwqgui):
vq_tree.VQTreeView.__init__(self, parent=vwqgui)
viv_base.VivEventCore.__init__(self, vw)
self.vw = vw
self.vwqgui = vwqgui
self._viv_va_nodes = {}
vwqgui.addEventCore(self)
self.setWindowTitle(self.window_title)
self.setSortingEnabled(True)
self.setDragEnabled( True )
self.doubleClicked.connect( self.doubleClickedSignal )
def doubleClickedSignal(self, idx):
if idx.isValid() and self._viv_navcol != None:
pnode = idx.internalPointer()
expr = pnode.rowdata[self._viv_navcol]
vqtevent('envi:nav:expr', ('viv',expr,None))
return True
def contextMenuEvent(self, event):
menu = QtGui.QMenu(parent=self)
idxlist = self.selectedIndexes()
if not idxlist:
return
idx = idxlist[0]
if idx.isValid() and self._viv_navcol != None:
pnode = idx.internalPointer()
expr = pnode.rowdata[self._viv_navcol]
v_q_ctxmenu.buildContextMenu(self.vw, expr=expr, menu=menu)
menu.exec_(event.globalPos())
def vivAddRow(self, va, *row):
node = self.model().append(row)
node.va = va
self._viv_va_nodes[va] = node
return node
def vivDelRow(self, va):
node = self._viv_va_nodes.pop(va, None)
if node:
self.model().vqDelRow(node)
def vivSetData(self, va, col, val):
'''
Set a row/col in the data model. This will quietly fail
if we don't contain a row for the va (makes users not need
to check...)
Example: view.vivSetData(0x41414141, 2, 'Woot Function')
NOTE: This is for use by the VWE_ event callback handlers!
'''
pnode = self._viv_va_nodes.get(va)
if not pnode:
return
idx = self.model().createIndex(pnode.row(), col, pnode)
# We are *not* the edit role...
self.model().setData(idx, val, role=None)
def vivGetData(self, va, col):
pnode = self._viv_va_nodes.get(va)
if not pnode:
return None
return pnode.rowdata[col]
class VQVivLocView(VQVivTreeView):
loctypes = ()
def __init__(self, vw, vwqgui):
VQVivTreeView.__init__(self, vw, vwqgui)
model = VivNavModel(self._viv_navcol, parent=self, columns=self.columns)
self.setModel(model)
self.vqLoad()
self.vqSizeColumns()
def vqLoad(self):
for l in self.loctypes:
for lva, lsize, ltype, linfo in self.vw.getLocations(l):
self.vivAddLocation(lva, lsize, ltype, linfo)
def VWE_DELLOCATION(self, vw, event, einfo):
lva, lsize, ltype, linfo = einfo
self.vivDelRow(lva)
def VWE_ADDLOCATION(self, vw, event, einfo):
lva, lsize, ltype, linfo = einfo
if ltype in self.loctypes:
self.vivAddLocation(lva, lsize, ltype, linfo)
def vivAddLocation(self, lva, lsize, ltype, linfo):
print "FIXME OVERRIDE"
class VQVivStringsView(VQVivLocView):
columns = ('Address','String')
loctypes = (LOC_STRING, LOC_UNI)
window_title = 'Strings'
def vivAddLocation(self, lva, lsize, ltype, linfo):
s = self.vw.readMemory(lva, lsize)
if ltype == LOC_UNI:
s = s.decode('utf-16le', 'ignore')
self.vivAddRow(lva, '0x%.8x' % lva, repr(s))
class VQVivImportsView(VQVivLocView):
columns = ('Address', 'Library', 'Function')
loctypes = (LOC_IMPORT,)
window_title = 'Imports'
def vivAddLocation(self, lva, lsize, ltype, linfo):
libname, funcname = linfo.split('.', 1)
self.vivAddRow(lva, '0x%.8x' % lva, libname, funcname)
class VQVivStructsView(VQVivLocView):
columns = ('Address', 'Structure', 'Loc Name')
loctypes = (LOC_STRUCT,)
window_title = 'Structures'
def vivAddLocation(self, lva, lsize, ltype, linfo):
sym = self.vw.getSymByAddr(lva)
self.vivAddRow(lva, '0x%.8x' % lva, linfo, str(sym))
class VQVivExportsView(VQVivTreeView):
window_title = 'Exports'
columns = ('Address', 'File', 'Export')
def __init__(self, vw, vwqgui):
VQVivTreeView.__init__(self, vw, vwqgui)
self.setModel( VivNavModel(self._viv_navcol, self, columns=self.columns) )
self.vqLoad()
self.vqSizeColumns()
def vqLoad(self):
for va, etype, ename, fname in self.vw.getExports():
self.vivAddExport(va, etype, ename, fname)
def vivAddExport(self, va, etype, ename, fname):
self.vivAddRow(va, '0x%.8x' % va, fname, ename)
def VWE_ADDEXPORT(self, vw, event, einfo):
va, etype, ename, fname = einfo
self.vivAddExport(va, etype, ename, fname)
class VQVivSegmentsView(VQVivTreeView):
_viv_navcol = 2
window_title = 'Segments'
columns = ('Module','Section', 'Address', 'Size')
def __init__(self, vw, vwqgui):
VQVivTreeView.__init__(self, vw, vwqgui)
self.setModel( VivNavModel(self._viv_navcol, self, columns=self.columns) )
self.vqLoad()
self.vqSizeColumns()
def vqLoad(self):
for va, size, sname, fname in self.vw.getSegments():
self.vivAddRow(va, fname, sname, '0x%.8x' % va, str(size))
class VQVivFunctionsView(VQVivTreeView):
_viv_navcol = 0
window_title = 'Functions'
columns = ('Name','Address', 'Size', 'Ref Count')
def __init__(self, vw, vwqgui):
VQVivTreeView.__init__(self, vw, vwqgui)
self.setModel( VivNavModel(self._viv_navcol, self, columns=self.columns) )
self.vqLoad()
self.vqSizeColumns()
def vqLoad(self):
for fva in self.vw.getFunctions():
self.vivAddFunction(fva)
def VWE_ADDFUNCTION(self, vw, event, einfo):
fva, fmeta = einfo
self.vivAddFunction(fva)
def VWE_DELFUNCTION(self, vw, event, einfo):
fva, fmeta = einfo
self.vivDelRow(fva)
def VWE_SETNAME(self, vw, event, einfo):
va, name = einfo
self.vivSetData(va, 0, name)
def vivAddFunction(self, fva):
size = self.vw.getFunctionMeta(fva, "Size", -1)
fname = self.vw.getName(fva)
xcount = len(self.vw.getXrefsTo(fva))
self.vivAddRow(fva, fname, '0x%.8x' % fva, size, xcount)
def VWE_ADDXREF(self, vw, event, einfo):
fromva, tova, rtype, rflag = einfo
cnt = self.vivGetData(tova, 3)
if cnt == None:
return
self.vivSetData(tova, 3, cnt + 1)
def VWE_DELXREF(self, vw, event, einfo):
fromva, tova, rtype, rflag = einfo
cnt = self.vivGetData(tova, 3)
if cnt == None:
return
self.vivSetData(tova, 3, cnt - 1)
def VWE_SETFUNCMETA(self, vw, event, einfo):
funcva, key, value = einfo
if key == "Size":
self.vivSetData(funcva, 2, value)
vaset_coltypes = {
VASET_STRING:str,
VASET_ADDRESS:long,
VASET_INTEGER:long,
}
class VQVivVaSetView(VQVivTreeView):
_viv_navcol = 0
def __init__(self, vw, vwqgui, setname):
self._va_setname = setname
setdef = vw.getVaSetDef( setname )
cols = [ cname for (cname,ctype) in setdef ]
VQVivTreeView.__init__(self, vw, vwqgui)
self.setModel( VivNavModel(self._viv_navcol, self, columns=cols) )
self.vqLoad()
self.vqSizeColumns()
self.setWindowTitle('Va Set: %s' % setname)
def VWE_SETVASETROW(self, vw, event, einfo):
setname, row = einfo
if setname == self._va_setname:
va = row[0]
row = list(row)
row[0] = '0x%.8x' % va
self.vivAddRow( va, *row )
def vqLoad(self):
setdef = self.vw.getVaSetDef( self._va_setname )
rows = self.vw.getVaSetRows( self._va_setname )
for row in rows:
va = row[0]
row = list(row)
row[0] = '0x%.8x' % va
self.vivAddRow(va, *row)
class VQXrefView(VQVivTreeView):
_viv_navcol = 0
def __init__(self, vw, vwqgui, xrefs=(), title='Xrefs'):
self.window_title = title
VQVivTreeView.__init__(self, vw, vwqgui)
model = VivNavModel(self._viv_navcol, self, columns=('Xref From', 'Xref Type', 'Xref Flags', 'Func Name'))
self.setModel(model)
for fromva, tova, rtype, rflags in xrefs:
fva = vw.getFunction(fromva)
funcname = ''
if fva:
funcname = vw.getName(fva)
self.vivAddRow(fromva, '0x%.8x' % fromva, rtype, rflags, funcname)
self.vqSizeColumns()
class VQVivNamesView(VQVivTreeView):
_viv_navcol = 0
window_title = 'Workspace Names'
columns = ('Address', 'Name')
def __init__(self, vw, vwqgui):
VQVivTreeView.__init__(self, vw, vwqgui)
self.setModel( VivNavModel(self._viv_navcol, self, columns=self.columns) )
self.vqLoad()
self.vqSizeColumns()
def vqLoad(self):
for name in self.vw.getNames():
self.vivAddName(name)
def VWE_SETNAME(self, vw, event, einfo):
va, name = einfo
#self.vivSetData(va, 1, name)
self.vivAddName(einfo)
def vivAddName(self, nifo):
va, name = nifo
if self.vivGetData(va, 0) == None:
self.vivAddRow(va, '0x%.8x' % va, name)
else:
self.vivSetData(va, 1, name)
|
apache-2.0
|
dreamsxin/kbengine
|
kbe/res/scripts/common/Lib/test/test_structmembers.py
|
87
|
4889
|
import unittest
from test import support
# Skip this test if the _testcapi module isn't available.
support.import_module('_testcapi')
from _testcapi import _test_structmembersType, \
CHAR_MAX, CHAR_MIN, UCHAR_MAX, \
SHRT_MAX, SHRT_MIN, USHRT_MAX, \
INT_MAX, INT_MIN, UINT_MAX, \
LONG_MAX, LONG_MIN, ULONG_MAX, \
LLONG_MAX, LLONG_MIN, ULLONG_MAX, \
PY_SSIZE_T_MAX, PY_SSIZE_T_MIN
ts=_test_structmembersType(False, # T_BOOL
1, # T_BYTE
2, # T_UBYTE
3, # T_SHORT
4, # T_USHORT
5, # T_INT
6, # T_UINT
7, # T_LONG
8, # T_ULONG
23, # T_PYSSIZET
9.99999,# T_FLOAT
10.1010101010, # T_DOUBLE
"hi" # T_STRING_INPLACE
)
class ReadWriteTests(unittest.TestCase):
def test_bool(self):
ts.T_BOOL = True
self.assertEqual(ts.T_BOOL, True)
ts.T_BOOL = False
self.assertEqual(ts.T_BOOL, False)
self.assertRaises(TypeError, setattr, ts, 'T_BOOL', 1)
def test_byte(self):
ts.T_BYTE = CHAR_MAX
self.assertEqual(ts.T_BYTE, CHAR_MAX)
ts.T_BYTE = CHAR_MIN
self.assertEqual(ts.T_BYTE, CHAR_MIN)
ts.T_UBYTE = UCHAR_MAX
self.assertEqual(ts.T_UBYTE, UCHAR_MAX)
def test_short(self):
ts.T_SHORT = SHRT_MAX
self.assertEqual(ts.T_SHORT, SHRT_MAX)
ts.T_SHORT = SHRT_MIN
self.assertEqual(ts.T_SHORT, SHRT_MIN)
ts.T_USHORT = USHRT_MAX
self.assertEqual(ts.T_USHORT, USHRT_MAX)
def test_int(self):
ts.T_INT = INT_MAX
self.assertEqual(ts.T_INT, INT_MAX)
ts.T_INT = INT_MIN
self.assertEqual(ts.T_INT, INT_MIN)
ts.T_UINT = UINT_MAX
self.assertEqual(ts.T_UINT, UINT_MAX)
def test_long(self):
ts.T_LONG = LONG_MAX
self.assertEqual(ts.T_LONG, LONG_MAX)
ts.T_LONG = LONG_MIN
self.assertEqual(ts.T_LONG, LONG_MIN)
ts.T_ULONG = ULONG_MAX
self.assertEqual(ts.T_ULONG, ULONG_MAX)
def test_py_ssize_t(self):
ts.T_PYSSIZET = PY_SSIZE_T_MAX
self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MAX)
ts.T_PYSSIZET = PY_SSIZE_T_MIN
self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MIN)
@unittest.skipUnless(hasattr(ts, "T_LONGLONG"), "long long not present")
def test_longlong(self):
ts.T_LONGLONG = LLONG_MAX
self.assertEqual(ts.T_LONGLONG, LLONG_MAX)
ts.T_LONGLONG = LLONG_MIN
self.assertEqual(ts.T_LONGLONG, LLONG_MIN)
ts.T_ULONGLONG = ULLONG_MAX
self.assertEqual(ts.T_ULONGLONG, ULLONG_MAX)
## make sure these will accept a plain int as well as a long
ts.T_LONGLONG = 3
self.assertEqual(ts.T_LONGLONG, 3)
ts.T_ULONGLONG = 4
self.assertEqual(ts.T_ULONGLONG, 4)
def test_bad_assignments(self):
integer_attributes = [
'T_BOOL',
'T_BYTE', 'T_UBYTE',
'T_SHORT', 'T_USHORT',
'T_INT', 'T_UINT',
'T_LONG', 'T_ULONG',
'T_PYSSIZET'
]
if hasattr(ts, 'T_LONGLONG'):
integer_attributes.extend(['T_LONGLONG', 'T_ULONGLONG'])
# issue8014: this produced 'bad argument to internal function'
# internal error
for nonint in None, 3.2j, "full of eels", {}, []:
for attr in integer_attributes:
self.assertRaises(TypeError, setattr, ts, attr, nonint)
def test_inplace_string(self):
self.assertEqual(ts.T_STRING_INPLACE, "hi")
self.assertRaises(TypeError, setattr, ts, "T_STRING_INPLACE", "s")
self.assertRaises(TypeError, delattr, ts, "T_STRING_INPLACE")
class TestWarnings(unittest.TestCase):
def test_byte_max(self):
with support.check_warnings(('', RuntimeWarning)):
ts.T_BYTE = CHAR_MAX+1
def test_byte_min(self):
with support.check_warnings(('', RuntimeWarning)):
ts.T_BYTE = CHAR_MIN-1
def test_ubyte_max(self):
with support.check_warnings(('', RuntimeWarning)):
ts.T_UBYTE = UCHAR_MAX+1
def test_short_max(self):
with support.check_warnings(('', RuntimeWarning)):
ts.T_SHORT = SHRT_MAX+1
def test_short_min(self):
with support.check_warnings(('', RuntimeWarning)):
ts.T_SHORT = SHRT_MIN-1
def test_ushort_max(self):
with support.check_warnings(('', RuntimeWarning)):
ts.T_USHORT = USHRT_MAX+1
def test_main(verbose=None):
support.run_unittest(__name__)
if __name__ == "__main__":
test_main(verbose=True)
|
lgpl-3.0
|
jarmoni/ansible-modules-core
|
packaging/language/gem.py
|
76
|
8146
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Johan Wiren <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: gem
short_description: Manage Ruby gems
description:
- Manage installation and uninstallation of Ruby gems.
version_added: "1.1"
options:
name:
description:
- The name of the gem to be managed.
required: true
state:
description:
- The desired state of the gem. C(latest) ensures that the latest version is installed.
required: false
choices: [present, absent, latest]
default: present
gem_source:
description:
- The path to a local gem used as installation source.
required: false
include_dependencies:
description:
- Whether to include dependencies or not.
required: false
choices: [ "yes", "no" ]
default: "yes"
repository:
description:
- The repository from which the gem will be installed
required: false
aliases: [source]
user_install:
description:
- Install gem in user's local gems cache or for all users
required: false
default: "yes"
version_added: "1.3"
executable:
description:
- Override the path to the gem executable
required: false
version_added: "1.4"
version:
description:
- Version of the gem to be installed/removed.
required: false
pre_release:
description:
- Allow installation of pre-release versions of the gem.
required: false
default: "no"
version_added: "1.6"
include_doc:
description:
- Install with or without docs.
required: false
default: "no"
version_added: "2.0"
build_flags:
description:
- Allow adding build flags for gem compilation
required: false
version_added: "2.0"
author:
- "Ansible Core Team"
- "Johan Wiren"
'''
EXAMPLES = '''
# Installs version 1.0 of vagrant.
- gem: name=vagrant version=1.0 state=present
# Installs latest available version of rake.
- gem: name=rake state=latest
# Installs rake version 1.0 from a local gem on disk.
- gem: name=rake gem_source=/path/to/gems/rake-1.0.gem state=present
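# Installs rake, passing build flags through to gem compilation (the flag value is illustrative).
- gem: name=rake state=present build_flags="--with-opt-dir=/usr/local"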
'''
import re
def get_rubygems_path(module):
if module.params['executable']:
return module.params['executable'].split(' ')
else:
return [ module.get_bin_path('gem', True) ]
def get_rubygems_version(module):
cmd = get_rubygems_path(module) + [ '--version' ]
(rc, out, err) = module.run_command(cmd, check_rc=True)
match = re.match(r'^(\d+)\.(\d+)\.(\d+)', out)
if not match:
return None
return tuple(int(x) for x in match.groups())
def get_installed_versions(module, remote=False):
cmd = get_rubygems_path(module)
cmd.append('query')
if remote:
cmd.append('--remote')
if module.params['repository']:
cmd.extend([ '--source', module.params['repository'] ])
cmd.append('-n')
cmd.append('^%s$' % module.params['name'])
(rc, out, err) = module.run_command(cmd, check_rc=True)
installed_versions = []
for line in out.splitlines():
match = re.match(r"\S+\s+\((.+)\)", line)
if match:
versions = match.group(1)
for version in versions.split(', '):
installed_versions.append(version.split()[0])
return installed_versions
def exists(module):
if module.params['state'] == 'latest':
remoteversions = get_installed_versions(module, remote=True)
if remoteversions:
module.params['version'] = remoteversions[0]
installed_versions = get_installed_versions(module)
if module.params['version']:
if module.params['version'] in installed_versions:
return True
else:
if installed_versions:
return True
return False
def uninstall(module):
if module.check_mode:
return
cmd = get_rubygems_path(module)
cmd.append('uninstall')
if module.params['version']:
cmd.extend([ '--version', module.params['version'] ])
else:
cmd.append('--all')
cmd.append('--executable')
cmd.append(module.params['name'])
module.run_command(cmd, check_rc=True)
def install(module):
if module.check_mode:
return
ver = get_rubygems_version(module)
if ver:
major = ver[0]
else:
major = None
cmd = get_rubygems_path(module)
cmd.append('install')
if module.params['version']:
cmd.extend([ '--version', module.params['version'] ])
if module.params['repository']:
cmd.extend([ '--source', module.params['repository'] ])
if not module.params['include_dependencies']:
cmd.append('--ignore-dependencies')
else:
if major and major < 2:
cmd.append('--include-dependencies')
if module.params['user_install']:
cmd.append('--user-install')
else:
cmd.append('--no-user-install')
if module.params['pre_release']:
cmd.append('--pre')
if not module.params['include_doc']:
if major and major < 2:
cmd.append('--no-rdoc')
cmd.append('--no-ri')
else:
cmd.append('--no-document')
cmd.append(module.params['gem_source'])
if module.params['build_flags']:
cmd.extend([ '--', module.params['build_flags'] ])
module.run_command(cmd, check_rc=True)
def main():
module = AnsibleModule(
argument_spec = dict(
executable = dict(required=False, type='str'),
gem_source = dict(required=False, type='str'),
include_dependencies = dict(required=False, default=True, type='bool'),
name = dict(required=True, type='str'),
repository = dict(required=False, aliases=['source'], type='str'),
state = dict(required=False, default='present', choices=['present','absent','latest'], type='str'),
user_install = dict(required=False, default=True, type='bool'),
pre_release = dict(required=False, default=False, type='bool'),
include_doc = dict(required=False, default=False, type='bool'),
version = dict(required=False, type='str'),
build_flags = dict(required=False, type='str'),
),
supports_check_mode = True,
mutually_exclusive = [ ['gem_source','repository'], ['gem_source','version'] ],
)
if module.params['version'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot specify version when state=latest")
if module.params['gem_source'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot maintain state=latest when installing from local source")
if not module.params['gem_source']:
module.params['gem_source'] = module.params['name']
changed = False
if module.params['state'] in [ 'present', 'latest']:
if not exists(module):
install(module)
changed = True
elif module.params['state'] == 'absent':
if exists(module):
uninstall(module)
changed = True
result = {}
result['name'] = module.params['name']
result['state'] = module.params['state']
if module.params['version']:
result['version'] = module.params['version']
result['changed'] = changed
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
|
gpl-3.0
|
openthread/openthread
|
tools/harness-automation/cases/med_6_2_1.py
|
9
|
1871
|
#!/usr/bin/env python
#
# Copyright (c) 2019, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class MED_6_2_1(HarnessCase):
role = HarnessCase.ROLE_MED
case = '6 2 1'
golden_devices_required = 2
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
cneill/designate
|
designate/openstack/common/report/views/text/process.py
|
7
|
1236
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Provides process view
This module provides a view for
visualizing processes in human-readable form
"""
import designate.openstack.common.report.views.jinja_view as jv
class ProcessView(jv.JinjaView):
"""A Process View
This view displays process models defined by
:class:`openstack.common.report.models.process.ProcessModel`
"""
VIEW_TEXT = (
"Process {{ pid }} (under {{ parent_pid }}) "
"[ run by: {{ username }} ({{ uids.real|default('unknown uid') }}),"
" state: {{ state }} ]\n"
"{% for child in children %}"
" {{ child }}"
"{% endfor %}"
)
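# Hedged usage sketch (not part of designate): a template shaped like
# VIEW_TEXT can be rendered with plain jinja2, given a model exposing the
# fields the template references. Values below are illustrative.
#
#   import jinja2
#   model = {'pid': 1234, 'parent_pid': 1, 'username': 'stack',
#            'uids': {'real': 1000}, 'state': 'running', 'children': []}
#   jinja2.Template(ProcessView.VIEW_TEXT).render(**model)
#   # -> "Process 1234 (under 1) [ run by: stack (1000), state: running ]\n"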
|
apache-2.0
|
hujiajie/pa-chromium
|
tools/telemetry/third_party/png/png.py
|
161
|
157814
|
#!/usr/bin/env python
# $URL$
# $Rev$
# png.py - PNG encoder/decoder in pure Python
#
# Copyright (C) 2006 Johann C. Rocholl <[email protected]>
# Portions Copyright (C) 2009 David Jones <[email protected]>
# And probably portions Copyright (C) 2006 Nicko van Someren <[email protected]>
#
# Original concept by Johann C. Rocholl.
#
# LICENSE (The MIT License)
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Changelog (recent first):
# 2009-03-11 David: interlaced bit depth < 8 (writing).
# 2009-03-10 David: interlaced bit depth < 8 (reading).
# 2009-03-04 David: Flat and Boxed pixel formats.
# 2009-02-26 David: Palette support (writing).
# 2009-02-23 David: Bit-depths < 8; better PNM support.
# 2006-06-17 Nicko: Reworked into a class, faster interlacing.
# 2006-06-17 Johann: Very simple prototype PNG decoder.
# 2006-06-17 Nicko: Test suite with various image generators.
# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support.
# 2006-06-15 Johann: Scanline iterator interface for large input files.
# 2006-06-09 Johann: Very simple prototype PNG encoder.
# Incorporated into Bangai-O Development Tools by drj on 2009-02-11 from
# http://trac.browsershots.org/browser/trunk/pypng/lib/png.py?rev=2885
# Incorporated into pypng by drj on 2009-03-12 from
# //depot/prj/bangaio/master/code/png.py#67
"""
Pure Python PNG Reader/Writer
This Python module implements support for PNG images (see PNG
specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads
and writes PNG files with all allowable bit depths (1/2/4/8/16/24/32/48/64
bits per pixel) and colour combinations: greyscale (1/2/4/8/16 bit); RGB,
RGBA, LA (greyscale with alpha) with 8/16 bits per channel; colour mapped
images (1/2/4/8 bit). Adam7 interlacing is supported for reading and
writing. A number of optional chunks can be specified (when writing)
and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``.
For help, type ``import png; help(png)`` in your python interpreter.
A good place to start is the :class:`Reader` and :class:`Writer` classes.
Requires Python 2.3. Limited support is available for Python 2.2, but
not everything works. Best with Python 2.4 and higher. Installation is
trivial, but see the ``README.txt`` file (with the source distribution)
for details.
This file can also be used as a command-line utility to convert
`Netpbm <http://netpbm.sourceforge.net/>`_ PNM files to PNG, and the reverse conversion from PNG to
PNM. The interface is similar to that of the ``pnmtopng`` program from
Netpbm. Type ``python png.py --help`` at the shell prompt
for usage and a list of options.
A note on spelling and terminology
----------------------------------
Generally British English spelling is used in the documentation. So
that's "greyscale" and "colour". This not only matches the author's
native language, it's also used by the PNG specification.
The major colour models supported by PNG (and hence by PyPNG) are:
greyscale, RGB, greyscale--alpha, RGB--alpha. These are sometimes
referred to using the abbreviations: L, RGB, LA, RGBA. In this case
each letter abbreviates a single channel: *L* is for Luminance or Luma or
Lightness which is the channel used in greyscale images; *R*, *G*, *B* stand
for Red, Green, Blue, the components of a colour image; *A* stands for
Alpha, the opacity channel (used for transparency effects, but higher
values are more opaque, so it makes sense to call it opacity).
A note on formats
-----------------
When getting pixel data out of this module (reading) and presenting
data to this module (writing) there are a number of ways the data could
be represented as a Python value. Generally this module uses one of
three formats called "flat row flat pixel", "boxed row flat pixel", and
"boxed row boxed pixel". Basically the concern is whether each pixel
and each row comes in its own little tuple (box), or not.
Consider an image that is 3 pixels wide by 2 pixels high, and each pixel
has RGB components:
Boxed row flat pixel::
list([R,G,B, R,G,B, R,G,B],
[R,G,B, R,G,B, R,G,B])
Each row appears as its own list, but the pixels are flattened so that
three values for one pixel simply follow the three values for the previous
pixel. This is the most common format used, because it provides a good
compromise between space and convenience. PyPNG regards itself as
at liberty to replace any sequence type with any sufficiently compatible
other sequence type; in practice each row is an array (from the array
module), and the outer list is sometimes an iterator rather than an
explicit list (so that streaming is possible).
Flat row flat pixel::
[R,G,B, R,G,B, R,G,B,
R,G,B, R,G,B, R,G,B]
The entire image is one single giant sequence of colour values.
Generally an array will be used (to save space), not a list.
Boxed row boxed pixel::
list([ (R,G,B), (R,G,B), (R,G,B) ],
[ (R,G,B), (R,G,B), (R,G,B) ])
Each row appears in its own list, but each pixel also appears in its own
tuple. A serious memory burn in Python.
In all cases the top row comes first, and for each row the pixels are
ordered from left-to-right. Within a pixel the values appear in the
order, R-G-B-A (or L-A for greyscale--alpha).
There is a fourth format, mentioned because it is used internally,
is close to what lies inside a PNG file itself, and has some support
from the public API. This format is called packed. When packed,
each row is a sequence of bytes (integers from 0 to 255), just as
it is before PNG scanline filtering is applied. When the bit depth
is 8 this is essentially the same as boxed row flat pixel; when the
bit depth is less than 8, several pixels are packed into each byte;
when the bit depth is 16 (the only value more than 8 that is supported
by the PNG image format) each pixel value is decomposed into 2 bytes
(and `packed` is a misnomer). This format is used by the
:meth:`Writer.write_packed` method. It isn't usually a convenient
format, but may be just right if the source data for the PNG image
comes from something that uses a similar format (for example, 1-bit
BMPs, or another PNG file).
And now, my famous members
--------------------------
"""
# http://www.python.org/doc/2.2.3/whatsnew/node5.html
from __future__ import generators
__version__ = "$URL$ $Rev$"
from array import array
try: # See :pyver:old
import itertools
except:
pass
import math
# http://www.python.org/doc/2.4.4/lib/module-operator.html
import operator
import struct
import sys
import zlib
# http://www.python.org/doc/2.4.4/lib/module-warnings.html
import warnings
try:
import pyximport
pyximport.install()
import cpngfilters as pngfilters
except ImportError:
pass
__all__ = ['Image', 'Reader', 'Writer', 'write_chunks', 'from_array']
# The PNG signature.
# http://www.w3.org/TR/PNG/#5PNG-file-signature
_signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10)
_adam7 = ((0, 0, 8, 8),
(4, 0, 8, 8),
(0, 4, 4, 8),
(2, 0, 4, 4),
(0, 2, 2, 4),
(1, 0, 2, 2),
(0, 1, 1, 2))
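# Each _adam7 entry is (xstart, ystart, xstep, ystep) for one of the seven
# Adam7 interlace passes; for example the first pass, (0, 0, 8, 8), visits
# every 8th pixel of every 8th row, starting at the top-left corner.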
def group(s, n):
# See
# http://www.python.org/doc/2.6/library/functions.html#zip
return zip(*[iter(s)]*n)
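# For example, group('abcdef', 3) -> [('a','b','c'), ('d','e','f')] on
# Python 2 (where zip is eager); values left over after the last full
# n-tuple are silently dropped.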
def isarray(x):
"""Same as ``isinstance(x, array)`` except on Python 2.2, where it
always returns ``False``. This helps PyPNG work on Python 2.2.
"""
try:
return isinstance(x, array)
except:
return False
try: # see :pyver:old
array.tostring
except:
def tostring(row):
l = len(row)
return struct.pack('%dB' % l, *row)
else:
def tostring(row):
"""Convert row of bytes to string. Expects `row` to be an
``array``.
"""
return row.tostring()
# Conditionally convert to bytes. Works on Python 2 and Python 3.
try:
bytes('', 'ascii')
def strtobytes(x): return bytes(x, 'iso8859-1')
def bytestostr(x): return str(x, 'iso8859-1')
except:
strtobytes = str
bytestostr = str
def interleave_planes(ipixels, apixels, ipsize, apsize):
"""
Interleave (colour) planes, e.g. RGB + A = RGBA.
Return an array of pixels consisting of the `ipsize` elements of data
from each pixel in `ipixels` followed by the `apsize` elements of data
from each pixel in `apixels`. Conventionally `ipixels` and
`apixels` are byte arrays so the sizes are bytes, but it actually
works with any arrays of the same type. The returned array is the
same type as the input arrays which should be the same type as each other.
"""
itotal = len(ipixels)
atotal = len(apixels)
newtotal = itotal + atotal
newpsize = ipsize + apsize
# Set up the output buffer
# See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356
out = array(ipixels.typecode)
# It's annoying that there is no cheap way to set the array size :-(
out.extend(ipixels)
out.extend(apixels)
# Interleave in the pixel data
for i in range(ipsize):
out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize]
for i in range(apsize):
out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize]
return out
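# Worked example (illustrative): two RGB pixels interleaved with their
# alpha values to give RGBA.
#   rgb = array('B', [255, 0, 0, 0, 255, 0])
#   a = array('B', [128, 64])
#   interleave_planes(rgb, a, 3, 1)
#   # -> array('B', [255, 0, 0, 128, 0, 255, 0, 64])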
def check_palette(palette):
"""Check a palette argument (to the :class:`Writer` class) for validity.
Returns the palette as a list if okay; raises an exception otherwise.
"""
# None is the default and is allowed.
if palette is None:
return None
p = list(palette)
if not (0 < len(p) <= 256):
raise ValueError("a palette must have between 1 and 256 entries")
seen_triple = False
for i,t in enumerate(p):
if len(t) not in (3,4):
raise ValueError(
"palette entry %d: entries must be 3- or 4-tuples." % i)
if len(t) == 3:
seen_triple = True
if seen_triple and len(t) == 4:
raise ValueError(
"palette entry %d: all 4-tuples must precede all 3-tuples" % i)
for x in t:
if int(x) != x or not(0 <= x <= 255):
raise ValueError(
"palette entry %d: values must be integer: 0 <= x <= 255" % i)
return p
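# For example, check_palette([(255,0,0,128), (0,255,0)]) is accepted (the
# 4-tuple precedes the 3-tuple), whereas reversing the two entries raises
# ValueError, because a 4-tuple may not follow a 3-tuple.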
class Error(Exception):
prefix = 'Error'
def __str__(self):
return self.prefix + ': ' + ' '.join(self.args)
class FormatError(Error):
"""Problem with input file format. In other words, PNG file does
not conform to the specification in some way and is invalid.
"""
prefix = 'FormatError'
class ChunkError(FormatError):
prefix = 'ChunkError'
class Writer:
"""
PNG encoder in pure Python.
"""
def __init__(self, width=None, height=None,
size=None,
greyscale=False,
alpha=False,
bitdepth=8,
palette=None,
transparent=None,
background=None,
gamma=None,
compression=None,
interlace=False,
bytes_per_sample=None, # deprecated
planes=None,
colormap=None,
maxval=None,
chunk_limit=2**20):
"""
Create a PNG encoder object.
Arguments:
width, height
Image size in pixels, as two separate arguments.
size
Image size (w,h) in pixels, as single argument.
greyscale
Input data is greyscale, not RGB.
alpha
Input data has alpha channel (RGBA or LA).
bitdepth
Bit depth: from 1 to 16.
palette
Create a palette for a colour mapped image (colour type 3).
transparent
Specify a transparent colour (create a ``tRNS`` chunk).
background
Specify a default background colour (create a ``bKGD`` chunk).
gamma
Specify a gamma value (create a ``gAMA`` chunk).
compression
zlib compression level: 0 (none) to 9 (more compressed); default: -1 or None.
interlace
Create an interlaced image.
chunk_limit
Write multiple ``IDAT`` chunks to save memory.
The image size (in pixels) can be specified either by using the
`width` and `height` arguments, or with the single `size`
argument. If `size` is used it should be a pair (*width*,
*height*).
`greyscale` and `alpha` are booleans that specify whether
an image is greyscale (or colour), and whether it has an
alpha channel (or not).
`bitdepth` specifies the bit depth of the source pixel values.
Each source pixel value must be an integer between 0 and
``2**bitdepth-1``. For example, 8-bit images have values
between 0 and 255. PNG only stores images with bit depths of
1,2,4,8, or 16. When `bitdepth` is not one of these values,
the next highest valid bit depth is selected, and an ``sBIT``
(significant bits) chunk is generated that specifies the original
precision of the source image. In this case the supplied pixel
values will be rescaled to fit the range of the selected bit depth.
The details of which bit depth / colour model combinations the
PNG file format supports directly, are somewhat arcane
(refer to the PNG specification for full details). Briefly:
"small" bit depths (1,2,4) are only allowed with greyscale and
colour mapped images; colour mapped images cannot have bit depth
16.
For colour mapped images (in other words, when the `palette`
argument is specified) the `bitdepth` argument must match one of
the valid PNG bit depths: 1, 2, 4, or 8. (It is valid to have a
PNG image with a palette and an ``sBIT`` chunk, but the meaning
is slightly different; it would be awkward to press the
`bitdepth` argument into service for this.)
The `palette` option, when specified, causes a colour mapped image
to be created: the PNG colour type is set to 3; greyscale
must not be set; alpha must not be set; transparent must
not be set; the bit depth must be 1,2,4, or 8. When a colour
mapped image is created, the pixel values are palette indexes
and the `bitdepth` argument specifies the size of these indexes
(not the size of the colour values in the palette).
The palette argument value should be a sequence of 3- or
4-tuples. 3-tuples specify RGB palette entries; 4-tuples
specify RGBA palette entries. If both 4-tuples and 3-tuples
appear in the sequence then all the 4-tuples must come
before all the 3-tuples. A ``PLTE`` chunk is created; if there
are 4-tuples then a ``tRNS`` chunk is created as well. The
``PLTE`` chunk will contain all the RGB triples in the same
sequence; the ``tRNS`` chunk will contain the alpha channel for
all the 4-tuples, in the same sequence. Palette entries
are always 8-bit.
If specified, the `transparent` and `background` parameters must
be a tuple with three integer values for red, green, blue, or
a simple integer (or singleton tuple) for a greyscale image.
If specified, the `gamma` parameter must be a positive number
(generally, a float). A ``gAMA`` chunk will be created. Note that
this will not change the values of the pixels as they appear in
the PNG file, they are assumed to have already been converted
appropriately for the gamma specified.
The `compression` argument specifies the compression level to
be used by the ``zlib`` module. Values from 1 to 9 specify
compression, with 9 being "more compressed" (usually smaller
and slower, but it doesn't always work out that way). 0 means
no compression. -1 and ``None`` both mean that the default
level of compression will be picked by the ``zlib`` module
(which is generally acceptable).
If `interlace` is true then an interlaced image is created
(using PNG's so far only interlace method, *Adam7*). This does not
affect how the pixels should be presented to the encoder, rather
it changes how they are arranged into the PNG file. On slow
connexions interlaced images can be partially decoded by the
browser to give a rough view of the image that is successively
refined as more image data appears.
.. note ::
Enabling the `interlace` option requires the entire image
to be processed in working memory.
`chunk_limit` is used to limit the amount of memory used whilst
compressing the image. In order to avoid using large amounts of
memory, multiple ``IDAT`` chunks may be created.
"""
# At the moment the `planes` argument is ignored;
# its purpose is to act as a dummy so that
# ``Writer(x, y, **info)`` works, where `info` is a dictionary
# returned by Reader.read and friends.
# Ditto for `colormap`.
# A couple of helper functions come first. Best skipped if you
# are reading through.
def isinteger(x):
try:
return int(x) == x
except:
return False
def check_color(c, which):
"""Checks that a colour argument for transparent or
background options is the right form. Also "corrects" bare
integers to 1-tuples.
"""
if c is None:
return c
if greyscale:
try:
l = len(c)
except TypeError:
c = (c,)
if len(c) != 1:
raise ValueError("%s for greyscale must be 1-tuple" %
which)
if not isinteger(c[0]):
raise ValueError(
"%s colour for greyscale must be integer" %
which)
else:
if not (len(c) == 3 and
isinteger(c[0]) and
isinteger(c[1]) and
isinteger(c[2])):
raise ValueError(
"%s colour must be a triple of integers" %
which)
return c
if size:
if len(size) != 2:
raise ValueError(
"size argument should be a pair (width, height)")
if width is not None and width != size[0]:
raise ValueError(
"size[0] (%r) and width (%r) should match when both are used."
% (size[0], width))
if height is not None and height != size[1]:
raise ValueError(
"size[1] (%r) and height (%r) should match when both are used."
% (size[1], height))
width,height = size
del size
if width <= 0 or height <= 0:
raise ValueError("width and height must be greater than zero")
if not isinteger(width) or not isinteger(height):
raise ValueError("width and height must be integers")
# http://www.w3.org/TR/PNG/#7Integers-and-byte-order
if width > 2**32-1 or height > 2**32-1:
raise ValueError("width and height cannot exceed 2**32-1")
if alpha and transparent is not None:
raise ValueError(
"transparent colour not allowed with alpha channel")
if bytes_per_sample is not None:
warnings.warn('please use bitdepth instead of bytes_per_sample',
DeprecationWarning)
if bytes_per_sample not in (0.125, 0.25, 0.5, 1, 2):
raise ValueError(
"bytes per sample must be .125, .25, .5, 1, or 2")
bitdepth = int(8*bytes_per_sample)
del bytes_per_sample
if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth:
raise ValueError("bitdepth (%r) must be a postive integer <= 16" %
bitdepth)
self.rescale = None
if palette:
if bitdepth not in (1,2,4,8):
raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8")
if transparent is not None:
raise ValueError("transparent and palette not compatible")
if alpha:
raise ValueError("alpha and palette not compatible")
if greyscale:
raise ValueError("greyscale and palette not compatible")
else:
# No palette, check for sBIT chunk generation.
if alpha or not greyscale:
if bitdepth not in (8,16):
targetbitdepth = (8,16)[bitdepth > 8]
self.rescale = (bitdepth, targetbitdepth)
bitdepth = targetbitdepth
del targetbitdepth
else:
assert greyscale
assert not alpha
if bitdepth not in (1,2,4,8,16):
if bitdepth > 8:
targetbitdepth = 16
elif bitdepth == 3:
targetbitdepth = 4
else:
assert bitdepth in (5,6,7)
targetbitdepth = 8
self.rescale = (bitdepth, targetbitdepth)
bitdepth = targetbitdepth
del targetbitdepth
if bitdepth < 8 and (alpha or not greyscale and not palette):
raise ValueError(
"bitdepth < 8 only permitted with greyscale or palette")
if bitdepth > 8 and palette:
raise ValueError(
"bit depth must be 8 or less for images with palette")
transparent = check_color(transparent, 'transparent')
background = check_color(background, 'background')
# It's important that the true boolean values (greyscale, alpha,
# colormap, interlace) are converted to bool because Iverson's
# convention is relied upon later on.
self.width = width
self.height = height
self.transparent = transparent
self.background = background
self.gamma = gamma
self.greyscale = bool(greyscale)
self.alpha = bool(alpha)
self.colormap = bool(palette)
self.bitdepth = int(bitdepth)
self.compression = compression
self.chunk_limit = chunk_limit
self.interlace = bool(interlace)
self.palette = check_palette(palette)
self.color_type = 4*self.alpha + 2*(not greyscale) + 1*self.colormap
assert self.color_type in (0,2,3,4,6)
self.color_planes = (3,1)[self.greyscale or self.colormap]
self.planes = self.color_planes + self.alpha
# :todo: fix for bitdepth < 8
self.psize = (self.bitdepth/8) * self.planes
def make_palette(self):
"""Create the byte sequences for a ``PLTE`` and if necessary a
``tRNS`` chunk. Returned as a pair (*p*, *t*). *t* will be
``None`` if no ``tRNS`` chunk is necessary.
"""
p = array('B')
t = array('B')
for x in self.palette:
p.extend(x[0:3])
if len(x) > 3:
t.append(x[3])
p = tostring(p)
t = tostring(t)
if t:
return p,t
return p,None
def write(self, outfile, rows):
"""Write a PNG image to the output file. `rows` should be
an iterable that yields each row in boxed row flat pixel format.
The rows should be the rows of the original image, so there
should be ``self.height`` rows of ``self.width * self.planes`` values.
If `interlace` is specified (when creating the instance), then
an interlaced PNG file will be written. Supply the rows in the
normal image order; the interlacing is carried out internally.
.. note ::
Interlacing will require the entire image to be in working memory.
"""
if self.interlace:
fmt = 'BH'[self.bitdepth > 8]
a = array(fmt, itertools.chain(*rows))
return self.write_array(outfile, a)
else:
nrows = self.write_passes(outfile, rows)
if nrows != self.height:
raise ValueError(
"rows supplied (%d) does not match height (%d)" %
(nrows, self.height))
def write_passes(self, outfile, rows, packed=False):
"""
Write a PNG image to the output file.
Most users are expected to find the :meth:`write` or
:meth:`write_array` method more convenient.
The rows should be given to this method in the order that
they appear in the output file. For straightlaced images,
this is the usual top to bottom ordering, but for interlaced
images the rows should have already been interlaced before
passing them to this function.
`rows` should be an iterable that yields each row. When
`packed` is ``False`` the rows should be in boxed row flat pixel
format; when `packed` is ``True`` each row should be a packed
sequence of bytes.
"""
# http://www.w3.org/TR/PNG/#5PNG-file-signature
outfile.write(_signature)
# http://www.w3.org/TR/PNG/#11IHDR
write_chunk(outfile, 'IHDR',
struct.pack("!2I5B", self.width, self.height,
self.bitdepth, self.color_type,
0, 0, self.interlace))
# See :chunk:order
# http://www.w3.org/TR/PNG/#11gAMA
if self.gamma is not None:
write_chunk(outfile, 'gAMA',
struct.pack("!L", int(round(self.gamma*1e5))))
# See :chunk:order
# http://www.w3.org/TR/PNG/#11sBIT
if self.rescale:
write_chunk(outfile, 'sBIT',
struct.pack('%dB' % self.planes,
*[self.rescale[0]]*self.planes))
# :chunk:order: Without a palette (PLTE chunk), ordering is
# relatively relaxed. With one, gAMA chunk must precede PLTE
# chunk which must precede tRNS and bKGD.
# See http://www.w3.org/TR/PNG/#5ChunkOrdering
if self.palette:
p,t = self.make_palette()
write_chunk(outfile, 'PLTE', p)
if t:
# tRNS chunk is optional. Only needed if palette entries
# have alpha.
write_chunk(outfile, 'tRNS', t)
# http://www.w3.org/TR/PNG/#11tRNS
if self.transparent is not None:
if self.greyscale:
write_chunk(outfile, 'tRNS',
struct.pack("!1H", *self.transparent))
else:
write_chunk(outfile, 'tRNS',
struct.pack("!3H", *self.transparent))
# http://www.w3.org/TR/PNG/#11bKGD
if self.background is not None:
if self.greyscale:
write_chunk(outfile, 'bKGD',
struct.pack("!1H", *self.background))
else:
write_chunk(outfile, 'bKGD',
struct.pack("!3H", *self.background))
# http://www.w3.org/TR/PNG/#11IDAT
if self.compression is not None:
compressor = zlib.compressobj(self.compression)
else:
compressor = zlib.compressobj()
# Choose an extend function based on the bitdepth. The extend
# function packs/decomposes the pixel values into bytes and
# stuffs them onto the data array.
data = array('B')
if self.bitdepth == 8 or packed:
extend = data.extend
elif self.bitdepth == 16:
# Decompose into bytes
def extend(sl):
fmt = '!%dH' % len(sl)
data.extend(array('B', struct.pack(fmt, *sl)))
else:
# Pack into bytes
assert self.bitdepth < 8
# samples per byte
spb = int(8/self.bitdepth)
def extend(sl):
a = array('B', sl)
# Adding padding bytes so we can group into a whole
# number of spb-tuples.
l = float(len(a))
extra = math.ceil(l / float(spb))*spb - l
a.extend([0]*int(extra))
# Pack into bytes
l = group(a, spb)
l = map(lambda e: reduce(lambda x,y:
(x << self.bitdepth) + y, e), l)
data.extend(l)
if self.rescale:
oldextend = extend
factor = \
float(2**self.rescale[1]-1) / float(2**self.rescale[0]-1)
def extend(sl):
oldextend(map(lambda x: int(round(factor*x)), sl))
# Build the first row, testing mostly to see if we need to
# change the extend function to cope with NumPy integer types
# (they cause our ordinary definition of extend to fail, so we
# wrap it). See
# http://code.google.com/p/pypng/issues/detail?id=44
enumrows = enumerate(rows)
del rows
# First row's filter type.
data.append(0)
# :todo: Certain exceptions in the call to ``.next()`` or the
# following try would indicate no row data supplied.
# Should catch.
i,row = enumrows.next()
try:
# If this fails...
extend(row)
except:
# ... try a version that converts the values to int first.
# Not only does this work for the (slightly broken) NumPy
# types, there are probably lots of other, unknown, "nearly"
# int types it works for.
def wrapmapint(f):
return lambda sl: f(map(int, sl))
extend = wrapmapint(extend)
del wrapmapint
extend(row)
for i,row in enumrows:
# Add "None" filter type. Currently, it's essential that
# this filter type be used for every scanline as we do not
# mark the first row of a reduced pass image; that means we
# could accidentally compute the wrong filtered scanline if
# we used "up", "average", or "paeth" on such a line.
data.append(0)
extend(row)
if len(data) > self.chunk_limit:
compressed = compressor.compress(tostring(data))
if len(compressed):
# print >> sys.stderr, len(data), len(compressed)
write_chunk(outfile, 'IDAT', compressed)
# Because of our very witty definition of ``extend``,
# above, we must re-use the same ``data`` object. Hence
# we use ``del`` to empty this one, rather than create a
# fresh one (which would be my natural FP instinct).
del data[:]
if len(data):
compressed = compressor.compress(tostring(data))
else:
compressed = ''
flushed = compressor.flush()
if len(compressed) or len(flushed):
# print >> sys.stderr, len(data), len(compressed), len(flushed)
write_chunk(outfile, 'IDAT', compressed + flushed)
# http://www.w3.org/TR/PNG/#11IEND
write_chunk(outfile, 'IEND')
return i+1
def write_array(self, outfile, pixels):
"""
Write an array in flat row flat pixel format as a PNG file on
the output file. See also :meth:`write` method.
"""
if self.interlace:
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.array_scanlines(pixels))
def write_packed(self, outfile, rows):
"""
Write PNG file to `outfile`. The pixel data comes from `rows`
which should be in boxed row packed format. Each row should be
a sequence of packed bytes.
Technically, this method does work for interlaced images but it
is best avoided. For interlaced images, the rows should be
presented in the order that they appear in the file.
This method should not be used when the source image bit depth
is not one naturally supported by PNG; the bit depth should be
1, 2, 4, 8, or 16.
"""
if self.rescale:
raise Error("write_packed method not suitable for bit depth %d" %
self.rescale[0])
return self.write_passes(outfile, rows, packed=True)
def convert_pnm(self, infile, outfile):
"""
Convert a PNM file containing raw pixel data into a PNG file
with the parameters set in the writer object. Works for
(binary) PGM, PPM, and PAM formats.
"""
if self.interlace:
pixels = array('B')
pixels.fromfile(infile,
(self.bitdepth/8) * self.color_planes *
self.width * self.height)
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.file_scanlines(infile))
def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile):
"""
Convert a PPM and PGM file containing raw pixel data into a
PNG outfile with the parameters set in the writer object.
"""
pixels = array('B')
pixels.fromfile(ppmfile,
(self.bitdepth/8) * self.color_planes *
self.width * self.height)
apixels = array('B')
apixels.fromfile(pgmfile,
(self.bitdepth/8) *
self.width * self.height)
pixels = interleave_planes(pixels, apixels,
(self.bitdepth/8) * self.color_planes,
(self.bitdepth/8))
if self.interlace:
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.array_scanlines(pixels))
def file_scanlines(self, infile):
"""
Generates boxed rows in flat pixel format, from the input file
`infile`. It assumes that the input file is in a "Netpbm-like"
binary format, and is positioned at the beginning of the first
pixel. The number of pixels to read is taken from the image
dimensions (`width`, `height`, `planes`) and the number of bytes
per value is implied by the image `bitdepth`.
"""
# Values per row
vpr = self.width * self.planes
row_bytes = vpr
if self.bitdepth > 8:
assert self.bitdepth == 16
row_bytes *= 2
fmt = '>%dH' % vpr
def line():
return array('H', struct.unpack(fmt, infile.read(row_bytes)))
else:
def line():
scanline = array('B', infile.read(row_bytes))
return scanline
for y in range(self.height):
yield line()
def array_scanlines(self, pixels):
"""
Generates boxed rows (flat pixels) from flat rows (flat pixels)
in an array.
"""
# Values per row
vpr = self.width * self.planes
stop = 0
for y in range(self.height):
start = stop
stop = start + vpr
yield pixels[start:stop]
def array_scanlines_interlace(self, pixels):
"""
Generator for interlaced scanlines from an array. `pixels` is
the full source image in flat row flat pixel format. The
generator yields each scanline of the reduced passes in turn, in
boxed row flat pixel format.
"""
# http://www.w3.org/TR/PNG/#8InterlaceMethods
# Array type.
fmt = 'BH'[self.bitdepth > 8]
# Value per row
vpr = self.width * self.planes
for xstart, ystart, xstep, ystep in _adam7:
if xstart >= self.width:
continue
# Pixels per row (of reduced image)
ppr = int(math.ceil((self.width-xstart)/float(xstep)))
# number of values in reduced image row.
row_len = ppr*self.planes
for y in range(ystart, self.height, ystep):
if xstep == 1:
offset = y * vpr
yield pixels[offset:offset+vpr]
else:
row = array(fmt)
# There's no easier way to set the length of an array
row.extend(pixels[0:row_len])
offset = y * vpr + xstart * self.planes
end_offset = (y+1) * vpr
skip = self.planes * xstep
for i in range(self.planes):
row[i::self.planes] = \
pixels[offset+i:end_offset:skip]
yield row
def write_chunk(outfile, tag, data=strtobytes('')):
"""
Write a PNG chunk to the output file, including length and
checksum.
"""
# http://www.w3.org/TR/PNG/#5Chunk-layout
outfile.write(struct.pack("!I", len(data)))
tag = strtobytes(tag)
outfile.write(tag)
outfile.write(data)
checksum = zlib.crc32(tag)
checksum = zlib.crc32(data, checksum)
checksum &= 2**32-1
outfile.write(struct.pack("!I", checksum))
def write_chunks(out, chunks):
"""Create a PNG file by writing out the chunks."""
out.write(_signature)
for chunk in chunks:
write_chunk(out, *chunk)
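# The layout produced by write_chunk is: 4-byte big-endian length, 4-byte
# chunk type, the data, then a 4-byte CRC-32 computed over type + data.
# For instance, the empty IEND chunk serialises to the 12 bytes
#   00 00 00 00  49 45 4E 44  AE 42 60 82
# which is why every well-formed PNG file ends with that exact sequence.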
def filter_scanline(type, line, fo, prev=None):
"""Apply a scanline filter to a scanline. `type` specifies the
filter type (0 to 4); `line` specifies the current (unfiltered)
scanline as a sequence of bytes; `prev` specifies the previous
(unfiltered) scanline as a sequence of bytes. `fo` specifies the
filter offset; normally this is size of a pixel in bytes (the number
of bytes per sample times the number of channels), but when this is
< 1 (for bit depths < 8) then the filter offset is 1.
"""
assert 0 <= type < 5
# The output array. Which, pathetically, we extend one-byte at a
# time (fortunately this is linear).
out = array('B', [type])
def sub():
ai = -fo
for x in line:
if ai >= 0:
x = (x - line[ai]) & 0xff
out.append(x)
ai += 1
def up():
for i,x in enumerate(line):
x = (x - prev[i]) & 0xff
out.append(x)
def average():
ai = -fo
for i,x in enumerate(line):
if ai >= 0:
x = (x - ((line[ai] + prev[i]) >> 1)) & 0xff
else:
x = (x - (prev[i] >> 1)) & 0xff
out.append(x)
ai += 1
def paeth():
# http://www.w3.org/TR/PNG/#9Filter-type-4-Paeth
ai = -fo # also used for ci
for i,x in enumerate(line):
a = 0
b = prev[i]
c = 0
if ai >= 0:
a = line[ai]
c = prev[ai]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc: Pr = a
elif pb <= pc: Pr = b
else: Pr = c
x = (x - Pr) & 0xff
out.append(x)
ai += 1
if not prev:
# We're on the first line. Some of the filters can be reduced
# to simpler cases which makes handling the line "off the top"
# of the image simpler. "up" becomes "none"; "paeth" becomes
# "left" (non-trivial, but true). "average" needs to be handled
# specially.
if type == 2: # "up" on the first line is the same as "none"
out[0] = 0 # overwrite the filter-type byte already stored in out
out.extend(line)
return out
elif type == 3:
prev = [0]*len(line)
elif type == 4: # "paeth"
type = 1
if type == 0:
out.extend(line)
elif type == 1:
sub()
elif type == 2:
up()
elif type == 3:
average()
else: # type == 4
paeth()
return out
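# Worked example (illustrative): applying the "sub" filter (type 1) with a
# one-byte filter offset to the scanline [10, 20, 30]:
#   filter_scanline(1, array('B', [10, 20, 30]), 1)
#   # -> array('B', [1, 10, 10, 10])
# The leading 1 is the filter-type byte; each following byte is the
# difference from the byte fo positions to its left, modulo 256.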
def from_array(a, mode=None, info={}):
"""Create a PNG :class:`Image` object from a 2- or 3-dimensional array.
One application of this function is easy PIL-style saving:
``png.from_array(pixels, 'L').save('foo.png')``.
.. note ::
The use of the term *3-dimensional* is for marketing purposes
only. It doesn't actually work. Please bear with us. Meanwhile
enjoy the complimentary snacks (on request) and please use a
2-dimensional array.
Unless they are specified using the *info* parameter, the PNG's
height and width are taken from the array size. For a 3 dimensional
array the first axis is the height; the second axis is the width;
and the third axis is the channel number. Thus an RGB image that is
16 pixels high and 8 wide will use an array that is 16x8x3. For 2
dimensional arrays the first axis is the height, but the second axis
is ``width*channels``, so an RGB image that is 16 pixels high and 8
wide will use a 2-dimensional array that is 16x24 (each row will be
8*3==24 sample values).
*mode* is a string that specifies the image colour format in a
PIL-style mode. It can be:
``'L'``
greyscale (1 channel)
``'LA'``
greyscale with alpha (2 channel)
``'RGB'``
colour image (3 channel)
``'RGBA'``
colour image with alpha (4 channel)
The mode string can also specify the bit depth (overriding how this
function normally derives the bit depth, see below). Appending
``';16'`` to the mode will cause the PNG to be 16 bits per channel;
any decimal from 1 to 16 can be used to specify the bit depth.
When a 2-dimensional array is used *mode* determines how many
channels the image has, and so allows the width to be derived from
the second array dimension.
The array is expected to be a ``numpy`` array, but it can be any
suitable Python sequence. For example, a list of lists can be used:
``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``. The exact
rules are: ``len(a)`` gives the first dimension, height;
``len(a[0])`` gives the second dimension; ``len(a[0][0])`` gives the
third dimension, unless an exception is raised in which case a
2-dimensional array is assumed. It's slightly more complicated than
that because an iterator of rows can be used, and it all still
works. Using an iterator allows data to be streamed efficiently.
The bit depth of the PNG is normally taken from the array element's
datatype (but if *mode* specifies a bitdepth then that is used
instead). The array element's datatype is determined in a way which
is supposed to work both for ``numpy`` arrays and for Python
``array.array`` objects. A 1 byte datatype will give a bit depth of
8, a 2 byte datatype will give a bit depth of 16. If the datatype
does not have an implicit size, for example it is a plain Python
list of lists, as above, then a default of 8 is used.
The *info* parameter is a dictionary that can be used to specify
metadata (in the same style as the arguments to the
:class:``png.Writer`` class). For this function the keys that are
useful are:
height
overrides the height derived from the array dimensions and allows
*a* to be an iterable.
width
overrides the width derived from the array dimensions.
bitdepth
overrides the bit depth derived from the element datatype (but
must match *mode* if that also specifies a bit depth).
Generally anything specified in the
*info* dictionary will override any implicit choices that this
function would otherwise make, but must match any explicit ones.
For example, if the *info* dictionary has a ``greyscale`` key then
this must be true when mode is ``'L'`` or ``'LA'`` and false when
mode is ``'RGB'`` or ``'RGBA'``.
"""
# We abuse the *info* parameter by modifying it. Take a copy here.
# (Also typechecks *info* to some extent).
info = dict(info)
# Syntax check mode string.
bitdepth = None
try:
mode = mode.split(';')
if len(mode) not in (1,2):
raise Error()
if mode[0] not in ('L', 'LA', 'RGB', 'RGBA'):
raise Error()
if len(mode) == 2:
try:
bitdepth = int(mode[1])
except:
raise Error()
except Error:
raise Error("mode string should be 'RGB' or 'L;16' or similar.")
mode = mode[0]
# Get bitdepth from *mode* if possible.
if bitdepth:
if info.get('bitdepth') and bitdepth != info['bitdepth']:
raise Error("mode bitdepth (%d) should match info bitdepth (%d)." %
(bitdepth, info['bitdepth']))
info['bitdepth'] = bitdepth
# Fill in and/or check entries in *info*.
# Dimensions.
if 'size' in info:
# Check width, height, size all match where used.
for dimension,axis in [('width', 0), ('height', 1)]:
if dimension in info:
if info[dimension] != info['size'][axis]:
raise Error(
"info[%r] shhould match info['size'][%r]." %
(dimension, axis))
info['width'],info['height'] = info['size']
if 'height' not in info:
try:
l = len(a)
except:
raise Error(
"len(a) does not work, supply info['height'] instead.")
info['height'] = l
# Colour format.
if 'greyscale' in info:
if bool(info['greyscale']) != ('L' in mode):
raise Error("info['greyscale'] should match mode.")
info['greyscale'] = 'L' in mode
if 'alpha' in info:
if bool(info['alpha']) != ('A' in mode):
raise Error("info['alpha'] should match mode.")
info['alpha'] = 'A' in mode
planes = len(mode)
if 'planes' in info:
if info['planes'] != planes:
raise Error("info['planes'] should match mode.")
# In order to work out whether the array is 2D or 3D we need its
# first row, which requires that we take a copy of its iterator.
# We may also need the first row to derive width and bitdepth.
a,t = itertools.tee(a)
row = t.next()
del t
try:
row[0][0]
threed = True
testelement = row[0]
except:
threed = False
testelement = row
if 'width' not in info:
if threed:
width = len(row)
else:
width = len(row) // planes
info['width'] = width
# Not implemented yet
assert not threed
if 'bitdepth' not in info:
try:
dtype = testelement.dtype
# goto the "else:" clause. Sorry.
except:
try:
# Try a Python array.array.
bitdepth = 8 * testelement.itemsize
except:
# We can't determine it from the array element's
# datatype, use a default of 8.
bitdepth = 8
else:
# If we got here without exception, we now assume that
# the array is a numpy array.
if dtype.kind == 'b':
bitdepth = 1
else:
bitdepth = 8 * dtype.itemsize
info['bitdepth'] = bitdepth
for thing in 'width height bitdepth greyscale alpha'.split():
assert thing in info
return Image(a, info)
# So that refugee's from PIL feel more at home. Not documented.
fromarray = from_array
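# Hedged usage sketch, mirroring the docstring example above (the filename
# is illustrative):
#   from_array([[0, 255, 0],
#               [255, 0, 255]], 'L').save('tiny.png')
# writes a 3 pixel wide, 2 pixel high, 8-bit greyscale PNG.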
class Image:
"""A PNG image.
You can create an :class:`Image` object from an array of pixels by calling
:meth:`png.from_array`. It can be saved to disk with the
:meth:`save` method."""
def __init__(self, rows, info):
"""
.. note ::
The constructor is not public. Please do not call it.
"""
self.rows = rows
self.info = info
def save(self, file):
"""Save the image to *file*. If *file* looks like an open file
descriptor then it is used, otherwise it is treated as a
filename and a fresh file is opened.
In general, you can only call this method once; after it has
been called the first time and the PNG image has been saved, the
source data will have been streamed, and cannot be streamed
again.
"""
w = Writer(**self.info)
try:
file.write
def close(): pass
except:
file = open(file, 'wb')
def close(): file.close()
try:
w.write(file, self.rows)
finally:
close()
class _readable:
"""
A simple file-like interface for strings and arrays.
"""
def __init__(self, buf):
self.buf = buf
self.offset = 0
def read(self, n):
r = self.buf[self.offset:self.offset+n]
if isarray(r):
r = r.tostring()
self.offset += n
return r
class Reader:
"""
PNG decoder in pure Python.
"""
def __init__(self, _guess=None, **kw):
"""
Create a PNG decoder object.
The constructor expects exactly one keyword argument. If you
supply a positional argument instead, it will guess the input
type. You can choose among the following keyword arguments:
filename
Name of input file (a PNG file).
file
A file-like object (object with a read() method).
bytes
``array`` or ``string`` with PNG data.
"""
if ((_guess is not None and len(kw) != 0) or
(_guess is None and len(kw) != 1)):
raise TypeError("Reader() takes exactly 1 argument")
# Will be the first 8 bytes, later on. See validate_signature.
self.signature = None
self.transparent = None
# A pair of (len,type) if a chunk has been read but its data and
# checksum have not (in other words the file position is just
# past the 4 bytes that specify the chunk type). See preamble
# method for how this is used.
self.atchunk = None
if _guess is not None:
if isarray(_guess):
kw["bytes"] = _guess
elif isinstance(_guess, str):
kw["filename"] = _guess
elif isinstance(_guess, file):
kw["file"] = _guess
if "filename" in kw:
self.file = open(kw["filename"], "rb")
elif "file" in kw:
self.file = kw["file"]
elif "bytes" in kw:
self.file = _readable(kw["bytes"])
else:
raise TypeError("expecting filename, file or bytes array")
def chunk(self, seek=None, lenient=False):
"""
Read the next PNG chunk from the input file; returns a
(*type*,*data*) tuple. *type* is the chunk's type as a string
(all PNG chunk types are 4 characters long). *data* is the
chunk's data content, as a string.
If the optional `seek` argument is
specified then it will keep reading chunks until it either runs
out of file or finds the type specified by the argument. Note
that in general the order of chunks in PNGs is unspecified, so
using `seek` can cause you to miss chunks.
If the optional `lenient` argument evaluates to True,
checksum failures will raise warnings rather than exceptions.
"""
self.validate_signature()
while True:
# http://www.w3.org/TR/PNG/#5Chunk-layout
if not self.atchunk:
self.atchunk = self.chunklentype()
length,type = self.atchunk
self.atchunk = None
data = self.file.read(length)
if len(data) != length:
raise ChunkError('Chunk %s too short for required %i octets.'
% (type, length))
checksum = self.file.read(4)
if len(checksum) != 4:
raise ValueError('Chunk %s too short for checksum.' % type)
if seek and type != seek:
continue
verify = zlib.crc32(strtobytes(type))
verify = zlib.crc32(data, verify)
# Whether the output from zlib.crc32 is signed or not varies
# according to hideous implementation details, see
# http://bugs.python.org/issue1202 .
# We coerce it to be positive here (in a way which works on
# Python 2.3 and older).
verify &= 2**32 - 1
verify = struct.pack('!I', verify)
if checksum != verify:
# print repr(checksum)
(a, ) = struct.unpack('!I', checksum)
(b, ) = struct.unpack('!I', verify)
message = "Checksum error in %s chunk: 0x%08X != 0x%08X." % (type, a, b)
if lenient:
warnings.warn(message, RuntimeWarning)
else:
raise ChunkError(message)
return type, data
def chunks(self):
"""Return an iterator that will yield each chunk as a
(*chunktype*, *content*) pair.
"""
while True:
t,v = self.chunk()
yield t,v
if t == 'IEND':
break
def undo_filter(self, filter_type, scanline, previous):
"""Undo the filter for a scanline. `scanline` is a sequence of
bytes that does not include the initial filter type byte.
`previous` is decoded previous scanline (for straightlaced
images this is the previous pixel row, but for interlaced
images, it is the previous scanline in the reduced image, which
in general is not the previous pixel row in the final image).
When there is no previous scanline (the first row of a
straightlaced image, or the first row in one of the passes in an
interlaced image), then this argument should be ``None``.
The scanline will have the effects of filtering removed, and the
result will be returned as a fresh sequence of bytes.
"""
# :todo: Would it be better to update scanline in place?
# Yes, with the Cython extension making the undo_filter fast,
# updating scanline inplace makes the code 3 times faster
# (reading 50 images of 800x800 went from 40s to 16s)
result = scanline
if filter_type == 0:
return result
if filter_type not in (1,2,3,4):
raise FormatError('Invalid PNG Filter Type.'
' See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters .')
# Filter unit. The stride from one pixel to the corresponding
# byte from the previous pixel. Normally this is the pixel
# size in bytes, but when this is smaller than 1, the previous
# byte is used instead.
fu = max(1, self.psize)
# For the first line of a pass, synthesize a dummy previous
# line. An alternative approach would be to observe that on the
# first line 'up' is the same as 'null', 'paeth' is the same
# as 'sub', with only 'average' requiring any special case.
if not previous:
previous = array('B', [0]*len(scanline))
def sub():
"""Undo sub filter."""
ai = 0
# Loop starts at index fu. Observe that the initial part
# of the result is already filled in correctly with
# scanline.
for i in range(fu, len(result)):
x = scanline[i]
a = result[ai]
result[i] = (x + a) & 0xff
ai += 1
def up():
"""Undo up filter."""
for i in range(len(result)):
x = scanline[i]
b = previous[i]
result[i] = (x + b) & 0xff
def average():
"""Undo average filter."""
ai = -fu
for i in range(len(result)):
x = scanline[i]
if ai < 0:
a = 0
else:
a = result[ai]
b = previous[i]
result[i] = (x + ((a + b) >> 1)) & 0xff
ai += 1
def paeth():
"""Undo Paeth filter."""
# Also used for ci.
ai = -fu
for i in range(len(result)):
x = scanline[i]
if ai < 0:
a = c = 0
else:
a = result[ai]
c = previous[ai]
b = previous[i]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
pr = a
elif pb <= pc:
pr = b
else:
pr = c
result[i] = (x + pr) & 0xff
ai += 1
# Call appropriate filter algorithm. Note that 0 has already
# been dealt with.
(None,
pngfilters.undo_filter_sub,
pngfilters.undo_filter_up,
pngfilters.undo_filter_average,
pngfilters.undo_filter_paeth)[filter_type](fu, scanline, previous, result)
return result
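# Worked example (illustrative): undoing the "sub" filter inverts the
# filter_scanline example above. On a Reader with psize == 1, a scanline
# of [10, 10, 10] (filter-type byte already stripped) and no previous
# line decodes back to the original bytes:
#   self.undo_filter(1, array('B', [10, 10, 10]), None)
#   # -> array('B', [10, 20, 30])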
def deinterlace(self, raw):
"""
Read raw pixel data, undo filters, deinterlace, and flatten.
Return in flat row flat pixel format.
"""
# print >> sys.stderr, ("Reading interlaced, w=%s, r=%s, planes=%s," +
# " bpp=%s") % (self.width, self.height, self.planes, self.bps)
# Values per row (of the target image)
vpr = self.width * self.planes
# Make a result array, and make it big enough. Interleaving
# writes to the output array randomly (well, not quite), so the
# entire output array must be in memory.
fmt = 'BH'[self.bitdepth > 8]
a = array(fmt, [0]*vpr*self.height)
source_offset = 0
for xstart, ystart, xstep, ystep in _adam7:
# print >> sys.stderr, "Adam7: start=%s,%s step=%s,%s" % (
# xstart, ystart, xstep, ystep)
if xstart >= self.width:
continue
# The previous (reconstructed) scanline. None at the
# beginning of a pass to indicate that there is no previous
# line.
recon = None
# Pixels per row (reduced pass image)
ppr = int(math.ceil((self.width-xstart)/float(xstep)))
# Row size in bytes for this pass.
row_size = int(math.ceil(self.psize * ppr))
for y in range(ystart, self.height, ystep):
filter_type = raw[source_offset]
source_offset += 1
scanline = raw[source_offset:source_offset+row_size]
source_offset += row_size
recon = self.undo_filter(filter_type, scanline, recon)
# Convert so that there is one element per pixel value
flat = self.serialtoflat(recon, ppr)
if xstep == 1:
assert xstart == 0
offset = y * vpr
a[offset:offset+vpr] = flat
else:
offset = y * vpr + xstart * self.planes
end_offset = (y+1) * vpr
skip = self.planes * xstep
for i in range(self.planes):
a[offset+i:end_offset:skip] = \
flat[i::self.planes]
return a
def iterboxed(self, rows):
"""Iterator that yields each scanline in boxed row flat pixel
format. `rows` should be an iterator that yields the bytes of
each row in turn.
"""
def asvalues(raw):
"""Convert a row of raw bytes into a flat row. Result may
or may not share with argument"""
if self.bitdepth == 8:
return raw
if self.bitdepth == 16:
raw = tostring(raw)
return array('H', struct.unpack('!%dH' % (len(raw)//2), raw))
assert self.bitdepth < 8
width = self.width
# Samples per byte
spb = 8//self.bitdepth
out = array('B')
mask = 2**self.bitdepth - 1
shifts = map(self.bitdepth.__mul__, reversed(range(spb)))
for o in raw:
out.extend(map(lambda i: mask&(o>>i), shifts))
return out[:width]
return itertools.imap(asvalues, rows)
def serialtoflat(self, bytes, width=None):
"""Convert serial format (byte stream) pixel data to flat row
flat pixel.
"""
if self.bitdepth == 8:
return bytes
if self.bitdepth == 16:
bytes = tostring(bytes)
return array('H',
struct.unpack('!%dH' % (len(bytes)//2), bytes))
assert self.bitdepth < 8
if width is None:
width = self.width
# Samples per byte
spb = 8//self.bitdepth
out = array('B')
mask = 2**self.bitdepth - 1
shifts = map(self.bitdepth.__mul__, reversed(range(spb)))
l = width
for o in bytes:
out.extend([(mask&(o>>s)) for s in shifts][:l])
l -= spb
if l <= 0:
l = width
return out
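# Worked example (illustrative): with bitdepth 1, the byte 0xB2 (binary
# 10110010) unpacks most-significant-bit first into the eight pixel
# values [1, 0, 1, 1, 0, 0, 1, 0].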
def iterstraight(self, raw):
"""Iterator that undoes the effect of filtering, and yields each
row in serialised format (as a sequence of bytes). Assumes input
is straightlaced. `raw` should be an iterable that yields the
raw bytes in chunks of arbitrary size."""
# length of row, in bytes
rb = self.row_bytes
a = array('B')
# The previous (reconstructed) scanline. None indicates first
# line of image.
recon = None
for some in raw:
a.extend(some)
while len(a) >= rb + 1:
filter_type = a[0]
scanline = a[1:rb+1]
del a[:rb+1]
recon = self.undo_filter(filter_type, scanline, recon)
yield recon
if len(a) != 0:
# :file:format We get here with a file format error: when the
# available bytes (after decompressing) do not pack into exact
# rows.
raise FormatError(
'Wrong size for decompressed IDAT chunk.')
assert len(a) == 0
def validate_signature(self):
"""If signature (header) has not been read then read and
validate it; otherwise do nothing.
"""
if self.signature:
return
self.signature = self.file.read(8)
if self.signature != _signature:
raise FormatError("PNG file has invalid signature.")
def preamble(self, lenient=False):
"""
Extract the image metadata by reading the initial part of the PNG
file up to the start of the ``IDAT`` chunk. All the chunks that
precede the ``IDAT`` chunk are read and either processed for
metadata or discarded.
If the optional `lenient` argument evaluates to True,
checksum failures will raise warnings rather than exceptions.
"""
self.validate_signature()
while True:
if not self.atchunk:
self.atchunk = self.chunklentype()
if self.atchunk is None:
raise FormatError(
'This PNG file has no IDAT chunks.')
if self.atchunk[1] == 'IDAT':
return
self.process_chunk(lenient=lenient)
def chunklentype(self):
"""Reads just enough of the input to determine the next
chunk's length and type, returned as a (*length*, *type*) pair
where *type* is a string. If there are no more chunks, ``None``
is returned.
"""
x = self.file.read(8)
if not x:
return None
if len(x) != 8:
raise FormatError(
'End of file whilst reading chunk length and type.')
length,type = struct.unpack('!I4s', x)
type = bytestostr(type)
if length > 2**31-1:
raise FormatError('Chunk %s is too large: %d.' % (type,length))
return length,type
def process_chunk(self, lenient=False):
"""Process the next chunk and its data. This only processes the
following chunk types, all others are ignored: ``IHDR``,
``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``.
If the optional `lenient` argument evaluates to True,
checksum failures will raise warnings rather than exceptions.
"""
type, data = self.chunk(lenient=lenient)
if type == 'IHDR':
# http://www.w3.org/TR/PNG/#11IHDR
if len(data) != 13:
raise FormatError('IHDR chunk has incorrect length.')
(self.width, self.height, self.bitdepth, self.color_type,
self.compression, self.filter,
self.interlace) = struct.unpack("!2I5B", data)
# Check that the header specifies only valid combinations.
if self.bitdepth not in (1,2,4,8,16):
raise Error("invalid bit depth %d" % self.bitdepth)
if self.color_type not in (0,2,3,4,6):
raise Error("invalid colour type %d" % self.color_type)
# Check indexed (palettized) images have 8 or fewer bits
# per pixel; check only indexed or greyscale images have
# fewer than 8 bits per pixel.
if ((self.color_type & 1 and self.bitdepth > 8) or
(self.bitdepth < 8 and self.color_type not in (0,3))):
raise FormatError("Illegal combination of bit depth (%d)"
" and colour type (%d)."
" See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ."
% (self.bitdepth, self.color_type))
if self.compression != 0:
raise Error("unknown compression method %d" % self.compression)
if self.filter != 0:
raise FormatError("Unknown filter method %d,"
" see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ."
% self.filter)
if self.interlace not in (0,1):
raise FormatError("Unknown interlace method %d,"
" see http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods ."
% self.interlace)
# Derived values
# http://www.w3.org/TR/PNG/#6Colour-values
colormap = bool(self.color_type & 1)
greyscale = not (self.color_type & 2)
alpha = bool(self.color_type & 4)
color_planes = (3,1)[greyscale or colormap]
planes = color_planes + alpha
self.colormap = colormap
self.greyscale = greyscale
self.alpha = alpha
self.color_planes = color_planes
self.planes = planes
self.psize = float(self.bitdepth)/float(8) * planes
if int(self.psize) == self.psize:
self.psize = int(self.psize)
self.row_bytes = int(math.ceil(self.width * self.psize))
# Stores PLTE chunk if present, and is used to check
# chunk ordering constraints.
self.plte = None
# Stores tRNS chunk if present, and is used to check chunk
# ordering constraints.
self.trns = None
# Stores sbit chunk if present.
self.sbit = None
elif type == 'PLTE':
# http://www.w3.org/TR/PNG/#11PLTE
if self.plte:
warnings.warn("Multiple PLTE chunks present.")
self.plte = data
if len(data) % 3 != 0:
raise FormatError(
"PLTE chunk's length should be a multiple of 3.")
if len(data) > (2**self.bitdepth)*3:
raise FormatError("PLTE chunk is too long.")
if len(data) == 0:
raise FormatError("Empty PLTE is not allowed.")
elif type == 'bKGD':
try:
if self.colormap:
if not self.plte:
warnings.warn(
"PLTE chunk is required before bKGD chunk.")
self.background = struct.unpack('B', data)
else:
self.background = struct.unpack("!%dH" % self.color_planes,
data)
except struct.error:
raise FormatError("bKGD chunk has incorrect length.")
elif type == 'tRNS':
# http://www.w3.org/TR/PNG/#11tRNS
self.trns = data
if self.colormap:
if not self.plte:
warnings.warn("PLTE chunk is required before tRNS chunk.")
else:
if len(data) > len(self.plte)/3:
# Was warning, but promoted to Error as it
# would otherwise cause pain later on.
raise FormatError("tRNS chunk is too long.")
else:
if self.alpha:
raise FormatError(
"tRNS chunk is not valid with colour type %d." %
self.color_type)
try:
self.transparent = \
struct.unpack("!%dH" % self.color_planes, data)
except struct.error:
raise FormatError("tRNS chunk has incorrect length.")
elif type == 'gAMA':
try:
self.gamma = struct.unpack("!L", data)[0] / 100000.0
except struct.error:
raise FormatError("gAMA chunk has incorrect length.")
elif type == 'sBIT':
self.sbit = data
if (self.colormap and len(data) != 3 or
not self.colormap and len(data) != self.planes):
raise FormatError("sBIT chunk has incorrect length.")
def read(self, lenient=False):
"""
Read the PNG file and decode it. Returns (`width`, `height`,
`pixels`, `metadata`).
May use excessive memory.
`pixels` are returned in boxed row flat pixel format.
If the optional `lenient` argument evaluates to True,
checksum failures will raise warnings rather than exceptions.
"""
def iteridat():
"""Iterator that yields all the ``IDAT`` chunks as strings."""
while True:
try:
type, data = self.chunk(lenient=lenient)
except ValueError, e:
raise ChunkError(e.args[0])
if type == 'IEND':
# http://www.w3.org/TR/PNG/#11IEND
break
if type != 'IDAT':
continue
# type == 'IDAT'
# http://www.w3.org/TR/PNG/#11IDAT
if self.colormap and not self.plte:
warnings.warn("PLTE chunk is required before IDAT chunk")
yield data
def iterdecomp(idat):
"""Iterator that yields decompressed strings. `idat` should
be an iterator that yields the ``IDAT`` chunk data.
"""
# Currently, with no max_length parameter to decompress, this
# routine will do one yield per IDAT chunk. So not very
# incremental.
d = zlib.decompressobj()
# Each IDAT chunk is passed to the decompressor, then any
# remaining state is decompressed out.
for data in idat:
# :todo: add a max_length argument here to limit output
# size.
yield array('B', d.decompress(data))
yield array('B', d.flush())
self.preamble(lenient=lenient)
raw = iterdecomp(iteridat())
if self.interlace:
raw = array('B', itertools.chain(*raw))
arraycode = 'BH'[self.bitdepth>8]
# Like :meth:`group` but producing an array.array object for
# each row.
pixels = itertools.imap(lambda *row: array(arraycode, row),
*[iter(self.deinterlace(raw))]*self.width*self.planes)
else:
pixels = self.iterboxed(self.iterstraight(raw))
meta = dict()
for attr in 'greyscale alpha planes bitdepth interlace'.split():
meta[attr] = getattr(self, attr)
meta['size'] = (self.width, self.height)
for attr in 'gamma transparent background'.split():
a = getattr(self, attr, None)
if a is not None:
meta[attr] = a
if self.plte:
meta['palette'] = self.palette()
return self.width, self.height, pixels, meta
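# A minimal usage sketch for read() ('example.png' is a hypothetical
# filename, not a file shipped with PyPNG):
#   r = Reader(filename='example.png')
#   width, height, pixels, meta = r.read()
#   for row in pixels:
#       pass    # each row is an array of width*planes sample values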
def read_flat(self):
"""
Read a PNG file and decode it into flat row flat pixel format.
Returns (*width*, *height*, *pixels*, *metadata*).
May use excessive memory.
`pixels` are returned in flat row flat pixel format.
See also the :meth:`read` method which returns pixels in the
more stream-friendly boxed row flat pixel format.
"""
x, y, pixel, meta = self.read()
arraycode = 'BH'[meta['bitdepth']>8]
pixel = array(arraycode, itertools.chain(*pixel))
return x, y, pixel, meta
def palette(self, alpha='natural'):
"""Returns a palette that is a sequence of 3-tuples or 4-tuples,
synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These
chunks should have already been processed (for example, by
calling the :meth:`preamble` method). All the tuples are the
same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when
there is a ``tRNS`` chunk. Assumes that the image is colour type
3 and therefore a ``PLTE`` chunk is required.
If the `alpha` argument is ``'force'`` then an alpha channel is
always added, forcing the result to be a sequence of 4-tuples.
"""
if not self.plte:
raise FormatError(
"Required PLTE chunk is missing in colour type 3 image.")
plte = group(array('B', self.plte), 3)
if self.trns or alpha == 'force':
trns = array('B', self.trns or '')
trns.extend([255]*(len(plte)-len(trns)))
plte = map(operator.add, plte, group(trns, 1))
return plte
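# Example of the synthesis performed by palette(): with a PLTE of two
# entries (255,0,0) and (0,255,0) and a tRNS of the single byte 0x80,
# the result is [(255,0,0,128), (0,255,0,255)]; the entry with no
# tRNS byte is padded with 255 (fully opaque).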
def asDirect(self):
"""Returns the image data as a direct representation of an
``x * y * planes`` array. This method is intended to remove the
need for callers to deal with palettes and transparency
themselves. Images with a palette (colour type 3)
are converted to RGB or RGBA; images with transparency (a
``tRNS`` chunk) are converted to LA or RGBA as appropriate.
When returned in this format the pixel values represent the
colour value directly without needing to refer to palettes or
transparency information.
Like the :meth:`read` method this method returns a 4-tuple:
(*width*, *height*, *pixels*, *meta*)
This method normally returns pixel values with the bit depth
they have in the source image, but when the source PNG has an
``sBIT`` chunk it is inspected and can reduce the bit depth of
the result pixels; pixel values will be reduced according to
the bit depth specified in the ``sBIT`` chunk. (PNG nerds
should note that a single result bit depth is used for all
channels: the maximum of those specified in the ``sBIT``
chunk. An RGB565 image, for example, will be rescaled to
6-bit RGB666.)
The *meta* dictionary that is returned reflects the `direct`
format and not the original source image. For example, an RGB
source image with a ``tRNS`` chunk to represent a transparent
colour, will have ``planes=3`` and ``alpha=False`` for the
source image, but the *meta* dictionary returned by this method
will have ``planes=4`` and ``alpha=True`` because an alpha
channel is synthesized and added.
*pixels* is the pixel data in boxed row flat pixel format (just
like the :meth:`read` method).
All the other aspects of the image data are not changed.
"""
self.preamble()
# Simple case, no conversion necessary.
if not self.colormap and not self.trns and not self.sbit:
return self.read()
x,y,pixels,meta = self.read()
if self.colormap:
meta['colormap'] = False
meta['alpha'] = bool(self.trns)
meta['bitdepth'] = 8
meta['planes'] = 3 + bool(self.trns)
plte = self.palette()
def iterpal(pixels):
for row in pixels:
row = map(plte.__getitem__, row)
yield array('B', itertools.chain(*row))
pixels = iterpal(pixels)
elif self.trns:
# It would be nice if there were some reasonable way of doing
# this without generating a whole load of intermediate tuples.
# But tuples do seem like the easiest way, with no other way
# clearly much simpler or much faster. (Actually, the L to LA
# conversion could perhaps go faster (all those 1-tuples!), but
# I still wonder whether the code proliferation is worth it)
it = self.transparent
maxval = 2**meta['bitdepth']-1
planes = meta['planes']
meta['alpha'] = True
meta['planes'] += 1
typecode = 'BH'[meta['bitdepth']>8]
def itertrns(pixels):
for row in pixels:
# For each row we group it into pixels, then form a
# characterisation vector that says whether each pixel
# is opaque or not. Then we convert True/False to
# 0/maxval (by multiplication), and add it as the extra
# channel.
row = group(row, planes)
opa = map(it.__ne__, row)
opa = map(maxval.__mul__, opa)
opa = zip(opa) # convert to 1-tuples
yield array(typecode,
itertools.chain(*map(operator.add, row, opa)))
pixels = itertrns(pixels)
targetbitdepth = None
if self.sbit:
sbit = struct.unpack('%dB' % len(self.sbit), self.sbit)
targetbitdepth = max(sbit)
if targetbitdepth > meta['bitdepth']:
raise Error('sBIT chunk %r exceeds bitdepth %d' %
(sbit,self.bitdepth))
if min(sbit) <= 0:
raise Error('sBIT chunk %r has a 0-entry' % sbit)
if targetbitdepth == meta['bitdepth']:
targetbitdepth = None
if targetbitdepth:
shift = meta['bitdepth'] - targetbitdepth
meta['bitdepth'] = targetbitdepth
def itershift(pixels):
for row in pixels:
yield map(shift.__rrshift__, row)
pixels = itershift(pixels)
return x,y,pixels,meta
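# Usage sketch for asDirect() ('pal.png' is a hypothetical colour
# type 3 file):
#   r = Reader(filename='pal.png')
#   x, y, pixels, meta = r.asDirect()
#   # meta['planes'] is now 3 (RGB), or 4 (RGBA) when the source had
#   # a tRNS chunk, even though the source rows held palette indices.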
def asFloat(self, maxval=1.0):
"""Return image pixels as per :meth:`asDirect` method, but scale
all pixel values to be floating point values between 0.0 and
*maxval*.
"""
x,y,pixels,info = self.asDirect()
sourcemaxval = 2**info['bitdepth']-1
del info['bitdepth']
info['maxval'] = float(maxval)
factor = float(maxval)/float(sourcemaxval)
def iterfloat():
for row in pixels:
yield map(factor.__mul__, row)
return x,y,iterfloat(),info
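# For example, with the default maxval an 8-bit sample of 51 becomes
# 51 * (1.0 / 255) == 0.2 after asFloat().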
def _as_rescale(self, get, targetbitdepth):
"""Helper used by :meth:`asRGB8` and :meth:`asRGBA8`."""
width,height,pixels,meta = get()
maxval = 2**meta['bitdepth'] - 1
targetmaxval = 2**targetbitdepth - 1
factor = float(targetmaxval) / float(maxval)
meta['bitdepth'] = targetbitdepth
def iterscale():
for row in pixels:
yield map(lambda x: int(round(x*factor)), row)
if maxval == targetmaxval:
return width, height, pixels, meta
else:
return width, height, iterscale(), meta
def asRGB8(self):
"""Return the image data as an RGB pixels with 8-bits per
sample. This is like the :meth:`asRGB` method except that
this method additionally rescales the values so that they
are all between 0 and 255 (8-bit). In the case where the
source image has a bit depth < 8 the transformation preserves
all the information; where the source image has bit depth
> 8, then rescaling to 8-bit values loses precision. No
dithering is performed. Like :meth:`asRGB`, an alpha channel
in the source image will raise an exception.
This function returns a 4-tuple:
(*width*, *height*, *pixels*, *metadata*).
*width*, *height*, *metadata* are as per the :meth:`read` method.
*pixels* is the pixel data in boxed row flat pixel format.
"""
return self._as_rescale(self.asRGB, 8)
def asRGBA8(self):
"""Return the image data as RGBA pixels with 8-bits per
sample. This method is similar to :meth:`asRGB8` and
:meth:`asRGBA`: The result pixels have an alpha channel, *and*
values are rescaled to the range 0 to 255. The alpha channel is
synthesized if necessary (with a small speed penalty).
"""
return self._as_rescale(self.asRGBA, 8)
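# The rescaling done by _as_rescale multiplies each sample by
# (2**targetbitdepth - 1) / (2**bitdepth - 1) and rounds; a 16-bit
# sample of 0x8000 rescaled to 8 bits is
# int(round(32768 * 255.0 / 65535)) == 128.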
def asRGB(self):
"""Return image as RGB pixels. RGB colour images are passed
through unchanged; greyscales are expanded into RGB
triplets (there is a small speed overhead for doing this).
An alpha channel in the source image will raise an
exception.
The return values are as for the :meth:`read` method
except that the *metadata* reflect the returned pixels, not the
source image. In particular, for this method
``metadata['greyscale']`` will be ``False``.
"""
width,height,pixels,meta = self.asDirect()
if meta['alpha']:
raise Error("will not convert image with alpha channel to RGB")
if not meta['greyscale']:
return width,height,pixels,meta
meta['greyscale'] = False
typecode = 'BH'[meta['bitdepth'] > 8]
def iterrgb():
for row in pixels:
a = array(typecode, [0]) * 3 * width
for i in range(3):
a[i::3] = row
yield a
return width,height,iterrgb(),meta
def asRGBA(self):
"""Return image as RGBA pixels. Greyscales are expanded into
RGB triplets; an alpha channel is synthesized if necessary.
The return values are as for the :meth:`read` method
except that the *metadata* reflect the returned pixels, not the
source image. In particular, for this method
``metadata['greyscale']`` will be ``False``, and
``metadata['alpha']`` will be ``True``.
"""
width,height,pixels,meta = self.asDirect()
if meta['alpha'] and not meta['greyscale']:
return width,height,pixels,meta
typecode = 'BH'[meta['bitdepth'] > 8]
maxval = 2**meta['bitdepth'] - 1
maxbuffer = struct.pack('=' + typecode, maxval) * 4 * width
def newarray():
return array(typecode, maxbuffer)
if meta['alpha'] and meta['greyscale']:
# LA to RGBA
def convert():
for row in pixels:
# Create a fresh target row, then copy L channel
# into first three target channels, and A channel
# into fourth channel.
a = newarray()
pngfilters.convert_la_to_rgba(row, a)
yield a
elif meta['greyscale']:
# L to RGBA
def convert():
for row in pixels:
a = newarray()
pngfilters.convert_l_to_rgba(row, a)
yield a
else:
assert not meta['alpha'] and not meta['greyscale']
# RGB to RGBA
def convert():
for row in pixels:
a = newarray()
pngfilters.convert_rgb_to_rgba(row, a)
yield a
meta['alpha'] = True
meta['greyscale'] = False
return width,height,convert(),meta
# === Legacy Version Support ===
# :pyver:old: PyPNG works on Python versions 2.3 and 2.2, but not
# without some awkward problems. Really PyPNG works on Python 2.4 (and
# above); it works on Pythons 2.3 and 2.2 by virtue of fixing up
# problems here. It's a bit ugly (which is why it's hidden down here).
#
# Generally the strategy is one of pretending that we're running on
# Python 2.4 (or above), and patching up the library support on earlier
# versions so that it looks enough like Python 2.4. When it comes to
# Python 2.2 there is one thing we cannot patch: extended slices
# http://www.python.org/doc/2.3/whatsnew/section-slices.html.
# Instead we simply declare that features that are implemented using
# extended slices will not work on Python 2.2.
#
# In order to work on Python 2.3 we fix up a recurring annoyance involving
# the array type. In Python 2.3 an array cannot be initialised with an
# array, and it cannot be extended with a list (or other sequence).
# Both of those are repeated issues in the code. Whilst I would not
# normally tolerate this sort of behaviour, here we "shim" a replacement
# for array into place (and hope no-one notices). You never read this.
#
# In an amusing case of warty hacks on top of warty hacks... the array
# shimming we try and do only works on Python 2.3 and above (you can't
# subclass array.array in Python 2.2). So to get it working on Python
# 2.2 we go for something much simpler and (probably) way slower.
try:
array('B').extend([])
array('B', array('B'))
except:
# Expect to get here on Python 2.3
try:
class _array_shim(array):
true_array = array
def __new__(cls, typecode, init=None):
super_new = super(_array_shim, cls).__new__
it = super_new(cls, typecode)
if init is None:
return it
it.extend(init)
return it
def extend(self, extension):
super_extend = super(_array_shim, self).extend
if isinstance(extension, self.true_array):
return super_extend(extension)
if not isinstance(extension, (list, str)):
# Convert to list. Allows iterators to work.
extension = list(extension)
return super_extend(self.true_array(self.typecode, extension))
array = _array_shim
except:
# Expect to get here on Python 2.2
def array(typecode, init=()):
if type(init) == str:
return map(ord, init)
return list(init)
# Further hacks to get it limping along on Python 2.2
try:
enumerate
except:
def enumerate(seq):
i=0
for x in seq:
yield i,x
i += 1
try:
reversed
except:
def reversed(l):
l = list(l)
l.reverse()
for x in l:
yield x
try:
itertools
except:
class _dummy_itertools:
pass
itertools = _dummy_itertools()
def _itertools_imap(f, seq):
for x in seq:
yield f(x)
itertools.imap = _itertools_imap
def _itertools_chain(*iterables):
for it in iterables:
for element in it:
yield element
itertools.chain = _itertools_chain
# === Support for users without Cython ===
try:
pngfilters
except:
class pngfilters(object):
def undo_filter_sub(filter_unit, scanline, previous, result):
"""Undo sub filter."""
ai = 0
# Loop starts at index filter_unit. Observe that the initial
# part of the result is already filled in correctly with
# scanline.
for i in range(filter_unit, len(result)):
x = scanline[i]
a = result[ai]
result[i] = (x + a) & 0xff
ai += 1
undo_filter_sub = staticmethod(undo_filter_sub)
def undo_filter_up(filter_unit, scanline, previous, result):
"""Undo up filter."""
for i in range(len(result)):
x = scanline[i]
b = previous[i]
result[i] = (x + b) & 0xff
undo_filter_up = staticmethod(undo_filter_up)
def undo_filter_average(filter_unit, scanline, previous, result):
"""Undo up filter."""
ai = -filter_unit
for i in range(len(result)):
x = scanline[i]
if ai < 0:
a = 0
else:
a = result[ai]
b = previous[i]
result[i] = (x + ((a + b) >> 1)) & 0xff
ai += 1
undo_filter_average = staticmethod(undo_filter_average)
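# Worked example for the average unfilter above: with filter_unit 1,
# previous row [10, 20] and filtered bytes [5, 6], the first byte has
# a == 0 so result[0] = (5 + ((0 + 10) >> 1)) & 0xff == 10; the
# second has a == 10 so result[1] = (6 + ((10 + 20) >> 1)) & 0xff
# == 21.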
def undo_filter_paeth(filter_unit, scanline, previous, result):
"""Undo Paeth filter."""
# Also used for ci.
ai = -filter_unit
for i in range(len(result)):
x = scanline[i]
if ai < 0:
a = c = 0
else:
a = result[ai]
c = previous[ai]
b = previous[i]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
pr = a
elif pb <= pc:
pr = b
else:
pr = c
result[i] = (x + pr) & 0xff
ai += 1
undo_filter_paeth = staticmethod(undo_filter_paeth)
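# Worked example for the Paeth predictor above: with a == 30,
# b == 210, c == 20 we get p = 30 + 210 - 20 = 220, so pa == 190,
# pb == 10, pc == 200; pb is smallest, so the predictor is b (210).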
def convert_la_to_rgba(row, result):
"""Convert an LA image to RGBA. The L channel is copied into
the three colour channels; the A channel becomes the fourth
channel."""
for i in range(3):
result[i::4] = row[0::2]
result[3::4] = row[1::2]
convert_la_to_rgba = staticmethod(convert_la_to_rgba)
def convert_l_to_rgba(row, result):
"""Convert a grayscale image to RGBA. This method assumes the alpha
channel in result is already correctly initialized."""
for i in range(3):
result[i::4] = row
convert_l_to_rgba = staticmethod(convert_l_to_rgba)
def convert_rgb_to_rgba(row, result):
"""Convert an RGB image to RGBA. This method assumes the alpha
channel in result is already correctly initialized."""
for i in range(3):
result[i::4] = row[i::3]
convert_rgb_to_rgba = staticmethod(convert_rgb_to_rgba)
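# Example of the extended-slice interleaving used by the converters
# above: in convert_rgb_to_rgba a 2-pixel row [R1,G1,B1, R2,G2,B2]
# lands in result as [R1,G1,B1,A1, R2,G2,B2,A2], where the A samples
# keep whatever value result was pre-filled with (maxval; see
# asRGBA).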
# === Internal Test Support ===
# This section comprises the tests that are internally validated (as
# opposed to tests which produce output files that are externally
# validated). Primarily they are unittests.
# Note that it is difficult to internally validate the results of
# writing a PNG file. The only thing we can do is read it back in
# again, which merely checks consistency, not that the PNG file we
# produce is valid.
# Run the tests from the command line:
# python -c 'import png;png.test()'
# For an in-memory binary file IO object we use BytesIO where
# available; otherwise we use StringIO, but name it BytesIO.
try:
from io import BytesIO
except:
from StringIO import StringIO as BytesIO
import tempfile
# http://www.python.org/doc/2.4.4/lib/module-unittest.html
import unittest
def test():
unittest.main(__name__)
def topngbytes(name, rows, x, y, **k):
"""Convenience function for creating a PNG file "in memory" as a
string. Creates a :class:`Writer` instance using the keyword arguments,
then passes `rows` to its :meth:`Writer.write` method. The resulting
PNG file is returned as a string. `name` is used to identify the file for
debugging.
"""
import os
print name
f = BytesIO()
w = Writer(x, y, **k)
w.write(f, rows)
if os.environ.get('PYPNG_TEST_TMP'):
w = open(name, 'wb')
w.write(f.getvalue())
w.close()
return f.getvalue()
def testWithIO(inp, out, f):
"""Calls the function `f` with ``sys.stdin`` changed to `inp`
and ``sys.stdout`` changed to `out`. They are restored when `f`
returns. This function returns whatever `f` returns.
"""
import os
try:
oldin,sys.stdin = sys.stdin,inp
oldout,sys.stdout = sys.stdout,out
x = f()
finally:
sys.stdin = oldin
sys.stdout = oldout
if os.environ.get('PYPNG_TEST_TMP') and hasattr(out,'getvalue'):
name = mycallersname()
if name:
w = open(name+'.png', 'wb')
w.write(out.getvalue())
w.close()
return x
def mycallersname():
"""Returns the name of the caller of the caller of this function
(hence the name of the caller of the function in which
"mycallersname()" textually appears). Returns None if this cannot
be determined."""
# http://docs.python.org/library/inspect.html#the-interpreter-stack
import inspect
frame = inspect.currentframe()
if not frame:
return None
frame_,filename_,lineno_,funname,linelist_,listi_ = (
inspect.getouterframes(frame)[2])
return funname
def seqtobytes(s):
"""Convert a sequence of integers to a *bytes* instance. Good for
plastering over Python 2 / Python 3 cracks.
"""
return strtobytes(''.join(chr(x) for x in s))
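# For example, seqtobytes([72, 105]) yields the two bytes 'Hi' (a str
# on Python 2, bytes on Python 3).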
class Test(unittest.TestCase):
# This member is used by the superclass. If we don't define a new
# class here then when we use self.assertRaises() and the PyPNG code
# raises an exception we get no proper traceback. I can't work
# out why, but defining a new class here means we get a proper
# traceback.
class failureException(Exception):
pass
def helperLN(self, n):
mask = (1 << n) - 1
# Use small chunk_limit so that multiple chunk writing is
# tested. Making it a test for Issue 20.
w = Writer(15, 17, greyscale=True, bitdepth=n, chunk_limit=99)
f = BytesIO()
w.write_array(f, array('B', map(mask.__and__, range(1, 256))))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.read()
self.assertEqual(x, 15)
self.assertEqual(y, 17)
self.assertEqual(list(itertools.chain(*pixels)),
map(mask.__and__, range(1,256)))
def testL8(self):
return self.helperLN(8)
def testL4(self):
return self.helperLN(4)
def testL2(self):
"Also tests asRGB8."
w = Writer(1, 4, greyscale=True, bitdepth=2)
f = BytesIO()
w.write_array(f, array('B', range(4)))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.asRGB8()
self.assertEqual(x, 1)
self.assertEqual(y, 4)
for i,row in enumerate(pixels):
self.assertEqual(len(row), 3)
self.assertEqual(list(row), [0x55*i]*3)
def testP2(self):
"2-bit palette."
a = (255,255,255)
b = (200,120,120)
c = (50,99,50)
w = Writer(1, 4, bitdepth=2, palette=[a,b,c])
f = BytesIO()
w.write_array(f, array('B', (0,1,1,2)))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.asRGB8()
self.assertEqual(x, 1)
self.assertEqual(y, 4)
self.assertEqual(map(list, pixels), map(list, [a, b, b, c]))
def testPtrns(self):
"Test colour type 3 and tRNS chunk (and 4-bit palette)."
a = (50,99,50,50)
b = (200,120,120,80)
c = (255,255,255)
d = (200,120,120)
e = (50,99,50)
w = Writer(3, 3, bitdepth=4, palette=[a,b,c,d,e])
f = BytesIO()
w.write_array(f, array('B', (4, 3, 2, 3, 2, 0, 2, 0, 1)))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.asRGBA8()
self.assertEqual(x, 3)
self.assertEqual(y, 3)
c = c+(255,)
d = d+(255,)
e = e+(255,)
boxed = [(e,d,c),(d,c,a),(c,a,b)]
flat = map(lambda row: itertools.chain(*row), boxed)
self.assertEqual(map(list, pixels), map(list, flat))
def testRGBtoRGBA(self):
"asRGBA8() on colour type 2 source."""
# Test for Issue 26
r = Reader(bytes=_pngsuite['basn2c08'])
x,y,pixels,meta = r.asRGBA8()
# Test the pixels at row 9 columns 0 and 1.
row9 = list(pixels)[9]
self.assertEqual(list(row9[0:8]),
[0xff, 0xdf, 0xff, 0xff, 0xff, 0xde, 0xff, 0xff])
def testLtoRGBA(self):
"asRGBA() on grey source."""
# Test for Issue 60
r = Reader(bytes=_pngsuite['basi0g08'])
x,y,pixels,meta = r.asRGBA()
row9 = list(list(pixels)[9])
self.assertEqual(row9[0:8],
[222, 222, 222, 255, 221, 221, 221, 255])
def testCtrns(self):
"Test colour type 2 and tRNS chunk."
# Test for Issue 25
r = Reader(bytes=_pngsuite['tbrn2c08'])
x,y,pixels,meta = r.asRGBA8()
# I just happen to know that the first pixel is transparent.
# In particular it should be #7f7f7f00
row0 = list(pixels)[0]
self.assertEqual(tuple(row0[0:4]), (0x7f, 0x7f, 0x7f, 0x00))
def testAdam7read(self):
"""Adam7 interlace reading.
Specifically, test that for images in the PngSuite that
have both an interlaced and straightlaced pair that both
images from the pair produce the same array of pixels."""
for candidate in _pngsuite:
if not candidate.startswith('basn'):
continue
candi = candidate.replace('n', 'i')
if candi not in _pngsuite:
continue
print 'adam7 read', candidate
straight = Reader(bytes=_pngsuite[candidate])
adam7 = Reader(bytes=_pngsuite[candi])
# Just compare the pixels. Ignore x,y (because they're
# likely to be correct?); metadata is ignored because the
# "interlace" member differs. Lame.
straight = straight.read()[2]
adam7 = adam7.read()[2]
self.assertEqual(map(list, straight), map(list, adam7))
def testAdam7write(self):
"""Adam7 interlace writing.
For each test image in the PngSuite, write an interlaced
and a straightlaced version. Decode both, and compare results.
"""
# Not such a great test, because the only way we can check what
# we have written is to read it back again.
for name,bytes in _pngsuite.items():
# Only certain colour types supported for this test.
if name[3:5] not in ['n0', 'n2', 'n4', 'n6']:
continue
it = Reader(bytes=bytes)
x,y,pixels,meta = it.read()
pngi = topngbytes('adam7wn'+name+'.png', pixels,
x=x, y=y, bitdepth=it.bitdepth,
greyscale=it.greyscale, alpha=it.alpha,
transparent=it.transparent,
interlace=False)
x,y,ps,meta = Reader(bytes=pngi).read()
it = Reader(bytes=bytes)
x,y,pixels,meta = it.read()
pngs = topngbytes('adam7wi'+name+'.png', pixels,
x=x, y=y, bitdepth=it.bitdepth,
greyscale=it.greyscale, alpha=it.alpha,
transparent=it.transparent,
interlace=True)
x,y,pi,meta = Reader(bytes=pngs).read()
self.assertEqual(map(list, ps), map(list, pi))
def testPGMin(self):
"""Test that the command line tool can read PGM files."""
def do():
return _main(['testPGMin'])
s = BytesIO()
s.write(strtobytes('P5 2 2 3\n'))
s.write(strtobytes('\x00\x01\x02\x03'))
s.flush()
s.seek(0)
o = BytesIO()
testWithIO(s, o, do)
r = Reader(bytes=o.getvalue())
x,y,pixels,meta = r.read()
self.assertTrue(r.greyscale)
self.assertEqual(r.bitdepth, 2)
def testPAMin(self):
"""Test that the command line tool can read PAM file."""
def do():
return _main(['testPAMin'])
s = BytesIO()
s.write(strtobytes('P7\nWIDTH 3\nHEIGHT 1\nDEPTH 4\nMAXVAL 255\n'
'TUPLTYPE RGB_ALPHA\nENDHDR\n'))
# The pixels in flat row flat pixel format
flat = [255,0,0,255, 0,255,0,120, 0,0,255,30]
asbytes = seqtobytes(flat)
s.write(asbytes)
s.flush()
s.seek(0)
o = BytesIO()
testWithIO(s, o, do)
r = Reader(bytes=o.getvalue())
x,y,pixels,meta = r.read()
self.assertTrue(r.alpha)
self.assertTrue(not r.greyscale)
self.assertEqual(list(itertools.chain(*pixels)), flat)
def testLA4(self):
"""Create an LA image with bitdepth 4."""
bytes = topngbytes('la4.png', [[5, 12]], 1, 1,
greyscale=True, alpha=True, bitdepth=4)
sbit = Reader(bytes=bytes).chunk('sBIT')[1]
self.assertEqual(sbit, strtobytes('\x04\x04'))
def testPal(self):
"""Test that a palette PNG returns the palette in info."""
r = Reader(bytes=_pngsuite['basn3p04'])
x,y,pixels,info = r.read()
self.assertEqual(x, 32)
self.assertEqual(y, 32)
self.assertTrue('palette' in info)
def testPalWrite(self):
"""Test metadata for paletted PNG can be passed from one PNG
to another."""
r = Reader(bytes=_pngsuite['basn3p04'])
x,y,pixels,info = r.read()
w = Writer(**info)
o = BytesIO()
w.write(o, pixels)
o.flush()
o.seek(0)
r = Reader(file=o)
_,_,_,again_info = r.read()
# Same palette
self.assertEqual(again_info['palette'], info['palette'])
def testPalExpand(self):
"""Test that bitdepth can be used to fiddle with pallete image."""
r = Reader(bytes=_pngsuite['basn3p04'])
x,y,pixels,info = r.read()
pixels = [list(row) for row in pixels]
info['bitdepth'] = 8
w = Writer(**info)
o = BytesIO()
w.write(o, pixels)
o.flush()
o.seek(0)
r = Reader(file=o)
_,_,again_pixels,again_info = r.read()
# Same pixels
again_pixels = [list(row) for row in again_pixels]
self.assertEqual(again_pixels, pixels)
def testPNMsbit(self):
"""Test that PNM files can generates sBIT chunk."""
def do():
return _main(['testPNMsbit'])
s = BytesIO()
s.write(strtobytes('P6 8 1 1\n'))
for pixel in range(8):
s.write(struct.pack('<I', (0x4081*pixel)&0x10101)[:3])
s.flush()
s.seek(0)
o = BytesIO()
testWithIO(s, o, do)
r = Reader(bytes=o.getvalue())
sbit = r.chunk('sBIT')[1]
self.assertEqual(sbit, strtobytes('\x01\x01\x01'))
def testLtrns0(self):
"""Create greyscale image with tRNS chunk."""
return self.helperLtrns(0)
def testLtrns1(self):
"""Using 1-tuple for transparent arg."""
return self.helperLtrns((0,))
def helperLtrns(self, transparent):
"""Helper used by :meth:`testLtrns*`."""
pixels = zip([0x00, 0x38, 0x4c, 0x54, 0x5c, 0x40, 0x38, 0x00])
o = BytesIO()
w = Writer(8, 8, greyscale=True, bitdepth=1, transparent=transparent)
w.write_packed(o, pixels)
r = Reader(bytes=o.getvalue())
x,y,pixels,meta = r.asDirect()
self.assertTrue(meta['alpha'])
self.assertTrue(meta['greyscale'])
self.assertEqual(meta['bitdepth'], 1)
def testWinfo(self):
"""Test the dictionary returned by a `read` method can be used
as args for :meth:`Writer`.
"""
r = Reader(bytes=_pngsuite['basn2c16'])
info = r.read()[3]
w = Writer(**info)
def testPackedIter(self):
"""Test iterator for row when using write_packed.
Indicative for Issue 47.
"""
w = Writer(16, 2, greyscale=True, alpha=False, bitdepth=1)
o = BytesIO()
w.write_packed(o, [itertools.chain([0x0a], [0xaa]),
itertools.chain([0x0f], [0xff])])
r = Reader(bytes=o.getvalue())
x,y,pixels,info = r.asDirect()
pixels = list(pixels)
self.assertEqual(len(pixels), 2)
self.assertEqual(len(pixels[0]), 16)
def testInterlacedArray(self):
"""Test that reading an interlaced PNG yields each row as an
array."""
r = Reader(bytes=_pngsuite['basi0g08'])
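# Accessing .tostring without calling it raises AttributeError
# unless the row really is an array.array; that attribute lookup
# is the whole assertion here (and in testTrnsArray below).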
list(r.read()[2])[0].tostring
def testTrnsArray(self):
"""Test that reading a type 2 PNG with tRNS chunk yields each
row as an array (using asDirect)."""
r = Reader(bytes=_pngsuite['tbrn2c08'])
list(r.asDirect()[2])[0].tostring
# Invalid file format tests. These construct various badly
# formatted PNG files, then feed them into a Reader. When
# everything is working properly, we should get FormatError
# exceptions raised.
def testEmpty(self):
"""Test empty file."""
r = Reader(bytes='')
self.assertRaises(FormatError, r.asDirect)
def testSigOnly(self):
"""Test file containing just signature bytes."""
r = Reader(bytes=_signature)
self.assertRaises(FormatError, r.asDirect)
def testExtraPixels(self):
"""Test file that contains too many pixels."""
def eachchunk(chunk):
if chunk[0] != 'IDAT':
return chunk
data = zlib.decompress(chunk[1])
data += strtobytes('\x00garbage')
data = zlib.compress(data)
chunk = (chunk[0], data)
return chunk
self.assertRaises(FormatError, self.helperFormat, eachchunk)
def testNotEnoughPixels(self):
def eachchunk(chunk):
if chunk[0] != 'IDAT':
return chunk
# Remove last byte.
data = zlib.decompress(chunk[1])
data = data[:-1]
data = zlib.compress(data)
return (chunk[0], data)
self.assertRaises(FormatError, self.helperFormat, eachchunk)
def helperFormat(self, f):
r = Reader(bytes=_pngsuite['basn0g01'])
o = BytesIO()
def newchunks():
for chunk in r.chunks():
yield f(chunk)
write_chunks(o, newchunks())
r = Reader(bytes=o.getvalue())
return list(r.asDirect()[2])
def testBadFilter(self):
def eachchunk(chunk):
if chunk[0] != 'IDAT':
return chunk
data = zlib.decompress(chunk[1])
# Corrupt the first filter byte
data = strtobytes('\x99') + data[1:]
data = zlib.compress(data)
return (chunk[0], data)
self.assertRaises(FormatError, self.helperFormat, eachchunk)
def testFlat(self):
"""Test read_flat."""
import hashlib
r = Reader(bytes=_pngsuite['basn0g02'])
x,y,pixel,meta = r.read_flat()
d = hashlib.md5(seqtobytes(pixel)).digest()
self.assertEqual(_enhex(d), '255cd971ab8cd9e7275ff906e5041aa0')
def testfromarray(self):
img = from_array([[0, 0x33, 0x66], [0xff, 0xcc, 0x99]], 'L')
img.save('testfromarray.png')
def testfromarrayL16(self):
img = from_array(group(range(2**16), 256), 'L;16')
img.save('testL16.png')
def testfromarrayRGB(self):
img = from_array([[0,0,0, 0,0,1, 0,1,0, 0,1,1],
[1,0,0, 1,0,1, 1,1,0, 1,1,1]], 'RGB;1')
o = BytesIO()
img.save(o)
def testfromarrayIter(self):
import itertools
i = itertools.islice(itertools.count(10), 20)
i = itertools.imap(lambda x: [x, x, x], i)
img = from_array(i, 'RGB;5', dict(height=20))
f = open('testiter.png', 'wb')
img.save(f)
f.close()
# numpy dependent tests. These are skipped (with a message to
# sys.stderr) if numpy cannot be imported.
def testNumpyuint16(self):
"""numpy uint16."""
try:
import numpy
except ImportError:
print >>sys.stderr, "skipping numpy test"
return
rows = [map(numpy.uint16, range(0,0x10000,0x5555))]
b = topngbytes('numpyuint16.png', rows, 4, 1,
greyscale=True, alpha=False, bitdepth=16)
def testNumpyuint8(self):
"""numpy uint8."""
try:
import numpy
except ImportError:
print >>sys.stderr, "skipping numpy test"
return
rows = [map(numpy.uint8, range(0,0x100,0x55))]
b = topngbytes('numpyuint8.png', rows, 4, 1,
greyscale=True, alpha=False, bitdepth=8)
def testNumpybool(self):
"""numpy bool."""
try:
import numpy
except ImportError:
print >>sys.stderr, "skipping numpy test"
return
rows = [map(numpy.bool, [0,1])]
b = topngbytes('numpybool.png', rows, 2, 1,
greyscale=True, alpha=False, bitdepth=1)
def testNumpyarray(self):
"""numpy array."""
try:
import numpy
except ImportError:
print >>sys.stderr, "skipping numpy test"
return
pixels = numpy.array([[0,0x5555],[0x5555,0xaaaa]], numpy.uint16)
img = from_array(pixels, 'L')
img.save('testnumpyL16.png')
def paeth(self, x, a, b, c):
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
pr = a
elif pb <= pc:
pr = b
else:
pr = c
return x - pr
# test filters and unfilters
def testFilterScanlineFirstLine(self):
fo = 3 # bytes per pixel
line = [30, 31, 32, 230, 231, 232]
out = filter_scanline(0, line, fo, None) # none
self.assertEqual(list(out), [0, 30, 31, 32, 230, 231, 232])
out = filter_scanline(1, line, fo, None) # sub
self.assertEqual(list(out), [1, 30, 31, 32, 200, 200, 200])
out = filter_scanline(2, line, fo, None) # up
# TODO: All filtered scanlines start with a byte indicating the filter
# algorithm, except "up". Is this a bug? Should the expected output
# start with 2 here?
self.assertEqual(list(out), [30, 31, 32, 230, 231, 232])
out = filter_scanline(3, line, fo, None) # average
self.assertEqual(list(out), [3, 30, 31, 32, 215, 216, 216])
out = filter_scanline(4, line, fo, None) # paeth
self.assertEqual(list(out), [
4, self.paeth(30, 0, 0, 0), self.paeth(31, 0, 0, 0),
self.paeth(32, 0, 0, 0), self.paeth(230, 30, 0, 0),
self.paeth(231, 31, 0, 0), self.paeth(232, 32, 0, 0)
])
def testFilterScanline(self):
prev = [20, 21, 22, 210, 211, 212]
line = [30, 32, 34, 230, 233, 236]
fo = 3
out = filter_scanline(0, line, fo, prev) # none
self.assertEqual(list(out), [0, 30, 32, 34, 230, 233, 236])
out = filter_scanline(1, line, fo, prev) # sub
self.assertEqual(list(out), [1, 30, 32, 34, 200, 201, 202])
out = filter_scanline(2, line, fo, prev) # up
self.assertEqual(list(out), [2, 10, 11, 12, 20, 22, 24])
out = filter_scanline(3, line, fo, prev) # average
self.assertEqual(list(out), [3, 20, 22, 23, 110, 112, 113])
out = filter_scanline(4, line, fo, prev) # paeth
self.assertEqual(list(out), [
4, self.paeth(30, 0, 20, 0), self.paeth(32, 0, 21, 0),
self.paeth(34, 0, 22, 0), self.paeth(230, 30, 210, 20),
self.paeth(233, 32, 211, 21), self.paeth(236, 34, 212, 22)
])
def testUnfilterScanline(self):
reader = Reader(bytes='')
reader.psize = 3
scanprev = array('B', [20, 21, 22, 210, 211, 212])
scanline = array('B', [30, 32, 34, 230, 233, 236])
def cp(a):
return array('B', a)
out = reader.undo_filter(0, cp(scanline), cp(scanprev))
self.assertEqual(list(out), list(scanline)) # none
out = reader.undo_filter(1, cp(scanline), cp(scanprev))
self.assertEqual(list(out), [30, 32, 34, 4, 9, 14]) # sub
out = reader.undo_filter(2, cp(scanline), cp(scanprev))
self.assertEqual(list(out), [50, 53, 56, 184, 188, 192]) # up
out = reader.undo_filter(3, cp(scanline), cp(scanprev))
self.assertEqual(list(out), [40, 42, 45, 99, 103, 108]) # average
out = reader.undo_filter(4, cp(scanline), cp(scanprev))
self.assertEqual(list(out), [50, 53, 56, 184, 188, 192]) # paeth
def testUnfilterScanlinePaeth(self):
# This tests more edge cases in the paeth unfilter
reader = Reader(bytes='')
reader.psize = 3
scanprev = array('B', [2, 0, 0, 0, 9, 11])
scanline = array('B', [6, 10, 9, 100, 101, 102])
out = reader.undo_filter(4, scanline, scanprev)
self.assertEqual(list(out), [8, 10, 9, 108, 111, 113]) # paeth
def testIterstraight(self):
def arraify(list_of_str):
return [array('B', s) for s in list_of_str]
reader = Reader(bytes='')
reader.row_bytes = 6
reader.psize = 3
rows = reader.iterstraight(arraify(['\x00abcdef', '\x00ghijkl']))
self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
rows = reader.iterstraight(arraify(['\x00abc', 'def\x00ghijkl']))
self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
rows = reader.iterstraight(arraify(['\x00abcdef\x00ghijkl']))
self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
rows = reader.iterstraight(arraify(['\x00abcdef\x00ghi', 'jkl']))
self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
# === Command Line Support ===
def _dehex(s):
"""Liberally convert from hex string to binary string."""
import re
import binascii
# Remove all non-hexadecimal digits
s = re.sub(r'[^a-fA-F\d]', '', s)
# binascii.unhexlify works in Python 2 and Python 3 (unlike
# thing.decode('hex')).
return binascii.unhexlify(strtobytes(s))
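# For example, _dehex('89 50 4e 47') yields the first four bytes of
# the PNG signature (0x89 followed by the letters 'PNG').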
def _enhex(s):
"""Convert from binary string (bytes) to hex string (str)."""
import binascii
return bytestostr(binascii.hexlify(s))
# Copies of PngSuite test files taken
# from http://www.schaik.com/pngsuite/pngsuite_bas_png.html
# on 2009-02-19 by drj and converted to hex.
# Some of these are not actually in PngSuite (but maybe they should
# be?); they use the same naming scheme but start with a capital
# letter.
_pngsuite = {
'basi0g01': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002001000000012c0677
cf0000000467414d41000186a031e8965f0000009049444154789c2d8d310ec2
300c45dfc682c415187a00a42e197ab81e83b127e00c5639001363a580d8582c
65c910357c4b78b0bfbfdf4f70168c19e7acb970a3f2d1ded9695ce5bf5963df
d92aaf4c9fd927ea449e6487df5b9c36e799b91bdf082b4d4bd4014fe4014b01
ab7a17aee694d28d328a2d63837a70451e1648702d9a9ff4a11d2f7a51aa21e5
a18c7ffd0094e3511d661822f20000000049454e44ae426082
"""),
'basi0g02': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002002000000016ba60d
1f0000000467414d41000186a031e8965f0000005149444154789c635062e860
00e17286bb609c93c370ec189494960631366e4467b3ae675dcf10f521ea0303
90c1ca006444e11643482064114a4852c710baea3f18c31918020c30410403a6
0ac1a09239009c52804d85b6d97d0000000049454e44ae426082
"""),
'basi0g04': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200400000001e4e6f8
bf0000000467414d41000186a031e8965f000000ae49444154789c658e5111c2
301044171c141c141c041c843a287510ea20d441c041c141c141c04191102454
03994998cecd7edcecedbb9bdbc3b2c2b6457545fbc4bac1be437347f7c66a77
3c23d60db15e88f5c5627338a5416c2e691a9b475a89cd27eda12895ae8dfdab
43d61e590764f5c83a226b40d669bec307f93247701687723abf31ff83a2284b
a5b4ae6b63ac6520ad730ca4ed7b06d20e030369bd6720ed383290360406d24e
13811f2781eba9d34d07160000000049454e44ae426082
"""),
'basi0g08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200800000001211615
be0000000467414d41000186a031e8965f000000b549444154789cb5905d0ac2
3010849dbac81c42c47bf843cf253e8878b0aa17110f214bdca6be240f5d21a5
94ced3e49bcd322c1624115515154998aa424822a82a5624a1aa8a8b24c58f99
999908130989a04a00d76c2c09e76cf21adcb209393a6553577da17140a2c59e
70ecbfa388dff1f03b82fb82bd07f05f7cb13f80bb07ad2fd60c011c3c588eef
f1f4e03bbec7ce832dca927aea005e431b625796345307b019c845e6bfc3bb98
769d84f9efb02ea6c00f9bb9ff45e81f9f280000000049454e44ae426082
"""),
'basi0g16': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002010000000017186c9
fd0000000467414d41000186a031e8965f000000e249444154789cb5913b0ec2
301044c7490aa8f85d81c3e4301c8f53a4ca0da8902c8144b3920b4043111282
23bc4956681a6bf5fc3c5a3ba0448912d91a4de2c38dd8e380231eede4c4f7a1
4677700bec7bd9b1d344689315a3418d1a6efbe5b8305ba01f8ff4808c063e26
c60d5c81edcf6c58c535e252839e93801b15c0a70d810ae0d306b205dc32b187
272b64057e4720ff0502154034831520154034c3df81400510cdf0015c86e5cc
5c79c639fddba9dcb5456b51d7980eb52d8e7d7fa620a75120d6064641a05120
b606771a05626b401a05f1f589827cf0fe44c1f0bae0055698ee8914fffffe00
00000049454e44ae426082
"""),
'basi2c08': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002008020000018b1fdd
350000000467414d41000186a031e8965f000000f249444154789cd59341aa04
210c44abc07b78133d59d37333bd89d76868b566d10cf4675af8596431a11662
7c5688919280e312257dd6a0a4cf1a01008ee312a5f3c69c37e6fcc3f47e6776
a07f8bdaf5b40feed2d33e025e2ff4fe2d4a63e1a16d91180b736d8bc45854c5
6d951863f4a7e0b66dcf09a900f3ffa2948d4091e53ca86c048a64390f662b50
4a999660ced906182b9a01a8be00a56404a6ede182b1223b4025e32c4de34304
63457680c93aada6c99b73865aab2fc094920d901a203f5ddfe1970d28456783
26cffbafeffcd30654f46d119be4793f827387fc0d189d5bc4d69a3c23d45a7f
db803146578337df4d0a3121fc3d330000000049454e44ae426082
"""),
'basi2c16': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000201002000001db8f01
760000000467414d41000186a031e8965f0000020a49444154789cd5962173e3
3010853fcf1838cc61a1818185a53e56787fa13fa130852e3b5878b4b0b03081
b97f7030070b53e6b057a0a8912bbb9163b9f109ececbc59bd7dcf2b45492409
d66f00eb1dd83cb5497d65456aeb8e1040913b3b2c04504c936dd5a9c7e2c6eb
b1b8f17a58e8d043da56f06f0f9f62e5217b6ba3a1b76f6c9e99e8696a2a72e2
c4fb1e4d452e92ec9652b807486d12b6669be00db38d9114b0c1961e375461a5
5f76682a85c367ad6f682ff53a9c2a353191764b78bb07d8ddc3c97c1950f391
6745c7b9852c73c2f212605a466a502705c8338069c8b9e84efab941eb393a97
d4c9fd63148314209f1c1d3434e847ead6380de291d6f26a25c1ebb5047f5f24
d85c49f0f22cc1d34282c72709cab90477bf25b89d49f0f351822297e0ea9704
f34c82bc94002448ede51866e5656aef5d7c6a385cb4d80e6a538ceba04e6df2
480e9aa84ddedb413bb5c97b3838456df2d4fec2c7a706983e7474d085fae820
a841776a83073838973ac0413fea2f1dc4a06e71108fda73109bdae48954ad60
bf867aac3ce44c7c1589a711cf8a81df9b219679d96d1cec3d8bbbeaa2012626
df8c7802eda201b2d2e0239b409868171fc104ba8b76f10b4da09f6817ffc609
c413ede267fd1fbab46880c90f80eccf0013185eb48b47ba03df2bdaadef3181
cb8976f18e13188768170f98c0f844bb78cb04c62ddac59d09fc3fa25dfc1da4
14deb3df1344f70000000049454e44ae426082
"""),
'basi3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000133a3ba
500000000467414d41000186a031e8965f00000300504c5445224400f5ffed77
ff77cbffff110a003a77002222ffff11ff110000222200ffac5566ff66ff6666
ff01ff221200dcffffccff994444ff005555220000cbcbff44440055ff55cbcb
00331a00ffecdcedffffe4ffcbffdcdc44ff446666ff330000442200ededff66
6600ffa444ffffaaeded0000cbcbfefffffdfffeffff0133ff33552a000101ff
8888ff00aaaa010100440000888800ffe4cbba5b0022ff22663200ffff99aaaa
ff550000aaaa00cb630011ff11d4ffaa773a00ff4444dc6b0066000001ff0188
4200ecffdc6bdc00ffdcba00333300ed00ed7300ffff88994a0011ffff770000
ff8301ffbabafe7b00fffeff00cb00ff999922ffff880000ffff77008888ffdc
ff1a33000000aa33ffff009900990000000001326600ffbaff44ffffffaaff00
770000fefeaa00004a9900ffff66ff22220000998bff1155ffffff0101ff88ff
005500001111fffffefffdfea4ff4466ffffff66ff003300ffff55ff77770000
88ff44ff00110077ffff006666ffffed000100fff5ed1111ffffff44ff22ffff
eded11110088ffff00007793ff2200dcdc3333fffe00febabaff99ffff333300
63cb00baba00acff55ffffdcffff337bfe00ed00ed5555ffaaffffdcdcff5555
00000066dcdc00dc00dc83ff017777fffefeffffffcbff5555777700fefe00cb
00cb0000fe010200010000122200ffff220044449bff33ffd4aa0000559999ff
999900ba00ba2a5500ffcbcbb4ff66ff9b33ffffbaaa00aa42880053aa00ffaa
aa0000ed00babaffff1100fe00000044009999990099ffcc99ba000088008800
dc00ff93220000dcfefffeaa5300770077020100cb0000000033ffedff00ba00
ff3333edffedffc488bcff7700aa00660066002222dc0000ffcbffdcffdcff8b
110000cb00010155005500880000002201ffffcbffcbed0000ff88884400445b
ba00ffbc77ff99ff006600baffba00777773ed00fe00003300330000baff77ff
004400aaffaafffefe000011220022c4ff8800eded99ff99ff55ff002200ffb4
661100110a1100ff1111dcffbabaffff88ff88010001ff33ffb98ed362000002
a249444154789c65d0695c0b001806f03711a9904a94d24dac63292949e5a810
d244588a14ca5161d1a1323973252242d62157d12ae498c8124d25ca3a11398a
16e55a3cdffab0ffe7f77d7fcff3528645349b584c3187824d9d19d4ec2e3523
9eb0ae975cf8de02f2486d502191841b42967a1ad49e5ddc4265f69a899e26b5
e9e468181baae3a71a41b95669da8df2ea3594c1b31046d7b17bfb86592e4cbe
d89b23e8db0af6304d756e60a8f4ad378bdc2552ae5948df1d35b52143141533
33bbbbababebeb3b3bc9c9c9c6c6c0c0d7b7b535323225a5aa8a02024a4bedec
0a0a2a2bcdcd7d7cf2f3a9a9c9cdcdd8b8adcdd5b5ababa828298982824a4ab2
b21212acadbdbc1414e2e24859b9a72730302f4f49292c4c57373c9c0a0b7372
8c8c1c1c3a3a92936d6dfdfd293e3e26262a4a4eaea2424b4b5fbfbc9c323278
3c0b0ba1303abaae8ecdeeed950d6669a9a7a7a141d4de9e9d5d5cdcd2229b94
c572716132f97cb1d8db9bc3110864a39795d9db6b6a26267a7a9a98d4d6a6a7
cb76090ef6f030354d4d75766e686030545464cb393a1a1ac6c68686eae8f8f9
a9aa4644c8b66d6e1689dcdd2512a994cb35330b0991ad9f9b6b659596a6addd
d8282fafae5e5323fb8f41d01f76c22fd8061be01bfc041a0323e1002c81cd30
0b9ec027a0c930014ec035580fc3e112bc069a0b53e11c0c8095f00176c163a0
e5301baec06a580677600ddc05ba0f13e120bc81a770133ec355a017300d4ec2
0c7800bbe1219c02fa08f3e13c1c85dbb00a2ec05ea0dff00a6ec15a98027360
070c047a06d7e1085c84f1b014f6c03fa0b33018b6c0211801ebe018fc00da0a
6f61113c877eb01d4ec317a085700f26c130f80efbe132bc039a0733e106fc81
f7f017f6c10aa0d1300a0ec374780943e1382c06fa0a9b60238c83473016cec0
02f80f73fefe1072afc1e50000000049454e44ae426082
"""),
'basi6a08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200806000001047d4a
620000000467414d41000186a031e8965f0000012049444154789cc595414ec3
3010459fa541b8bbb26641b8069b861e8b4d12c1c112c1452a710a2a65d840d5
949041fc481ec98ae27c7f3f8d27e3e4648047600fec0d1f390fbbe2633a31e2
9389e4e4ea7bfdbf3d9a6b800ab89f1bd6b553cfcbb0679e960563d72e0a9293
b7337b9f988cc67f5f0e186d20e808042f1c97054e1309da40d02d7e27f92e03
6cbfc64df0fc3117a6210a1b6ad1a00df21c1abcf2a01944c7101b0cb568a001
909c9cf9e399cf3d8d9d4660a875405d9a60d000b05e2de55e25780b7a5268e0
622118e2399aab063a815808462f1ab86890fc2e03e48bb109ded7d26ce4bf59
0db91bac0050747fec5015ce80da0e5700281be533f0ce6d5900b59bcb00ea6d
200314cf801faab200ea752803a8d7a90c503a039f824a53f4694e7342000000
0049454e44ae426082
"""),
'basn0g01': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002001000000005b0147
590000000467414d41000186a031e8965f0000005b49444154789c2dccb10903
300c05d1ebd204b24a200b7a346f90153c82c18d0a61450751f1e08a2faaead2
a4846ccea9255306e753345712e211b221bf4b263d1b427325255e8bdab29e6f
6aca30692e9d29616ee96f3065f0bf1f1087492fd02f14c90000000049454e44
ae426082
"""),
'basn0g02': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002002000000001ca13d
890000000467414d41000186a031e8965f0000001f49444154789c6360085df5
1f8cf1308850c20053868f0133091f6390b90700bd497f818b0989a900000000
49454e44ae426082
"""),
# A version of basn0g04 dithered down to 3 bits.
'Basn0g03': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
2900000001734249540371d88211000000fd49444154789c6d90d18906210c84
c356f22356b2889588604301b112112b11d94a96bb495cf7fe87f32d996f2689
44741cc658e39c0b118f883e1f63cc89dafbc04c0f619d7d898396c54b875517
83f3a2e7ac09a2074430e7f497f00f1138a5444f82839c5206b1f51053cca968
63258821e7f2b5438aac16fbecc052b646e709de45cf18996b29648508728612
952ca606a73566d44612b876845e9a347084ea4868d2907ff06be4436c4b41a3
a3e1774285614c5affb40dbd931a526619d9fa18e4c2be420858de1df0e69893
a0e3e5523461be448561001042b7d4a15309ce2c57aef2ba89d1c13794a109d7
b5880aa27744fc5c4aecb5e7bcef5fe528ec6293a930690000000049454e44ae
426082
"""),
'basn0g04': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
290000000467414d41000186a031e8965f0000004849444154789c6360601014
545232367671090d4d4b2b2f6720430095dbd1418e002a77e64c720450b9ab56
912380caddbd9b1c0154ee9933e408a072efde25470095fbee1d1902001f14ee
01eaff41fa0000000049454e44ae426082
"""),
'basn0g08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200800000000561125
280000000467414d41000186a031e8965f0000004149444154789c6364602400
1408c8b30c05058c0f0829f8f71f3f6079301c1430ca11906764a2795c0c0605
8c8ff0cafeffcff887e67131181430cae0956564040050e5fe7135e2d8590000
000049454e44ae426082
"""),
'basn0g16': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002010000000000681f9
6b0000000467414d41000186a031e8965f0000005e49444154789cd5d2310ac0
300c4351395bef7fc6dca093c0287b32d52a04a3d98f3f3880a7b857131363a0
3a82601d089900dd82f640ca04e816dc06422640b7a03d903201ba05b7819009
d02d680fa44c603f6f07ec4ff41938cf7f0016d84bd85fae2b9fd70000000049
454e44ae426082
"""),
'basn2c08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed
a30000000467414d41000186a031e8965f0000004849444154789cedd5c10900
300c024085ec91fdb772133b442bf4a1f8cee12bb40d043b800a14f81ca0ede4
7d4c784081020f4a871fc284071428f0a0743823a94081bb7077a3c00182b1f9
5e0f40cf4b0000000049454e44ae426082
"""),
'basn2c16': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000201002000000ac8831
e00000000467414d41000186a031e8965f000000e549444154789cd596c10a83
301044a7e0417fcb7eb7fdadf6961e06039286266693cc7a188645e43dd6a08f
1042003e2fe09aef6472737e183d27335fcee2f35a77b702ebce742870a23397
f3edf2705dd10160f3b2815fe8ecf2027974a6b0c03f74a6e4192843e75c6c03
35e8ec3202f5e84c0181bbe8cca967a00d9df3491bb040671f2e6087ce1c2860
8d1e05f8c7ee0f1d00b667e70df44467ef26d01fbd9bc028f42860f71d188bce
fb8d3630039dbd59601e7ab3c06cf428507f0634d039afdc80123a7bb1801e7a
b1802a7a14c89f016d74ce331bf080ce9e08f8414f04bca133bfe642fe5e07bb
c4ec0000000049454e44ae426082
"""),
'basn3p04': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200403000000815467
c70000000467414d41000186a031e8965f000000037342495404040477f8b5a3
0000002d504c54452200ff00ffff8800ff22ff000099ffff6600dd00ff77ff00
ff000000ff99ddff00ff00bbffbb000044ff00ff44d2b049bd00000047494441
54789c63e8e8080d3d7366d5aaf27263e377ef66ce64204300952b28488e002a
d7c5851c0154eeddbbe408a07119c81140e52a29912380ca4d4b23470095bb7b
37190200e0c4ead10f82057d0000000049454e44ae426082
"""),
'basn6a08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200806000000737a7a
f40000000467414d41000186a031e8965f0000006f49444154789cedd6310a80
300c46e12764684fa1f73f55048f21c4ddc545781d52e85028fc1f4d28d98a01
305e7b7e9cffba33831d75054703ca06a8f90d58a0074e351e227d805c8254e3
1bb0420f5cdc2e0079208892ffe2a00136a07b4007943c1004d900195036407f
011bf00052201a9c160fb84c0000000049454e44ae426082
"""),
'cs3n3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
c60000000467414d41000186a031e8965f0000000373424954030303a392a042
00000054504c544592ff0000ff9200ffff00ff0000dbff00ff6dffb600006dff
b6ff00ff9200dbff000049ffff2400ff000024ff0049ff0000ffdb00ff4900ff
b6ffff0000ff2400b6ffffdb000092ffff6d000024ffff49006dff00df702b17
0000004b49444154789c85cac70182000000b1b3625754b0edbfa72324ef7486
184ed0177a437b680bcdd0031c0ed00ea21f74852ed00a1c9ed0086da0057487
6ed0121cd6d004bda0013a421ff803224033e177f4ae260000000049454e44ae
426082
"""),
's09n3p02': _dehex("""
89504e470d0a1a0a0000000d49484452000000090000000902030000009dffee
830000000467414d41000186a031e8965f000000037342495404040477f8b5a3
0000000c504c544500ff000077ffff00ffff7700ff5600640000001f49444154
789c63600002fbff0c0c56ab19182ca381581a4283f82071200000696505c36a
437f230000000049454e44ae426082
"""),
'tbgn3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
c60000000467414d41000186a031e8965f00000207504c54457f7f7fafafafab
abab110000222200737300999999510d00444400959500959595e6e600919191
8d8d8d620d00898989666600b7b700911600000000730d007373736f6f6faaaa
006b6b6b676767c41a00cccc0000f30000ef00d51e0055555567670000dd0051
515100d1004d4d4de61e0038380000b700160d0d00ab00560d00090900009500
009100008d003333332f2f2f2f2b2f2b2b000077007c7c001a05002b27000073
002b2b2b006f00bb1600272727780d002323230055004d4d00cc1e00004d00cc
1a000d00003c09006f6f00002f003811271111110d0d0d55554d090909001100
4d0900050505000d00e2e200000900000500626200a6a6a6a2a2a29e9e9e8484
00fb00fbd5d500801100800d00ea00ea555500a6a600e600e6f7f700e200e233
0500888888d900d9848484c01a007777003c3c05c8c8008080804409007c7c7c
bb00bbaa00aaa600a61e09056262629e009e9a009af322005e5e5e05050000ee
005a5a5adddd00a616008d008d00e20016050027270088110078780000c40078
00787300736f006f44444400aa00c81e004040406600663c3c3c090000550055
1a1a00343434d91e000084004d004d007c004500453c3c00ea1e00222222113c
113300331e1e1efb22001a1a1a004400afaf00270027003c001616161e001e0d
160d2f2f00808000001e00d1d1001100110d000db7b7b7090009050005b3b3b3
6d34c4230000000174524e530040e6d86600000001624b474402660b7c640000
01f249444154789c6360c0048c8c58049100575f215ee92e6161ef109cd2a15e
4b9645ce5d2c8f433aa4c24f3cbd4c98833b2314ab74a186f094b9c2c27571d2
6a2a58e4253c5cda8559057a392363854db4d9d0641973660b0b0bb76bb16656
06970997256877a07a95c75a1804b2fbcd128c80b482a0b0300f8a824276a9a8
ec6e61612b3e57ee06fbf0009619d5fac846ac5c60ed20e754921625a2daadc6
1967e29e97d2239c8aec7e61fdeca9cecebef54eb36c848517164514af16169e
866444b2b0b7b55534c815cc2ec22d89cd1353800a8473100a4485852d924a6a
412adc74e7ad1016ceed043267238c901716f633a812022998a4072267c4af02
92127005c0f811b62830054935ce017b38bf0948cc5c09955f030a24617d9d46
63371fd940b0827931cbfdf4956076ac018b592f72d45594a9b1f307f3261b1a
084bc2ad50018b1900719ba6ba4ca325d0427d3f6161449486f981144cf3100e
2a5f2a1ce8683e4ddf1b64275240c8438d98af0c729bbe07982b8a1c94201dc2
b3174c9820bcc06201585ad81b25b64a2146384e3798290c05ad280a18c0a62e
e898260c07fca80a24c076cc864b777131a00190cdfa3069035eccbc038c30e1
3e88b46d16b6acc5380d6ac202511c392f4b789aa7b0b08718765990111606c2
9e854c38e5191878fbe471e749b0112bb18902008dc473b2b2e8e72700000000
49454e44ae426082
"""),
'Tp2n3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
c60000000467414d41000186a031e8965f00000300504c544502ffff80ff05ff
7f0703ff7f0180ff04ff00ffff06ff000880ff05ff7f07ffff06ff000804ff00
0180ff02ffff03ff7f02ffff80ff0503ff7f0180ffff0008ff7f0704ff00ffff
06ff000802ffffff7f0704ff0003ff7fffff0680ff050180ff04ff000180ffff
0008ffff0603ff7f80ff05ff7f0702ffffff000880ff05ffff0603ff7f02ffff
ff7f070180ff04ff00ffff06ff000880ff050180ffff7f0702ffff04ff0003ff
7fff7f0704ff0003ff7f0180ffffff06ff000880ff0502ffffffff0603ff7fff
7f0702ffff04ff000180ff80ff05ff0008ff7f07ffff0680ff0504ff00ff0008
0180ff03ff7f02ffff02ffffffff0604ff0003ff7f0180ffff000880ff05ff7f
0780ff05ff00080180ff02ffffff7f0703ff7fffff0604ff00ff7f07ff0008ff
ff0680ff0504ff0002ffff0180ff03ff7fff0008ffff0680ff0504ff000180ff
02ffff03ff7fff7f070180ff02ffff04ff00ffff06ff0008ff7f0780ff0503ff
7fffff06ff0008ff7f0780ff0502ffff03ff7f0180ff04ff0002ffffff7f07ff
ff0604ff0003ff7fff00080180ff80ff05ffff0603ff7f0180ffff000804ff00
80ff0502ffffff7f0780ff05ffff0604ff000180ffff000802ffffff7f0703ff
7fff0008ff7f070180ff03ff7f02ffff80ff05ffff0604ff00ff0008ffff0602
ffff0180ff04ff0003ff7f80ff05ff7f070180ff04ff00ff7f0780ff0502ffff
ff000803ff7fffff0602ffffff7f07ffff0680ff05ff000804ff0003ff7f0180
ff02ffff0180ffff7f0703ff7fff000804ff0080ff05ffff0602ffff04ff00ff
ff0603ff7fff7f070180ff80ff05ff000803ff7f0180ffff7f0702ffffff0008
04ff00ffff0680ff0503ff7f0180ff04ff0080ff05ffff06ff000802ffffff7f
0780ff05ff0008ff7f070180ff03ff7f04ff0002ffffffff0604ff00ff7f07ff
000880ff05ffff060180ff02ffff03ff7f80ff05ffff0602ffff0180ff03ff7f
04ff00ff7f07ff00080180ffff000880ff0502ffff04ff00ff7f0703ff7fffff
06ff0008ffff0604ff00ff7f0780ff0502ffff03ff7f0180ffdeb83387000000
f874524e53000000000000000008080808080808081010101010101010181818
1818181818202020202020202029292929292929293131313131313131393939
393939393941414141414141414a4a4a4a4a4a4a4a52525252525252525a5a5a
5a5a5a5a5a62626262626262626a6a6a6a6a6a6a6a73737373737373737b7b7b
7b7b7b7b7b83838383838383838b8b8b8b8b8b8b8b94949494949494949c9c9c
9c9c9c9c9ca4a4a4a4a4a4a4a4acacacacacacacacb4b4b4b4b4b4b4b4bdbdbd
bdbdbdbdbdc5c5c5c5c5c5c5c5cdcdcdcdcdcdcdcdd5d5d5d5d5d5d5d5dedede
dededededee6e6e6e6e6e6e6e6eeeeeeeeeeeeeeeef6f6f6f6f6f6f6f6b98ac5
ca0000012c49444154789c6360e7169150d230b475f7098d4ccc28a96ced9e32
63c1da2d7b8e9fb97af3d1fb8f3f18e8a0808953544a4dd7c4c2c9233c2621bf
b4aab17fdacce5ab36ee3a72eafaad87efbefea68702362e7159652d031b07cf
c0b8a4cce28aa68e89f316aedfb4ffd0b92bf79fbcfcfe931e0a183904e55435
8decdcbcc22292b3caaadb7b27cc5db67af3be63e72fdf78fce2d31f7a2860e5
119356d037b374f10e8a4fc92eaa6fee99347fc9caad7b0f9ebd74f7c1db2fbf
e8a180995f484645dbdccad12f38363dafbcb6a573faeca5ebb6ed3e7ce2c29d
e76fbefda38702063e0149751d537b67ff80e8d4dcc29a86bea97316add9b0e3
c0e96bf79ebdfafc971e0a587885e515f58cad5d7d43a2d2720aeadaba26cf5a
bc62fbcea3272fde7efafac37f3a28000087c0fe101bc2f85f0000000049454e
44ae426082
"""),
'tbbn1g04': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
290000000467414d41000186a031e8965f0000000274524e530007e8f7589b00
000002624b47440000aa8d23320000013e49444154789c55d1cd4b024118c7f1
efbe6419045b6a48a72d352808b435284f9187ae9b098627a1573a19945beba5
e8129e8222af11d81e3a4545742de8ef6af6d5762e0fbf0fc33c33f36085cb76
bc4204778771b867260683ee57e13f0c922df5c719c2b3b6c6c25b2382cea4b9
9f7d4f244370746ac71f4ca88e0f173a6496749af47de8e44ba8f3bf9bdfa98a
0faf857a7dd95c7dc8d7c67c782c99727997f41eb2e3c1e554152465bb00fe8e
b692d190b718d159f4c0a45c4435915a243c58a7a4312a7a57913f05747594c6
46169866c57101e4d4ce4d511423119c419183a3530cc63db88559ae28e7342a
1e9c8122b71139b8872d6e913153224bc1f35b60e4445bd4004e20ed6682c759
1d9873b3da0fbf50137dc5c9bde84fdb2ec8bde1189e0448b63584735993c209
7a601bd2710caceba6158797285b7f2084a2f82c57c01a0000000049454e44ae
426082
"""),
'tbrn2c08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed
a30000000467414d41000186a031e8965f0000000674524e53007f007f007f8a
33334f00000006624b474400ff0000000033277cf3000004d649444154789cad
965f68537714c73fd912d640235e692f34d0406fa0c1663481045ab060065514
56660a295831607df0a1488715167060840a1614e6431e9cb34fd2c00a762c85
f6a10f816650c13b0cf40612e1822ddc4863bd628a8924d23d6464f9d3665dd9
f7e977ce3dbff3cd3939bfdfef6bb87dfb364782dbed065ebe7cd93acc78b4ec
a228debd7bb7bfbfbfbbbbfb7f261045311a8d261209405194274f9ea4d3e916
f15f1c3eb5dd6e4fa5fecce526239184a2b0b8486f6f617171b1f5ae4311381c
8e57af5e5dbd7a351088150a78bd389d44222c2f93cdfe66b7db8f4ee07038b6
b6b6bebf766d7e7e7e60a06432313b4ba984c3c1c4049a46b95c5a58583822c1
dbb76f27272733d1b9df853c3030c0f232562b9108cf9eb1b888d7cbf030abab
31abd5fa1f08dc6ef7e7cf9f1f3f7e1c8944745d4f1400c62c001313acad21cb
b8dd2c2c603271eb1640341aad4c6d331aa7e8c48913a150a861307ecc11e964
74899919bc5e14e56fffc404f1388502f178dceff7ef4bf0a5cfe7abb533998c
e5f9ea2f1dd88c180d64cb94412df3dd57e83a6b3b3c7a84c98420100c72fd3a
636348bae726379fe69e8e8d8dbd79f3a6558b0607079796965256479b918085
7b02db12712b6181950233023f3f647494ee6e2e5ea45864cce5b8a7fe3acffc
3aebb22c2bd5d20e22d0757d7b7bbbbdbd3d94a313bed1b0aa3cd069838b163a
8d4c59585f677292d0b84d9a995bd337def3fe6bbe5e6001989b9b6bfe27ea08
36373781542ab56573248b4c5bc843ac4048c7ab21aa24ca00534c25482828a3
8c9ee67475bbaaaab22cb722c8e57240a150301a8d219de94e44534d7d90e885
87acb0e2c4f9800731629b6c5ee14a35a6b9887d2a0032994cb9cf15dbe59650
ff7b46a04c9a749e7cc5112214266cc65c31354d5b5d5d3d90209bcd5616a552
a95c2e87f2a659bd9ee01c2cd73964e438f129a6aa9e582c363838b80f81d7eb
5555b56a2a8ad2d9d7affd0409f8015c208013fea00177b873831b0282c964f2
783c1e8fa7582cee5f81a669b5e6eeeeaee58e8559b0c233d8843c7c0b963a82
34e94b5cb2396d7d7d7db22c8ba258fb0afd43f0e2c58b919191ba9de9b4d425
118329b0c3323c8709d02041b52b4ea7f39de75d2a934a2693c0a953a76a93d4
5d157ebf7f6565a5542a553df97c5e10045dd731c130b86113cc300cbd489224
08422a952a140a95788fc763b1d41558d7a2d7af5f5fb870a1d6a3aaaacd6603
18802da84c59015bd2e6897b745d9765b99a1df0f97c0daf74e36deaf7fbcd66
73ad2797cb89a2c839880188a2e8743a8bc5a22ccbba5e376466b3b9bdbdbd21
6123413a9d0e0402b51e4dd3bababa788eb022b85caeb6b6364551b6b7b76942
43f7f727007a7a7a04a1ee8065b3595fde2768423299ac1ec6669c3973e65004
c0f8f878ad69341a33994ced2969c0d0d0502412f9f8f163f3a7fd654b474787
288ad53e74757535df6215b85cae60302849d2410aecc037f9f2e5cbd5b5c160
680eb0dbede170381c0e7ff8f0a185be3b906068684892a4ca7a6f6faff69328
8ad3d3d3f7efdfdfdbdbfb57e96868a14d0d0643381c96242997cbe5f3794010
84603078fcf8f1d6496bd14a3aba5c2ea7d369341a5555b5582c8140e0fcf9f3
1b1b1b87cf4eeb0a8063c78e45a3d19e9e1ebfdfdf5a831e844655d18093274f
9e3d7bf6d3a74f3b3b3b47c80efc05ff7af28fefb70d9b0000000049454e44ae
426082
"""),
'basn6a16': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020100600000023eaa6
b70000000467414d41000186a031e8965f00000d2249444154789cdd995f6c1c
d775c67ff38fb34b724d2ee55a8e4b04a0ac87049100cab4dbd8c6528902cb4d
10881620592e52d4325ac0905bc98a94025e71fd622cb5065ac98a0c283050c0
728a00b6e542a1d126885cd3298928891d9a0444037e904434951d4b90b84b2f
c9dde1fcebc33977a95555348f411e16dfce9d3b77ee77eebde77ce78c95a669
0ad07c17009a13edd898b87dfb1fcb7d2b4d1bff217f33df80deb1e6267df0ff
c1e6e6dfafdf1f5a7fd30f9aef66b6d546dd355bf02c40662e3307f9725a96c6
744c3031f83782f171c148dbc3bf1774f5dad1e79d6f095a3f54d4fbec5234ef
d9a2f8d73afe4f14f57ef4f42def7b44f19060f06b45bddf1c5534d77fd922be
2973a15a82e648661c6e3240aa3612ead952b604bde57458894f29deaf133bac
13d2766f5227a4a3b8cf08da7adfd6fbd6bd8a4fe9dbb43d35e3dfa3f844fbf8
9119bf4f7144094fb56333abf8a86063ca106f94b3a3b512343765e60082097f
1bb86ba72439a653519b09f5cee1ce61c897d37eedf5553580ae60f4af8af33a
b14fd400b6a0f34535c0434afc0b3a9f07147527a5fa7ca218ff56c74d74dc3f
155cfd3325fc278acf2ae1cb4a539f5f9937c457263b0bd51234c732a300cdd1
cc1840f0aaff54db0e4874ed5a9b5d6d27d4bb36746d80de72baa877ff4b275a
d7895ed1897ea4139b5143fcbb1a62560da1ed9662aaed895ec78a91c18795b8
5e07ab4af8ba128e95e682e0728bf8f2e5ae815a091a53d902ac1920d8e05f06
589de8d8d66680789f4e454fb9d9ec66cd857af796ee2d902fa73fd5bba775a2
153580ae44705ed0d37647d15697cb8f14bfa3e3e8fdf8031d47af571503357c
f30d25acedcbbf135c9a35c49766ba07ab255859e8ec03684e66860182dff8f7
0304bff6ff1c20fc81b7afdd00a71475539a536e36bb5973a19e3b923b02bde5
e4efd4003ac170eb2d13fe274157afedbd82d6fb3a9a1e85e4551d47cf7078f8
9671fe4289ebf5f2bf08d63f37c4eb4773c55a0996efeefa0ca011671d8060ca
2f0004c7fcc300e166ef0240f825efe3361f106d57d423d0723f7acacd66376b
2ed47b7a7a7a205f4ef4ac4691e0aad9aa0d41cf13741c3580a506487574ddca
61a8c403c1863ebfbcac3475168b2de28b8b3d77544bb05ce92a02aceced3c0d
d0cc65ea371b201cf1c601c24dde1c4078cedbdeb60322f50126a019bf6edc9b
39e566b39b3517eaf97c3e0fbde5e4491d45bd74537145d155b476aa0176e868
c6abebf30dbd5e525c54ac8e18e2d56abeb756827a3d970358a97416019a6f64
f60004fdfe1580d5c98e618070cc1b05887eee7e0d209a70db7d8063029889b4
c620ead78d7b33a7dc6c76b3e6427ddddbebde867c393aa7845e5403e8ca794a
d0d6fb897af5f03525fe5782f5e7046bdaef468bf88d1debc6ab25583cd17310
6079b9ab0ba059c914018245bf076075b5a303200c3c1f209a733701444fbbaf
00c4134ebb016c5d0b23614c243701cdf875e3decce9349bddacb9505fbf7dfd
76e82d87736a00f5d2b5ffd4b7dce2719a4d25ae717ee153c1abef18e257cfad
7fa45682da48ef38c052b53b0fd06864b300c151ff08c0ea431de701a287dd5f
004497dc7b01a253ee3e80b8c7f91c20f967fb6fdb7c80ada7d8683723614c24
3701cdf875e3decc29379bddacb950ef3fd47f08f2e5a61ea4aa2a3eb757cd55
13345efcfa59c12b2f19e2578ef77fb75a82854ffbee01a83f977b11a031931d
040802df07082b5e11207cc17b1e209a770700e2df0a83e409fb7580f827c230
99b06fd901fb058d6835dacd481813c94d40337eddb83773cacd66376b2ed437
bebcf165e82d2f4e4beb7f3fa6e652c2d7ee10bc78c010bfb87fe3c95a09ae9f
bd732740bd2fb700d0f865f64180e059ff044018ca0ca28a5b04883f701e0088
bfec7c0c909cb71f0448c6ec518074b375012079d9dedf66004bcfbc51eb2dd1
aadacd481813c94d40337eddb83773cacd66376b2ed487868686205fbe7c49ef
5605a73f34c4a7a787eeab96e0da81bb4e022c15ba27019a5b339300e16bf286
a8eae601e25866907cdf3e0890acb36f00245fb57f05904e59c300e92561946e
b2e600d209ab7d07f04d458dfb46ad1bd16ab49b913026929b8066fcba716fe6
949bcd6ed65ca8ef7e7cf7e3d05b7e7c8f217ee6cdddbb6a25a856f37980e0c7
fe4e80a82623c48193014846ec7180f4acf518409aca0cd28a5504e03b32c374
de1a00608a0240faaa327a4b19fe946fb6f90054dbb5f2333d022db56eb4966a
3723614c243701cdf8f556bea8a7dc6c76b3e66bd46584ddbbcebc0990cf4b0f
ff4070520c282338a7e26700ec725202b01e4bcf0258963c6f1d4d8f0030cb20
805549c520930c03584fa522b676f11600ffc03fde3e1b3489a9c9054c9aa23b
c08856a3dd8c843191dc0434e3d78d7b33a75c36fb993761f7ae5a69f72ef97f
e6ad336fed7e1c60e8bee96980bbdebbb60da07b7069062033d9dc0ae03d296f
70ab511ec071640676252902d833c916007b3e1900b0a6d2028035968e025861
ea01581369fb11488c34d18cbc95989afccca42baad65ba2d5683723614c24d7
8066fcbab8b7e96918baaf5aaa56219f975fb50a43f7c9bde90fa73f1c1a02d8
78f2e27e803b77ca08b90519315b6fe400fc1392097a9eccc0ad444500e70199
a1331f0f00d8934901c07e5d526ceb87c2d07e2579badd005a2b31a5089391b7
1253358049535a6add8856dd0146c298482e01ede27ed878b256ba7600ee3a09
c18fc1df09fe01084ec25defc1b56db0f1a4f4bd78e0e2818d2f0334e7330300
7df7c888b917e50dd9c1c60c80efcb0cbc63e1f700bce7c31700dccbd1060027
8add9b0de06c8e2f00d84962b7d7030e2a61538331b98051f92631bd253f336a
dd8856a3dd44c25c390efddfad96ae9f853b77c25201ba27c533b8bdf28b6ad0
3d084b33d2e7fa59099e9901b8f2d29597fa0f01848f78e70082117f1ca07b76
6910209b9519f895a008d031bbba05c09d8f06005c5b18b8fba25300cea6780e
c03e911c6ccf06d507b48a4fa606634a114609de929f9934c5a87511ad57cfc1
fa476aa5854fa1ef1e3910b905686e85cc24c40138198915f133d2d6dc2a7dea
7df2ccc2a752faf2cec1d577aebeb37e3b4034eeee0008dff3be0e6b923773b4
7904c0ef9119767cb4fa1500ef1361e08e452500f71561e84cc4ed3e20fab6a2
c905f40cb76a3026bf3319b91ac2e46792a6dcd801ebc6aba5da08f48ecb81c8
bd088d5f42f6417191de93908c803d0e76199292b485af41b60e8d9c3c537f0e
8211f0c7211a077707dc18b931b2ee6d80a4d7ae024491ebc24d4a708ff70680
7f25e807e8785f1878e322d6ddaf453f0770ff2dfa769b01423dbbad72a391b6
5a7c3235985629423372494cab55c8f7d64a8b27a0e7202c55a13b0f8d19c80e
4ae9ca3f015115dc3ca467c17a4c7ee95970ab10e5a54ff0ac3cd39881ee5958
1a84f03df0be0e492fd855a8d6aa35d10b4962dbb0a604a3d3ee5e80a8eee600
a24977f8660378bf0bbf00e01d0a8fb7f980f04b8aa6ce6aca8d5a7533c52753
839152c4e222f4dc512dd5eb90cbc981e8ea12cf90cd8a8bf47d89159e2741d3
7124f65b96fcd254dae258fa84a13c13043246a32129574787e49eae2b49b86d
c3e2e78b9ff7f4002415bb08907c66df0d103b4e0c104db90500ff70700c203a
ee1e82dba4c3e16e256c0acca6ceaae9afd1f612d7eb472157ac95962bd05594
7dd1598466053245088e827f44628657942a825b84e4fb601f84b4025611aca3
901e01bb024911dc0a4445f08e41f83df02b10142173149ab71baf027611ea95
7a257704201d14cd9af4d90b00f194530088cb4e09c0df1c5c0088f7393f6833
c0aa3ac156655de3bca9b34ab9716906ba07aba5e5bba1eb3358d90b9da7c533
64f6888bf47b60f521e8380fe10be03d2feac17900927560df40f4e48f805960
50328d648bf4893f9067c217a0631656b7c898c122847bc07b03a2d3e0ee85e4
33b0ef867450c4fad2ecd26cf7168074c0ba0c904cdac300c9cfec4701924df6
1cdca61e10685c6f7d52d0caba1498972f43d740adb4b2009d7d7220b20e3473
90a943d00ffe959bb6eac3e0fe42ea49ee00c45f06e76329b1dabf127d690d80
5581b408f63c2403e0cc433c00ee658836803b0fd100747c04ab5f917704fd10
d5c1cd41ec801343d207f602a403605d86e5f9e5f9ae0d00e994556833806685
c931fb709b0f08b4e869bea5c827859549e82c544b8d29c816a0390999613920
7e610d5727a16318c2003c1fa24be0de2b32caf92224e7c17e5004b6350c4c01
05601218066b0ad28224e149019c086257ca315102de2712903bde97b8144d82
3b2c6ac52d403c054e019249b087f53d0558995a99ea946c70cc927458b3c1ff
550f30050df988d4284376b4566a8e416654cc921985e037e0df0fc131f00f4b
acf0c6211c036f14a239703741740adc7da227edd7e56b833d0ae92549b4d357
25dfb49ed2ff63908e6adf27d6d0dda7638d4154d2778daca17f58e61297c129
41f233b01f5dc3740cac51688c35c6b22580f48224fee9b83502569a66b629f1
09f3713473413e2666e7fe6f6c6efefdfafda1f56f6e06f93496d9d67cb7366a
9964b6f92e64b689196ec6c604646fd3fe4771ff1bf03f65d8ecc3addbb5f300
00000049454e44ae426082
"""),
}
def read_pam_header(infile):
"""
Read (the rest of a) PAM header. `infile` should be positioned
immediately after the initial 'P7' line (at the beginning of the
second line). Returns are as for `read_pnm_header`.
"""
# Unlike PBM, PGM, and PPM, we can read the header a line at a time.
header = dict()
while True:
l = infile.readline().strip()
if l == strtobytes('ENDHDR'):
break
if not l:
raise EOFError('PAM ended prematurely')
if l[0] == strtobytes('#'):
continue
l = l.split(None, 1)
if l[0] not in header:
header[l[0]] = l[1]
else:
header[l[0]] += strtobytes(' ') + l[1]
required = ['WIDTH', 'HEIGHT', 'DEPTH', 'MAXVAL']
required = [strtobytes(x) for x in required]
WIDTH,HEIGHT,DEPTH,MAXVAL = required
present = [x for x in required if x in header]
if len(present) != len(required):
raise Error('PAM file must specify WIDTH, HEIGHT, DEPTH, and MAXVAL')
width = int(header[WIDTH])
height = int(header[HEIGHT])
depth = int(header[DEPTH])
maxval = int(header[MAXVAL])
if (width <= 0 or
height <= 0 or
depth <= 0 or
maxval <= 0):
raise Error(
'WIDTH, HEIGHT, DEPTH, MAXVAL must all be positive integers')
return 'P7', width, height, depth, maxval
def read_pnm_header(infile, supported=('P5','P6')):
"""
Read a PNM header, returning (format,width,height,depth,maxval).
`width` and `height` are in pixels. `depth` is the number of
channels in the image; for PBM and PGM it is synthesized as 1, for
PPM as 3; for PAM images it is read from the header. `maxval` is
synthesized (as 1) for PBM images.
"""
# Generally, see http://netpbm.sourceforge.net/doc/ppm.html
# and http://netpbm.sourceforge.net/doc/pam.html
supported = [strtobytes(x) for x in supported]
# Technically 'P7' must be followed by a newline, so by using
# rstrip() we are being liberal in what we accept. I think this
# is acceptable.
type = infile.read(3).rstrip()
if type not in supported:
raise NotImplementedError('file format %s not supported' % type)
if type == strtobytes('P7'):
# PAM header parsing is completely different.
return read_pam_header(infile)
# Expected number of tokens in header (3 for P4, 4 for P6)
expected = 4
pbm = ('P1', 'P4')
if type in pbm:
expected = 3
header = [type]
# We have to read the rest of the header byte by byte because the
# final whitespace character (immediately following the MAXVAL in
# the case of P6) may not be a newline. Of course all PNM files in
# the wild use a newline at this point, so it's tempting to use
# readline; but it would be wrong.
def getc():
c = infile.read(1)
if not c:
raise Error('premature EOF reading PNM header')
return c
c = getc()
while True:
# Skip whitespace that precedes a token.
while c.isspace():
c = getc()
# Skip comments.
while c == '#':
while c not in '\n\r':
c = getc()
if not c.isdigit():
raise Error('unexpected character %s found in header' % c)
# According to the specification it is legal to have comments
# that appear in the middle of a token.
# This is bonkers; I've never seen it; and it's a bit awkward to
# code good lexers in Python (no goto). So we break on such
# cases.
token = strtobytes('')
while c.isdigit():
token += c
c = getc()
# Slight hack. All "tokens" are decimal integers, so convert
# them here.
header.append(int(token))
if len(header) == expected:
break
# Skip comments (again)
while c == '#':
while c not in '\n\r':
c = getc()
if not c.isspace():
raise Error('expected header to end with whitespace, not %s' % c)
if type in pbm:
# synthesize a MAXVAL
header.append(1)
depth = (1,3)[type == strtobytes('P6')]
return header[0], header[1], header[2], depth, header[3]
def write_pnm(file, width, height, pixels, meta):
"""Write a Netpbm PNM/PAM file."""
bitdepth = meta['bitdepth']
maxval = 2**bitdepth - 1
# Rudely, the number of image planes can be used to determine
# whether we are L (PGM), LA (PAM), RGB (PPM), or RGBA (PAM).
planes = meta['planes']
# Can be an assert as long as we assume that pixels and meta came
# from a PNG file.
assert planes in (1,2,3,4)
if planes in (1,3):
if 1 == planes:
# PGM
# Could generate PBM if maxval is 1, but we don't (for one
# thing, we'd have to convert the data, not just blat it
# out).
fmt = 'P5'
else:
# PPM
fmt = 'P6'
file.write('%s %d %d %d\n' % (fmt, width, height, maxval))
if planes in (2,4):
# PAM
# See http://netpbm.sourceforge.net/doc/pam.html
if 2 == planes:
tupltype = 'GRAYSCALE_ALPHA'
else:
tupltype = 'RGB_ALPHA'
file.write('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n'
'TUPLTYPE %s\nENDHDR\n' %
(width, height, planes, maxval, tupltype))
# Values per row
vpr = planes * width
# struct format
fmt = '>%d' % vpr
if maxval > 0xff:
fmt = fmt + 'H'
else:
fmt = fmt + 'B'
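# e.g. an 8-bit RGB image 2 pixels wide gives vpr = 6 and fmt = '>6B'
# (six big-endian unsigned bytes per row); a 16-bit image uses 'H' instead.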
for row in pixels:
file.write(struct.pack(fmt, *row))
file.flush()
def color_triple(color):
"""
Convert a command line colour value to an RGB triple of integers.
FIXME: Somewhere we need support for greyscale backgrounds etc.
"""
if color.startswith('#') and len(color) == 4:
return (int(color[1], 16),
int(color[2], 16),
int(color[3], 16))
if color.startswith('#') and len(color) == 7:
return (int(color[1:3], 16),
int(color[3:5], 16),
int(color[5:7], 16))
elif color.startswith('#') and len(color) == 13:
return (int(color[1:5], 16),
int(color[5:9], 16),
int(color[9:13], 16))
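# Informal examples (assuming well-formed input):
#   color_triple('#fff') -> (15, 15, 15) (4 bits per channel)
#   color_triple('#ff8000') -> (255, 128, 0) (8 bits per channel)
#   color_triple('#ffff00000000') -> (65535, 0, 0) (16 bits per channel)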
def _add_common_options(parser):
"""Call *parser.add_option* for each of the options that are
common between this PNG--PNM conversion tool and the gen
tool.
"""
parser.add_option("-i", "--interlace",
default=False, action="store_true",
help="create an interlaced PNG file (Adam7)")
parser.add_option("-t", "--transparent",
action="store", type="string", metavar="#RRGGBB",
help="mark the specified colour as transparent")
parser.add_option("-b", "--background",
action="store", type="string", metavar="#RRGGBB",
help="save the specified background colour")
parser.add_option("-g", "--gamma",
action="store", type="float", metavar="value",
help="save the specified gamma value")
parser.add_option("-c", "--compression",
action="store", type="int", metavar="level",
help="zlib compression level (0-9)")
return parser
def _main(argv):
"""
Run the PNG encoder with options from the command line.
"""
# Parse command line arguments
from optparse import OptionParser
import re
version = '%prog ' + re.sub(r'( ?\$|URL: |Rev:)', '', __version__)
parser = OptionParser(version=version)
parser.set_usage("%prog [options] [imagefile]")
parser.add_option('-r', '--read-png', default=False,
action='store_true',
help='Read PNG, write PNM')
parser.add_option("-a", "--alpha",
action="store", type="string", metavar="pgmfile",
help="alpha channel transparency (RGBA)")
_add_common_options(parser)
(options, args) = parser.parse_args(args=argv[1:])
# Convert options
if options.transparent is not None:
options.transparent = color_triple(options.transparent)
if options.background is not None:
options.background = color_triple(options.background)
# Prepare input and output files
if len(args) == 0:
infilename = '-'
infile = sys.stdin
elif len(args) == 1:
infilename = args[0]
infile = open(infilename, 'rb')
else:
parser.error("more than one input file")
outfile = sys.stdout
if sys.platform == "win32":
import msvcrt, os
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
if options.read_png:
# Encode PNG to PPM
png = Reader(file=infile)
width,height,pixels,meta = png.asDirect()
write_pnm(outfile, width, height, pixels, meta)
else:
# Encode PNM to PNG
format, width, height, depth, maxval = \
read_pnm_header(infile, ('P5','P6','P7'))
# When it comes to the variety of input formats, we do something
# rather rude. Observe that L, LA, RGB, RGBA are the 4 colour
# types supported by PNG and that they correspond to 1, 2, 3, 4
# channels respectively. So we use the number of channels in
# the source image to determine which one we have. We do not
# care about TUPLTYPE.
greyscale = depth <= 2
pamalpha = depth in (2,4)
supported = map(lambda x: 2**x-1, range(1,17))
try:
mi = supported.index(maxval)
except ValueError:
raise NotImplementedError(
'your maxval (%s) not in supported list %s' %
(maxval, str(supported)))
bitdepth = mi+1
writer = Writer(width, height,
greyscale=greyscale,
bitdepth=bitdepth,
interlace=options.interlace,
transparent=options.transparent,
background=options.background,
alpha=bool(pamalpha or options.alpha),
gamma=options.gamma,
compression=options.compression)
if options.alpha:
pgmfile = open(options.alpha, 'rb')
format, awidth, aheight, adepth, amaxval = \
read_pnm_header(pgmfile, ('P5',))
if amaxval != 255:
raise NotImplementedError(
'maxval %s not supported for alpha channel' % amaxval)
if (awidth, aheight) != (width, height):
raise ValueError("alpha channel image size mismatch"
" (%s has %sx%s but %s has %sx%s)"
% (infilename, width, height,
options.alpha, awidth, aheight))
writer.convert_ppm_and_pgm(infile, pgmfile, outfile)
else:
writer.convert_pnm(infile, outfile)
if __name__ == '__main__':
try:
_main(sys.argv)
except Error, e:
print >>sys.stderr, e
|
bsd-3-clause
|
celibertojr/Kbsim
|
lib/pgu/gui/deprecated.py
|
26
|
2375
|
import pygame
from .const import *
from . import table
from . import group
from . import button, basic
from . import pguglobals
def action_open(value):
print('gui.action_open',"Scheduled to be deprecated.")
value.setdefault('x',None)
value.setdefault('y',None)
value['container'].open(value['window'],value['x'],value['y'])
def action_setvalue(value):
print('gui.action_setvalue',"Scheduled to be deprecated.")
a,b = value
b.value = a.value
def action_quit(value):
print('gui.action_quit',"Scheduled to be deprecated.")
value.quit()
def action_exec(value):
print('gui.action_exec',"Scheduled to be deprecated.")
exec(value['script'],globals(),value['dict'])
class Toolbox(table.Table):
def __setattr__(self,k,v):
_v = self.__dict__.get(k,NOATTR)
self.__dict__[k]=v
if k == 'value' and _v != NOATTR and _v != v:
self.group.value = v
for w in self.group.widgets:
if w.value != v: w.pcls = ""
else: w.pcls = "down"
self.repaint()
def _change(self,value):
self.value = self.group.value
self.send(CHANGE)
def __init__(self,data,cols=0,rows=0,tool_cls='tool',value=None,**params):
print('gui.Toolbox','Scheduled to be deprecated.')
params.setdefault('cls','toolbox')
table.Table.__init__(self,**params)
if cols == 0 and rows == 0: cols = len(data)
if cols != 0 and rows != 0: rows = 0
self.tools = {}
_value = value
g = group.Group()
self.group = g
g.connect(CHANGE,self._change,None)
self.group.value = _value
x,y,p,s = 0,0,None,1
for ico,value in data:
#from __init__ import theme
img = pguglobals.app.theme.get(tool_cls+"."+ico,"","image")
if img:
i = basic.Image(img)
else: i = basic.Label(ico,cls=tool_cls+".label")
p = button.Tool(g,i,value,cls=tool_cls)
self.tools[ico] = p
#p.style.hexpand = 1
#p.style.vexpand = 1
self.add(p,x,y)
s = 0
if cols != 0: x += 1
if cols != 0 and x == cols: x,y = 0,y+1
if rows != 0: y += 1
if rows != 0 and y == rows: x,y = x+1,0
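# Informal usage sketch (the icon keys here are hypothetical theme entries,
# not names guaranteed to exist in any theme):
#   tools = Toolbox([('pencil', 'pencil'), ('fill', 'fill')],
#                   cols=2, value='pencil')
#   tools.connect(CHANGE, some_callback, None)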
|
gpl-3.0
|
peterwilletts24/Monsoon-Python-Scripts
|
rain/rain_mean_by_day.py
|
1
|
3074
|
"""
Load multiple pp diagnostic files, aggregate by year, day, etc., calculate the mean, sum, etc., and save.
"""
import os, sys
import datetime
import iris
import iris.unit as unit
from iris.coord_categorisation import add_categorised_coord
import pdb
diag = 'avg.5216'
cube_name_explicit='stratiform_rainfall_rate'
cube_name_param='convective_rainfall_rate'
pp_file_path='/projects/cascade/pwille/moose_retrievals/'
experiment_ids = ['djznw', 'djzny', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq'] # All minus large 3
#experiment_ids = ['djznw', 'djzny', 'djznq', 'dkjxq', 'dkmbq', 'dklzq']
def add_hour_of_day(cube, coord, name='hour'):
add_categorised_coord(cube, name, coord,
lambda coord, x: coord.units.num2date(x).hour)
#def add_day_of_year(cube, coord, name='day_of_year'):
# add_categorised_coord(cube, name, coord,
# lambda coord, x: coord.units.num2date(x).day_of_year)
# pdb.set_trace()  # debugging breakpoint; disabled so the script runs unattended
dtmindt = datetime.datetime(2011,8,19,0,0,0)
dtmaxdt = datetime.datetime(2011,9,7,23,0,0)
dtmin = unit.date2num(dtmindt, 'hours since 1970-01-01 00:00:00', unit.CALENDAR_STANDARD)
dtmax = unit.date2num(dtmaxdt, 'hours since 1970-01-01 00:00:00', unit.CALENDAR_STANDARD)
time_constraint = iris.Constraint(time= lambda t: dtmin <= t.point <= dtmax)
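# dtmin/dtmax are in hours since 1970-01-01, so this constraint keeps only
# time points between 2011-08-19 00:00 and 2011-09-07 23:00.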
fg = '%sdjzn/djznw/%s.pp' % (pp_file_path, diag)
glob_load = iris.load_cube(fg, ('%s' % cube_name_param) & time_constraint)
## Get time points from global LAM to use as time constraint when loading other runs
time_list = glob_load.coord('time').points
glob_tc = iris.Constraint(time=time_list)
del glob_load
for experiment_id in experiment_ids:
expmin1 = experiment_id[:-1]
fu = '/projects/cascade/pwille/moose_retrievals/%s/%s/avg.5216.pp' % (expmin1, experiment_id)
print experiment_id
sys.stdout.flush()
try:
#cube_names = ['%s' % cube_name_param, '%s' % cube_name_explicit]
cubeconv = iris.load_cube(fu,'%s' % cube_name_param & glob_tc)
cubestrat = iris.load_cube(fu,'%s' % cube_name_explicit & glob_tc)
cube=cubeconv+cubestrat
cube.rename('total_precipitation_rate')
except iris.exceptions.ConstraintMismatchError:
cube = iris.load_cube(fu, ('%s' % cube_name_explicit) & glob_tc)
time_coords = cube.coord('time')
#add_hour_of_day(cube, time_coords)
iris.coord_categorisation.add_day_of_year(cube, time_coords, name='day_of_year')
#hours=[dt.hour for dt in time_dt.astype(object)]
# Rainfall output from the Stuart scripts is an hourly mean; time_interval is the timestep expressed as a fraction of an hour
# t = rain.coord('time').points
# time_interval = (t.flatten()[1]-t.flatten()[0])
# iris.analysis.maths.multiply(rain,60*(60*time_interval),in_place=True)
#rain_total = rain.collapsed('time', iris.analysis.SUM)
#for t, time_cube in enumerate (cube.slices(['time', 'grid_latitude', 'grid_longitude'])):
rain_mean = cube.aggregated_by('day_of_year', iris.analysis.MEAN)
iris.save((rain_mean),'/projects/cascade/pwille/moose_retrievals/%s/%s/rain_mean_by_day.pp' % (expmin1, experiment_id))
|
mit
|
bartoszj/Mallet
|
mallet/UIKit/UIDeviceRGBColor.py
|
1
|
4351
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2015 Bartosz Janda
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from .. import helpers
from ..common import SummaryBase
import UIColor
class UIDeviceRGBColorSyntheticProvider(UIColor.UIColorSyntheticProvider):
"""
Class representing UIDeviceRGBColor.
"""
def __init__(self, value_obj, internal_dict):
super(UIDeviceRGBColorSyntheticProvider, self).__init__(value_obj, internal_dict)
self.type_name = "UIDeviceRGBColor"
self.register_child_value("red_component", ivar_name="redComponent",
primitive_value_function=SummaryBase.get_float_value,
summary_function=self.get_red_component_summary)
self.register_child_value("green_component", ivar_name="greenComponent",
primitive_value_function=SummaryBase.get_float_value,
summary_function=self.get_green_component_summary)
self.register_child_value("blue_component", ivar_name="blueComponent",
primitive_value_function=SummaryBase.get_float_value,
summary_function=self.get_blue_component_summary)
self.register_child_value("alpha_component", ivar_name="alphaComponent",
primitive_value_function=SummaryBase.get_float_value,
summary_function=self.get_alpha_component_summary)
self.synthetic_children = ["red_component", "green_component", "blue_component", "alpha_component", "system_color_name"]
@staticmethod
def get_red_component_summary(value):
return "red={}".format(SummaryBase.formatted_float(value))
@staticmethod
def get_green_component_summary(value):
return "green={}".format(SummaryBase.formatted_float(value))
@staticmethod
def get_blue_component_summary(value):
return "blue={}".format(SummaryBase.formatted_float(value))
@staticmethod
def get_alpha_component_summary(value):
if value == 1:
return None
return "alpha={}".format(SummaryBase.formatted_float(value))
def get_rgb_summary(self):
r = self.red_component_value
g = self.green_component_value
b = self.blue_component_value
a = self.alpha_component_value
if r is None or g is None or b is None:
return None
r_value = int(round(r * 255))
g_value = int(round(g * 255))
b_value = int(round(b * 255))
a_value = int(round(a * 255)) if a is not None else 255
if a_value == 255:
return "rgb=#{:02X}{:02X}{:02X}".format(r_value, g_value, b_value)
return "rgba=#{:02X}{:02X}{:02X}{:02X}".format(r_value, g_value, b_value, a_value)
def summaries_parts(self):
return [self.get_rgb_summary(),
self.red_component_summary,
self.green_component_summary,
self.blue_component_summary,
self.alpha_component_summary,
self.system_color_name_summary]
def summary_provider(value_obj, internal_dict):
return helpers.generic_summary_provider(value_obj, internal_dict, UIDeviceRGBColorSyntheticProvider)
|
mit
|
red-hood/calendarserver
|
twistedcaldav/test/test_caldavxml.py
|
1
|
6897
|
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import twistedcaldav.test.util
from twistedcaldav import caldavxml
from twistedcaldav.caldavxml import CalendarData
from twistedcaldav.ical import normalize_iCalStr, Component
def normalizeJSON(j):
return "".join(map(str.strip, j.splitlines())).replace(", ", ",").replace(": ", ":")
class CustomXML (twistedcaldav.test.util.TestCase):
def test_TimeRange(self):
self.assertRaises(ValueError, caldavxml.CalDAVTimeRangeElement)
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z")
self.assertTrue(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000")
self.assertFalse(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(start="20110201")
self.assertFalse(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(end="20110201T120000Z")
self.assertTrue(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(end="20110201T120000")
self.assertFalse(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(end="20110201")
self.assertFalse(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z", end="20110202T120000Z")
self.assertTrue(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z", end="20110202T120000")
self.assertFalse(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z", end="20110202")
self.assertFalse(tr.valid())
def test_CalendarDataTextAndJSON(self):
"""
Test that we can both parse and generate CalendarData elements in both text and JSON formats.
"""
dataText = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
DTSTAMP:20080601T120000Z
EXDATE:20080602T120000Z
EXDATE:20080603T120000Z
ORGANIZER;CN=User 01:mailto:[email protected]
RRULE:FREQ=DAILY
SUMMARY:Test
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
dataXML = """<?xml version='1.0' encoding='UTF-8'?>
<calendar-data xmlns='urn:ietf:params:xml:ns:caldav'><![CDATA[%s]]></calendar-data>""" % (dataText,)
jsonText = """[
"vcalendar",
[
["version", {}, "text", "2.0"],
["prodid", {}, "text", "-//CALENDARSERVER.ORG//NONSGML Version 1//EN"]
],
[
["vevent",
[
["uid", {}, "text", "12345-67890"],
["dtstart", {}, "date-time", "2008-06-01T12:00:00Z"],
["dtend", {}, "date-time", "2008-06-01T13:00:00Z"],
["attendee", {}, "cal-address", "mailto:[email protected]"],
["attendee", {}, "cal-address", "mailto:[email protected]"],
["dtstamp", {}, "date-time", "2008-06-01T12:00:00Z"],
["exdate", {}, "date-time", "2008-06-02T12:00:00Z"],
["exdate", {}, "date-time", "2008-06-03T12:00:00Z"],
["organizer", {"cn": "User 01"}, "cal-address", "mailto:[email protected]"],
["rrule", {}, "recur", {"freq": "DAILY"}],
["summary", {}, "text", "Test"]
],
[
]
]
]
]
"""
jsonXML = """<?xml version='1.0' encoding='UTF-8'?>
<calendar-data content-type='application/calendar+json' xmlns='urn:ietf:params:xml:ns:caldav'><![CDATA[%s]]></calendar-data>""" % (jsonText,)
cd = CalendarData.fromTextData(dataText)
self.assertEqual(normalize_iCalStr(cd.calendar().getTextWithTimezones(True, format="text/calendar")), normalize_iCalStr(dataText))
self.assertEqual(normalizeJSON(cd.calendar().getTextWithTimezones(True, format="application/calendar+json")), normalizeJSON(jsonText))
self.assertEqual(cd.content_type, "text/calendar")
self.assertEqual(cd.toxml(), dataXML)
comp = Component.fromString(dataText)
cd = CalendarData.fromCalendar(comp)
self.assertEqual(normalize_iCalStr(cd.calendar().getTextWithTimezones(True, format="text/calendar")), normalize_iCalStr(dataText))
self.assertEqual(normalizeJSON(cd.calendar().getTextWithTimezones(True, format="application/calendar+json")), normalizeJSON(jsonText))
self.assertEqual(cd.content_type, "text/calendar")
self.assertEqual(cd.toxml(), dataXML)
cd = CalendarData.fromCalendar(comp, format="application/calendar+json")
self.assertEqual(normalize_iCalStr(cd.calendar().getTextWithTimezones(True, format="text/calendar")), normalize_iCalStr(dataText))
self.assertEqual(normalizeJSON(cd.calendar().getTextWithTimezones(True, format="application/calendar+json")), normalizeJSON(jsonText))
self.assertEqual(cd.content_type, "application/calendar+json")
self.assertEqual(normalizeJSON(cd.toxml()), normalizeJSON(jsonXML))
cd = CalendarData.fromTextData(jsonText, format="application/calendar+json")
self.assertEqual(normalize_iCalStr(cd.calendar().getTextWithTimezones(True, format="text/calendar")), normalize_iCalStr(dataText))
self.assertEqual(normalizeJSON(cd.calendar().getTextWithTimezones(True, format="application/calendar+json")), normalizeJSON(jsonText))
self.assertEqual(cd.content_type, "application/calendar+json")
self.assertEqual(cd.toxml(), jsonXML)
comp = Component.fromString(jsonText, format="application/calendar+json")
cd = CalendarData.fromCalendar(comp)
self.assertEqual(normalize_iCalStr(cd.calendar().getTextWithTimezones(True, format="text/calendar")), normalize_iCalStr(dataText))
self.assertEqual(normalizeJSON(cd.calendar().getTextWithTimezones(True, format="application/calendar+json")), normalizeJSON(jsonText))
self.assertEqual(cd.content_type, "text/calendar")
self.assertEqual(cd.toxml(), dataXML)
cd = CalendarData.fromCalendar(comp, format="application/calendar+json")
self.assertEqual(normalize_iCalStr(cd.calendar().getTextWithTimezones(True, format="text/calendar")), normalize_iCalStr(dataText))
self.assertEqual(normalizeJSON(cd.calendar().getTextWithTimezones(True, format="application/calendar+json")), normalizeJSON(jsonText))
self.assertEqual(cd.content_type, "application/calendar+json")
self.assertEqual(normalizeJSON(cd.toxml()), normalizeJSON(jsonXML))
|
apache-2.0
|
pawciobiel/fgpst-gae
|
tests/utils/test_memc.py
|
1
|
1311
|
import base64
import datetime
import json
import sys
import unittest
from webtest import TestApp
from google.appengine.api import memcache
from google.appengine.ext import testbed
from fgpst.utils import memc
class TestMemc(unittest.TestCase):
def setUp(self):
# First, create an instance of the Testbed class.
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_memcache_stub()
def tearDown(self):
self.testbed.deactivate()
def test_simple(self):
val = set()
val.add('abcd')
self.assertTrue(memcache.set('a', val))
expected = val
actual = memcache.get('a')
self.assertEqual(expected, actual)
g = 'group'
k = 'key'
v = 'val'
expected = True
actual = memc.add_to_group(g, k, v)
self.assertEqual(expected, actual)
expected = {'key'}
actual = memcache.get(g)
self.assertEqual(expected, actual)
ik = memc.build_item_key_for_group(g, k)
self.assertEqual('group:key', ik)
expected = v
actual = memcache.get(ik)
self.assertEqual(expected, actual)
expected = {'key': 'val'}
actual = memc.get_all_from_group(g)
self.assertEqual(expected, actual)
|
gpl-3.0
|
hobbyjobs/photivo
|
scons-local-2.2.0/SCons/Action.py
|
14
|
47618
|
"""SCons.Action
This encapsulates information about executing any sort of action that
can build one or more target Nodes (typically files) from one or more
source Nodes (also typically files) given a specific Environment.
The base class here is ActionBase. The base class supplies just a few
OO utility methods and some generic methods for displaying information
about an Action in response to the various commands that control printing.
A second-level base class is _ActionAction. This extends ActionBase
by providing the methods that can be used to show and perform an
action. True Action objects will subclass _ActionAction; Action
factory class objects will subclass ActionBase.
The heavy lifting is handled by subclasses for the different types of
actions we might execute:
CommandAction
CommandGeneratorAction
FunctionAction
ListAction
The subclasses supply the following public interface methods used by
other modules:
__call__()
THE public interface, "calling" an Action object executes the
command or Python function. This also takes care of printing
a pre-substitution command for debugging purposes.
get_contents()
Fetches the "contents" of an Action for signature calculation
plus the varlist. This is what gets MD5 checksummed to decide
if a target needs to be rebuilt because its action changed.
genstring()
Returns a string representation of the Action *without*
command substitution, but allows a CommandGeneratorAction to
generate the right action based on the specified target,
source and env. This is used by the Signature subsystem
(through the Executor) to obtain an (imprecise) representation
of the Action operation for informative purposes.
Subclasses also supply the following methods for internal use within
this module:
__str__()
Returns a string approximation of the Action; no variable
substitution is performed.
execute()
The internal method that really, truly, actually handles the
execution of a command or Python function. This is used so
that the __call__() methods can take care of displaying any
pre-substitution representations, and *then* execute an action
without worrying about the specific Actions involved.
get_presig()
Fetches the "contents" of a subclass for signature calculation.
The varlist is added to this to produce the Action's contents.
strfunction()
Returns a substituted string representation of the Action.
This is used by the _ActionAction.show() command to display the
command/function that will be executed to generate the target(s).
There is a related independent ActionCaller class that looks like a
regular Action, and which serves as a wrapper for arbitrary functions
that we want to let the user specify the arguments to now, but actually
execute later (when an out-of-date check determines that it's needed to
be executed, for example). Objects of this class are returned by an
ActionFactory class that provides a __call__() method as a convenient
way for wrapping up the functions.
"""
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Action.py issue-2856:2676:d23b7a2f45e8 2012/08/05 15:38:28 garyo"
import SCons.compat
import dis
import os
# compat layer imports "cPickle" for us if it's available.
import pickle
import re
import sys
import subprocess
from SCons.Debug import logInstanceCreation
import SCons.Errors
import SCons.Executor
import SCons.Util
import SCons.Subst
# we use these a lot, so try to optimize them
is_String = SCons.Util.is_String
is_List = SCons.Util.is_List
class _null(object):
pass
print_actions = 1
execute_actions = 1
print_actions_presub = 0
def rfile(n):
try:
return n.rfile()
except AttributeError:
return n
def default_exitstatfunc(s):
return s
try:
SET_LINENO = dis.SET_LINENO
HAVE_ARGUMENT = dis.HAVE_ARGUMENT
except AttributeError:
remove_set_lineno_codes = lambda x: x
else:
def remove_set_lineno_codes(code):
result = []
n = len(code)
i = 0
while i < n:
c = code[i]
op = ord(c)
if op >= HAVE_ARGUMENT:
if op != SET_LINENO:
result.append(code[i:i+3])
i = i+3
else:
result.append(c)
i = i+1
return ''.join(result)
strip_quotes = re.compile('^[\'"](.*)[\'"]$')
def _callable_contents(obj):
"""Return the signature contents of a callable Python object.
"""
try:
# Test if obj is a method.
return _function_contents(obj.im_func)
except AttributeError:
try:
# Test if obj is a callable object.
return _function_contents(obj.__call__.im_func)
except AttributeError:
try:
# Test if obj is a code object.
return _code_contents(obj)
except AttributeError:
# Test if obj is a function object.
return _function_contents(obj)
def _object_contents(obj):
"""Return the signature contents of any Python object.
We have to handle the case where object contains a code object
since it can be pickled directly.
"""
try:
# Test if obj is a method.
return _function_contents(obj.im_func)
except AttributeError:
try:
# Test if obj is a callable object.
return _function_contents(obj.__call__.im_func)
except AttributeError:
try:
# Test if obj is a code object.
return _code_contents(obj)
except AttributeError:
try:
# Test if obj is a function object.
return _function_contents(obj)
except AttributeError:
# Should be a pickable Python object.
try:
return pickle.dumps(obj)
except (pickle.PicklingError, TypeError):
# This is weird, but it seems that nested classes
# are unpicklable. The Python docs say it should
# always be a PicklingError, but some Python
# versions seem to return TypeError. Just do
# the best we can.
return str(obj)
def _code_contents(code):
"""Return the signature contents of a code object.
By providing direct access to the code object of the
function, Python makes this extremely easy. Hooray!
Unfortunately, older versions of Python include line
number indications in the compiled byte code. Boo!
So we remove the line number byte codes to prevent
recompilations from moving a Python function.
"""
contents = []
# The code contents depends on the number of local variables
# but not their actual names.
contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames)))
try:
contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars)))
except AttributeError:
# Older versions of Python do not support closures.
contents.append(",0,0")
# The code contents depends on any constants accessed by the
# function. Note that we have to call _object_contents on each
# constants because the code object of nested functions can
# show-up among the constants.
#
# Note that we also always ignore the first entry of co_consts
# which contains the function doc string. We assume that the
# function does not access its doc string.
contents.append(',(' + ','.join(map(_object_contents,code.co_consts[1:])) + ')')
# The code contents depends on the variable names used to
# accessed global variable, as changing the variable name changes
# the variable actually accessed and therefore changes the
# function result.
contents.append(',(' + ','.join(map(_object_contents,code.co_names)) + ')')
# The code contents depends on its actual code!!!
contents.append(',(' + str(remove_set_lineno_codes(code.co_code)) + ')')
return ''.join(contents)
def _function_contents(func):
"""Return the signature contents of a function."""
contents = [_code_contents(func.func_code)]
# The function contents depends on the value of defaults arguments
if func.func_defaults:
contents.append(',(' + ','.join(map(_object_contents,func.func_defaults)) + ')')
else:
contents.append(',()')
# The function contents depends on the closure captured cell values.
try:
closure = func.func_closure or []
except AttributeError:
# Older versions of Python do not support closures.
closure = []
#xxx = [_object_contents(x.cell_contents) for x in closure]
try:
xxx = [_object_contents(x.cell_contents) for x in closure]
except AttributeError:
xxx = []
contents.append(',(' + ','.join(xxx) + ')')
return ''.join(contents)
def _actionAppend(act1, act2):
# This function knows how to slap two actions together.
# Mainly, it handles ListActions by concatenating into
# a single ListAction.
a1 = Action(act1)
a2 = Action(act2)
if a1 is None:
return a2
if a2 is None:
return a1
if isinstance(a1, ListAction):
if isinstance(a2, ListAction):
return ListAction(a1.list + a2.list)
else:
return ListAction(a1.list + [ a2 ])
else:
if isinstance(a2, ListAction):
return ListAction([ a1 ] + a2.list)
else:
return ListAction([ a1, a2 ])
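# Informal examples (a sketch of the combinations handled above):
#   _actionAppend('cc -c $SOURCE', 'ln -s $TARGET') -> ListAction of two
#       CommandActions
#   _actionAppend(None, act) -> Action(act) (a None operand contributes nothing)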
def _do_create_keywords(args, kw):
"""This converts any arguments after the action argument into
their equivalent keywords and adds them to the kw argument.
"""
v = kw.get('varlist', ())
# prevent varlist="FOO" from being interpreted as ['F', 'O', 'O']
if is_String(v): v = (v,)
kw['varlist'] = tuple(v)
if args:
# turn positional args into equivalent keywords
cmdstrfunc = args[0]
if cmdstrfunc is None or is_String(cmdstrfunc):
kw['cmdstr'] = cmdstrfunc
elif callable(cmdstrfunc):
kw['strfunction'] = cmdstrfunc
else:
raise SCons.Errors.UserError(
'Invalid command display variable type. '
'You must either pass a string or a callback which '
'accepts (target, source, env) as parameters.')
if len(args) > 1:
kw['varlist'] = args[1:] + kw['varlist']
if kw.get('strfunction', _null) is not _null \
and kw.get('cmdstr', _null) is not _null:
raise SCons.Errors.UserError(
'Cannot have both strfunction and cmdstr args to Action()')
def _do_create_action(act, kw):
"""This is the actual "implementation" for the
Action factory method, below. This handles the
fact that passing lists to Action() itself has
different semantics than passing lists as elements
of lists.
The former will create a ListAction, the latter
will create a CommandAction by converting the inner
list elements to strings."""
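# Informal examples of the dispatch below (a sketch, not exhaustive):
#   _do_create_action(['cc', '-c', '$SOURCE'], kw) -> CommandAction
#   _do_create_action('$CCCOM', kw) -> LazyAction (pure variable reference)
#   _do_create_action('cc -c a.c\nln a b', kw) -> ListAction of two
#       CommandActions (one per line)
#   _do_create_action(42, kw) -> raises TypeError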
if isinstance(act, ActionBase):
return act
if is_List(act):
return CommandAction(act, **kw)
if callable(act):
try:
gen = kw['generator']
del kw['generator']
except KeyError:
gen = 0
if gen:
action_type = CommandGeneratorAction
else:
action_type = FunctionAction
return action_type(act, kw)
if is_String(act):
var=SCons.Util.get_environment_var(act)
if var:
# This looks like a string that is purely an Environment
# variable reference, like "$FOO" or "${FOO}". We do
# something special here...we lazily evaluate the contents
# of that Environment variable, so a user could put something
# like a function or a CommandGenerator in that variable
# instead of a string.
return LazyAction(var, kw)
commands = str(act).split('\n')
if len(commands) == 1:
return CommandAction(commands[0], **kw)
# The list of string commands may include a LazyAction, so we
# reprocess them via _do_create_list_action.
return _do_create_list_action(commands, kw)
# Catch a common error case with a nice message:
if isinstance(act, int) or isinstance(act, float):
raise TypeError("Don't know how to create an Action from a number (%s)"%act)
# Else fail silently (???)
return None
def _do_create_list_action(act, kw):
"""A factory for list actions. Convert the input list into Actions
and then wrap them in a ListAction."""
acts = []
for a in act:
aa = _do_create_action(a, kw)
if aa is not None: acts.append(aa)
if not acts:
return ListAction([])
elif len(acts) == 1:
return acts[0]
else:
return ListAction(acts)
def Action(act, *args, **kw):
"""A factory for action objects."""
# Really simple: the _do_create_* routines do the heavy lifting.
_do_create_keywords(args, kw)
if is_List(act):
return _do_create_list_action(act, kw)
return _do_create_action(act, kw)
class ActionBase(object):
"""Base class for all types of action objects that can be held by
other objects (Builders, Executors, etc.) This provides the
common methods for manipulating and combining those actions."""
def __cmp__(self, other):
return cmp(self.__dict__, other)
def no_batch_key(self, env, target, source):
return None
batch_key = no_batch_key
def genstring(self, target, source, env):
return str(self)
def get_contents(self, target, source, env):
result = [ self.get_presig(target, source, env) ]
# This should never happen, as the Action() factory should wrap
# the varlist, but just in case an action is created directly,
# we duplicate this check here.
vl = self.get_varlist(target, source, env)
if is_String(vl): vl = (vl,)
for v in vl:
result.append(env.subst('${'+v+'}'))
return ''.join(result)
def __add__(self, other):
return _actionAppend(self, other)
def __radd__(self, other):
return _actionAppend(other, self)
def presub_lines(self, env):
# CommandGeneratorAction needs a real environment
# in order to return the proper string here, since
# it may call LazyAction, which looks up a key
# in that env. So we temporarily remember the env here,
# and CommandGeneratorAction will use this env
# when it calls its _generate method.
self.presub_env = env
lines = str(self).split('\n')
self.presub_env = None # don't need this any more
return lines
def get_varlist(self, target, source, env, executor=None):
return self.varlist
def get_targets(self, env, executor):
"""
Returns the type of targets ($TARGETS, $CHANGED_TARGETS) used
by this action.
"""
return self.targets
class _ActionAction(ActionBase):
"""Base class for actions that create output objects."""
def __init__(self, cmdstr=_null, strfunction=_null, varlist=(),
presub=_null, chdir=None, exitstatfunc=None,
batch_key=None, targets='$TARGETS',
**kw):
self.cmdstr = cmdstr
if strfunction is not _null:
if strfunction is None:
self.cmdstr = None
else:
self.strfunction = strfunction
self.varlist = varlist
self.presub = presub
self.chdir = chdir
if not exitstatfunc:
exitstatfunc = default_exitstatfunc
self.exitstatfunc = exitstatfunc
self.targets = targets
if batch_key:
if not callable(batch_key):
# They have set batch_key, but not to their own
# callable. The default behavior here will batch
# *all* targets+sources using this action, separated
# for each construction environment.
def default_batch_key(self, env, target, source):
return (id(self), id(env))
batch_key = default_batch_key
SCons.Util.AddMethod(self, batch_key, 'batch_key')
def print_cmd_line(self, s, target, source, env):
# In python 3, and in some of our tests, sys.stdout is
# a StringIO object, and it takes unicode strings only
# In other cases it's a regular Python 2.x file object
# which takes strings (bytes), and if you pass those a
# unicode object they try to decode with 'ascii' codec
# which fails if the cmd line has any hi-bit-set chars.
# This code assumes s is a regular string, but should
# work if it's unicode too.
try:
sys.stdout.write(unicode(s + "\n"))
except UnicodeDecodeError:
sys.stdout.write(s + "\n")
def __call__(self, target, source, env,
exitstatfunc=_null,
presub=_null,
show=_null,
execute=_null,
chdir=_null,
executor=None):
if not is_List(target):
target = [target]
if not is_List(source):
source = [source]
if presub is _null:
presub = self.presub
if presub is _null:
presub = print_actions_presub
if exitstatfunc is _null: exitstatfunc = self.exitstatfunc
if show is _null: show = print_actions
if execute is _null: execute = execute_actions
if chdir is _null: chdir = self.chdir
save_cwd = None
if chdir:
save_cwd = os.getcwd()
try:
chdir = str(chdir.abspath)
except AttributeError:
if not is_String(chdir):
if executor:
chdir = str(executor.batches[0].targets[0].dir)
else:
chdir = str(target[0].dir)
if presub:
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
t = ' and '.join(map(str, target))
l = '\n '.join(self.presub_lines(env))
out = u"Building %s with action:\n %s\n" % (t, l)
sys.stdout.write(out)
cmd = None
if show and self.strfunction:
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
try:
cmd = self.strfunction(target, source, env, executor)
except TypeError:
cmd = self.strfunction(target, source, env)
if cmd:
if chdir:
cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd
try:
get = env.get
except AttributeError:
print_func = self.print_cmd_line
else:
print_func = get('PRINT_CMD_LINE_FUNC')
if not print_func:
print_func = self.print_cmd_line
print_func(cmd, target, source, env)
stat = 0
if execute:
if chdir:
os.chdir(chdir)
try:
stat = self.execute(target, source, env, executor=executor)
if isinstance(stat, SCons.Errors.BuildError):
s = exitstatfunc(stat.status)
if s:
stat.status = s
else:
stat = s
else:
stat = exitstatfunc(stat)
finally:
if save_cwd:
os.chdir(save_cwd)
if cmd and save_cwd:
print_func('os.chdir(%s)' % repr(save_cwd), target, source, env)
return stat
def _string_from_cmd_list(cmd_list):
"""Takes a list of command line arguments and returns a pretty
representation for printing."""
cl = []
for arg in map(str, cmd_list):
if ' ' in arg or '\t' in arg:
arg = '"' + arg + '"'
cl.append(arg)
return ' '.join(cl)
# A fiddlin' little function that has an 'import SCons.Environment' which
# can't be moved to the top level without creating an import loop. Since
# this import creates a local variable named 'SCons', it blocks access to
# the global variable, so we move it here to prevent complaints about local
# variables being used uninitialized.
default_ENV = None
def get_default_ENV(env):
global default_ENV
try:
return env['ENV']
except KeyError:
if not default_ENV:
import SCons.Environment
# This is a hideously expensive way to get a default shell
# environment. What it really should do is run the platform
# setup to get the default ENV. Fortunately, it's incredibly
# rare for an Environment not to have a shell environment, so
# we're not going to worry about it overmuch.
default_ENV = SCons.Environment.Environment()['ENV']
return default_ENV
# This function is still in draft mode. We're going to need something like
# it in the long run as more and more places use subprocess, but I'm sure
# it'll have to be tweaked to get the full desired functionality.
# one special arg (so far?), 'error', to tell what to do with exceptions.
def _subproc(scons_env, cmd, error = 'ignore', **kw):
"""Do common setup for a subprocess.Popen() call"""
# allow std{in,out,err} to be "'devnull'"
io = kw.get('stdin')
if is_String(io) and io == 'devnull':
kw['stdin'] = open(os.devnull)
io = kw.get('stdout')
if is_String(io) and io == 'devnull':
kw['stdout'] = open(os.devnull, 'w')
io = kw.get('stderr')
if is_String(io) and io == 'devnull':
kw['stderr'] = open(os.devnull, 'w')
# Figure out what shell environment to use
ENV = kw.get('env', None)
if ENV is None: ENV = get_default_ENV(scons_env)
# Ensure that the ENV values are all strings:
new_env = {}
for key, value in ENV.items():
if is_List(value):
# If the value is a list, then we assume it is a path list,
# because that's a pretty common list-like value to stick
# in an environment variable:
value = SCons.Util.flatten_sequence(value)
new_env[key] = os.pathsep.join(map(str, value))
else:
# It's either a string or something else. If it's a string,
# we still want to call str() because it might be a *Unicode*
# string, which makes subprocess.Popen() gag. If it isn't a
# string or a list, then we just coerce it to a string, which
# is the proper way to handle Dir and File instances and will
# produce something reasonable for just about everything else:
new_env[key] = str(value)
kw['env'] = new_env
try:
return subprocess.Popen(cmd, **kw)
except EnvironmentError, e:
if error == 'raise': raise
# return a dummy Popen instance that only returns error
class dummyPopen(object):
def __init__(self, e): self.exception = e
def communicate(self): return ('','')
def wait(self): return -self.exception.errno
stdin = None
class f(object):
def read(self): return ''
def readline(self): return ''
stdout = stderr = f()
return dummyPopen(e)
class CommandAction(_ActionAction):
"""Class for command-execution actions."""
def __init__(self, cmd, **kw):
# Cmd can actually be a list or a single item; if it's a
# single item it should be the command string to execute; if a
# list then it should be the words of the command string to
# execute. Only a single command should be executed by this
# object; lists of commands should be handled by embedding
# these objects in a ListAction object (which the Action()
# factory above does). cmd will be passed to
# Environment.subst_list() for substituting environment
# variables.
if __debug__: logInstanceCreation(self, 'Action.CommandAction')
_ActionAction.__init__(self, **kw)
if is_List(cmd):
if list(filter(is_List, cmd)):
raise TypeError("CommandAction should be given only " \
"a single command")
self.cmd_list = cmd
def __str__(self):
if is_List(self.cmd_list):
return ' '.join(map(str, self.cmd_list))
return str(self.cmd_list)
def process(self, target, source, env, executor=None):
if executor:
result = env.subst_list(self.cmd_list, 0, executor=executor)
else:
result = env.subst_list(self.cmd_list, 0, target, source)
silent = None
ignore = None
while True:
try: c = result[0][0][0]
except IndexError: c = None
if c == '@': silent = 1
elif c == '-': ignore = 1
else: break
result[0][0] = result[0][0][1:]
try:
if not result[0][0]:
result[0] = result[0][1:]
except IndexError:
pass
return result, ignore, silent
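    # For example (illustrative): a command of '@echo building' comes back
    # with silent=1, and '-rm -f junk' with ignore=1, mirroring the
    # make-style '@' (don't echo) and '-' (ignore errors) prefixes that
    # process() strips above.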
def strfunction(self, target, source, env, executor=None):
if self.cmdstr is None:
return None
if self.cmdstr is not _null:
from SCons.Subst import SUBST_RAW
if executor:
c = env.subst(self.cmdstr, SUBST_RAW, executor=executor)
else:
c = env.subst(self.cmdstr, SUBST_RAW, target, source)
if c:
return c
cmd_list, ignore, silent = self.process(target, source, env, executor)
if silent:
return ''
return _string_from_cmd_list(cmd_list[0])
def execute(self, target, source, env, executor=None):
"""Execute a command action.
This will handle lists of commands as well as individual commands,
because construction variable substitution may turn a single
"command" into a list. This means that this class can actually
handle lists of commands, even though that's not how we use it
externally.
"""
escape_list = SCons.Subst.escape_list
flatten_sequence = SCons.Util.flatten_sequence
try:
shell = env['SHELL']
except KeyError:
raise SCons.Errors.UserError('Missing SHELL construction variable.')
try:
spawn = env['SPAWN']
except KeyError:
raise SCons.Errors.UserError('Missing SPAWN construction variable.')
else:
if is_String(spawn):
spawn = env.subst(spawn, raw=1, conv=lambda x: x)
escape = env.get('ESCAPE', lambda x: x)
ENV = get_default_ENV(env)
# Ensure that the ENV values are all strings:
for key, value in ENV.items():
if not is_String(value):
if is_List(value):
# If the value is a list, then we assume it is a
# path list, because that's a pretty common list-like
# value to stick in an environment variable:
value = flatten_sequence(value)
ENV[key] = os.pathsep.join(map(str, value))
else:
# If it isn't a string or a list, then we just coerce
# it to a string, which is the proper way to handle
# Dir and File instances and will produce something
# reasonable for just about everything else:
ENV[key] = str(value)
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
cmd_list, ignore, silent = self.process(target, list(map(rfile, source)), env, executor)
# Use len() to filter out any "command" that's zero-length.
for cmd_line in filter(len, cmd_list):
# Escape the command line for the interpreter we are using.
cmd_line = escape_list(cmd_line, escape)
result = spawn(shell, escape, cmd_line[0], cmd_line, ENV)
if not ignore and result:
msg = "Error %s" % result
return SCons.Errors.BuildError(errstr=msg,
status=result,
action=self,
command=cmd_line)
return 0
def get_presig(self, target, source, env, executor=None):
"""Return the signature contents of this action's command line.
        This strips $( and $) and everything in between from the string,
        since those parts don't affect signatures.
"""
from SCons.Subst import SUBST_SIG
cmd = self.cmd_list
if is_List(cmd):
cmd = ' '.join(map(str, cmd))
else:
cmd = str(cmd)
if executor:
return env.subst_target_source(cmd, SUBST_SIG, executor=executor)
else:
return env.subst_target_source(cmd, SUBST_SIG, target, source)
def get_implicit_deps(self, target, source, env, executor=None):
icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True)
if is_String(icd) and icd[:1] == '$':
icd = env.subst(icd)
if not icd or icd in ('0', 'None'):
return []
from SCons.Subst import SUBST_SIG
if executor:
cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor)
else:
cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source)
res = []
for cmd_line in cmd_list:
if cmd_line:
d = str(cmd_line[0])
m = strip_quotes.match(d)
if m:
d = m.group(1)
d = env.WhereIs(d)
if d:
res.append(env.fs.File(d))
return res
class CommandGeneratorAction(ActionBase):
"""Class for command-generator actions."""
def __init__(self, generator, kw):
if __debug__: logInstanceCreation(self, 'Action.CommandGeneratorAction')
self.generator = generator
self.gen_kw = kw
self.varlist = kw.get('varlist', ())
self.targets = kw.get('targets', '$TARGETS')
def _generate(self, target, source, env, for_signature, executor=None):
# ensure that target is a list, to make it easier to write
# generator functions:
if not is_List(target):
target = [target]
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
ret = self.generator(target=target,
source=source,
env=env,
for_signature=for_signature)
gen_cmd = Action(ret, **self.gen_kw)
if not gen_cmd:
raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret))
return gen_cmd
def __str__(self):
try:
env = self.presub_env
except AttributeError:
env = None
if env is None:
env = SCons.Defaults.DefaultEnvironment()
act = self._generate([], [], env, 1)
return str(act)
def batch_key(self, env, target, source):
return self._generate(target, source, env, 1).batch_key(env, target, source)
def genstring(self, target, source, env, executor=None):
return self._generate(target, source, env, 1, executor).genstring(target, source, env)
def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
show=_null, execute=_null, chdir=_null, executor=None):
act = self._generate(target, source, env, 0, executor)
if act is None:
raise SCons.Errors.UserError("While building `%s': "
"Cannot deduce file extension from source files: %s"
% (repr(list(map(str, target))), repr(list(map(str, source)))))
return act(target, source, env, exitstatfunc, presub,
show, execute, chdir, executor)
def get_presig(self, target, source, env, executor=None):
"""Return the signature contents of this action's command line.
        This strips $( and $) and everything in between from the string,
        since those parts don't affect signatures.
"""
return self._generate(target, source, env, 1, executor).get_presig(target, source, env)
def get_implicit_deps(self, target, source, env, executor=None):
return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env)
def get_varlist(self, target, source, env, executor=None):
return self._generate(target, source, env, 1, executor).get_varlist(target, source, env, executor)
def get_targets(self, env, executor):
return self._generate(None, None, env, 1, executor).get_targets(env, executor)
# A LazyAction is a kind of hybrid generator and command action for
# strings of the form "$VAR". These strings normally expand to other
# strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also
# want to be able to replace them with functions in the construction
# environment. Consequently, we want lazy evaluation and creation of
# an Action in the case of the function, but that's overkill in the more
# normal case of expansion to other strings.
#
# So we do this with a subclass that's both a generator *and*
# a command action. The overridden methods all do a quick check
# of the construction variable, and if it's a string we just call
# the corresponding CommandAction method to do the heavy lifting.
# If not, then we call the same-named CommandGeneratorAction method.
# The CommandGeneratorAction methods work by using the overridden
# _generate() method, that is, our own way of handling "generation" of
# an action based on what's in the construction variable.
class LazyAction(CommandGeneratorAction, CommandAction):
def __init__(self, var, kw):
if __debug__: logInstanceCreation(self, 'Action.LazyAction')
#FUTURE CommandAction.__init__(self, '${'+var+'}', **kw)
CommandAction.__init__(self, '${'+var+'}', **kw)
self.var = SCons.Util.to_String(var)
self.gen_kw = kw
def get_parent_class(self, env):
c = env.get(self.var)
        if is_String(c) and '\n' not in c:
return CommandAction
return CommandGeneratorAction
def _generate_cache(self, env):
if env:
c = env.get(self.var, '')
else:
c = ''
gen_cmd = Action(c, **self.gen_kw)
if not gen_cmd:
raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c)))
return gen_cmd
def _generate(self, target, source, env, for_signature, executor=None):
return self._generate_cache(env)
def __call__(self, target, source, env, *args, **kw):
c = self.get_parent_class(env)
return c.__call__(self, target, source, env, *args, **kw)
def get_presig(self, target, source, env):
c = self.get_parent_class(env)
return c.get_presig(self, target, source, env)
def get_varlist(self, target, source, env, executor=None):
c = self.get_parent_class(env)
return c.get_varlist(self, target, source, env, executor)
class FunctionAction(_ActionAction):
"""Class for Python function actions."""
def __init__(self, execfunction, kw):
if __debug__: logInstanceCreation(self, 'Action.FunctionAction')
self.execfunction = execfunction
try:
self.funccontents = _callable_contents(execfunction)
except AttributeError:
try:
# See if execfunction will do the heavy lifting for us.
self.gc = execfunction.get_contents
except AttributeError:
# This is weird, just do the best we can.
self.funccontents = _object_contents(execfunction)
_ActionAction.__init__(self, **kw)
def function_name(self):
try:
return self.execfunction.__name__
except AttributeError:
try:
return self.execfunction.__class__.__name__
except AttributeError:
return "unknown_python_function"
def strfunction(self, target, source, env, executor=None):
if self.cmdstr is None:
return None
if self.cmdstr is not _null:
from SCons.Subst import SUBST_RAW
if executor:
c = env.subst(self.cmdstr, SUBST_RAW, executor=executor)
else:
c = env.subst(self.cmdstr, SUBST_RAW, target, source)
if c:
return c
def array(a):
def quote(s):
try:
str_for_display = s.str_for_display
except AttributeError:
s = repr(s)
else:
s = str_for_display()
return s
return '[' + ", ".join(map(quote, a)) + ']'
try:
strfunc = self.execfunction.strfunction
except AttributeError:
pass
else:
if strfunc is None:
return None
if callable(strfunc):
return strfunc(target, source, env)
name = self.function_name()
tstr = array(target)
sstr = array(source)
return "%s(%s, %s)" % (name, tstr, sstr)
def __str__(self):
name = self.function_name()
if name == 'ActionCaller':
return str(self.execfunction)
return "%s(target, source, env)" % name
def execute(self, target, source, env, executor=None):
exc_info = (None,None,None)
try:
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
rsources = list(map(rfile, source))
try:
result = self.execfunction(target=target, source=rsources, env=env)
            except KeyboardInterrupt:
                raise
            except SystemExit:
                raise
            except Exception as e:
result = e
exc_info = sys.exc_info()
if result:
result = SCons.Errors.convert_to_BuildError(result, exc_info)
result.node=target
result.action=self
try:
result.command=self.strfunction(target, source, env, executor)
except TypeError:
result.command=self.strfunction(target, source, env)
# FIXME: This maintains backward compatibility with respect to
# which type of exceptions were returned by raising an
# exception and which ones were returned by value. It would
# probably be best to always return them by value here, but
            # some code does not check the return value of Actions and I do
            # not have the time to modify it at this point.
if (exc_info[1] and
not isinstance(exc_info[1],EnvironmentError)):
raise result
return result
finally:
# Break the cycle between the traceback object and this
# function stack frame. See the sys.exc_info() doc info for
# more information about this issue.
del exc_info
def get_presig(self, target, source, env):
"""Return the signature contents of this callable action."""
try:
return self.gc(target, source, env)
except AttributeError:
return self.funccontents
def get_implicit_deps(self, target, source, env):
return []
class ListAction(ActionBase):
"""Class for lists of other actions."""
def __init__(self, actionlist):
if __debug__: logInstanceCreation(self, 'Action.ListAction')
def list_of_actions(x):
if isinstance(x, ActionBase):
return x
return Action(x)
self.list = list(map(list_of_actions, actionlist))
# our children will have had any varlist
# applied; we don't need to do it again
self.varlist = ()
self.targets = '$TARGETS'
def genstring(self, target, source, env):
return '\n'.join([a.genstring(target, source, env) for a in self.list])
def __str__(self):
return '\n'.join(map(str, self.list))
def presub_lines(self, env):
return SCons.Util.flatten_sequence(
[a.presub_lines(env) for a in self.list])
def get_presig(self, target, source, env):
"""Return the signature contents of this action list.
Simple concatenation of the signatures of the elements.
"""
return "".join([x.get_contents(target, source, env) for x in self.list])
def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
show=_null, execute=_null, chdir=_null, executor=None):
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
for act in self.list:
stat = act(target, source, env, exitstatfunc, presub,
show, execute, chdir, executor)
if stat:
return stat
return 0
def get_implicit_deps(self, target, source, env):
result = []
for act in self.list:
result.extend(act.get_implicit_deps(target, source, env))
return result
def get_varlist(self, target, source, env, executor=None):
result = SCons.Util.OrderedDict()
for act in self.list:
for var in act.get_varlist(target, source, env, executor):
result[var] = True
return list(result.keys())
class ActionCaller(object):
"""A class for delaying calling an Action function with specific
(positional and keyword) arguments until the Action is actually
executed.
This class looks to the rest of the world like a normal Action object,
but what it's really doing is hanging on to the arguments until we
have a target, source and env to use for the expansion.
"""
def __init__(self, parent, args, kw):
self.parent = parent
self.args = args
self.kw = kw
def get_contents(self, target, source, env):
actfunc = self.parent.actfunc
try:
# "self.actfunc" is a function.
contents = str(actfunc.func_code.co_code)
except AttributeError:
# "self.actfunc" is a callable object.
try:
contents = str(actfunc.__call__.im_func.func_code.co_code)
except AttributeError:
# No __call__() method, so it might be a builtin
# or something like that. Do the best we can.
contents = str(actfunc)
contents = remove_set_lineno_codes(contents)
return contents
def subst(self, s, target, source, env):
# If s is a list, recursively apply subst()
# to every element in the list
if is_List(s):
result = []
for elem in s:
result.append(self.subst(elem, target, source, env))
return self.parent.convert(result)
# Special-case hack: Let a custom function wrapped in an
# ActionCaller get at the environment through which the action
# was called by using this hard-coded value as a special return.
if s == '$__env__':
return env
elif is_String(s):
return env.subst(s, 1, target, source)
return self.parent.convert(s)
def subst_args(self, target, source, env):
return [self.subst(x, target, source, env) for x in self.args]
def subst_kw(self, target, source, env):
kw = {}
for key in self.kw.keys():
kw[key] = self.subst(self.kw[key], target, source, env)
return kw
def __call__(self, target, source, env, executor=None):
args = self.subst_args(target, source, env)
kw = self.subst_kw(target, source, env)
return self.parent.actfunc(*args, **kw)
def strfunction(self, target, source, env):
args = self.subst_args(target, source, env)
kw = self.subst_kw(target, source, env)
return self.parent.strfunc(*args, **kw)
def __str__(self):
return self.parent.strfunc(*self.args, **self.kw)
class ActionFactory(object):
"""A factory class that will wrap up an arbitrary function
as an SCons-executable Action object.
The real heavy lifting here is done by the ActionCaller class.
We just collect the (positional and keyword) arguments that we're
called with and give them to the ActionCaller object we create,
so it can hang onto them until it needs them.
"""
def __init__(self, actfunc, strfunc, convert=lambda x: x):
self.actfunc = actfunc
self.strfunc = strfunc
self.convert = convert
def __call__(self, *args, **kw):
ac = ActionCaller(self, args, kw)
action = Action(ac, strfunction=ac.strfunction)
return action
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
gpl-3.0
|
kamcpp/tensorflow
|
tensorflow/contrib/learn/python/learn/estimators/dynamic_rnn_estimator_test.py
|
5
|
20218
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for learn.estimators.dynamic_rnn_estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.estimators import dynamic_rnn_estimator
from tensorflow.python.ops import rnn_cell
class IdentityRNNCell(tf.nn.rnn_cell.RNNCell):
def __init__(self, state_size, output_size):
self._state_size = state_size
self._output_size = output_size
@property
def state_size(self):
return self._state_size
@property
def output_size(self):
return self._output_size
def __call__(self, inputs, state):
return tf.identity(inputs), tf.identity(state)
class MockTargetColumn(object):
def __init__(self, num_label_columns=None):
self._num_label_columns = num_label_columns
def get_eval_ops(self, features, activations, targets, metrics):
raise NotImplementedError(
'MockTargetColumn.get_eval_ops called unexpectedly.')
def logits_to_predictions(self, flattened_activations, proba=False):
raise NotImplementedError(
'MockTargetColumn.logits_to_predictions called unexpectedly.')
def loss(self, activations, targets, features):
raise NotImplementedError('MockTargetColumn.loss called unexpectedly.')
@property
def num_label_columns(self):
if self._num_label_columns is None:
raise ValueError('MockTargetColumn.num_label_columns has not been set.')
return self._num_label_columns
def set_num_label_columns(self, n):
self._num_label_columns = n
class MockOptimizer(object):
def compute_gradients(self, loss, var_list):
raise NotImplementedError(
'MockOptimizer.compute_gradients called unexpectedly.')
def apply_gradients(self, processed_gradients, global_step):
raise NotImplementedError(
'MockOptimizer.apply_gradients called unexpectedly.')
def sequence_length_mask(values, lengths):
masked = values
for i, length in enumerate(lengths):
masked[i, length:, :] = np.zeros_like(masked[i, length:, :])
return masked
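# Illustrative use (note this helper mutates `values` in place): with values
# of shape [2, 3, 1] and lengths [1, 2], every entry past a row's length is
# zeroed, e.g. sequence_length_mask(np.ones([2, 3, 1]), [1, 2])[0, 1:] == 0.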
class DynamicRnnEstimatorTest(tf.test.TestCase):
NUM_RNN_CELL_UNITS = 8
NUM_LABEL_COLUMNS = 4
def setUp(self):
self._rnn_cell = rnn_cell.BasicRNNCell(self.NUM_RNN_CELL_UNITS)
self._mock_target_column = MockTargetColumn(
num_label_columns=self.NUM_LABEL_COLUMNS)
location = tf.contrib.layers.sparse_column_with_keys(
'location', keys=['west_side', 'east_side', 'nyc'])
location_onehot = tf.contrib.layers.one_hot_column(location)
context_features = [location_onehot]
wire_cast = tf.contrib.layers.sparse_column_with_keys(
'wire_cast', ['marlo', 'omar', 'stringer'])
wire_cast_embedded = tf.contrib.layers.embedding_column(
wire_cast, dimension=8)
measurements = tf.contrib.layers.real_valued_column(
'measurements', dimension=2)
sequence_features = [measurements, wire_cast_embedded]
self._rnn_estimator = dynamic_rnn_estimator._MultiValueRNNEstimator(
cell=self._rnn_cell,
sequence_feature_columns=sequence_features,
context_feature_columns=context_features,
target_column=self._mock_target_column,
optimizer=tf.train.GradientDescentOptimizer(0.1))
self._columns_to_tensors = {
'location': tf.SparseTensor(
indices=[[0, 0], [1, 0], [2, 0]],
values=['west_side', 'west_side', 'nyc'],
shape=[3, 1]),
'wire_cast': tf.SparseTensor(
indices=[[0, 0, 0], [0, 1, 0],
[1, 0, 0], [1, 1, 0], [1, 1, 1],
[2, 0, 0]],
values=[b'marlo', b'stringer',
b'omar', b'stringer', b'marlo',
b'marlo'],
shape=[3, 2, 2]),
'measurements': tf.random_uniform([3, 2, 2])}
def testGetModelInput(self):
initial_state, sequence_input = self._rnn_estimator._get_model_input(
self._columns_to_tensors)
self.assertIsNone(initial_state)
with self.test_session() as sess:
sess.run(tf.initialize_all_variables())
sess.run(tf.initialize_all_tables())
sequence_input_val = sess.run(sequence_input)
expected_shape = np.array([
3, # expected batch size
2, # padded sequence length
3 + 8 + 2 # location keys + embedding dim + measurement dimension
])
self.assertAllEqual(expected_shape, sequence_input_val.shape)
def testConstructRNN(self):
"""Test `DynamicRNNEstimator._construct_rnn`."""
initial_state, sequence_input = self._rnn_estimator._get_model_input(
self._columns_to_tensors)
activations_t, final_state_t = self._rnn_estimator._construct_rnn(
initial_state, sequence_input)
# Obtain values of activations and final state.
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
sess.run(tf.initialize_all_tables())
activations, final_state = sess.run([activations_t, final_state_t])
expected_activations_shape = np.array([3, 2, self.NUM_LABEL_COLUMNS])
self.assertAllEqual(expected_activations_shape, activations.shape)
expected_state_shape = np.array([3, self.NUM_RNN_CELL_UNITS])
self.assertAllEqual(expected_state_shape, final_state.shape)
class MultiValueRNNEstimatorTest(tf.test.TestCase):
"""Tests for `_MultiValueRNNEstimator` class."""
CELL_STATE_SIZE = 8
CELL_OUTPUT_SIZE = 6
INPUTS_COLUMN = tf.contrib.layers.real_valued_column(
'inputs', dimension=CELL_OUTPUT_SIZE)
def setUp(self):
self._rnn_cell = IdentityRNNCell(self.CELL_STATE_SIZE,
self.CELL_OUTPUT_SIZE)
self._mock_target_column = MockTargetColumn()
self._seq_estimator = dynamic_rnn_estimator._MultiValueRNNEstimator(
cell=self._rnn_cell,
sequence_feature_columns=[self.INPUTS_COLUMN],
target_column=self._mock_target_column,
optimizer=tf.train.GradientDescentOptimizer(0.1))
def testPaddingMask(self):
"""Test `_padding_mask`."""
batch_size = 16
padded_length = 32
np.random.seed(1234)
sequence_lengths = np.random.randint(0, padded_length + 1, batch_size)
padding_mask_t = dynamic_rnn_estimator._padding_mask(
tf.constant(sequence_lengths, dtype=tf.int32),
tf.constant(padded_length, dtype=tf.int32))
with tf.Session() as sess:
padding_mask = sess.run(padding_mask_t)
for i in range(batch_size):
actual_mask = padding_mask[i]
expected_mask = np.concatenate(
[np.ones(sequence_lengths[i]),
np.zeros(padded_length - sequence_lengths[i])],
axis=0)
np.testing.assert_equal(actual_mask, expected_mask,
'Mismatch on row {}. Got {}; expected {}.'.format(
i, actual_mask, expected_mask))
def testMaskActivationsAndTargets(self):
"""Test `_mask_activations_and_targets`."""
batch_size = 4
padded_length = 6
num_classes = 4
np.random.seed(1234)
sequence_length = np.random.randint(0, padded_length + 1, batch_size)
activations = np.random.rand(batch_size, padded_length, num_classes)
targets = np.random.randint(0, num_classes, [batch_size, padded_length])
(activations_masked_t,
targets_masked_t) = dynamic_rnn_estimator._mask_activations_and_targets(
tf.constant(
activations, dtype=tf.float32),
tf.constant(
targets, dtype=tf.int32),
tf.constant(
sequence_length, dtype=tf.int32))
with tf.Session() as sess:
activations_masked, targets_masked = sess.run(
[activations_masked_t, targets_masked_t])
expected_activations_shape = [sum(sequence_length), num_classes]
np.testing.assert_equal(
expected_activations_shape, activations_masked.shape,
'Wrong activations shape. Expected {}; got {}.'.format(
expected_activations_shape, activations_masked.shape))
expected_targets_shape = [sum(sequence_length)]
np.testing.assert_equal(expected_targets_shape, targets_masked.shape,
'Wrong targets shape. Expected {}; got {}.'.format(
expected_targets_shape, targets_masked.shape))
masked_index = 0
for i in range(batch_size):
for j in range(sequence_length[i]):
actual_activations = activations_masked[masked_index]
expected_activations = activations[i, j, :]
np.testing.assert_almost_equal(
expected_activations,
actual_activations,
err_msg='Unexpected logit value at index [{}, {}, :].'
' Expected {}; got {}.'.format(i, j, expected_activations,
actual_activations))
actual_targets = targets_masked[masked_index]
expected_targets = targets[i, j]
np.testing.assert_almost_equal(
expected_targets,
actual_targets,
err_msg='Unexpected logit value at index [{}, {}].'
' Expected {}; got {}.'.format(i, j, expected_targets,
actual_targets))
masked_index += 1
def testActivationsToPredictions(self):
"""Test `DynamicRNNEstimator._activations_to_predictions`."""
batch_size = 8
sequence_length = 16
num_classes = 3
np.random.seed(10101)
activations = np.random.rand(batch_size, sequence_length, num_classes)
flattened_activations = np.reshape(activations, [-1, num_classes])
flattened_argmax = np.argmax(flattened_activations, axis=1)
expected_predictions = np.argmax(activations, axis=2)
with tf.test.mock.patch.object(
self._mock_target_column,
'logits_to_predictions',
return_value=flattened_argmax,
autospec=True) as mock_logits_to_predictions:
predictions_t = self._seq_estimator._activations_to_predictions(
None, tf.constant(activations, dtype=tf.float32))
(target_column_input_activations_t,
), _ = mock_logits_to_predictions.call_args
with tf.Session() as sess:
target_column_input_activations, predictions = sess.run(
[target_column_input_activations_t, predictions_t])
np.testing.assert_almost_equal(flattened_activations,
target_column_input_activations)
np.testing.assert_equal(expected_predictions, predictions)
def testLearnSineFunction(self):
"""Tests that `_MultiValueRNNEstimator` can learn a sine function."""
batch_size = 8
sequence_length = 64
train_steps = 200
eval_steps = 20
cell_size = 4
learning_rate = 0.1
loss_threshold = 0.02
def get_sin_input_fn(batch_size, sequence_length, increment, seed=None):
def _sin_fn(x):
ranger = tf.linspace(
tf.reshape(x[0], []),
(sequence_length - 1) * increment, sequence_length + 1)
return tf.sin(ranger)
def input_fn():
starts = tf.random_uniform([batch_size], maxval=(2 * np.pi), seed=seed)
sin_curves = tf.map_fn(_sin_fn, (starts,), dtype=tf.float32)
inputs = tf.expand_dims(
tf.slice(sin_curves, [0, 0], [batch_size, sequence_length]), 2)
labels = tf.slice(sin_curves, [0, 1], [batch_size, sequence_length])
return {'inputs': inputs}, labels
return input_fn
seq_columns = [tf.contrib.layers.real_valued_column(
'inputs', dimension=cell_size)]
config = tf.contrib.learn.RunConfig(tf_random_seed=1234)
sequence_estimator = dynamic_rnn_estimator.multi_value_rnn_regressor(
num_units=cell_size,
sequence_feature_columns=seq_columns,
learning_rate=learning_rate,
config=config)
train_input_fn = get_sin_input_fn(
batch_size, sequence_length, np.pi / 32, seed=1234)
eval_input_fn = get_sin_input_fn(
batch_size, sequence_length, np.pi / 32, seed=4321)
sequence_estimator.fit(input_fn=train_input_fn, steps=train_steps)
loss = sequence_estimator.evaluate(
input_fn=eval_input_fn, steps=eval_steps)['loss']
self.assertLess(loss, loss_threshold,
'Loss should be less than {}; got {}'.format(
loss_threshold, loss))
def testLearnShiftByOne(self):
"""Tests that `_MultiValueRNNEstimator` can learn a 'shift-by-one' example.
Each label sequence consists of the input sequence 'shifted' by one place.
The RNN must learn to 'remember' the previous input.
"""
batch_size = 16
sequence_length = 32
train_steps = 200
eval_steps = 20
cell_size = 4
learning_rate = 0.3
accuracy_threshold = 0.9
def get_shift_input_fn(batch_size, sequence_length, seed=None):
def input_fn():
random_sequence = tf.random_uniform(
[batch_size, sequence_length + 1], 0, 2, dtype=tf.int32, seed=seed)
labels = tf.slice(
random_sequence, [0, 0], [batch_size, sequence_length])
inputs = tf.expand_dims(
tf.to_float(tf.slice(
random_sequence, [0, 1], [batch_size, sequence_length])), 2)
return {'inputs': inputs}, labels
return input_fn
seq_columns = [tf.contrib.layers.real_valued_column(
'inputs', dimension=cell_size)]
config = tf.contrib.learn.RunConfig(tf_random_seed=21212)
sequence_estimator = dynamic_rnn_estimator.multi_value_rnn_classifier(
num_classes=2,
num_units=cell_size,
sequence_feature_columns=seq_columns,
learning_rate=learning_rate,
config=config)
train_input_fn = get_shift_input_fn(batch_size, sequence_length, seed=12321)
eval_input_fn = get_shift_input_fn(batch_size, sequence_length, seed=32123)
sequence_estimator.fit(input_fn=train_input_fn, steps=train_steps)
evaluation = sequence_estimator.evaluate(
input_fn=eval_input_fn, steps=eval_steps)
accuracy = evaluation['accuracy']
self.assertGreater(accuracy, accuracy_threshold,
'Accuracy should be higher than {}; got {}'.format(
accuracy_threshold, accuracy))
class SingleValueRNNEstimatorTest(tf.test.TestCase):
  def testSelectLastActivations(self):
"""Test `_select_last_activations`."""
batch_size = 4
padded_length = 6
num_classes = 4
np.random.seed(4444)
sequence_length = np.random.randint(0, padded_length + 1, batch_size)
activations = np.random.rand(batch_size, padded_length, num_classes)
last_activations_t = dynamic_rnn_estimator._select_last_activations(
tf.constant(activations, dtype=tf.float32),
tf.constant(sequence_length, dtype=tf.int32))
with tf.Session() as sess:
last_activations = sess.run(last_activations_t)
expected_activations_shape = [batch_size, num_classes]
np.testing.assert_equal(
expected_activations_shape, last_activations.shape,
'Wrong activations shape. Expected {}; got {}.'.format(
expected_activations_shape, last_activations.shape))
for i in range(batch_size):
actual_activations = last_activations[i, :]
expected_activations = activations[i, sequence_length[i] - 1, :]
np.testing.assert_almost_equal(
expected_activations,
actual_activations,
err_msg='Unexpected logit value at index [{}, :].'
' Expected {}; got {}.'.format(i, expected_activations,
actual_activations))
def testLearnMean(self):
"""Test that `_SequenceRegressor` can learn to calculate a mean."""
batch_size = 16
sequence_length = 3
train_steps = 200
eval_steps = 20
cell_type = 'basic_rnn'
cell_size = 8
optimizer_type = 'Momentum'
learning_rate = 0.5
momentum = 0.9
loss_threshold = 0.1
def get_mean_input_fn(batch_size, sequence_length, seed=None):
def input_fn():
# Create examples by choosing 'centers' and adding uniform noise.
centers = tf.matmul(
tf.random_uniform(
[batch_size, 1], -0.75, 0.75, dtype=tf.float32, seed=seed),
tf.ones([1, sequence_length]))
noise = tf.random_uniform(
[batch_size, sequence_length],
-0.25,
0.25,
dtype=tf.float32,
seed=seed)
sequences = centers + noise
inputs = tf.expand_dims(sequences, 2)
labels = tf.reduce_mean(sequences, reduction_indices=[1])
return {'inputs': inputs}, labels
return input_fn
seq_columns = [tf.contrib.layers.real_valued_column(
'inputs', dimension=cell_size)]
config = tf.contrib.learn.RunConfig(tf_random_seed=6)
sequence_regressor = dynamic_rnn_estimator.single_value_rnn_regressor(
num_units=cell_size,
sequence_feature_columns=seq_columns,
cell_type=cell_type,
optimizer_type=optimizer_type,
learning_rate=learning_rate,
momentum=momentum,
config=config)
train_input_fn = get_mean_input_fn(batch_size, sequence_length, 121)
eval_input_fn = get_mean_input_fn(batch_size, sequence_length, 212)
sequence_regressor.fit(input_fn=train_input_fn, steps=train_steps)
evaluation = sequence_regressor.evaluate(
input_fn=eval_input_fn, steps=eval_steps)
loss = evaluation['loss']
self.assertLess(loss, loss_threshold,
'Loss should be less than {}; got {}'.format(
loss_threshold, loss))
def testLearnMajority(self):
"""Test that `_SequenceClassifier` can learn the 'majority' function."""
batch_size = 16
sequence_length = 7
train_steps = 200
eval_steps = 20
cell_type = 'lstm'
cell_size = 4
optimizer_type = 'Momentum'
learning_rate = 2.0
momentum = 0.9
accuracy_threshold = 0.9
def get_majority_input_fn(batch_size, sequence_length, seed=None):
tf.set_random_seed(seed)
def input_fn():
random_sequence = tf.random_uniform(
[batch_size, sequence_length], 0, 2, dtype=tf.int32, seed=seed)
inputs = tf.expand_dims(tf.to_float(random_sequence), 2)
labels = tf.to_int32(
tf.squeeze(
tf.reduce_sum(
inputs, reduction_indices=[1]) > (sequence_length / 2.0)))
return {'inputs': inputs}, labels
return input_fn
seq_columns = [tf.contrib.layers.real_valued_column(
'inputs', dimension=cell_size)]
config = tf.contrib.learn.RunConfig(tf_random_seed=77)
sequence_classifier = dynamic_rnn_estimator.single_value_rnn_classifier(
num_classes=2,
num_units=cell_size,
sequence_feature_columns=seq_columns,
cell_type=cell_type,
optimizer_type=optimizer_type,
learning_rate=learning_rate,
momentum=momentum,
config=config)
train_input_fn = get_majority_input_fn(batch_size, sequence_length, 1111)
eval_input_fn = get_majority_input_fn(batch_size, sequence_length, 2222)
sequence_classifier.fit(input_fn=train_input_fn, steps=train_steps)
evaluation = sequence_classifier.evaluate(
input_fn=eval_input_fn, steps=eval_steps)
accuracy = evaluation['accuracy']
self.assertGreater(accuracy, accuracy_threshold,
'Accuracy should be higher than {}; got {}'.format(
accuracy_threshold, accuracy))
if __name__ == '__main__':
tf.test.main()
|
apache-2.0
|
nvoron23/socialite
|
jython/Lib/email/parser.py
|
392
|
3300
|
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw, Thomas Wouters, Anthony Baxter
# Contact: [email protected]
"""A parser of RFC 2822 and MIME email messages."""
__all__ = ['Parser', 'HeaderParser']
import warnings
from cStringIO import StringIO
from email.feedparser import FeedParser
from email.message import Message
class Parser:
def __init__(self, *args, **kws):
"""Parser of RFC 2822 and MIME email messages.
Creates an in-memory object tree representing the email message, which
can then be manipulated and turned over to a Generator to return the
textual representation of the message.
The string must be formatted as a block of RFC 2822 headers and header
        continuation lines, optionally preceded by a `Unix-from' header.  The
header block is terminated either by the end of the string or by a
blank line.
_class is the class to instantiate for new message objects when they
must be created. This class must have a constructor that can take
        zero arguments.  Default is email.message.Message.
"""
if len(args) >= 1:
if '_class' in kws:
raise TypeError("Multiple values for keyword arg '_class'")
kws['_class'] = args[0]
if len(args) == 2:
if 'strict' in kws:
raise TypeError("Multiple values for keyword arg 'strict'")
kws['strict'] = args[1]
if len(args) > 2:
raise TypeError('Too many arguments')
if '_class' in kws:
self._class = kws['_class']
del kws['_class']
else:
self._class = Message
if 'strict' in kws:
warnings.warn("'strict' argument is deprecated (and ignored)",
DeprecationWarning, 2)
del kws['strict']
if kws:
raise TypeError('Unexpected keyword arguments')
def parse(self, fp, headersonly=False):
"""Create a message structure from the data in a file.
Reads all the data from the file and returns the root of the message
structure. Optional headersonly is a flag specifying whether to stop
parsing after reading the headers or not. The default is False,
meaning it parses the entire contents of the file.
"""
feedparser = FeedParser(self._class)
if headersonly:
feedparser._set_headersonly()
while True:
data = fp.read(8192)
if not data:
break
feedparser.feed(data)
return feedparser.close()
def parsestr(self, text, headersonly=False):
"""Create a message structure from a string.
Returns the root of the message structure. Optional headersonly is a
flag specifying whether to stop parsing after reading the headers or
not. The default is False, meaning it parses the entire contents of
the file.
"""
return self.parse(StringIO(text), headersonly=headersonly)
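# A short usage sketch (illustrative):
#
#     msg = Parser().parsestr('Subject: greetings\n\nhello world\n')
#     msg['subject']   # -> 'greetings'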
class HeaderParser(Parser):
def parse(self, fp, headersonly=True):
return Parser.parse(self, fp, True)
def parsestr(self, text, headersonly=True):
return Parser.parsestr(self, text, True)
|
apache-2.0
|
charactory/namcap
|
Namcap/capsnamespkg.py
|
2
|
1271
|
#
# namcap rules - capsnamespkg
# Copyright (C) 2003-2007 Jason Chu <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import pacman,re
class package:
def short_name(self):
return "capsnamespkg"
def long_name(self):
return "Verifies package name in package does not include upper case letters"
def prereq(self):
return "pkg"
def analyze(self, pkginfo, tar):
ret = [[],[],[]]
		if re.search('[A-Z]', pkginfo.name) is not None:
ret[0].append('No upper case letters in package names')
return ret
def type(self):
return "tarball"
# vim: set ts=4 sw=4 noet:
|
gpl-2.0
|
openstack/automaton
|
automaton/runners.py
|
1
|
7425
|
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from automaton import exceptions as excp
from automaton import machines
_JUMPER_NOT_FOUND_TPL = ("Unable to progress since no reaction (or"
" sent event) has been made available in"
" new state '%s' (moved to from state '%s'"
" in response to event '%s')")
class Runner(metaclass=abc.ABCMeta):
"""Machine runner used to run a state machine.
Only **one** runner per machine should be active at the same time (aka
there should not be multiple runners using the same machine instance at
the same time).
"""
def __init__(self, machine):
self._machine = machine
@abc.abstractmethod
def run(self, event, initialize=True):
"""Runs the state machine, using reactions only."""
@abc.abstractmethod
def run_iter(self, event, initialize=True):
"""Returns a iterator/generator that will run the state machine.
NOTE(harlowja): only one runner iterator/generator should be active for
a machine, if this is not observed then it is possible for
initialization and other local state to be corrupted and cause issues
when running...
"""
class FiniteRunner(Runner):
"""Finite machine runner used to run a finite machine.
Only **one** runner per machine should be active at the same time (aka
there should not be multiple runners using the same machine instance at
the same time).
"""
def __init__(self, machine):
"""Create a runner for the given machine."""
if not isinstance(machine, (machines.FiniteMachine,)):
raise TypeError("FiniteRunner only works with FiniteMachine(s)")
super(FiniteRunner, self).__init__(machine)
def run(self, event, initialize=True):
for transition in self.run_iter(event, initialize=initialize):
pass
def run_iter(self, event, initialize=True):
if initialize:
self._machine.initialize()
while True:
old_state = self._machine.current_state
reaction, terminal = self._machine.process_event(event)
new_state = self._machine.current_state
try:
sent_event = yield (old_state, new_state)
except GeneratorExit:
break
if terminal:
break
if reaction is None and sent_event is None:
raise excp.NotFound(_JUMPER_NOT_FOUND_TPL % (new_state,
old_state,
event))
elif sent_event is not None:
event = sent_event
else:
cb, args, kwargs = reaction
event = cb(old_state, new_state, event, *args, **kwargs)
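# A minimal usage sketch (illustrative; assumes a FiniteMachine `m` whose
# reactions eventually drive it to a terminal state):
#
#     runner = FiniteRunner(m)
#     for old_state, new_state in runner.run_iter('begin'):
#         print("%s -> %s" % (old_state, new_state))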
class HierarchicalRunner(Runner):
"""Hierarchical machine runner used to run a hierarchical machine.
Only **one** runner per machine should be active at the same time (aka
there should not be multiple runners using the same machine instance at
the same time).
"""
def __init__(self, machine):
"""Create a runner for the given machine."""
if not isinstance(machine, (machines.HierarchicalFiniteMachine,)):
raise TypeError("HierarchicalRunner only works with"
" HierarchicalFiniteMachine(s)")
super(HierarchicalRunner, self).__init__(machine)
def run(self, event, initialize=True):
for transition in self.run_iter(event, initialize=initialize):
pass
@staticmethod
def _process_event(machines, event):
"""Matches a event to the machine hierarchy.
If the lowest level machine does not handle the event, then the
parent machine is referred to and so on, until there is only one
machine left which *must* handle the event.
The machine whose ``process_event`` does not throw invalid state or
not found exceptions is expected to be the machine that should
continue handling events...
"""
while True:
machine = machines[-1]
try:
result = machine.process_event(event)
except (excp.InvalidState, excp.NotFound):
if len(machines) == 1:
raise
else:
current = machine._current
if current is not None and current.on_exit is not None:
current.on_exit(current.name, event)
machine._current = None
machines.pop()
else:
return result
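    # E.g. (illustrative): with an active stack [root, child], an event the
    # child raises InvalidState/NotFound for pops the child (invoking its
    # on_exit callback) and is retried on root; the last remaining machine
    # must handle it or the exception propagates.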
def run_iter(self, event, initialize=True):
"""Returns a iterator/generator that will run the state machine.
This will keep a stack (hierarchy) of machines active and jumps through
them as needed (depending on which machine handles which event) during
the running lifecycle.
        NOTE(harlowja): only one runner iterator/generator should be active
        for a machine hierarchy; if this is not observed then it is possible
        for initialization and other local state to be corrupted, causing
        issues when running...
"""
machines = [self._machine]
if initialize:
machines[-1].initialize()
while True:
old_state = machines[-1].current_state
effect = self._process_event(machines, event)
new_state = machines[-1].current_state
try:
machine = effect.machine
except AttributeError:
pass
else:
if machine is not None and machine is not machines[-1]:
machine.initialize()
machines.append(machine)
try:
sent_event = yield (old_state, new_state)
except GeneratorExit:
break
if len(machines) == 1 and effect.terminal:
# Only allow the top level machine to actually terminate the
# execution, the rest of the nested machines must not handle
# events if they wish to have the root machine terminate...
break
if effect.reaction is None and sent_event is None:
raise excp.NotFound(_JUMPER_NOT_FOUND_TPL % (new_state,
old_state,
event))
elif sent_event is not None:
event = sent_event
else:
cb, args, kwargs = effect.reaction
event = cb(old_state, new_state, event, *args, **kwargs)
|
apache-2.0
|
dancingdan/tensorflow
|
tensorflow/contrib/distributions/python/ops/inverse_gamma.py
|
13
|
11091
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The InverseGamma distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
__all__ = [
"InverseGamma",
"InverseGammaWithSoftplusConcentrationRate",
]
class InverseGamma(distribution.Distribution):
"""InverseGamma distribution.
The `InverseGamma` distribution is defined over positive real numbers using
parameters `concentration` (aka "alpha") and `rate` (aka "beta").
#### Mathematical Details
The probability density function (pdf) is,
```none
pdf(x; alpha, beta, x > 0) = x**(-alpha - 1) exp(-beta / x) / Z
Z = Gamma(alpha) beta**-alpha
```
where:
* `concentration = alpha`,
* `rate = beta`,
* `Z` is the normalizing constant, and,
* `Gamma` is the [gamma function](
https://en.wikipedia.org/wiki/Gamma_function).
  The cumulative distribution function (cdf) is,
```none
cdf(x; alpha, beta, x > 0) = GammaInc(alpha, beta / x) / Gamma(alpha)
```
where `GammaInc` is the [upper incomplete Gamma function](
https://en.wikipedia.org/wiki/Incomplete_gamma_function).
  The parameters can be intuited via their relationship to the distribution's
  mean and stddev (both defined when `concentration > 2`),
  ```none
  concentration = alpha = (mean / stddev)**2 + 2
  rate = beta = mean * ((mean / stddev)**2 + 1)
  ```
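  For example (a quick check of the relationship above): a target mean of
  2.0 and stddev of 1.0 gives `concentration = (2.0 / 1.0)**2 + 2 = 6.0` and
  `rate = 2.0 * ((2.0 / 1.0)**2 + 1) = 10.0`; indeed
  `rate / (concentration - 1) = 10.0 / 5.0 = 2.0` recovers the mean.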
Distribution parameters are automatically broadcast in all functions; see
examples for details.
WARNING: This distribution may draw 0-valued samples for small concentration
values. See note in `tf.random_gamma` docstring.
#### Examples
```python
import tensorflow_probability as tfp
tfd = tfp.distributions
dist = tfd.InverseGamma(concentration=3.0, rate=2.0)
dist2 = tfd.InverseGamma(concentration=[3.0, 4.0], rate=[2.0, 3.0])
```
"""
@deprecation.deprecated(
"2018-10-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). You "
"should update all references to use `tfp.distributions` "
"instead of `tf.contrib.distributions`.",
warn_once=True)
def __init__(self,
concentration,
rate,
validate_args=False,
allow_nan_stats=True,
name="InverseGamma"):
"""Construct InverseGamma with `concentration` and `rate` parameters.
The parameters `concentration` and `rate` must be shaped in a way that
supports broadcasting (e.g. `concentration + rate` is a valid operation).
Args:
concentration: Floating point tensor, the concentration params of the
distribution(s). Must contain only positive values.
rate: Floating point tensor, the inverse scale params of the
distribution(s). Must contain only positive values.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Raises:
TypeError: if `concentration` and `rate` are different dtypes.
"""
parameters = dict(locals())
with ops.name_scope(name, values=[concentration, rate]) as name:
with ops.control_dependencies([
check_ops.assert_positive(concentration),
check_ops.assert_positive(rate),
] if validate_args else []):
self._concentration = array_ops.identity(
concentration, name="concentration")
self._rate = array_ops.identity(rate, name="rate")
check_ops.assert_same_float_dtype(
[self._concentration, self._rate])
super(InverseGamma, self).__init__(
dtype=self._concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration,
self._rate],
name=name)
@staticmethod
def _param_shapes(sample_shape):
return dict(
zip(("concentration", "rate"), ([ops.convert_to_tensor(
sample_shape, dtype=dtypes.int32)] * 2)))
@property
def concentration(self):
"""Concentration parameter."""
return self._concentration
@property
def rate(self):
"""Rate parameter."""
return self._rate
def _batch_shape_tensor(self):
return array_ops.broadcast_dynamic_shape(
array_ops.shape(self.concentration),
array_ops.shape(self.rate))
def _batch_shape(self):
return array_ops.broadcast_static_shape(
self.concentration.get_shape(),
self.rate.get_shape())
def _event_shape_tensor(self):
return constant_op.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
@distribution_util.AppendDocstring(
"""Note: See `tf.random_gamma` docstring for sampling details and
caveats.""")
def _sample_n(self, n, seed=None):
return 1. / random_ops.random_gamma(
shape=[n],
alpha=self.concentration,
beta=self.rate,
dtype=self.dtype,
seed=seed)
def _log_prob(self, x):
return self._log_unnormalized_prob(x) - self._log_normalization()
def _cdf(self, x):
x = self._maybe_assert_valid_sample(x)
# Note that igammac returns the upper regularized incomplete gamma
# function Q(a, x), which is what we want for the CDF.
return math_ops.igammac(self.concentration, self.rate / x)
def _log_unnormalized_prob(self, x):
x = self._maybe_assert_valid_sample(x)
return -(1. + self.concentration) * math_ops.log(x) - self.rate / x
def _log_normalization(self):
return (math_ops.lgamma(self.concentration)
- self.concentration * math_ops.log(self.rate))
def _entropy(self):
return (self.concentration
+ math_ops.log(self.rate)
+ math_ops.lgamma(self.concentration)
- ((1. + self.concentration) *
math_ops.digamma(self.concentration)))
@distribution_util.AppendDocstring(
"""The mean of an inverse gamma distribution is
`rate / (concentration - 1)`, when `concentration > 1`, and `NaN`
otherwise. If `self.allow_nan_stats` is `False`, an exception will be
      raised rather than returning `NaN`.""")
def _mean(self):
mean = self.rate / (self.concentration - 1.)
if self.allow_nan_stats:
nan = array_ops.fill(
self.batch_shape_tensor(),
np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
name="nan")
return array_ops.where(self.concentration > 1., mean, nan)
else:
return control_flow_ops.with_dependencies([
check_ops.assert_less(
array_ops.ones([], self.dtype), self.concentration,
message="mean undefined when any concentration <= 1"),
], mean)
@distribution_util.AppendDocstring(
"""Variance for inverse gamma is defined only for `concentration > 2`. If
`self.allow_nan_stats` is `False`, an exception will be raised rather
than returning `NaN`.""")
def _variance(self):
var = (math_ops.square(self.rate)
/ math_ops.square(self.concentration - 1.)
/ (self.concentration - 2.))
if self.allow_nan_stats:
nan = array_ops.fill(
self.batch_shape_tensor(),
np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
name="nan")
return array_ops.where(self.concentration > 2., var, nan)
else:
return control_flow_ops.with_dependencies([
check_ops.assert_less(
constant_op.constant(2., dtype=self.dtype),
self.concentration,
message="variance undefined when any concentration <= 2"),
], var)
@distribution_util.AppendDocstring(
"""The mode of an inverse gamma distribution is `rate / (concentration +
1)`.""")
def _mode(self):
return self.rate / (1. + self.concentration)
def _maybe_assert_valid_sample(self, x):
check_ops.assert_same_float_dtype(
tensors=[x], dtype=self.dtype)
if not self.validate_args:
return x
return control_flow_ops.with_dependencies([
check_ops.assert_positive(x),
], x)
class InverseGammaWithSoftplusConcentrationRate(InverseGamma):
"""`InverseGamma` with softplus of `concentration` and `rate`."""
@deprecation.deprecated(
"2018-10-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). You "
"should update all references to use `tfp.distributions` "
"instead of `tf.contrib.distributions`.",
warn_once=True)
def __init__(self,
concentration,
rate,
validate_args=False,
allow_nan_stats=True,
name="InverseGammaWithSoftplusConcentrationRate"):
parameters = dict(locals())
with ops.name_scope(name, values=[concentration, rate]) as name:
super(InverseGammaWithSoftplusConcentrationRate, self).__init__(
concentration=nn.softplus(concentration,
name="softplus_concentration"),
rate=nn.softplus(rate, name="softplus_rate"),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
name=name)
self._parameters = parameters
|
apache-2.0
|
DSPay/DSValue
|
share/qt/extract_strings_qt.py
|
145
|
1900
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
import os
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
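    For example (illustrative), feeding it:
        msgid "Hello"
        msgstr ""
    yields [(['"Hello"'], ['""'])]; entries keep their quoted source lines
    so that continuation lines can simply be joined later.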
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
XGETTEXT=os.getenv('XGETTEXT', 'xgettext')
child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};\n')
f.close()
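# Typical invocation (an assumption based on the globs above; run from the
# source tree root so the src/*.cpp and src/*.h patterns resolve):
#
#     XGETTEXT=xgettext python share/qt/extract_strings_qt.py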
|
mit
|
qitaos/robotframework-mabot
|
lib/robot/utils/charwidth.py
|
4
|
7424
|
# Copyright 2008-2012 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module to handle different character widths on the console.
Some East Asian characters have a width of two on the console, while
combining characters themselves take no extra space.
See issue 604 [1] for more details about East Asian characters. The issue also
contains `generate_wild_chars.py` script that was originally used to create
`_EAST_ASIAN_WILD_CHARS` mapping. An updated version of the script is attached
to issue 1096 [2]. Big thanks to xieyanbo for the script and the original patch.
Note that Python's `unicodedata` module is not used here because importing
it takes several seconds on Jython.
[1] http://code.google.com/p/robotframework/issues/detail?id=604
[2] http://code.google.com/p/robotframework/issues/detail?id=1096
"""
def get_char_width(char):
char = ord(char)
if _char_in_map(char, _COMBINING_CHARS):
return 0
if _char_in_map(char, _EAST_ASIAN_WILD_CHARS):
return 2
return 1
def _char_in_map(char, map):
for begin, end in map:
if char < begin:
break
if begin <= char <= end:
return True
return False
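# A few illustrative values, assuming the mappings below (doctest-style
# sketch, not part of the original module):
#
#   >>> get_char_width(u'a')       # plain ASCII
#   1
#   >>> get_char_width(u'\u4e00')  # CJK ideograph, inside (19968, 42239)
#   2
#   >>> get_char_width(u'\u0301')  # combining acute accent, inside (768, 879)
#   0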
_COMBINING_CHARS = [(768, 879)]
_EAST_ASIAN_WILD_CHARS = [
(888, 889), (895, 899), (907, 907), (909, 909), (930, 930),
(1316, 1328), (1367, 1368), (1376, 1376), (1416, 1416),
(1419, 1424), (1480, 1487), (1515, 1519), (1525, 1535),
(1540, 1541), (1564, 1565), (1568, 1568), (1631, 1631),
(1806, 1806), (1867, 1868), (1970, 1983), (2043, 2304),
(2362, 2363), (2382, 2383), (2389, 2391), (2419, 2426),
(2432, 2432), (2436, 2436), (2445, 2446), (2449, 2450),
(2473, 2473), (2481, 2481), (2483, 2485), (2490, 2491),
(2501, 2502), (2505, 2506), (2511, 2518), (2520, 2523),
(2526, 2526), (2532, 2533), (2555, 2560), (2564, 2564),
(2571, 2574), (2577, 2578), (2601, 2601), (2609, 2609),
(2612, 2612), (2615, 2615), (2618, 2619), (2621, 2621),
(2627, 2630), (2633, 2634), (2638, 2640), (2642, 2648),
(2653, 2653), (2655, 2661), (2678, 2688), (2692, 2692),
(2702, 2702), (2706, 2706), (2729, 2729), (2737, 2737),
(2740, 2740), (2746, 2747), (2758, 2758), (2762, 2762),
(2766, 2767), (2769, 2783), (2788, 2789), (2800, 2800),
(2802, 2816), (2820, 2820), (2829, 2830), (2833, 2834),
(2857, 2857), (2865, 2865), (2868, 2868), (2874, 2875),
(2885, 2886), (2889, 2890), (2894, 2901), (2904, 2907),
(2910, 2910), (2916, 2917), (2930, 2945), (2948, 2948),
(2955, 2957), (2961, 2961), (2966, 2968), (2971, 2971),
(2973, 2973), (2976, 2978), (2981, 2983), (2987, 2989),
(3002, 3005), (3011, 3013), (3017, 3017), (3022, 3023),
(3025, 3030), (3032, 3045), (3067, 3072), (3076, 3076),
(3085, 3085), (3089, 3089), (3113, 3113), (3124, 3124),
(3130, 3132), (3141, 3141), (3145, 3145), (3150, 3156),
(3159, 3159), (3162, 3167), (3172, 3173), (3184, 3191),
(3200, 3201), (3204, 3204), (3213, 3213), (3217, 3217),
(3241, 3241), (3252, 3252), (3258, 3259), (3269, 3269),
(3273, 3273), (3278, 3284), (3287, 3293), (3295, 3295),
(3300, 3301), (3312, 3312), (3315, 3329), (3332, 3332),
(3341, 3341), (3345, 3345), (3369, 3369), (3386, 3388),
(3397, 3397), (3401, 3401), (3406, 3414), (3416, 3423),
(3428, 3429), (3446, 3448), (3456, 3457), (3460, 3460),
(3479, 3481), (3506, 3506), (3516, 3516), (3518, 3519),
(3527, 3529), (3531, 3534), (3541, 3541), (3543, 3543),
(3552, 3569), (3573, 3584), (3643, 3646), (3676, 3712),
(3715, 3715), (3717, 3718), (3721, 3721), (3723, 3724),
(3726, 3731), (3736, 3736), (3744, 3744), (3748, 3748),
(3750, 3750), (3752, 3753), (3756, 3756), (3770, 3770),
(3774, 3775), (3781, 3781), (3783, 3783), (3790, 3791),
(3802, 3803), (3806, 3839), (3912, 3912), (3949, 3952),
(3980, 3983), (3992, 3992), (4029, 4029), (4045, 4045),
(4053, 4095), (4250, 4253), (4294, 4303), (4349, 4447),
(4515, 4519), (4602, 4607), (4681, 4681), (4686, 4687),
(4695, 4695), (4697, 4697), (4702, 4703), (4745, 4745),
(4750, 4751), (4785, 4785), (4790, 4791), (4799, 4799),
(4801, 4801), (4806, 4807), (4823, 4823), (4881, 4881),
(4886, 4887), (4955, 4958), (4989, 4991), (5018, 5023),
(5109, 5120), (5751, 5759), (5789, 5791), (5873, 5887),
(5901, 5901), (5909, 5919), (5943, 5951), (5972, 5983),
(5997, 5997), (6001, 6001), (6004, 6015), (6110, 6111),
(6122, 6127), (6138, 6143), (6159, 6159), (6170, 6175),
(6264, 6271), (6315, 6399), (6429, 6431), (6444, 6447),
(6460, 6463), (6465, 6467), (6510, 6511), (6517, 6527),
(6570, 6575), (6602, 6607), (6618, 6621), (6684, 6685),
(6688, 6911), (6988, 6991), (7037, 7039), (7083, 7085),
(7098, 7167), (7224, 7226), (7242, 7244), (7296, 7423),
(7655, 7677), (7958, 7959), (7966, 7967), (8006, 8007),
(8014, 8015), (8024, 8024), (8026, 8026), (8028, 8028),
(8030, 8030), (8062, 8063), (8117, 8117), (8133, 8133),
(8148, 8149), (8156, 8156), (8176, 8177), (8181, 8181),
(8191, 8191), (8293, 8297), (8306, 8307), (8335, 8335),
(8341, 8351), (8374, 8399), (8433, 8447), (8528, 8530),
(8585, 8591), (9001, 9002), (9192, 9215), (9255, 9279),
(9291, 9311), (9886, 9887), (9917, 9919), (9924, 9984),
(9989, 9989), (9994, 9995), (10024, 10024), (10060, 10060),
(10062, 10062), (10067, 10069), (10071, 10071), (10079, 10080),
(10133, 10135), (10160, 10160), (10175, 10175), (10187, 10187),
(10189, 10191), (11085, 11087), (11093, 11263), (11311, 11311),
(11359, 11359), (11376, 11376), (11390, 11391), (11499, 11512),
(11558, 11567), (11622, 11630), (11632, 11647), (11671, 11679),
(11687, 11687), (11695, 11695), (11703, 11703), (11711, 11711),
(11719, 11719), (11727, 11727), (11735, 11735), (11743, 11743),
(11825, 12350), (12352, 19903), (19968, 42239), (42540, 42559),
(42592, 42593), (42612, 42619), (42648, 42751), (42893, 43002),
(43052, 43071), (43128, 43135), (43205, 43213), (43226, 43263),
(43348, 43358), (43360, 43519), (43575, 43583), (43598, 43599),
(43610, 43611), (43616, 55295), (63744, 64255), (64263, 64274),
(64280, 64284), (64311, 64311), (64317, 64317), (64319, 64319),
(64322, 64322), (64325, 64325), (64434, 64466), (64832, 64847),
(64912, 64913), (64968, 65007), (65022, 65023), (65040, 65055),
(65063, 65135), (65141, 65141), (65277, 65278), (65280, 65376),
(65471, 65473), (65480, 65481), (65488, 65489), (65496, 65497),
(65501, 65511), (65519, 65528), (65534, 65535),
]
|
apache-2.0
|
trondhindenes/ansible
|
test/units/modules/monitoring/test_icinga2_feature.py
|
68
|
3806
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Ansible Project
# Copyright (c) 2018, Abhijeet Kasurde <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.modules.monitoring import icinga2_feature
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
from units.compat.mock import patch
from ansible.module_utils import basic
def get_bin_path(*args, **kwargs):
"""Function to return path of icinga2 binary."""
return "/bin/icinga2"
class TestIcinga2Feature(ModuleTestCase):
"""Main class for testing icinga2_feature module."""
def setUp(self):
"""Setup."""
super(TestIcinga2Feature, self).setUp()
self.module = icinga2_feature
self.mock_get_bin_path = patch.object(basic.AnsibleModule, 'get_bin_path', get_bin_path)
self.mock_get_bin_path.start()
self.addCleanup(self.mock_get_bin_path.stop) # ensure that the patching is 'undone'
def tearDown(self):
"""Teardown."""
super(TestIcinga2Feature, self).tearDown()
def test_without_required_parameters(self):
"""Failure must occurs when all parameters are missing."""
with self.assertRaises(AnsibleFailJson):
set_module_args({})
self.module.main()
def test_enable_feature(self):
"""Check that result is changed."""
set_module_args({
'name': 'api',
})
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
run_command.return_value = 0, '', '' # successful execution, no output
with self.assertRaises(AnsibleExitJson) as result:
icinga2_feature.main()
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args[0][0][-1], 'api')
def test_enable_feature_with_check_mode(self):
"""Check that result is changed in check mode."""
set_module_args({
'name': 'api',
'_ansible_check_mode': True,
})
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
run_command.return_value = 0, '', '' # successful execution, no output
with self.assertRaises(AnsibleExitJson) as result:
icinga2_feature.main()
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 1)
def test_disable_feature(self):
"""Check that result is changed."""
set_module_args({
'name': 'api',
'state': 'absent'
})
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
run_command.return_value = 0, '', '' # successful execution, no output
with self.assertRaises(AnsibleExitJson) as result:
icinga2_feature.main()
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args[0][0][-1], 'api')
def test_disable_feature_with_check_mode(self):
"""Check that result is changed in check mode."""
set_module_args({
'name': 'api',
'state': 'absent',
'_ansible_check_mode': True,
})
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
run_command.return_value = 0, '', '' # successful execution, no output
with self.assertRaises(AnsibleExitJson) as result:
icinga2_feature.main()
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 1)
|
gpl-3.0
|
ololobster/cvidone
|
cvidone/model/password_change_request.py
|
1
|
1582
|
# Released under the MIT license. See the LICENSE file for more information.
# https://github.com/ololobster/cvidone
from .user import User
class PasswordChangeRequest(object):
def __init__(self):
self._token = None
self._user_id = None
@staticmethod
def getExisting(db, token):
req = PasswordChangeRequest()
req._user_id = db.selectOne("""
SELECT user_id
FROM password_change_requests
WHERE (token = %s) AND (expiration_time > NOW())
""", token)
req._token = token
return req
@staticmethod
def getRidOfObsoleteRequests(db):
db.remove("password_change_requests", ["expiration_time < NOW()"])
def save(self, db):
assert(self._user_id is not None)
raw_data = {"user_id": self._user_id}
if (self._token is None):
self._token = db.insertWithToken("password_change_requests", raw_data)["token"]
def remove(self, db):
assert(not self.isNew())
db.remove("password_change_requests", [("token = %s", self._token)])
self._token = None
def performChange(self, db, password):
user = User.getExisting(db, id=self._user_id)
user.setPwd(password)
user.save(db)
user.killAllSessions(db)
@property
def token(self):
return self._token
def isNew(self):
return (self._token is None)
@property
def user_id(self):
return self._user_id
@user_id.setter
def user_id(self, user_id):
self._user_id = user_id
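# A minimal usage sketch (the `db` handle, user id and token are hypothetical):
#
#   req = PasswordChangeRequest()
#   req.user_id = some_user_id
#   req.save(db)                      # allocates a token via insertWithToken()
#   ...
#   req = PasswordChangeRequest.getExisting(db, token)
#   req.performChange(db, new_password)
#   req.remove(db)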
|
mit
|
bdelbosc/bundleman
|
setup.py
|
1
|
3472
|
#! /usr/bin/env python
# (C) Copyright 2006 Nuxeo SAS <http://nuxeo.com>
# Author: [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
"""Bundle Manager setup
$Id: setup.py 49829 2006-10-24 16:34:17Z bdelbosc $
"""
from distutils.core import setup
from bundleman.version import __version__
from tests.distutilstestcommand import TestCommand
setup(
name="bundleman",
version=__version__,
description="Manage svn bundle releasing.",
long_description="""\
BundleMan tries to manage the releasing of applications built on versioned
products under subversion.
An application is seen as a products suite defined using the subversion
svn:externals property. An application is a bundle of products. Products are
versioned pieces of software.
Releasing an application is about taking care of tagging the source
repository, managing the version of each product, managing CHANGELOGs,
creating a source package archive, and giving ways to maintain a release
without blocking trunk development.
Main features:
* BundleMan is free software distributed under the GNU GPL.
* It uses a recommended trunk/branches/tags repository layouts for
products and bundles.
* It uses standard versioning MAJOR.MINOR.BUGFIX-RELEASE for products.
* Versioning of products is done automatically by analysing a CHANGES file.
* Enforces CHANGELOG quality by requiring a product CHANGES file.
* It generates an application CHANGELOG.
* There is no locking of the trunk or version conflicts when patching a
released application.
* Can manage public, private or remote products.
* BundleMan is written in python and can be easily customized.
""",
author="Benoit Delbosc",
author_email="[email protected]",
url="http://public.dev.nuxeo.com/~ben/bundleman/",
download_url="http://public.dev.nuxeo.com/~ben/bundleman/bundleman-%s.tar.gz"%__version__,
license='GPL',
packages=['bundleman'],
package_dir={'bundleman': 'bundleman'},
scripts=['scripts/bm-bundle',
'scripts/bm-product',
],
keywords='packaging releasing bundle subversion versioning',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Natural Language :: English',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: System :: Software Distribution',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Version Control',
'Topic :: System :: Archiving :: Packaging',
],
cmdclass = { 'test': TestCommand,}
)
|
gpl-2.0
|
ycool/apollo
|
modules/tools/open_space_visualization/hybrid_a_star_visualizer.py
|
2
|
6573
|
#!/usr/bin/env python
###############################################################################
# Copyright 2018 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
from hybrid_a_star_python_interface import *
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from matplotlib import animation
import numpy as np
import time
import math
def HybridAStarPlan(visualize_flag):
# initialize the planner object
HybridAStar = HybridAStarPlanner()
# parameters (max, min and car size are defined in the proto)
num_output_buffer = 100000
sx = -8
sy = 4
sphi = 0.0
scenario = "backward"
# scenario = "parallel"
if scenario == "backward":
# for parking space 11543 in sunnyvale_with_two_offices
left_boundary_x = (
c_double * 3)(*[-13.6407054776, 0.0, 0.0515703622475])
left_boundary_y = (
c_double * 3)(*[0.0140634663703, 0.0, -5.15258191624])
down_boundary_x = (c_double * 2)(*[0.0515703622475, 2.8237895441])
down_boundary_y = (c_double * 2)(*[-5.15258191624, -5.15306980547])
right_boundary_x = (
c_double * 3)(*[2.8237895441, 2.7184833539, 16.3592013995])
right_boundary_y = (
c_double * 3)(*[-5.15306980547, -0.0398078878812, -0.011889513383])
up_boundary_x = (c_double * 2)(*[16.3591910364, -13.6406951857])
up_boundary_y = (c_double * 2)(*[5.60414234644, 5.61797800844])
# virtual obstacles (x coords, y coords, number of points)
HybridAStar.AddVirtualObstacle(left_boundary_x, left_boundary_y, 3)
HybridAStar.AddVirtualObstacle(
down_boundary_x, down_boundary_y, 2)
HybridAStar.AddVirtualObstacle(
right_boundary_x, right_boundary_y, 3)
HybridAStar.AddVirtualObstacle(
up_boundary_x, up_boundary_y, 2)
ex = 1.359
ey = -3.86443643718
ephi = 1.581
XYbounds = [-13.6406951857, 16.3591910364, -
5.15258191624, 5.61797800844]
x = (c_double * num_output_buffer)()
y = (c_double * num_output_buffer)()
phi = (c_double * num_output_buffer)()
v = (c_double * num_output_buffer)()
a = (c_double * num_output_buffer)()
steer = (c_double * num_output_buffer)()
size = (c_ushort * 1)()
XYbounds_ctype = (c_double * 4)(*XYbounds)
start = time.time()
print("planning start")
success = True
if not HybridAStar.Plan(sx, sy, sphi, ex, ey, ephi, XYbounds_ctype):
print("planning fail")
success = False
end = time.time()
planning_time = end - start
print("planning time is " + str(planning_time))
# load result
x_out = []
y_out = []
phi_out = []
v_out = []
a_out = []
steer_out = []
if visualize_flag and success:
HybridAStar.GetResult(x, y, phi, v, a, steer, size)
for i in range(0, size[0]):
x_out.append(float(x[i]))
y_out.append(float(y[i]))
phi_out.append(float(phi[i]))
v_out.append(float(v[i]))
a_out.append(float(a[i]))
steer_out.append(float(steer[i]))
# plot
fig1 = plt.figure(1)
ax = fig1.add_subplot(111)
for i in range(0, size[0]):
downx = 1.055 * math.cos(phi_out[i] - math.pi / 2)
downy = 1.055 * math.sin(phi_out[i] - math.pi / 2)
leftx = 1.043 * math.cos(phi_out[i] - math.pi)
lefty = 1.043 * math.sin(phi_out[i] - math.pi)
x_shift_leftbottom = x_out[i] + downx + leftx
y_shift_leftbottom = y_out[i] + downy + lefty
car = patches.Rectangle((x_shift_leftbottom, y_shift_leftbottom), 3.89 + 1.043, 1.055*2,
angle=phi_out[i] * 180 / math.pi, linewidth=1, edgecolor='r', facecolor='none')
arrow = patches.Arrow(
x_out[i], y_out[i], 0.25*math.cos(phi_out[i]), 0.25*math.sin(phi_out[i]), 0.2)
ax.add_patch(car)
ax.add_patch(arrow)
ax.plot(sx, sy, "s")
ax.plot(ex, ey, "s")
if scenario == "backward":
left_boundary_x = [-13.6407054776, 0.0, 0.0515703622475]
left_boundary_y = [0.0140634663703, 0.0, -5.15258191624]
down_boundary_x = [0.0515703622475, 2.8237895441]
down_boundary_y = [-5.15258191624, -5.15306980547]
right_boundary_x = [2.8237895441, 2.7184833539, 16.3592013995]
right_boundary_y = [-5.15306980547, -0.0398078878812, -0.011889513383]
up_boundary_x = [16.3591910364, -13.6406951857]
up_boundary_y = [5.60414234644, 5.61797800844]
ax.plot(left_boundary_x, left_boundary_y, "k")
ax.plot(down_boundary_x, down_boundary_y, "k")
ax.plot(right_boundary_x, right_boundary_y, "k")
ax.plot(up_boundary_x, up_boundary_y, "k")
plt.axis('equal')
fig2 = plt.figure(2)
v_graph = fig2.add_subplot(311)
v_graph.title.set_text('v')
v_graph.plot(np.linspace(0, size[0], size[0]), v_out)
a_graph = fig2.add_subplot(312)
a_graph.title.set_text('a')
a_graph.plot(np.linspace(0, size[0], size[0]), a_out)
steer_graph = fig2.add_subplot(313)
steer_graph.title.set_text('steering')
steer_graph.plot(np.linspace(0, size[0], size[0]), steer_out)
plt.show()
if not visualize_flag :
if success :
HybridAStar.GetResult(x, y, phi, v, a, steer, size)
for i in range(0, size[0]):
x_out.append(float(x[i]))
y_out.append(float(y[i]))
phi_out.append(float(phi[i]))
v_out.append(float(v[i]))
a_out.append(float(a[i]))
steer_out.append(float(steer[i]))
return success, x_out, y_out, phi_out, v_out, a_out, steer_out, planning_time
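# When called with visualize_flag=False the planner runs headless and the raw
# trajectory is returned instead of being plotted, e.g. (sketch):
#
#   success, x, y, phi, v, a, steer, t = HybridAStarPlan(False)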
if __name__ == '__main__':
visualize_flag = True
HybridAStarPlan(visualize_flag)
|
apache-2.0
|
MQQiang/kbengine
|
kbe/src/lib/python/Lib/test/test_stat.py
|
84
|
6997
|
import unittest
import os
from test.support import TESTFN, import_fresh_module
c_stat = import_fresh_module('stat', fresh=['_stat'])
py_stat = import_fresh_module('stat', blocked=['_stat'])
class TestFilemode:
statmod = None
file_flags = {'SF_APPEND', 'SF_ARCHIVED', 'SF_IMMUTABLE', 'SF_NOUNLINK',
'SF_SNAPSHOT', 'UF_APPEND', 'UF_COMPRESSED', 'UF_HIDDEN',
'UF_IMMUTABLE', 'UF_NODUMP', 'UF_NOUNLINK', 'UF_OPAQUE'}
formats = {'S_IFBLK', 'S_IFCHR', 'S_IFDIR', 'S_IFIFO', 'S_IFLNK',
'S_IFREG', 'S_IFSOCK'}
format_funcs = {'S_ISBLK', 'S_ISCHR', 'S_ISDIR', 'S_ISFIFO', 'S_ISLNK',
'S_ISREG', 'S_ISSOCK'}
stat_struct = {
'ST_MODE': 0,
'ST_INO': 1,
'ST_DEV': 2,
'ST_NLINK': 3,
'ST_UID': 4,
'ST_GID': 5,
'ST_SIZE': 6,
'ST_ATIME': 7,
'ST_MTIME': 8,
'ST_CTIME': 9}
# permission bit values are defined by POSIX
permission_bits = {
'S_ISUID': 0o4000,
'S_ISGID': 0o2000,
'S_ENFMT': 0o2000,
'S_ISVTX': 0o1000,
'S_IRWXU': 0o700,
'S_IRUSR': 0o400,
'S_IREAD': 0o400,
'S_IWUSR': 0o200,
'S_IWRITE': 0o200,
'S_IXUSR': 0o100,
'S_IEXEC': 0o100,
'S_IRWXG': 0o070,
'S_IRGRP': 0o040,
'S_IWGRP': 0o020,
'S_IXGRP': 0o010,
'S_IRWXO': 0o007,
'S_IROTH': 0o004,
'S_IWOTH': 0o002,
'S_IXOTH': 0o001}
def setUp(self):
try:
os.remove(TESTFN)
except OSError:
try:
os.rmdir(TESTFN)
except OSError:
pass
tearDown = setUp
def get_mode(self, fname=TESTFN, lstat=True):
if lstat:
st_mode = os.lstat(fname).st_mode
else:
st_mode = os.stat(fname).st_mode
modestr = self.statmod.filemode(st_mode)
return st_mode, modestr
def assertS_IS(self, name, mode):
# test format, lstrip is for S_IFIFO
fmt = getattr(self.statmod, "S_IF" + name.lstrip("F"))
self.assertEqual(self.statmod.S_IFMT(mode), fmt)
# test that just one function returns true
testname = "S_IS" + name
for funcname in self.format_funcs:
func = getattr(self.statmod, funcname, None)
if func is None:
if funcname == testname:
raise ValueError(funcname)
continue
if funcname == testname:
self.assertTrue(func(mode))
else:
self.assertFalse(func(mode))
def test_mode(self):
with open(TESTFN, 'w'):
pass
if os.name == 'posix':
os.chmod(TESTFN, 0o700)
st_mode, modestr = self.get_mode()
self.assertEqual(modestr, '-rwx------')
self.assertS_IS("REG", st_mode)
self.assertEqual(self.statmod.S_IMODE(st_mode),
self.statmod.S_IRWXU)
os.chmod(TESTFN, 0o070)
st_mode, modestr = self.get_mode()
self.assertEqual(modestr, '----rwx---')
self.assertS_IS("REG", st_mode)
self.assertEqual(self.statmod.S_IMODE(st_mode),
self.statmod.S_IRWXG)
os.chmod(TESTFN, 0o007)
st_mode, modestr = self.get_mode()
self.assertEqual(modestr, '-------rwx')
self.assertS_IS("REG", st_mode)
self.assertEqual(self.statmod.S_IMODE(st_mode),
self.statmod.S_IRWXO)
os.chmod(TESTFN, 0o444)
st_mode, modestr = self.get_mode()
self.assertS_IS("REG", st_mode)
self.assertEqual(modestr, '-r--r--r--')
self.assertEqual(self.statmod.S_IMODE(st_mode), 0o444)
else:
os.chmod(TESTFN, 0o700)
st_mode, modestr = self.get_mode()
self.assertEqual(modestr[:3], '-rw')
self.assertS_IS("REG", st_mode)
self.assertEqual(self.statmod.S_IFMT(st_mode),
self.statmod.S_IFREG)
def test_directory(self):
os.mkdir(TESTFN)
os.chmod(TESTFN, 0o700)
st_mode, modestr = self.get_mode()
self.assertS_IS("DIR", st_mode)
if os.name == 'posix':
self.assertEqual(modestr, 'drwx------')
else:
self.assertEqual(modestr[0], 'd')
@unittest.skipUnless(hasattr(os, 'symlink'), 'os.symlink not available')
def test_link(self):
try:
os.symlink(os.getcwd(), TESTFN)
except (OSError, NotImplementedError) as err:
raise unittest.SkipTest(str(err))
else:
st_mode, modestr = self.get_mode()
self.assertEqual(modestr[0], 'l')
self.assertS_IS("LNK", st_mode)
@unittest.skipUnless(hasattr(os, 'mkfifo'), 'os.mkfifo not available')
def test_fifo(self):
os.mkfifo(TESTFN, 0o700)
st_mode, modestr = self.get_mode()
self.assertEqual(modestr, 'prwx------')
self.assertS_IS("FIFO", st_mode)
@unittest.skipUnless(os.name == 'posix', 'requires Posix')
def test_devices(self):
if os.path.exists(os.devnull):
st_mode, modestr = self.get_mode(os.devnull, lstat=False)
self.assertEqual(modestr[0], 'c')
self.assertS_IS("CHR", st_mode)
# Linux block devices, BSD has no block devices anymore
for blockdev in ("/dev/sda", "/dev/hda"):
if os.path.exists(blockdev):
st_mode, modestr = self.get_mode(blockdev, lstat=False)
self.assertEqual(modestr[0], 'b')
self.assertS_IS("BLK", st_mode)
break
def test_module_attributes(self):
for key, value in self.stat_struct.items():
modvalue = getattr(self.statmod, key)
self.assertEqual(value, modvalue, key)
for key, value in self.permission_bits.items():
modvalue = getattr(self.statmod, key)
self.assertEqual(value, modvalue, key)
for key in self.file_flags:
modvalue = getattr(self.statmod, key)
self.assertIsInstance(modvalue, int)
for key in self.formats:
modvalue = getattr(self.statmod, key)
self.assertIsInstance(modvalue, int)
for key in self.format_funcs:
func = getattr(self.statmod, key)
self.assertTrue(callable(func))
self.assertEqual(func(0), 0)
class TestFilemodeCStat(TestFilemode, unittest.TestCase):
statmod = c_stat
formats = TestFilemode.formats | {'S_IFDOOR', 'S_IFPORT', 'S_IFWHT'}
format_funcs = TestFilemode.format_funcs | {'S_ISDOOR', 'S_ISPORT',
'S_ISWHT'}
class TestFilemodePyStat(TestFilemode, unittest.TestCase):
statmod = py_stat
if __name__ == '__main__':
unittest.main()
|
lgpl-3.0
|
JackPrice/ansible-modules-extras
|
notification/mail.py
|
5
|
9642
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Dag Wieers <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
author: Dag Wieers
module: mail
short_description: Send an email
description:
- This module is useful for sending emails from playbooks.
- One may wonder why automate sending emails? In complex environments
there are, from time to time, processes that cannot be automated, either
because you lack the authority to make it so, or because not everyone
agrees to a common approach.
- If you cannot automate a specific step, but the step is non-blocking,
sending out an email to the responsible party to make them perform their
part of the bargain is an elegant way to put the responsibility in
someone else's lap.
- Of course sending out a mail can be equally useful as a way to notify
one or more people in a team that a specific action has been
(successfully) taken.
version_added: "0.8"
options:
from:
description:
- The email-address the mail is sent from. May contain address and phrase.
default: root
required: false
to:
description:
- The email-address(es) the mail is being sent to. This is
a comma-separated list, which may contain address and phrase portions.
default: root
required: false
cc:
description:
- The email-address(es) the mail is being copied to. This is
a comma-separated list, which may contain address and phrase portions.
required: false
bcc:
description:
- The email-address(es) the mail is being 'blind' copied to. This is
a comma-separated list, which may contain address and phrase portions.
required: false
subject:
description:
- The subject of the email being sent.
aliases: [ msg ]
required: true
body:
description:
- The body of the email being sent.
default: $subject
required: false
username:
description:
- If SMTP requires username
default: null
required: false
version_added: "1.9"
password:
description:
- If SMTP requires password
default: null
required: false
version_added: "1.9"
host:
description:
- The mail server
default: 'localhost'
required: false
port:
description:
- The mail server port
default: '25'
required: false
version_added: "1.0"
attach:
description:
- A space-separated list of pathnames of files to attach to the message.
Attached files will have their content-type set to C(application/octet-stream).
default: null
required: false
version_added: "1.0"
headers:
description:
- A vertical-bar-separated list of headers which should be added to the message.
Each individual header is specified as C(header=value) (see example below).
default: null
required: false
version_added: "1.0"
charset:
description:
- The character set of email being sent
default: 'us-ascii'
required: false
"""
EXAMPLES = '''
# Example playbook sending mail to root
- local_action: mail msg='System {{ ansible_hostname }} has been successfully provisioned.'
# Sending an e-mail using Gmail SMTP servers
- local_action: mail
host='smtp.gmail.com'
port=587
[email protected]
password='mysecret'
to="John Smith <[email protected]>"
subject='Ansible-report'
msg='System {{ ansible_hostname }} has been successfully provisioned.'
# Send e-mail to a bunch of users, attaching files
- local_action: mail
host='127.0.0.1'
port=2025
subject="Ansible-report"
body="Hello, this is an e-mail. I hope you like it ;-)"
from="[email protected] (Jane Jolie)"
to="John Doe <[email protected]>, Suzie Something <[email protected]>"
cc="Charlie Root <root@localhost>"
attach="/etc/group /tmp/pavatar2.png"
[email protected]|X-Special="Something or other"
charset=utf8
'''
import os
import sys
import smtplib
import ssl
try:
from email import encoders
import email.utils
from email.utils import parseaddr, formataddr
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
except ImportError:
from email import Encoders as encoders
import email.Utils
from email.Utils import parseaddr, formataddr
from email.MIMEBase import MIMEBase
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
def main():
module = AnsibleModule(
argument_spec = dict(
username = dict(default=None),
password = dict(default=None),
host = dict(default='localhost'),
port = dict(default='25'),
sender = dict(default='root', aliases=['from']),
to = dict(default='root', aliases=['recipients']),
cc = dict(default=None),
bcc = dict(default=None),
subject = dict(required=True, aliases=['msg']),
body = dict(default=None),
attach = dict(default=None),
headers = dict(default=None),
charset = dict(default='us-ascii')
)
)
username = module.params.get('username')
password = module.params.get('password')
host = module.params.get('host')
port = module.params.get('port')
sender = module.params.get('sender')
recipients = module.params.get('to')
copies = module.params.get('cc')
blindcopies = module.params.get('bcc')
subject = module.params.get('subject')
body = module.params.get('body')
attach_files = module.params.get('attach')
headers = module.params.get('headers')
charset = module.params.get('charset')
sender_phrase, sender_addr = parseaddr(sender)
if not body:
body = subject
try:
try:
smtp = smtplib.SMTP_SSL(host, port=int(port))
except (smtplib.SMTPException, ssl.SSLError):
smtp = smtplib.SMTP(host, port=int(port))
except Exception, e:
module.fail_json(rc=1, msg='Failed to send mail to server %s on port %s: %s' % (host, port, e))
smtp.ehlo()
if username and password:
if smtp.has_extn('STARTTLS'):
smtp.starttls()
try:
smtp.login(username, password)
except smtplib.SMTPAuthenticationError:
module.fail_json(msg="Authentication to %s:%s failed, please check your username and/or password" % (host, port))
msg = MIMEMultipart()
msg['Subject'] = subject
msg['From'] = formataddr((sender_phrase, sender_addr))
msg.preamble = "Multipart message"
if headers is not None:
for hdr in [x.strip() for x in headers.split('|')]:
try:
h_key, h_val = hdr.split('=')
msg.add_header(h_key, h_val)
except:
pass
if 'X-Mailer' not in msg:
msg.add_header('X-Mailer', "Ansible")
to_list = []
cc_list = []
addr_list = []
if recipients is not None:
for addr in [x.strip() for x in recipients.split(',')]:
to_list.append( formataddr( parseaddr(addr)) )
addr_list.append( parseaddr(addr)[1] ) # address only, w/o phrase
if copies is not None:
for addr in [x.strip() for x in copies.split(',')]:
cc_list.append( formataddr( parseaddr(addr)) )
addr_list.append( parseaddr(addr)[1] ) # address only, w/o phrase
if blindcopies is not None:
for addr in [x.strip() for x in blindcopies.split(',')]:
addr_list.append( parseaddr(addr)[1] )
if len(to_list) > 0:
msg['To'] = ", ".join(to_list)
if len(cc_list) > 0:
msg['Cc'] = ", ".join(cc_list)
part = MIMEText(body + "\n\n", _charset=charset)
msg.attach(part)
if attach_files is not None:
for file in attach_files.split():
try:
fp = open(file, 'rb')
part = MIMEBase('application', 'octet-stream')
part.set_payload(fp.read())
fp.close()
encoders.encode_base64(part)
part.add_header('Content-disposition', 'attachment', filename=os.path.basename(file))
msg.attach(part)
except Exception, e:
module.fail_json(rc=1, msg="Failed to send mail: can't attach file %s: %s" % (file, e))
sys.exit()
composed = msg.as_string()
try:
smtp.sendmail(sender_addr, set(addr_list), composed)
except Exception, e:
module.fail_json(rc=1, msg='Failed to send mail to %s: %s' % (", ".join(addr_list), e))
smtp.quit()
module.exit_json(changed=False)
# import module snippets
from ansible.module_utils.basic import *
main()
|
gpl-3.0
|
markr622/moose
|
gui/gui/FlowLayout.py
|
42
|
2707
|
try:
from PyQt4 import QtCore, QtGui
QtCore.Signal = QtCore.pyqtSignal
QtCore.Slot = QtCore.pyqtSlot
except ImportError:
try:
from PySide import QtCore, QtGui
QtCore.QString = str
except ImportError:
raise ImportError("Cannot load either PyQt or PySide")
class FlowLayout(QtGui.QLayout):
def __init__(self, parent=None, margin=0, spacing=-1):
super(FlowLayout, self).__init__(parent)
if parent is not None:
self.setMargin(margin)
self.setSpacing(spacing)
self.itemList = []
def __del__(self):
item = self.takeAt(0)
while item:
item = self.takeAt(0)
def addItem(self, item):
self.itemList.append(item)
def count(self):
return len(self.itemList)
def itemAt(self, index):
if index >= 0 and index < len(self.itemList):
return self.itemList[index]
return None
def takeAt(self, index):
if index >= 0 and index < len(self.itemList):
return self.itemList.pop(index)
return None
def expandingDirections(self):
return QtCore.Qt.Orientations(QtCore.Qt.Orientation(0))
def hasHeightForWidth(self):
return True
def heightForWidth(self, width):
height = self.doLayout(QtCore.QRect(0, 0, width, 0), True)
return height
def setGeometry(self, rect):
super(FlowLayout, self).setGeometry(rect)
self.doLayout(rect, False)
def sizeHint(self):
return self.minimumSize()
def minimumSize(self):
w = self.geometry().width()
h = self.doLayout(QtCore.QRect(0, 0, w, 0), True)
return QtCore.QSize(w, h)
def doLayout(self, rect, testOnly):
x = rect.x()
y = rect.y()
lineHeight = 0
for item in self.itemList:
wid = item.widget()
spaceX = self.spacing() + wid.style().layoutSpacing(QtGui.QSizePolicy.PushButton, QtGui.QSizePolicy.PushButton, QtCore.Qt.Horizontal)
spaceY = self.spacing() + wid.style().layoutSpacing(QtGui.QSizePolicy.PushButton, QtGui.QSizePolicy.PushButton, QtCore.Qt.Vertical)
nextX = x + item.sizeHint().width() + spaceX
if nextX - spaceX > rect.right() and lineHeight > 0:
x = rect.x()
y = y + lineHeight + spaceY
nextX = x + item.sizeHint().width() + spaceX
lineHeight = 0
if not testOnly:
item.setGeometry(QtCore.QRect(QtCore.QPoint(x, y), item.sizeHint()))
x = nextX
lineHeight = max(lineHeight, item.sizeHint().height())
return y + lineHeight - rect.y()
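# A minimal usage sketch (assuming the Qt bindings imported above; container
# and button names are hypothetical):
#
#   layout = FlowLayout()
#   for text in ("alpha", "beta", "gamma"):
#       layout.addWidget(QtGui.QPushButton(text))
#   container = QtGui.QWidget()
#   container.setLayout(layout)
#
# Items wrap onto a new row whenever the next widget would cross rect.right().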
|
lgpl-2.1
|
shipci/sympy
|
sympy/physics/optics/__init__.py
|
58
|
1218
|
__all__ = []
# The following pattern is used below for importing sub-modules:
#
# 1. "from foo import *". This imports all the names from foo.__all__ into
# this module. But, this does not put those names into the __all__ of
# this module. This enables "from sympy.physics.optics import TWave" to
# work.
# 2. "import foo; __all__.extend(foo.__all__)". This adds all the names in
# foo.__all__ to the __all__ of this module. The names in __all__
# determine which names are imported when
# "from sympy.physics.optics import *" is done.
from . import waves
from .waves import TWave
__all__.extend(waves.__all__)
from . import gaussopt
from .gaussopt import (RayTransferMatrix, FreeSpace, FlatRefraction,
CurvedRefraction, FlatMirror, CurvedMirror, ThinLens, GeometricRay,
BeamParameter, waist2rayleigh, rayleigh2waist, geometric_conj_ab,
geometric_conj_af, geometric_conj_bf, gaussian_conj, conjugate_gauss_beams)
__all__.extend(gaussopt.__all__)
from . import medium
from .medium import Medium
__all__.extend(medium.__all__)
from . import utils
from .utils import (refraction_angle, deviation, lens_makers_formula,
mirror_formula, lens_formula)
__all__.extend(utils.__all__)
|
bsd-3-clause
|
gst/amqpy
|
amqpy/tests/test_serialization.py
|
1
|
9249
|
from datetime import datetime
from decimal import Decimal
from random import randint
import pytest
from ..message import GenericContent
from ..serialization import AMQPReader, AMQPWriter, FrameSyntaxError
def assert_equal_binary(b, s):
assert b == s.encode('latin_1')
class TestSerialization:
def test_empty_writer(self):
w = AMQPWriter()
assert w.getvalue() == bytes()
def test_single_bit(self):
for val, check in [(True, '\x01'), (False, '\x00')]:
w = AMQPWriter()
w.write_bit(val)
s = w.getvalue()
assert_equal_binary(s, check)
r = AMQPReader(s)
assert r.read_bit() == val
def test_multiple_bits(self):
w = AMQPWriter()
w.write_bit(True)
w.write_bit(True)
w.write_bit(False)
w.write_bit(True)
s = w.getvalue()
assert_equal_binary(s, '\x0b')
r = AMQPReader(s)
assert r.read_bit() is True
assert r.read_bit() is True
assert r.read_bit() is False
assert r.read_bit() is True
def test_multiple_bits2(self):
"""
Check bits mixed with non-bits
"""
w = AMQPWriter()
w.write_bit(True)
w.write_bit(True)
w.write_bit(False)
w.write_octet(10)
w.write_bit(True)
s = w.getvalue()
assert_equal_binary(s, '\x03\x0a\x01')
r = AMQPReader(s)
assert r.read_bit() is True
assert r.read_bit() is True
assert r.read_bit() is False
assert r.read_octet() == 10
assert r.read_bit() is True
def test_multiple_bits3(self):
"""
Check bit groups that span multiple bytes
"""
w = AMQPWriter()
# Spit out 20 bits
for i in range(10):
w.write_bit(True)
w.write_bit(False)
s = w.getvalue()
assert_equal_binary(s, '\x55\x55\x05')
r = AMQPReader(s)
for i in range(10):
assert r.read_bit() is True
assert r.read_bit() is False
#
# Octets
#
def test_octet(self):
for val in range(256):
w = AMQPWriter()
w.write_octet(val)
s = w.getvalue()
assert_equal_binary(s, chr(val))
r = AMQPReader(s)
assert r.read_octet() == val
def test_octet_invalid(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_octet(-1)
def test_octet_invalid2(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_octet(256)
#
# Shorts
#
def test_short(self):
for i in range(256):
val = randint(0, 65535)
w = AMQPWriter()
w.write_short(val)
s = w.getvalue()
r = AMQPReader(s)
assert r.read_short() == val
def test_short_invalid(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_short(-1)
def test_short_invalid2(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_short(65536)
#
# Longs
#
def test_long(self):
for i in range(256):
val = randint(0, 4294967295)
w = AMQPWriter()
w.write_long(val)
s = w.getvalue()
r = AMQPReader(s)
assert r.read_long() == val
def test_long_invalid(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_long(-1)
def test_long_invalid2(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_long(4294967296)
#
# LongLongs
#
def test_longlong(self):
for i in range(256):
val = randint(0, (2 ** 64) - 1)
w = AMQPWriter()
w.write_longlong(val)
s = w.getvalue()
r = AMQPReader(s)
assert r.read_longlong() == val
def test_longlong_invalid(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_longlong(-1)
def test_longlong_invalid2(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_longlong(2 ** 64)
#
# Shortstr
#
def test_empty_shortstr(self):
w = AMQPWriter()
w.write_shortstr('')
s = w.getvalue()
assert_equal_binary(s, '\x00')
r = AMQPReader(s)
assert r.read_shortstr() == ''
def test_shortstr(self):
w = AMQPWriter()
w.write_shortstr('hello')
s = w.getvalue()
assert_equal_binary(s, '\x05hello')
r = AMQPReader(s)
assert r.read_shortstr() == 'hello'
def test_shortstr_unicode(self):
w = AMQPWriter()
w.write_shortstr('hello')
s = w.getvalue()
assert_equal_binary(s, '\x05hello')
r = AMQPReader(s)
assert r.read_shortstr() == 'hello'
def test_long_shortstr(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_shortstr('x' * 256)
def test_long_shortstr_unicode(self):
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_shortstr('\u0100' * 128)
#
# Longstr
#
def test_empty_longstr(self):
w = AMQPWriter()
w.write_longstr('')
s = w.getvalue()
assert_equal_binary(s, '\x00\x00\x00\x00')
r = AMQPReader(s)
assert r.read_longstr() == ''
def test_longstr(self):
val = 'a' * 512
w = AMQPWriter()
w.write_longstr(val)
s = w.getvalue()
assert_equal_binary(s, '\x00\x00\x02\x00' + ('a' * 512))
r = AMQPReader(s)
assert r.read_longstr() == str(val)
def test_longstr_unicode(self):
val = 'a' * 512
w = AMQPWriter()
w.write_longstr(val)
s = w.getvalue()
assert_equal_binary(s, '\x00\x00\x02\x00' + ('a' * 512))
r = AMQPReader(s)
assert r.read_longstr() == val
#
# Table
#
def test_table_empty(self):
val = {}
w = AMQPWriter()
w.write_table(val)
s = w.getvalue()
assert_equal_binary(s, '\x00\x00\x00\x00')
r = AMQPReader(s)
assert r.read_table() == val
def test_table(self):
val = {'foo': 7}
w = AMQPWriter()
w.write_table(val)
s = w.getvalue()
assert_equal_binary(s, '\x00\x00\x00\x09\x03fooI\x00\x00\x00\x07')
r = AMQPReader(s)
assert r.read_table() == val
def test_table_invalid(self):
"""
Check that an un-serializable table entry raises a FrameSyntaxError
"""
val = {'test': object()}
w = AMQPWriter()
with pytest.raises(FrameSyntaxError):
w.write_table(val)
def test_table_multi(self):
val = {
'foo': 7,
'bar': Decimal('123345.1234'),
'baz': 'this is some random string I typed',
'ubaz': 'And something in unicode',
'dday_aniv': datetime(1994, 6, 6),
'nothing': None,
'more': {
'abc': -123,
'def': 'hello world',
'now': datetime(2007, 11, 11, 21, 14, 31),
'qty': Decimal('-123.45'),
'blank': {},
'extra': {
'deeper': 'more strings',
'nums': -12345678,
},
}
}
w = AMQPWriter()
w.write_table(val)
s = w.getvalue()
r = AMQPReader(s)
assert r.read_table() == val
#
# Array
#
def test_array_from_list(self):
val = [1, 'foo', None]
w = AMQPWriter()
w.write_array(val)
s = w.getvalue()
assert_equal_binary(
s, '\x00\x00\x00\x0EI\x00\x00\x00\x01S\x00\x00\x00\x03fooV',
)
r = AMQPReader(s)
assert r.read_array() == val
def test_array_from_tuple(self):
val = (1, 'foo', None)
w = AMQPWriter()
w.write_array(val)
s = w.getvalue()
assert_equal_binary(
s, '\x00\x00\x00\x0EI\x00\x00\x00\x01S\x00\x00\x00\x03fooV',
)
r = AMQPReader(s)
assert r.read_array() == list(val)
def test_table_with_array(self):
val = {
'foo': 7,
'bar': Decimal('123345.1234'),
'baz': 'this is some random string I typed',
'blist': [1, 2, 3],
'nlist': [1, [2, 3, 4]],
'ndictl': {'nfoo': 8, 'nblist': [5, 6, 7]}
}
w = AMQPWriter()
w.write_table(val)
s = w.getvalue()
r = AMQPReader(s)
assert r.read_table() == val
class TestGenericContent:
def test_generic_content_eq(self):
msg_1 = GenericContent({'dummy': 'foo'})
msg_2 = GenericContent({'dummy': 'foo'})
msg_3 = GenericContent({'dummy': 'bar'})
assert msg_1 == msg_1
assert msg_1 == msg_2
assert msg_1 != msg_3
assert msg_1 is not None
|
mit
|
l0kix2/python-debparse
|
tests/debcontrol/test_debcontrol_paragraphs.py
|
1
|
1572
|
# coding: utf-8
from debparse.deb_control import paragraphs
from . import examples
def test_get_raw_paragraphs_correct_count_with_linebreaks():
data = examples.CONTROL_FILE_DATA
raw_paragraph = paragraphs.get_raw_paragraphs(data)
assert len(raw_paragraph) == 3
def test_get_raw_paragraphs_correct_count_stripped_content():
data = examples.CONTROL_FILE_DATA.strip()
raw_paragraph = paragraphs.get_raw_paragraphs(data)
assert len(raw_paragraph) == 3
def test_get_raw_paragraphs_correct_content():
data = examples.CONTROL_FILE_DATA
raw_paragraphs = paragraphs.get_raw_paragraphs(data)
assert raw_paragraphs, 'no paragraphs found, check correct_count tests'
first_paragraph = raw_paragraphs[0]
assert first_paragraph.startswith('Source: nginx')
assert first_paragraph.endswith('Homepage: http://nginx.net')
last_paragraph = raw_paragraphs[-1]
assert last_paragraph.startswith('Package: nginx-doc')
assert last_paragraph.endswith('power of Nginx.')
def test_get_raw_fields_correct_count():
data = examples.PARAGRAPH
raw_fields = paragraphs.get_raw_fields(data)
assert len(raw_fields) == 4
def test_get_raw_fields_correct_content():
data = examples.PARAGRAPH
raw_fields = paragraphs.get_raw_fields(data)
source, uploaders, build_deps, standards = raw_fields
assert source == 'Source: nginx'
assert 'Uploaders' in uploaders
assert '<[email protected]>' in uploaders
assert 'Build-Depends:' in build_deps
assert 'dpkg-dev (>= 1.15.7),' in build_deps
|
bsd-3-clause
|
OpenDaisy/daisy-api
|
daisy/api/backends/os.py
|
1
|
27832
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
/install endpoint for tecs API
"""
import copy
import subprocess
import time
import traceback
import webob.exc
from oslo_config import cfg
from oslo_log import log as logging
from webob.exc import HTTPBadRequest
from webob.exc import HTTPForbidden
from webob.exc import HTTPServerError
import threading
from threading import Thread
from daisy import i18n
from daisy import notifier
from daisy.api import policy
import daisy.api.v1
from daisy.common import exception
import daisy.registry.client.v1.api as registry
from daisy.api.backends.tecs import config
from daisy.api.backends import driver
from daisy.api.network_api import network as neutron
from ironicclient import client as ironic_client
import daisy.api.backends.common as daisy_cmn
import daisy.api.backends.tecs.common as tecs_cmn
try:
import simplejson as json
except ImportError:
import json
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
_LI = i18n._LI
_LW = i18n._LW
CONF = cfg.CONF
install_opts = [
cfg.StrOpt('max_parallel_os_number', default=10,
help='Maximum number of hosts installing the OS at the same time.'),
]
CONF.register_opts(install_opts)
host_os_status = {
'INIT' : 'init',
'INSTALLING' : 'installing',
'ACTIVE' : 'active',
'INSTALL_FAILED': 'install-failed',
'UPDATING': 'updating',
'UPDATE_FAILED': 'update-failed'
}
daisy_tecs_path = tecs_cmn.daisy_tecs_path
def get_ironicclient(): # pragma: no cover
"""Get Ironic client instance."""
args = {'os_auth_token': 'fake',
'ironic_url':'http://127.0.0.1:6385/v1'}
return ironic_client.get_client(1, **args)
def pxe_server_build(req, install_meta):
cluster_id = install_meta['cluster_id']
try:
networks = registry.get_networks_detail(req.context, cluster_id)
except exception.Invalid as e:
raise HTTPBadRequest(explanation=e.msg, request=req)
try:
ip_inter = lambda x:sum([256**j*int(i) for j,i in enumerate(x.split('.')[::-1])])
inter_ip = lambda x: '.'.join([str(x/(256**i)%256) for i in range(3,-1,-1)])
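# ip_inter/inter_ip convert between dotted-quad strings and 32-bit ints,
# e.g. ip_inter('192.168.1.1') == 3232235777 and
# inter_ip(3232235777) == '192.168.1.1' (illustrative values).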
network_cidr = [network['cidr'] for network in networks if network['name'] == 'DEPLOYMENT'][0]
if not network_cidr:
network_cidr="192.168.1.1/24"
ip_tmp_cidr=network_cidr.split('/')[0]
inter_tmp=(ip_inter(ip_tmp_cidr))+1
pxe_server_ip=inter_ip(inter_tmp)
cidr_end=network_cidr.split('/')[1]
cidr_end_inter=2**32-2**(32-int(cidr_end))
net_mask=inter_ip(cidr_end_inter)
client_tmp_begin=(ip_inter(ip_tmp_cidr))+2
client_ip_begin=inter_ip(client_tmp_begin)
cidr_max_inter=inter_tmp+(2**(32-int(cidr_end)))-4
client_ip_end=inter_ip(cidr_max_inter)
args = {'build_pxe': 'yes', 'eth_name': install_meta['deployment_interface'], 'ip_address': pxe_server_ip, 'net_mask': net_mask,
'client_ip_begin': client_ip_begin, 'client_ip_end': client_ip_end}
ironic = get_ironicclient()
ironic.daisy.build_pxe(**args)
except exception.Invalid as e:
msg = "build pxe server failed"
raise exception.InvalidNetworkConfig(msg)
def _get_network_plat(host_config, cluster_networks, dhcp_mac):
host_config['dhcp_mac'] = dhcp_mac
if host_config['interfaces']:
count = 0
for interface in host_config['interfaces']:
count += 1
if (interface.has_key('assigned_networks') and
interface['assigned_networks']):
interface_networks = copy.deepcopy(interface['assigned_networks'])
host_config['interfaces'][count-1]['assigned_networks'] = []
for network_type in interface_networks:
cluster_network = [network for network in cluster_networks if network['name'] == network_type][0]
# convert cidr to netmask
cidr_to_ip = ""
if cluster_network.get('cidr', None):
inter_ip = lambda x: '.'.join([str(x/(256**i)%256) for i in range(3,-1,-1)])
cidr_to_ip = inter_ip(2**32-2**(32-int(cluster_network['cidr'].split('/')[1])))
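# e.g. a '/24' cidr gives 2**32 - 2**8 = 4294967040, i.e. '255.255.255.0'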
network_plat = dict(network_type=cluster_network['network_type'],
ml2_type=cluster_network['ml2_type'],
capability=cluster_network['capability'],
physnet_name=cluster_network['physnet_name'],
gateway=cluster_network.get('gateway', ""),
ip=cluster_network.get('ip', ""),
netmask=cidr_to_ip,
vlan_id=cluster_network.get('vlan_id', ""))
host_config['interfaces'][count-1]['assigned_networks'].append(network_plat)
return host_config
def get_cluster_hosts_config(req, cluster_id):
params = dict(limit=1000000)
try:
cluster_data = registry.get_cluster_metadata(req.context, cluster_id)
networks = registry.get_networks_detail(req.context, cluster_id)
all_roles = registry.get_roles_detail(req.context)
except exception.Invalid as e:
raise HTTPBadRequest(explanation=e.msg, request=req)
roles = [role for role in all_roles if role['cluster_id'] == cluster_id]
all_hosts_ids = cluster_data['nodes']
hosts_config = []
for host_id in all_hosts_ids:
host_detail = daisy_cmn.get_host_detail(req, host_id)
role_host_db_lv_size_lists = list()
if host_detail.has_key('role') and host_detail['role']:
host_roles = host_detail['role']
for role in roles:
if role['name'] in host_detail['role'] and role['glance_lv_size']:
host_detail['glance_lv_size'] = role['glance_lv_size']
if role.get('db_lv_size', None) and host_roles and role['name'] in host_roles:
role_host_db_lv_size_lists.append(role['db_lv_size'])
if role['name'] == 'COMPUTER' and role['name'] in host_detail['role'] and role['nova_lv_size']:
host_detail['nova_lv_size'] = role['nova_lv_size']
if role_host_db_lv_size_lists:
host_detail['db_lv_size'] = max(role_host_db_lv_size_lists)
else:
host_detail['db_lv_size'] = 0
if (host_detail['os_status'] == host_os_status['INIT'] or
host_detail['os_status'] == host_os_status['INSTALLING'] or
host_detail['os_status'] == host_os_status['INSTALL_FAILED']):
host_dhcp_interface = [hi for hi in host_detail['interfaces'] if hi['is_deployment']]
if not host_dhcp_interface:
msg = "cann't find dhcp interface on host %s" % host_detail['id']
raise exception.InvalidNetworkConfig(msg)
if len(host_dhcp_interface) > 1:
msg = "dhcp interface should only has one on host %s" % host_detail['id']
raise exception.InvalidNetworkConfig(msg)
host_config_detail = copy.deepcopy(host_detail)
host_config = _get_network_plat(host_config_detail,
networks,
host_dhcp_interface[0]['mac'])
hosts_config.append(tecs_cmn.sort_interfaces_by_pci(host_config))
return hosts_config
def check_tfg_exist():
get_tfg_patch = "ls %s|grep ^ZXTFG-.*\.bin$" % daisy_tecs_path
obj = subprocess.Popen(get_tfg_patch,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdoutput, erroutput) = obj.communicate()
tfg_patch_pkg_file = ""
tfg_patch_pkg_name = ""
if stdoutput:
tfg_patch_pkg_name = stdoutput.split('\n')[0]
tfg_patch_pkg_file = daisy_tecs_path + tfg_patch_pkg_name
chmod_for_tfg_bin = 'chmod +x %s' % tfg_patch_pkg_file
daisy_cmn.subprocess_call(chmod_for_tfg_bin)
if not stdoutput or not tfg_patch_pkg_name:
LOG.info(_("no ZXTFG patch bin file got in %s" % daisy_tecs_path))
return ""
return tfg_patch_pkg_file
def update_db_host_status(req, host_id, host_status):
"""
Update host status and installation progress in the db.
:return:
"""
try:
host_meta = {}
host_meta['os_progress'] = host_status['os_progress']
host_meta['os_status'] = host_status['os_status']
host_meta['messages'] = host_status['messages']
registry.update_host_metadata(req.context,
host_id,
host_meta)
except exception.Invalid as e:
raise HTTPBadRequest(explanation=e.msg, request=req)
class OSInstall():
"""
Class for install OS.
"""
""" Definition for install states."""
def __init__(self, req, cluster_id):
self.req = req
self.cluster_id = cluster_id
#5s
self.time_step = 5
# 30 min
self.single_host_install_timeout = 30 * (12*self.time_step)
self.max_parallel_os_num = int(CONF.max_parallel_os_number)
self.cluster_hosts_install_timeout = (self.max_parallel_os_num/4 + 2 )* 60 * (12*self.time_step)
self.ironicclient = get_ironicclient()
def _set_boot_or_power_state(self, user, passwd, addr, action):
count = 0
repeat_times = 24
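# Linear backoff, given the values above: the total wait across 24 retries
# is 2 + 4 + ... + 48 = 600s, matching the "10 mins" in the failure message.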
while count < repeat_times:
set_obj = self.ironicclient.daisy.set_boot_or_power_state(user,
passwd,
addr,
action)
set_dict = dict([(f, getattr(set_obj, f, '')) for f in ['return_code', 'info']])
rc = int(set_dict['return_code'])
if rc == 0:
LOG.info(_("set %s to '%s' successfully for %s times by ironic" % (addr,action,count+1)))
break
else:
count += 1
LOG.info(_("try setting %s to '%s' failed for %s times by ironic" % (addr,action,count)))
time.sleep(count*2)
if count >= repeat_times:
message = "set %s to '%s' failed for 10 mins" % (addr,action)
raise exception.IMPIOprationFailed(message=message)
def _baremetal_install_os(self, host_detail):
# os_install_disk = 'sda'
os_version_file = host_detail['os_version_file']
if os_version_file:
test_os_version_exist = 'test -f %s' % os_version_file
daisy_cmn.subprocess_call(test_os_version_exist)
else:
self.message = "no OS version file configed for host %s" % host_detail['id']
raise exception.NotFound(message=self.message)
if host_detail.get('root_disk',None):
root_disk = host_detail['root_disk']
else:
root_disk = 'sda'
if host_detail.get('root_lv_size',None):
root_lv_size_m = host_detail['root_lv_size']
else:
root_lv_size_m = 51200
memory_size_b_str = str(host_detail['memory']['total'])
memory_size_b_int = int(memory_size_b_str.strip().split()[0])
memory_size_m = memory_size_b_int//1024
memory_size_g = memory_size_m//1024
swap_lv_size_m = host_detail['swap_lv_size']
cinder_vg_size_m = 0
disk_list = []
disk_storage_size_b = 0
for key in host_detail['disks']:
disk_list.append(host_detail['disks'][key]['name'])
storage_size_str = host_detail['disks'][key]['size']
storage_size_b_int = int(storage_size_str.strip().split()[0])
disk_storage_size_b = disk_storage_size_b + storage_size_b_int
disk_list = ','.join(disk_list)
disk_storage_size_m = disk_storage_size_b//(1024*1024)
if host_detail.has_key('root_pwd') and host_detail['root_pwd']:
root_pwd = host_detail['root_pwd']
else:
root_pwd = 'ossdbg1'
tfg_patch_pkg_file = check_tfg_exist()
if (not host_detail['ipmi_user'] or
not host_detail['ipmi_passwd'] or
not host_detail['ipmi_addr'] ):
self.message = "Invalid ipmi information configed for host %s" % host_detail['id']
raise exception.NotFound(message=self.message)
self._set_boot_or_power_state(host_detail['ipmi_user'],
host_detail['ipmi_passwd'],
host_detail['ipmi_addr'],
'pxe')
kwargs = {'hostname':host_detail['name'],
'iso_path':os_version_file,
'tfg_bin':tfg_patch_pkg_file,
'dhcp_mac':host_detail['dhcp_mac'],
'storage_size':disk_storage_size_m,
'memory_size':memory_size_g,
'interfaces':host_detail['interfaces'],
'root_lv_size':root_lv_size_m,
'swap_lv_size':swap_lv_size_m,
'cinder_vg_size':cinder_vg_size_m,
'disk_list':disk_list,
'root_disk':root_disk,
'root_pwd':root_pwd,
'reboot':'no'}
if host_detail.has_key('glance_lv_size'):
kwargs['glance_lv_size'] = host_detail['glance_lv_size']
else:
kwargs['glance_lv_size'] = 0
if host_detail.has_key('db_lv_size') and host_detail['db_lv_size']:
kwargs['db_lv_size'] = host_detail['db_lv_size']
else:
kwargs['db_lv_size'] = 0
if host_detail.has_key('nova_lv_size') and host_detail['nova_lv_size']:
kwargs['nova_lv_size'] = host_detail['nova_lv_size']
else:
kwargs['nova_lv_size'] = 0
install_os_obj = self.ironicclient.daisy.install_os(**kwargs)
install_os_dict = dict([(f, getattr(install_os_obj, f, '')) for f in ['return_code', 'info']])
rc = int(install_os_dict['return_code'])
if rc != 0:
install_os_description = install_os_dict['info']
LOG.info(_("install os config failed because of '%s'" % (install_os_description)))
host_status = {'os_status':host_os_status['INSTALL_FAILED'],
'os_progress':0,
'messages':install_os_description}
update_db_host_status(self.req, host_detail['id'],host_status)
msg = "ironic install os return failed for host %s" % host_detail['id']
raise exception.OSInstallFailed(message=msg)
self._set_boot_or_power_state(host_detail['ipmi_user'],
host_detail['ipmi_passwd'],
host_detail['ipmi_addr'],
'reset')
def _install_os_by_rousource_type(self, hosts_detail):
# all hosts status set to 'init' before install os
for host_detail in hosts_detail:
host_status = {'os_status':host_os_status['INIT'],
'os_progress':0,
'messages':''}
update_db_host_status(self.req, host_detail['id'],host_status)
for host_detail in hosts_detail:
self._baremetal_install_os(host_detail)
def _set_disk_start_mode(self, host_detail):
LOG.info(_("Set boot from disk for host %s" % (host_detail['id'])))
self._set_boot_or_power_state(host_detail['ipmi_user'],
host_detail['ipmi_passwd'],
host_detail['ipmi_addr'],
'disk')
LOG.info(_("reboot host %s" % (host_detail['id'])))
self._set_boot_or_power_state(host_detail['ipmi_user'],
host_detail['ipmi_passwd'],
host_detail['ipmi_addr'],
'reset')
def _init_progress(self, host_detail, hosts_status):
host_id = host_detail['id']
host_status = hosts_status[host_id] = {}
host_status['os_status'] = host_os_status['INSTALLING']
host_status['os_progress'] = 0
host_status['count'] = 0
if host_detail['resource_type'] == 'docker':
host_status['messages'] = "docker container is creating"
else:
host_status['messages'] = "os is installing"
update_db_host_status(self.req, host_id, host_status)
def _query_host_progress(self, host_detail, host_status, host_last_status):
host_id = host_detail['id']
install_result_obj = \
self.ironicclient.daisy.get_install_progress(host_detail['dhcp_mac'])
install_result = dict([(f, getattr(install_result_obj, f, ''))
for f in ['return_code', 'info', 'progress']])
rc = int(install_result['return_code'])
host_status['os_progress'] = int(install_result['progress'])
if rc == 0:
if host_status['os_progress'] == 100:
LOG.info(_("host %s install os completely." % host_id))
host_status['os_status'] = host_os_status['ACTIVE']
host_status['messages'] = "os installed successfully"
# wait for nicfix script complete
time.sleep(10)
self._set_disk_start_mode(host_detail)
else:
if host_status['os_progress'] == host_last_status['os_progress']:
host_status['count'] = host_status['count'] + 1
LOG.debug(_("host %s has kept %ss when progress is %s." % (host_id,
host_status['count']*self.time_step, host_status['os_progress'])))
else:
LOG.info(_("host %s install failed." % host_id))
host_status['os_status'] = host_os_status['INSTALL_FAILED']
host_status['messages'] = install_result['info']
def _query_progress(self, hosts_last_status, hosts_detail):
hosts_status = copy.deepcopy(hosts_last_status)
for host_detail in hosts_detail:
host_id = host_detail['id']
            if host_id not in hosts_status:
self._init_progress(host_detail, hosts_status)
continue
host_status = hosts_status[host_id]
host_last_status = hosts_last_status[host_id]
            # Only process hosts that are still installing after init;
            # other hosts' info is kept unchanged in hosts_status
if host_status['os_status'] != host_os_status['INSTALLING']:
continue
self._query_host_progress(host_detail, host_status, host_last_status)
if host_status['count']*self.time_step >= self.single_host_install_timeout:
host_status['os_status'] = host_os_status['INSTALL_FAILED']
if host_detail['resource_type'] == 'docker':
host_status['messages'] = "docker container created timeout"
else:
host_status['messages'] = "os installed timeout"
if (host_status['os_progress'] != host_last_status['os_progress'] or\
host_status['os_status'] != host_last_status['os_status']):
host_status['count'] = 0
update_db_host_status(self.req, host_id,host_status)
return hosts_status
def _get_install_status(self, hosts_detail):
query_count = 0
hosts_last_status = {}
while True:
hosts_install_status = self._query_progress(hosts_last_status, hosts_detail)
            # If all hosts have finished installing, break
installing_hosts = [id for id in hosts_install_status.keys()
if hosts_install_status[id]['os_status'] == host_os_status['INSTALLING']]
if not installing_hosts:
break
            # After the cluster install timeout, mark hosts that are not 'active' as 'failed'
elif query_count*self.time_step >= self.cluster_hosts_install_timeout:
for host_id,host_status in hosts_install_status.iteritems():
if (host_status['os_status'] != host_os_status['ACTIVE'] and
host_status['os_status'] != host_os_status['INSTALL_FAILED']):
                        # Mark the host as install-failed because the cluster install timed out
host_status['os_status'] = host_os_status['INSTALL_FAILED']
host_status['messages'] = "cluster os installed timeout"
update_db_host_status(self.req, host_id, host_status)
break
else:
query_count += 1
hosts_last_status = hosts_install_status
time.sleep(self.time_step)
return hosts_install_status
def install_os(self, hosts_detail, role_hosts_ids):
if len(hosts_detail) > self.max_parallel_os_num:
install_hosts = hosts_detail[:self.max_parallel_os_num]
hosts_detail = hosts_detail[self.max_parallel_os_num:]
else:
install_hosts = hosts_detail
hosts_detail = []
install_hosts_id = [host_detail['id'] for host_detail in install_hosts]
LOG.info(_("Begin install os for hosts %s." % ','.join(install_hosts_id)))
        self._install_os_by_resource_type(install_hosts)
LOG.info(_("Begin to query install progress..."))
        # Wait until installation completes
cluster_install_status = self._get_install_status(install_hosts)
LOG.info(_("OS install in cluster %s result is:" % self.cluster_id))
LOG.info(_("%s %s %s" % ('host-id', 'os-status', 'description')))
for host_id,host_status in cluster_install_status.iteritems():
LOG.info(_("%s %s %s" % (host_id, host_status['os_status'], host_status['messages'])))
if host_id in role_hosts_ids:
if host_status['os_status'] == host_os_status['INSTALL_FAILED']:
break
else:
role_hosts_ids.remove(host_id)
return (hosts_detail, role_hosts_ids)
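# --- Illustrative sketch (not part of the original module) ---
# install_os() installs at most max_parallel_os_num hosts per call and
# returns the hosts still pending plus the remaining role host ids, so a
# caller might drain the queue like this (the names `installer`, `pending`
# and `role_ids` are hypothetical):
#
#     pending, role_ids = hosts_detail, role_hosts_ids
#     while pending:
#         pending, role_ids = installer.install_os(pending, role_ids)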
def _os_thread_bin(req, host_ip, host_id):
host_meta = {}
password = "ossdbg1"
LOG.info(_("Begin update os for host %s." % (host_ip)))
cmd = 'mkdir -p /var/log/daisy/daisy_update/'
daisy_cmn.subprocess_call(cmd)
var_log_path = "/var/log/daisy/daisy_update/%s_update_tfg.log" % host_ip
with open(var_log_path, "w+") as fp:
cmd = '/var/lib/daisy/tecs/trustme.sh %s %s' % (host_ip, password)
daisy_cmn.subprocess_call(cmd,fp)
cmd = 'clush -S -b -w %s "rm -rf /home/daisy_update"' % (host_ip,)
daisy_cmn.subprocess_call(cmd,fp)
cmd = 'clush -S -w %s "mkdir -p /home/daisy_update"' % (host_ip,)
daisy_cmn.subprocess_call(cmd,fp)
cmd = 'clush -S -w %s -c /var/lib/daisy/tecs/ZXTFG*.bin --dest=/home/daisy_update' % (host_ip,)
daisy_cmn.subprocess_call(cmd,fp)
cmd = 'clush -S -w %s "chmod 777 /home/daisy_update/*"' % (host_ip,)
daisy_cmn.subprocess_call(cmd,fp)
try:
exc_result = subprocess.check_output(
'clush -S -w %s "/home/daisy_update/ZXTFG*.bin upgrade reboot"' % (host_ip,),
shell=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
if e.returncode == 255 and "System will reboot" in e.output.strip():
host_meta['os_progress'] = 100
host_meta['os_status'] = host_os_status['ACTIVE']
host_meta['messages'] = e.output.strip()
LOG.info(_("Update tfg for %s successfully,os reboot!" % host_ip))
time.sleep(20)
else:
host_meta['os_progress'] = 0
host_meta['os_status'] = host_os_status['UPDATE_FAILED']
host_meta['messages'] = e.output.strip()
LOG.info(_("Update tfg for %s failed!" % host_ip))
update_db_host_status(req, host_id, host_meta)
fp.write(e.output.strip())
else:
if "System will reboot" in exc_result:
time.sleep(20)
host_meta['os_progress'] = 100
host_meta['os_status'] = host_os_status['ACTIVE']
host_meta['messages'] = ""
update_db_host_status(req, host_id, host_meta)
LOG.info(_("Update os for %s successfully!" % host_ip))
fp.write(exc_result)
# Wrapper that catches any exception from the thread body and writes it to the log file
def os_thread_bin(req, host_ip, host_id):
try:
_os_thread_bin(req, host_ip, host_id)
except Exception as e:
LOG.exception(e.message)
def upgrade_os(req, hosts_list):
threads = []
host_meta = {}
for host_info in hosts_list:
host_id = host_info.keys()[0]
host_ip = host_info.values()[0]
host_meta['os_progress'] = 1
host_meta['os_status'] = host_os_status['UPDATING']
host_meta['messages'] = ""
update_db_host_status(req, host_id,host_meta)
t = threading.Thread(target=os_thread_bin,args=(req,host_ip,host_id))
t.setDaemon(True)
t.start()
threads.append(t)
try:
for t in threads:
t.join()
except:
LOG.warn(_("Join update thread %s failed!" % t))
else:
for host_info in hosts_list:
update_failed_flag = False
host_id = host_info.keys()[0]
host_ip = host_info.values()[0]
host = registry.get_host_metadata(req.context, host_id)
if host['os_status'] == host_os_status['UPDATE_FAILED'] or host['os_status'] == host_os_status['INIT']:
update_failed_flag = True
raise exception.ThreadBinException("%s update tfg failed! %s" % (host_ip, host['messages']))
if not update_failed_flag:
host_meta = {}
host_meta['os_progress'] = 100
host_meta['os_status'] = host_os_status['ACTIVE']
host_meta['messages'] = ""
update_db_host_status(req, host_id,host_meta)
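# --- Illustrative sketch (not part of the original module) ---
# upgrade_os() expects a list of single-entry dicts mapping a host id to a
# host ip; a hypothetical invocation (ids and ips made up):
#
#     hosts_list = [{'host-uuid-1': '10.0.0.11'}, {'host-uuid-2': '10.0.0.12'}]
#     upgrade_os(req, hosts_list)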
|
apache-2.0
|
evanscottgray/ryu
|
ryu/tests/integrated/test_add_flow_v12_matches.py
|
34
|
40869
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import logging
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ether
from ryu.ofproto import inet
from ryu.tests.integrated import tester
LOG = logging.getLogger(__name__)
class RunTest(tester.TestFlowBase):
""" Test case for add flows of Matches
"""
OFP_VERSIONS = [ofproto_v1_2.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(RunTest, self).__init__(*args, **kwargs)
self._verify = {}
def add_matches(self, dp, match):
m = dp.ofproto_parser.OFPFlowMod(dp, 0, 0, 0,
dp.ofproto.OFPFC_ADD,
0, 0, 0, 0xffffffff,
dp.ofproto.OFPP_ANY,
0xffffffff, 0, match, [])
dp.send_msg(m)
def _set_verify(self, headers, value, mask=None,
all_bits_masked=False, type_='int'):
self._verify = {}
self._verify['headers'] = headers
self._verify['value'] = value
self._verify['mask'] = mask
self._verify['all_bits_masked'] = all_bits_masked
self._verify['type'] = type_
def verify_default(self, dp, stats):
type_ = self._verify['type']
headers = self._verify['headers']
value = self._verify['value']
mask = self._verify['mask']
value_masked = self._masked(type_, value, mask)
all_bits_masked = self._verify['all_bits_masked']
field = None
for s in stats:
for f in s.match.fields:
if f.header in headers:
field = f
break
if field is None:
if self._is_all_zero_bit(type_, mask):
return True
return 'Field not found.'
f_value = field.value
if hasattr(field, 'mask'):
f_mask = field.mask
else:
f_mask = None
if (f_value == value) or (f_value == value_masked):
if (f_mask == mask) or (all_bits_masked and f_mask is None):
return True
return "send: %s/%s, reply: %s/%s" \
% (self._cnv_to_str(type_, value, mask, f_value, f_mask))
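    # Note (added): verify_default() returns True on success or a diagnostic
    # string on failure; the tester framework presumably treats any non-True
    # return value as the failure message for the test.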
def _masked(self, type_, value, mask):
if mask is None:
v = value
elif type_ == 'int':
v = value & mask
elif type_ == 'mac':
v = self.haddr_masked(value, mask)
elif type_ == 'ipv4':
v = self.ipv4_masked(value, mask)
elif type_ == 'ipv6':
v = self.ipv6_masked(value, mask)
else:
            raise ValueError('Unknown type')
return v
def _is_all_zero_bit(self, type_, val):
if type_ == 'int' or type_ == 'ipv4':
return val == 0
elif type_ == 'mac':
for v in val:
if v != '\x00':
return False
return True
elif type_ == 'ipv6':
for v in val:
if v != 0:
return False
return True
else:
            raise ValueError('Unknown type')
def _cnv_to_str(self, type_, value, mask, f_value, f_mask):
func = None
if type_ == 'int':
pass
elif type_ == 'mac':
func = self.haddr_to_str
elif type_ == 'ipv4':
func = self.ipv4_to_str
elif type_ == 'ipv6':
func = self.ipv6_to_str
else:
            raise ValueError('Unknown type')
if func:
value = func(value)
f_value = func(f_value)
if mask:
mask = func(mask)
if f_mask:
f_mask = func(f_mask)
return value, mask, f_value, f_mask
def test_rule_set_dl_dst(self, dp):
dl_dst = 'e2:7a:09:79:0b:0f'
dl_dst_bin = self.haddr_to_bin(dl_dst)
headers = [dp.ofproto.OXM_OF_ETH_DST, dp.ofproto.OXM_OF_ETH_DST_W]
self._set_verify(headers, dl_dst_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst(dl_dst_bin)
self.add_matches(dp, match)
def test_rule_set_dl_dst_masked_ff(self, dp):
dl_dst = 'd0:98:79:b4:75:b5'
dl_dst_bin = self.haddr_to_bin(dl_dst)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ETH_DST, dp.ofproto.OXM_OF_ETH_DST_W]
self._set_verify(headers, dl_dst_bin, mask_bin, True, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst_masked(dl_dst_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_dst_masked_f0(self, dp):
dl_dst = 'e2:7a:09:79:0b:0f'
dl_dst_bin = self.haddr_to_bin(dl_dst)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ETH_DST, dp.ofproto.OXM_OF_ETH_DST_W]
self._set_verify(headers, dl_dst_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst_masked(dl_dst_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_dst_masked_00(self, dp):
dl_dst = 'e2:7a:09:79:0b:0f'
dl_dst_bin = self.haddr_to_bin(dl_dst)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ETH_DST, dp.ofproto.OXM_OF_ETH_DST_W]
self._set_verify(headers, dl_dst_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_dst_masked(dl_dst_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
headers = [dp.ofproto.OXM_OF_ETH_SRC, dp.ofproto.OXM_OF_ETH_SRC_W]
self._set_verify(headers, dl_src_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src(dl_src_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src_masked_ff(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ETH_SRC, dp.ofproto.OXM_OF_ETH_SRC_W]
self._set_verify(headers, dl_src_bin, mask_bin, True, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src_masked(dl_src_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src_masked_f0(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ETH_SRC, dp.ofproto.OXM_OF_ETH_SRC_W]
self._set_verify(headers, dl_src_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src_masked(dl_src_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_src_masked_00(self, dp):
dl_src = 'e2:7a:09:79:0b:0f'
dl_src_bin = self.haddr_to_bin(dl_src)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ETH_SRC, dp.ofproto.OXM_OF_ETH_SRC_W]
self._set_verify(headers, dl_src_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_src_masked(dl_src_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_dl_type_ip(self, dp):
dl_type = ether.ETH_TYPE_IP
headers = [dp.ofproto.OXM_OF_ETH_TYPE]
self._set_verify(headers, dl_type)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_arp(self, dp):
dl_type = ether.ETH_TYPE_ARP
headers = [dp.ofproto.OXM_OF_ETH_TYPE]
self._set_verify(headers, dl_type)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_vlan(self, dp):
dl_type = ether.ETH_TYPE_8021Q
headers = [dp.ofproto.OXM_OF_ETH_TYPE]
self._set_verify(headers, dl_type)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_ipv6(self, dp):
dl_type = ether.ETH_TYPE_IPV6
headers = [dp.ofproto.OXM_OF_ETH_TYPE]
self._set_verify(headers, dl_type)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_dl_type_lacp(self, dp):
dl_type = ether.ETH_TYPE_SLOW
headers = [dp.ofproto.OXM_OF_ETH_TYPE]
self._set_verify(headers, dl_type)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
self.add_matches(dp, match)
def test_rule_set_ip_dscp(self, dp):
ip_dscp = 36
dl_type = ether.ETH_TYPE_IP
headers = [dp.ofproto.OXM_OF_IP_DSCP]
self._set_verify(headers, ip_dscp)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_dscp(ip_dscp)
self.add_matches(dp, match)
def test_rule_set_vlan_vid(self, dp):
vlan_vid = 0x4ef
headers = [dp.ofproto.OXM_OF_VLAN_VID, dp.ofproto.OXM_OF_VLAN_VID_W]
self._set_verify(headers, vlan_vid)
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid(vlan_vid)
self.add_matches(dp, match)
def test_rule_set_vlan_vid_masked_ff(self, dp):
vlan_vid = 0x4ef
mask = 0xfff
headers = [dp.ofproto.OXM_OF_VLAN_VID, dp.ofproto.OXM_OF_VLAN_VID_W]
self._set_verify(headers, vlan_vid, mask, True)
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid_masked(vlan_vid, mask)
self.add_matches(dp, match)
def test_rule_set_vlan_vid_masked_f0(self, dp):
vlan_vid = 0x4ef
mask = 0xff0
headers = [dp.ofproto.OXM_OF_VLAN_VID, dp.ofproto.OXM_OF_VLAN_VID_W]
self._set_verify(headers, vlan_vid, mask)
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid_masked(vlan_vid, mask)
self.add_matches(dp, match)
def test_rule_set_vlan_vid_masked_00(self, dp):
vlan_vid = 0x4ef
mask = 0x000
headers = [dp.ofproto.OXM_OF_VLAN_VID, dp.ofproto.OXM_OF_VLAN_VID_W]
self._set_verify(headers, vlan_vid, mask)
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid_masked(vlan_vid, mask)
self.add_matches(dp, match)
def test_rule_set_vlan_pcp(self, dp):
vlan_vid = 0x4ef
vlan_pcp = 5
headers = [dp.ofproto.OXM_OF_VLAN_PCP]
self._set_verify(headers, vlan_pcp)
match = dp.ofproto_parser.OFPMatch()
match.set_vlan_vid(vlan_vid)
match.set_vlan_pcp(vlan_pcp)
self.add_matches(dp, match)
def test_rule_set_ip_ecn(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_ecn = 3
headers = [dp.ofproto.OXM_OF_IP_ECN]
self._set_verify(headers, ip_ecn)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_ecn(ip_ecn)
self.add_matches(dp, match)
def test_rule_set_ip_proto_icmp(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_ICMP
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_tcp(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_TCP
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_udp(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_UDP
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_route(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ROUTING
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_frag(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_FRAGMENT
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_icmp(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ICMPV6
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_none(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_NONE
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ip_proto_ipv6_dstopts(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_DSTOPTS
headers = [dp.ofproto.OXM_OF_IP_PROTO]
self._set_verify(headers, ip_proto)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
self.add_matches(dp, match)
def test_rule_set_ipv4_src(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
headers = [dp.ofproto.OXM_OF_IPV4_SRC, dp.ofproto.OXM_OF_IPV4_SRC_W]
self._set_verify(headers, src_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src(src_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_src_masked_32(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV4_SRC, dp.ofproto.OXM_OF_IPV4_SRC_W]
self._set_verify(headers, src_int, mask_int, True, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src_masked(src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_src_masked_24(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV4_SRC, dp.ofproto.OXM_OF_IPV4_SRC_W]
self._set_verify(headers, src_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src_masked(src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_src_masked_0(self, dp):
dl_type = ether.ETH_TYPE_IP
src = '192.168.196.250'
src_int = self.ipv4_to_int(src)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV4_SRC, dp.ofproto.OXM_OF_IPV4_SRC_W]
self._set_verify(headers, src_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_src_masked(src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
headers = [dp.ofproto.OXM_OF_IPV4_DST, dp.ofproto.OXM_OF_IPV4_DST_W]
self._set_verify(headers, dst_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst(dst_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst_masked_32(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV4_DST, dp.ofproto.OXM_OF_IPV4_DST_W]
self._set_verify(headers, dst_int, mask_int, True, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst_masked(dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst_masked_24(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV4_DST, dp.ofproto.OXM_OF_IPV4_DST_W]
self._set_verify(headers, dst_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst_masked(dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv4_dst_masked_0(self, dp):
dl_type = ether.ETH_TYPE_IP
dst = '192.168.54.155'
dst_int = self.ipv4_to_int(dst)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV4_DST, dp.ofproto.OXM_OF_IPV4_DST_W]
self._set_verify(headers, dst_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv4_dst_masked(dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_tcp_src(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_TCP
tp_src = 1103
headers = [dp.ofproto.OXM_OF_TCP_SRC]
self._set_verify(headers, tp_src)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_tcp_src(tp_src)
self.add_matches(dp, match)
def test_rule_set_tcp_dst(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_TCP
tp_dst = 236
headers = [dp.ofproto.OXM_OF_TCP_DST]
self._set_verify(headers, tp_dst)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_tcp_dst(tp_dst)
self.add_matches(dp, match)
def test_rule_set_udp_src(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_UDP
tp_src = 56617
headers = [dp.ofproto.OXM_OF_UDP_SRC]
self._set_verify(headers, tp_src)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_udp_src(tp_src)
self.add_matches(dp, match)
def test_rule_set_udp_dst(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_UDP
tp_dst = 61278
headers = [dp.ofproto.OXM_OF_UDP_DST]
self._set_verify(headers, tp_dst)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_udp_dst(tp_dst)
self.add_matches(dp, match)
def test_rule_set_icmpv4_type(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_ICMP
icmp_type = 8
headers = [dp.ofproto.OXM_OF_ICMPV4_TYPE]
self._set_verify(headers, icmp_type)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv4_type(icmp_type)
self.add_matches(dp, match)
def test_rule_set_icmpv4_code(self, dp):
dl_type = ether.ETH_TYPE_IP
ip_proto = inet.IPPROTO_ICMP
icmp_type = 9
icmp_code = 16
headers = [dp.ofproto.OXM_OF_ICMPV4_CODE]
self._set_verify(headers, icmp_code)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv4_type(icmp_type)
match.set_icmpv4_code(icmp_code)
self.add_matches(dp, match)
def test_rule_set_arp_opcode(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_op = 1
headers = [dp.ofproto.OXM_OF_ARP_OP]
self._set_verify(headers, arp_op)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_opcode(arp_op)
self.add_matches(dp, match)
def test_rule_set_arp_spa(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
headers = [dp.ofproto.OXM_OF_ARP_SPA, dp.ofproto.OXM_OF_ARP_SPA_W]
self._set_verify(headers, nw_src_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa(nw_src_int)
self.add_matches(dp, match)
def test_rule_set_arp_spa_masked_32(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_ARP_SPA, dp.ofproto.OXM_OF_ARP_SPA_W]
self._set_verify(headers, nw_src_int, mask_int, True, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa_masked(nw_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_spa_masked_24(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_ARP_SPA, dp.ofproto.OXM_OF_ARP_SPA_W]
self._set_verify(headers, nw_src_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa_masked(nw_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_spa_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_src = '192.168.222.57'
nw_src_int = self.ipv4_to_int(nw_src)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_ARP_SPA, dp.ofproto.OXM_OF_ARP_SPA_W]
self._set_verify(headers, nw_src_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_spa_masked(nw_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
headers = [dp.ofproto.OXM_OF_ARP_TPA, dp.ofproto.OXM_OF_ARP_TPA_W]
self._set_verify(headers, nw_dst_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa(nw_dst_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa_masked_32(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
mask = '255.255.255.255'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_ARP_TPA, dp.ofproto.OXM_OF_ARP_TPA_W]
self._set_verify(headers, nw_dst_int, mask_int, True, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa_masked(nw_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa_masked_24(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
mask = '255.255.255.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_ARP_TPA, dp.ofproto.OXM_OF_ARP_TPA_W]
self._set_verify(headers, nw_dst_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa_masked(nw_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_tpa_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
nw_dst = '192.168.198.233'
nw_dst_int = self.ipv4_to_int(nw_dst)
mask = '0.0.0.0'
mask_int = self.ipv4_to_int(mask)
headers = [dp.ofproto.OXM_OF_ARP_TPA, dp.ofproto.OXM_OF_ARP_TPA_W]
self._set_verify(headers, nw_dst_int, mask_int, type_='ipv4')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tpa_masked(nw_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_arp_sha(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
headers = [dp.ofproto.OXM_OF_ARP_SHA, dp.ofproto.OXM_OF_ARP_SHA_W]
self._set_verify(headers, arp_sha_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha(arp_sha_bin)
self.add_matches(dp, match)
def test_rule_set_arp_sha_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ARP_SHA, dp.ofproto.OXM_OF_ARP_SHA_W]
self._set_verify(headers, arp_sha_bin, mask_bin, True, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha_masked(arp_sha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_sha_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ARP_SHA, dp.ofproto.OXM_OF_ARP_SHA_W]
self._set_verify(headers, arp_sha_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha_masked(arp_sha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_sha_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_sha = '3e:ec:13:9b:f3:0b'
arp_sha_bin = self.haddr_to_bin(arp_sha)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ARP_SHA, dp.ofproto.OXM_OF_ARP_SHA_W]
self._set_verify(headers, arp_sha_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_sha_masked(arp_sha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
headers = [dp.ofproto.OXM_OF_ARP_THA, dp.ofproto.OXM_OF_ARP_THA_W]
self._set_verify(headers, arp_tha_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha(arp_tha_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
mask = 'ff:ff:ff:ff:ff:ff'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ARP_THA, dp.ofproto.OXM_OF_ARP_THA_W]
self._set_verify(headers, arp_tha_bin, mask_bin, True, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha_masked(arp_tha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
mask = 'ff:ff:ff:ff:ff:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ARP_THA, dp.ofproto.OXM_OF_ARP_THA_W]
self._set_verify(headers, arp_tha_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha_masked(arp_tha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_arp_tha_masked_00(self, dp):
dl_type = ether.ETH_TYPE_ARP
arp_tha = '83:6c:21:52:49:68'
arp_tha_bin = self.haddr_to_bin(arp_tha)
mask = '00:00:00:00:00:00'
mask_bin = self.haddr_to_bin(mask)
headers = [dp.ofproto.OXM_OF_ARP_THA, dp.ofproto.OXM_OF_ARP_THA_W]
self._set_verify(headers, arp_tha_bin, mask_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_arp_tha_masked(arp_tha_bin, mask_bin)
self.add_matches(dp, match)
def test_rule_set_ipv6_src(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
headers = [dp.ofproto.OXM_OF_IPV6_SRC, dp.ofproto.OXM_OF_IPV6_SRC_W]
self._set_verify(headers, ipv6_src_int, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src(ipv6_src_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_src_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'
mask_int = self.ipv6_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV6_SRC, dp.ofproto.OXM_OF_IPV6_SRC_W]
self._set_verify(headers, ipv6_src_int, mask_int, True, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src_masked(ipv6_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_src_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:0'
mask_int = self.ipv6_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV6_SRC, dp.ofproto.OXM_OF_IPV6_SRC_W]
self._set_verify(headers, ipv6_src_int, mask_int, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src_masked(ipv6_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_src_masked_00(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_src = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
ipv6_src_int = self.ipv6_to_int(ipv6_src)
mask = '0:0:0:0:0:0:0:0'
mask_int = self.ipv6_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV6_SRC, dp.ofproto.OXM_OF_IPV6_SRC_W]
self._set_verify(headers, ipv6_src_int, mask_int, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_src_masked(ipv6_src_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_dst(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_dst = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
ipv6_dst_int = self.ipv6_to_int(ipv6_dst)
headers = [dp.ofproto.OXM_OF_IPV6_DST, dp.ofproto.OXM_OF_IPV6_DST_W]
self._set_verify(headers, ipv6_dst_int, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_dst(ipv6_dst_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_dst_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_dst = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
ipv6_dst_int = self.ipv6_to_int(ipv6_dst)
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'
mask_int = self.ipv6_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV6_DST, dp.ofproto.OXM_OF_IPV6_DST_W]
self._set_verify(headers, ipv6_dst_int, mask_int, True, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_dst_masked(ipv6_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_dst_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_dst = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
ipv6_dst_int = self.ipv6_to_int(ipv6_dst)
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:0'
mask_int = self.ipv6_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV6_DST, dp.ofproto.OXM_OF_IPV6_DST_W]
self._set_verify(headers, ipv6_dst_int, mask_int, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_dst_masked(ipv6_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_dst_masked_00(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_dst = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
ipv6_dst_int = self.ipv6_to_int(ipv6_dst)
mask = '0:0:0:0:0:0:0:0'
mask_int = self.ipv6_to_int(mask)
headers = [dp.ofproto.OXM_OF_IPV6_DST, dp.ofproto.OXM_OF_IPV6_DST_W]
self._set_verify(headers, ipv6_dst_int, mask_int, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_dst_masked(ipv6_dst_int, mask_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_flabel(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_label = 0xc5384
headers = [dp.ofproto.OXM_OF_IPV6_FLABEL,
dp.ofproto.OXM_OF_IPV6_FLABEL_W]
self._set_verify(headers, ipv6_label)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_flabel(ipv6_label)
self.add_matches(dp, match)
def test_rule_set_ipv6_flabel_masked_ff(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_label = 0xc5384
mask = 0xfffff
headers = [dp.ofproto.OXM_OF_IPV6_FLABEL,
dp.ofproto.OXM_OF_IPV6_FLABEL_W]
self._set_verify(headers, ipv6_label, mask, True)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_flabel_masked(ipv6_label, mask)
self.add_matches(dp, match)
def test_rule_set_ipv6_flabel_masked_f0(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_label = 0xc5384
mask = 0xffff0
headers = [dp.ofproto.OXM_OF_IPV6_FLABEL,
dp.ofproto.OXM_OF_IPV6_FLABEL_W]
self._set_verify(headers, ipv6_label, mask)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_flabel_masked(ipv6_label, mask)
self.add_matches(dp, match)
def test_rule_set_ipv6_flabel_masked_00(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ipv6_label = 0xc5384
mask = 0x0
headers = [dp.ofproto.OXM_OF_IPV6_FLABEL,
dp.ofproto.OXM_OF_IPV6_FLABEL_W]
self._set_verify(headers, ipv6_label, mask)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ipv6_flabel_masked(ipv6_label, mask)
self.add_matches(dp, match)
def test_rule_set_icmpv6_type(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ICMPV6
icmp_type = 129
headers = [dp.ofproto.OXM_OF_ICMPV6_TYPE]
self._set_verify(headers, icmp_type)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv6_type(icmp_type)
self.add_matches(dp, match)
def test_rule_set_icmpv6_code(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ICMPV6
icmp_type = 138
icmp_code = 1
headers = [dp.ofproto.OXM_OF_ICMPV6_CODE]
self._set_verify(headers, icmp_code)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv6_type(icmp_type)
match.set_icmpv6_code(icmp_code)
self.add_matches(dp, match)
def test_rule_set_ipv6_nd_target(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ICMPV6
icmp_type = 135
target = "5420:db3f:921b:3e33:2791:98f:dd7f:2e19"
target_int = self.ipv6_to_int(target)
headers = [dp.ofproto.OXM_OF_IPV6_ND_TARGET]
self._set_verify(headers, target_int, type_='ipv6')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv6_type(icmp_type)
match.set_ipv6_nd_target(target_int)
self.add_matches(dp, match)
def test_rule_set_ipv6_nd_sll(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ICMPV6
icmp_type = 135
nd_sll = "93:6d:d0:d4:e8:36"
nd_sll_bin = self.haddr_to_bin(nd_sll)
headers = [dp.ofproto.OXM_OF_IPV6_ND_SLL]
self._set_verify(headers, nd_sll_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv6_type(icmp_type)
match.set_ipv6_nd_sll(nd_sll_bin)
self.add_matches(dp, match)
def test_rule_set_ipv6_nd_tll(self, dp):
dl_type = ether.ETH_TYPE_IPV6
ip_proto = inet.IPPROTO_ICMPV6
icmp_type = 136
nd_tll = "18:f6:66:b6:f1:b3"
nd_tll_bin = self.haddr_to_bin(nd_tll)
headers = [dp.ofproto.OXM_OF_IPV6_ND_TLL]
self._set_verify(headers, nd_tll_bin, type_='mac')
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_ip_proto(ip_proto)
match.set_icmpv6_type(icmp_type)
match.set_ipv6_nd_tll(nd_tll_bin)
self.add_matches(dp, match)
def test_rule_set_mpls_label(self, dp):
dl_type = 0x8847
label = 2144
headers = [dp.ofproto.OXM_OF_MPLS_LABEL]
self._set_verify(headers, label)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_mpls_label(label)
self.add_matches(dp, match)
def test_rule_set_mpls_tc(self, dp):
dl_type = 0x8847
tc = 3
headers = [dp.ofproto.OXM_OF_MPLS_TC]
self._set_verify(headers, tc)
match = dp.ofproto_parser.OFPMatch()
match.set_dl_type(dl_type)
match.set_mpls_tc(tc)
self.add_matches(dp, match)
def is_supported(self, t):
# Open vSwitch 1.10 does not support MPLS yet.
unsupported = [
'test_rule_set_mpls_label',
'test_rule_set_mpls_tc',
]
for u in unsupported:
if t.find(u) != -1:
return False
return True
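# --- Illustrative sketch (not part of the original tests) ---
# The masked-match tests above rely on plain bitwise semantics: only the
# bits set in the mask are compared. A quick standalone check:
if __name__ == '__main__':
    vlan_vid, mask = 0x4ef, 0xff0
    assert vlan_vid & mask == 0x4e0      # low nibble masked off
    assert vlan_vid & 0xfff == vlan_vid  # full mask keeps the value
    assert vlan_vid & 0x000 == 0         # zero mask wildcards everything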
|
apache-2.0
|
sangh/LaserShow
|
pyglet-hg/contrib/scene2d/scene2d/camera.py
|
29
|
1270
|
#!/usr/bin/env python
'''
Camera for projecting 2d flat scenes
====================================
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from pyglet.gl import *
from pyglet.event import *
class Camera(object):
def project(self):
'''Set up the GL projection matrix. Leave us in GL_MODELVIEW mode.
'''
        raise NotImplementedError()
def on_resize(self, width, height):
'''Handle resize of the viewport.
'''
        raise NotImplementedError()
class FlatCamera(Camera):
def __init__(self, x, y, width, height, near=-50, far=50):
self.x, self.y = x, y
self.width, self.height = width, height
self.near, self.far = near, far
def project(self):
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glViewport(self.x, self.y, self.width, self.height)
glOrtho(0, self.width, 0, self.height, self.near, self.far)
glMatrixMode(GL_MODELVIEW)
def on_resize(self, width, height):
self.width, self.height = width, height
return EVENT_UNHANDLED
def __repr__(self):
return '<%s object at 0x%x pos=(%d,%d) size=(%d,%d)>'%(
self.__class__.__name__, id(self), self.x, self.y, self.width,
self.height)
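# --- Illustrative usage sketch (not part of the original module) ---
# FlatCamera sets up an orthographic projection mapping one GL unit to one
# pixel. Assuming pyglet is installed, a window (and hence a GL context)
# could be created and projected through like this:
if __name__ == '__main__':
    import pyglet
    window = pyglet.window.Window(width=640, height=480)
    camera = FlatCamera(0, 0, window.width, window.height)
    camera.project()  # leaves the matrix mode in GL_MODELVIEW, ready to draw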
|
bsd-3-clause
|
tobegit3hub/keystone_docker
|
keystone/common/validation/validators.py
|
24
|
2735
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Internal implementation of request body validating middleware."""
import jsonschema
from keystone import exception
from keystone.i18n import _
class SchemaValidator(object):
"""Resource reference validator class."""
validator = None
validator_org = jsonschema.Draft4Validator
def __init__(self, schema):
# NOTE(lbragstad): If at some point in the future we want to extend
# our validators to include something specific we need to check for,
# we can do it here. Nova's V3 API validators extend the validator to
# include `self._validate_minimum` and `self._validate_maximum`. This
# would be handy if we needed to check for something the jsonschema
# didn't by default. See the Nova V3 validator for details on how this
# is done.
validators = {}
validator_cls = jsonschema.validators.extend(self.validator_org,
validators)
fc = jsonschema.FormatChecker()
self.validator = validator_cls(schema, format_checker=fc)
def validate(self, *args, **kwargs):
try:
self.validator.validate(*args, **kwargs)
except jsonschema.ValidationError as ex:
            # NOTE: For OpenStack-wide message consistency, this error
            # message follows the format used by WSME.
if len(ex.path) > 0:
# NOTE(lbragstad): Here we could think about using iter_errors
# as a method of providing invalid parameters back to the
# user.
# TODO(lbragstad): If the value of a field is confidential or
# too long, then we should build the masking in here so that
# we don't expose sensitive user information in the event it
# fails validation.
detail = _("Invalid input for field '%(path)s'. The value is "
"'%(value)s'.") % {'path': ex.path.pop(),
'value': ex.instance}
else:
detail = ex.message
raise exception.SchemaValidationError(detail=detail)
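# --- Illustrative usage sketch (not part of keystone) ---
# SchemaValidator wraps a JSON schema; validate() raises
# keystone.exception.SchemaValidationError on invalid input. The schema
# below is hypothetical:
if __name__ == '__main__':
    _example_schema = {
        'type': 'object',
        'properties': {'name': {'type': 'string'}},
        'required': ['name'],
    }
    v = SchemaValidator(_example_schema)
    v.validate({'name': 'demo'})  # passes silently
    # v.validate({}) would raise SchemaValidationError ('name' is required)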
|
apache-2.0
|
octobot-dev/pulpo-forms-django
|
views.py
|
2
|
30920
|
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.dispatch import receiver
from django.http import HttpResponse, HttpResponseBadRequest
from django.http.response import HttpResponseRedirect
from django.views.generic import TemplateView
from django.shortcuts import render_to_response
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.conf import settings
from django.template import RequestContext
from django.utils.decorators import method_decorator
from datetime import datetime
import json
import logging
import csv
from rest_framework.decorators import api_view, parser_classes
from rest_framework.parsers import MultiPartParser, FormParser, JSONParser
from rest_framework import generics
from rest_framework import permissions as drf_permissions
from rest_framework import status
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from io import BytesIO
from .models import Form, FormEntry, Version, FieldEntry, FileEntry
from .fields import PUBLISHED, DRAFT, EXPIRED
from .serializers import FormSerializer, VersionSerializer
from .serializers import FieldEntrySerializer, FormEntrySerializer
from .fields import Field_Data
from .fieldtypes.FieldFactory import FieldFactory as Factory
from .fieldtypes.ModelField import ModelField
from .JSONSerializers import FieldSerializer, AfterSubmitSerializer
from .statistics.StatisticsCtrl import StatisticsCtrl
from pulpo_forms.statistics.StatisticsPdf import StatisticsPdf
from .signals import modified_logic
from .permissions import IsOwnerSuperUserOrReadOnly
class FormList(generics.ListCreateAPIView):
"""
APIView where the forms of the app are listed and a new form can be added.
"""
model = Form
serializer_class = FormSerializer
permission_classes = (
drf_permissions.IsAuthenticated,
IsOwnerSuperUserOrReadOnly
)
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(FormList, self).dispatch(*args, **kwargs)
def get(self, request):
user = self.request.user
if user.is_superuser:
forms = Form.objects.all().values()
else:
forms = Form.objects.filter(owner=user).values()
index = 1
for f in forms:
# Obtain the list of versions of the form f
            # ordered by version number (descending)
query_set = Form.objects.get(
slug=f['slug']).versions.order_by('number').reverse()
vers_dict = query_set.values()
# Assign the dict of versions to the form dict
f["versions"] = vers_dict
f["index"] = index
f["username"] = User.objects.get(id=f['owner_id'])
index += 1
# Get the status of the last version,
# to know if there is already a draft in this form
if len(vers_dict) > 0:
last_version = vers_dict[0]
f["lastStatus"] = last_version['status']
return render_to_response(
'mainPage.html', {"formList": forms},
context_instance=RequestContext(request))
@login_required
@api_view(['GET'])
def ordered_forms(request, order="id", ad="asc"):
"""
Gets the list of all forms and versions from the database,
and renders the template to show them
"""
if request.user.is_superuser:
if order == "owner":
f1 = Form.objects.all().order_by('owner__username')
else:
f1 = Form.objects.all().order_by(order)
else:
if order == "owner":
f1 = Form.objects.filter(owner=request.user).order_by(
'owner__username')
else:
f1 = Form.objects.filter(owner=request.user).order_by(order)
if (ad == 'dsc'):
f1 = f1.reverse()
forms = f1.values()
index = 1
for f in forms:
# Obtain the list of versions of the form f
        # ordered by version number (descending)
query_set = Form.objects.get(
slug=f['slug']).versions.order_by('number').reverse()
vers_dict = query_set.values()
# Assign the dict of versions to the form dict
f["versions"] = vers_dict
f["index"] = index
f["username"] = User.objects.get(id=f['owner_id'])
index += 1
# Get the status of the last version,
# to know if there is already a draft in this form
if len(vers_dict) > 0:
last_version = vers_dict[0]
f["lastStatus"] = last_version['status']
return render_to_response(
'mainPage.html', {"formList": forms},
context_instance=RequestContext(request))
class FormDetail(generics.RetrieveUpdateDestroyAPIView):
"""
APIView to see details, modify or delete a form.
"""
queryset = Form.objects.all()
serializer_class = FormSerializer
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
def pre_save(self, obj):
obj.owner = self.request.user
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(FormDetail, self).dispatch(*args, **kwargs)
class VersionList(generics.ListCreateAPIView):
"""
APIView where the version of the selected form are listed
and a new version can be added.
"""
model = Version
serializer_class = VersionSerializer
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(VersionList, self).dispatch(*args, **kwargs)
def get(self, request, pk, format=None):
try:
versions = Form.objects.get(id=pk).versions.all()
serializer = VersionSerializer(versions, many=True)
return Response(serializer.data)
except Form.DoesNotExist:
content = {"error": "There is no form with that slug"}
return Response(content, status=status.HTTP_404_NOT_FOUND)
def post(self, request, pk, format=None):
serializer = VersionSerializer(data=request.DATA, partial=True)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class VersionDetail(generics.RetrieveUpdateDestroyAPIView):
"""
APIView to see details, modify or delete a version.
"""
queryset = Version.objects.all()
serializer_class = VersionSerializer
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(VersionDetail, self).dispatch(*args, **kwargs)
def get_object(self, pk, number):
try:
form = Form.objects.get(id=pk)
return form.versions.get(number=number)
except ObjectDoesNotExist:
            content = {
                "error": "There is no form with that slug or the "
                         "corresponding form has no version with that number"}
return Response(content, status=status.HTTP_404_NOT_FOUND)
def get(self, request, pk, number, format=None):
version = self.get_object(pk, number)
if type(version) is not Version:
return version
serializer = VersionSerializer(version)
return Response(serializer.data)
def put(self, request, pk, number, format=None):
version = self.get_object(pk, number)
serializer = VersionSerializer(version, data=request.DATA)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, number, format=None):
# Get related form of the version that is going to be deleted
try:
form = Form.objects.get(id=pk)
# Get version
version = Version.objects.get(form=form, number=number)
# Only draft versions can be deleted this way
if version.status == DRAFT:
# If selected form has only a draft and no previous versions
if len(Version.objects.filter(form=form)) == 1:
form.delete()
else:
version.delete()
return Response(status=status.HTTP_200_OK)
else:
return Response(status=status.HTTP_401_UNAUTHORIZED)
except Form.DoesNotExist or Version.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
class NewVersion(generics.CreateAPIView):
"""
APIView to create a new version of a form or duplicate a form
"""
permission_classes = (
drf_permissions.IsAuthenticated,
IsOwnerSuperUserOrReadOnly
)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(NewVersion, self).dispatch(*args, **kwargs)
def get(self, request, pk, number, action):
try:
            # Get the version of the form that is going to be duplicated
form = Form.objects.get(id=pk)
version = Version.objects.get(form=form, number=number)
except Version.DoesNotExist or Form.DoesNotExist:
            content = {
                "error": "There is no form with that slug or the "
                         "corresponding form has no version with that number"}
return Response(content, status=status.HTTP_404_NOT_FOUND)
        # If the action is "new", create a new version on the same form
if action == "new":
# Create version and save it on database
new_version = Version(json=version.json, form=form)
new_version.save()
        # If the action is "duplicate", copy the version onto a new form
elif action == "duplicate":
# Create a copy of the form related to selected version
new_form = Form(title=form.title, owner=request.user)
count = 2
            # Form.objects.filter(...).first() returns None when there is no
            # match, so no DoesNotExist handling is needed here.
            f_try = 1
            while f_try is not None:
                suffix = "(" + str(count) + ")"
                count += 1
                f_try = Form.objects.filter(
                    title=new_form.title + suffix).first()
new_form.title += suffix
new_form.save()
# Create a copy of the version and save it on database
new_version = Version(json=version.json, form=new_form)
new_version.save()
return HttpResponseRedirect(settings.FORMS_BASE_URL + "main/")
class DeleteVersion(generics.DestroyAPIView):
"""
APIView to delete a form
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(DeleteVersion, self).dispatch(*args, **kwargs)
def get(self, request, pk, number, format=None):
# Get related form of the version that is going to be deleted
try:
form = Form.objects.get(id=pk)
# Get version
version = Version.objects.get(form=form, number=number)
# Only draft versions can be deleted this way
if version.status == DRAFT:
# If selected form has only a draft and no previous versions
if len(Version.objects.filter(form=form)) == 1:
form.delete()
else:
version.delete()
return HttpResponseRedirect(settings.FORMS_BASE_URL + "main/")
else:
return Response(status=status.HTTP_401_UNAUTHORIZED)
except Form.DoesNotExist or Version.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
class ExpireVersion(generics.DestroyAPIView):
"""
APIView to delete a form
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ExpireVersion, self).dispatch(*args, **kwargs)
def get(self, request, pk, number, format=None):
# Get related form of the version that is going to expire
try:
form = Form.objects.get(id=pk)
# Get version
version = Version.objects.get(form=form, number=number)
# Only published versions can be expired this way
if version.status == PUBLISHED:
version.status = EXPIRED
version.expiry_date = datetime.now()
version.save()
return HttpResponseRedirect(settings.FORMS_BASE_URL + "main/")
else:
return Response(status=status.HTTP_400_BAD_REQUEST)
except Form.DoesNotExist or Version.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
class DeleteForm(generics.DestroyAPIView):
"""
APIView to delete a form
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(DeleteForm, self).dispatch(*args, **kwargs)
def get(self, request, pk):
# Get form and delete it
try:
form = Form.objects.get(id=pk)
form.delete()
return HttpResponseRedirect(settings.FORMS_BASE_URL + "main/")
except Form.DoesNotExist:
return HttpResponseRedirect(settings.FORMS_BASE_URL + "chuck/")
class FillForm(generics.RetrieveUpdateDestroyAPIView):
"""
APIView to retrieve current version of a form to be filled
"""
serializer_class = VersionSerializer
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
def get(self, request, slug, format=None):
try:
form_versions = Form.objects.get(slug=slug).versions.all()
# We assume there is only one published version at any given time
final_version = form_versions.filter(status=PUBLISHED).first()
if (not final_version):
error = {"error": "This Form has not been published."}
return Response(
status=status.HTTP_406_NOT_ACCEPTABLE, data=error)
loaded = json.loads(final_version.json)
for p in loaded['pages']:
for f in p['fields']:
fld = (Factory.get_class(f['field_type']))()
if isinstance(fld, ModelField):
f['options'] = fld.find_options()
final_version.json = json.dumps(loaded)
serializer = VersionSerializer(final_version)
return Response(serializer.data)
        except (Form.DoesNotExist, Version.DoesNotExist):
return Response(status=status.HTTP_404_NOT_FOUND)
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
    def __init__(self, data, status_code, **kwargs):
        content = JSONRenderer().render(data)
        kwargs['content_type'] = 'application/json'
        super(JSONResponse, self).__init__(content, status=status_code, **kwargs)
def is_shown(request, version, field, item_id):
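    """Evaluate the version's conditional logic and return True when the
    given item should be shown for the submitted answers; `field` selects
    between field logic (True) and page logic (False)."""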
logic = version.get_logic()
# If field == True, we check for field logic, otherwise we check page logic
if field:
logic = logic['fields']
else:
logic = logic['pages']
# If there are no logic restrictions to show the item,
# item is always shown
if item_id not in logic:
return True
eval_results = []
conditions = logic[item_id]['conditions']
for condition in conditions:
data = ''
        form_json = json.loads(request.DATA['data'])
        # the loop variable must not shadow the boolean `field` parameter
        for field_data in form_json:
            serializer = FieldEntrySerializer(data=field_data)
            if serializer.is_valid():
                if serializer.initial_data['field_id'] == condition['field']:
                    data = serializer.initial_data['answer']
                    break
if data:
operator = ''
if condition['comparator'] == "greater_than":
operator = '>'
elif condition['comparator'] == "greater_than_or_equal":
operator = '>='
elif condition['comparator'] == "equal":
operator = '=='
elif condition['comparator'] == "not_equal":
operator = '!='
elif condition['comparator'] == "less_than_or_equal":
operator = '<='
elif condition['comparator'] == "less_than":
operator = '<'
            if operator != '':
                expression = data + operator + str(condition['value'])
                # TODO: eval() on user-supplied input is unsafe and has no
                # error handling; see the safe_compare sketch after this
                # function for an eval-free alternative
                eval_results.append(eval(expression))
else:
eval_results.append(False)
    if logic[item_id]['action'] == 'All':
        value = all(eval_results)
    elif logic[item_id]['action'] == 'Any':
        value = any(eval_results)
    else:
        # Unknown action: default to showing the item rather than hitting
        # an UnboundLocalError below
        value = True
    if logic[item_id]['operation'] == 'Show':
        shown = value
    else:
        shown = not value
return shown
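# Hedged sketch, not part of the original module: is_shown() above builds a
# comparison string and eval()s it. The same comparison can be done without
# eval by mapping comparator names onto the standard operator module. The
# helper name safe_compare and the table _COMPARATORS are illustrative
# assumptions, not existing helpers of this codebase.
import operator as _operator

_COMPARATORS = {
    "greater_than": _operator.gt,
    "greater_than_or_equal": _operator.ge,
    "equal": _operator.eq,
    "not_equal": _operator.ne,
    "less_than_or_equal": _operator.le,
    "less_than": _operator.lt,
}


def safe_compare(answer, comparator, value):
    """Compare a submitted answer against a condition value without eval().

    Unknown comparators evaluate to False, mirroring the fall-through in
    is_shown(). Values are compared numerically when both sides parse as
    numbers (as eval() would), otherwise as strings.
    """
    compare = _COMPARATORS.get(comparator)
    if compare is None:
        return False
    try:
        return compare(float(answer), float(value))
    except (TypeError, ValueError):
        return compare(str(answer), str(value))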
def validate_logic(request, version):
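    """Recompute the shown flag of every submitted field from the version's
    logic rules and compare it with the client-supplied value; return False
    on any mismatch or unknown field, True otherwise."""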
pages = version.get_pages()
page_id = 0
pages_show_value = []
for page in pages:
pages_show_value.append(
is_shown(request, version, False, page_id.__str__()))
page_id += 1
form_json = json.loads(request.DATA['data'])
for field in form_json:
field_page = -1
serializer = FieldEntrySerializer(data=field)
if serializer.is_valid():
obj = serializer.initial_data
index = -1
for page in pages:
index += 1
for page_field in page['fields']:
if page_field['field_id'] == obj['field_id']:
field_page = index
break
if field_page != -1:
break
# If field cannot be found, logic check fails
if field_page == -1:
return False
shown = is_shown(request, version, True, obj['field_id'].__str__())
shown = shown & pages_show_value[field_page]
if shown != obj['shown']:
                # If the received shown value differs from the calculated one,
# we return False
return False
# If there are no errors, logic is valid
return True
@api_view(['POST'])
@parser_classes((FormParser, MultiPartParser, JSONParser))
def submit_form_entry(request, slug, format=None):
"""
APIView to submit a Form Entry.
"""
error_log = {"error": ""}
form_versions = Form.objects.get(slug=slug).versions.all()
final_version = form_versions.filter(status=PUBLISHED).first()
form_json = json.loads(request.DATA['data'])
for field in form_json:
serializer = FieldEntrySerializer(data=field)
if serializer.is_valid():
obj = serializer.initial_data
            if (obj['required'] and str(obj['answer']) == ''
                    and obj['shown']):
                error_log['error'] += obj['text'] + ': This field is required\n'
            elif not obj['required'] and str(obj['answer']) == '':
                pass
elif obj['shown']:
fld = (Factory.get_class(obj['field_type']))()
try:
loaded = json.loads(final_version.json)
f_id = obj['field_id']
kw = {}
f = Field_Data()
data = FieldSerializer(f, field)
if (data.is_valid()):
kw['field'] = f
kw['options'] = fld.get_options(loaded, f_id)
fld.validate(obj['answer'], **kw)
else:
raise ValidationError("Invalid JSON format.")
except ValidationError as e:
error_log['error'] += e.message
else:
return Response(status=status.HTTP_406_NOT_ACCEPTABLE)
    # Make sure logic constraints are respected.
logic_check = validate_logic(request, final_version)
if not logic_check:
modified_logic.send(sender=request, sent_data=request.DATA['data'])
return Response(status=status.HTTP_406_NOT_ACCEPTABLE)
if error_log['error'] != "":
return Response(status=status.HTTP_406_NOT_ACCEPTABLE, data=error_log)
entry = FormEntry(version=final_version)
entry.entry_time = datetime.now()
entry.save()
form_json = json.loads(request.DATA['data'])
for field in form_json:
serializer = FieldEntrySerializer(data=field)
if serializer.is_valid():
            if not serializer.initial_data['shown']:
                # Hidden fields are stored with an empty answer
                field_entry = serializer.save(entry=entry, answer='')
            else:
                field_entry = serializer.save(entry=entry)
# If field is a FileField we find the corresponding file
# and save it to the database
if field_entry.field_type == 'FileField':
data_json = field_entry.answer
if data_json != '':
FileEntry.objects.create(
field_id=field_entry.field_id,
file_type=request.FILES[data_json].content_type,
file_data=request.FILES[data_json],
field_entry=FieldEntry.objects.get(
pk=field_entry.pk),
file_name=request.FILES[data_json].name)
return Response(status=status.HTTP_200_OK)
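# Hedged sketch, illustrative only and unused by the view above: from the
# parsing in submit_form_entry, request.DATA must carry a 'data' key holding
# a JSON-encoded list of field entries. The concrete values below are
# invented; the key names mirror the serializer fields referenced above.
EXAMPLE_SUBMIT_PAYLOAD = {
    'data': json.dumps([
        {
            'field_id': 1,
            'field_type': 'TextField',
            'text': 'Your name',
            'required': True,
            'shown': True,
            'answer': 'Ada Lovelace',
        },
    ])
}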
logger = logging.getLogger(__name__)
@receiver(modified_logic)
def modified_logic_handler(sender, **kwargs):
logger.error("Submitted form logic has been modified. \
DATA:" + kwargs['sent_data'].__str__())
@login_required
@api_view(['GET'])
def get_responses(request, pk, number, format=None):
"""
View to get all the entries for a particular form.
"""
try:
form = Form.objects.get(pk=pk)
if (form.owner != request.user and not request.user.is_superuser):
return HttpResponseBadRequest(
json.dumps({"error": "This survey does not belong to you."}))
v = form.versions.get(number=number)
if (v.status == DRAFT):
content = {"error": "This version's status is Draft."}
return Response(content, status=status.HTTP_406_NOT_ACCEPTABLE)
queryset = v.entries.all()
if queryset:
serializer = FormEntrySerializer(queryset, many=True)
return Response(serializer.data)
else:
return Response(
data="No field entries for this form",
status=status.HTTP_406_NOT_ACCEPTABLE)
except ObjectDoesNotExist:
        content = {
            "error": "There is no form with that slug or the "
                     "corresponding form has no version with that number"}
return Response(content, status=status.HTTP_404_NOT_FOUND)
@login_required
@api_view(['GET'])
def get_constants(request, format=None):
"""
View to get the available field type IDs.
"""
data = Factory.get_strings()
return Response(status=status.HTTP_200_OK, data=data)
@login_required
@api_view(['GET'])
def get_URL(request, format=None):
"""
View to get the base URL.
"""
data = {'URL': settings.FORMS_BASE_URL}
return Response(status=status.HTTP_200_OK, data=data)
class FieldTemplateView(TemplateView):
"""
Renders the field type templates.
"""
def get_template_names(self):
field = Factory.get_class(self.kwargs.get('type'))
return field().render()
class FieldEditTemplateView(TemplateView):
"""
Renders the field type templates.
"""
def get_template_names(self):
field = Factory.get_class(self.kwargs.get('type'))
return field().render_edit()
class FieldPrpTemplateView(TemplateView):
"""
Renders the field type properties templates.
"""
def get_template_names(self):
if (self.kwargs.get('type') == 'default'):
return 'fields/field_properties_base.html'
field = Factory.get_class(self.kwargs.get('type'))
return field().render_properties()
class FieldStsTemplateView(TemplateView):
"""
Renders the field type statistics templates.
"""
def get_template_names(self):
field = Factory.get_class(self.kwargs.get('type'))
return field().render_statistic()
class StatisticsView(generics.RetrieveAPIView):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
IsOwnerSuperUserOrReadOnly
)
def get(self, request, pk, number):
"""
Returns statistics for version (pk, number)
"""
filters = []
try:
fields = request.GET.get('fields', "NO FIELD")
types = request.GET.get('types', "NO TYPE")
values = request.GET.get('values', "NO VALUE")
if (fields != "NO FIELD"):
fields = fields.split(',')
types = types.split(',')
values = values.split(',')
length = len(fields)
if (length != len(types) or length != len(values)):
return HttpResponseBadRequest(json.dumps(
{"error": "Misconfigured filters"}))
                for x in range(0, length):
                    filters.append({
                        "field": fields[x],
                        "filter_type": types[x],
                        "field_value": values[x],
                    })
statistics = StatisticsCtrl().getStatistics(
pk, number, filters)
return Response(data=statistics, status=status.HTTP_200_OK)
except Exception as e:
error_msg = str(e)
return Response(
data=error_msg, status=status.HTTP_406_NOT_ACCEPTABLE)
@login_required
@api_view(['GET'])
def after_submit_message(request, slug):
form_versions = Form.objects.get(slug=slug).versions.all()
final_version = form_versions.filter(status=PUBLISHED).first()
js = json.loads(final_version.json)
serializer = AfterSubmitSerializer(data=js['after_submit'])
    if serializer.is_valid():
        message = serializer.initial_data['message'].split("\n")
        return render_to_response(
            'form_submitted.html', {"message": message},
            context_instance=RequestContext(request))
    # A view must always return a response; fall back on invalid data
    return Response(status=status.HTTP_400_BAD_REQUEST)
@login_required
@api_view(['GET'])
def export_csv(request, pk, number, format=None):
"""
Function view for exporting responses of form version in csv format
"""
# Create the HttpResponse object with the appropriate CSV header.
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="responses.csv"'
# Create a csv writer object
writer = csv.writer(response)
try:
# Get version
form = Form.objects.get(pk=pk)
version = form.versions.get(number=number)
        # A CSV file can only be exported from a non-draft version
if (version.status == DRAFT):
content = {"error": "This version's status is Draft."}
return Response(content, status=status.HTTP_406_NOT_ACCEPTABLE)
# Get all entries
form_entries = version.entries.all()
if form_entries:
initial = form_entries[0]
labels = []
for field in initial.fields.all().order_by("field_id"):
labels.append('"' + field.text + '"')
writer.writerow(labels)
for formEntry in form_entries:
fields = formEntry.fields.all().order_by("field_id")
data = []
for field in fields:
data.append(field.answer)
writer.writerow(data)
return response
else:
return Response(
data="No field entries for this form",
status=status.HTTP_406_NOT_ACCEPTABLE)
except ObjectDoesNotExist:
        content = {
            "error": "There is no form with that slug or the "
                     "corresponding form has no version with that number"}
return Response(content, status=status.HTTP_404_NOT_FOUND)
@login_required
@api_view(['GET'])
def export_pdf(request, pk, number, field):
"""
View for exporting field statistics on pdf format
"""
try:
statistics = StatisticsCtrl().getFieldStatistics(pk, number, field)
# Create the HttpResponse object with the appropriate PDF headers.
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = (
            'attachment; filename="field_statistics.pdf"')
buffer = BytesIO()
report = StatisticsPdf(buffer, 'A4', statistics)
pdf = report.print_statistics()
response.write(pdf)
return response
except Exception as e:
error_msg = str(e)
return Response(data=error_msg, status=status.HTTP_406_NOT_ACCEPTABLE)
@api_view(['GET'])
def download_file(request, field_id, entry):
field_entry = FieldEntry.objects.get(pk=entry)
file_entry = field_entry.files.get(field_id=field_id)
response = HttpResponse(
file_entry.file_data, content_type=file_entry.file_type)
    response['Content-Disposition'] = (
        'attachment; filename="' + file_entry.file_name + '"')
return response
@api_view(['GET'])
def render_form(request, format=None, **kwargs):
base_url = settings.FORMS_BASE_URL
return render_to_response(
'visor.html', {"instance": kwargs['instance'], "base_url": base_url},
context_instance=RequestContext(request))
|
apache-2.0
|
XiaodunServerGroup/ddyedx
|
lms/djangoapps/courseware/tests/test_module_render.py
|
12
|
30189
|
"""
Test for lms courseware app, module render unit
"""
from ddt import ddt, data
from functools import partial
from mock import MagicMock, patch, Mock
import json
from django.http import Http404, HttpResponse
from django.core.urlresolvers import reverse
from django.conf import settings
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from capa.tests.response_xml_factory import OptionResponseXMLFactory
from xblock.field_data import FieldData
from xblock.runtime import Runtime
from xblock.fields import ScopeIds
from xmodule.lti_module import LTIDescriptor
from xmodule.modulestore import Location
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import ItemFactory, CourseFactory
from xmodule.x_module import XModuleDescriptor
from courseware import module_render as render
from courseware.courses import get_course_with_access, course_image_url, get_course_info_section
from courseware.model_data import FieldDataCache
from courseware.tests.factories import StudentModuleFactory, UserFactory
from courseware.tests.tests import LoginEnrollmentTestCase
from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
from lms.lib.xblock.runtime import quote_slashes
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class ModuleRenderTestCase(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Tests of courseware.module_render
"""
def setUp(self):
self.location = Location('i4x', 'edX', 'toy', 'chapter', 'Overview')
self.course_id = 'edX/toy/2012_Fall'
self.toy_course = modulestore().get_course(self.course_id)
self.mock_user = UserFactory()
self.mock_user.id = 1
self.request_factory = RequestFactory()
# Construct a mock module for the modulestore to return
self.mock_module = MagicMock()
self.mock_module.id = 1
self.dispatch = 'score_update'
# Construct a 'standard' xqueue_callback url
self.callback_url = reverse('xqueue_callback', kwargs=dict(course_id=self.course_id,
userid=str(self.mock_user.id),
mod_id=self.mock_module.id,
dispatch=self.dispatch))
def test_get_module(self):
self.assertEqual(
None,
render.get_module('dummyuser', None, 'invalid location', None, None)
)
def test_module_render_with_jump_to_id(self):
"""
        This test validates that the /jump_to_id/<id> shorthand for intracourse linking works as
        expected. Note there's an HTML element in the 'toy' course with the url_name 'toyjumpto'
        which defines this linkage.
"""
mock_request = MagicMock()
mock_request.user = self.mock_user
course = get_course_with_access(self.mock_user, self.course_id, 'load')
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course_id, self.mock_user, course, depth=2)
module = render.get_module(
self.mock_user,
mock_request,
Location('i4x', 'edX', 'toy', 'html', 'toyjumpto'),
field_data_cache,
self.course_id
)
# get the rendered HTML output which should have the rewritten link
html = module.render('student_view').content
# See if the url got rewritten to the target link
# note if the URL mapping changes then this assertion will break
self.assertIn('/courses/' + self.course_id + '/jump_to_id/vertical_test', html)
def test_xqueue_callback_success(self):
"""
Test for happy-path xqueue_callback
"""
fake_key = 'fake key'
xqueue_header = json.dumps({'lms_key': fake_key})
data = {
'xqueue_header': xqueue_header,
'xqueue_body': 'hello world',
}
        # Patch find_target_student_module to return our mock module
with patch('courseware.module_render.find_target_student_module') as get_fake_module:
get_fake_module.return_value = self.mock_module
# call xqueue_callback with our mocked information
request = self.request_factory.post(self.callback_url, data)
render.xqueue_callback(request, self.course_id, self.mock_user.id, self.mock_module.id, self.dispatch)
# Verify that handle ajax is called with the correct data
request.POST['queuekey'] = fake_key
self.mock_module.handle_ajax.assert_called_once_with(self.dispatch, request.POST)
def test_xqueue_callback_missing_header_info(self):
data = {
'xqueue_header': '{}',
'xqueue_body': 'hello world',
}
with patch('courseware.module_render.find_target_student_module') as get_fake_module:
get_fake_module.return_value = self.mock_module
# Test with missing xqueue data
with self.assertRaises(Http404):
request = self.request_factory.post(self.callback_url, {})
render.xqueue_callback(request, self.course_id, self.mock_user.id, self.mock_module.id, self.dispatch)
# Test with missing xqueue_header
with self.assertRaises(Http404):
request = self.request_factory.post(self.callback_url, data)
render.xqueue_callback(request, self.course_id, self.mock_user.id, self.mock_module.id, self.dispatch)
def test_get_score_bucket(self):
self.assertEquals(render.get_score_bucket(0, 10), 'incorrect')
self.assertEquals(render.get_score_bucket(1, 10), 'partial')
self.assertEquals(render.get_score_bucket(10, 10), 'correct')
# get_score_bucket calls error cases 'incorrect'
self.assertEquals(render.get_score_bucket(11, 10), 'incorrect')
self.assertEquals(render.get_score_bucket(-1, 10), 'incorrect')
def test_anonymous_handle_xblock_callback(self):
dispatch_url = reverse(
'xblock_handler',
args=[
'edX/toy/2012_Fall',
quote_slashes('i4x://edX/toy/videosequence/Toy_Videos'),
'xmodule_handler',
'goto_position'
]
)
response = self.client.post(dispatch_url, {'position': 2})
self.assertEquals(403, response.status_code)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestHandleXBlockCallback(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test the handle_xblock_callback function
"""
def setUp(self):
self.location = Location('i4x', 'edX', 'toy', 'chapter', 'Overview')
self.course_id = 'edX/toy/2012_Fall'
self.toy_course = modulestore().get_course(self.course_id)
self.mock_user = UserFactory()
self.mock_user.id = 1
self.request_factory = RequestFactory()
# Construct a mock module for the modulestore to return
self.mock_module = MagicMock()
self.mock_module.id = 1
self.dispatch = 'score_update'
# Construct a 'standard' xqueue_callback url
self.callback_url = reverse('xqueue_callback', kwargs=dict(course_id=self.course_id,
userid=str(self.mock_user.id),
mod_id=self.mock_module.id,
dispatch=self.dispatch))
def _mock_file(self, name='file', size=10):
"""Create a mock file object for testing uploads"""
mock_file = MagicMock(
size=size,
read=lambda: 'x' * size
)
# We can't use `name` as a kwarg to Mock to set the name attribute
# because mock uses `name` to name the mock itself
mock_file.name = name
return mock_file
def test_invalid_location(self):
request = self.request_factory.post('dummy_url', data={'position': 1})
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
'dummy/course/id',
'invalid Location',
                'dummy_handler',
                'dummy_dispatch',
)
def test_too_many_files(self):
request = self.request_factory.post(
'dummy_url',
data={'file_id': (self._mock_file(), ) * (settings.MAX_FILEUPLOADS_PER_INPUT + 1)}
)
request.user = self.mock_user
self.assertEquals(
render.handle_xblock_callback(
request,
'dummy/course/id',
quote_slashes(str(self.location)),
'dummy_handler'
).content,
json.dumps({
'success': 'Submission aborted! Maximum %d files may be submitted at once' %
settings.MAX_FILEUPLOADS_PER_INPUT
})
)
def test_too_large_file(self):
inputfile = self._mock_file(size=1 + settings.STUDENT_FILEUPLOAD_MAX_SIZE)
request = self.request_factory.post(
'dummy_url',
data={'file_id': inputfile}
)
request.user = self.mock_user
self.assertEquals(
render.handle_xblock_callback(
request,
'dummy/course/id',
quote_slashes(str(self.location)),
'dummy_handler'
).content,
json.dumps({
'success': 'Submission aborted! Your file "%s" is too large (max size: %d MB)' %
(inputfile.name, settings.STUDENT_FILEUPLOAD_MAX_SIZE / (1000 ** 2))
})
)
def test_xmodule_dispatch(self):
request = self.request_factory.post('dummy_url', data={'position': 1})
request.user = self.mock_user
response = render.handle_xblock_callback(
request,
self.course_id,
quote_slashes(str(self.location)),
'xmodule_handler',
'goto_position',
)
self.assertIsInstance(response, HttpResponse)
def test_bad_course_id(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
'bad_course_id',
quote_slashes(str(self.location)),
'xmodule_handler',
'goto_position',
)
def test_bad_location(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
self.course_id,
quote_slashes(str(Location('i4x', 'edX', 'toy', 'chapter', 'bad_location'))),
'xmodule_handler',
'goto_position',
)
def test_bad_xmodule_dispatch(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
self.course_id,
quote_slashes(str(self.location)),
'xmodule_handler',
'bad_dispatch',
)
def test_missing_handler(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
self.course_id,
quote_slashes(str(self.location)),
'bad_handler',
'bad_dispatch',
)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestTOC(TestCase):
"""Check the Table of Contents for a course"""
def setUp(self):
# Toy courses should be loaded
self.course_name = 'edX/toy/2012_Fall'
self.toy_course = modulestore().get_course(self.course_name)
self.portal_user = UserFactory()
def test_toc_toy_from_chapter(self):
chapter = 'Overview'
chapter_url = '%s/%s/%s' % ('/courses', self.course_name, chapter)
factory = RequestFactory()
request = factory.get(chapter_url)
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.toy_course.id, self.portal_user, self.toy_course, depth=2)
expected = ([{'active': True, 'sections':
[{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True,
'format': u'Lecture Sequence', 'due': None, 'active': False},
{'url_name': 'Welcome', 'display_name': u'Welcome', 'graded': True,
'format': '', 'due': None, 'active': False},
{'url_name': 'video_123456789012', 'display_name': 'Test Video', 'graded': True,
'format': '', 'due': None, 'active': False},
{'url_name': 'video_4f66f493ac8f', 'display_name': 'Video', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'Overview', 'display_name': u'Overview'},
{'active': False, 'sections':
[{'url_name': 'toyvideo', 'display_name': 'toyvideo', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'secret:magic', 'display_name': 'secret:magic'}])
actual = render.toc_for_course(self.portal_user, request, self.toy_course, chapter, None, field_data_cache)
for toc_section in expected:
self.assertIn(toc_section, actual)
def test_toc_toy_from_section(self):
chapter = 'Overview'
chapter_url = '%s/%s/%s' % ('/courses', self.course_name, chapter)
section = 'Welcome'
factory = RequestFactory()
request = factory.get(chapter_url)
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.toy_course.id, self.portal_user, self.toy_course, depth=2)
expected = ([{'active': True, 'sections':
[{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True,
'format': u'Lecture Sequence', 'due': None, 'active': False},
{'url_name': 'Welcome', 'display_name': u'Welcome', 'graded': True,
'format': '', 'due': None, 'active': True},
{'url_name': 'video_123456789012', 'display_name': 'Test Video', 'graded': True,
'format': '', 'due': None, 'active': False},
{'url_name': 'video_4f66f493ac8f', 'display_name': 'Video', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'Overview', 'display_name': u'Overview'},
{'active': False, 'sections':
[{'url_name': 'toyvideo', 'display_name': 'toyvideo', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'secret:magic', 'display_name': 'secret:magic'}])
actual = render.toc_for_course(self.portal_user, request, self.toy_course, chapter, section, field_data_cache)
for toc_section in expected:
self.assertIn(toc_section, actual)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestHtmlModifiers(ModuleStoreTestCase):
"""
Tests to verify that standard modifications to the output of XModule/XBlock
student_view are taking place
"""
def setUp(self):
self.user = UserFactory.create()
self.request = RequestFactory().get('/')
self.request.user = self.user
self.request.session = {}
self.course = CourseFactory.create()
self.content_string = '<p>This is the content<p>'
self.rewrite_link = '<a href="/static/foo/content">Test rewrite</a>'
self.rewrite_bad_link = '<img src="/static//file.jpg" />'
self.course_link = '<a href="/course/bar/content">Test course rewrite</a>'
self.descriptor = ItemFactory.create(
category='html',
data=self.content_string + self.rewrite_link + self.rewrite_bad_link + self.course_link
)
self.location = self.descriptor.location
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id,
self.user,
self.descriptor
)
def test_xmodule_display_wrapper_enabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
wrap_xmodule_display=True,
)
result_fragment = module.render('student_view')
self.assertIn('div class="xblock xblock-student_view xmodule_display xmodule_HtmlModule"', result_fragment.content)
def test_xmodule_display_wrapper_disabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
wrap_xmodule_display=False,
)
result_fragment = module.render('student_view')
self.assertNotIn('div class="xblock xblock-student_view xmodule_display xmodule_HtmlModule"', result_fragment.content)
def test_static_link_rewrite(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
)
result_fragment = module.render('student_view')
self.assertIn(
'/c4x/{org}/{course}/asset/foo_content'.format(
org=self.course.location.org,
course=self.course.location.course,
),
result_fragment.content
)
def test_static_badlink_rewrite(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
)
result_fragment = module.render('student_view')
self.assertIn(
'/c4x/{org}/{course}/asset/_file.jpg'.format(
org=self.course.location.org,
course=self.course.location.course,
),
result_fragment.content
)
def test_static_asset_path_use(self):
        '''
        When a course is loaded with do_import_static=False (see xml_importer.py),
        static_asset_path is set as an LMS key-value on the course. Static paths
        should then be left unmangled (i.e. not rewritten to c4x://).
        '''
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
static_asset_path="toy_course_dir",
)
result_fragment = module.render('student_view')
self.assertIn('href="/static/toy_course_dir', result_fragment.content)
def test_course_image(self):
url = course_image_url(self.course)
self.assertTrue(url.startswith('/c4x/'))
self.course.static_asset_path = "toy_course_dir"
url = course_image_url(self.course)
self.assertTrue(url.startswith('/static/toy_course_dir/'))
self.course.static_asset_path = ""
def test_get_course_info_section(self):
self.course.static_asset_path = "toy_course_dir"
get_course_info_section(self.request, self.course, "handouts")
# NOTE: check handouts output...right now test course seems to have no such content
# at least this makes sure get_course_info_section returns without exception
def test_course_link_rewrite(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
)
result_fragment = module.render('student_view')
self.assertIn(
'/courses/{course_id}/bar/content'.format(
course_id=self.course.id
),
result_fragment.content
)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
@patch.dict('django.conf.settings.FEATURES', {'DISPLAY_DEBUG_INFO_TO_STAFF': True, 'DISPLAY_HISTOGRAMS_TO_STAFF': True})
@patch('courseware.module_render.has_access', Mock(return_value=True))
class TestStaffDebugInfo(ModuleStoreTestCase):
"""Tests to verify that Staff Debug Info panel and histograms are displayed to staff."""
def setUp(self):
self.user = UserFactory.create()
self.request = RequestFactory().get('/')
self.request.user = self.user
self.request.session = {}
self.course = CourseFactory.create()
problem_xml = OptionResponseXMLFactory().build_xml(
question_text='The correct answer is Correct',
num_inputs=2,
weight=2,
options=['Correct', 'Incorrect'],
correct_option='Correct'
)
self.descriptor = ItemFactory.create(
category='problem',
data=problem_xml,
display_name='Option Response Problem'
)
self.location = self.descriptor.location
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id,
self.user,
self.descriptor
)
@patch.dict('django.conf.settings.FEATURES', {'DISPLAY_DEBUG_INFO_TO_STAFF': False})
def test_staff_debug_info_disabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
)
result_fragment = module.render('student_view')
self.assertNotIn('Staff Debug', result_fragment.content)
def test_staff_debug_info_enabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
)
result_fragment = module.render('student_view')
self.assertIn('Staff Debug', result_fragment.content)
@patch.dict('django.conf.settings.FEATURES', {'DISPLAY_HISTOGRAMS_TO_STAFF': False})
def test_histogram_disabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
)
result_fragment = module.render('student_view')
        self.assertNotIn('histogram', result_fragment.content)
def test_histogram_enabled_for_unscored_xmodules(self):
"""Histograms should not display for xmodules which are not scored."""
html_descriptor = ItemFactory.create(
category='html',
data='Here are some course details.'
)
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id,
self.user,
self.descriptor
)
with patch('xmodule_modifiers.grade_histogram') as mock_grade_histogram:
mock_grade_histogram.return_value = []
module = render.get_module(
self.user,
self.request,
html_descriptor.location,
field_data_cache,
self.course.id,
)
module.render('student_view')
self.assertFalse(mock_grade_histogram.called)
def test_histogram_enabled_for_scored_xmodules(self):
"""Histograms should display for xmodules which are scored."""
StudentModuleFactory.create(
course_id=self.course.id,
module_state_key=self.location,
student=UserFactory(),
grade=1,
max_grade=1,
state="{}",
)
with patch('xmodule_modifiers.grade_histogram') as mock_grade_histogram:
mock_grade_histogram.return_value = []
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
self.course.id,
)
module.render('student_view')
self.assertTrue(mock_grade_histogram.called)
PER_COURSE_ANONYMIZED_DESCRIPTORS = (LTIDescriptor, )
PER_STUDENT_ANONYMIZED_DESCRIPTORS = [
class_ for (name, class_) in XModuleDescriptor.load_classes()
if not issubclass(class_, PER_COURSE_ANONYMIZED_DESCRIPTORS)
]
@ddt
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestAnonymousStudentId(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test that anonymous_student_id is set correctly across a variety of XBlock types
"""
def setUp(self):
self.user = UserFactory()
@patch('courseware.module_render.has_access', Mock(return_value=True))
def _get_anonymous_id(self, course_id, xblock_class):
location = Location('dummy_org', 'dummy_course', 'dummy_category', 'dummy_name')
descriptor = Mock(
spec=xblock_class,
_field_data=Mock(spec=FieldData),
location=location,
static_asset_path=None,
runtime=Mock(
spec=Runtime,
resources_fs=None,
mixologist=Mock(_mixins=())
),
scope_ids=Mock(spec=ScopeIds),
)
# Use the xblock_class's bind_for_student method
descriptor.bind_for_student = partial(xblock_class.bind_for_student, descriptor)
if hasattr(xblock_class, 'module_class'):
descriptor.module_class = xblock_class.module_class
return render.get_module_for_descriptor_internal(
self.user,
descriptor,
Mock(spec=FieldDataCache),
course_id,
Mock(), # Track Function
Mock(), # XQueue Callback Url Prefix
).xmodule_runtime.anonymous_student_id
@data(*PER_STUDENT_ANONYMIZED_DESCRIPTORS)
def test_per_student_anonymized_id(self, descriptor_class):
for course_id in ('MITx/6.00x/2012_Fall', 'MITx/6.00x/2013_Spring'):
self.assertEquals(
# This value is set by observation, so that later changes to the student
# id computation don't break old data
'5afe5d9bb03796557ee2614f5c9611fb',
self._get_anonymous_id(course_id, descriptor_class)
)
@data(*PER_COURSE_ANONYMIZED_DESCRIPTORS)
def test_per_course_anonymized_id(self, descriptor_class):
self.assertEquals(
# This value is set by observation, so that later changes to the student
# id computation don't break old data
'e3b0b940318df9c14be59acb08e78af5',
self._get_anonymous_id('MITx/6.00x/2012_Fall', descriptor_class)
)
self.assertEquals(
# This value is set by observation, so that later changes to the student
# id computation don't break old data
'f82b5416c9f54b5ce33989511bb5ef2e',
self._get_anonymous_id('MITx/6.00x/2013_Spring', descriptor_class)
)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
@patch('track.views.tracker')
class TestModuleTrackingContext(ModuleStoreTestCase):
"""
Ensure correct tracking information is included in events emitted during XBlock callback handling.
"""
def setUp(self):
self.user = UserFactory.create()
self.request = RequestFactory().get('/')
self.request.user = self.user
self.request.session = {}
self.course = CourseFactory.create()
self.problem_xml = OptionResponseXMLFactory().build_xml(
question_text='The correct answer is Correct',
num_inputs=2,
weight=2,
options=['Correct', 'Incorrect'],
correct_option='Correct'
)
def test_context_contains_display_name(self, mock_tracker):
problem_display_name = u'Option Response Problem'
actual_display_name = self.handle_callback_and_get_display_name_from_event(mock_tracker, problem_display_name)
self.assertEquals(problem_display_name, actual_display_name)
def handle_callback_and_get_display_name_from_event(self, mock_tracker, problem_display_name=None):
"""
Creates a fake module, invokes the callback and extracts the display name from the emitted problem_check event.
"""
descriptor_kwargs = {
'category': 'problem',
'data': self.problem_xml
}
if problem_display_name:
descriptor_kwargs['display_name'] = problem_display_name
descriptor = ItemFactory.create(**descriptor_kwargs)
render.handle_xblock_callback(
self.request,
self.course.id,
quote_slashes(str(descriptor.location)),
'xmodule_handler',
'problem_check',
)
self.assertEquals(len(mock_tracker.send.mock_calls), 1)
mock_call = mock_tracker.send.mock_calls[0]
event = mock_call[1][0]
self.assertEquals(event['event_type'], 'problem_check')
return event['context']['module']['display_name']
def test_missing_display_name(self, mock_tracker):
actual_display_name = self.handle_callback_and_get_display_name_from_event(mock_tracker)
self.assertTrue(actual_display_name.startswith('problem'))
|
agpl-3.0
|
mhotwagner/abackend
|
abackend-env/lib/python3.5/site-packages/django/contrib/staticfiles/views.py
|
581
|
1329
|
"""
Views and functions for serving static files. These are only to be used during
development, and SHOULD NOT be used in a production setting.
"""
import os
import posixpath
from django.conf import settings
from django.contrib.staticfiles import finders
from django.http import Http404
from django.utils.six.moves.urllib.parse import unquote
from django.views import static
def serve(request, path, insecure=False, **kwargs):
"""
Serve static files below a given point in the directory structure or
from locations inferred from the staticfiles finders.
To use, put a URL pattern such as::
from django.contrib.staticfiles import views
url(r'^(?P<path>.*)$', views.serve)
in your URLconf.
It uses the django.views.static.serve() view to serve the found files.
"""
if not settings.DEBUG and not insecure:
raise Http404
normalized_path = posixpath.normpath(unquote(path)).lstrip('/')
absolute_path = finders.find(normalized_path)
if not absolute_path:
if path.endswith('/') or path == '':
raise Http404("Directory indexes are not allowed here.")
raise Http404("'%s' could not be found" % path)
document_root, path = os.path.split(absolute_path)
return static.serve(request, path, document_root=document_root, **kwargs)
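# Hedged usage sketch (not part of this module): a typical development-only
# hookup in a project's urls.py. django.conf.urls.url and settings.DEBUG are
# standard for the Django releases this file ships with; the 'static/' prefix
# below is an illustrative assumption.
#
#     from django.conf import settings
#     from django.conf.urls import url
#     from django.contrib.staticfiles import views
#
#     if settings.DEBUG:
#         urlpatterns += [
#             url(r'^static/(?P<path>.*)$', views.serve),
#         ]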
|
mit
|
momingsong/ns-3
|
src/core/examples/sample-simulator.py
|
43
|
2339
|
# -*- Mode:Python; -*-
# /*
# * Copyright (c) 2010 INRIA
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License version 2 as
# * published by the Free Software Foundation;
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# *
# * Authors: Mathieu Lacage <[email protected]>
# */
#
# Python version of sample-simulator.cc
import ns.core
class MyModel(object):
"""Simple model object to illustrate event handling."""
## \returns None.
def Start(self):
"""Start model execution by scheduling a HandleEvent."""
ns.core.Simulator.Schedule(ns.core.Seconds(10.0), self.HandleEvent, ns.core.Simulator.Now().GetSeconds())
## \param [in] value Event argument.
## \return None.
def HandleEvent(self, value):
"""Simple event handler."""
print "Member method received event at", ns.core.Simulator.Now().GetSeconds(), \
"s started at", value, "s"
def ExampleFunction(model):
print "ExampleFunction received event at", ns.core.Simulator.Now().GetSeconds(), "s"
model.Start()
def RandomFunction(model):
print "RandomFunction received event at", ns.core.Simulator.Now().GetSeconds(), "s"
def CancelledEvent():
print "I should never be called... "
def main(dummy_argv):
model = MyModel()
v = ns.core.UniformRandomVariable()
v.SetAttribute("Min", ns.core.DoubleValue (10))
v.SetAttribute("Max", ns.core.DoubleValue (20))
ns.core.Simulator.Schedule(ns.core.Seconds(10.0), ExampleFunction, model)
ns.core.Simulator.Schedule(ns.core.Seconds(v.GetValue()), RandomFunction, model)
    event_id = ns.core.Simulator.Schedule(ns.core.Seconds(30.0), CancelledEvent)
    ns.core.Simulator.Cancel(event_id)
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
if __name__ == '__main__':
import sys
main(sys.argv)
|
gpl-2.0
|
shupelneker/gae_new_structure
|
boilerplate/external/requests/packages/chardet/jpcntx.py
|
949
|
19104
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
# This is the hiragana 2-char sequence table; the number in each cell
# represents the frequency category of that pair of characters
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
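# Illustrative note (not in the original source): feed() below looks up the
# frequency category of two consecutive hiragana as
# jp2CharContext[previous_char_order][current_char_order] and tallies it in
# _mRelSample; category-0 pairs count against the confidence returned by
# get_confidence().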
class JapaneseContextAnalysis:
def __init__(self):
self.reset()
def reset(self):
        self._mTotalRel = 0  # total sequences received
        # category counters; each integer counts sequences in its category
self._mRelSample = [0] * NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._mNeedToSkipCharNum = 0
self._mLastCharOrder = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
def feed(self, aBuf, aLen):
if self._mDone:
return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer. In case the last one or two bytes of the previous
        # buffer were incomplete, we recorded how many bytes are needed to
        # complete that character and we skip those bytes here. We could
        # record those bytes and analyse the character once it is complete,
        # but since one character will not make much difference, simply
        # skipping it simplifies our logic and improves performance.
i = self._mNeedToSkipCharNum
while i < aLen:
order, charLen = self.get_order(aBuf[i:i + 2])
i += charLen
if i > aLen:
self._mNeedToSkipCharNum = i - aLen
self._mLastCharOrder = -1
else:
if (order != -1) and (self._mLastCharOrder != -1):
self._mTotalRel += 1
if self._mTotalRel > MAX_REL_THRESHOLD:
self._mDone = True
break
self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
self._mLastCharOrder = order
def got_enough_data(self):
return self._mTotalRel > ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
else:
return DONT_KNOW
def get_order(self, aBuf):
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
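            # The decimal 202 below is suspicious: the Shift_JIS hiragana lead
            # byte is 0x82, i.e. octal 0o202, so this may be an octal-to-decimal
            # slip inherited from the C original. Kept as-is to match upstream.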
if (first_char == 202) and (0x9F <= second_char <= 0xF1):
return second_char - 0x9F, charLen
return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
charLen = 2
elif first_char == 0x8F:
charLen = 3
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
return second_char - 0xA1, charLen
return -1, charLen
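# Minimal usage sketch (hypothetical driver; assumes the thresholds and
# wrap_ord defined earlier in this module, as in the full chardet source):
if __name__ == '__main__':
    sample = u'\u3053\u308c\u306f\u3072\u3089\u304c\u306a\u3067\u3059'.encode('euc-jp')
    analyser = EUCJPContextAnalysis()
    analyser.feed(sample, len(sample))
    # Returns DONT_KNOW until MINIMUM_DATA_THRESHOLD adjacent pairs are seen.
    print(analyser.get_confidence())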
# flake8: noqa
|
lgpl-3.0
|
Peddle/hue
|
desktop/core/ext-py/python-daemon/test/test_runner.py
|
39
|
23973
|
# -*- coding: utf-8 -*-
#
# test/test_runner.py
# Part of python-daemon, an implementation of PEP 3143.
#
# Copyright © 2009 Ben Finney <[email protected]>
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Unit test for runner module.
"""
import __builtin__
import os
import sys
import tempfile
import errno
import signal
import scaffold
from test_pidlockfile import (
FakeFileDescriptorStringIO,
setup_pidfile_fixtures,
make_pidlockfile_scenarios,
setup_lockfile_method_mocks,
)
from test_daemon import (
setup_streams_fixtures,
)
import daemon.daemon
from daemon import pidlockfile
from daemon import runner
class Exception_TestCase(scaffold.Exception_TestCase):
""" Test cases for module exception classes. """
def __init__(self, *args, **kwargs):
""" Set up a new instance. """
super(Exception_TestCase, self).__init__(*args, **kwargs)
self.valid_exceptions = {
runner.DaemonRunnerError: dict(
min_args = 1,
types = (Exception,),
),
runner.DaemonRunnerInvalidActionError: dict(
min_args = 1,
types = (runner.DaemonRunnerError, ValueError),
),
runner.DaemonRunnerStartFailureError: dict(
min_args = 1,
types = (runner.DaemonRunnerError, RuntimeError),
),
runner.DaemonRunnerStopFailureError: dict(
min_args = 1,
types = (runner.DaemonRunnerError, RuntimeError),
),
}
def make_runner_scenarios():
""" Make a collection of scenarios for testing DaemonRunner instances. """
pidlockfile_scenarios = make_pidlockfile_scenarios()
scenarios = {
'simple': {
'pidlockfile_scenario_name': 'simple',
},
'pidfile-locked': {
'pidlockfile_scenario_name': 'exist-other-pid-locked',
},
}
for scenario in scenarios.values():
if 'pidlockfile_scenario_name' in scenario:
pidlockfile_scenario = pidlockfile_scenarios.pop(
scenario['pidlockfile_scenario_name'])
scenario['pid'] = pidlockfile_scenario['pid']
scenario['pidfile_path'] = pidlockfile_scenario['path']
scenario['pidfile_timeout'] = 23
scenario['pidlockfile_scenario'] = pidlockfile_scenario
return scenarios
def set_runner_scenario(testcase, scenario_name, clear_tracker=True):
""" Set the DaemonRunner test scenario for the test case. """
scenarios = testcase.runner_scenarios
testcase.scenario = scenarios[scenario_name]
set_pidlockfile_scenario(
testcase, testcase.scenario['pidlockfile_scenario_name'])
if clear_tracker:
testcase.mock_tracker.clear()
def set_pidlockfile_scenario(testcase, scenario_name):
""" Set the PIDLockFile test scenario for the test case. """
scenarios = testcase.pidlockfile_scenarios
testcase.pidlockfile_scenario = scenarios[scenario_name]
setup_lockfile_method_mocks(
testcase, testcase.pidlockfile_scenario,
testcase.lockfile_class_name)
def setup_runner_fixtures(testcase):
""" Set up common test fixtures for DaemonRunner test case. """
testcase.mock_tracker = scaffold.MockTracker()
setup_pidfile_fixtures(testcase)
setup_streams_fixtures(testcase)
testcase.runner_scenarios = make_runner_scenarios()
testcase.mock_stderr = FakeFileDescriptorStringIO()
scaffold.mock(
"sys.stderr",
mock_obj=testcase.mock_stderr,
tracker=testcase.mock_tracker)
simple_scenario = testcase.runner_scenarios['simple']
testcase.lockfile_class_name = "pidlockfile.TimeoutPIDLockFile"
testcase.mock_runner_lock = scaffold.Mock(
testcase.lockfile_class_name,
tracker=testcase.mock_tracker)
testcase.mock_runner_lock.path = simple_scenario['pidfile_path']
scaffold.mock(
testcase.lockfile_class_name,
returns=testcase.mock_runner_lock,
tracker=testcase.mock_tracker)
class TestApp(object):
def __init__(self):
self.stdin_path = testcase.stream_file_paths['stdin']
self.stdout_path = testcase.stream_file_paths['stdout']
self.stderr_path = testcase.stream_file_paths['stderr']
self.pidfile_path = simple_scenario['pidfile_path']
self.pidfile_timeout = simple_scenario['pidfile_timeout']
run = scaffold.Mock(
"TestApp.run",
tracker=testcase.mock_tracker)
testcase.TestApp = TestApp
scaffold.mock(
"daemon.runner.DaemonContext",
returns=scaffold.Mock(
"DaemonContext",
tracker=testcase.mock_tracker),
tracker=testcase.mock_tracker)
testcase.test_app = testcase.TestApp()
testcase.test_program_name = "bazprog"
testcase.test_program_path = (
"/foo/bar/%(test_program_name)s" % vars(testcase))
testcase.valid_argv_params = {
'start': [testcase.test_program_path, 'start'],
'stop': [testcase.test_program_path, 'stop'],
'restart': [testcase.test_program_path, 'restart'],
}
def mock_open(filename, mode=None, buffering=None):
if filename in testcase.stream_files_by_path:
result = testcase.stream_files_by_path[filename]
else:
result = FakeFileDescriptorStringIO()
result.mode = mode
result.buffering = buffering
return result
scaffold.mock(
"__builtin__.open",
returns_func=mock_open,
tracker=testcase.mock_tracker)
scaffold.mock(
"os.kill",
tracker=testcase.mock_tracker)
scaffold.mock(
"sys.argv",
mock_obj=testcase.valid_argv_params['start'],
tracker=testcase.mock_tracker)
testcase.test_instance = runner.DaemonRunner(testcase.test_app)
testcase.scenario = NotImplemented
class DaemonRunner_TestCase(scaffold.TestCase):
""" Test cases for DaemonRunner class. """
def setUp(self):
""" Set up test fixtures. """
setup_runner_fixtures(self)
set_runner_scenario(self, 'simple')
scaffold.mock(
"runner.DaemonRunner.parse_args",
tracker=self.mock_tracker)
self.test_instance = runner.DaemonRunner(self.test_app)
def tearDown(self):
""" Tear down test fixtures. """
scaffold.mock_restore()
def test_instantiate(self):
""" New instance of DaemonRunner should be created. """
self.failUnlessIsInstance(self.test_instance, runner.DaemonRunner)
def test_parses_commandline_args(self):
""" Should parse commandline arguments. """
expect_mock_output = """\
Called runner.DaemonRunner.parse_args()
...
"""
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_has_specified_app(self):
""" Should have specified application object. """
self.failUnlessIs(self.test_app, self.test_instance.app)
def test_sets_pidfile_none_when_pidfile_path_is_none(self):
""" Should set ‘pidfile’ to ‘None’ when ‘pidfile_path’ is ‘None’. """
pidfile_path = None
self.test_app.pidfile_path = pidfile_path
expect_pidfile = None
instance = runner.DaemonRunner(self.test_app)
self.failUnlessIs(expect_pidfile, instance.pidfile)
def test_error_when_pidfile_path_not_string(self):
""" Should raise ValueError when PID file path not a string. """
pidfile_path = object()
self.test_app.pidfile_path = pidfile_path
expect_error = ValueError
self.failUnlessRaises(
expect_error,
runner.DaemonRunner, self.test_app)
def test_error_when_pidfile_path_not_absolute(self):
""" Should raise ValueError when PID file path not absolute. """
pidfile_path = "foo/bar.pid"
self.test_app.pidfile_path = pidfile_path
expect_error = ValueError
self.failUnlessRaises(
expect_error,
runner.DaemonRunner, self.test_app)
def test_creates_lock_with_specified_parameters(self):
""" Should create a TimeoutPIDLockFile with specified params. """
pidfile_path = self.scenario['pidfile_path']
pidfile_timeout = self.scenario['pidfile_timeout']
lockfile_class_name = self.lockfile_class_name
expect_mock_output = """\
...
Called %(lockfile_class_name)s(
%(pidfile_path)r,
%(pidfile_timeout)r)
""" % vars()
scaffold.mock_restore()
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_has_created_pidfile(self):
""" Should have new PID lock file as `pidfile` attribute. """
expect_pidfile = self.mock_runner_lock
instance = self.test_instance
self.failUnlessIs(
expect_pidfile, instance.pidfile)
def test_daemon_context_has_created_pidfile(self):
""" DaemonContext component should have new PID lock file. """
expect_pidfile = self.mock_runner_lock
daemon_context = self.test_instance.daemon_context
self.failUnlessIs(
expect_pidfile, daemon_context.pidfile)
def test_daemon_context_has_specified_stdin_stream(self):
""" DaemonContext component should have specified stdin file. """
test_app = self.test_app
expect_file = self.stream_files_by_name['stdin']
daemon_context = self.test_instance.daemon_context
self.failUnlessEqual(expect_file, daemon_context.stdin)
def test_daemon_context_has_stdin_in_read_mode(self):
""" DaemonContext component should open stdin file for read. """
expect_mode = 'r'
daemon_context = self.test_instance.daemon_context
self.failUnlessIn(daemon_context.stdin.mode, expect_mode)
def test_daemon_context_has_specified_stdout_stream(self):
""" DaemonContext component should have specified stdout file. """
test_app = self.test_app
expect_file = self.stream_files_by_name['stdout']
daemon_context = self.test_instance.daemon_context
self.failUnlessEqual(expect_file, daemon_context.stdout)
def test_daemon_context_has_stdout_in_append_mode(self):
""" DaemonContext component should open stdout file for append. """
expect_mode = 'w+'
daemon_context = self.test_instance.daemon_context
self.failUnlessIn(daemon_context.stdout.mode, expect_mode)
def test_daemon_context_has_specified_stderr_stream(self):
""" DaemonContext component should have specified stderr file. """
test_app = self.test_app
expect_file = self.stream_files_by_name['stderr']
daemon_context = self.test_instance.daemon_context
self.failUnlessEqual(expect_file, daemon_context.stderr)
def test_daemon_context_has_stderr_in_append_mode(self):
""" DaemonContext component should open stderr file for append. """
expect_mode = 'w+'
daemon_context = self.test_instance.daemon_context
self.failUnlessIn(daemon_context.stderr.mode, expect_mode)
def test_daemon_context_has_stderr_with_no_buffering(self):
""" DaemonContext component should open stderr file unbuffered. """
expect_buffering = 0
daemon_context = self.test_instance.daemon_context
self.failUnlessEqual(
expect_buffering, daemon_context.stderr.buffering)
class DaemonRunner_usage_exit_TestCase(scaffold.TestCase):
""" Test cases for DaemonRunner.usage_exit method. """
def setUp(self):
""" Set up test fixtures. """
setup_runner_fixtures(self)
set_runner_scenario(self, 'simple')
def tearDown(self):
""" Tear down test fixtures. """
scaffold.mock_restore()
def test_raises_system_exit(self):
""" Should raise SystemExit exception. """
instance = self.test_instance
argv = [self.test_program_path]
self.failUnlessRaises(
SystemExit,
instance._usage_exit, argv)
def test_message_follows_conventional_format(self):
""" Should emit a conventional usage message. """
instance = self.test_instance
progname = self.test_program_name
argv = [self.test_program_path]
expect_stderr_output = """\
usage: %(progname)s ...
""" % vars()
self.failUnlessRaises(
SystemExit,
instance._usage_exit, argv)
self.failUnlessOutputCheckerMatch(
expect_stderr_output, self.mock_stderr.getvalue())
class DaemonRunner_parse_args_TestCase(scaffold.TestCase):
""" Test cases for DaemonRunner.parse_args method. """
def setUp(self):
""" Set up test fixtures. """
setup_runner_fixtures(self)
set_runner_scenario(self, 'simple')
scaffold.mock(
"daemon.runner.DaemonRunner._usage_exit",
raises=NotImplementedError,
tracker=self.mock_tracker)
def tearDown(self):
""" Tear down test fixtures. """
scaffold.mock_restore()
def test_emits_usage_message_if_insufficient_args(self):
""" Should emit a usage message and exit if too few arguments. """
instance = self.test_instance
argv = [self.test_program_path]
expect_mock_output = """\
Called daemon.runner.DaemonRunner._usage_exit(%(argv)r)
""" % vars()
try:
instance.parse_args(argv)
except NotImplementedError:
pass
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_emits_usage_message_if_unknown_action_arg(self):
""" Should emit a usage message and exit if unknown action. """
instance = self.test_instance
progname = self.test_program_name
argv = [self.test_program_path, 'bogus']
expect_mock_output = """\
Called daemon.runner.DaemonRunner._usage_exit(%(argv)r)
""" % vars()
try:
instance.parse_args(argv)
except NotImplementedError:
pass
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_should_parse_system_argv_by_default(self):
""" Should parse sys.argv by default. """
instance = self.test_instance
expect_action = 'start'
argv = self.valid_argv_params['start']
scaffold.mock(
"sys.argv",
mock_obj=argv,
tracker=self.mock_tracker)
instance.parse_args()
self.failUnlessEqual(expect_action, instance.action)
def test_sets_action_from_first_argument(self):
""" Should set action from first commandline argument. """
instance = self.test_instance
for name, argv in self.valid_argv_params.items():
expect_action = name
instance.parse_args(argv)
self.failUnlessEqual(expect_action, instance.action)
class DaemonRunner_do_action_TestCase(scaffold.TestCase):
""" Test cases for DaemonRunner.do_action method. """
def setUp(self):
""" Set up test fixtures. """
setup_runner_fixtures(self)
set_runner_scenario(self, 'simple')
def tearDown(self):
""" Tear down test fixtures. """
scaffold.mock_restore()
def test_raises_error_if_unknown_action(self):
""" Should emit a usage message and exit if action is unknown. """
instance = self.test_instance
instance.action = 'bogus'
expect_error = runner.DaemonRunnerInvalidActionError
self.failUnlessRaises(
expect_error,
instance.do_action)
class DaemonRunner_do_action_start_TestCase(scaffold.TestCase):
""" Test cases for DaemonRunner.do_action method, action 'start'. """
def setUp(self):
""" Set up test fixtures. """
setup_runner_fixtures(self)
set_runner_scenario(self, 'simple')
self.test_instance.action = 'start'
def tearDown(self):
""" Tear down test fixtures. """
scaffold.mock_restore()
def test_raises_error_if_pidfile_locked(self):
""" Should raise error if PID file is locked. """
set_pidlockfile_scenario(self, 'exist-other-pid-locked')
instance = self.test_instance
instance.daemon_context.open.mock_raises = (
pidlockfile.AlreadyLocked)
pidfile_path = self.scenario['pidfile_path']
expect_error = runner.DaemonRunnerStartFailureError
expect_message_content = pidfile_path
try:
instance.do_action()
except expect_error, exc:
pass
else:
raise self.failureException(
"Failed to raise " + expect_error.__name__)
self.failUnlessIn(exc.message, expect_message_content)
def test_breaks_lock_if_no_such_process(self):
""" Should request breaking lock if PID file process is not running. """
set_runner_scenario(self, 'pidfile-locked')
instance = self.test_instance
self.mock_runner_lock.read_pid.mock_returns = (
self.scenario['pidlockfile_scenario']['pidfile_pid'])
pidfile_path = self.scenario['pidfile_path']
test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid']
expect_signal = signal.SIG_DFL
error = OSError(errno.ESRCH, "Not running")
os.kill.mock_raises = error
lockfile_class_name = self.lockfile_class_name
expect_mock_output = """\
...
Called os.kill(%(test_pid)r, %(expect_signal)r)
Called %(lockfile_class_name)s.break_lock()
...
""" % vars()
instance.do_action()
scaffold.mock_restore()
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_requests_daemon_context_open(self):
""" Should request the daemon context to open. """
instance = self.test_instance
expect_mock_output = """\
...
Called DaemonContext.open()
...
"""
instance.do_action()
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_emits_start_message_to_stderr(self):
""" Should emit start message to stderr. """
instance = self.test_instance
current_pid = self.scenario['pid']
expect_stderr = """\
started with pid %(current_pid)d
""" % vars()
instance.do_action()
self.failUnlessOutputCheckerMatch(
expect_stderr, self.mock_stderr.getvalue())
def test_requests_app_run(self):
""" Should request the application to run. """
instance = self.test_instance
expect_mock_output = """\
...
Called TestApp.run()
"""
instance.do_action()
self.failUnlessMockCheckerMatch(expect_mock_output)
class DaemonRunner_do_action_stop_TestCase(scaffold.TestCase):
""" Test cases for DaemonRunner.do_action method, action 'stop'. """
def setUp(self):
""" Set up test fixtures. """
setup_runner_fixtures(self)
set_runner_scenario(self, 'pidfile-locked')
self.test_instance.action = 'stop'
self.mock_runner_lock.is_locked.mock_returns = True
self.mock_runner_lock.i_am_locking.mock_returns = False
self.mock_runner_lock.read_pid.mock_returns = (
self.scenario['pidlockfile_scenario']['pidfile_pid'])
def tearDown(self):
""" Tear down test fixtures. """
scaffold.mock_restore()
def test_raises_error_if_pidfile_not_locked(self):
""" Should raise error if PID file is not locked. """
set_runner_scenario(self, 'simple')
instance = self.test_instance
self.mock_runner_lock.is_locked.mock_returns = False
self.mock_runner_lock.i_am_locking.mock_returns = False
self.mock_runner_lock.read_pid.mock_returns = (
self.scenario['pidlockfile_scenario']['pidfile_pid'])
pidfile_path = self.scenario['pidfile_path']
expect_error = runner.DaemonRunnerStopFailureError
expect_message_content = pidfile_path
try:
instance.do_action()
except expect_error, exc:
pass
else:
raise self.failureException(
"Failed to raise " + expect_error.__name__)
scaffold.mock_restore()
self.failUnlessIn(exc.message, expect_message_content)
def test_breaks_lock_if_pidfile_stale(self):
""" Should break lock if PID file is stale. """
instance = self.test_instance
pidfile_path = self.scenario['pidfile_path']
test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid']
expect_signal = signal.SIG_DFL
error = OSError(errno.ESRCH, "Not running")
os.kill.mock_raises = error
lockfile_class_name = self.lockfile_class_name
expect_mock_output = """\
...
Called %(lockfile_class_name)s.break_lock()
""" % vars()
instance.do_action()
scaffold.mock_restore()
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_sends_terminate_signal_to_process_from_pidfile(self):
""" Should send SIGTERM to the daemon process. """
instance = self.test_instance
test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid']
expect_signal = signal.SIGTERM
expect_mock_output = """\
...
Called os.kill(%(test_pid)r, %(expect_signal)r)
""" % vars()
instance.do_action()
scaffold.mock_restore()
self.failUnlessMockCheckerMatch(expect_mock_output)
def test_raises_error_if_cannot_send_signal_to_process(self):
""" Should raise error if cannot send signal to daemon process. """
instance = self.test_instance
test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid']
pidfile_path = self.scenario['pidfile_path']
error = OSError(errno.EPERM, "Nice try")
os.kill.mock_raises = error
expect_error = runner.DaemonRunnerStopFailureError
expect_message_content = str(test_pid)
try:
instance.do_action()
except expect_error, exc:
pass
else:
raise self.failureException(
"Failed to raise " + expect_error.__name__)
scaffold.mock_restore()
self.failUnlessIn(exc.message, expect_message_content)
class DaemonRunner_do_action_restart_TestCase(scaffold.TestCase):
""" Test cases for DaemonRunner.do_action method, action 'restart'. """
def setUp(self):
""" Set up test fixtures. """
setup_runner_fixtures(self)
set_runner_scenario(self, 'pidfile-locked')
self.test_instance.action = 'restart'
def tearDown(self):
""" Tear down test fixtures. """
scaffold.mock_restore()
def test_requests_stop_then_start(self):
""" Should request stop, then start. """
instance = self.test_instance
scaffold.mock(
"daemon.runner.DaemonRunner._start",
tracker=self.mock_tracker)
scaffold.mock(
"daemon.runner.DaemonRunner._stop",
tracker=self.mock_tracker)
expect_mock_output = """\
Called daemon.runner.DaemonRunner._stop()
Called daemon.runner.DaemonRunner._start()
"""
instance.do_action()
self.failUnlessMockCheckerMatch(expect_mock_output)
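# Usage sketch of the API exercised above (hypothetical paths; python-daemon's
# DaemonRunner reads its action from argv, e.g. `./bazprog start`):
if __name__ == '__main__':
    class ExampleApp(object):
        stdin_path = '/dev/null'
        stdout_path = '/tmp/example-daemon.log'
        stderr_path = '/tmp/example-daemon.log'
        pidfile_path = '/tmp/example-daemon.pid'
        pidfile_timeout = 23
        def run(self):
            import time
            while True:
                time.sleep(10)
    runner.DaemonRunner(ExampleApp()).do_action()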
|
apache-2.0
|
amyvmiwei/kbengine
|
kbe/res/scripts/common/Lib/site-packages/pip/commands/bundle.py
|
392
|
1787
|
import textwrap
from pip.locations import build_prefix, src_prefix
from pip.util import display_path, backup_dir
from pip.log import logger
from pip.exceptions import InstallationError
from pip.commands.install import InstallCommand
class BundleCommand(InstallCommand):
"""Create pybundles (archives containing multiple packages)."""
name = 'bundle'
usage = """
%prog [options] <bundle name>.pybundle <package>..."""
summary = 'DEPRECATED. Create pybundles.'
bundle = True
def __init__(self, *args, **kw):
super(BundleCommand, self).__init__(*args, **kw)
# bundle uses different default source and build dirs
build_opt = self.parser.get_option("--build")
build_opt.default = backup_dir(build_prefix, '-bundle')
src_opt = self.parser.get_option("--src")
src_opt.default = backup_dir(src_prefix, '-bundle')
self.parser.set_defaults(**{
src_opt.dest: src_opt.default,
build_opt.dest: build_opt.default,
})
def run(self, options, args):
logger.deprecated('1.6', "DEPRECATION: 'pip bundle' and support for installing from *.pybundle files is deprecated. "
"See https://github.com/pypa/pip/pull/1046")
if not args:
raise InstallationError('You must give a bundle filename')
# We have to get everything when creating a bundle:
options.ignore_installed = True
logger.notify('Putting temporary build files in %s and source/develop files in %s'
% (display_path(options.build_dir), display_path(options.src_dir)))
self.bundle_filename = args.pop(0)
requirement_set = super(BundleCommand, self).run(options, args)
return requirement_set
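# Usage sketch (long deprecated, per the warning above):
#   pip bundle mybundle.pybundle SomePackage
# downloads SomePackage plus its dependencies into the bundle-specific build
# and source dirs configured in __init__ and writes them into mybundle.pybundle.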
|
lgpl-3.0
|
utamaro/youtube-dl
|
youtube_dl/extractor/metacafe.py
|
87
|
10165
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_parse_qs,
compat_urllib_parse,
compat_urllib_parse_unquote,
compat_urllib_request,
)
from ..utils import (
determine_ext,
ExtractorError,
int_or_none,
)
class MetacafeIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?metacafe\.com/watch/([^/]+)/([^/]+)/.*'
_DISCLAIMER = 'http://www.metacafe.com/family_filter/'
_FILTER_POST = 'http://www.metacafe.com/f/index.php?inputType=filter&controllerGroup=user'
IE_NAME = 'metacafe'
_TESTS = [
# Youtube video
{
'add_ie': ['Youtube'],
'url': 'http://metacafe.com/watch/yt-_aUehQsCQtM/the_electric_company_short_i_pbs_kids_go/',
'info_dict': {
'id': '_aUehQsCQtM',
'ext': 'mp4',
'upload_date': '20090102',
'title': 'The Electric Company | "Short I" | PBS KIDS GO!',
'description': 'md5:2439a8ef6d5a70e380c22f5ad323e5a8',
'uploader': 'PBS',
'uploader_id': 'PBS'
}
},
# Normal metacafe video
{
'url': 'http://www.metacafe.com/watch/11121940/news_stuff_you_wont_do_with_your_playstation_4/',
'md5': '6e0bca200eaad2552e6915ed6fd4d9ad',
'info_dict': {
'id': '11121940',
'ext': 'mp4',
'title': 'News: Stuff You Won\'t Do with Your PlayStation 4',
'uploader': 'ign',
'description': 'Sony released a massive FAQ on the PlayStation Blog detailing the PS4\'s capabilities and limitations.',
},
},
# AnyClip video
{
'url': 'http://www.metacafe.com/watch/an-dVVXnuY7Jh77J/the_andromeda_strain_1971_stop_the_bomb_part_3/',
'info_dict': {
'id': 'an-dVVXnuY7Jh77J',
'ext': 'mp4',
'title': 'The Andromeda Strain (1971): Stop the Bomb Part 3',
'uploader': 'anyclip',
'description': 'md5:38c711dd98f5bb87acf973d573442e67',
},
},
# age-restricted video
{
'url': 'http://www.metacafe.com/watch/5186653/bbc_internal_christmas_tape_79_uncensored_outtakes_etc/',
'md5': '98dde7c1a35d02178e8ab7560fe8bd09',
'info_dict': {
'id': '5186653',
'ext': 'mp4',
'title': 'BBC INTERNAL Christmas Tape \'79 - UNCENSORED Outtakes, Etc.',
'uploader': 'Dwayne Pipe',
'description': 'md5:950bf4c581e2c059911fa3ffbe377e4b',
'age_limit': 18,
},
},
# cbs video
{
'url': 'http://www.metacafe.com/watch/cb-8VD4r_Zws8VP/open_this_is_face_the_nation_february_9/',
'info_dict': {
'id': '8VD4r_Zws8VP',
'ext': 'flv',
'title': 'Open: This is Face the Nation, February 9',
'description': 'md5:8a9ceec26d1f7ed6eab610834cc1a476',
'duration': 96,
},
'params': {
# rtmp download
'skip_download': True,
},
},
# Movieclips.com video
{
'url': 'http://www.metacafe.com/watch/mv-Wy7ZU/my_week_with_marilyn_do_you_love_me/',
'info_dict': {
'id': 'mv-Wy7ZU',
'ext': 'mp4',
'title': 'My Week with Marilyn - Do You Love Me?',
'description': 'From the movie My Week with Marilyn - Colin (Eddie Redmayne) professes his love to Marilyn (Michelle Williams) and gets her to promise to return to set and finish the movie.',
'uploader': 'movie_trailers',
'duration': 176,
},
'params': {
'skip_download': 'requires rtmpdump',
}
}
]
def report_disclaimer(self):
self.to_screen('Retrieving disclaimer')
def _real_initialize(self):
# Retrieve disclaimer
self.report_disclaimer()
self._download_webpage(self._DISCLAIMER, None, False, 'Unable to retrieve disclaimer')
# Confirm age
disclaimer_form = {
'filters': '0',
'submit': "Continue - I'm over 18",
}
request = compat_urllib_request.Request(self._FILTER_POST, compat_urllib_parse.urlencode(disclaimer_form))
request.add_header('Content-Type', 'application/x-www-form-urlencoded')
self.report_age_confirmation()
self._download_webpage(request, None, False, 'Unable to confirm age')
def _real_extract(self, url):
# Extract id and simplified title from URL
mobj = re.match(self._VALID_URL, url)
if mobj is None:
raise ExtractorError('Invalid URL: %s' % url)
video_id = mobj.group(1)
# the video may come from an external site
        m_external = re.match(r'^(\w{2})-(.*)$', video_id)
if m_external is not None:
prefix, ext_id = m_external.groups()
# Check if video comes from YouTube
if prefix == 'yt':
return self.url_result('http://www.youtube.com/watch?v=%s' % ext_id, 'Youtube')
# CBS videos use theplatform.com
if prefix == 'cb':
return self.url_result('theplatform:%s' % ext_id, 'ThePlatform')
# Retrieve video webpage to extract further information
req = compat_urllib_request.Request('http://www.metacafe.com/watch/%s/' % video_id)
# AnyClip videos require the flashversion cookie so that we get the link
# to the mp4 file
mobj_an = re.match(r'^an-(.*?)$', video_id)
if mobj_an:
req.headers['Cookie'] = 'flashVersion=0;'
webpage = self._download_webpage(req, video_id)
# Extract URL, uploader and title from webpage
self.report_extraction(video_id)
video_url = None
mobj = re.search(r'(?m)&mediaURL=([^&]+)', webpage)
if mobj is not None:
mediaURL = compat_urllib_parse_unquote(mobj.group(1))
video_ext = mediaURL[-3:]
# Extract gdaKey if available
mobj = re.search(r'(?m)&gdaKey=(.*?)&', webpage)
if mobj is None:
video_url = mediaURL
else:
gdaKey = mobj.group(1)
video_url = '%s?__gda__=%s' % (mediaURL, gdaKey)
if video_url is None:
mobj = re.search(r'<video src="([^"]+)"', webpage)
if mobj:
video_url = mobj.group(1)
video_ext = 'mp4'
if video_url is None:
flashvars = self._search_regex(
r' name="flashvars" value="(.*?)"', webpage, 'flashvars',
default=None)
if flashvars:
vardict = compat_parse_qs(flashvars)
if 'mediaData' not in vardict:
raise ExtractorError('Unable to extract media URL')
mobj = re.search(
r'"mediaURL":"(?P<mediaURL>http.*?)",(.*?)"key":"(?P<key>.*?)"', vardict['mediaData'][0])
if mobj is None:
raise ExtractorError('Unable to extract media URL')
mediaURL = mobj.group('mediaURL').replace('\\/', '/')
video_url = '%s?__gda__=%s' % (mediaURL, mobj.group('key'))
video_ext = determine_ext(video_url)
if video_url is None:
player_url = self._search_regex(
r"swfobject\.embedSWF\('([^']+)'",
webpage, 'config URL', default=None)
if player_url:
config_url = self._search_regex(
r'config=(.+)$', player_url, 'config URL')
config_doc = self._download_xml(
config_url, video_id,
note='Downloading video config')
smil_url = config_doc.find('.//properties').attrib['smil_file']
smil_doc = self._download_xml(
smil_url, video_id,
note='Downloading SMIL document')
base_url = smil_doc.find('./head/meta').attrib['base']
video_url = []
for vn in smil_doc.findall('.//video'):
br = int(vn.attrib['system-bitrate'])
play_path = vn.attrib['src']
video_url.append({
'format_id': 'smil-%d' % br,
'url': base_url,
'play_path': play_path,
'page_url': url,
'player_url': player_url,
'ext': play_path.partition(':')[0],
})
if video_url is None:
raise ExtractorError('Unsupported video type')
video_title = self._html_search_regex(
r'(?im)<title>(.*) - Video</title>', webpage, 'title')
description = self._og_search_description(webpage)
thumbnail = self._og_search_thumbnail(webpage)
video_uploader = self._html_search_regex(
r'submitter=(.*?);|googletag\.pubads\(\)\.setTargeting\("(?:channel|submiter)","([^"]+)"\);',
webpage, 'uploader nickname', fatal=False)
duration = int_or_none(
self._html_search_meta('video:duration', webpage))
age_limit = (
18
if re.search(r'"contentRating":"restricted"', webpage)
else 0)
if isinstance(video_url, list):
formats = video_url
else:
formats = [{
'url': video_url,
'ext': video_ext,
}]
self._sort_formats(formats)
return {
'id': video_id,
'description': description,
'uploader': video_uploader,
'title': video_title,
'thumbnail': thumbnail,
'age_limit': age_limit,
'formats': formats,
'duration': duration,
}
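# Usage sketch (hypothetical invocation; youtube-dl normally selects this
# extractor automatically for metacafe.com watch URLs):
if __name__ == '__main__':
    import youtube_dl
    with youtube_dl.YoutubeDL({'quiet': True}) as ydl:
        # Two-letter id prefixes ('yt-', 'cb-', 'an-', 'mv-') trigger the
        # external-site branches implemented in _real_extract above.
        info = ydl.extract_info(
            'http://www.metacafe.com/watch/11121940/news_stuff_you_wont_do_with_your_playstation_4/',
            download=False)
        print(info['title'])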
|
unlicense
|
gregerts/debian-qpid-cpp
|
bindings/qpid/examples/python/statistics.py
|
4
|
3983
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
TS = "ts"
TIME_SEC = 1000000000
MILLISECOND = 1000
class Statistic:
def message(self, msg):
return
def report(self):
return ""
def header(self):
return ""
class Throughput(Statistic):
def __init__(self):
self.messages = 0
self.started = False
def message(self, m):
self.messages += 1
if not self.started:
self.start = time.time()
self.started = True
def header(self):
return "tp(m/s)"
def report(self):
if self.started:
elapsed = time.time() - self.start
return str(int(self.messages/elapsed))
else:
return "0"
class ThroughputAndLatency(Throughput):
def __init__(self):
Throughput.__init__(self)
self.total = 0.0
self.min = float('inf')
self.max = -float('inf')
self.samples = 0
def message(self, m):
Throughput.message(self, m)
if TS in m.properties:
self.samples+=1
latency = MILLISECOND * (time.time() - float(m.properties[TS])/TIME_SEC)
if latency > 0:
self.total += latency
if latency < self.min:
self.min = latency
if latency > self.max:
self.max = latency
def header(self):
# Throughput.header(self)
return "%s\tl-min\tl-max\tl-avg" % Throughput.header(self)
def report(self):
output = Throughput.report(self)
if (self.samples > 0):
output += "\t%.2f\t%.2f\t%.2f" %(self.min, self.max, self.total/self.samples)
return output
# Report batch and overall statistics
class ReporterBase:
def __init__(self, batch, wantHeader):
self.batchSize = batch
self.batchCount = 0
self.headerPrinted = not wantHeader
self.overall = None
self.batch = None
def create(self):
return
    # Count the message in the statistics
def message(self, m):
if self.overall == None:
self.overall = self.create()
self.overall.message(m)
if self.batchSize:
if self.batch == None:
self.batch = self.create()
self.batch.message(m)
self.batchCount+=1
if self.batchCount == self.batchSize:
self.header()
print self.batch.report()
                self.batch = self.create()  # start a fresh batch so its stats reset
self.batchCount = 0
# Print overall report.
def report(self):
if self.overall == None:
self.overall = self.create()
self.header()
print self.overall.report()
def header(self):
if not self.headerPrinted:
if self.overall == None:
self.overall = self.create()
print self.overall.header()
self.headerPrinted = True
class Reporter(ReporterBase):
def __init__(self, batchSize, wantHeader, Stats):
ReporterBase.__init__(self, batchSize, wantHeader)
self.__stats = Stats
def create(self):
ClassName = self.__stats.__class__
return ClassName()
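# Usage sketch: per-batch throughput/latency reporting, assuming each message
# carries a 'ts' property with a send timestamp in nanoseconds (as the matching
# sender examples do). FakeMessage is a stand-in for a qpid message.
if __name__ == '__main__':
    class FakeMessage:
        def __init__(self):
            self.properties = {TS: str(int(time.time() * TIME_SEC))}
    reporter = Reporter(10, True, ThroughputAndLatency())
    for i in range(25):
        reporter.message(FakeMessage())
    reporter.report()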
|
apache-2.0
|
Cactuslegs/audacity-of-nope
|
lib-src/lv2/lv2/plugins/eg01-amp.lv2/waflib/extras/autowaf.py
|
65
|
22357
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import glob
import os
import subprocess
import sys
from waflib import Configure,Context,Logs,Node,Options,Task,Utils
from waflib.TaskGen import feature,before,after
global g_is_child
g_is_child=False
global g_step
g_step=0
@feature('c','cxx')
@after('apply_incpaths')
def include_config_h(self):
self.env.append_value('INCPATHS',self.bld.bldnode.abspath())
def set_options(opt,debug_by_default=False):
global g_step
if g_step>0:
return
dirs_options=opt.add_option_group('Installation directories','')
for k in('--prefix','--destdir'):
option=opt.parser.get_option(k)
if option:
opt.parser.remove_option(k)
dirs_options.add_option(option)
dirs_options.add_option('--bindir',type='string',help="Executable programs [Default: PREFIX/bin]")
dirs_options.add_option('--configdir',type='string',help="Configuration data [Default: PREFIX/etc]")
dirs_options.add_option('--datadir',type='string',help="Shared data [Default: PREFIX/share]")
dirs_options.add_option('--includedir',type='string',help="Header files [Default: PREFIX/include]")
dirs_options.add_option('--libdir',type='string',help="Libraries [Default: PREFIX/lib]")
dirs_options.add_option('--mandir',type='string',help="Manual pages [Default: DATADIR/man]")
dirs_options.add_option('--docdir',type='string',help="HTML documentation [Default: DATADIR/doc]")
if debug_by_default:
opt.add_option('--optimize',action='store_false',default=True,dest='debug',help="Build optimized binaries")
else:
opt.add_option('--debug',action='store_true',default=False,dest='debug',help="Build debuggable binaries")
opt.add_option('--pardebug',action='store_true',default=False,dest='pardebug',help="Build parallel-installable debuggable libraries with D suffix")
opt.add_option('--grind',action='store_true',default=False,dest='grind',help="Run tests in valgrind")
opt.add_option('--strict',action='store_true',default=False,dest='strict',help="Use strict compiler flags and show all warnings")
opt.add_option('--ultra-strict',action='store_true',default=False,dest='ultra_strict',help="Use even stricter compiler flags (likely to trigger many warnings in library headers)")
opt.add_option('--docs',action='store_true',default=False,dest='docs',help="Build documentation - requires doxygen")
opt.add_option('--lv2-user',action='store_true',default=False,dest='lv2_user',help="Install LV2 bundles to user location")
opt.add_option('--lv2-system',action='store_true',default=False,dest='lv2_system',help="Install LV2 bundles to system location")
dirs_options.add_option('--lv2dir',type='string',help="LV2 bundles [Default: LIBDIR/lv2]")
g_step=1
def check_header(conf,lang,name,define='',mandatory=True):
includes=''
if sys.platform=="darwin":
includes='/opt/local/include'
if lang=='c':
check_func=conf.check_cc
elif lang=='cxx':
check_func=conf.check_cxx
else:
Logs.error("Unknown header language `%s'"%lang)
return
if define!='':
check_func(header_name=name,includes=includes,define_name=define,mandatory=mandatory)
else:
check_func(header_name=name,includes=includes,mandatory=mandatory)
def nameify(name):
return name.replace('/','_').replace('++','PP').replace('-','_').replace('.','_')
def define(conf,var_name,value):
conf.define(var_name,value)
conf.env[var_name]=value
def check_pkg(conf,name,**args):
if args['uselib_store'].lower()in conf.env['AUTOWAF_LOCAL_LIBS']:
return
class CheckType:
OPTIONAL=1
MANDATORY=2
var_name='CHECKED_'+nameify(args['uselib_store'])
check=not var_name in conf.env
mandatory=not'mandatory'in args or args['mandatory']
if not check and'atleast_version'in args:
checked_version=conf.env['VERSION_'+name]
if checked_version and checked_version<args['atleast_version']:
check=True;
if not check and mandatory and conf.env[var_name]==CheckType.OPTIONAL:
check=True;
if check:
found=None
pkg_var_name='PKG_'+name.replace('-','_')
pkg_name=name
if conf.env.PARDEBUG:
args['mandatory']=False
found=conf.check_cfg(package=pkg_name+'D',args="--cflags --libs",**args)
if found:
pkg_name+='D'
if mandatory:
args['mandatory']=True
if not found:
found=conf.check_cfg(package=pkg_name,args="--cflags --libs",**args)
if found:
conf.env[pkg_var_name]=pkg_name
if'atleast_version'in args:
conf.env['VERSION_'+name]=args['atleast_version']
if mandatory:
conf.env[var_name]=CheckType.MANDATORY
else:
conf.env[var_name]=CheckType.OPTIONAL
def normpath(path):
if sys.platform=='win32':
return os.path.normpath(path).replace('\\','/')
else:
return os.path.normpath(path)
def configure(conf):
global g_step
if g_step>1:
return
def append_cxx_flags(flags):
conf.env.append_value('CFLAGS',flags)
conf.env.append_value('CXXFLAGS',flags)
print('')
display_header('Global Configuration')
if Options.options.docs:
conf.load('doxygen')
conf.env['DOCS']=Options.options.docs
conf.env['DEBUG']=Options.options.debug or Options.options.pardebug
conf.env['PARDEBUG']=Options.options.pardebug
conf.env['PREFIX']=normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX'])))
def config_dir(var,opt,default):
if opt:
conf.env[var]=normpath(opt)
else:
conf.env[var]=normpath(default)
opts=Options.options
prefix=conf.env['PREFIX']
config_dir('BINDIR',opts.bindir,os.path.join(prefix,'bin'))
config_dir('SYSCONFDIR',opts.configdir,os.path.join(prefix,'etc'))
config_dir('DATADIR',opts.datadir,os.path.join(prefix,'share'))
config_dir('INCLUDEDIR',opts.includedir,os.path.join(prefix,'include'))
config_dir('LIBDIR',opts.libdir,os.path.join(prefix,'lib'))
config_dir('MANDIR',opts.mandir,os.path.join(conf.env['DATADIR'],'man'))
config_dir('DOCDIR',opts.docdir,os.path.join(conf.env['DATADIR'],'doc'))
if Options.options.lv2dir:
conf.env['LV2DIR']=Options.options.lv2dir
elif Options.options.lv2_user:
if sys.platform=="darwin":
conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'Library/Audio/Plug-Ins/LV2')
elif sys.platform=="win32":
conf.env['LV2DIR']=os.path.join(os.getenv('APPDATA'),'LV2')
else:
conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'.lv2')
elif Options.options.lv2_system:
if sys.platform=="darwin":
conf.env['LV2DIR']='/Library/Audio/Plug-Ins/LV2'
elif sys.platform=="win32":
conf.env['LV2DIR']=os.path.join(os.getenv('COMMONPROGRAMFILES'),'LV2')
else:
conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2')
else:
conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2')
conf.env['LV2DIR']=normpath(conf.env['LV2DIR'])
if Options.options.docs:
doxygen=conf.find_program('doxygen')
if not doxygen:
conf.fatal("Doxygen is required to build with --docs")
dot=conf.find_program('dot')
if not dot:
conf.fatal("Graphviz (dot) is required to build with --docs")
if Options.options.debug:
if conf.env['MSVC_COMPILER']:
conf.env['CFLAGS']=['/Od','/Zi','/MTd']
conf.env['CXXFLAGS']=['/Od','/Zi','/MTd']
conf.env['LINKFLAGS']=['/DEBUG']
else:
conf.env['CFLAGS']=['-O0','-g']
conf.env['CXXFLAGS']=['-O0','-g']
else:
if conf.env['MSVC_COMPILER']:
conf.env['CFLAGS']=['/MD']
conf.env['CXXFLAGS']=['/MD']
append_cxx_flags(['-DNDEBUG'])
if Options.options.ultra_strict:
Options.options.strict=True
conf.env.append_value('CFLAGS',['-Wredundant-decls','-Wstrict-prototypes','-Wmissing-prototypes'])
if Options.options.strict:
conf.env.append_value('CFLAGS',['-pedantic','-Wshadow'])
conf.env.append_value('CXXFLAGS',['-ansi','-Wnon-virtual-dtor','-Woverloaded-virtual'])
append_cxx_flags(['-Wall','-Wcast-align','-Wextra','-Wmissing-declarations','-Wno-unused-parameter','-Wstrict-overflow','-Wundef','-Wwrite-strings','-fstrict-overflow'])
if not conf.check_cc(fragment='''
#ifndef __clang__
#error
#endif
int main() { return 0; }''',features='c',mandatory=False,execute=False,msg='Checking for clang'):
append_cxx_flags(['-Wlogical-op','-Wsuggest-attribute=noreturn','-Wunsafe-loop-optimizations'])
if not conf.env['MSVC_COMPILER']:
append_cxx_flags(['-fshow-column'])
conf.env.prepend_value('CFLAGS','-I'+os.path.abspath('.'))
conf.env.prepend_value('CXXFLAGS','-I'+os.path.abspath('.'))
display_msg(conf,"Install prefix",conf.env['PREFIX'])
display_msg(conf,"Debuggable build",str(conf.env['DEBUG']))
display_msg(conf,"Build documentation",str(conf.env['DOCS']))
print('')
g_step=2
def set_c99_mode(conf):
if conf.env.MSVC_COMPILER:
conf.env.append_unique('CFLAGS',['-TP'])
else:
conf.env.append_unique('CFLAGS',['-std=c99'])
def set_local_lib(conf,name,has_objects):
var_name='HAVE_'+nameify(name.upper())
define(conf,var_name,1)
if has_objects:
if type(conf.env['AUTOWAF_LOCAL_LIBS'])!=dict:
conf.env['AUTOWAF_LOCAL_LIBS']={}
conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()]=True
else:
if type(conf.env['AUTOWAF_LOCAL_HEADERS'])!=dict:
conf.env['AUTOWAF_LOCAL_HEADERS']={}
conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()]=True
def append_property(obj,key,val):
if hasattr(obj,key):
setattr(obj,key,getattr(obj,key)+val)
else:
setattr(obj,key,val)
def use_lib(bld,obj,libs):
abssrcdir=os.path.abspath('.')
libs_list=libs.split()
for l in libs_list:
in_headers=l.lower()in bld.env['AUTOWAF_LOCAL_HEADERS']
in_libs=l.lower()in bld.env['AUTOWAF_LOCAL_LIBS']
if in_libs:
append_property(obj,'use',' lib%s '%l.lower())
append_property(obj,'framework',bld.env['FRAMEWORK_'+l])
if in_headers or in_libs:
inc_flag='-iquote '+os.path.join(abssrcdir,l.lower())
for f in['CFLAGS','CXXFLAGS']:
if not inc_flag in bld.env[f]:
bld.env.prepend_value(f,inc_flag)
else:
append_property(obj,'uselib',' '+l)
@feature('c','cxx')
@before('apply_link')
def version_lib(self):
if sys.platform=='win32':
self.vnum=None
if self.env['PARDEBUG']:
applicable=['cshlib','cxxshlib','cstlib','cxxstlib']
if[x for x in applicable if x in self.features]:
self.target=self.target+'D'
def set_lib_env(conf,name,version):
'Set up environment for local library as if found via pkg-config.'
NAME=name.upper()
major_ver=version.split('.')[0]
pkg_var_name='PKG_'+name.replace('-','_')+'_'+major_ver
lib_name='%s-%s'%(name,major_ver)
if conf.env.PARDEBUG:
lib_name+='D'
conf.env[pkg_var_name]=lib_name
conf.env['INCLUDES_'+NAME]=['${INCLUDEDIR}/%s-%s'%(name,major_ver)]
conf.env['LIBPATH_'+NAME]=[conf.env.LIBDIR]
conf.env['LIB_'+NAME]=[lib_name]
def display_header(title):
Logs.pprint('BOLD',title)
def display_msg(conf,msg,status=None,color=None):
color='CYAN'
if type(status)==bool and status or status=="True":
color='GREEN'
elif type(status)==bool and not status or status=="False":
color='YELLOW'
Logs.pprint('BOLD'," *",sep='')
Logs.pprint('NORMAL',"%s"%msg.ljust(conf.line_just-3),sep='')
Logs.pprint('BOLD',":",sep='')
Logs.pprint(color,status)
def link_flags(env,lib):
return' '.join(map(lambda x:env['LIB_ST']%x,env['LIB_'+lib]))
def compile_flags(env,lib):
return' '.join(map(lambda x:env['CPPPATH_ST']%x,env['INCLUDES_'+lib]))
def set_recursive():
global g_is_child
g_is_child=True
def is_child():
global g_is_child
return g_is_child
def build_pc(bld,name,version,version_suffix,libs,subst_dict={}):
'''Build a pkg-config file for a library.
name -- uppercase variable name (e.g. 'SOMENAME')
version -- version string (e.g. '1.2.3')
version_suffix -- name version suffix (e.g. '2')
libs -- string/list of dependencies (e.g. 'LIBFOO GLIB')
'''
pkg_prefix=bld.env['PREFIX']
if pkg_prefix[-1]=='/':
pkg_prefix=pkg_prefix[:-1]
target=name.lower()
if version_suffix!='':
target+='-'+version_suffix
if bld.env['PARDEBUG']:
target+='D'
target+='.pc'
libdir=bld.env['LIBDIR']
if libdir.startswith(pkg_prefix):
libdir=libdir.replace(pkg_prefix,'${exec_prefix}')
includedir=bld.env['INCLUDEDIR']
if includedir.startswith(pkg_prefix):
includedir=includedir.replace(pkg_prefix,'${prefix}')
obj=bld(features='subst',source='%s.pc.in'%name.lower(),target=target,install_path=os.path.join(bld.env['LIBDIR'],'pkgconfig'),exec_prefix='${prefix}',PREFIX=pkg_prefix,EXEC_PREFIX='${prefix}',LIBDIR=libdir,INCLUDEDIR=includedir)
if type(libs)!=list:
libs=libs.split()
subst_dict[name+'_VERSION']=version
subst_dict[name+'_MAJOR_VERSION']=version[0:version.find('.')]
for i in libs:
subst_dict[i+'_LIBS']=link_flags(bld.env,i)
lib_cflags=compile_flags(bld.env,i)
if lib_cflags=='':
lib_cflags=' '
subst_dict[i+'_CFLAGS']=lib_cflags
obj.__dict__.update(subst_dict)
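# Example sketch: build_pc(bld,'FOO','1.2.3','2','GLIB') substitutes
# FOO_VERSION, FOO_MAJOR_VERSION, GLIB_LIBS and GLIB_CFLAGS into foo.pc.in and
# installs the result as foo-2.pc (foo-2D.pc for PARDEBUG builds) under
# LIBDIR/pkgconfig.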
def build_dir(name,subdir):
if is_child():
return os.path.join('build',name,subdir)
else:
return os.path.join('build',subdir)
def make_simple_dox(name):
name=name.lower()
NAME=name.upper()
try:
top=os.getcwd()
os.chdir(build_dir(name,'doc/html'))
page='group__%s.html'%name
if not os.path.exists(page):
return
for i in[['%s_API '%NAME,''],['%s_DEPRECATED '%NAME,''],['group__%s.html'%name,''],[' ',''],['<script.*><\/script>',''],['<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>',''],['<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',''],['<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>','Doxygen']]:
os.system("sed -i 's/%s/%s/g' %s"%(i[0],i[1],page))
os.rename('group__%s.html'%name,'index.html')
for i in(glob.glob('*.png')+glob.glob('*.html')+glob.glob('*.js')+glob.glob('*.css')):
if i!='index.html'and i!='style.css':
os.remove(i)
os.chdir(top)
os.chdir(build_dir(name,'doc/man/man3'))
for i in glob.glob('*.3'):
os.system("sed -i 's/%s_API //' %s"%(NAME,i))
for i in glob.glob('_*'):
os.remove(i)
os.chdir(top)
	except Exception,e:
Logs.error("Failed to fix up %s documentation: %s"%(name,e))
def build_dox(bld,name,version,srcdir,blddir,outdir='',versioned=True):
if not bld.env['DOCS']:
return
if is_child():
src_dir=os.path.join(srcdir,name.lower())
doc_dir=os.path.join(blddir,name.lower(),'doc')
else:
src_dir=srcdir
doc_dir=os.path.join(blddir,'doc')
subst_tg=bld(features='subst',source='doc/reference.doxygen.in',target='doc/reference.doxygen',install_path='',name='doxyfile')
subst_dict={name+'_VERSION':version,name+'_SRCDIR':os.path.abspath(src_dir),name+'_DOC_DIR':os.path.abspath(doc_dir)}
subst_tg.__dict__.update(subst_dict)
subst_tg.post()
docs=bld(features='doxygen',doxyfile='doc/reference.doxygen')
docs.post()
outname=name.lower()
if versioned:
outname+='-%d'%int(version[0:version.find('.')])
bld.install_files(os.path.join('${DOCDIR}',outname,outdir,'html'),bld.path.get_bld().ant_glob('doc/html/*'))
for i in range(1,8):
bld.install_files('${MANDIR}/man%d'%i,bld.path.get_bld().ant_glob('doc/man/man%d/*'%i,excl='**/_*'))
def build_version_files(header_path,source_path,domain,major,minor,micro):
header_path=os.path.abspath(header_path)
source_path=os.path.abspath(source_path)
text="int "+domain+"_major_version = "+str(major)+";\n"
text+="int "+domain+"_minor_version = "+str(minor)+";\n"
text+="int "+domain+"_micro_version = "+str(micro)+";\n"
try:
o=open(source_path,'w')
o.write(text)
o.close()
except IOError:
Logs.error('Failed to open %s for writing\n'%source_path)
sys.exit(-1)
text="#ifndef __"+domain+"_version_h__\n"
text+="#define __"+domain+"_version_h__\n"
text+="extern const char* "+domain+"_revision;\n"
text+="extern int "+domain+"_major_version;\n"
text+="extern int "+domain+"_minor_version;\n"
text+="extern int "+domain+"_micro_version;\n"
text+="#endif /* __"+domain+"_version_h__ */\n"
try:
o=open(header_path,'w')
o.write(text)
o.close()
except IOError:
Logs.warn('Failed to open %s for writing\n'%header_path)
sys.exit(-1)
return None
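# Example sketch: build_version_files('foo_version.h','foo_version.c','foo',1,2,3)
# writes a C source defining foo_major_version = 1 (plus minor/micro) and a
# header with matching extern declarations inside an include guard.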
def build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder=None):
Logs.info('Generating pot file from %s'%name)
pot_file='%s.pot'%name
cmd=['xgettext','--keyword=_','--keyword=N_','--keyword=S_','--from-code=UTF-8','-o',pot_file]
if copyright_holder:
cmd+=['--copyright-holder="%s"'%copyright_holder]
cmd+=sources
Logs.info('Updating '+pot_file)
subprocess.call(cmd,cwd=os.path.join(srcdir,dir))
def build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder=None):
pwd=os.getcwd()
os.chdir(os.path.join(srcdir,dir))
pot_file='%s.pot'%name
po_files=glob.glob('po/*.po')
for po_file in po_files:
cmd=['msgmerge','--update',po_file,pot_file]
Logs.info('Updating '+po_file)
subprocess.call(cmd)
os.chdir(pwd)
def build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder=None):
pwd=os.getcwd()
os.chdir(os.path.join(srcdir,dir))
pot_file='%s.pot'%name
po_files=glob.glob('po/*.po')
for po_file in po_files:
mo_file=po_file.replace('.po','.mo')
cmd=['msgfmt','-c','-f','-o',mo_file,po_file]
Logs.info('Generating '+po_file)
subprocess.call(cmd)
os.chdir(pwd)
def build_i18n(bld,srcdir,dir,name,sources,copyright_holder=None):
build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder)
build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder)
build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder)
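# Pipeline sketch: build_i18n chains the three steps above - xgettext extracts
# a fresh <name>.pot, msgmerge folds it into each po/*.po, and msgfmt compiles
# those .po files into .mo catalogs alongside them.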
def cd_to_build_dir(ctx,appname):
orig_dir=os.path.abspath(os.curdir)
top_level=(len(ctx.stack_path)>1)
if top_level:
os.chdir(os.path.join('build',appname))
else:
os.chdir('build')
Logs.pprint('GREEN',"Waf: Entering directory `%s'"%os.path.abspath(os.getcwd()))
def cd_to_orig_dir(ctx,child):
if child:
os.chdir(os.path.join('..','..'))
else:
os.chdir('..')
def pre_test(ctx,appname,dirs=['src']):
diropts=''
for i in dirs:
diropts+=' -d '+i
cd_to_build_dir(ctx,appname)
clear_log=open('lcov-clear.log','w')
try:
try:
subprocess.call(('lcov %s -z'%diropts).split(),stdout=clear_log,stderr=clear_log)
except:
Logs.warn('Failed to run lcov, no coverage report will be generated')
finally:
clear_log.close()
def post_test(ctx,appname,dirs=['src'],remove=['*boost*','c++*']):
diropts=''
for i in dirs:
diropts+=' -d '+i
coverage_log=open('lcov-coverage.log','w')
coverage_lcov=open('coverage.lcov','w')
coverage_stripped_lcov=open('coverage-stripped.lcov','w')
try:
try:
base='.'
if g_is_child:
base='..'
subprocess.call(('lcov -c %s -b %s'%(diropts,base)).split(),stdout=coverage_lcov,stderr=coverage_log)
subprocess.call(['lcov','--remove','coverage.lcov']+remove,stdout=coverage_stripped_lcov,stderr=coverage_log)
if not os.path.isdir('coverage'):
os.makedirs('coverage')
subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),stdout=coverage_log,stderr=coverage_log)
except:
Logs.warn('Failed to run lcov, no coverage report will be generated')
finally:
coverage_stripped_lcov.close()
coverage_lcov.close()
coverage_log.close()
print('')
Logs.pprint('GREEN',"Waf: Leaving directory `%s'"%os.path.abspath(os.getcwd()))
top_level=(len(ctx.stack_path)>1)
if top_level:
cd_to_orig_dir(ctx,top_level)
print('')
Logs.pprint('BOLD','Coverage:',sep='')
print('<file://%s>\n\n'%os.path.abspath('coverage/index.html'))
def run_test(ctx,appname,test,desired_status=0,dirs=['src'],name='',header=False):
s=test
if type(test)==type([]):
		s=' '.join(test)
if header:
Logs.pprint('BOLD','** Test',sep='')
Logs.pprint('NORMAL','%s'%s)
cmd=test
if Options.options.grind:
cmd='valgrind '+test
if subprocess.call(cmd,shell=True)==desired_status:
Logs.pprint('GREEN','** Pass %s'%name)
return True
else:
Logs.pprint('RED','** FAIL %s'%name)
return False
def run_tests(ctx,appname,tests,desired_status=0,dirs=['src'],name='*',headers=False):
failures=0
diropts=''
for i in dirs:
diropts+=' -d '+i
for i in tests:
if not run_test(ctx,appname,i,desired_status,dirs,i,headers):
failures+=1
print('')
if failures==0:
Logs.pprint('GREEN','** Pass: All %s.%s tests passed'%(appname,name))
else:
Logs.pprint('RED','** FAIL: %d %s.%s tests failed'%(failures,appname,name))
def run_ldconfig(ctx):
if(ctx.cmd=='install'and not ctx.env['RAN_LDCONFIG']and ctx.env['LIBDIR']and not'DESTDIR'in os.environ and not Options.options.destdir):
try:
Logs.info("Waf: Running `/sbin/ldconfig %s'"%ctx.env['LIBDIR'])
subprocess.call(['/sbin/ldconfig',ctx.env['LIBDIR']])
ctx.env['RAN_LDCONFIG']=True
except:
pass
def write_news(name,in_files,out_file,top_entries=None,extra_entries=None):
import rdflib
import textwrap
from time import strftime,strptime
doap=rdflib.Namespace('http://usefulinc.com/ns/doap#')
dcs=rdflib.Namespace('http://ontologi.es/doap-changeset#')
rdfs=rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
foaf=rdflib.Namespace('http://xmlns.com/foaf/0.1/')
rdf=rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
m=rdflib.ConjunctiveGraph()
try:
for i in in_files:
m.parse(i,format='n3')
except:
Logs.warn('Error parsing data, unable to generate NEWS')
return
proj=m.value(None,rdf.type,doap.Project)
for f in m.triples([proj,rdfs.seeAlso,None]):
if f[2].endswith('.ttl'):
m.parse(f[2],format='n3')
entries={}
for r in m.triples([proj,doap.release,None]):
release=r[2]
revision=m.value(release,doap.revision,None)
date=m.value(release,doap.created,None)
blamee=m.value(release,dcs.blame,None)
changeset=m.value(release,dcs.changeset,None)
dist=m.value(release,doap['file-release'],None)
if revision and date and blamee and changeset:
entry='%s (%s) stable;\n'%(name,revision)
for i in m.triples([changeset,dcs.item,None]):
item=textwrap.wrap(m.value(i[2],rdfs.label,None),width=79)
entry+='\n * '+'\n '.join(item)
if dist and top_entries is not None:
if not str(dist)in top_entries:
top_entries[str(dist)]=[]
top_entries[str(dist)]+=['%s: %s'%(name,'\n '.join(item))]
if extra_entries:
for i in extra_entries[str(dist)]:
entry+='\n * '+i
entry+='\n\n --'
blamee_name=m.value(blamee,foaf.name,None)
blamee_mbox=m.value(blamee,foaf.mbox,None)
if blamee_name and blamee_mbox:
entry+=' %s <%s>'%(blamee_name,blamee_mbox.replace('mailto:',''))
entry+=' %s\n\n'%(strftime('%a, %d %b %Y %H:%M:%S +0000',strptime(date,'%Y-%m-%d')))
entries[revision]=entry
else:
Logs.warn('Ignored incomplete %s release description'%name)
if len(entries)>0:
news=open(out_file,'w')
for e in sorted(entries.keys(),reverse=True):
news.write(entries[e])
news.close()
|
gpl-2.0
|
codecollision/DropboxToFlickr
|
django/contrib/formtools/utils.py
|
245
|
2158
|
try:
import cPickle as pickle
except ImportError:
import pickle
from django.conf import settings
from django.forms import BooleanField
from django.utils.crypto import salted_hmac
from django.utils.hashcompat import md5_constructor
def security_hash(request, form, *args):
"""
Calculates a security hash for the given Form instance.
This creates a list of the form field names/values in a deterministic
order, pickles the result with the SECRET_KEY setting, then takes an md5
hash of that.
"""
import warnings
warnings.warn("security_hash is deprecated; use form_hmac instead",
PendingDeprecationWarning)
data = []
for bf in form:
# Get the value from the form data. If the form allows empty or hasn't
        # changed then don't call clean() to avoid triggering validation errors.
if form.empty_permitted and not form.has_changed():
value = bf.data or ''
else:
value = bf.field.clean(bf.data) or ''
if isinstance(value, basestring):
value = value.strip()
data.append((bf.name, value))
data.extend(args)
data.append(settings.SECRET_KEY)
# Use HIGHEST_PROTOCOL because it's the most efficient. It requires
# Python 2.3, but Django requires 2.4 anyway, so that's OK.
pickled = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
return md5_constructor(pickled).hexdigest()
def form_hmac(form):
"""
Calculates a security hash for the given Form instance.
"""
data = []
for bf in form:
# Get the value from the form data. If the form allows empty or hasn't
        # changed then don't call clean() to avoid triggering validation errors.
if form.empty_permitted and not form.has_changed():
value = bf.data or ''
else:
value = bf.field.clean(bf.data) or ''
if isinstance(value, basestring):
value = value.strip()
data.append((bf.name, value))
pickled = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
key_salt = 'django.contrib.formtools'
return salted_hmac(key_salt, pickled).hexdigest()
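# Usage sketch (hypothetical form; requires configured Django settings, since
# salted_hmac reads SECRET_KEY):
if __name__ == '__main__':
    from django import forms
    class NameForm(forms.Form):
        name = forms.CharField()
    bound = NameForm({'name': 'alice'})
    bound.is_valid()
    print(form_hmac(bound))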
|
bsd-3-clause
|
qjw/grit-i18n
|
grit/test_suite_all.py
|
32
|
7518
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit test suite that collects all test cases for GRIT.'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import unittest
# TODO(joi) Use unittest.defaultTestLoader to automatically load tests
# from modules. Iterating over the directory and importing could then
# automate this all the way, if desired.
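# An illustrative sketch of that discovery approach (not used here; the
# pattern value is an assumption based on this repo's file naming):
#
#   suite = unittest.defaultTestLoader.discover(
#       os.path.dirname(__file__), pattern='*_unittest*.py')
#
# The explicit import list below keeps the suite contents deterministic.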
class TestSuiteAll(unittest.TestSuite):
def __init__(self):
super(TestSuiteAll, self).__init__()
# Imports placed here to prevent circular imports.
# pylint: disable-msg=C6204
import grit.clique_unittest
import grit.grd_reader_unittest
import grit.grit_runner_unittest
import grit.lazy_re_unittest
import grit.shortcuts_unittests
import grit.tclib_unittest
import grit.util_unittest
import grit.xtb_reader_unittest
import grit.format.android_xml_unittest
import grit.format.c_format_unittest
import grit.format.chrome_messages_json_unittest
import grit.format.data_pack_unittest
import grit.format.html_inline_unittest
import grit.format.js_map_format_unittest
import grit.format.rc_header_unittest
import grit.format.rc_unittest
import grit.format.resource_map_unittest
import grit.format.policy_templates.policy_template_generator_unittest
import grit.format.policy_templates.writers.adm_writer_unittest
import grit.format.policy_templates.writers.adml_writer_unittest
import grit.format.policy_templates.writers.admx_writer_unittest
import grit.format.policy_templates.writers.android_policy_writer_unittest
import grit.format.policy_templates.writers.doc_writer_unittest
import grit.format.policy_templates.writers.ios_plist_writer_unittest
import grit.format.policy_templates.writers.json_writer_unittest
import grit.format.policy_templates.writers.plist_strings_writer_unittest
import grit.format.policy_templates.writers.plist_writer_unittest
import grit.format.policy_templates.writers.reg_writer_unittest
import grit.format.policy_templates.writers.template_writer_unittest
import grit.format.policy_templates.writers.xml_writer_base_unittest
import grit.gather.admin_template_unittest
import grit.gather.chrome_html_unittest
import grit.gather.chrome_scaled_image_unittest
import grit.gather.igoogle_strings_unittest
import grit.gather.muppet_strings_unittest
import grit.gather.policy_json_unittest
import grit.gather.rc_unittest
import grit.gather.tr_html_unittest
import grit.gather.txt_unittest
import grit.node.base_unittest
import grit.node.io_unittest
import grit.node.include_unittest
import grit.node.message_unittest
import grit.node.misc_unittest
    import grit.node.structure_unittest
import grit.node.custom.filename_unittest
import grit.tool.android2grd_unittest
import grit.tool.build_unittest
import grit.tool.buildinfo_unittest
import grit.tool.postprocess_unittest
import grit.tool.preprocess_unittest
import grit.tool.rc2grd_unittest
import grit.tool.transl2tc_unittest
import grit.tool.xmb_unittest
test_classes = [
grit.clique_unittest.MessageCliqueUnittest,
grit.grd_reader_unittest.GrdReaderUnittest,
grit.grit_runner_unittest.OptionArgsUnittest,
grit.lazy_re_unittest.LazyReUnittest,
grit.shortcuts_unittests.ShortcutsUnittest,
grit.tclib_unittest.TclibUnittest,
grit.util_unittest.UtilUnittest,
grit.xtb_reader_unittest.XtbReaderUnittest,
grit.format.android_xml_unittest.AndroidXmlUnittest,
grit.format.c_format_unittest.CFormatUnittest,
grit.format.chrome_messages_json_unittest.
ChromeMessagesJsonFormatUnittest,
grit.format.data_pack_unittest.FormatDataPackUnittest,
grit.format.html_inline_unittest.HtmlInlineUnittest,
grit.format.js_map_format_unittest.JsMapFormatUnittest,
grit.format.rc_header_unittest.RcHeaderFormatterUnittest,
grit.format.rc_unittest.FormatRcUnittest,
grit.format.resource_map_unittest.FormatResourceMapUnittest,
grit.format.policy_templates.policy_template_generator_unittest.
PolicyTemplateGeneratorUnittest,
grit.format.policy_templates.writers.adm_writer_unittest.
AdmWriterUnittest,
grit.format.policy_templates.writers.adml_writer_unittest.
AdmlWriterUnittest,
grit.format.policy_templates.writers.admx_writer_unittest.
AdmxWriterUnittest,
grit.format.policy_templates.writers.android_policy_writer_unittest.
AndroidPolicyWriterUnittest,
grit.format.policy_templates.writers.doc_writer_unittest.
DocWriterUnittest,
grit.format.policy_templates.writers.ios_plist_writer_unittest.
IOSPListWriterUnittest,
grit.format.policy_templates.writers.json_writer_unittest.
JsonWriterUnittest,
grit.format.policy_templates.writers.plist_strings_writer_unittest.
PListStringsWriterUnittest,
grit.format.policy_templates.writers.plist_writer_unittest.
PListWriterUnittest,
grit.format.policy_templates.writers.reg_writer_unittest.
RegWriterUnittest,
grit.format.policy_templates.writers.template_writer_unittest.
TemplateWriterUnittests,
grit.format.policy_templates.writers.xml_writer_base_unittest.
XmlWriterBaseTest,
grit.gather.admin_template_unittest.AdmGathererUnittest,
grit.gather.chrome_html_unittest.ChromeHtmlUnittest,
grit.gather.chrome_scaled_image_unittest.ChromeScaledImageUnittest,
grit.gather.igoogle_strings_unittest.IgoogleStringsUnittest,
grit.gather.muppet_strings_unittest.MuppetStringsUnittest,
grit.gather.policy_json_unittest.PolicyJsonUnittest,
grit.gather.rc_unittest.RcUnittest,
grit.gather.tr_html_unittest.ParserUnittest,
grit.gather.tr_html_unittest.TrHtmlUnittest,
grit.gather.txt_unittest.TxtUnittest,
grit.node.base_unittest.NodeUnittest,
grit.node.io_unittest.FileNodeUnittest,
grit.node.include_unittest.IncludeNodeUnittest,
grit.node.message_unittest.MessageUnittest,
grit.node.misc_unittest.GritNodeUnittest,
grit.node.misc_unittest.IfNodeUnittest,
grit.node.misc_unittest.ReleaseNodeUnittest,
grit.node.structure_unittest.StructureUnittest,
grit.node.custom.filename_unittest.WindowsFilenameUnittest,
grit.tool.android2grd_unittest.Android2GrdUnittest,
grit.tool.build_unittest.BuildUnittest,
grit.tool.buildinfo_unittest.BuildInfoUnittest,
grit.tool.postprocess_unittest.PostProcessingUnittest,
grit.tool.preprocess_unittest.PreProcessingUnittest,
grit.tool.rc2grd_unittest.Rc2GrdUnittest,
grit.tool.transl2tc_unittest.TranslationToTcUnittest,
grit.tool.xmb_unittest.XmbUnittest,
# add test classes here, in alphabetical order...
]
for test_class in test_classes:
self.addTest(unittest.makeSuite(test_class))
if __name__ == '__main__':
test_result = unittest.TextTestRunner(verbosity=2).run(TestSuiteAll())
sys.exit(len(test_result.errors) + len(test_result.failures))
|
bsd-2-clause
|
jstriebel/webcam-effects
|
pipes/dank_pipe.py
|
1
|
1040
|
from pipes.pipe import Pipe
import numpy as np
import cv2
class DankPipe(Pipe):
    def pipe(self, data):
        data = cv2.blur(data, (8, 8))
        # compare in a wider integer type; uint8 arithmetic would wrap on + 10
        chan = data.astype(np.int16)
        # start from the input so pixels matching no rule keep their original
        # color instead of uninitialized memory
        result = data.astype(np.float64)
        # define colors
        blue = [255, 0, 0]
        green = [0, 255, 0]
        red = [0, 0, 255]
        # replace blue-ish pixels with green
        result[np.logical_and(chan[:, :, 0] > chan[:, :, 1] + 10,
                              chan[:, :, 0] > chan[:, :, 2] + 10)] = green
        # replace green-ish pixels with red
        result[np.logical_and(chan[:, :, 1] > chan[:, :, 0] + 10,
                              chan[:, :, 1] > chan[:, :, 2] + 10)] = red
        # replace red-ish pixels with blue
        result[np.logical_and(chan[:, :, 2] > chan[:, :, 1] + 10,
                              chan[:, :, 2] > chan[:, :, 0] + 10)] = blue
        return result
def __enter__(self):
return self
def __exit__(self, exit_type, value, traceback):
pass
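# Illustrative usage (a sketch; the webcam index and import path are
# assumptions about the surrounding project):
#
#   import cv2
#   from pipes.dank_pipe import DankPipe
#   cap = cv2.VideoCapture(0)
#   ok, frame = cap.read()
#   if ok:
#       with DankPipe() as p:
#           out = p.pipe(frame)   # blue-ish -> green, green-ish -> red, red-ish -> blue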
|
mit
|
agrista/odoo-saas
|
addons/website_blog/wizard/__init__.py
|
373
|
1077
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import document_page_show_diff
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
TheTimmy/spack
|
var/spack/repos/builtin/packages/msgpack-c/package.py
|
3
|
1585
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class MsgpackC(CMakePackage):
"""A small, fast binary interchange format convertible to/from JSON"""
homepage = "http://www.msgpack.org"
url = "https://github.com/msgpack/msgpack-c/archive/cpp-1.4.1.tar.gz"
version('1.4.1', 'e2fd3a7419b9bc49e5017fdbefab87e0')
depends_on('[email protected]:', type='build')
|
lgpl-2.1
|
immesys/RiSyn
|
dist/tools/mcuboot/imgtool/version.py
|
50
|
1149
|
"""
Semi Semantic Versioning
Implements a subset of semantic versioning that is supportable by the image header.
"""
import argparse
from collections import namedtuple
import re
SemiSemVersion = namedtuple('SemiSemVersion', ['major', 'minor', 'revision', 'build'])
version_re = re.compile(r"""^([1-9]\d*|0)(\.([1-9]\d*|0)(\.([1-9]\d*|0)(\+([1-9]\d*|0))?)?)?$""")
def decode_version(text):
"""Decode the version string, which should be of the form maj.min.rev+build"""
m = version_re.match(text)
# print("decode:", text, m.groups())
if m:
result = SemiSemVersion(
int(m.group(1)) if m.group(1) else 0,
int(m.group(3)) if m.group(3) else 0,
int(m.group(5)) if m.group(5) else 0,
int(m.group(7)) if m.group(7) else 0)
return result
else:
msg = "Invalid version number, should be maj.min.rev+build with later parts optional"
raise argparse.ArgumentTypeError(msg)
if __name__ == '__main__':
print(decode_version("1.2"))
print(decode_version("1.0"))
print(decode_version("0.0.2+75"))
print(decode_version("0.0.0+00"))
|
lgpl-2.1
|
cnewcome/sos
|
sos/archive.py
|
1
|
15414
|
# Copyright (C) 2012 Red Hat, Inc.,
# Jesse Jaggars <[email protected]>
# Bryn M. Reeves <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import os
import time
import tarfile
import shutil
import logging
import shlex
import re
import codecs
import sys
import errno
# required for compression callout (FIXME: move to policy?)
from subprocess import Popen, PIPE
from sos.utilities import sos_get_command_output, is_executable
try:
import selinux
except ImportError:
pass
# PYCOMPAT
import six
if six.PY3:
long = int
class Archive(object):
"""Abstract base class for archives."""
@classmethod
def archive_type(cls):
"""Returns the archive class's name as a string.
"""
return cls.__name__
log = logging.getLogger("sos")
_name = "unset"
_debug = False
def _format_msg(self, msg):
return "[archive:%s] %s" % (self.archive_type(), msg)
def set_debug(self, debug):
self._debug = debug
def log_error(self, msg):
self.log.error(self._format_msg(msg))
def log_warn(self, msg):
self.log.warning(self._format_msg(msg))
def log_info(self, msg):
self.log.info(self._format_msg(msg))
def log_debug(self, msg):
if not self._debug:
return
self.log.debug(self._format_msg(msg))
# this is our contract to clients of the Archive class hierarchy.
    # All sub-classes need to implement these methods (or inherit concrete
    # implementations from a parent class).
def add_file(self, src, dest=None):
raise NotImplementedError
def add_string(self, content, dest):
raise NotImplementedError
def add_link(self, source, link_name):
raise NotImplementedError
def add_dir(self, path):
raise NotImplementedError
def add_node(self, path, mode, device):
raise NotImplementedError
def get_tmp_dir(self):
"""Return a temporary directory that clients of the archive may
use to write content to. The content of the path is guaranteed
to be included in the generated archive."""
raise NotImplementedError
def name_max(self):
"""Return the maximum file name length this archive can support.
This is the lesser of the name length limit of the archive
format and any temporary file system based cache."""
raise NotImplementedError
def get_archive_path(self):
"""Return a string representing the path to the temporary
archive. For archive classes that implement in-line handling
this will be the archive file itself. Archives that use a
directory based cache prior to packaging should return the
path to the temporary directory where the report content is
located"""
pass
def cleanup(self):
"""Clean up any temporary resources used by an Archive class."""
pass
def finalize(self, method):
"""Finalize an archive object via method. This may involve creating
An archive that is subsequently compressed or simply closing an
archive that supports in-line handling. If method is automatic then
the following methods are tried in order: xz, bz2 and gzip"""
self.close()
class FileCacheArchive(Archive):
""" Abstract superclass for archive types that use a temporary cache
directory in the file system. """
_tmp_dir = ""
_archive_root = ""
_archive_name = ""
def __init__(self, name, tmpdir):
self._name = name
self._tmp_dir = tmpdir
self._archive_root = os.path.join(tmpdir, name)
os.makedirs(self._archive_root, 0o700)
self.log_info("initialised empty FileCacheArchive at '%s'" %
(self._archive_root,))
def dest_path(self, name):
if os.path.isabs(name):
name = name.lstrip(os.sep)
return (os.path.join(self._archive_root, name))
def _check_path(self, dest):
dest_dir = os.path.split(dest)[0]
if not dest_dir:
return
if not os.path.isdir(dest_dir):
self._makedirs(dest_dir)
def add_file(self, src, dest=None):
if not dest:
dest = src
dest = self.dest_path(dest)
self._check_path(dest)
        # Handle adding a file from either a string representing
# a path, or a File object open for reading.
if not getattr(src, "read", None):
# path case
try:
shutil.copy(src, dest)
except IOError as e:
self.log_info("caught '%s' copying '%s'" % (e, src))
try:
shutil.copystat(src, dest)
except OSError:
# SELinux xattrs in /proc and /sys throw this
pass
try:
stat = os.stat(src)
os.chown(dest, stat.st_uid, stat.st_gid)
except Exception as e:
self.log_debug("caught '%s' setting ownership of '%s'"
% (e, dest))
file_name = "'%s'" % src
else:
# Open file case: first rewind the file to obtain
# everything written to it.
src.seek(0)
with open(dest, "w") as f:
for line in src:
f.write(line)
file_name = "open file"
self.log_debug("added %s to FileCacheArchive '%s'" %
(file_name, self._archive_root))
def add_string(self, content, dest):
src = dest
dest = self.dest_path(dest)
self._check_path(dest)
        if isinstance(content, bytes):
            content = content.decode('utf8', 'ignore')
        # use a context manager so the destination file is always closed
        with codecs.open(dest, 'w', encoding='utf-8') as f:
            f.write(content)
if os.path.exists(src):
try:
shutil.copystat(src, dest)
except OSError as e:
self.log_error(
"Unable to add '%s' to FileCacheArchive: %s" % (dest, e))
self.log_debug("added string at '%s' to FileCacheArchive '%s'"
% (src, self._archive_root))
def add_link(self, source, link_name):
dest = self.dest_path(link_name)
self._check_path(dest)
if not os.path.lexists(dest):
os.symlink(source, dest)
self.log_debug("added symlink at '%s' to '%s' in FileCacheArchive '%s'"
% (dest, source, self._archive_root))
def add_dir(self, path):
self.makedirs(path)
def add_node(self, path, mode, device):
dest = self.dest_path(path)
self._check_path(dest)
if not os.path.exists(dest):
try:
os.mknod(dest, mode, device)
except OSError as e:
if e.errno == errno.EPERM:
msg = "Operation not permitted"
self.log_info("add_node: %s - mknod '%s'" % (msg, dest))
return
raise e
shutil.copystat(path, dest)
def _makedirs(self, path, mode=0o700):
os.makedirs(path, mode)
def name_max(self):
if 'PC_NAME_MAX' in os.pathconf_names:
pc_name_max = os.pathconf_names['PC_NAME_MAX']
return os.pathconf(self._archive_root, pc_name_max)
else:
return 255
def get_tmp_dir(self):
return self._archive_root
def get_archive_path(self):
return self._archive_root
    def makedirs(self, path, mode=0o700):
        self._makedirs(self.dest_path(path), mode)
self.log_debug("created directory at '%s' in FileCacheArchive '%s'"
% (path, self._archive_root))
def open_file(self, path):
path = self.dest_path(path)
return codecs.open(path, "r", encoding='utf-8')
def cleanup(self):
shutil.rmtree(self._archive_root)
def finalize(self, method):
self.log_info("finalizing archive '%s' using method '%s'"
% (self._archive_root, method))
self._build_archive()
self.cleanup()
self.log_info("built archive at '%s' (size=%d)" % (self._archive_name,
os.stat(self._archive_name).st_size))
self.method = method
try:
return self._compress()
except Exception as e:
exp_msg = "An error occurred compressing the archive: "
self.log_error("%s %s" % (exp_msg, e))
return self.name()
# Compatibility version of the tarfile.TarFile class. This exists to allow
# compatibility with PY2 runtimes that lack the 'filter' parameter to the
# TarFile.add() method. The wrapper class is used on python2.6 and earlier
# only; all later versions include 'filter' and the native TarFile class is
# used directly.
class _TarFile(tarfile.TarFile):
# Taken from the python 2.7.5 tarfile.py
def add(self, name, arcname=None, recursive=True,
exclude=None, filter=None):
"""Add the file `name' to the archive. `name' may be any type of file
(directory, fifo, symbolic link, etc.). If given, `arcname'
specifies an alternative name for the file in the archive.
Directories are added recursively by default. This can be avoided by
setting `recursive' to False. `exclude' is a function that should
return True for each filename to be excluded. `filter' is a function
that expects a TarInfo object argument and returns the changed
TarInfo object, if it returns None the TarInfo object will be
excluded from the archive.
"""
self._check("aw")
if arcname is None:
arcname = name
# Exclude pathnames.
if exclude is not None:
import warnings
warnings.warn("use the filter argument instead",
DeprecationWarning, 2)
if exclude(name):
self._dbg(2, "tarfile: Excluded %r" % name)
return
# Skip if somebody tries to archive the archive...
if self.name is not None and os.path.abspath(name) == self.name:
self._dbg(2, "tarfile: Skipped %r" % name)
return
self._dbg(1, name)
# Create a TarInfo object from the file.
tarinfo = self.gettarinfo(name, arcname)
if tarinfo is None:
self._dbg(1, "tarfile: Unsupported type %r" % name)
return
# Change or exclude the TarInfo object.
if filter is not None:
tarinfo = filter(tarinfo)
if tarinfo is None:
self._dbg(2, "tarfile: Excluded %r" % name)
return
# Append the tar header and data to the archive.
if tarinfo.isreg():
with tarfile.bltn_open(name, "rb") as f:
self.addfile(tarinfo, f)
elif tarinfo.isdir():
self.addfile(tarinfo)
if recursive:
for f in os.listdir(name):
self.add(os.path.join(name, f), os.path.join(arcname, f),
recursive, exclude, filter)
else:
self.addfile(tarinfo)
class TarFileArchive(FileCacheArchive):
""" archive class using python TarFile to create tar archives"""
method = None
_with_selinux_context = False
def __init__(self, name, tmpdir):
super(TarFileArchive, self).__init__(name, tmpdir)
self._suffix = "tar"
self._archive_name = os.path.join(tmpdir, self.name())
def set_tarinfo_from_stat(self, tar_info, fstat, mode=None):
tar_info.mtime = fstat.st_mtime
tar_info.pax_headers['atime'] = "%.9f" % fstat.st_atime
tar_info.pax_headers['ctime'] = "%.9f" % fstat.st_ctime
if mode:
tar_info.mode = mode
else:
tar_info.mode = fstat.st_mode
tar_info.uid = fstat.st_uid
tar_info.gid = fstat.st_gid
# this can be used to set permissions if using the
# tarfile.add() interface to add directory trees.
def copy_permissions_filter(self, tarinfo):
orig_path = tarinfo.name[len(os.path.split(self._name)[-1]):]
if not orig_path:
orig_path = self._archive_root
try:
fstat = os.stat(orig_path)
except OSError:
return tarinfo
if self._with_selinux_context:
context = self.get_selinux_context(orig_path)
if(context):
tarinfo.pax_headers['RHT.security.selinux'] = context
self.set_tarinfo_from_stat(tarinfo, fstat)
return tarinfo
def get_selinux_context(self, path):
try:
(rc, c) = selinux.getfilecon(path)
return c
        except Exception:  # selinux may be unavailable or getfilecon may fail
return None
def name(self):
return "%s.%s" % (self._name, self._suffix)
def name_max(self):
# GNU Tar format supports unlimited file name length. Just return
# the limit of the underlying FileCacheArchive.
return super(TarFileArchive, self).name_max()
def _build_archive(self):
# python2.6 TarFile lacks the filter parameter
if not six.PY3 and sys.version_info[1] < 7:
tar = _TarFile.open(self._archive_name, mode="w")
else:
tar = tarfile.open(self._archive_name, mode="w")
# we need to pass the absolute path to the archive root but we
# want the names used in the archive to be relative.
tar.add(self._archive_root, arcname=os.path.split(self._name)[1],
filter=self.copy_permissions_filter)
tar.close()
def _compress(self):
methods = []
# Make sure that valid compression commands exist.
for method in ['xz', 'bzip2', 'gzip']:
if is_executable(method):
methods.append(method)
else:
self.log_error("\"%s\" command not found." % method)
if self.method in methods:
methods = [self.method]
exp_msg = "No compression utilities found."
last_error = Exception(exp_msg)
for cmd in methods:
suffix = "." + cmd.replace('ip', '')
# use fast compression if using xz or bz2
if cmd != "gzip":
cmd = "%s -1" % cmd
try:
r = sos_get_command_output("%s %s" % (cmd, self.name()),
timeout=0)
if r['status']:
self.log_info(r['output'])
self._suffix += suffix
return self.name()
except Exception as e:
last_error = e
raise last_error
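# Illustrative usage (a sketch; the archive name and tmp dir are hypothetical):
#
#   archive = TarFileArchive('sosreport-demo', '/tmp')
#   archive.add_string('example content\n', 'notes/readme.txt')
#   name = archive.finalize('auto')   # builds the tar, then tries xz/bzip2/gzip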
# vim: set et ts=4 sw=4 :
|
gpl-2.0
|
Sylrob434/CouchPotatoServer
|
couchpotato/core/plugins/userscript/main.py
|
44
|
3074
|
import os
from couchpotato import index
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.variable import isDict
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from tornado.web import RequestHandler
log = CPLog(__name__)
class Userscript(Plugin):
version = 5
def __init__(self):
addApiView('userscript.get/(.*)/(.*)', self.getUserScript, static = True)
addApiView('userscript', self.iFrame)
addApiView('userscript.add_via_url', self.getViaUrl)
addApiView('userscript.includes', self.getIncludes)
addApiView('userscript.bookmark', self.bookmark)
addEvent('userscript.get_version', self.getVersion)
def bookmark(self, host = None, **kwargs):
params = {
'includes': fireEvent('userscript.get_includes', merge = True),
'excludes': fireEvent('userscript.get_excludes', merge = True),
'host': host,
}
return self.renderTemplate(__file__, 'bookmark.js_tmpl', **params)
def getIncludes(self, **kwargs):
return {
'includes': fireEvent('userscript.get_includes', merge = True),
'excludes': fireEvent('userscript.get_excludes', merge = True),
}
def getUserScript(self, script_route, **kwargs):
klass = self
class UserscriptHandler(RequestHandler):
def get(self, random, route):
params = {
'includes': fireEvent('userscript.get_includes', merge = True),
'excludes': fireEvent('userscript.get_excludes', merge = True),
'version': klass.getVersion(),
'api': '%suserscript/' % Env.get('api_base'),
'host': '%s://%s' % (self.request.protocol, self.request.headers.get('X-Forwarded-Host') or self.request.headers.get('host')),
}
script = klass.renderTemplate(__file__, 'template.js_tmpl', **params)
klass.createFile(os.path.join(Env.get('cache_dir'), 'couchpotato.user.js'), script)
self.redirect(Env.get('api_base') + 'file.cache/couchpotato.user.js')
Env.get('app').add_handlers(".*$", [('%s%s' % (Env.get('api_base'), script_route), UserscriptHandler)])
def getVersion(self):
versions = fireEvent('userscript.get_provider_version')
version = self.version
for v in versions:
version += v
return version
def iFrame(self, **kwargs):
return index()
def getViaUrl(self, url = None, **kwargs):
params = {
'url': url,
'movie': fireEvent('userscript.get_movie_via_url', url = url, single = True)
}
if not isDict(params['movie']):
log.error('Failed adding movie via url: %s', url)
params['error'] = params['movie'] if params['movie'] else 'Failed getting movie info'
return params
|
gpl-3.0
|
dyyi/moneybook
|
venv/Lib/site-packages/django/contrib/auth/base_user.py
|
41
|
4434
|
"""
This module allows importing AbstractBaseUser even when django.contrib.auth is
not in INSTALLED_APPS.
"""
from __future__ import unicode_literals
from django.contrib.auth import password_validation
from django.contrib.auth.hashers import (
check_password, is_password_usable, make_password,
)
from django.db import models
from django.utils.crypto import get_random_string, salted_hmac
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
class BaseUserManager(models.Manager):
@classmethod
def normalize_email(cls, email):
"""
Normalize the email address by lowercasing the domain part of it.
"""
email = email or ''
try:
email_name, domain_part = email.strip().rsplit('@', 1)
except ValueError:
pass
else:
email = '@'.join([email_name, domain_part.lower()])
return email
def make_random_password(self, length=10,
allowed_chars='abcdefghjkmnpqrstuvwxyz'
'ABCDEFGHJKLMNPQRSTUVWXYZ'
'23456789'):
"""
Generate a random password with the given length and given
allowed_chars. The default value of allowed_chars does not have "I" or
"O" or letters and digits that look similar -- just to avoid confusion.
"""
return get_random_string(length, allowed_chars)
def get_by_natural_key(self, username):
return self.get(**{self.model.USERNAME_FIELD: username})
@python_2_unicode_compatible
class AbstractBaseUser(models.Model):
password = models.CharField(_('password'), max_length=128)
last_login = models.DateTimeField(_('last login'), blank=True, null=True)
is_active = True
REQUIRED_FIELDS = []
class Meta:
abstract = True
def get_username(self):
"Return the identifying username for this User"
return getattr(self, self.USERNAME_FIELD)
def __init__(self, *args, **kwargs):
super(AbstractBaseUser, self).__init__(*args, **kwargs)
# Stores the raw password if set_password() is called so that it can
# be passed to password_changed() after the model is saved.
self._password = None
def __str__(self):
return self.get_username()
def save(self, *args, **kwargs):
super(AbstractBaseUser, self).save(*args, **kwargs)
if self._password is not None:
password_validation.password_changed(self._password, self)
self._password = None
def natural_key(self):
return (self.get_username(),)
def is_anonymous(self):
"""
Always return False. This is a way of comparing User objects to
anonymous users.
"""
return False
def is_authenticated(self):
"""
Always return True. This is a way to tell if the user has been
authenticated in templates.
"""
return True
def set_password(self, raw_password):
self.password = make_password(raw_password)
self._password = raw_password
def check_password(self, raw_password):
"""
Return a boolean of whether the raw_password was correct. Handles
hashing formats behind the scenes.
"""
def setter(raw_password):
self.set_password(raw_password)
# Password hash upgrades shouldn't be considered password changes.
self._password = None
self.save(update_fields=["password"])
return check_password(raw_password, self.password, setter)
def set_unusable_password(self):
# Set a value that will never be a valid hash
self.password = make_password(None)
def has_usable_password(self):
return is_password_usable(self.password)
def get_full_name(self):
raise NotImplementedError('subclasses of AbstractBaseUser must provide a get_full_name() method')
def get_short_name(self):
raise NotImplementedError('subclasses of AbstractBaseUser must provide a get_short_name() method.')
def get_session_auth_hash(self):
"""
Return an HMAC of the password field.
"""
key_salt = "django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash"
return salted_hmac(key_salt, self.password).hexdigest()
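# Illustrative subclass (a sketch; the model and field names are hypothetical):
#
#   class User(AbstractBaseUser):
#       email = models.EmailField(unique=True)
#       USERNAME_FIELD = 'email'
#       objects = BaseUserManager()
#
# get_username(), natural_key() and BaseUserManager.get_by_natural_key() then
# all key off the email field.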
|
apache-2.0
|
blairg23/Apocalypse-Defense
|
src/apocalypsedefense/workspace/ApocalypseDefenseJython/src/items.py
|
1
|
1677
|
'''
Author: Blair Gemmer
Purpose: Creates items in the game, specifically weapons
'''
#from pylab import *
#TODO: REMOVE ALL PRINT STATEMENTS FOR ANDROID IMPLEMENTATION
#TODO: Remove useless comments.
class Weapon():
def __init__(self, wType='Default Weapon Type', name='Default Weapon',
damage=10, rate=1, attackRange=10):
self.name = name
self.wType = wType
self.damage = damage
self.rate = rate
self.range = attackRange
def attack(self):
return self.damage
class Gun(Weapon):
def __init__(self, wType='Default Gun Type', name='Default Gun', damage=10,
rate=1, attackRange=300, clipSize=50, reloadRate=1):
Weapon.__init__(self, wType, name, damage, rate, attackRange)
self.clipSize = clipSize
self.reloadRate = reloadRate
self.currentAmmo = clipSize
def shoot(self):
ammoExpelled = 1/self.rate
self.currentAmmo -= ammoExpelled
if self.currentAmmo < 0: #Can't go below 0 bullets
self.currentAmmo = 0
self.reloadGun() #Reload the gun
return self.damage*ammoExpelled #Every shot hits
def reloadGun(self):
#NEED TO ADD RELOAD RATE
print 'RELOADING!'
self.currentAmmo = self.clipSize
class Armor():
def __init__(self, armorHP=100):
self.hp = armorHP
def takeDamage(self, damage):
self.hp = self.hp - damage
###Testing purposes:
##g = Gun(rate=1/5.)
##for t in range(0,100):
## print 'Current Ammo: ' + str(g.currentAmmo)
## g.shoot()
## if g.currentAmmo == 0:
## print 'RELOADING!'
## g.reloadGun()
##
|
mit
|
locusf/linux
|
tools/perf/scripts/python/sctop.py
|
1996
|
2102
|
# system call top
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n"
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
raw_syscalls__sys_enter(**locals())
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
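# Illustrative invocations (a sketch, matching the usage string above):
#
#   perf script -s sctop.py           # all comms, refresh every 3 seconds
#   perf script -s sctop.py bash 5    # only syscalls made by 'bash', every 5s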
|
gpl-2.0
|
sandeepkoduri/GAE-html-to-pdf
|
libs/reportlab/graphics/widgets/eventcal.py
|
6
|
13234
|
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/widgets/eventcal.py
# Event Calendar widget
# author: Andy Robinson
__version__='3.3.0'
__doc__="""This file is an experimental event calendar widget."""
from reportlab.lib import colors
from reportlab.lib.validators import *
from reportlab.lib.attrmap import *
from reportlab.graphics.shapes import Line, Rect, Polygon, Drawing, Group, String, Circle, Wedge
from reportlab.graphics.charts.textlabels import Label
from reportlab.graphics.widgetbase import Widget
from reportlab.graphics import renderPDF
class EventCalendar(Widget):
def __init__(self):
self.x = 0
self.y = 0
self.width = 300
self.height = 150
self.timeColWidth = None # if declared, use it; otherwise auto-size.
self.trackRowHeight = 20
self.data = [] # list of Event objects
self.trackNames = None
self.startTime = None #displays ALL data on day if not set
self.endTime = None # displays ALL data on day if not set
self.day = 0
# we will keep any internal geometry variables
# here. These are computed by computeSize(),
# which is the first thing done when drawing.
self._talksVisible = [] # subset of data which will get plotted, cache
self._startTime = None
self._endTime = None
self._trackCount = 0
self._colWidths = []
self._colLeftEdges = [] # left edge of each column
def computeSize(self):
"Called at start of draw. Sets various column widths"
self._talksVisible = self.getRelevantTalks(self.data)
self._trackCount = len(self.getAllTracks())
self.computeStartAndEndTimes()
self._colLeftEdges = [self.x]
if self.timeColWidth is None:
w = self.width / (1 + self._trackCount)
self._colWidths = [w] * (1+ self._trackCount)
for i in range(self._trackCount):
self._colLeftEdges.append(self._colLeftEdges[-1] + w)
else:
self._colWidths = [self.timeColWidth]
w = (self.width - self.timeColWidth) / self._trackCount
for i in range(self._trackCount):
self._colWidths.append(w)
self._colLeftEdges.append(self._colLeftEdges[-1] + w)
def computeStartAndEndTimes(self):
"Work out first and last times to display"
if self.startTime:
self._startTime = self.startTime
else:
for (title, speaker, trackId, day, start, duration) in self._talksVisible:
if self._startTime is None: #first one
self._startTime = start
else:
if start < self._startTime:
self._startTime = start
if self.endTime:
self._endTime = self.endTime
else:
for (title, speaker, trackId, day, start, duration) in self._talksVisible:
if self._endTime is None: #first one
self._endTime = start + duration
else:
if start + duration > self._endTime:
self._endTime = start + duration
def getAllTracks(self):
tracks = []
for (title, speaker, trackId, day, hours, duration) in self.data:
if trackId is not None:
if trackId not in tracks:
tracks.append(trackId)
tracks.sort()
return tracks
def getRelevantTalks(self, talkList):
"Scans for tracks actually used"
used = []
for talk in talkList:
(title, speaker, trackId, day, hours, duration) = talk
assert trackId != 0, "trackId must be None or 1,2,3... zero not allowed!"
if day == self.day:
if (((self.startTime is None) or ((hours + duration) >= self.startTime))
and ((self.endTime is None) or (hours <= self.endTime))):
used.append(talk)
return used
def scaleTime(self, theTime):
"Return y-value corresponding to times given"
axisHeight = self.height - self.trackRowHeight
# compute fraction between 0 and 1, 0 is at start of period
proportionUp = ((theTime - self._startTime) / (self._endTime - self._startTime))
y = self.y + axisHeight - (axisHeight * proportionUp)
return y
def getTalkRect(self, startTime, duration, trackId, text):
"Return shapes for a specific talk"
g = Group()
y_bottom = self.scaleTime(startTime + duration)
y_top = self.scaleTime(startTime)
y_height = y_top - y_bottom
if trackId is None:
#spans all columns
x = self._colLeftEdges[1]
width = self.width - self._colWidths[0]
else:
#trackId is 1-based and these arrays have the margin info in column
#zero, so no need to add 1
x = self._colLeftEdges[trackId]
width = self._colWidths[trackId]
lab = Label()
lab.setText(text)
lab.setOrigin(x + 0.5*width, y_bottom+0.5*y_height)
lab.boxAnchor = 'c'
lab.width = width
lab.height = y_height
lab.fontSize = 6
r = Rect(x, y_bottom, width, y_height, fillColor=colors.cyan)
g.add(r)
g.add(lab)
#now for a label
# would expect to color-code and add text
return g
def draw(self):
self.computeSize()
g = Group()
# time column
g.add(Rect(self.x, self.y, self._colWidths[0], self.height - self.trackRowHeight, fillColor=colors.cornsilk))
# track headers
x = self.x + self._colWidths[0]
y = self.y + self.height - self.trackRowHeight
for trk in range(self._trackCount):
wid = self._colWidths[trk+1]
r = Rect(x, y, wid, self.trackRowHeight, fillColor=colors.yellow)
s = String(x + 0.5*wid, y, 'Track %d' % trk, align='middle')
g.add(r)
g.add(s)
x = x + wid
for talk in self._talksVisible:
(title, speaker, trackId, day, start, duration) = talk
r = self.getTalkRect(start, duration, trackId, title + '\n' + speaker)
g.add(r)
return g
def test():
"Make a conference event for day 1 of UP Python 2003"
d = Drawing(400,200)
cal = EventCalendar()
cal.x = 50
cal.y = 25
cal.data = [
# these might be better as objects instead of tuples, since I
        # predict a large number of optional variables to affect
# formatting in future.
#title, speaker, track id, day, start time (hrs), duration (hrs)
# track ID is 1-based not zero-based!
('Keynote: Why design another programming language?', 'Guido van Rossum', None, 1, 9.0, 1.0),
('Siena Web Service Architecture', 'Marc-Andre Lemburg', 1, 1, 10.5, 1.5),
('Extreme Programming in Python', 'Chris Withers', 2, 1, 10.5, 1.5),
('Pattern Experiences in C++', 'Mark Radford', 3, 1, 10.5, 1.5),
('What is the Type of std::toupper()', 'Gabriel Dos Reis', 4, 1, 10.5, 1.5),
('Linguistic Variables: Clear Thinking with Fuzzy Logic ', 'Walter Banks', 5, 1, 10.5, 1.5),
('lunch, short presentations, vendor presentations', '', None, 1, 12.0, 2.0),
("CORBA? Isn't that obsolete", 'Duncan Grisby', 1, 1, 14.0, 1.5),
("Python Design Patterns", 'Duncan Booth', 2, 1, 14.0, 1.5),
("Inside Security Checks and Safe Exceptions", 'Brandon Bray', 3, 1, 14.0, 1.5),
("Studying at a Distance", 'Panel Discussion, Panel to include Alan Lenton & Francis Glassborow', 4, 1, 14.0, 1.5),
("Coding Standards - Given the ANSI C Standard why do I still need a coding Standard", 'Randy Marques', 5, 1, 14.0, 1.5),
("RESTful Python", 'Hamish Lawson', 1, 1, 16.0, 1.5),
("Parsing made easier - a radical old idea", 'Andrew Koenig', 2, 1, 16.0, 1.5),
("C++ & Multimethods", 'Julian Smith', 3, 1, 16.0, 1.5),
("C++ Threading", 'Kevlin Henney', 4, 1, 16.0, 1.5),
("The Organisation Strikes Back", 'Alan Griffiths & Sarah Lees', 5, 1, 16.0, 1.5),
('Birds of a Feather meeting', '', None, 1, 17.5, 2.0),
('Keynote: In the Spirit of C', 'Greg Colvin', None, 2, 9.0, 1.0),
('The Infinite Filing Cabinet - object storage in Python', 'Jacob Hallen', 1, 2, 10.5, 1.5),
('Introduction to Python and Jython for C++ and Java Programmers', 'Alex Martelli', 2, 2, 10.5, 1.5),
('Template metaprogramming in Haskell', 'Simon Peyton Jones', 3, 2, 10.5, 1.5),
('Plenty People Programming: C++ Programming in a Group, Workshop with a difference', 'Nico Josuttis', 4, 2, 10.5, 1.5),
('Design and Implementation of the Boost Graph Library', 'Jeremy Siek', 5, 2, 10.5, 1.5),
('lunch, short presentations, vendor presentations', '', None, 2, 12.0, 2.0),
("Building GUI Applications with PythonCard and PyCrust", 'Andy Todd', 1, 2, 14.0, 1.5),
("Integrating Python, C and C++", 'Duncan Booth', 2, 2, 14.0, 1.5),
("Secrets and Pitfalls of Templates", 'Nicolai Josuttis & David Vandevoorde', 3, 2, 14.0, 1.5),
("Being a Mentor", 'Panel Discussion, Panel to include Alan Lenton & Francis Glassborow', 4, 2, 14.0, 1.5),
("The Embedded C Extensions to C", 'Willem Wakker', 5, 2, 14.0, 1.5),
("Lightning Talks", 'Paul Brian', 1, 2, 16.0, 1.5),
("Scripting Java Applications with Jython", 'Anthony Eden', 2, 2, 16.0, 1.5),
("Metaprogramming and the Boost Metaprogramming Library", 'David Abrahams', 3, 2, 16.0, 1.5),
("A Common Vendor ABI for C++ -- GCC's why, what and not", 'Nathan Sidwell & Gabriel Dos Reis', 4, 2, 16.0, 1.5),
("The Timing and Cost of Choices", 'Hubert Matthews', 5, 2, 16.0, 1.5),
('Birds of a Feather meeting', '', None, 2, 17.5, 2.0),
('Keynote: The Cost of C & C++ Compatibility', 'Andy Koenig', None, 3, 9.0, 1.0),
('Prying Eyes: Generic Observer Implementations in C++', 'Andrei Alexandrescu', 1, 2, 10.5, 1.5),
('The Roadmap to Generative Programming With C++', 'Ulrich Eisenecker', 2, 2, 10.5, 1.5),
('Design Patterns in C++ and C# for the Common Language Runtime', 'Brandon Bray', 3, 2, 10.5, 1.5),
        ('Extreme Hour (XH): (workshop) - Jutta Eckstein and Nico Josuttis', 'Jutta Eckstein', 4, 2, 10.5, 1.5),
('The Lambda Library : Unnamed Functions for C++', 'Jaako Jarvi', 5, 2, 10.5, 1.5),
('lunch, short presentations, vendor presentations', '', None, 3, 12.0, 2.0),
('Reflective Metaprogramming', 'Daveed Vandevoorde', 1, 3, 14.0, 1.5),
        ('Advanced Template Issues and Solutions (double session)', 'Herb Sutter', 2, 3, 14.0, 3),
('Concurrent Programming in Java (double session)', 'Angelika Langer', 3, 3, 14.0, 3),
('What can MISRA-C (2nd Edition) do for us?', 'Chris Hills', 4, 3, 14.0, 1.5),
('C++ Metaprogramming Concepts and Results', 'Walter E Brown', 5, 3, 14.0, 1.5),
('Binding C++ to Python with the Boost Python Library', 'David Abrahams', 1, 3, 16.0, 1.5),
('Using Aspect Oriented Programming for Enterprise Application Integration', 'Arno Schmidmeier', 4, 3, 16.0, 1.5),
('Defective C++', 'Marc Paterno', 5, 3, 16.0, 1.5),
("Speakers' Banquet & Birds of a Feather meeting", '', None, 3, 17.5, 2.0),
('Keynote: The Internet, Software and Computers - A Report Card', 'Alan Lenton', None, 4, 9.0, 1.0),
('Multi-Platform Software Development; Lessons from the Boost libraries', 'Beman Dawes', 1, 5, 10.5, 1.5),
('The Stability of the C++ ABI', 'Steve Clamage', 2, 5, 10.5, 1.5),
('Generic Build Support - A Pragmatic Approach to the Software Build Process', 'Randy Marques', 3, 5, 10.5, 1.5),
('How to Handle Project Managers: a survival guide', 'Barb Byro', 4, 5, 10.5, 1.5),
('lunch, ACCU AGM', '', None, 5, 12.0, 2.0),
('Sauce: An OO recursive descent parser; its design and implementation.', 'Jon Jagger', 1, 5, 14.0, 1.5),
('GNIRTS ESAC REWOL - Bringing the UNIX filters to the C++ iostream library.', 'JC van Winkel', 2, 5, 14.0, 1.5),
('Pattern Writing: Live and Direct', 'Frank Buschmann & Kevlin Henney', 3, 5, 14.0, 3.0),
('The Future of Programming Languages - A Goldfish Bowl', 'Francis Glassborow and friends', 3, 5, 14.0, 1.5),
('Honey, I Shrunk the Threads: Compile-time checked multithreaded transactions in C++', 'Andrei Alexandrescu', 1, 5, 16.0, 1.5),
('Fun and Functionality with Functors', 'Lois Goldthwaite', 2, 5, 16.0, 1.5),
('Agile Enough?', 'Alan Griffiths', 4, 5, 16.0, 1.5),
("Conference Closure: A brief plenary session", '', None, 5, 17.5, 0.5),
]
#return cal
cal.day = 1
d.add(cal)
for format in ['pdf']:#,'gif','png']:
out = d.asString(format)
open('eventcal.%s' % format, 'wb').write(out)
print('saved eventcal.%s' % format)
if __name__=='__main__':
test()
|
mit
|
markslwong/tensorflow
|
tensorflow/contrib/imperative/examples/mnist.py
|
69
|
4576
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""MNIST training in imperative mode TensorFlow."""
# pylint: disable=redefined-outer-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.contrib.imperative as tf
from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets
IMAGE_SIZE = 28
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
NUM_CLASSES = 10
BATCH_SIZE = 100
NUM_EPOCHS = 2
LEARNING_RATE = 0.1
class Model(object):
"""Fully connected model for MNIST."""
def __init__(self, hidden1_units, hidden2_units):
"""Create the model parameters."""
self.params = []
# Hidden 1
with tf.name_scope('hidden1'):
self.weights1 = tf.Variable(
np.random.normal(scale=1.0 / np.sqrt(float(IMAGE_PIXELS)),
size=[IMAGE_PIXELS, hidden1_units]),
dtype=tf.float32,
name='weights')
self.biases1 = tf.Variable(
np.zeros([hidden1_units]),
dtype=tf.float32,
name='biases')
# Hidden 2
with tf.name_scope('hidden2'):
self.weights2 = tf.Variable(
np.random.normal(scale=1.0 / np.sqrt(float(hidden1_units)),
size=[hidden1_units, hidden2_units]),
dtype=tf.float32,
name='weights')
self.biases2 = tf.Variable(
np.zeros([hidden2_units]),
dtype=tf.float32,
name='biases')
# Linear
with tf.name_scope('softmax_linear'):
self.sm_w = tf.Variable(
np.random.normal(scale=1.0 / np.sqrt(float(hidden2_units)),
size=[hidden2_units, NUM_CLASSES]),
dtype=tf.float32,
name='weights')
self.sm_b = tf.Variable(
np.zeros([NUM_CLASSES]),
dtype=tf.float32,
name='biases')
self.params = [self.weights1, self.biases1,
self.weights2, self.biases2,
self.sm_w, self.sm_b]
def __call__(self, images):
"""Run the model's forward prop on `images`."""
hidden1 = tf.nn.relu(tf.matmul(images, self.weights1) + self.biases1)
hidden2 = tf.nn.relu(tf.matmul(hidden1, self.weights2) + self.biases2)
logits = tf.matmul(hidden2, self.sm_w) + self.sm_b
return logits
model = Model(128, 32)
data = read_data_sets('/tmp/mnist_train')
def get_test_accuracy():
"""Gets the model's classification accuracy on test data."""
num_examples = data.test.num_examples
test_images = np.split(data.test.images, num_examples/BATCH_SIZE)
test_labels = np.split(data.test.labels.astype(np.int32),
num_examples/BATCH_SIZE)
num_correct = 0
for _, (images, labels) in enumerate(zip(test_images, test_labels)):
with tf.new_step():
logits = model(images)
predictions = tf.argmax(tf.nn.softmax(logits), axis=1)
num_correct += np.sum(predictions.value == labels)
return float(num_correct) / float(num_examples)
num_examples = data.train.num_examples
train_images = np.split(data.train.images, num_examples/BATCH_SIZE)
train_labels = np.split(data.train.labels.astype(np.int32),
num_examples/BATCH_SIZE)
for epoch in range(NUM_EPOCHS):
for i, (images, labels) in enumerate(zip(train_images, train_labels)):
with tf.new_step() as step:
logits = model(images)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=logits, name='xentropy')
loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
gradients = tf.gradients(loss, model.params)
step.run([v.assign_sub(LEARNING_RATE * g)
for g, v in zip(gradients, model.params)])
if i % 10 == 0:
print('Loss after {} steps = {}'.format(i, loss))
if i % 100 == 0:
print('Test accuracy after {} steps = {}'
.format(i, get_test_accuracy()))
|
apache-2.0
|
tb0hdan/voiceplay
|
voiceplay/datasources/playlists/libraries/textfile.py
|
1
|
1180
|
#-*- coding: utf-8 -*-
""" Plaintext playlist module """
class TextFileLibrary(object):
"""
(not) Very basic and silly TXT parser
"""
def __init__(self):
self.checks = [lambda x: x.startswith('#'), lambda x: x.startswith('//'),
lambda x: x.startswith('/*')]
def line_ok(self, line):
"""
Confirm that line is okay by honoring comments
"""
status = True
for check in self.checks:
if check(line):
status = False
break
return status
def text_parser(self, data):
"""
.txt file parser
"""
tracks = []
for line in data.splitlines():
track = line.strip()
if not self.line_ok(track):
continue
tracks.append(track)
return tracks
def parse(self, library_file):
"""
Process TXT playlist, return items
"""
tracks = []
with open(library_file, 'rb') as text_file:
data = text_file.read()
data = data.decode()
tracks = self.text_parser(data)
return tracks
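# Illustrative usage (a sketch; 'playlist.txt' is a hypothetical file):
#
#   lib = TextFileLibrary()
#   tracks = lib.parse('playlist.txt')   # lines starting with #, // or /* are skipped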
|
unlicense
|
antonioUnina/neutron
|
neutron/db/migration/alembic_migrations/versions/327ee5fde2c7_set_innodb_engine.py
|
15
|
1236
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""set_innodb_engine
Revision ID: 327ee5fde2c7
Revises: 4eba2f05c2f4
Create Date: 2014-07-24 12:00:38.791287
"""
# revision identifiers, used by Alembic.
revision = '327ee5fde2c7'
down_revision = '4eba2f05c2f4'
from alembic import op
# This list contains tables that may have been created before the change
# converting all tables to InnoDB was introduced
TABLES = ['router_extra_attributes', 'dvr_host_macs', 'ml2_dvr_port_bindings',
'csnat_l3_agent_bindings']
def upgrade():
if op.get_bind().dialect.name == 'mysql':
for table in TABLES:
op.execute("ALTER TABLE %s ENGINE=InnoDB" % table)
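# For example, on a MySQL backend the first table above results in:
#   ALTER TABLE router_extra_attributes ENGINE=InnoDB
# On other backends upgrade() is a no-op.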
|
apache-2.0
|
Kilmannan/HippieStation13
|
bot/C_maths.py
|
67
|
2451
|
### EXPERIMENTAL PROTOTYPE ###
# e = 2.7182818284590452353602874713526624977572
# pi = math.pi
from __future__ import division #PYTHON Y U NO TELL ME THIS BEFORE
import math
import random
import re
e = "2.7182818284590452353602874713526624977572"
pi = str(math.pi)
global pre
pre = len("maths ")
def maths(influx,prefix="!",sender="NaN",debug=True,method="n"):
global pre
influx = influx.lower()
influx = influx[len(prefix)+pre:]
influx = influx.replace("pie",pi+"*"+e)
influx = influx.replace("e*",e+"*")
influx = influx.replace("*e","*"+e)
influx = influx.replace("pi",pi)
if debug:
print sender+":"+prefix+"maths"
if influx.count("**") == 0 and influx.count('"') == 0 and influx.count("'") == 0 and influx.count(";") == 0 and influx.count(":") == 0:
influx_low = influx.lower()
influx_hi = influx.upper()
if "0b" in influx_low:
influx_low = re.sub("0b[0-1]*","",influx_low)
influx_hi = re.sub("0B[0-1]*","",influx_hi)
if "0x" in influx_low:
influx_low = re.sub("0x[a-f0-9]*","",influx_low)
influx_hi = re.sub("0X[A-F0-9]*","",influx_hi)
if "rand" in influx_low:
influx_low = re.sub("rand","",influx_low)
influx_hi = re.sub("RAND","",influx_hi)
if influx_low == influx_hi:
influx = re.sub("rand","random.random()",influx)
try:
result = eval(influx.lower())
except ZeroDivisionError:
return "Divide by zero detected."
except SyntaxError:
return "Syntax Error detected."
except TypeError:
return "Type Error detected."
except:
return "Unknown Error detected."
else:
if method == "n": #Normal
return result
elif method == "i": #Forced Int
return int(result)
elif method == "h": #Hex
try:
if "L" in hex(result)[2:]:
return hex(result)[2:-1]
else:
return hex(result)[2:].upper()
except TypeError:
return "That value (%s) cannot be interpreted properly using !hmaths" %(str(result))
elif method == "b": #Binary
try:
return bin(result)[2:].upper()
except TypeError:
return "That value (%s) cannot be interpreted properly using !bmaths" %(str(result))
else:
return result
else:
return "What are you trying to make me do again?"
else:
return "Those are likely to make me hang"
|
agpl-3.0
|
moijes12/oh-mainline
|
vendor/packages/twisted/twisted/spread/jelly.py
|
18
|
36237
|
# -*- test-case-name: twisted.test.test_jelly -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
S-expression-based persistence of python objects.
It does something very much like L{Pickle<pickle>}; however, pickle's main goal
seems to be efficiency (both in space and time); jelly's main goals are
security, human readability, and portability to other environments.
This is how Jelly converts various objects to s-expressions.
Boolean::
True --> ['boolean', 'true']
Integer::
1 --> 1
List::
[1, 2] --> ['list', 1, 2]
String::
\"hello\" --> \"hello\"
Float::
2.3 --> 2.3
Dictionary::
{'a': 1, 'b': 'c'} --> ['dictionary', ['b', 'c'], ['a', 1]]
Module::
UserString --> ['module', 'UserString']
Class::
UserString.UserString --> ['class', ['module', 'UserString'], 'UserString']
Function::
string.join --> ['function', 'join', ['module', 'string']]
Instance: s is an instance of UserString.UserString, with a __dict__
{'data': 'hello'}::
[\"UserString.UserString\", ['dictionary', ['data', 'hello']]]
Class Method: UserString.UserString.center::
['method', 'center', ['None'], ['class', ['module', 'UserString'],
'UserString']]
Instance Method: s.center, where s is an instance of UserString.UserString::
['method', 'center', ['instance', ['reference', 1, ['class',
['module', 'UserString'], 'UserString']], ['dictionary', ['data', 'd']]],
['dereference', 1]]
The C{set} builtin and the C{sets.Set} class are serialized to the same
thing, and unserialized to C{set} if available, else to C{sets.Set}. It means
that there's a possibility of type switching in the serialization process. The
solution is to always use C{set} if possible, and only use C{sets.Set} under
Python 2.3; this can be accomplished by using L{twisted.python.compat.set}.
The same rule applies for C{frozenset} and C{sets.ImmutableSet}.
@author: Glyph Lefkowitz
"""
# System Imports
import pickle
import types
import warnings
from types import StringType
from types import UnicodeType
from types import IntType
from types import TupleType
from types import ListType
from types import LongType
from types import FloatType
from types import FunctionType
from types import MethodType
from types import ModuleType
from types import DictionaryType
from types import InstanceType
from types import NoneType
from types import ClassType
import copy
import datetime
from types import BooleanType
try:
import decimal
except ImportError:
decimal = None
try:
_set = set
except NameError:
_set = None
try:
# Filter out deprecation warning for Python >= 2.6
warnings.filterwarnings("ignore", category=DeprecationWarning,
message="the sets module is deprecated", append=True)
import sets as _sets
finally:
warnings.filters.pop()
from new import instance
from new import instancemethod
from zope.interface import implements
# Twisted Imports
from twisted.python.reflect import namedObject, qual
from twisted.persisted.crefutil import NotKnown, _Tuple, _InstanceMethod
from twisted.persisted.crefutil import _DictKeyAndValue, _Dereference
from twisted.persisted.crefutil import _Container
from twisted.python.compat import reduce
from twisted.spread.interfaces import IJellyable, IUnjellyable
DictTypes = (DictionaryType,)
None_atom = "None" # N
# code
class_atom = "class" # c
module_atom = "module" # m
function_atom = "function" # f
# references
dereference_atom = 'dereference' # D
persistent_atom = 'persistent' # p
reference_atom = 'reference' # r
# mutable collections
dictionary_atom = "dictionary" # d
list_atom = 'list' # l
set_atom = 'set'
# immutable collections
# (assignment to __dict__ and __class__ still might go away!)
tuple_atom = "tuple" # t
instance_atom = 'instance' # i
frozenset_atom = 'frozenset'
# errors
unpersistable_atom = "unpersistable"# u
unjellyableRegistry = {}
unjellyableFactoryRegistry = {}
_NO_STATE = object()
def _newInstance(cls, state=_NO_STATE):
"""
Make a new instance of a class without calling its __init__ method.
Supports both new- and old-style classes.
@param state: A C{dict} used to update C{inst.__dict__} or C{_NO_STATE}
to skip this part of initialization.
@return: A new instance of C{cls}.
"""
if not isinstance(cls, types.ClassType):
# new-style
inst = cls.__new__(cls)
if state is not _NO_STATE:
inst.__dict__.update(state) # Copy 'instance' behaviour
else:
if state is not _NO_STATE:
inst = instance(cls, state)
else:
inst = instance(cls)
return inst
def _maybeClass(classnamep):
try:
object
except NameError:
isObject = 0
else:
isObject = isinstance(classnamep, type)
if isinstance(classnamep, ClassType) or isObject:
return qual(classnamep)
return classnamep
def setUnjellyableForClass(classname, unjellyable):
"""
Set which local class will represent a remote type.
If you have written a Copyable class that you expect your client to be
receiving, write a local "copy" class to represent it, then call::
jellier.setUnjellyableForClass('module.package.Class', MyCopier).
Call this at the module level immediately after its class
definition. MyCopier should be a subclass of RemoteCopy.
The classname may be a special tag returned by
'Copyable.getTypeToCopyFor' rather than an actual classname.
This call is also for cached classes, since there will be no
overlap. The rules are the same.
"""
global unjellyableRegistry
classname = _maybeClass(classname)
unjellyableRegistry[classname] = unjellyable
globalSecurity.allowTypes(classname)
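# Illustrative sketch (not part of the original module): registering a local
# stand-in for a remote class. The name 'some.remote.Class' and the MyCopy
# class are hypothetical; real code would typically subclass pb.RemoteCopy.
#
#     class MyCopy(Unjellyable):
#         pass
#     setUnjellyableForClass('some.remote.Class', MyCopy)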
def setUnjellyableFactoryForClass(classname, copyFactory):
"""
Set the factory to construct a remote instance of a type::
jellier.setUnjellyableFactoryForClass('module.package.Class', MyFactory)
Call this at the module level immediately after its class definition.
C{copyFactory} should return an instance or subclass of
L{RemoteCopy<pb.RemoteCopy>}.
Similar to L{setUnjellyableForClass} except it uses a factory instead
of creating an instance.
"""
global unjellyableFactoryRegistry
classname = _maybeClass(classname)
unjellyableFactoryRegistry[classname] = copyFactory
globalSecurity.allowTypes(classname)
def setUnjellyableForClassTree(module, baseClass, prefix=None):
"""
Set all classes in a module derived from C{baseClass} as copiers for
a corresponding remote class.
When you have a hierarchy of Copyable (or Cacheable) classes on one
side, and a mirror structure of Copied (or RemoteCache) classes on the
other, use this to setUnjellyableForClass all your Copieds for the
Copyables.
Each copyTag (the \"classname\" argument to getTypeToCopyFor, and
what the Copyable's getTypeToCopyFor returns) is formed from
adding a prefix to the Copied's class name. The prefix defaults
to module.__name__. If you wish the copy tag to consist of solely
the classname, pass the empty string \'\'.
@param module: a module object from which to pull the Copied classes.
(passing sys.modules[__name__] might be useful)
@param baseClass: the base class from which all your Copied classes derive.
@param prefix: the string prefixed to classnames to form the
unjellyableRegistry.
"""
if prefix is None:
prefix = module.__name__
if prefix:
prefix = "%s." % prefix
for i in dir(module):
i_ = getattr(module, i)
if type(i_) == types.ClassType:
if issubclass(i_, baseClass):
setUnjellyableForClass('%s%s' % (prefix, i), i_)
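# Illustrative sketch (hypothetical module and class names): if module
# 'mymod' defines FooCopy and BarCopy derived from RemoteCopy, the call
# below registers them under the copy tags 'mymod.FooCopy' and
# 'mymod.BarCopy':
#
#     setUnjellyableForClassTree(mymod, RemoteCopy)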
def getInstanceState(inst, jellier):
"""
Utility method to default to 'normal' state rules in serialization.
"""
if hasattr(inst, "__getstate__"):
state = inst.__getstate__()
else:
state = inst.__dict__
sxp = jellier.prepare(inst)
sxp.extend([qual(inst.__class__), jellier.jelly(state)])
return jellier.preserve(inst, sxp)
def setInstanceState(inst, unjellier, jellyList):
"""
Utility method to default to 'normal' state rules in unserialization.
"""
state = unjellier.unjelly(jellyList[1])
if hasattr(inst, "__setstate__"):
inst.__setstate__(state)
else:
inst.__dict__ = state
return inst
class Unpersistable:
"""
This is an instance of a class that comes back when something couldn't be
unpersisted.
"""
def __init__(self, reason):
"""
Initialize an unpersistable object with a descriptive C{reason} string.
"""
self.reason = reason
def __repr__(self):
return "Unpersistable(%s)" % repr(self.reason)
class Jellyable:
"""
Inherit from me to Jelly yourself directly with the `getStateFor'
convenience method.
"""
implements(IJellyable)
def getStateFor(self, jellier):
return self.__dict__
def jellyFor(self, jellier):
"""
@see: L{twisted.spread.interfaces.IJellyable.jellyFor}
"""
sxp = jellier.prepare(self)
sxp.extend([
qual(self.__class__),
jellier.jelly(self.getStateFor(jellier))])
return jellier.preserve(self, sxp)
class Unjellyable:
"""
Inherit from me to Unjelly yourself directly with the
C{setStateFor} convenience method.
"""
implements(IUnjellyable)
def setStateFor(self, unjellier, state):
self.__dict__ = state
def unjellyFor(self, unjellier, jellyList):
"""
Perform the inverse operation of L{Jellyable.jellyFor}.
@see: L{twisted.spread.interfaces.IUnjellyable.unjellyFor}
"""
state = unjellier.unjelly(jellyList[1])
self.setStateFor(unjellier, state)
return self
class _Jellier:
"""
(Internal) This class manages state for a call to jelly()
"""
def __init__(self, taster, persistentStore, invoker):
"""
Initialize.
"""
self.taster = taster
# `preserved' is a dict of previously seen instances.
self.preserved = {}
# `cooked' is a dict of previously backreferenced instances to their
# `ref' lists.
self.cooked = {}
self.cooker = {}
self._ref_id = 1
self.persistentStore = persistentStore
self.invoker = invoker
def _cook(self, object):
"""
(internal) Backreference an object.
Notes on this method for the hapless future maintainer: If I've already
gone through the prepare/preserve cycle on the specified object (it is
being referenced after the serializer is \"done with\" it, e.g. this
reference is NOT circular), the copy-in-place of aList is relevant,
since the list being modified is the actual, pre-existing jelly
expression that was returned for that object. If not, it's technically
superfluous, since the value in self.preserved didn't need to be set,
but the invariant that self.preserved[id(object)] is a list is
convenient because it means we don't have to test whether to create it
here, which leaves fewer code paths. That's why self.preserved is always
set to a list.
Sorry that this code is so hard to follow, but Python objects are
tricky to persist correctly. -glyph
"""
aList = self.preserved[id(object)]
newList = copy.copy(aList)
# make a new reference ID
refid = self._ref_id
self._ref_id = self._ref_id + 1
# replace the old list in-place, so that we don't have to track the
# previous reference to it.
aList[:] = [reference_atom, refid, newList]
self.cooked[id(object)] = [dereference_atom, refid]
return aList
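    # Worked example (illustrative, not in the original source): suppose the
    # same list L = ['x'] is encountered twice. After the first pass,
    # preserved[id(L)] == ['list', 'x']. The second encounter calls _cook,
    # which rewrites that list in place to ['reference', 1, ['list', 'x']]
    # and records cooked[id(L)] == ['dereference', 1], so every later
    # occurrence of L serializes as the dereference atom.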
def prepare(self, object):
"""
(internal) Create a list for persisting an object to. This will allow
backreferences to be made internal to the object (circular references).
The reason this needs to happen is that we don't generate an ID for
every object, so we won't necessarily know which ID the object will
have in the future. When it is 'cooked' (see _cook), it will be
assigned an ID, and the temporary placeholder list created here will be
modified in-place to create an expression that gives this object an ID:
[reference id# [object-jelly]].
"""
# create a placeholder list to be preserved
self.preserved[id(object)] = []
# keep a reference to this object around, so it doesn't disappear!
# (This isn't always necessary, but for cases where the objects are
# dynamically generated by __getstate__ or getStateToCopyFor calls, it
# is; id() will return the same value for a different object if it gets
# garbage collected. This may be optimized later.)
self.cooker[id(object)] = object
return []
def preserve(self, object, sexp):
"""
(internal) Mark an object's persistent list for later referral.
"""
# if I've been cooked in the meanwhile,
if id(object) in self.cooked:
# replace the placeholder empty list with the real one
self.preserved[id(object)][2] = sexp
# but give this one back.
sexp = self.preserved[id(object)]
else:
self.preserved[id(object)] = sexp
return sexp
constantTypes = {types.StringType : 1, types.IntType : 1,
types.FloatType : 1, types.LongType : 1}
def _checkMutable(self,obj):
objId = id(obj)
if objId in self.cooked:
return self.cooked[objId]
if objId in self.preserved:
self._cook(obj)
return self.cooked[objId]
def jelly(self, obj):
if isinstance(obj, Jellyable):
preRef = self._checkMutable(obj)
if preRef:
return preRef
return obj.jellyFor(self)
objType = type(obj)
if self.taster.isTypeAllowed(qual(objType)):
# "Immutable" Types
if ((objType is StringType) or
(objType is IntType) or
(objType is LongType) or
(objType is FloatType)):
return obj
elif objType is MethodType:
return ["method",
obj.im_func.__name__,
self.jelly(obj.im_self),
self.jelly(obj.im_class)]
elif UnicodeType and objType is UnicodeType:
return ['unicode', obj.encode('UTF-8')]
elif objType is NoneType:
return ['None']
elif objType is FunctionType:
name = obj.__name__
return ['function', str(pickle.whichmodule(obj, obj.__name__)) + '.' + name]
elif objType is ModuleType:
return ['module', obj.__name__]
elif objType is BooleanType:
return ['boolean', obj and 'true' or 'false']
elif objType is datetime.datetime:
if obj.tzinfo:
raise NotImplementedError(
"Currently can't jelly datetime objects with tzinfo")
return ['datetime', '%s %s %s %s %s %s %s' % (
obj.year, obj.month, obj.day, obj.hour,
obj.minute, obj.second, obj.microsecond)]
elif objType is datetime.time:
if obj.tzinfo:
raise NotImplementedError(
"Currently can't jelly datetime objects with tzinfo")
return ['time', '%s %s %s %s' % (obj.hour, obj.minute,
obj.second, obj.microsecond)]
elif objType is datetime.date:
return ['date', '%s %s %s' % (obj.year, obj.month, obj.day)]
elif objType is datetime.timedelta:
return ['timedelta', '%s %s %s' % (obj.days, obj.seconds,
obj.microseconds)]
elif objType is ClassType or issubclass(objType, type):
return ['class', qual(obj)]
elif decimal is not None and objType is decimal.Decimal:
return self.jelly_decimal(obj)
else:
preRef = self._checkMutable(obj)
if preRef:
return preRef
# "Mutable" Types
sxp = self.prepare(obj)
if objType is ListType:
sxp.extend(self._jellyIterable(list_atom, obj))
elif objType is TupleType:
sxp.extend(self._jellyIterable(tuple_atom, obj))
elif objType in DictTypes:
sxp.append(dictionary_atom)
for key, val in obj.items():
sxp.append([self.jelly(key), self.jelly(val)])
elif (_set is not None and objType is set or
objType is _sets.Set):
sxp.extend(self._jellyIterable(set_atom, obj))
elif (_set is not None and objType is frozenset or
objType is _sets.ImmutableSet):
sxp.extend(self._jellyIterable(frozenset_atom, obj))
else:
className = qual(obj.__class__)
persistent = None
if self.persistentStore:
persistent = self.persistentStore(obj, self)
if persistent is not None:
sxp.append(persistent_atom)
sxp.append(persistent)
elif self.taster.isClassAllowed(obj.__class__):
sxp.append(className)
if hasattr(obj, "__getstate__"):
state = obj.__getstate__()
else:
state = obj.__dict__
sxp.append(self.jelly(state))
else:
self.unpersistable(
"instance of class %s deemed insecure" %
qual(obj.__class__), sxp)
return self.preserve(obj, sxp)
else:
if objType is InstanceType:
raise InsecureJelly("Class not allowed for instance: %s %s" %
(obj.__class__, obj))
raise InsecureJelly("Type not allowed for object: %s %s" %
(objType, obj))
def _jellyIterable(self, atom, obj):
"""
Jelly an iterable object.
@param atom: the identifier atom of the object.
@type atom: C{str}
@param obj: any iterable object.
@type obj: C{iterable}
@return: a generator of jellied data.
@rtype: C{generator}
"""
yield atom
for item in obj:
yield self.jelly(item)
def jelly_decimal(self, d):
"""
Jelly a decimal object.
@param d: a decimal object to serialize.
@type d: C{decimal.Decimal}
@return: jelly for the decimal object.
@rtype: C{list}
"""
sign, guts, exponent = d.as_tuple()
value = reduce(lambda left, right: left * 10 + right, guts)
if sign:
value = -value
return ['decimal', value, exponent]
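    # Worked example (illustrative): Decimal('-2.50').as_tuple() is
    # (1, (2, 5, 0), -2); the digits reduce to 250 and the sign makes it
    # -250, so the jelly is ['decimal', -250, -2]. _unjelly_decimal reverses
    # this: Decimal((1, (2, 5, 0), -2)) == Decimal('-2.50').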
def unpersistable(self, reason, sxp=None):
"""
(internal) Returns an sexp: (unpersistable "reason"). Utility method
for making note that a particular object could not be serialized.
"""
if sxp is None:
sxp = []
sxp.append(unpersistable_atom)
sxp.append(reason)
return sxp
class _Unjellier:
def __init__(self, taster, persistentLoad, invoker):
self.taster = taster
self.persistentLoad = persistentLoad
self.references = {}
self.postCallbacks = []
self.invoker = invoker
def unjellyFull(self, obj):
o = self.unjelly(obj)
for m in self.postCallbacks:
m()
return o
def unjelly(self, obj):
if type(obj) is not types.ListType:
return obj
jelType = obj[0]
if not self.taster.isTypeAllowed(jelType):
raise InsecureJelly(jelType)
regClass = unjellyableRegistry.get(jelType)
if regClass is not None:
if isinstance(regClass, ClassType):
inst = _Dummy() # XXX chomp, chomp
inst.__class__ = regClass
method = inst.unjellyFor
elif isinstance(regClass, type):
# regClass.__new__ does not call regClass.__init__
inst = regClass.__new__(regClass)
method = inst.unjellyFor
else:
method = regClass # this is how it ought to be done
val = method(self, obj)
if hasattr(val, 'postUnjelly'):
self.postCallbacks.append(val.postUnjelly) # val, not inst: inst is unbound when regClass is a plain callable
return val
regFactory = unjellyableFactoryRegistry.get(jelType)
if regFactory is not None:
state = self.unjelly(obj[1])
inst = regFactory(state)
if hasattr(inst, 'postUnjelly'):
self.postCallbacks.append(inst.postUnjelly)
return inst
thunk = getattr(self, '_unjelly_%s'%jelType, None)
if thunk is not None:
ret = thunk(obj[1:])
else:
nameSplit = jelType.split('.')
modName = '.'.join(nameSplit[:-1])
if not self.taster.isModuleAllowed(modName):
raise InsecureJelly(
"Module %s not allowed (in type %s)." % (modName, jelType))
clz = namedObject(jelType)
if not self.taster.isClassAllowed(clz):
raise InsecureJelly("Class %s not allowed." % jelType)
if hasattr(clz, "__setstate__"):
ret = _newInstance(clz)
state = self.unjelly(obj[1])
ret.__setstate__(state)
else:
state = self.unjelly(obj[1])
ret = _newInstance(clz, state)
if hasattr(clz, 'postUnjelly'):
self.postCallbacks.append(ret.postUnjelly)
return ret
def _unjelly_None(self, exp):
return None
def _unjelly_unicode(self, exp):
if UnicodeType:
return unicode(exp[0], "UTF-8")
else:
return Unpersistable("Could not unpersist unicode: %s" % (exp[0],))
def _unjelly_decimal(self, exp):
"""
Unjelly decimal objects, if decimal is available. If not, return a
L{Unpersistable} object instead.
"""
if decimal is None:
return Unpersistable(
"Could not unpersist decimal: %s" % (exp[0] * (10**exp[1]),))
value = exp[0]
exponent = exp[1]
if value < 0:
sign = 1
else:
sign = 0
guts = decimal.Decimal(value).as_tuple()[1]
return decimal.Decimal((sign, guts, exponent))
def _unjelly_boolean(self, exp):
if BooleanType:
assert exp[0] in ('true', 'false')
return exp[0] == 'true'
else:
return Unpersistable("Could not unpersist boolean: %s" % (exp[0],))
def _unjelly_datetime(self, exp):
return datetime.datetime(*map(int, exp[0].split()))
def _unjelly_date(self, exp):
return datetime.date(*map(int, exp[0].split()))
def _unjelly_time(self, exp):
return datetime.time(*map(int, exp[0].split()))
def _unjelly_timedelta(self, exp):
days, seconds, microseconds = map(int, exp[0].split())
return datetime.timedelta(
days=days, seconds=seconds, microseconds=microseconds)
def unjellyInto(self, obj, loc, jel):
o = self.unjelly(jel)
if isinstance(o, NotKnown):
o.addDependant(obj, loc)
obj[loc] = o
return o
def _unjelly_dereference(self, lst):
refid = lst[0]
x = self.references.get(refid)
if x is not None:
return x
der = _Dereference(refid)
self.references[refid] = der
return der
def _unjelly_reference(self, lst):
refid = lst[0]
exp = lst[1]
o = self.unjelly(exp)
ref = self.references.get(refid)
if (ref is None):
self.references[refid] = o
elif isinstance(ref, NotKnown):
ref.resolveDependants(o)
self.references[refid] = o
else:
assert 0, "Multiple references with same ID!"
return o
def _unjelly_tuple(self, lst):
l = range(len(lst))
finished = 1
for elem in l:
if isinstance(self.unjellyInto(l, elem, lst[elem]), NotKnown):
finished = 0
if finished:
return tuple(l)
else:
return _Tuple(l)
def _unjelly_list(self, lst):
l = range(len(lst))
for elem in l:
self.unjellyInto(l, elem, lst[elem])
return l
def _unjellySetOrFrozenset(self, lst, containerType):
"""
Helper method to unjelly set or frozenset.
@param lst: the content of the set.
@type lst: C{list}
@param containerType: the type of C{set} to use.
"""
l = range(len(lst))
finished = True
for elem in l:
data = self.unjellyInto(l, elem, lst[elem])
if isinstance(data, NotKnown):
finished = False
if not finished:
return _Container(l, containerType)
else:
return containerType(l)
def _unjelly_set(self, lst):
"""
Unjelly set using either the C{set} builtin if available, or
C{sets.Set} as fallback.
"""
if _set is not None:
containerType = set
else:
containerType = _sets.Set
return self._unjellySetOrFrozenset(lst, containerType)
def _unjelly_frozenset(self, lst):
"""
Unjelly frozenset using either the C{frozenset} builtin if available,
or C{sets.ImmutableSet} as fallback.
"""
if _set is not None:
containerType = frozenset
else:
containerType = _sets.ImmutableSet
return self._unjellySetOrFrozenset(lst, containerType)
def _unjelly_dictionary(self, lst):
d = {}
for k, v in lst:
kvd = _DictKeyAndValue(d)
self.unjellyInto(kvd, 0, k)
self.unjellyInto(kvd, 1, v)
return d
def _unjelly_module(self, rest):
moduleName = rest[0]
if type(moduleName) != types.StringType:
raise InsecureJelly(
"Attempted to unjelly a module with a non-string name.")
if not self.taster.isModuleAllowed(moduleName):
raise InsecureJelly(
"Attempted to unjelly module named %r" % (moduleName,))
mod = __import__(moduleName, {}, {}, "x")
return mod
def _unjelly_class(self, rest):
clist = rest[0].split('.')
modName = '.'.join(clist[:-1])
if not self.taster.isModuleAllowed(modName):
raise InsecureJelly("module %s not allowed" % modName)
klaus = namedObject(rest[0])
objType = type(klaus)
if objType not in (types.ClassType, types.TypeType):
raise InsecureJelly(
"class %r unjellied to something that isn't a class: %r" % (
rest[0], klaus))
if not self.taster.isClassAllowed(klaus):
raise InsecureJelly("class not allowed: %s" % qual(klaus))
return klaus
def _unjelly_function(self, rest):
modSplit = rest[0].split('.')
modName = '.'.join(modSplit[:-1])
if not self.taster.isModuleAllowed(modName):
raise InsecureJelly("Module not allowed: %s"% modName)
# XXX do I need an isFunctionAllowed?
function = namedObject(rest[0])
return function
def _unjelly_persistent(self, rest):
if self.persistentLoad:
pload = self.persistentLoad(rest[0], self)
return pload
else:
return Unpersistable("Persistent callback not found")
def _unjelly_instance(self, rest):
clz = self.unjelly(rest[0])
if type(clz) is not types.ClassType:
raise InsecureJelly("Instance found with non-class class.")
if hasattr(clz, "__setstate__"):
inst = _newInstance(clz, {})
state = self.unjelly(rest[1])
inst.__setstate__(state)
else:
state = self.unjelly(rest[1])
inst = _newInstance(clz, state)
if hasattr(clz, 'postUnjelly'):
self.postCallbacks.append(inst.postUnjelly)
return inst
def _unjelly_unpersistable(self, rest):
return Unpersistable("Unpersistable data: %s" % (rest[0],))
def _unjelly_method(self, rest):
"""
(internal) Unjelly a method.
"""
im_name = rest[0]
im_self = self.unjelly(rest[1])
im_class = self.unjelly(rest[2])
if type(im_class) is not types.ClassType:
raise InsecureJelly("Method found with non-class class.")
if im_name in im_class.__dict__:
if im_self is None:
im = getattr(im_class, im_name)
elif isinstance(im_self, NotKnown):
im = _InstanceMethod(im_name, im_self, im_class)
else:
im = instancemethod(im_class.__dict__[im_name],
im_self,
im_class)
else:
raise TypeError('instance method changed')
return im
class _Dummy:
"""
(Internal) Dummy class, used for unserializing instances.
"""
class _DummyNewStyle(object):
"""
(Internal) Dummy class, used for unserializing instances of new-style
classes.
"""
#### Published Interface.
class InsecureJelly(Exception):
"""
This exception will be raised when a jelly is deemed `insecure'; e.g. it
contains a type, class, or module disallowed by the specified `taster'.
"""
class DummySecurityOptions:
"""
DummySecurityOptions() -> insecure security options
Dummy security options -- this class will allow anything.
"""
def isModuleAllowed(self, moduleName):
"""
DummySecurityOptions.isModuleAllowed(moduleName) -> boolean
returns 1 if a module by that name is allowed, 0 otherwise
"""
return 1
def isClassAllowed(self, klass):
"""
DummySecurityOptions.isClassAllowed(class) -> boolean
Assumes the module has already been allowed. Returns 1 if the given
class is allowed, 0 otherwise.
"""
return 1
def isTypeAllowed(self, typeName):
"""
DummySecurityOptions.isTypeAllowed(typeName) -> boolean
Returns 1 if the given type is allowed, 0 otherwise.
"""
return 1
class SecurityOptions:
"""
This will by default disallow everything, except for 'none'.
"""
basicTypes = ["dictionary", "list", "tuple",
"reference", "dereference", "unpersistable",
"persistent", "long_int", "long", "dict"]
def __init__(self):
"""
SecurityOptions() initialize.
"""
# I don't believe any of these types can ever pose a security hazard,
# except perhaps "reference"...
self.allowedTypes = {"None": 1,
"bool": 1,
"boolean": 1,
"string": 1,
"str": 1,
"int": 1,
"float": 1,
"datetime": 1,
"time": 1,
"date": 1,
"timedelta": 1,
"NoneType": 1}
if hasattr(types, 'UnicodeType'):
self.allowedTypes['unicode'] = 1
if decimal is not None:
self.allowedTypes['decimal'] = 1
self.allowedTypes['set'] = 1
self.allowedTypes['frozenset'] = 1
self.allowedModules = {}
self.allowedClasses = {}
def allowBasicTypes(self):
"""
Allow all `basic' types. (Dictionary and list. Int, string, and float
are implicitly allowed.)
"""
self.allowTypes(*self.basicTypes)
def allowTypes(self, *types):
"""
SecurityOptions.allowTypes(typeString): Allow a particular type, by its
name.
"""
for typ in types:
if not isinstance(typ, str):
typ = qual(typ)
self.allowedTypes[typ] = 1
def allowInstancesOf(self, *classes):
"""
SecurityOptions.allowInstances(klass, klass, ...): allow instances
of the specified classes
This will also allow the 'instance', 'class' (renamed 'classobj' in
Python 2.3), and 'module' types, as well as basic types.
"""
self.allowBasicTypes()
self.allowTypes("instance", "class", "classobj", "module")
for klass in classes:
self.allowTypes(qual(klass))
self.allowModules(klass.__module__)
self.allowedClasses[klass] = 1
def allowModules(self, *modules):
"""
SecurityOptions.allowModules(module, module, ...): allow modules by
name. This will also allow the 'module' type.
"""
for module in modules:
if type(module) == types.ModuleType:
module = module.__name__
self.allowedModules[module] = 1
def isModuleAllowed(self, moduleName):
"""
SecurityOptions.isModuleAllowed(moduleName) -> boolean
returns 1 if a module by that name is allowed, 0 otherwise
"""
return moduleName in self.allowedModules
def isClassAllowed(self, klass):
"""
SecurityOptions.isClassAllowed(class) -> boolean
Assumes the module has already been allowed. Returns 1 if the given
class is allowed, 0 otherwise.
"""
return klass in self.allowedClasses
def isTypeAllowed(self, typeName):
"""
SecurityOptions.isTypeAllowed(typeName) -> boolean
Returns 1 if the given type is allowed, 0 otherwise.
"""
return (typeName in self.allowedTypes or '.' in typeName)
globalSecurity = SecurityOptions()
globalSecurity.allowBasicTypes()
def jelly(object, taster=DummySecurityOptions(), persistentStore=None,
invoker=None):
"""
Serialize to s-expression.
Returns a list which is the serialized representation of an object. An
optional 'taster' argument takes a SecurityOptions and will mark any
insecure objects as unpersistable rather than serializing them.
"""
return _Jellier(taster, persistentStore, invoker).jelly(object)
def unjelly(sexp, taster=DummySecurityOptions(), persistentLoad=None,
invoker=None):
"""
Unserialize from s-expression.
Takes a list that was the result of a call to jelly() and unserializes
an arbitrary object from it. The optional 'taster' argument, an instance
of SecurityOptions, will cause an InsecureJelly exception to be raised if a
disallowed type, module, or class is encountered during unserialization.
"""
return _Unjellier(taster, persistentLoad, invoker).unjellyFull(sexp)
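# Usage sketch (illustrative, not part of the original module): round-trip a
# structure through jelly()/unjelly(), exercising the reference/dereference
# machinery with a shared sub-list, then show a strict taster rejecting a
# disallowed atom.
if __name__ == '__main__':
    shared = ['x']
    sexp = jelly([shared, shared])
    # The second occurrence of `shared' serializes as a dereference:
    # ['list', ['reference', 1, ['list', 'x']], ['dereference', 1]]
    print sexp
    restored = unjelly(sexp)
    # Both elements unjelly to the very same list object.
    assert restored[0] is restored[1]
    strict = SecurityOptions()
    strict.allowBasicTypes()
    try:
        unjelly(['module', 'os'], taster=strict)
    except InsecureJelly:
        print "strict taster rejected the 'module' atom"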
|
agpl-3.0
|
maxamillion/anaconda
|
pyanaconda/ui/tui/spokes/software.py
|
3
|
9880
|
# Software selection text spoke
#
# Copyright (C) 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Samantha N. Bueno <[email protected]>
#
from pyanaconda.flags import flags
from pyanaconda.ui.categories.software import SoftwareCategory
from pyanaconda.ui.tui.spokes import NormalTUISpoke
from pyanaconda.ui.tui.simpleline import TextWidget, ColumnWidget, CheckboxWidget
from pyanaconda.threads import threadMgr, AnacondaThread
from pyanaconda.packaging import DependencyError, PackagePayload, payloadMgr
from pyanaconda.i18n import N_, _, C_
from pyanaconda.constants import THREAD_PAYLOAD
from pyanaconda.constants import THREAD_CHECK_SOFTWARE
from pyanaconda.constants import THREAD_SOFTWARE_WATCHER
from pyanaconda.constants_text import INPUT_PROCESSED
__all__ = ["SoftwareSpoke"]
class SoftwareSpoke(NormalTUISpoke):
""" Spoke used to read new value of text to represent source repo. """
title = N_("Software selection")
category = SoftwareCategory
def __init__(self, app, data, storage, payload, instclass):
NormalTUISpoke.__init__(self, app, data, storage, payload, instclass)
self.errors = []
self._tx_id = None
self._selection = None
self.environment = None
# for detecting later whether any changes have been made
self._origEnv = None
# are we taking values (package list) from a kickstart file?
self._kickstarted = flags.automatedInstall and self.data.packages.seen
# Register event listeners to update our status on payload events
payloadMgr.addListener(payloadMgr.STATE_START, self._payload_start)
payloadMgr.addListener(payloadMgr.STATE_FINISHED, self._payload_finished)
payloadMgr.addListener(payloadMgr.STATE_ERROR, self._payload_error)
def initialize(self):
# Start a thread to wait for the payload and run the first, automatic
# dependency check
super(SoftwareSpoke, self).initialize()
threadMgr.add(AnacondaThread(name=THREAD_SOFTWARE_WATCHER,
target=self._initialize))
def _initialize(self):
threadMgr.wait(THREAD_PAYLOAD)
if not self._kickstarted:
# If an environment was specified in the instclass, use that.
# Otherwise, select the first environment.
if self.payload.environments:
environments = self.payload.environments
instclass = self.payload.instclass
if instclass and instclass.defaultPackageEnvironment and \
instclass.defaultPackageEnvironment in environments:
self._selection = environments.index(instclass.defaultPackageEnvironment)
else:
self._selection = 0
# Apply the initial selection
self._apply()
def _payload_start(self):
# Source is changing, invalidate the software selection and clear the
# errors
self._selection = None
self.errors = []
def _payload_finished(self):
self.environment = self.data.packages.environment
def _payload_error(self):
self.errors = [payloadMgr.error]
@property
def showable(self):
return isinstance(self.payload, PackagePayload)
@property
def status(self):
""" Where we are in the process """
if self.errors:
return _("Error checking software selection")
if not self.ready:
return _("Processing...")
if not self.payload.baseRepo:
return _("Installation source not set up")
if not self.txid_valid:
return _("Source changed - please verify")
if not self.environment:
# Ks installs with %packages will have an env selected, unless
# they did an install without a desktop environment. This should
# catch that one case.
if self._kickstarted:
return _("Custom software selected")
return _("Nothing selected")
return self.payload.environmentDescription(self.environment)[0]
@property
def completed(self):
""" Make sure our threads are done running and vars are set.
WARNING: This can be called before the spoke is finished initializing
if the spoke starts a thread. It should make sure it doesn't access
things until they are completely set up.
"""
processingDone = self.ready and not self.errors and self.txid_valid
if flags.automatedInstall:
return processingDone and self.payload.baseRepo and self.data.packages.seen
else:
return processingDone and self.payload.baseRepo and self.environment is not None
def refresh(self, args=None):
""" Refresh screen. """
NormalTUISpoke.refresh(self, args)
threadMgr.wait(THREAD_PAYLOAD)
if not self.payload.baseRepo:
message = TextWidget(_("Installation source needs to be set up first."))
self._window.append(message)
# add some more space below
self._window.append(TextWidget(""))
return True
threadMgr.wait(THREAD_CHECK_SOFTWARE)
# put a title above the list and some space below it
self._window.append(TextWidget(_("Base environment")))
self._window.append(TextWidget(""))
environments = self.payload.environments
displayed = []
for env in environments:
name = self.payload.environmentDescription(env)[0]
displayed.append(CheckboxWidget(title="%s" % name, completed=(environments.index(env) == self._selection)))
def _prep(i, w):
""" Do some format magic for display. """
num = TextWidget("%2d)" % (i + 1))
return ColumnWidget([(4, [num]), (None, [w])], 1)
# split list of DE's into two columns
mid = len(environments) / 2
left = [_prep(i, w) for i, w in enumerate(displayed) if i <= mid]
right = [_prep(i, w) for i, w in enumerate(displayed) if i > mid]
cw = ColumnWidget([(38, left), (38, right)], 2)
self._window.append(cw)
return True
def input(self, args, key):
""" Handle the input; this chooses the desktop environment. """
try:
keyid = int(key) - 1
except ValueError:
# TRANSLATORS: 'c' to continue
if key.lower() == C_("TUI|Spoke Navigation", "c") and \
0 <= self._selection < len(self.payload.environments):
self.apply()
self.close()
return INPUT_PROCESSED
else:
return key
if 0 <= keyid < len(self.payload.environments):
self._selection = keyid
return INPUT_PROCESSED
@property
def ready(self):
""" If we're ready to move on. """
return (not threadMgr.get(THREAD_PAYLOAD) and
not threadMgr.get(THREAD_CHECK_SOFTWARE) and
not threadMgr.get(THREAD_SOFTWARE_WATCHER))
def apply(self):
""" Apply our selections """
self._apply()
# no longer using values from kickstart
self._kickstarted = False
self.data.packages.seen = True
def _apply(self):
""" Private apply. """
# self._selection can be None during kickstart installation
if self._selection is not None and 0 <= self._selection < len(self.payload.environments):
self.environment = self.payload.environments[self._selection]
else:
self.environment = None
return
changed = False
# Not a kickstart with packages; set up the selected environment
if not self._kickstarted:
if not self._origEnv:
# nothing selected before, select the environment
self.payload.selectEnvironment(self.environment)
changed = True
elif self._origEnv != self.environment:
# environment changed, clear the list of packages and select the new
# one
self.payload.data.packages.groupList = []
self.payload.selectEnvironment(self.environment)
changed = True
self._origEnv = self.environment
# Check the software selection
if changed:
threadMgr.add(AnacondaThread(name=THREAD_CHECK_SOFTWARE,
target=self.checkSoftwareSelection))
def checkSoftwareSelection(self):
""" Depsolving """
try:
self.payload.checkSoftwareSelection()
except DependencyError as e:
self.errors = [str(e)]
self._tx_id = None
else:
self._tx_id = self.payload.txID
@property
def txid_valid(self):
""" Whether we have a valid dnf tx id. """
return self._tx_id == self.payload.txID
|
gpl-2.0
|
mjirayu/sit_academy
|
common/djangoapps/request_cache/middleware.py
|
98
|
1225
|
import threading
class _RequestCache(threading.local):
"""
A thread-local for storing the per-request cache.
"""
def __init__(self):
super(_RequestCache, self).__init__()
self.data = {}
self.request = None
REQUEST_CACHE = _RequestCache()
class RequestCache(object):
@classmethod
def get_request_cache(cls, name=None):
"""
This method is deprecated. Please use :func:`request_cache.get_cache`.
"""
if name is None:
return REQUEST_CACHE
else:
return REQUEST_CACHE.data.setdefault(name, {})
@classmethod
def get_current_request(cls):
"""
This method is deprecated. Please use :func:`request_cache.get_request`.
"""
return REQUEST_CACHE.request
@classmethod
def clear_request_cache(cls):
"""
Empty the request cache.
"""
REQUEST_CACHE.data = {}
REQUEST_CACHE.request = None
def process_request(self, request):
self.clear_request_cache()
REQUEST_CACHE.request = request
return None
def process_response(self, request, response):
self.clear_request_cache()
return response
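# Usage sketch (illustrative, not part of the original module): the class acts
# as Django-style middleware, clearing the thread-local cache around each
# request. The DummyRequest class below is a hypothetical stand-in for a real
# HttpRequest.
if __name__ == '__main__':
    class DummyRequest(object):
        pass
    middleware = RequestCache()
    request = DummyRequest()
    middleware.process_request(request)
    cache = RequestCache.get_request_cache('my_namespace')
    cache['answer'] = 42
    assert RequestCache.get_current_request() is request
    # process_response wipes both the data and the stored request.
    middleware.process_response(request, response=None)
    assert RequestCache.get_request_cache('my_namespace') == {}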
|
agpl-3.0
|
hendrix/hendrix
|
test/test_resources.py
|
2
|
2267
|
import unittest
from twisted.web.resource import getChildForRequest, NoResource
from twisted.web.test.requesthelper import DummyRequest
try:
from unittest import mock
except ImportError:
import mock
from hendrix.facilities.resources import HendrixResource, NamedResource, WSGIResource
class TestHendrixResource(unittest.TestCase):
def setUp(self):
path = '/path/to/child/'
self.res = NamedResource(path)
self.hr = HendrixResource(None, None, None)
self.hr.putNamedChild(self.res)
def test_putNamedChild_success(self):
with mock.patch('hendrix.facilities.resources.WSGIResource') as wsgi:
request = DummyRequest(['path', 'to', 'child'])
actual_res = getChildForRequest(self.hr, request)
self.assertEqual(actual_res, self.res)
def test_putNamedChild_very_wrong_request(self):
"check that requests outside of the children go to the WSGIResoure"
with mock.patch('hendrix.facilities.resources.WSGIResource') as wsgi:
request = DummyRequest(['very', 'wrong', 'uri'])
actual_res = getChildForRequest(self.hr, request)
self.assertIsInstance(actual_res, WSGIResource)
def test_putNamedChild_sort_of_wrong_request(self):
"requests to incorrect subpaths go to NoResource"
with mock.patch('hendrix.facilities.resources.WSGIResource') as wsgi:
request = DummyRequest(['path', 'to', 'wrong'])
actual_res = getChildForRequest(self.hr, request)
self.assertIsInstance(actual_res, NoResource)
def test_putNamedChild_duplicate(self):
"check that duplicate resources work"
with mock.patch('hendrix.facilities.resources.WSGIResource') as wsgi:
request = DummyRequest(['path', 'to', 'child'])
actual_res = getChildForRequest(self.hr, request)
self.assertEqual(actual_res, self.res) # Before duplicate
duplicate = NamedResource(self.res.namespace)
self.hr.putNamedChild(duplicate)
request = DummyRequest(['path', 'to', 'child'])
actual_duplicate_res = getChildForRequest(self.hr, request)
self.assertEqual(duplicate, actual_duplicate_res) # After duplicate
|
mit
|
martindemello/vroom
|
vroom/shell.py
|
3
|
10594
|
"""Vroom fake shell bridge."""
import json
import os
import os.path
import pickle
import pipes
import re
import tempfile
import vroom
import vroom.controls
import vroom.test
from vroom.result import Result
# Pylint is not smart enough to notice that all the exceptions here inherit from
# vroom.test.Failure, which is a standard Exception.
# pylint: disable-msg=nonstandard-exception
VROOMFILE_VAR = 'VROOMFILE'
VROOMDIR_VAR = 'VROOMDIR'
LOG_FILENAME_VAR = 'VROOM_SHELL_LOGFILE'
CONTROL_FILENAME_VAR = 'VROOM_SHELL_CONTROLLFILE'
ERROR_FILENAME_VAR = 'VROOM_SHELL_ERRORFILE'
CONTROL = vroom.Specification(
EXPECT='expect',
RESPOND='respond')
STRICTNESS = vroom.Specification(
STRICT='STRICT',
RELAXED='RELAXED')
OUTCHANNEL = vroom.Specification(
COMMAND='command',
STDOUT='stdout',
STDERR='stderr',
STATUS='status')
DEFAULT_MODE = vroom.controls.MODE.REGEX
def Load(filename):
"""Loads a shell file into python space.
Args:
filename: The shell file to load.
Returns:
The file contents.
Raises:
FakeShellNotWorking
"""
try:
with open(filename, 'rb') as f:
return pickle.load(f)
except IOError:
raise FakeShellNotWorking
def Send(filename, data):
"""Sends python data to a shell file.
Args:
filename: The shell file to send to.
data: The python data to send.
"""
with open(filename, 'wb') as f:
pickle.dump(data, f)
class Communicator(object):
"""Object to communicate with the fake shell."""
def __init__(self, filename, env, writer):
self.vroom_env = env
self.writer = writer.syscalls
self.commands_writer = writer.commands
_, self.control_filename = tempfile.mkstemp()
_, self.log_filename = tempfile.mkstemp()
_, self.error_filename = tempfile.mkstemp()
Send(self.control_filename, [])
Send(self.log_filename, [])
Send(self.error_filename, [])
self.env = os.environ.copy()
self.env[VROOMFILE_VAR] = filename
self.env[VROOMDIR_VAR] = os.path.dirname(filename) or '.'
self.env[vroom.shell.LOG_FILENAME_VAR] = self.log_filename
self.env[vroom.shell.CONTROL_FILENAME_VAR] = self.control_filename
self.env[vroom.shell.ERROR_FILENAME_VAR] = self.error_filename
self._copied_logs = 0
def Control(self, hijacks):
"""Tell the shell the system control specifications."""
existing = Load(self.control_filename)
Send(self.control_filename, existing + hijacks)
def Verify(self):
"""Checks that system output was caught and handled satisfactorily.
Returns:
Result.Error(vroom.test.Failures[FakeShellFailure]):
If any shell failures were detected.
Result.Success(): Otherwise.
Raises:
FakeShellNotWorking: If it can't load the shell file.
"""
# Copy any new logs into the logger.
logs = Load(self.log_filename)
for log in logs[self._copied_logs:]:
self.writer.Log(log)
self._copied_logs = len(logs)
failures = []
# Check for shell errors.
errors = Load(self.error_filename)
if errors:
failures.append(FakeShellNotWorking(errors))
commands_logs = self.commands_writer.Logs()
# Check that all controls have been handled.
controls = Load(self.control_filename)
if controls:
Send(self.control_filename, [])
missed = controls[0]
if missed.expectation:
failures.append(SystemNotCalled(logs, controls, commands_logs))
failures.append(NoChanceForResponse(
logs, missed, commands_logs))
# Check for unexpected calls, if the user is into that.
if self.vroom_env.system_strictness == STRICTNESS.STRICT:
logs = self.writer.Logs()
if [log for log in logs if log.TYPE == vroom.test.LOG.UNEXPECTED]:
failures.append(UnexpectedSystemCalls(logs, commands_logs))
if failures:
return Result.Error(vroom.test.Failures(failures))
else:
return Result.Success()
class Hijack(object):
"""An object used to tell the fake shell what to do about system calls.
It can contain a single expectation (of a system call) and any number of
responses (text to return when the expected call is seen).
If no expectation is given, it will match any command.
If no responses are given, the command will be allowed through the fake shell.
The Hijack can be 'Open' or 'Closed': we need a way to distinguish
between this:
$ One
$ Two
and this:
$ One
$ Two
The former responds "One\\nTwo" to any command. The latter responds "One" to
the first command, whatever it may be, and then "Two" to the next command.
The solution is that line breaks "Close" an expectation. In this way, we can
tell if a new response should be part of the previous expectation or part of
a new one.
"""
def __init__(self, fakecmd, expectation=None, mode=None):
self.closed = False
self.fakecmd = fakecmd
self.response = {}
self.expectation = expectation
self.mode = mode or DEFAULT_MODE
def Response(self, command):
"""Returns the command that should be done in place of the true command.
This will either be the original command or a call to respond.vroomfaker.
Args:
command: The vim-requested command.
Returns:
The user-specified command.
"""
if self.expectation is not None:
if not vroom.test.Matches(self.expectation, self.mode, command):
return False
# We don't want to do this on init because regexes don't repr() as nicely as
# strings do.
if self.expectation and self.mode == vroom.controls.MODE.REGEX:
try:
match_regex = re.compile(self.expectation)
except re.error as e:
raise vroom.ParseError("Can't match command. Invalid regex. %s'" % e)
else:
match_regex = re.compile(r'.*')
# The actual response won't be exactly like the internal response, because
# we've got to do some regex group binding magic.
response = {}
# Expand all of the responders that want to be bound to the regex.
for channel in (
OUTCHANNEL.COMMAND,
OUTCHANNEL.STDOUT,
OUTCHANNEL.STDERR):
for line in self.response.get(channel, []):
# We do an re.sub() regardless of whether the control was bound as
# a regex: this forces you to escape consistently between all match
# groups, which will help prevent your tests from breaking if you later
# switch the command matching to regex from verbatim/glob.
try:
line = match_regex.sub(line, command)
except re.error as e:
# 'invalid group reference' is the expected message here.
# Unfortunately the python re module doesn't differentiate its
# exceptions well.
if self.mode != vroom.controls.MODE.REGEX:
raise vroom.ParseError(
'Substitution error. '
'Ensure that matchgroups (such as \\1) are escaped.')
raise vroom.ParseError('Substitution error: %s.' % e)
response.setdefault(channel, []).append(line)
# The return status can't be regex-bound.
if OUTCHANNEL.STATUS in self.response:
response[OUTCHANNEL.STATUS] = self.response[OUTCHANNEL.STATUS]
# If we actually want to do anything, call out to the responder.
if response:
return '%s %s' % (self.fakecmd, pipes.quote(json.dumps(response)))
return command
def Respond(self, line, channel=None):
"""Adds a response to this expectation.
Args:
line: The response to add.
channel: The output channel to respond with 'line' in.
"""
if channel is None:
channel = OUTCHANNEL.STDOUT
if channel == OUTCHANNEL.COMMAND:
self.response.setdefault(OUTCHANNEL.COMMAND, []).append(line)
elif channel == OUTCHANNEL.STDOUT:
self.response.setdefault(OUTCHANNEL.STDOUT, []).append(line)
elif channel == OUTCHANNEL.STDERR:
self.response.setdefault(OUTCHANNEL.STDERR, []).append(line)
elif channel == OUTCHANNEL.STATUS:
if OUTCHANNEL.STATUS in self.response:
raise vroom.ParseError('A system call cannot return two statuses!')
try:
status = int(line)
except ValueError:
raise vroom.ParseError('Returned status must be a number.')
self.response[OUTCHANNEL.STATUS] = status
else:
assert False, 'Unrecognized output channel word.'
def __repr__(self):
return 'Hijack(%s, %s, %s)' % (self.expectation, self.mode, self.response)
def __str__(self):
out = ''
# %07s pads things out to match with "COMMAND:"
if self.expectation is not None:
out += ' EXPECT:\t%s (%s mode)\n' % (self.expectation, self.mode)
rejoiner = '\n%07s\t' % ''
if OUTCHANNEL.COMMAND in self.response:
out += 'COMMAND:\t%s\n' % rejoiner.join(self.response[OUTCHANNEL.COMMAND])
if OUTCHANNEL.STDOUT in self.response:
out += ' STDOUT:\t%s\n' % rejoiner.join(self.response[OUTCHANNEL.STDOUT])
if OUTCHANNEL.STDERR in self.response:
out += ' STDERR:\t%s\n' % rejoiner.join(self.response[OUTCHANNEL.STDERR])
if 'status' in self.response:
out += ' STATUS:\t%s' % self.response['status']
return out.rstrip('\n')
class FakeShellNotWorking(Exception):
"""Called when the fake shell is not working."""
def __init__(self, errors):
self.shell_errors = errors
super(FakeShellNotWorking, self).__init__()
def __str__(self):
return 'The fake shell is not working as anticipated.'
class FakeShellFailure(vroom.test.Failure):
"""Generic fake shell error. Please raise its implementors."""
DESCRIPTION = 'System failure'
CONTEXT = 12
def __init__(self, logs, commands, message=None):
self.syscalls = logs[-self.CONTEXT:]
self.commands = commands
super(FakeShellFailure, self).__init__(message or self.DESCRIPTION)
class UnexpectedSystemCalls(FakeShellFailure):
"""Raised when a system call is made unexpectedly."""
DESCRIPTION = 'Unexpected system call.'
class SystemNotCalled(FakeShellFailure):
"""Raised when an expected system call is not made."""
DESCRIPTION = 'Expected system call not received.'
def __init__(self, logs, expectations, commands):
self.expectations = expectations
super(SystemNotCalled, self).__init__(logs, commands)
class NoChanceForResponse(FakeShellFailure):
"""Raised when no system calls were made, but a response was specified."""
DESCRIPTION = 'Got no chance to inject response: \n%s'
def __init__(self, logs, response, commands):
super(NoChanceForResponse, self).__init__(
logs, commands, self.DESCRIPTION % response)
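# Usage sketch (illustrative, not part of the original module): build a Hijack
# that expects a command and responds on stdout and with a return status, then
# render it. The fake command name 'respond.vroomfaker' mirrors the docstrings
# above; the 'make .*' expectation is hypothetical.
if __name__ == '__main__':
    hijack = Hijack('respond.vroomfaker', expectation='make .*')
    hijack.Respond('build ok')                          # stdout by default
    hijack.Respond('0', channel=OUTCHANNEL.STATUS)      # exit status
    # __str__ renders the EXPECT, STDOUT and STATUS lines for the log.
    print(hijack)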
|
apache-2.0
|
chutsu/robotics
|
prototype/tests/vision/test_geometry.py
|
1
|
2838
|
import unittest
import numpy as np
import matplotlib.pylab as plt
from mpl_toolkits.mplot3d import Axes3D # NOQA
from prototype.utils.utils import roty
from prototype.utils.utils import deg2rad
from prototype.vision.common import focal_length
from prototype.vision.common import camera_intrinsics
from prototype.vision.common import rand3dfeatures
from prototype.vision.camera.camera_model import PinholeCameraModel
from prototype.vision.geometry import triangulate
from prototype.vision.geometry import triangulate_point
class GeometryTest(unittest.TestCase):
def setUp(self):
# Generate random features
nb_features = 100
feature_bounds = {
"x": {"min": -1.0, "max": 1.0},
"y": {"min": -1.0, "max": 1.0},
"z": {"min": 10.0, "max": 20.0}
}
self.features = rand3dfeatures(nb_features, feature_bounds)
# Pinhole Camera model
image_width = 640
image_height = 480
fov = 60
fx, fy = focal_length(image_width, image_height, fov)
cx, cy = (image_width / 2.0, image_height / 2.0)
K = camera_intrinsics(fx, fy, cx, cy)
self.cam = PinholeCameraModel(image_width, image_height, K)
# Rotation and translation of camera 0 and camera 1
self.R_0 = np.eye(3)
self.t_0 = np.zeros((3, 1))
self.R_1 = roty(deg2rad(10.0))
self.t_1 = np.array([1.0, 0.0, 0.0]).reshape((3, 1))
# Points as observed by camera 0 and camera 1
self.obs0 = self.project_points(self.features, self.cam,
self.R_0, self.t_0)
self.obs1 = self.project_points(self.features, self.cam,
self.R_1, self.t_1)
def project_points(self, features, camera, R, t):
obs = []
# Make each 3D feature homogeneous, project it, and store the pixel measurement
for f in features.T:
x = camera.project(f, R, t)
obs.append(x.ravel()[0:2])
return np.array(obs).T
def test_triangulate_point(self):
# Triangulate a single point
x1 = self.obs0[:, 0]
x2 = self.obs1[:, 0]
P1 = self.cam.P(self.R_0, self.t_0)
P2 = self.cam.P(self.R_1, self.t_1)
X = triangulate_point(x1, x2, P1, P2)
X = X[0:3]
# Assert
self.assertTrue(np.linalg.norm(X - self.features[:, 0]) < 0.1)
def test_triangulate(self):
# Triangulate a set of features
x1 = self.obs0
x2 = self.obs1
P1 = self.cam.P(self.R_0, self.t_0)
P2 = self.cam.P(self.R_1, self.t_1)
result = triangulate(x1, x2, P1, P2)
# Assert
for i in range(result.shape[1]):
X = result[:3, i]
self.assertTrue(np.linalg.norm(X - self.features[:, i]) < 0.1)
|
gpl-3.0
|
geodynamics/gale
|
config/scons/scons-local-1.2.0/SCons/Variables/__init__.py
|
12
|
10733
|
"""engine.SCons.Variables
This file defines the Variables class that is used to add user-friendly
customizable variables to an SCons build.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/__init__.py 3842 2008/12/20 22:59:52 scons"
import os.path
import string
import sys
import SCons.Environment
import SCons.Errors
import SCons.Util
import SCons.Warnings
from BoolVariable import BoolVariable # okay
from EnumVariable import EnumVariable # okay
from ListVariable import ListVariable # so-so
from PackageVariable import PackageVariable # so-so
from PathVariable import PathVariable # okay
class Variables:
instance=None
"""
Holds all the options, updates the environment with the variables,
and renders the help text.
"""
def __init__(self, files=[], args={}, is_global=1):
"""
files - [optional] List of option configuration files to load
(backward compatibility) If a single string is passed it is
automatically placed in a file list
"""
self.options = []
self.args = args
if not SCons.Util.is_List(files):
if files:
files = [ files ]
else:
files = []
self.files = files
self.unknown = {}
# create the singleton instance
if is_global:
self=Variables.instance
if not Variables.instance:
Variables.instance=self
def _do_add(self, key, help="", default=None, validator=None, converter=None):
class Variable:
pass
option = Variable()
# if we get a list or a tuple, we take the first element as the
# option key and store the remaining in aliases.
if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
option.key = key[0]
option.aliases = key[1:]
else:
option.key = key
option.aliases = [ key ]
option.help = help
option.default = default
option.validator = validator
option.converter = converter
self.options.append(option)
def keys(self):
"""
Returns the keywords for the options
"""
return map(lambda o: o.key, self.options)
def Add(self, key, help="", default=None, validator=None, converter=None, **kw):
"""
Add an option.
key - the name of the variable, or a list or tuple of arguments
help - optional help text for the options
default - optional default value
validator - optional function that is called to validate the option's value
Called with (key, value, environment)
converter - optional function that is called to convert the option's value before
putting it in the environment.
"""
if SCons.Util.is_List(key) or type(key) == type(()):
apply(self._do_add, key)
return
if not SCons.Util.is_String(key) or \
not SCons.Environment.is_valid_construction_var(key):
raise SCons.Errors.UserError, "Illegal Variables.Add() key `%s'" % str(key)
self._do_add(key, help, default, validator, converter)
def AddVariables(self, *optlist):
"""
Add a list of options.
Each list element is a tuple/list of arguments to be passed on
to the underlying method for adding options.
Example:
opt.AddVariables(
('debug', '', 0),
('CC', 'The C compiler'),
('VALIDATE', 'An option for testing validation', 'notset',
validator, None),
)
"""
for o in optlist:
apply(self._do_add, o)
def Update(self, env, args=None):
"""
Update an environment with the option variables.
env - the environment to update.
"""
values = {}
# first set the defaults:
for option in self.options:
if not option.default is None:
values[option.key] = option.default
# next set the value specified in the options file
for filename in self.files:
if os.path.exists(filename):
dir = os.path.split(os.path.abspath(filename))[0]
if dir:
sys.path.insert(0, dir)
try:
values['__name__'] = filename
execfile(filename, {}, values)
finally:
if dir:
del sys.path[0]
del values['__name__']
# set the values specified on the command line
if args is None:
args = self.args
for arg, value in args.items():
added = False
for option in self.options:
if arg in option.aliases + [ option.key ]:
values[option.key] = value
added = True
if not added:
self.unknown[arg] = value
# put the variables in the environment:
# (don't copy over variables that are not declared as options)
for option in self.options:
try:
env[option.key] = values[option.key]
except KeyError:
pass
# Call the convert functions:
for option in self.options:
if option.converter and values.has_key(option.key):
value = env.subst('${%s}'%option.key)
try:
try:
env[option.key] = option.converter(value)
except TypeError:
env[option.key] = option.converter(value, env)
except ValueError, x:
raise SCons.Errors.UserError, 'Error converting option: %s\n%s'%(option.key, x)
# Finally validate the values:
for option in self.options:
if option.validator and values.has_key(option.key):
option.validator(option.key, env.subst('${%s}'%option.key), env)
def UnknownVariables(self):
"""
Returns any options from the specified argument lists that do not
correspond to options declared in this object.
"""
return self.unknown
def Save(self, filename, env):
"""
Saves all the options in the given file. This file can
then be used to load the options on the next run. This can be used
to create an option cache file.
filename - Name of the file to save into
env - the environment get the option values from
"""
# Create the file and write out the header
try:
fh = open(filename, 'w')
try:
# Make an assignment in the file for each option
# within the environment that was assigned a value
# other than the default.
for option in self.options:
try:
value = env[option.key]
try:
prepare = value.prepare_to_store
except AttributeError:
try:
eval(repr(value))
except KeyboardInterrupt:
raise
except:
# Convert stuff that has a repr() that
# cannot be evaluated into a string
value = SCons.Util.to_String(value)
else:
value = prepare()
defaultVal = env.subst(SCons.Util.to_String(option.default))
if option.converter:
defaultVal = option.converter(defaultVal)
if str(env.subst('${%s}' % option.key)) != str(defaultVal):
fh.write('%s = %s\n' % (option.key, repr(value)))
except KeyError:
pass
finally:
fh.close()
except IOError, x:
raise SCons.Errors.UserError, 'Error writing options to file: %s\n%s' % (filename, x)
def GenerateHelpText(self, env, sort=None):
"""
Generate the help text for the options.
env - an environment that is used to get the current values
of the options.
"""
if sort:
options = self.options[:]
options.sort(lambda x,y,func=sort: func(x.key,y.key))
else:
options = self.options
def format(opt, self=self, env=env):
if env.has_key(opt.key):
actual = env.subst('${%s}' % opt.key)
else:
actual = None
return self.FormatVariableHelpText(env, opt.key, opt.help, opt.default, actual, opt.aliases)
lines = filter(None, map(format, options))
return string.join(lines, '')
format = '\n%s: %s\n default: %s\n actual: %s\n'
format_ = '\n%s: %s\n default: %s\n actual: %s\n aliases: %s\n'
def FormatVariableHelpText(self, env, key, help, default, actual, aliases=[]):
# Don't display the key name itself as an alias.
aliases = filter(lambda a, k=key: a != k, aliases)
if len(aliases)==0:
return self.format % (key, help, default, actual)
else:
return self.format_ % (key, help, default, actual, aliases)
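# Usage sketch (illustrative, not part of the original file): declare an
# option, feed in command-line style arguments, and update a minimal stand-in
# for an SCons environment. FakeEnvironment is hypothetical; real code passes
# an SCons.Environment.Environment.
if __name__ == '__main__':
    class FakeEnvironment:
        def __init__(self):
            self.d = {}
        def __setitem__(self, key, value):
            self.d[key] = value
        def __getitem__(self, key):
            return self.d[key]
        def subst(self, s):
            return s
    vars = Variables(files=[], args={'debug': '1', 'bogus': 'x'}, is_global=0)
    vars.Add('debug', 'debug build', 0)
    env = FakeEnvironment()
    vars.Update(env)
    print env['debug']            # '1', taken from args over the default
    print vars.UnknownVariables() # {'bogus': 'x'}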
|
gpl-2.0
|
eladhoffer/convNet.pytorch
|
models/googlenet.py
|
1
|
4998
|
from collections import OrderedDict
import torch
import torch.nn as nn
__all__ = ['googlenet']
class Inception_v1_GoogLeNet(nn.Module):
input_side = 227
rescale = 255.0
rgb_mean = [122.7717, 115.9465, 102.9801]
rgb_std = [1, 1, 1]
def __init__(self, num_classes=1000):
super(Inception_v1_GoogLeNet, self).__init__()
self.num_classes = num_classes
self.features = nn.Sequential(
OrderedDict([
('conv1', nn.Sequential(OrderedDict([
('7x7_s2', nn.Conv2d(3, 64, (7, 7), (2, 2), (3, 3), bias=False)),
('7x7_s2_bn', nn.BatchNorm2d(64, affine=True)),
('relu1', nn.ReLU(True)),
('pool1', nn.MaxPool2d((3, 3), (2, 2), padding=(1,1)))
]))),
('conv2', nn.Sequential(OrderedDict([
('3x3_reduce', nn.Conv2d(64, 64, (1, 1), (1, 1), (0, 0), bias=False)),
('3x3_reduce_bn', nn.BatchNorm2d(64, affine=True)),
('relu1', nn.ReLU(True)),
('3x3', nn.Conv2d(64, 192, (3, 3), (1, 1), (1, 1), bias=False)),
('3x3_bn', nn.BatchNorm2d(192, affine=True)),
('relu2', nn.ReLU(True)),
('pool2', nn.MaxPool2d((3, 3), (2, 2), padding=(1,1)))
]))),
('inception_3a', InceptionModule(192, 64, 96, 128, 16, 32, 32)),
('inception_3b', InceptionModule(256, 128, 128, 192, 32, 96, 64)),
('pool3', nn.MaxPool2d((3, 3), (2, 2), padding=(1,1))),
('inception_4a', InceptionModule(480, 192, 96, 208, 16, 48, 64)),
('inception_4b', InceptionModule(512, 160, 112, 224, 24, 64, 64)),
('inception_4c', InceptionModule(512, 128, 128, 256, 24, 64, 64)),
('inception_4d', InceptionModule(512, 112, 144, 288, 32, 64, 64)),
('inception_4e', InceptionModule(528, 256, 160, 320, 32, 128, 128)),
('pool4', nn.MaxPool2d((3, 3), (2, 2), padding=(1,1))),
('inception_5a', InceptionModule(832, 256, 160, 320, 32, 128, 128)),
('inception_5b', InceptionModule(832, 384, 192, 384, 48, 128, 128)),
('pool5', nn.AvgPool2d((7, 7), (1, 1))),
('drop5', nn.Dropout(0.2))
]))
self.classifier = nn.Linear(1024, self.num_classes)
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': 1e-1,
'weight_decay': 1e-4, 'momentum': 0.9},
{'epoch': 30, 'lr': 1e-2},
{'epoch': 60, 'lr': 1e-3, 'weight_decay': 0},
{'epoch': 90, 'lr': 1e-3, 'optimizer': 'Adam'}
]
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
class InceptionModule(nn.Module):
def __init__(self, inplane, outplane_a1x1, outplane_b3x3_reduce, outplane_b3x3, outplane_c5x5_reduce, outplane_c5x5,
outplane_pool_proj):
super(InceptionModule, self).__init__()
a = nn.Sequential(OrderedDict([
('1x1', nn.Conv2d(inplane, outplane_a1x1, (1, 1), (1, 1), (0, 0), bias=False)),
('1x1_bn', nn.BatchNorm2d(outplane_a1x1, affine=True)),
('1x1_relu', nn.ReLU(True))
]))
b = nn.Sequential(OrderedDict([
('3x3_reduce', nn.Conv2d(inplane, outplane_b3x3_reduce, (1, 1), (1, 1), (0, 0), bias=False)),
('3x3_reduce_bn', nn.BatchNorm2d(outplane_b3x3_reduce, affine=True)),
('3x3_relu1', nn.ReLU(True)),
('3x3', nn.Conv2d(outplane_b3x3_reduce, outplane_b3x3, (3, 3), (1, 1), (1, 1), bias=False)),
('3x3_bn', nn.BatchNorm2d(outplane_b3x3, affine=True)),
('3x3_relu2', nn.ReLU(True))
]))
c = nn.Sequential(OrderedDict([
('5x5_reduce', nn.Conv2d(inplane, outplane_c5x5_reduce, (1, 1), (1, 1), (0, 0), bias=False)),
('5x5_reduce_bn', nn.BatchNorm2d(outplane_c5x5_reduce, affine=True)),
('5x5_relu1', nn.ReLU(True)),
('5x5', nn.Conv2d(outplane_c5x5_reduce, outplane_c5x5, (5, 5), (1, 1), (2, 2), bias=False)),
('5x5_bn', nn.BatchNorm2d(outplane_c5x5, affine=True)),
('5x5_relu2', nn.ReLU(True))
]))
d = nn.Sequential(OrderedDict([
('pool_pool', nn.MaxPool2d((3, 3), (1, 1), (1, 1))),
('pool_proj', nn.Conv2d(inplane, outplane_pool_proj, (1, 1), (1, 1), (0, 0))),
('pool_proj_bn', nn.BatchNorm2d(outplane_pool_proj, affine=True)),
('pool_relu', nn.ReLU(True))
]))
for container in [a, b, c, d]:
for name, module in container.named_children():
self.add_module(name, module)
self.branches = [a, b, c, d]
def forward(self, input):
return torch.cat([branch(input) for branch in self.branches], 1)
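# Note: InceptionModule concatenates its branches along the channel dim,
# so the output has outplane_a1x1 + outplane_b3x3 + outplane_c5x5 +
# outplane_pool_proj channels; e.g. inception_3a yields
# 64 + 128 + 32 + 32 = 256, matching inception_3b's inplane above.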
def googlenet(**kwargs):
    # kwargs is a plain dict, so use dict.get() rather than getattr()
    num_classes = kwargs.get('num_classes', 1000)
return Inception_v1_GoogLeNet(num_classes)
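# --- Usage sketch (not part of the original module) ---
# A minimal smoke test, assuming torch is installed.  Note that with the
# padded poolings above, a 224x224 input reaches pool5 as a 7x7 map, so
# the flattened features match the 1024-unit classifier.
if __name__ == '__main__':
    model = googlenet(num_classes=10)
    x = torch.randn(1, 3, 224, 224)
    y = model(x)
    print(y.size())  # expected: torch.Size([1, 10])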
|
mit
|
bhdn/jurt
|
jurtlib/su.py
|
1
|
14276
|
#
# Copyright (c) 2011 Bogdano Arendartchuk <[email protected]>
#
# Written by Bogdano Arendartchuk <[email protected]>
#
# This file is part of Jurt Build Bot.
#
# Jurt Build Bot is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Jurt Build Bot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Jurt Build Bot; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import os
import select
import subprocess
import logging
import shlex
from jurtlib import Error, CommandError, SetupError
from jurtlib.registry import Registry
from cStringIO import StringIO
logger = logging.getLogger("jurt.su")
def my_username():
import pwd
uid = os.geteuid()
try:
pw = pwd.getpwuid(uid)
name = pw.pw_name
except KeyError:
name = str(uid)
return name, uid
class CommandTimeout(Error):
pass
class SuError(Error):
pass
class AgentError(SuError):
pass
class SudoNotSetup(SuError):
pass
class SuWrapper:
def add_user(self, username, uid, gid):
raise NotImplementedError
def copy(self, srcpath, dstpath, uid=None, gid=None, mode=None,
cheap=False):
raise NotImplementedError
def copyout(self, srcpaths, dstpath, uid=None, gid=None, mode=None,
cheap=False):
raise NotImplementedError
def run_package_manager(self, pmname, args):
raise NotImplementedError
class JurtRootWrapper(SuWrapper):
def __init__(self, targetname, suconf, globalconf):
self.targetname = targetname
self.sucmd = shlex.split(suconf.sudo_command)
self.jurtrootcmd = shlex.split(suconf.jurt_root_command_command)
self.cmdpolltime = float(suconf.command_poll_time)
self.builduser = suconf.build_user
self.agentrunning = False
self.agentproc = None
self.agentcmdline = None
self.agentcookie = str(id(self))
def start(self):
cmd = self.sucmd[:]
cmd.extend(self.jurtrootcmd)
cmd.append("--agent")
cmd.extend(("--cookie", self.agentcookie))
logger.debug("starting the superuser agent with %s", cmd)
try:
proc = subprocess.Popen(args=cmd, shell=False,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
stdin=subprocess.PIPE, bufsize=0)
except OSError, e:
cmdline = subprocess.list2cmdline(cmd)
raise SetupError, ("failed to execute the superuser "
"agent %r: %s" % (cmdline, e))
self.agentcmdline = cmd
self.agentproc = proc
self.agentrunning = True
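    # Agent stderr protocol (inferred from _check_agent_output below):
    # the agent ends each command by writing its cookie followed by a
    # status word to stderr.  Hypothetically, for a cookie of "42":
    #     "...output...\n42 OK\n"      -> returncode 0
    #     "...output...\n42 ERROR 7\n" -> returncode 7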
def _check_agent_output(self, data):
returncode = None
newdata = data
magic = "\n" + self.agentcookie
index = data.find(magic)
if index != -1:
newdata = data[:index]
tail = data[index:]
status = tail.split(None, 2)[1]
if status == "OK":
returncode = 0
elif status == "ERROR":
logger.debug("agent ERROR line: %s", tail)
try:
rawreason = tail.split(None, 3)[2]
except IndexError:
pass # ignore invalid stderr messages
else:
try:
reason = int(rawreason)
except ValueError:
reason = 1
returncode = reason
return returncode, newdata
def _collect_from_agent(self, targetfile, outputlogger):
rfd = self.agentproc.stdout.fileno()
efd = self.agentproc.stderr.fileno()
rl = [efd, rfd]
returncode = None
done = False
while not done:
try:
nrl, _, nxl = select.select(rl, [], rl,
self.cmdpolltime)
except KeyboardInterrupt:
logger.debug("root agent possibly got SIGINT, we'd "
"better reap it to allow starting a new one\n")
self.agentproc.wait()
self.agentrunning = False
raise
if rfd in nrl:
data = os.read(rfd, 8196)
                if not data:
                    # FIXME: an empty read means EOF on the agent's
                    # stdout; this should be handled properly
                    pass
targetfile.write(data)
if efd in nrl:
data = os.read(efd, 8196)
returncode, newdata = self._check_agent_output(data)
targetfile.write(newdata)
if returncode is not None:
done = True
if self.agentproc.poll() is not None:
self.agentrunning = False
                # If we are using the outputlogger, just fail with
                # CommandError and leave the stack trace available in
                # the log files; otherwise, fail with the output
                # collected from targetfile:
                if outputlogger:
                    raise CommandError(self.agentproc.returncode,
                        self.agentcmdline, "(output available in log files)")
                else:
                    raise AgentError, ("Ouch! There was an unhandled "
                        "exception in the root helper "
                        "agent:\n%s\n" % (targetfile.getvalue()))
return returncode
def _exec_wrapper(self, type, args, root=None, arch=None,
outputlogger=None, timeout=None, ignoreerrors=False,
interactive=False, quiet=False, ignorestderr=False,
remount=False):
assert not (interactive and outputlogger)
basecmd = self.jurtrootcmd[:]
basecmd.extend(("--type", type))
basecmd.extend(("--target", self.targetname))
if timeout is not None:
basecmd.extend(("--timeout", str(timeout)))
if root is not None:
basecmd.extend(("--root", root))
if remount:
basecmd.append("--remount")
if arch is not None:
basecmd.extend(("--arch", arch))
if ignoreerrors:
basecmd.append("--ignore-errors")
if quiet:
basecmd.append("--quiet")
if ignorestderr:
basecmd.append("--ignore-stderr")
basecmd.extend(args)
if interactive:
fullcmd = self.sucmd[:]
fullcmd.extend(basecmd)
cmdline = subprocess.list2cmdline(fullcmd)
proc = subprocess.Popen(args=fullcmd, shell=False, bufsize=-1)
proc.wait()
returncode = proc.returncode
output = "(interactive command, no output)"
else:
cmdline = subprocess.list2cmdline(basecmd)
if outputlogger and not quiet:
outputlogger.write(">>>> running privilleged agent: %s\n" % (cmdline))
outputlogger.flush()
if not self.agentrunning:
self.start()
logger.debug("sending command to agent: %s", cmdline)
self.agentproc.stdin.write(cmdline + "\n")
self.agentproc.stdin.flush()
if outputlogger:
targetfile = outputlogger
else:
targetfile = StringIO()
returncode = self._collect_from_agent(targetfile, outputlogger)
if outputlogger:
output = "(error in log available in log files)"
else:
output = targetfile.getvalue()
# check for error:
if returncode != 0:
if timeout is not None and returncode == 124:
# command timeout
raise CommandTimeout, ("command timed out:\n%s\n" %
(cmdline))
raise CommandError(returncode, cmdline, output)
return output
def add_user(self, username, uid, root=None, arch=None):
return self._exec_wrapper("adduser", ["-u", str(uid), username],
root=root, arch=arch)
def run_package_manager(self, pmname, pmargs, root=None, arch=None,
outputlogger=None):
execargs = ["--pm", pmname, "--"]
execargs.extend(pmargs)
return self._exec_wrapper("runpm", execargs, root=root, arch=arch,
outputlogger=outputlogger)
def run_as(self, args, user, root=None, arch=None, timeout=None,
outputlogger=None, quiet=False, ignorestderr=False,
remount=False):
execargs = ["--run-as", user, "--"]
execargs.extend(args)
return self._exec_wrapper("runcmd", execargs, root=root, arch=arch,
timeout=timeout, outputlogger=outputlogger, quiet=quiet,
                ignorestderr=ignorestderr, remount=remount)
def _perm_args(self, uid, gid, mode):
args = []
if uid is not None:
args.extend(("-u", str(uid)))
if gid is not None:
args.extend(("-g", str(gid)))
if mode is not None:
args.extend(("-m", mode))
return args
def rename(self, srcpath, dstpath):
args = [srcpath, dstpath]
return self._exec_wrapper("rename", args)
def mkdir(self, path_or_paths, uid=None, gid=None, mode="0755"):
args = self._perm_args(uid, gid, mode)
if isinstance(path_or_paths, basestring):
args.append(path_or_paths)
else:
args.extend(path_or_paths)
return self._exec_wrapper("mkdir", args)
def create_devs(self, root):
self._exec_wrapper("createdevs", root=root, args=[])
def _copy_args(self, src_path_or_paths, dstpath, uid=None, gid=None, mode="0644"):
args = self._perm_args(uid, gid, mode)
if isinstance(src_path_or_paths, basestring):
args.append(src_path_or_paths)
else:
args.extend(src_path_or_paths)
args.append(dstpath)
return args
def copy(self, *args, **kwargs):
return self._exec_wrapper("copy", self._copy_args(*args, **kwargs))
def copyout(self, *args, **kwargs):
return self._exec_wrapper("copyout", self._copy_args(*args, **kwargs))
def cheapcopy(self, srcpath, dstpath):
args = [srcpath, dstpath]
return self._exec_wrapper("cheapcopy", args)
def mount_virtual_filesystems(self, root, arch=None):
return self._exec_wrapper("mountall", [], root=root, arch=arch)
def umount_virtual_filesystems(self, root, arch=None):
return self._exec_wrapper("umountall", [], root=root, arch=arch,
ignoreerrors=False)
def compress_root(self, root, file):
args = [root, file]
return self._exec_wrapper("rootcompress", args)
def decompress_root(self, file, root):
args = [root, file]
return self._exec_wrapper("rootdecompress", args)
def mount_tmpfs(self, root):
return self._exec_wrapper("mounttmpfs", [root])
def umount_tmpfs(self, root):
return self._exec_wrapper("umounttmpfs", [root])
def post_root_command(self, root=None, arch=None):
return self._exec_wrapper("postcommand", [], root=root, arch=arch)
def interactive_prepare_conf(self, username, root=None, arch=None):
return self._exec_wrapper("interactiveprepare", [username],
root=root, arch=arch)
def interactive_shell(self, username, root=None, arch=None):
return self._exec_wrapper("interactiveshell", [username],
root=root, arch=arch, interactive=True, remount=True)
def test_sudo(self, interactive=True):
try:
return self._exec_wrapper("test", [])
except AgentError, e:
raise SudoNotSetup, str(e)
def btrfs_snapshot(self, from_, to):
logger.debug("creating btrfs snapshot from %s to %s" % (from_, to))
return self._exec_wrapper("btrfssnapshot", [from_, to])
def btrfs_create(self, dest):
logger.debug("creating btrfs subvolume %s" % (dest))
return self._exec_wrapper("btrfscreate", [dest])
def destroy_root(self, path):
return self._exec_wrapper("destroyroot", [path])
class SuChrootWrapper:
def __init__(self, root, suwrapper):
self.root = root
self.suwrapper = suwrapper
def add_user(self, username, uid, gid):
return self.suwrapper.add_user(username, uid, gid, self.root.path)
def run_package_manager(self, pmname, pmargs, outputlogger=None):
return self.suwrapper.run_package_manager(pmname, pmargs,
root=self.root.path, arch=self.root.arch, outputlogger=outputlogger)
def run_as(self, args, user, timeout=None, outputlogger=None,
quiet=False, ignorestderr=False, remount=False):
return self.suwrapper.run_as(args, user=user, root=self.root.path,
arch=self.root.arch, timeout=timeout,
outputlogger=outputlogger, quiet=quiet,
ignorestderr=ignorestderr,
                                    remount=remount)
def post_root_command(self):
return self.suwrapper.post_root_command(root=self.root.path,
arch=self.root.arch)
def interactive_prepare_conf(self, username):
return self.suwrapper.interactive_prepare_conf(username, root=self.root.path,
arch=self.root.arch)
def interactive_shell(self, username):
return self.suwrapper.interactive_shell(username, root=self.root.path,
arch=self.root.arch)
def __getattr__(self, name):
return getattr(self.suwrapper, name)
su_wrappers = Registry("sudo wrapper")
su_wrappers.register("jurt-root-wrapper", JurtRootWrapper)
def get_su_wrapper(targetname, suconf, globalconf):
instance = su_wrappers.get_instance(suconf.su_type, targetname, suconf,
globalconf)
return instance
|
gpl-2.0
|
xxd3vin/spp-sdk
|
opt/Python27/Lib/encodings/zlib_codec.py
|
533
|
3015
|
""" Python 'zlib_codec' Codec - zlib compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg ([email protected]).
"""
import codecs
import zlib # this codec needs the optional zlib module !
### Codec APIs
def zlib_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = zlib.compress(input)
return (output, len(input))
def zlib_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = zlib.decompress(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return zlib_encode(input, errors)
def decode(self, input, errors='strict'):
return zlib_decode(input, errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors='strict'):
assert errors == 'strict'
self.errors = errors
self.compressobj = zlib.compressobj()
def encode(self, input, final=False):
if final:
c = self.compressobj.compress(input)
return c + self.compressobj.flush()
else:
return self.compressobj.compress(input)
def reset(self):
self.compressobj = zlib.compressobj()
class IncrementalDecoder(codecs.IncrementalDecoder):
def __init__(self, errors='strict'):
assert errors == 'strict'
self.errors = errors
self.decompressobj = zlib.decompressobj()
def decode(self, input, final=False):
if final:
c = self.decompressobj.decompress(input)
return c + self.decompressobj.flush()
else:
return self.decompressobj.decompress(input)
def reset(self):
self.decompressobj = zlib.decompressobj()
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='zlib',
encode=zlib_encode,
decode=zlib_decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
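### Usage sketch (not part of the original module)
# A minimal round-trip through the incremental classes, runnable
# standalone; byte literals keep it valid under Python 2 and 3.
if __name__ == '__main__':
    enc = IncrementalEncoder()
    dec = IncrementalDecoder()
    blob = enc.encode(b'hello ') + enc.encode(b'world', final=True)
    assert dec.decode(blob, final=True) == b'hello world'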
|
mit
|
hakuya/higu
|
scripts/insertfile.py
|
1
|
3161
|
#!/usr/bin/python
import sys
import os
import logging
log = logging.getLogger( __name__ )
logging.basicConfig()
import hdbfs
import higu.config
MAX_TEXT_LEN = 2**18
def create_album( name, text, tags ):
album = h.create_album()
if( name is not None ):
album.add_name( name )
if( text is not None ):
album.set_text( text )
for t in tags:
album.assign( t )
return album
if( __name__ == '__main__' ):
import optparse
parser = optparse.OptionParser( usage = 'Usage: %prog [options] files...' )
parser.add_option( '-c', '--config',
dest = 'config',
help = 'Configuration File' )
parser.add_option( '-p', '--pretend',
dest = 'pretend', action = 'store_true', default = False,
help = 'Pretend, don\'t actually do anything' )
parser.add_option( '-r', '--recovery',
dest = 'recovery', action = 'store_true', default = False,
help = 'Recovery mode' )
parser.add_option( '-a', '--album',
dest = 'album',
help = 'Create album and add files to album' )
parser.add_option( '-x', '--text',
dest = 'text_data',
help = 'Add text description to album (txt file)' )
parser.add_option( '-t', '--tags',
dest = 'taglist',
help = 'List of tags (\',\' separated) to apply' )
parser.add_option( '-T', '--newtags',
dest = 'taglist_new',
help = 'Same as -t, but creates tags if they don\'t exist' )
parser.add_option( '-n', '--name-policy',
dest = 'name_policy',
help = 'Policy for persisting names ("noreg", "noset", "setundef", "setall")' )
opts, files = parser.parse_args()
if( len( files ) < 1 ):
parser.print_help()
sys.exit( 0 )
if( opts.config is not None ):
cfg = higu.config.init( opts.config )
hdbfs.init( cfg.get_path( 'library' ) )
else:
hdbfs.init()
h = hdbfs.Database()
h.enable_write_access()
if( opts.recovery ):
h.recover_files( files )
sys.exit( 0 )
tags = opts.taglist.split( ',' ) if( opts.taglist is not None ) else []
tags_new = opts.taglist_new.split( ',' ) if( opts.taglist_new is not None ) else []
    # renamed to avoid shadowing the create_album() helper defined above
    do_create_album = opts.album is not None
    album_name = opts.album if( opts.album != '-' ) else None
    if( do_create_album and opts.text_data is not None ):
textfile = open( opts.text_data, 'r' )
text_data = unicode( textfile.read( MAX_TEXT_LEN ), 'utf-8' )
assert textfile.read( 1 ) == '', 'Text file too long'
else:
text_data = None
name_policy = hdbfs.NAME_POLICY_SET_IF_UNDEF
if( opts.name_policy == "noreg" ):
name_policy = hdbfs.NAME_POLICY_DONT_REGISTER
elif( opts.name_policy == "noset" ):
name_policy = hdbfs.NAME_POLICY_DONT_SET
elif( opts.name_policy == "setundef" ):
name_policy = hdbfs.NAME_POLICY_SET_IF_UNDEF
elif( opts.name_policy == "setall" ):
name_policy = hdbfs.NAME_POLICY_SET_ALWAYS
    h.batch_add_files( files, tags, tags_new, name_policy,
                       do_create_album, album_name, text_data )
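    # Example invocation (hypothetical paths and tag names): create an
    # album named "Holiday" from two files, applying existing tags:
    #     insertfile.py -a Holiday -t travel,2011 IMG_001.jpg IMG_002.jpg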
# vim:sts=4:et:sw=4
|
bsd-2-clause
|
Ayub-Khan/edx-platform
|
common/test/acceptance/tests/lms/test_lms_entrance_exams.py
|
55
|
4073
|
# -*- coding: utf-8 -*-
"""
Bok choy acceptance tests for Entrance exams in the LMS
"""
from textwrap import dedent
from ..helpers import UniqueCourseTest
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.lms.courseware import CoursewarePage
from ...pages.lms.problem import ProblemPage
from ...fixtures.course import CourseFixture, XBlockFixtureDesc
class EntranceExamTest(UniqueCourseTest):
"""
Base class for tests of Entrance Exams in the LMS.
"""
USERNAME = "joe_student"
EMAIL = "[email protected]"
def setUp(self):
super(EntranceExamTest, self).setUp()
self.xqueue_grade_response = None
self.courseware_page = CoursewarePage(self.browser, self.course_id)
# Install a course with a hierarchy and problems
course_fixture = CourseFixture(
self.course_info['org'], self.course_info['number'],
self.course_info['run'], self.course_info['display_name'],
settings={
'entrance_exam_enabled': 'true',
'entrance_exam_minimum_score_pct': '50'
}
)
problem = self.get_problem()
course_fixture.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(problem)
)
).install()
entrance_exam_subsection = None
outline = course_fixture.course_outline
for child in outline['child_info']['children']:
if child.get('display_name') == "Entrance Exam":
entrance_exam_subsection = child['child_info']['children'][0]
if entrance_exam_subsection:
course_fixture.create_xblock(entrance_exam_subsection['id'], problem)
# Auto-auth register for the course.
AutoAuthPage(self.browser, username=self.USERNAME, email=self.EMAIL,
course_id=self.course_id, staff=False).visit()
def get_problem(self):
""" Subclasses should override this to complete the fixture """
raise NotImplementedError()
class EntranceExamPassTest(EntranceExamTest):
"""
Tests the scenario when a student passes entrance exam.
"""
def get_problem(self):
"""
Create a multiple choice problem
"""
xml = dedent("""
<problem>
            <p>What is the height of the Eiffel Tower without the antenna?</p>
            <multiplechoiceresponse>
              <choicegroup label="What is the height of the Eiffel Tower without the antenna?" type="MultipleChoice">
<choice correct="false">324 meters<choicehint>Antenna is 24 meters high</choicehint></choice>
<choice correct="true">300 meters</choice>
<choice correct="false">224 meters</choice>
<choice correct="false">400 meters</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'HEIGHT OF EIFFEL TOWER', data=xml)
def test_course_is_unblocked_as_soon_as_student_passes_entrance_exam(self):
"""
        Scenario: Ensure that the entrance exam status message is updated and the
        courseware is unblocked as soon as the student passes the entrance exam.
Given I have a course with entrance exam as pre-requisite
When I pass entrance exam
Then I can see complete TOC of course
And I can see message indicating my pass status
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.wait_for_page().problem_name, 'HEIGHT OF EIFFEL TOWER')
self.assertTrue(self.courseware_page.has_entrance_exam_message())
self.assertFalse(self.courseware_page.has_passed_message())
problem_page.click_choice('choice_1')
problem_page.click_check()
self.courseware_page.wait_for_page()
self.assertTrue(self.courseware_page.has_passed_message())
self.assertEqual(self.courseware_page.chapter_count_in_navigation, 2)
|
agpl-3.0
|
bpagon13/vent
|
docs/source/conf.py
|
1
|
5203
|
# -*- coding: utf-8 -*-
#
# Vent documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 17 12:58:13 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = '1.1'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Vent'
copyright = u'2017, In-Q-Tel, Inc'
author = u'Cyber Reboot'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.4.5.dev'
# The full version, including alpha/beta/rc tags.
release = u'0.4.5.dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['modules.rst',
'vent.rst']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Ventdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Vent.tex', u'Vent Documentation',
u'Cyber Reboot', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'vent', u'Vent Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Vent', u'Vent Documentation',
author, 'Vent', 'One line description of project.',
'Miscellaneous'),
]
|
apache-2.0
|
Evervolv/android_external_chromium
|
testing/gtest/test/gtest_xml_outfiles_test.py
|
718
|
5312
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "[email protected] (Keith Ray)"
import os
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuites>
"""
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuites>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
"""Unit test for Google Test's XML output functionality."""
def setUp(self):
# We want the trailing '/' that the last "" provides in os.path.join, for
# telling Google Test to create an output directory instead of a single file
# for xml output.
self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_OUTPUT_SUBDIR, "")
self.DeleteFilesAndDir()
def tearDown(self):
self.DeleteFilesAndDir()
def DeleteFilesAndDir(self):
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
except os.error:
pass
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
except os.error:
pass
try:
os.rmdir(self.output_dir_)
except os.error:
pass
def testOutfile1(self):
self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)
def testOutfile2(self):
self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)
def _TestOutFile(self, test_name, expected_xml):
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
p = gtest_test_utils.Subprocess(command,
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
# TODO([email protected]): libtool causes the built test binary to be
# named lt-gtest_xml_outfiles_test_ instead of
    # gtest_xml_outfiles_test_. To account for this possibility, we
# allow both names in the following code. We should remove this
# hack when Chandler Carruth's libtool replacement tool is ready.
output_file_name1 = test_name + ".xml"
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
output_file2 = os.path.join(self.output_dir_, output_file_name2)
self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
output_file1)
expected = minidom.parseString(expected_xml)
if os.path.isfile(output_file1):
actual = minidom.parse(output_file1)
else:
actual = minidom.parse(output_file2)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == "__main__":
os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
gtest_test_utils.Main()
|
bsd-3-clause
|
earnaway/db-tools
|
pgschemagraph.py
|
1
|
11306
|
import sys
import psycopg2
from xml.sax.saxutils import escape
from optparse import OptionParser
def connect(**params):
connstr = "dbname='%(dbname)s' user='%(user)s' host='%(host)s' password='%(password)s'" % params
conn = psycopg2.connect(connstr)
return conn
class Table(object):
def __init__(self, schema, name):
self.schema = schema
self.name = name
self.columns = []
self.pk = None
self.fks = []
def add_column(self, name, type, nullable):
self.columns.append((name, type, nullable))
def set_pk(self, pk_columns):
self.pk = pk_columns
def add_fk(self, from_cols, to_schema, to_name, to_cols):
self.fks.append((from_cols, to_schema, to_name, to_cols))
def __repr__(self):
col_list = ', '.join('%s %s' % (cn, ct) for cn, ct, cnn in self.columns)
return '%s.%s(%s)' % (self.schema, self.name, col_list)
def get_tables(conn, schemas):
schema_list = ", ".join("'%s'" % sn for sn in schemas)
cursor = conn.cursor()
sql = """SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema IN (%s) AND table_type = 'BASE TABLE' AND table_name NOT IN ('migrations', 'jobs', 'failed_jobs')""" % schema_list
all_tables = []
cursor.execute(sql)
for table_schema, table_name in cursor.fetchall():
t = Table(table_schema, table_name)
sql = """SELECT column_name, COALESCE(domain_name, data_type) AS data_type, is_nullable FROM information_schema.columns
WHERE table_schema = '%s' AND table_name = '%s' ORDER BY ordinal_position""" % (table_schema, table_name)
cursor.execute(sql)
for column_name, data_type, is_nullable in cursor.fetchall():
t.add_column(column_name, data_type, (is_nullable == 'YES'))
sql = """SELECT column_name FROM information_schema.constraint_column_usage
WHERE constraint_name = (SELECT constraint_name FROM information_schema.table_constraints
WHERE table_schema = '%s' AND table_name = '%s' AND constraint_type = 'PRIMARY KEY')""" % (table_schema, table_name)
cursor.execute(sql)
pk_cols = []
for column_name, in cursor.fetchall():
pk_cols.append(column_name)
t.set_pk(pk_cols)
sql = """SELECT constraint_name FROM information_schema.table_constraints
WHERE table_schema = '%s' AND table_name = '%s' AND constraint_type = 'FOREIGN KEY'""" % (table_schema, table_name)
cursor.execute(sql)
for constraint_name, in cursor.fetchall():
sql = """SELECT column_name FROM information_schema.key_column_usage
WHERE constraint_name = '%s' ORDER BY position_in_unique_constraint""" % constraint_name
cursor.execute(sql)
from_cols = []
            for column_name, in cursor.fetchall():  # rows are 1-tuples
                from_cols.append(column_name)
sql = """SELECT table_schema, table_name, column_name FROM information_schema.constraint_column_usage
WHERE constraint_name = '%s'""" % constraint_name
cursor.execute(sql)
to_cols = []
to_name = None
for table_schema, table_name, column_name in cursor.fetchall():
to_schema = table_schema
to_name = table_name
to_cols.append(column_name)
if to_name is None:
continue
t.add_fk(from_cols, to_schema, to_name, to_cols)
all_tables.append(t)
return all_tables
def to_html_table(table):
parts = []
parts.append('<table>\n')
for name, type, nullable in table.columns:
parts.append('<tr>')
parts.append('<td>')
if name in table.pk:
parts.append('<u>%s</u>' % name)
elif nullable:
parts.append('<i>%s</i>' % name)
else:
parts.append(name)
parts.append('</td>')
parts.append('<td>%s</td>' % type)
parts.append('</tr>\n')
parts.append('</table>')
return ''.join(parts)
def guess_node_height(table):
ROW_HEIGHT = 23
EXTRA = 36
return len(table.columns) * ROW_HEIGHT + EXTRA
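# e.g. a table with 5 columns yields 5*23 + 36 = 151 pixels; these are
# rough estimates tuned for yEd's default entity-relationship node style.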
def guess_node_width(table):
COL_WIDTH = 8
EXTRA = 20
namewidth = 0
name = ''
for name, type, nullable in table.columns:
label = "%s" % (name)
if len(label) > namewidth:
name = label
namewidth = len(label)
typewidth = 0
type = ''
for name, type, nullable in table.columns:
label = "%s" % (type)
if len(label) > typewidth:
type = label
typewidth = len(label)
return (namewidth+typewidth)*COL_WIDTH+EXTRA
def to_graphml_node(table, node_id):
label = "%s" % (table.name)
content = escape('<html>' + to_html_table(table))
height = guess_node_height(table)
width = guess_node_width(table)
return """<node id="%(node_id)s">
<data key="d6">
<y:GenericNode configuration="com.yworks.entityRelationship.big_entity">
<y:Geometry height="%(height)d" width="%(width)d" x="-76.5" y="226.5"/>
<y:Fill color="#E8EEF7" color2="#B7C9E3" transparent="false"/>
<y:BorderStyle color="#000000" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" backgroundColor="#B7C9E3"
configuration="com.yworks.entityRelationship.label.name" fontFamily="Dialog"
fontSize="12" fontStyle="plain" hasLineColor="false" height="18.701171875"
modelName="internal" modelPosition="t" textColor="#000000" visible="true"
width="45.349609375" x="53.8251953125" y="4.0">%(label)s</y:NodeLabel>
<y:NodeLabel alignment="left" autoSizePolicy="content"
configuration="com.yworks.entityRelationship.label.attributes" fontFamily="Dialog"
fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false"
height="114.0" modelName="custom" textColor="#000000" visible="true" width="146.0"
x="2.0" y="30.701171875">%(content)s<y:LabelModel>
<y:ErdAttributesNodeLabelModel/>
</y:LabelModel>
<y:ModelParameter>
<y:ErdAttributesNodeLabelModelParameter/>
</y:ModelParameter>
</y:NodeLabel>
<y:StyleProperties>
<y:Property class="java.lang.Boolean" name="y.view.ShadowNodePainter.SHADOW_PAINTING" value="true"/>
</y:StyleProperties>
</y:GenericNode>
</data>
</node>""" % locals()
def to_graphml_edge(from_id, to_id, edge_id, name):
name = "%s" % (name)
return """ <edge id="%s" source="%s" target="%s"><data key="d10">
<y:PolyLineEdge>
<y:Path sx="0.0" sy="213.5" tx="0.0" ty="-190.5"/>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:EdgeLabel alignment="center" configuration="AutoFlippingLabel" distance="0.5" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" modelName="custom" preferredPlacement="anywhere" ratio="0.5" textColor="#000000" visible="true" >%s<y:LabelModel>
<y:SmartEdgeLabelModel autoRotationEnabled="true" defaultAngle="0.0" defaultDistance="5.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartEdgeLabelModelParameter angle="0.0" distance="11.0" distanceToCenter="true" position="right" ratio="0.5" segment="0"/>
</y:ModelParameter>
<y:PreferredPlacementDescriptor angle="0.0" angleOffsetOnRightSide="0" angleReference="absolute" angleRotationOnRightSide="co" distance="-1.0" frozen="true" placement="anywhere" side="anywhere" sideReference="relative_to_edge_flow"/>
</y:EdgeLabel>
<y:BendStyle smoothed="true"/>
</y:PolyLineEdge>
</data>
</edge>""" % (edge_id, from_id, to_id, name)
def to_graphml(all_tables):
next_id = 1
for t in all_tables:
t.node_id = next_id
next_id += 1
parts = []
parts.append("""<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:y="http://www.yworks.com/xml/graphml" xmlns:yed="http://www.yworks.com/xml/yed/3"
xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd">
<key for="graphml" id="d0" yfiles.type="resources"/>
<key for="port" id="d1" yfiles.type="portgraphics"/>
<key for="port" id="d2" yfiles.type="portgeometry"/>
<key for="port" id="d3" yfiles.type="portuserdata"/>
<key attr.name="url" attr.type="string" for="node" id="d4"/>
<key attr.name="description" attr.type="string" for="node" id="d5"/>
<key for="node" id="d6" yfiles.type="nodegraphics"/>
<key attr.name="Description" attr.type="string" for="graph" id="d7"/>
<key attr.name="url" attr.type="string" for="edge" id="d8"/>
<key attr.name="description" attr.type="string" for="edge" id="d9"/>
<key for="edge" id="d10" yfiles.type="edgegraphics"/>
<graph edgedefault="directed" id="G">
<data key="d7"/>
""")
for t in all_tables:
parts.append(to_graphml_node(t, t.node_id))
tables_by_name = {}
for t in all_tables:
tables_by_name[(t.schema, t.name)] = t
for t in all_tables:
for fk in t.fks:
from_id = t.node_id
to_id = tables_by_name[(fk[1], fk[2])].node_id
parts.append(to_graphml_edge(from_id, to_id, next_id, fk[0][0]))
next_id += 1
parts.append(""" </graph>
<data key="d0">
<y:Resources/>
</data>
</graphml>
""")
return ''.join(parts)
def main():
usage = """usage: %prog -h HOST -d DATBASE -u USER -p PASSWORD [-s SCHEMAS]"""
desc = """Generate GraphML schema diagrams for PostgreSQL databases."""
parser = OptionParser(usage=usage, description=desc, add_help_option=False)
parser.add_option("-h", "--host", action="store", help="server host name")
parser.add_option("-d", "--database", action="store", help="database name")
parser.add_option("-u", "--user", action="store", help="database user to login as")
parser.add_option("-p", "--password", action="store", help="user's password")
parser.add_option("-s", "--schemas", action="store", default='public', help="list of schemas to process")
options, args = parser.parse_args()
if options.host is None:
parser.error('host is required')
if options.database is None:
parser.error('database is required')
if options.user is None:
parser.error('user is required')
if options.password is None:
parser.error('password is required')
params = {'user': options.user, 'password': options.password, 'dbname': options.database, 'host': options.host}
schema_list = options.schemas.split(',')
conn = connect(**params)
all_tables = get_tables(conn, schema_list)
conn.close()
print to_graphml(all_tables)
if __name__ == '__main__':
main()
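# Example invocation (hypothetical host and credentials); GraphML is
# written to stdout, so redirect it to a file that yEd can open:
#     python pgschemagraph.py -h localhost -d mydb -u dbuser -p secret \
#         -s public,audit > schema.graphml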
|
gpl-2.0
|
Workday/OpenFrame
|
mandoline/tools/android/run_mandoline.py
|
15
|
3209
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import atexit
import logging
import os
import shutil
import sys
import tempfile
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, 'build', 'android'))
from pylib import constants
sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(__file__)),
os.pardir, os.pardir, os.pardir, 'mojo',
'tools'))
from mopy.android import AndroidShell
from mopy.config import Config
USAGE = ('run_mandoline.py [<shell-and-app-args>] [<start-page-url>]')
def _CreateSOLinks(dest_dir):
'''Creates links from files (eg. *.mojo) to the real .so for gdb to find.'''
# The files to create links for. The key is the name as seen on the device,
# and the target an array of path elements as to where the .so lives (relative
# to the output directory).
# TODO(sky): come up with some way to automate this.
files_to_link = {
'html_viewer.mojo': ['libhtml_viewer_library.so'],
'libmandoline_runner.so': ['mandoline_runner'],
}
build_dir = constants.GetOutDirectory()
print build_dir
for android_name, so_path in files_to_link.iteritems():
src = os.path.join(build_dir, *so_path)
if not os.path.isfile(src):
print '*** Expected file not found', src
print '*** Aborting launch.'
sys.exit(-1)
os.symlink(src, os.path.join(dest_dir, android_name))
def main():
logging.basicConfig()
parser = argparse.ArgumentParser(usage=USAGE)
debug_group = parser.add_mutually_exclusive_group()
debug_group.add_argument('--debug', help='Debug build (default)',
default=True, action='store_true')
debug_group.add_argument('--release', help='Release build', default=False,
dest='debug', action='store_false')
parser.add_argument('--build-dir', help='Build directory')
parser.add_argument('--target-cpu', help='CPU architecture to run for.',
choices=['x64', 'x86', 'arm'], default='arm')
parser.add_argument('--device', help='Serial number of the target device.')
parser.add_argument('--gdb', help='Run gdb',
default=False, action='store_true')
runner_args, args = parser.parse_known_args()
config = Config(build_dir=runner_args.build_dir,
target_os=Config.OS_ANDROID,
target_cpu=runner_args.target_cpu,
is_debug=runner_args.debug,
apk_name='Mandoline.apk')
shell = AndroidShell(config)
shell.InitShell(runner_args.device)
p = shell.ShowLogs()
temp_gdb_dir = None
if runner_args.gdb:
temp_gdb_dir = tempfile.mkdtemp()
atexit.register(shutil.rmtree, temp_gdb_dir, True)
_CreateSOLinks(temp_gdb_dir)
shell.StartActivity('MandolineActivity',
args,
sys.stdout,
p.terminate,
temp_gdb_dir)
return 0
if __name__ == '__main__':
sys.exit(main())
|
bsd-3-clause
|
dknez/libmesh
|
doc/statistics/libmesh_sflogos.py
|
7
|
6095
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
# Import stuff for working with dates
from datetime import datetime
from matplotlib.dates import date2num
# SF.net pages and SFLogo Impressions.
# On the site, located under "Sourceforge Traffic". Number of logos
# (last column) seems to be the most useful one.
#
# This view should give you the last 12 months data
# https://sourceforge.net/project/stats/detail.php?group_id=71130&ugn=libmesh&mode=12months&type=sfweb
# This data has now changed to Google-analytics style...
# After you select the proper date range, scroll down to the bottom
# of the screen and it should show the totals for the two categories,
# which are listed as "SF Logo" and "other"
# Other SF Logo
data = [
'Jan 2003', 681, 479,
'Feb 2003', 659, 1939,
'Mar 2003', 488, 1754,
'Apr 2003', 667, 3202,
'May 2003', 608, 2552,
'Jun 2003', 562, 2190,
'Jul 2003', 997, 3097,
'Aug 2003', 745, 4708,
'Sep 2003', 906, 4937,
'Oct 2003', 892, 6834,
'Nov 2003', 1257, 8495,
'Dec 2003', 1147, 6439,
'Jan 2004', 823, 7791,
'Feb 2004', 906, 8787,
'Mar 2004', 979, 11309,
'Apr 2004', 835, 9393,
'May 2004', 851, 9796,
'Jun 2004', 750, 9961,
'Jul 2004', 618, 6337,
'Aug 2004', 912, 6647,
'Sep 2004', 554, 5736,
'Oct 2004', 524, 6144,
'Nov 2004', 685, 8122,
'Dec 2004', 583, 6136,
'Jan 2005', 215, 2668,
'Feb 2005', 732, 7618,
'Mar 2005', 944, 10283,
'Apr 2005', 837, 9605,
'May 2005', 1420, 9994,
'Jun 2005', 1691, 12031,
'Jul 2005', 849, 6740,
'Aug 2005', 1068, 11771,
'Sep 2005', 1119, 11459,
'Oct 2005', 772, 8614,
'Nov 2005', 845, 9383,
'Dec 2005', 814, 10606,
'Jan 2006', 1004, 11511,
'Feb 2006', 819, 10693,
'Mar 2006', 1097, 11925,
'Apr 2006', 960, 15664,
'May 2006', 1091, 14194,
'Jun 2006', 906, 12118,
'Jul 2006', 1022, 8935,
'Aug 2006', 914, 9370,
'Sep 2006', 1087, 11397,
'Oct 2006', 1311, 11516,
'Nov 2006', 1182, 10795,
'Dec 2006', 811, 9418,
'Jan 2007', 1236, 11522,
'Feb 2007', 1410, 10669,
'Mar 2007', 1568, 13141,
'Apr 2007', 1544, 12285,
'May 2007', 1362, 14992,
'Jun 2007', 2229, 17716,
'Jul 2007', 1822, 15192,
'Aug 2007', 1446, 12300,
'Sep 2007', 2045, 19599,
'Oct 2007', 2680, 14694,
'Nov 2007', 2344, 15211,
'Dec 2007', 2235, 10683,
'Jan 2008', 1582, 11290,
'Feb 2008', 1712, 12376,
'Mar 2008', 1908, 13204,
'Apr 2008', 2308, 13046,
'May 2008', 2013, 10312,
'Jun 2008', 2082, 11522,
'Jul 2008', 1880, 10859,
'Aug 2008', 2083, 11677,
'Sep 2008', 1739, 11446,
'Oct 2008', 2546, 13463,
'Nov 2008', 2152, 14491,
'Dec 2008', 2600, 15275,
'Jan 2009', 1897, 12910,
'Feb 2009', 1880, 12008,
'Mar 2009', 6348, 12696,
'Apr 2009', 1799, 14048,
'May 2009', 1771, 13122,
'Jun 2009', 1811, 12114,
'Jul 2009', 1878, 13600,
'Aug 2009', 2047, 10828,
'Sep 2009', 2807, 12914,
'Oct 2009', 4025, 17326,
'Nov 2009', 3702, 15648,
'Dec 2009', 3409, 12510,
'Jan 2010', 3737, 31211,
'Feb 2010', 5015, 28772,
'Mar 2010', 5652, 17882,
'Apr 2010', 4019, 17495,
'May 2010', 3336, 18117,
'Jun 2010', 2174, 21288,
'Jul 2010', 874, 13900,
'Aug 2010', 1160, 15153,
'Sep 2010', 1317, 13836,
'Oct 2010', 3543, 15279,
'Nov 2010', 3072, 18663,
'Dec 2010', 2257, 16381,
'Jan 2011', 2513, 19798,
'Feb 2011', 1678, 17870,
'Mar 2011', 1878, 17759,
'Apr 2011', 1948, 21264,
'May 2011', 2696, 15953,
'Jun 2011', 1514, 18409,
'Jul 2011', 1422, 13071,
'Aug 2011', 906, 7857,
'Sep 2011', 976, 9764,
'Oct 2011', 1699, 13285,
'Nov 2011', 1952, 16431,
'Dec 2011', 2735, 17849,
'Jan 2012', 1741, 14358,
'Feb 2012', 1017, 14262,
'Mar 2012', 1361, 14379,
'Apr 2012', 967, 15483,
'May 2012', 2384, 13656,
'Jun 2012', 1337, 14370,
'Jul 2012', 2107, 17286,
'Aug 2012', 8165, 53331,
'Sep 2012', 2268, 14704,
'Oct 2012', 738, 7333, # No data recorded from Oct 10 thru 28?
'Nov 2012', 6104, 39650,
'Dec 2012', 3439, 24706, # libmesh switched to github Dec 10, 2012
'Jan 2013', 2552, 31993,
'Feb 2013', 2107, 24913,
'Mar 2013', 1376, 23953,
'Apr 2013', 1582, 19285,
'May 2013', 1257, 16753,
'Jun 2013', 482, 14458,
'Jul 2013', 465, 11325,
'Aug 2013', 306, 7653,
'Sep 2013', 731, 11332,
'Oct 2013', 795, 15619,
'Nov 2013', 753, 16199,
'Dec 2013', 593, 11596,
'Jan 2014', 489, 11195,
'Feb 2014', 484, 14375,
'Mar 2014', 363, 13050,
'Apr 2014', 357, 15700, # As of June 1, 2014 the site above no longer exists...
]
# Extract list of date strings
date_strings = data[0::3]
# Convert date strings into numbers
date_nums = []
for d in date_strings:
date_nums.append(date2num(datetime.strptime(d, '%b %Y')))
# Strip out number of logos/month for plotting
n_logos_month = data[2::3]
# Scale by 1000
n_logos_month = np.divide(n_logos_month, 1000.)
# Get a reference to the figure
fig = plt.figure()
# 111 is equivalent to Matlab's subplot(1,1,1) command
ax = fig.add_subplot(111)
# Make the bar chart. One number/month, so width=30
# makes sense.
ax.bar(date_nums, n_logos_month, width=30, color='b')
# Set tick labels at desired locations
xticklabels = ['Jan\n2003', 'Jan\n2005', 'Jan\n2007', 'Jan\n2009', 'Jan\n2011', 'Jan\n2013']
# Get numerical values for the tick labels
tick_nums = []
for x in xticklabels:
tick_nums.append(date2num(datetime.strptime(x, '%b\n%Y')))
ax.set_xticks(tick_nums)
ax.set_xticklabels(xticklabels)
# Make x-axis tick marks point outward
ax.get_xaxis().set_tick_params(direction='out')
# Set the xlimits
plt.xlim(date_nums[0], date_nums[-1]+30);
# Create title
fig.suptitle('SFLogo Pages/Month (in Thousands)')
# Save as PDF
plt.savefig('libmesh_sflogos.pdf')
# Local Variables:
# python-indent: 2
# End:
|
lgpl-2.1
|
xxshutong/openerp-7.0
|
openerp/addons/l10n_be_coda/wizard/__init__.py
|
439
|
1098
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_coda_import
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
credativUK/account-financial-tools
|
account_move_locking/account.py
|
27
|
2000
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville.
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
from openerp import models, fields, api, exceptions, _
class AccountMove(models.Model):
_inherit = 'account.move'
locked = fields.Boolean('Locked', readonly=True)
@api.multi
def write(self, vals):
for move in self:
if move.locked:
raise exceptions.Warning(_('Move Locked!'),
move.name)
return super(AccountMove, self).write(vals)
@api.multi
def unlink(self):
for move in self:
if move.locked:
raise exceptions.Warning(_('Move Locked!'),
move.name)
return super(AccountMove, self).unlink()
@api.multi
def button_cancel(self):
        # Cancelling a move is done directly in SQL,
        # so we need to test manually whether the move is locked
for move in self:
if move.locked:
raise exceptions.Warning(_('Move Locked!'),
move.name)
return super(AccountMove, self).button_cancel()
|
agpl-3.0
|
bigdatauniversity/edx-platform
|
lms/djangoapps/course_blocks/transformers/tests/test_helpers.py
|
23
|
12832
|
"""
Test helpers for testing course block transformers.
"""
from course_modes.models import CourseMode
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from lms.djangoapps.courseware.access import has_access
from ...api import get_course_blocks
class CourseStructureTestCase(ModuleStoreTestCase):
"""
Helper for test cases that need to build course structures.
"""
def setUp(self):
"""
Create users.
"""
super(CourseStructureTestCase, self).setUp()
# Set up users.
self.password = 'test'
self.user = UserFactory.create(password=self.password)
self.staff = UserFactory.create(password=self.password, is_staff=True)
def create_block_id(self, block_type, block_ref):
"""
Returns the block id (display name) that is used in the test
course structures for the given block type and block reference
string.
"""
return '{}_{}'.format(block_type, block_ref)
def build_xblock(self, block_hierarchy, block_map, parent):
"""
Build an XBlock, add it to block_map, and call build_xblock on
the children defined in block_dict.
Arguments:
block_hierarchy (BlockStructureDict): Definition of
hierarchy, from this block down.
block_map (dict[str: XBlock]): Mapping from '#ref' values to
their XBlocks.
parent (XBlock): Parent block for this xBlock.
"""
block_type = block_hierarchy['#type']
block_ref = block_hierarchy['#ref']
factory = (CourseFactory if block_type == 'course' else ItemFactory)
kwargs = {key: value for key, value in block_hierarchy.iteritems() if key[0] != '#'}
if block_type != 'course':
kwargs['category'] = block_type
if parent:
kwargs['parent'] = parent
xblock = factory.create(
display_name=self.create_block_id(block_type, block_ref),
publish_item=True,
**kwargs
)
block_map[block_ref] = xblock
for child_hierarchy in block_hierarchy.get('#children', []):
self.build_xblock(child_hierarchy, block_map, xblock)
def add_parents(self, block_hierarchy, block_map):
"""
Recursively traverse the block_hierarchy and add additional
parents. This method is expected to be called only after all
blocks have been created.
The additional parents are obtained from the '#parents' field
and is expected to be a list of '#ref' values of the parents.
        Note: if a '#parents' field is found, the block is removed from
        the course's children, since it is expected not to belong to the root.
If the block is meant to be a direct child of the course as
well, the course should be explicitly listed in '#parents'.
Arguments:
block_hierarchy (BlockStructureDict):
Definition of block hierarchy.
block_map (dict[str: XBlock]):
Mapping from '#ref' values to their XBlocks.
"""
parents = block_hierarchy.get('#parents', [])
if parents:
block_key = block_map[block_hierarchy['#ref']].location
# First remove the block from the course.
# It would be re-added to the course if the course was
# explicitly listed in parents.
course = modulestore().get_item(block_map['course'].location)
course.children.remove(block_key)
block_map['course'] = update_block(course)
        # Add this block to each listed parent.
for parent_ref in parents:
parent_block = modulestore().get_item(block_map[parent_ref].location)
parent_block.children.append(block_key)
block_map[parent_ref] = update_block(parent_block)
        # Recursively process the children.
for child_hierarchy in block_hierarchy.get('#children', []):
self.add_parents(child_hierarchy, block_map)
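    # A hypothetical flat definition using '#parents' (sketch only): every
    # block is first created as a direct '#children' entry of the course,
    # then add_parents detaches it from the course and re-attaches it to
    # each '#ref' listed in its '#parents' field:
    #
    #     [{
    #         '#type': 'course',
    #         '#ref': 'course',
    #         '#children': [
    #             {'#type': 'chapter', '#ref': 'A'},
    #             {'#type': 'vertical', '#ref': 'B', '#parents': ['A']},
    #         ],
    #     }]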
def build_course(self, course_hierarchy):
"""
Build a hierarchy of XBlocks.
Arguments:
course_hierarchy (BlockStructureDict): Definition of course
hierarchy.
where a BlockStructureDict is a list of dicts in the form {
'key1': 'value1',
...
'keyN': 'valueN',
'#type': block_type,
'#ref': short_string_for_referencing_block,
'#children': list[BlockStructureDict],
'#parents': list['#ref' values]
}
Special keys start with '#'; the rest just get passed as
kwargs to Factory.create.
Note: the caller has a choice of whether to create
(1) a nested block structure with children blocks embedded
within their parents, or
(2) a flat block structure with children blocks defined
alongside their parents and attached via the #parents
field, or
        (3) a combination of both (1) and (2), used for whichever
            blocks are convenient.
Note 2: When the #parents field is used in addition to the
nested pattern for a block, it specifies additional parents
that aren't already implied by having the block exist within
another block's #children field.
Returns:
dict[str: XBlock]:
Mapping from '#ref' values to their XBlocks.
"""
block_map = {}
# build the course tree
for block_hierarchy in course_hierarchy:
self.build_xblock(block_hierarchy, block_map, parent=None)
        # Add additional parents if the course is a DAG or was built
        # flat (without specifying '#children' values).
for block_hierarchy in course_hierarchy:
self.add_parents(block_hierarchy, block_map)
return block_map
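    # A hypothetical nested call (sketch only; '#ref' values are arbitrary
    # short strings, and display names come from create_block_id):
    #
    #     blocks = self.build_course([{
    #         '#type': 'course',
    #         '#ref': 'course',
    #         '#children': [{
    #             '#type': 'chapter',
    #             '#ref': 'chap1',
    #             '#children': [{'#type': 'vertical', '#ref': 'vert1'}],
    #         }],
    #     }])
    #     vert1_key = blocks['vert1'].location  # UsageKey of the vertical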
def get_block_key_set(self, blocks, *refs):
"""
Gets the set of usage keys that correspond to the list of
#ref values as defined on blocks.
Returns: set[UsageKey]
"""
xblocks = (blocks[ref] for ref in refs)
        return {xblock.location for xblock in xblocks}
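    # For example, given blocks returned by build_course,
    # self.get_block_key_set(blocks, 'chap1', 'vert1') returns the set of
    # UsageKeys of those two blocks ('chap1' and 'vert1' are hypothetical
    # '#ref' values).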
class BlockParentsMapTestCase(ModuleStoreTestCase):
"""
Test helper class for creating a test course of
a graph of vertical blocks based on a parents_map.
"""
    # Tree formed by parents_map:
    #        0
    #     /     \
    #    1       2
    #   / \     / \
    #  3   4   /   5
    #       \ /
    #        6
    # Note: parents must always have lower indices than their children.
parents_map = [[], [0], [0], [1], [1], [2], [2, 4]]
def setUp(self, **kwargs):
super(BlockParentsMapTestCase, self).setUp(**kwargs)
# create the course
self.course = CourseFactory.create()
# an ordered list of block locations, where the index
# corresponds to the block's index in the parents_map.
self.xblock_keys = [self.course.location]
# create all other blocks in the course
for i, parents_index in enumerate(self.parents_map):
if i == 0:
continue # course already created
# create the block as a vertical
self.xblock_keys.append(
ItemFactory.create(
parent=self.get_block(parents_index[0]),
category="vertical",
).location
)
# add additional parents
if len(parents_index) > 1:
for index in range(1, len(parents_index)):
parent_index = parents_index[index]
parent_block = self.get_block(parent_index)
parent_block.children.append(self.xblock_keys[i])
update_block(parent_block)
self.password = 'test'
self.student = UserFactory.create(is_staff=False, username='test_student', password=self.password)
self.staff = UserFactory.create(is_staff=True, username='test_staff', password=self.password)
CourseEnrollmentFactory.create(
is_active=True,
mode=CourseMode.DEFAULT_MODE_SLUG,
user=self.student,
course_id=self.course.id
)
def assert_transform_results(
self,
test_user,
expected_user_accessible_blocks,
blocks_with_differing_access,
transformers=None,
):
"""
Verifies the results of transforming the blocks in the course.
Arguments:
test_user (User): The non-staff user that is being tested.
For example, self.student.
expected_user_accessible_blocks (set(int)): Set of blocks
(indices) that a student user is expected to have access
to after the transformers are executed.
            blocks_with_differing_access (set(int)): Set of blocks
                (indices) whose access result is expected to differ
                between the transformers and the current implementation
                of has_access.
            transformers (list[BlockStructureTransformer]): An optional
                list of transformers to execute. If not provided, the
                default transformers used by get_course_blocks are used.
"""
def check_results(user, expected_accessible_blocks, blocks_with_differing_access):
"""
Verifies the results of transforming the blocks in the
course for the given user.
"""
self.client.login(username=user.username, password=self.password)
block_structure = get_course_blocks(user, self.course.location, transformers=transformers)
# Enumerate through all the blocks that were created in the
# course
for i, xblock_key in enumerate(self.xblock_keys):
# verify existence of the block
block_structure_result = block_structure.has_block(xblock_key)
has_access_result = bool(has_access(user, 'load', self.get_block(i), course_key=self.course.id))
# compare with expected value
                self.assertEqual(
block_structure_result,
i in expected_accessible_blocks,
"block_structure return value {0} not equal to expected value for block {1} for user {2}".format(
block_structure_result, i, user.username
)
)
# compare with has_access result
if i in blocks_with_differing_access:
self.assertNotEqual(
block_structure_result,
has_access_result,
"block structure ({0}) & has_access ({1}) results are equal for block {2} for user {3}".format(
block_structure_result, has_access_result, i, user.username
)
)
else:
                    self.assertEqual(
block_structure_result,
has_access_result,
"block structure ({0}) & has_access ({1}) results not equal for block {2} for user {3}".format(
block_structure_result, has_access_result, i, user.username
)
)
self.client.logout()
# verify given test user has access to expected blocks
check_results(
test_user,
expected_user_accessible_blocks,
blocks_with_differing_access
)
# verify staff has access to all blocks
        check_results(self.staff, set(range(len(self.parents_map))), set())
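    # A hypothetical call from a subclass (sketch only; HidingTransformer
    # is an invented transformer that hides blocks 3 and 6 from students):
    #
    #     self.assert_transform_results(
    #         self.student,
    #         expected_user_accessible_blocks={0, 1, 2, 4, 5},
    #         blocks_with_differing_access=set(),
    #         transformers=[HidingTransformer()],
    #     )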
def get_block(self, block_index):
"""
        Helper method to retrieve the requested block (index) from the
        modulestore.
"""
return modulestore().get_item(self.xblock_keys[block_index])
def update_block(block):
"""
    Helper method to update the given block in the modulestore.
"""
return modulestore().update_item(block, 'test_user')
def create_location(org, course, run, block_type, block_id):
"""
Returns the usage key for the given key parameters using the
default modulestore
"""
return modulestore().make_course_key(org, course, run).make_usage_key(block_type, block_id)
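
# A hypothetical usage of create_location (sketch only; the key parts are
# invented):
#
#     key = create_location('edX', 'DemoX', '2015', 'vertical', 'vert_1')
#     block = modulestore().get_item(key)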
|
agpl-3.0
|