repo_name (string, 5-100 chars) | path (string, 4-375 chars) | copies (string, 991 classes) | size (string, 4-7 chars) | content (string, 666 chars-1M) | license (string, 15 classes)
---|---|---|---|---|---|
SAM-IT-SA/odoo | openerp/addons/base/tests/test_translate.py | 460 | 1941 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 OpenERP S.A. http://www.openerp.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import unittest
from openerp.tools.translate import quote, unquote
class TranslationToolsTestCase(unittest.TestCase):
def test_quote_unquote(self):
def test_string(str):
quoted = quote(str)
#print "\n1:", repr(str)
#print "2:", repr(quoted)
unquoted = unquote("".join(quoted.split('"\n"')))
#print "3:", repr(unquoted)
            self.assertEqual(str, unquoted)
test_string("""test \nall kinds\n \n o\r
\\\\ nope\n\n"
""")
# The ones with 1+ backslashes directly followed by
# a newline or literal N can fail... we would need a
# state-machine parser to handle these, but this would
# be much slower so it's better to avoid them at the moment
self.assertRaises(AssertionError, quote, """test \nall kinds\n\no\r
\\\\nope\n\n"
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sylvchev/PyRiemannEigen | Benchmark/TangentSpace/Tangent.py | 3 | 1304 | import os
import sys
import timeit
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")))
from Utils.CovMat import CovMat
from Utils.CovMats import CovMats
size = [10, 25, 50, 75, 100, 250, 500, 750, 1000]
# WARMUP
print("Warm up...")
for i in range(0, 10):
warm_up_covmat = CovMat.random(1000)
warm_up_covmat.expm
for i in range(0, len(size)):
covmats = CovMats.random(10, size[i])
covmat = CovMat.random(size[i])
t = timeit.Timer("tangent_space(covmats.numpy_array, covmat.numpy_array)",
setup="from __main__ import covmats, covmat; from oldPyRiemann.tangentspace import tangent_space; import Utils.OpenBLAS")
old_time = t.timeit(number=size[len(size) - i - 1]) / size[len(size) - i - 1]
t = timeit.Timer("covmats.reset_fields(); covmat.reset_fields(); TangentSpace.tangent(covmats, covmat)",
setup="from Utils.TangentSpace import TangentSpace; from __main__ import covmats, covmat")
new_time = t.timeit(number=size[len(size) - i - 1]) / size[len(size) - i - 1]
print("matrix size : " + "10x" + str(size[i]) + "x" + str(size[i]) + "\t\told time : " + str(
old_time) + " sec\t\t" + "new time : " + str(new_time) + " sec\t\t" + "speed up : " + str(
old_time / new_time))
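# Illustrative aside (not part of the original script): the per-call timing
# pattern used above, reduced to a self-contained sketch. The setup string
# imports the objects under test from __main__, and dividing the total by
# `number` gives the average seconds per call.
demo_data = list(range(1000))
demo_timer = timeit.Timer("sum(demo_data)", setup="from __main__ import demo_data")
print("sum() per call : " + str(demo_timer.timeit(number=1000) / 1000) + " sec")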
| gpl-3.0 |
repotvsupertuga/tvsupertuga.repository | instal/plugin.video.SportsDevil/lib/downloader.py | 25 | 2296 | # -*- coding: utf-8 -*-
import common
import urllib
import os.path
import xbmc, xbmcgui
class Downloader(object):
def __init__(self):
self.pDialog = None
def downloadWithJDownloader(self, url, title):
common.runPlugin('plugin://plugin.program.jdownloader/?action=addlink&url=' + url)
        common.showNotification('Sent to JDownloader: ' + title)
def downloadMovie(self, url, path, title, extension):
if not os.path.exists(path):
common.log('Path does not exist')
return None
if title == '':
common.log('No title given')
return None
file_path = xbmc.makeLegalFilename(os.path.join(path, title + extension))
file_path = urllib.unquote_plus(file_path)
# Overwrite existing file?
if os.path.isfile(file_path):
self.pDialog = xbmcgui.Dialog()
if not common.ask('File already exists. Overwrite?\n' + os.path.basename(file_path)):
title = common.showOSK(urllib.unquote_plus(title), common.translate(30102))
if not title:
return None
file_path = xbmc.makeLegalFilename(os.path.join(path, title + extension))
file_path = urllib.unquote_plus(file_path)
success = self.__download(url, file_path)
if success:
return file_path
else:
return None
def __download(self, url, file_path):
try:
# Setup progress dialog and download
self.pDialog = xbmcgui.DialogProgress()
self.pDialog.create('SportsDevil', common.translate(30050), common.translate(30051))
urllib.urlretrieve(url, file_path, self.video_report_hook)
self.pDialog.close()
return True
except IOError:
self.pDialog.close()
common.showError(common.translate(30053))
except KeyboardInterrupt:
self.pDialog.close()
return False
def video_report_hook(self, count, blocksize, totalsize):
        # Guard against totalsize <= 0 (unknown length) and clamp the final
        # block, which can push the raw ratio past 100%.
        percent = min(100, int(float(count * blocksize * 100) / totalsize)) if totalsize > 0 else 0
self.pDialog.update(percent, common.translate(30050), common.translate(30051))
if self.pDialog.iscanceled():
raise KeyboardInterrupt
| gpl-2.0 |
lemonjia/Bravado | pycchecker/pycparser/_ast_gen.py | 5 | 8427 | #-----------------------------------------------------------------
# _ast_gen.py
#
# Generates the AST Node classes from a specification given in
# a configuration file
#
# The design of this module was inspired by astgen.py from the
# Python 2.5 code-base.
#
# Copyright (C) 2008-2013, Eli Bendersky
# License: BSD
#-----------------------------------------------------------------
import pprint
from string import Template
class ASTCodeGenerator(object):
def __init__(self, cfg_filename='_c_ast.cfg'):
""" Initialize the code generator from a configuration
file.
"""
self.cfg_filename = cfg_filename
self.node_cfg = [NodeCfg(name, contents)
for (name, contents) in self.parse_cfgfile(cfg_filename)]
def generate(self, file=None):
""" Generates the code into file, an open file buffer.
"""
src = Template(_PROLOGUE_COMMENT).substitute(
cfg_filename=self.cfg_filename)
src += _PROLOGUE_CODE
for node_cfg in self.node_cfg:
src += node_cfg.generate_source() + '\n\n'
file.write(src)
def parse_cfgfile(self, filename):
""" Parse the configuration file and yield pairs of
(name, contents) for each node.
"""
with open(filename, "r") as f:
for line in f:
line = line.strip()
if not line or line.startswith('#'):
continue
colon_i = line.find(':')
lbracket_i = line.find('[')
rbracket_i = line.find(']')
if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i:
raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line))
name = line[:colon_i]
val = line[lbracket_i + 1:rbracket_i]
vallist = [v.strip() for v in val.split(',')] if val else []
yield name, vallist
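    # For reference, each non-comment line in the configuration file has the
    # form "Name: [contents]", where a trailing '*' marks a child node and
    # '**' a sequence of child nodes. pycparser's _c_ast.cfg contains entries
    # such as:
    #
    #     BinaryOp: [op, left*, right*]
    #
    # which parse_cfgfile() yields as ('BinaryOp', ['op', 'left*', 'right*']).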
class NodeCfg(object):
""" Node configuration.
name: node name
contents: a list of contents - attributes and child nodes
See comment at the top of the configuration file for details.
"""
def __init__(self, name, contents):
self.name = name
self.all_entries = []
self.attr = []
self.child = []
self.seq_child = []
for entry in contents:
clean_entry = entry.rstrip('*')
self.all_entries.append(clean_entry)
if entry.endswith('**'):
self.seq_child.append(clean_entry)
elif entry.endswith('*'):
self.child.append(clean_entry)
else:
self.attr.append(entry)
def generate_source(self):
src = self._gen_init()
src += '\n' + self._gen_children()
src += '\n' + self._gen_attr_names()
return src
def _gen_init(self):
src = "class %s(Node):\n" % self.name
if self.all_entries:
args = ', '.join(self.all_entries)
arglist = '(self, %s, coord=None)' % args
else:
arglist = '(self, coord=None)'
src += " def __init__%s:\n" % arglist
for name in self.all_entries + ['coord']:
src += " self.%s = %s\n" % (name, name)
return src
def _gen_children(self):
src = ' def children(self):\n'
if self.all_entries:
src += ' nodelist = []\n'
for child in self.child:
src += (
' if self.%(child)s is not None:' +
' nodelist.append(("%(child)s", self.%(child)s))\n') % (
dict(child=child))
for seq_child in self.seq_child:
src += (
' for i, child in enumerate(self.%(child)s or []):\n'
' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % (
dict(child=seq_child))
src += ' return tuple(nodelist)\n'
else:
src += ' return ()\n'
return src
def _gen_attr_names(self):
src = " attr_names = (" + ''.join("%r," % nm for nm in self.attr) + ')'
return src
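    # Illustrative sketch (not part of the module): for the BinaryOp entry
    # shown above, NodeCfg('BinaryOp', ['op', 'left*', 'right*'])
    # .generate_source() emits, roughly:
    #
    #     class BinaryOp(Node):
    #         def __init__(self, op, left, right, coord=None):
    #             self.op = op
    #             self.left = left
    #             self.right = right
    #             self.coord = coord
    #
    #         def children(self):
    #             nodelist = []
    #             if self.left is not None: nodelist.append(("left", self.left))
    #             if self.right is not None: nodelist.append(("right", self.right))
    #             return tuple(nodelist)
    #
    #         attr_names = ('op',)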
_PROLOGUE_COMMENT = \
r'''#-----------------------------------------------------------------
# ** ATTENTION **
# This code was automatically generated from the file:
# $cfg_filename
#
# Do not modify it directly. Modify the configuration file and
# run the generator again.
# ** ** *** ** **
#
# pycparser: c_ast.py
#
# AST Node classes.
#
# Copyright (C) 2008-2013, Eli Bendersky
# License: BSD
#-----------------------------------------------------------------
'''
_PROLOGUE_CODE = r'''
import sys
class Node(object):
""" Abstract base class for AST nodes.
"""
def children(self):
""" A sequence of all children that are Nodes
"""
pass
def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
""" Pretty print the Node and all its attributes and
children (recursively) to a buffer.
buf:
Open IO buffer into which the Node is printed.
offset:
Initial offset (amount of leading spaces)
attrnames:
True if you want to see the attribute names in
name=value pairs. False to only see the values.
nodenames:
True if you want to see the actual node names
within their parents.
showcoord:
Do you want the coordinates of each Node to be
displayed.
"""
lead = ' ' * offset
if nodenames and _my_node_name is not None:
buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
else:
buf.write(lead + self.__class__.__name__+ ': ')
if self.attr_names:
if attrnames:
nvlist = [(n, getattr(self,n)) for n in self.attr_names]
attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
else:
vlist = [getattr(self, n) for n in self.attr_names]
attrstr = ', '.join('%s' % v for v in vlist)
buf.write(attrstr)
if showcoord:
buf.write(' (at %s)' % self.coord)
buf.write('\n')
for (child_name, child) in self.children():
child.show(
buf,
offset=offset + 2,
attrnames=attrnames,
nodenames=nodenames,
showcoord=showcoord,
_my_node_name=child_name)
class NodeVisitor(object):
""" A base NodeVisitor class for visiting c_ast nodes.
Subclass it and define your own visit_XXX methods, where
XXX is the class name you want to visit with these
methods.
For example:
class ConstantVisitor(NodeVisitor):
def __init__(self):
self.values = []
def visit_Constant(self, node):
self.values.append(node.value)
Creates a list of values of all the constant nodes
encountered below the given node. To use it:
cv = ConstantVisitor()
cv.visit(node)
Notes:
* generic_visit() will be called for AST nodes for which
no visit_XXX method was defined.
* The children of nodes for which a visit_XXX was
defined will not be visited - if you need this, call
generic_visit() on the node.
You can use:
NodeVisitor.generic_visit(self, node)
* Modeled after Python's own AST visiting facilities
(the ast module of Python 3.0)
"""
def visit(self, node):
""" Visit a node.
"""
method = 'visit_' + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
return visitor(node)
def generic_visit(self, node):
""" Called if no explicit visitor function exists for a
node. Implements preorder visiting of the node.
"""
for c_name, c in node.children():
self.visit(c)
'''
if __name__ == "__main__":
import sys
ast_gen = ASTCodeGenerator('_c_ast.cfg')
ast_gen.generate(open('c_ast.py', 'w'))
| apache-2.0 |
jholkeboer/tau-graphical-crawler | lib/werkzeug/contrib/fixers.py | 259 | 10183 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.fixers
~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module includes various helpers that fix bugs in web servers. They may
    be necessary for some versions of a buggy web server but not others. We try
    to stay up to date with the status of the bugs as well as possible, but you
    have to check yourself whether they fix the problem you encounter.
If you notice bugs in webservers not fixed in this module consider
contributing a patch.
:copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from werkzeug.http import parse_options_header, parse_cache_control_header, \
parse_set_header
from werkzeug.useragents import UserAgent
from werkzeug.datastructures import Headers, ResponseCacheControl
class CGIRootFix(object):
"""Wrap the application in this middleware if you are using FastCGI or CGI
and you have problems with your app root being set to the cgi script's path
    instead of the path users are going to visit.
.. versionchanged:: 0.9
Added `app_root` parameter and renamed from `LighttpdCGIRootFix`.
:param app: the WSGI application
:param app_root: Defaulting to ``'/'``, you can set this to something else
if your app is mounted somewhere else.
"""
def __init__(self, app, app_root='/'):
self.app = app
self.app_root = app_root
def __call__(self, environ, start_response):
# only set PATH_INFO for older versions of Lighty or if no
# server software is provided. That's because the test was
# added in newer Werkzeug versions and we don't want to break
# people's code if they are using this fixer in a test that
# does not set the SERVER_SOFTWARE key.
if 'SERVER_SOFTWARE' not in environ or \
environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28':
environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \
environ.get('PATH_INFO', '')
environ['SCRIPT_NAME'] = self.app_root.strip('/')
return self.app(environ, start_response)
# backwards compatibility
LighttpdCGIRootFix = CGIRootFix
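# Typical usage (illustrative, not part of the module; '/myapp' is a
# hypothetical mount point): wrap the WSGI app and, if it is mounted somewhere
# other than '/', pass that mount point.
#
#     from werkzeug.contrib.fixers import CGIRootFix
#     app = CGIRootFix(app, app_root='/myapp')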
class PathInfoFromRequestUriFix(object):
"""On windows environment variables are limited to the system charset
which makes it impossible to store the `PATH_INFO` variable in the
environment without loss of information on some systems.
This is for example a problem for CGI scripts on a Windows Apache.
This fixer works by recreating the `PATH_INFO` from `REQUEST_URI`,
`REQUEST_URL`, or `UNENCODED_URL` (whatever is available). Thus the
fix can only be applied if the webserver supports either of these
variables.
:param app: the WSGI application
"""
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
for key in 'REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL':
if key not in environ:
continue
request_uri = unquote(environ[key])
script_name = unquote(environ.get('SCRIPT_NAME', ''))
if request_uri.startswith(script_name):
environ['PATH_INFO'] = request_uri[len(script_name):] \
.split('?', 1)[0]
break
return self.app(environ, start_response)
class ProxyFix(object):
"""This middleware can be applied to add HTTP proxy support to an
application that was not designed with HTTP proxies in mind. It
sets `REMOTE_ADDR`, `HTTP_HOST` from `X-Forwarded` headers. While
Werkzeug-based applications already can use
:py:func:`werkzeug.wsgi.get_host` to retrieve the current host even if
behind proxy setups, this middleware can be used for applications which
access the WSGI environment directly.
If you have more than one proxy server in front of your app, set
`num_proxies` accordingly.
Do not use this middleware in non-proxy setups for security reasons.
The original values of `REMOTE_ADDR` and `HTTP_HOST` are stored in
the WSGI environment as `werkzeug.proxy_fix.orig_remote_addr` and
`werkzeug.proxy_fix.orig_http_host`.
:param app: the WSGI application
:param num_proxies: the number of proxy servers in front of the app.
"""
def __init__(self, app, num_proxies=1):
self.app = app
self.num_proxies = num_proxies
def get_remote_addr(self, forwarded_for):
"""Selects the new remote addr from the given list of ips in
X-Forwarded-For. By default it picks the one that the `num_proxies`
proxy server provides. Before 0.9 it would always pick the first.
.. versionadded:: 0.8
"""
if len(forwarded_for) >= self.num_proxies:
return forwarded_for[-1 * self.num_proxies]
def __call__(self, environ, start_response):
getter = environ.get
forwarded_proto = getter('HTTP_X_FORWARDED_PROTO', '')
forwarded_for = getter('HTTP_X_FORWARDED_FOR', '').split(',')
forwarded_host = getter('HTTP_X_FORWARDED_HOST', '')
environ.update({
'werkzeug.proxy_fix.orig_wsgi_url_scheme': getter('wsgi.url_scheme'),
'werkzeug.proxy_fix.orig_remote_addr': getter('REMOTE_ADDR'),
'werkzeug.proxy_fix.orig_http_host': getter('HTTP_HOST')
})
forwarded_for = [x for x in [x.strip() for x in forwarded_for] if x]
remote_addr = self.get_remote_addr(forwarded_for)
if remote_addr is not None:
environ['REMOTE_ADDR'] = remote_addr
if forwarded_host:
environ['HTTP_HOST'] = forwarded_host
if forwarded_proto:
environ['wsgi.url_scheme'] = forwarded_proto
return self.app(environ, start_response)
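# Typical usage (illustrative, not part of the module): with two proxy servers
# in front of the app, REMOTE_ADDR is taken from the second-to-last entry of
# X-Forwarded-For, and the original values stay available under the
# 'werkzeug.proxy_fix.orig_*' environ keys.
#
#     from werkzeug.contrib.fixers import ProxyFix
#     app = ProxyFix(app, num_proxies=2)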
class HeaderRewriterFix(object):
"""This middleware can remove response headers and add others. This
is for example useful to remove the `Date` header from responses if you
are using a server that adds that header, no matter if it's present or
not or to add `X-Powered-By` headers::
app = HeaderRewriterFix(app, remove_headers=['Date'],
add_headers=[('X-Powered-By', 'WSGI')])
:param app: the WSGI application
:param remove_headers: a sequence of header keys that should be
removed.
:param add_headers: a sequence of ``(key, value)`` tuples that should
be added.
"""
def __init__(self, app, remove_headers=None, add_headers=None):
self.app = app
self.remove_headers = set(x.lower() for x in (remove_headers or ()))
self.add_headers = list(add_headers or ())
def __call__(self, environ, start_response):
def rewriting_start_response(status, headers, exc_info=None):
new_headers = []
for key, value in headers:
if key.lower() not in self.remove_headers:
new_headers.append((key, value))
new_headers += self.add_headers
return start_response(status, new_headers, exc_info)
return self.app(environ, rewriting_start_response)
class InternetExplorerFix(object):
"""This middleware fixes a couple of bugs with Microsoft Internet
Explorer. Currently the following fixes are applied:
- removing of `Vary` headers for unsupported mimetypes which
causes troubles with caching. Can be disabled by passing
``fix_vary=False`` to the constructor.
see: http://support.microsoft.com/kb/824847/en-us
- removes offending headers to work around caching bugs in
Internet Explorer if `Content-Disposition` is set. Can be
disabled by passing ``fix_attach=False`` to the constructor.
If it does not detect affected Internet Explorer versions it won't touch
the request / response.
"""
# This code was inspired by Django fixers for the same bugs. The
# fix_vary and fix_attach fixers were originally implemented in Django
# by Michael Axiak and is available as part of the Django project:
# http://code.djangoproject.com/ticket/4148
def __init__(self, app, fix_vary=True, fix_attach=True):
self.app = app
self.fix_vary = fix_vary
self.fix_attach = fix_attach
def fix_headers(self, environ, headers, status=None):
if self.fix_vary:
header = headers.get('content-type', '')
mimetype, options = parse_options_header(header)
if mimetype not in ('text/html', 'text/plain', 'text/sgml'):
headers.pop('vary', None)
if self.fix_attach and 'content-disposition' in headers:
pragma = parse_set_header(headers.get('pragma', ''))
pragma.discard('no-cache')
header = pragma.to_header()
if not header:
headers.pop('pragma', '')
else:
headers['Pragma'] = header
header = headers.get('cache-control', '')
if header:
cc = parse_cache_control_header(header,
cls=ResponseCacheControl)
cc.no_cache = None
cc.no_store = False
header = cc.to_header()
if not header:
headers.pop('cache-control', '')
else:
headers['Cache-Control'] = header
def run_fixed(self, environ, start_response):
def fixing_start_response(status, headers, exc_info=None):
headers = Headers(headers)
self.fix_headers(environ, headers, status)
return start_response(status, headers.to_wsgi_list(), exc_info)
return self.app(environ, fixing_start_response)
def __call__(self, environ, start_response):
ua = UserAgent(environ)
if ua.browser != 'msie':
return self.app(environ, start_response)
return self.run_fixed(environ, start_response)
| apache-2.0 |
kracwarlock/Lasagne | lasagne/objectives.py | 9 | 9540 | """
Provides some minimal help with building loss expressions for training or
validating a neural network.
Three functions build element- or item-wise loss expressions from network
predictions and targets:
.. autosummary::
:nosignatures:
binary_crossentropy
categorical_crossentropy
squared_error
A convenience function aggregates such losses into a scalar expression
suitable for differentiation:
.. autosummary::
:nosignatures:
aggregate
Note that these functions only serve to write more readable code, but are
completely optional. Essentially, any differentiable scalar Theano expression
can be used as a training objective.
Examples
--------
Assuming you have a simple neural network for 3-way classification:
>>> from lasagne.layers import InputLayer, DenseLayer, get_output
>>> from lasagne.nonlinearities import softmax, rectify
>>> l_in = InputLayer((100, 20))
>>> l_hid = DenseLayer(l_in, num_units=30, nonlinearity=rectify)
>>> l_out = DenseLayer(l_hid, num_units=3, nonlinearity=softmax)
And Theano variables representing your network input and targets:
>>> import theano
>>> data = theano.tensor.matrix('data')
>>> targets = theano.tensor.matrix('targets')
You'd first construct an element-wise loss expression:
>>> from lasagne.objectives import categorical_crossentropy, aggregate
>>> predictions = get_output(l_out, data)
>>> loss = categorical_crossentropy(predictions, targets)
Then aggregate it into a scalar (you could also just call ``mean()`` on it):
>>> loss = aggregate(loss, mode='mean')
Finally, this gives a loss expression you can pass to any of the update
methods in :mod:`lasagne.updates`. For validation of a network, you will
usually want to repeat these steps with deterministic network output, i.e.,
without dropout or any other nondeterministic computation in between:
>>> test_predictions = get_output(l_out, data, deterministic=True)
>>> test_loss = categorical_crossentropy(test_predictions, targets)
>>> test_loss = aggregate(test_loss)
This gives a loss expression good for monitoring validation error.
"""
import theano.tensor.nnet
from lasagne.layers import get_output
__all__ = [
"binary_crossentropy",
"categorical_crossentropy",
"squared_error",
"aggregate",
"mse", "Objective", "MaskedObjective", # deprecated
]
def binary_crossentropy(predictions, targets):
"""Computes the binary cross-entropy between predictions and targets.
.. math:: L = -t \\log(p) - (1 - t) \\log(1 - p)
Parameters
----------
predictions : Theano tensor
Predictions in (0, 1), such as sigmoidal output of a neural network.
targets : Theano tensor
Targets in [0, 1], such as ground truth labels.
Returns
-------
Theano tensor
An expression for the element-wise binary cross-entropy.
Notes
-----
This is the loss function of choice for binary classification problems
and sigmoid output units.
"""
return theano.tensor.nnet.binary_crossentropy(predictions, targets)
def categorical_crossentropy(predictions, targets):
"""Computes the categorical cross-entropy between predictions and targets.
.. math:: L_i = - \\sum_j{t_{i,j} \\log(p_{i,j})}
Parameters
----------
predictions : Theano 2D tensor
Predictions in (0, 1), such as softmax output of a neural network,
with data points in rows and class probabilities in columns.
targets : Theano 2D tensor or 1D tensor
Either targets in [0, 1] matching the layout of `predictions`, or
a vector of int giving the correct class index per data point.
Returns
-------
Theano 1D tensor
An expression for the item-wise categorical cross-entropy.
Notes
-----
This is the loss function of choice for multi-class classification
problems and softmax output units. For hard targets, i.e., targets
that assign all of the probability to a single class per data point,
providing a vector of int for the targets is usually slightly more
efficient than providing a matrix with a single 1.0 per row.
"""
return theano.tensor.nnet.categorical_crossentropy(predictions, targets)
def squared_error(a, b):
"""Computes the element-wise squared difference between two tensors.
.. math:: L = (p - t)^2
Parameters
----------
a, b : Theano tensor
The tensors to compute the squared difference between.
Returns
-------
Theano tensor
An expression for the item-wise squared difference.
Notes
-----
This is the loss function of choice for many regression problems
or auto-encoders with linear output units.
"""
return (a - b)**2
def aggregate(loss, weights=None, mode='mean'):
"""Aggregates an element- or item-wise loss to a scalar loss.
Parameters
----------
loss : Theano tensor
The loss expression to aggregate.
weights : Theano tensor, optional
The weights for each element or item, must be broadcastable to
the same shape as `loss` if given. If omitted, all elements will
be weighted the same.
mode : {'mean', 'sum', 'normalized_sum'}
Whether to aggregate by averaging, by summing or by summing and
dividing by the total weights (which requires `weights` to be given).
Returns
-------
Theano scalar
A scalar loss expression suitable for differentiation.
Notes
-----
By supplying binary weights (i.e., only using values 0 and 1), this
function can also be used for masking out particular entries in the
loss expression. Note that masked entries still need to be valid
values, not-a-numbers (NaNs) will propagate through.
When applied to batch-wise loss expressions, setting `mode` to
``'normalized_sum'`` ensures that the loss per batch is of a similar
magnitude, independent of associated weights. However, it means that
a given datapoint contributes more to the loss when it shares a batch
with low-weighted or masked datapoints than with high-weighted ones.
"""
if weights is not None:
loss = loss * weights
if mode == 'mean':
return loss.mean()
elif mode == 'sum':
return loss.sum()
elif mode == 'normalized_sum':
if weights is None:
raise ValueError("require weights for mode='normalized_sum'")
return loss.sum() / weights.sum()
else:
raise ValueError("mode must be 'mean', 'sum' or 'normalized_sum', "
"got %r" % mode)
def mse(x, t): # pragma no cover
"""Deprecated. Use :func:`squared_error()` instead."""
import warnings
warnings.warn("lasagne.objectives.mse() is deprecated and will be removed "
"for the first release of Lasagne. Use "
"lasagne.objectives.squared_error() instead.", stacklevel=2)
return squared_error(x, t)
class Objective(object): # pragma no cover
"""
Deprecated. See docstring of :mod:`lasagne.objectives` for alternatives.
"""
def __init__(self, input_layer, loss_function=squared_error,
aggregation='mean'):
import warnings
warnings.warn("lasagne.objectives.Objective is deprecated and "
"will be removed for the first release of Lasagne. For "
"alternatives, please see: "
"http://lasagne.readthedocs.org/en/latest/"
"modules/objectives.html", stacklevel=2)
import theano.tensor as T
self.input_layer = input_layer
self.loss_function = loss_function
self.target_var = T.matrix("target")
self.aggregation = aggregation
def get_loss(self, input=None, target=None, aggregation=None, **kwargs):
from lasagne.layers import get_output
network_output = get_output(self.input_layer, input, **kwargs)
if target is None:
target = self.target_var
if aggregation is None:
aggregation = self.aggregation
losses = self.loss_function(network_output, target)
return aggregate(losses, mode=aggregation)
class MaskedObjective(object): # pragma no cover
"""
Deprecated. See docstring of :mod:`lasagne.objectives` for alternatives.
"""
def __init__(self, input_layer, loss_function=mse, aggregation='mean'):
import warnings
warnings.warn("lasagne.objectives.MaskedObjective is deprecated and "
"will be removed for the first release of Lasagne. For "
"alternatives, please see: "
"http://lasagne.readthedocs.org/en/latest/"
"modules/objectives.html", stacklevel=2)
import theano.tensor as T
self.input_layer = input_layer
self.loss_function = loss_function
self.target_var = T.matrix("target")
self.mask_var = T.matrix("mask")
self.aggregation = aggregation
def get_loss(self, input=None, target=None, mask=None,
aggregation=None, **kwargs):
from lasagne.layers import get_output
network_output = get_output(self.input_layer, input, **kwargs)
if target is None:
target = self.target_var
if mask is None:
mask = self.mask_var
if aggregation is None:
aggregation = self.aggregation
losses = self.loss_function(network_output, target)
return aggregate(losses, mask, mode=aggregation)
| mit |
semplea/characters-meta | python/alchemy/examples/watson_developer_cloud/language_translation_v2.py | 2 | 3350 | # Copyright 2015 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The v2 Language Translation service
(http://www.ibm.com/smarterplanet/us/en/ibmwatson/developercloud/language-translation.html)
"""
from .watson_developer_cloud_service import WatsonDeveloperCloudService
from .watson_developer_cloud_service import WatsonInvalidArgument
class LanguageTranslationV2(WatsonDeveloperCloudService):
default_url = "https://gateway.watsonplatform.net/language-translation/api"
def __init__(self, url=default_url, **kwargs):
WatsonDeveloperCloudService.__init__(
self, 'language_translation', url, **kwargs)
def identify(self, text):
"""
Identifies the language of given source text
"""
return self.request(method='POST', url='/v2/identify', data=text, headers={'content-type': 'text/plain'},
accept_json=True)
def get_identifiable_languages(self):
return self.request(method='GET', url='/v2/identifiable_languages', accept_json=True)
def get_models(self, default=None, source=None, target=None):
"""
Get the available models for translation
"""
params = {'default': default, 'source': source, 'target': target}
return self.request(method='GET', url='/v2/models', params=params, accept_json=True)
def create_model(self, base_model_id, name=None, forced_glossary=None, parallel_corpus=None,
monolingual_corpus=None):
if forced_glossary is None and parallel_corpus is None and monolingual_corpus is None:
raise WatsonInvalidArgument('A glossary or corpus must be provided')
params = {'name': name,
'base_model_id': base_model_id}
files = {'forced_glossary': forced_glossary,
'parallel_corpus': parallel_corpus,
'monolingual_corpus': monolingual_corpus}
return self.request(method='POST', url='/v2/models', params=params, files=files, accept_json=True)
def get_model(self, model_id):
return self.request(method='GET', url='/v2/models/{0}'.format(model_id), accept_json=True)
def delete_model(self, model_id):
return self.request(method='DELETE', url='/v2/models/{0}'.format(model_id), accept_json=True)
def translate(self, text, source=None, target=None, model=None):
"""
Translates text from a source language to a target language
"""
if model is None and (source is None or target is None):
raise WatsonInvalidArgument('Either model or source and target must be specified')
data = {'text': text, 'source': source, 'target': target, 'model': model}
# data=data or json=data
return self.request(method='POST', url='/v2/translate', json=data).text
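# Typical usage (illustrative; assumes service credentials, which the
# WatsonDeveloperCloudService base class accepts as keyword arguments):
#
#     translator = LanguageTranslationV2(username='YOUR USERNAME',
#                                        password='YOUR PASSWORD')
#     print(translator.translate('Hello', source='en', target='es'))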
| mit |
jn7163/django | django/contrib/gis/utils/wkt.py | 589 | 1923 | """
Utilities for manipulating Geometry WKT.
"""
from django.utils import six
def precision_wkt(geom, prec):
"""
Returns WKT text of the geometry according to the given precision (an
integer or a string). If the precision is an integer, then the decimal
places of coordinates WKT will be truncated to that number:
>>> from django.contrib.gis.geos import Point
>>> pnt = Point(5, 23)
>>> pnt.wkt
'POINT (5.0000000000000000 23.0000000000000000)'
>>> precision_wkt(pnt, 1)
'POINT (5.0 23.0)'
If the precision is a string, it must be valid Python format string
(e.g., '%20.7f') -- thus, you should know what you're doing.
"""
if isinstance(prec, int):
num_fmt = '%%.%df' % prec
elif isinstance(prec, six.string_types):
num_fmt = prec
else:
raise TypeError
# TODO: Support 3D geometries.
coord_fmt = ' '.join([num_fmt, num_fmt])
def formatted_coords(coords):
return ','.join(coord_fmt % c[:2] for c in coords)
def formatted_poly(poly):
return ','.join('(%s)' % formatted_coords(r) for r in poly)
def formatted_geom(g):
gtype = str(g.geom_type).upper()
yield '%s(' % gtype
if gtype == 'POINT':
yield formatted_coords((g.coords,))
elif gtype in ('LINESTRING', 'LINEARRING'):
yield formatted_coords(g.coords)
elif gtype in ('POLYGON', 'MULTILINESTRING'):
yield formatted_poly(g)
elif gtype == 'MULTIPOINT':
yield formatted_coords(g.coords)
elif gtype == 'MULTIPOLYGON':
yield ','.join('(%s)' % formatted_poly(p) for p in g)
elif gtype == 'GEOMETRYCOLLECTION':
yield ','.join(''.join(wkt for wkt in formatted_geom(child)) for child in g)
else:
raise TypeError
yield ')'
return ''.join(wkt for wkt in formatted_geom(geom))
| bsd-3-clause |
jatinmistry13/pattern | pattern/text/de/inflect.py | 21 | 29115 | #### PATTERN | DE | INFLECT ########################################################################
# -*- coding: utf-8 -*-
# Copyright (c) 2012 University of Antwerp, Belgium
# Author: Tom De Smedt <[email protected]>
# License: BSD (see LICENSE.txt for details).
####################################################################################################
# Regular expressions-based rules for German word inflection:
# - pluralization and singularization of nouns and adjectives,
# - conjugation of verbs,
# - attributive and predicative of adjectives,
# - comparative and superlative of adjectives.
# Accuracy (measured on CELEX German morphology word forms):
# 75% for gender()
# 72% for pluralize()
# 84% for singularize() (for nominative)
# 87% for Verbs.find_lemma()
# 87% for Verbs.find_lexeme()
# 98% for predicative
import os
import sys
import re
try:
MODULE = os.path.dirname(os.path.realpath(__file__))
except:
MODULE = ""
sys.path.insert(0, os.path.join(MODULE, "..", "..", "..", ".."))
from pattern.text import Verbs as _Verbs
from pattern.text import (
INFINITIVE, PRESENT, PAST, FUTURE,
FIRST, SECOND, THIRD,
SINGULAR, PLURAL, SG, PL,
INDICATIVE, IMPERATIVE, SUBJUNCTIVE,
PROGRESSIVE,
PARTICIPLE, GERUND
)
sys.path.pop(0)
VERB, NOUN, ADJECTIVE, ADVERB = "VB", "NN", "JJ", "RB"
VOWELS = "aeiouy"
re_vowel = re.compile(r"a|e|i|o|u|y", re.I)
is_vowel = lambda ch: ch in VOWELS
#### ARTICLE #######################################################################################
# German inflection of depends on gender, role and number + the determiner (if any).
# Inflection gender.
# Masculine is the most common, so it is the default for all functions.
MASCULINE, FEMININE, NEUTER, PLURAL = \
MALE, FEMALE, NEUTRAL, PLURAL = \
M, F, N, PL = "m", "f", "n", "p"
# Inflection role.
# - nom = subject, "Der Hund bellt" (the dog barks).
# - acc = object, "Das Mädchen küsst den Hund" (the girl kisses the dog).
# - dat = object (indirect), "Der Mann gibt einen Knochen zum Hund" (the man gives the dog a bone).
# - gen = property, "die Knochen des Hundes" (the dog's bone).
NOMINATIVE, ACCUSATIVE, DATIVE, GENITIVE = SUBJECT, OBJECT, INDIRECT, PROPERTY = \
"nominative", "accusative", "dative", "genitive"
article_definite = {
("m", "nom"): "der", ("f", "nom"): "die", ("n", "nom"): "das", ("p", "nom"): "die",
("m", "acc"): "den", ("f", "acc"): "die", ("n", "acc"): "das", ("p", "acc"): "die",
("m", "dat"): "dem", ("f", "dat"): "der", ("n", "dat"): "dem", ("p", "dat"): "den",
("m", "gen"): "des", ("f", "gen"): "der", ("n", "gen"): "des", ("p", "gen"): "der",
}
article_indefinite = {
("m", "nom"): "ein" , ("f", "nom"): "eine" , ("n", "nom"): "ein" , ("p", "nom"): "eine",
("m", "acc"): "einen", ("f", "acc"): "eine" , ("n", "acc"): "ein" , ("p", "acc"): "eine",
("m", "dat"): "einem", ("f", "dat"): "einer", ("n", "dat"): "einem", ("p", "dat"): "einen",
("m", "gen"): "eines", ("f", "gen"): "einer", ("n", "gen"): "eines", ("p", "gen"): "einer",
}
def definite_article(word, gender=MALE, role=SUBJECT):
""" Returns the definite article (der/die/das/die) for a given word.
"""
return article_definite.get((gender[:1].lower(), role[:3].lower()))
def indefinite_article(word, gender=MALE, role=SUBJECT):
""" Returns the indefinite article (ein) for a given word.
"""
return article_indefinite.get((gender[:1].lower(), role[:3].lower()))
DEFINITE = "definite"
INDEFINITE = "indefinite"
def article(word, function=INDEFINITE, gender=MALE, role=SUBJECT):
""" Returns the indefinite (ein) or definite (der/die/das/die) article for the given word.
"""
return function == DEFINITE \
and definite_article(word, gender, role) \
or indefinite_article(word, gender, role)
_article = article
def referenced(word, article=INDEFINITE, gender=MALE, role=SUBJECT):
""" Returns a string with the article + the word.
"""
return "%s %s" % (_article(word, article, gender, role), word)
#### GENDER #########################################################################################
gender_masculine = (
"ant", "ast", "ich", "ig", "ismus", "ling", "or", "us"
)
gender_feminine = (
"a", "anz", "ei", "enz", "heit", "ie", "ik", "in", "keit", "schaf", "sion", "sis",
u"tät", "tion", "ung", "ur"
)
gender_neuter = (
"chen", "icht", "il", "it", "lein", "ma", "ment", "tel", "tum", "um","al", "an", "ar",
u"ät", "ent", "ett", "ier", "iv", "o", "on", "nis", "sal"
)
gender_majority_vote = {
MASCULINE: (
"ab", "af", "ag", "ak", "am", "an", "ar", "at", "au", "ch", "ck", "eb", "ef", "eg",
"el", "er", "es", "ex", "ff", "go", "hn", "hs", "ib", "if", "ig", "ir", "kt", "lf",
"li", "ll", "lm", "ls", "lt", "mi", "nd", "nk", "nn", "nt", "od", "of", "og", "or",
"pf", "ph", "pp", "ps", "rb", "rd", "rf", "rg", "ri", "rl", "rm", "rr", "rs", "rt",
"rz", "ss", "st", "tz", "ub", "uf", "ug", "uh", "un", "us", "ut", "xt", "zt"
),
FEMININE: (
"be", "ce", "da", "de", "dt", "ee", "ei", "et", "eu", "fe", "ft", "ge", "he", "hr",
"ht", "ia", "ie", "ik", "in", "it", "iz", "ka", "ke", "la", "le", "me", "na", "ne",
"ng", "nz", "on", "pe", "ra", "re", "se", "ta", "te", "ue", "ur", "ve", "ze"
),
NEUTER: (
"ad", "al", "as", "do", "ed", "eh", "em", "en", "hl", "id", "il", "im", "io", "is",
"iv", "ix", "ld", "lk", "lo", "lz", "ma", "md", "mm", "mt", "no", "ns", "ol", "om",
"op", "os", "ot", "pt", "rk", "rn", "ro", "to", "tt", "ul", "um", "uz"
)
}
def gender(word, pos=NOUN):
""" Returns the gender (MALE, FEMALE or NEUTRAL) for nouns (majority vote).
Returns None for words that are not nouns.
"""
w = word.lower()
if pos == NOUN:
# Default rules (baseline = 32%).
if w.endswith(gender_masculine):
return MASCULINE
if w.endswith(gender_feminine):
return FEMININE
if w.endswith(gender_neuter):
return NEUTER
# Majority vote.
for g in gender_majority_vote:
if w.endswith(gender_majority_vote[g]):
return g
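# Examples (illustrative): the suffix rules fire first, then the majority vote.
#
#     gender(u"Mädchen")    # => NEUTER    ("-chen")
#     gender(u"Optimismus") # => MASCULINE ("-ismus")
#     gender(u"Zeitung")    # => FEMININE  ("-ung")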
#### PLURALIZE ######################################################################################
plural_inflections = [
("aal", u"äle" ), ("aat", "aaten"), ( "abe", "aben" ), ("ach", u"ächer"), ("ade", "aden" ),
("age", "agen" ), ("ahn", "ahnen"), ( "ahr", "ahre" ), ("akt", "akte" ), ("ale", "alen" ),
("ame", "amen" ), ("amt", u"ämter"), ( "ane", "anen" ), ("ang", u"änge" ), ("ank", u"änke" ),
("ann", u"änner" ), ("ant", "anten"), ( "aph", "aphen"), ("are", "aren" ), ("arn", "arne" ),
("ase", "asen" ), ("ate", "aten" ), ( "att", u"ätter"), ("atz", u"ätze" ), ("aum", "äume" ),
("aus", u"äuser" ), ("bad", u"bäder"), ( "bel", "bel" ), ("ben", "ben" ), ("ber", "ber" ),
("bot", "bote" ), ("che", "chen" ), ( "chs", "chse" ), ("cke", "cken" ), ("del", "del" ),
("den", "den" ), ("der", "der" ), ( "ebe", "ebe" ), ("ede", "eden" ), ("ehl", "ehle" ),
("ehr", "ehr" ), ("eil", "eile" ), ( "eim", "eime" ), ("eis", "eise" ), ("eit", "eit" ),
("ekt", "ekte" ), ("eld", "elder"), ( "ell", "elle" ), ("ene", "enen" ), ("enz", "enzen" ),
("erd", "erde" ), ("ere", "eren" ), ( "erk", "erke" ), ("ern", "erne" ), ("ert", "erte" ),
("ese", "esen" ), ("ess", "esse" ), ( "est", "este" ), ("etz", "etze" ), ("eug", "euge" ),
("eur", "eure" ), ("fel", "fel" ), ( "fen", "fen" ), ("fer", "fer" ), ("ffe", "ffen" ),
("gel", "gel" ), ("gen", "gen" ), ( "ger", "ger" ), ("gie", "gie" ), ("hen", "hen" ),
("her", "her" ), ("hie", "hien" ), ( "hle", "hlen" ), ("hme", "hmen" ), ("hne", "hnen" ),
("hof", u"höfe" ), ("hre", "hren" ), ( "hrt", "hrten"), ("hse", "hsen" ), ("hte", "hten" ),
("ich", "iche" ), ("ick", "icke" ), ( "ide", "iden" ), ("ieb", "iebe" ), ("ief", "iefe" ),
("ieg", "iege" ), ("iel", "iele" ), ( "ien", "ium" ), ("iet", "iete" ), ("ife", "ifen" ),
("iff", "iffe" ), ("ift", "iften"), ( "ige", "igen" ), ("ika", "ikum" ), ("ild", "ilder" ),
("ilm", "ilme" ), ("ine", "inen" ), ( "ing", "inge" ), ("ion", "ionen"), ("ise", "isen" ),
("iss", "isse" ), ("ist", "isten"), ( "ite", "iten" ), ("itt", "itte" ), ("itz", "itze" ),
("ium", "ium" ), ("kel", "kel" ), ( "ken", "ken" ), ("ker", "ker" ), ("lag", u"läge" ),
("lan", u"läne" ), ("lar", "lare" ), ( "lei", "leien"), ("len", "len" ), ("ler", "ler" ),
("lge", "lgen" ), ("lie", "lien" ), ( "lle", "llen" ), ("mel", "mel" ), ("mer", "mer" ),
("mme", "mmen" ), ("mpe", "mpen" ), ( "mpf", "mpfe" ), ("mus", "mus" ), ("mut", "mut" ),
("nat", "nate" ), ("nde", "nden" ), ( "nen", "nen" ), ("ner", "ner" ), ("nge", "ngen" ),
("nie", "nien" ), ("nis", "nisse"), ( "nke", "nken" ), ("nkt", "nkte" ), ("nne", "nnen" ),
("nst", "nste" ), ("nte", "nten" ), ( "nze", "nzen" ), ("ock", u"öcke" ), ("ode", "oden" ),
("off", "offe" ), ("oge", "ogen" ), ( "ohn", u"öhne" ), ("ohr", "ohre" ), ("olz", u"ölzer" ),
("one", "onen" ), ("oot", "oote" ), ( "opf", u"öpfe" ), ("ord", "orde" ), ("orm", "ormen" ),
("orn", u"örner" ), ("ose", "osen" ), ( "ote", "oten" ), ("pel", "pel" ), ("pen", "pen" ),
("per", "per" ), ("pie", "pien" ), ( "ppe", "ppen" ), ("rag", u"räge" ), ("rau", u"raün" ),
("rbe", "rben" ), ("rde", "rden" ), ( "rei", "reien"), ("rer", "rer" ), ("rie", "rien" ),
("rin", "rinnen"), ("rke", "rken" ), ( "rot", "rote" ), ("rre", "rren" ), ("rte", "rten" ),
("ruf", "rufe" ), ("rzt", "rzte" ), ( "sel", "sel" ), ("sen", "sen" ), ("ser", "ser" ),
("sie", "sien" ), ("sik", "sik" ), ( "sse", "ssen" ), ("ste", "sten" ), ("tag", "tage" ),
("tel", "tel" ), ("ten", "ten" ), ( "ter", "ter" ), ("tie", "tien" ), ("tin", "tinnen"),
("tiv", "tive" ), ("tor", "toren"), ( "tte", "tten" ), ("tum", "tum" ), ("tur", "turen" ),
("tze", "tzen" ), ("ube", "uben" ), ( "ude", "uden" ), ("ufe", "ufen" ), ("uge", "ugen" ),
("uhr", "uhren" ), ("ule", "ulen" ), ( "ume", "umen" ), ("ung", "ungen"), ("use", "usen" ),
("uss", u"üsse" ), ("ute", "uten" ), ( "utz", "utz" ), ("ver", "ver" ), ("weg", "wege" ),
("zer", "zer" ), ("zug", u"züge" ), (u"ück", u"ücke" )
]
def pluralize(word, pos=NOUN, gender=MALE, role=SUBJECT, custom={}):
""" Returns the plural of a given word.
The inflection is based on probability rather than gender and role.
"""
w = word.lower().capitalize()
if word in custom:
return custom[word]
if pos == NOUN:
for a, b in plural_inflections:
if w.endswith(a):
return w[:-len(a)] + b
# Default rules (baseline = 69%).
if w.startswith("ge"):
return w
if w.endswith("gie"):
return w
if w.endswith("e"):
return w + "n"
if w.endswith("ien"):
return w[:-2] + "um"
if w.endswith(("au", "ein", "eit", "er", "en", "el", "chen", "mus", u"tät", "tik", "tum", "u")):
return w
if w.endswith(("ant", "ei", "enz", "ion", "ist", "or", "schaft", "tur", "ung")):
return w + "en"
if w.endswith("in"):
return w + "nen"
if w.endswith("nis"):
return w + "se"
if w.endswith(("eld", "ild", "ind")):
return w + "er"
if w.endswith("o"):
return w + "s"
if w.endswith("a"):
return w[:-1] + "en"
# Inflect common umlaut vowels: Kopf => Köpfe.
if w.endswith(("all", "and", "ang", "ank", "atz", "auf", "ock", "opf", "uch", "uss")):
umlaut = w[-3]
umlaut = umlaut.replace("a", u"ä")
umlaut = umlaut.replace("o", u"ö")
umlaut = umlaut.replace("u", u"ü")
return w[:-3] + umlaut + w[-2:] + "e"
for a, b in (
("ag", u"äge"),
("ann", u"änner"),
("aum", u"äume"),
("aus", u"äuser"),
("zug", u"züge")):
if w.endswith(a):
return w[:-len(a)] + b
return w + "e"
return w
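# Examples (illustrative):
#
#     pluralize(u"Zeitung") # => u"Zeitungen" (suffix table: -ung => -ungen)
#     pluralize(u"Kopf")    # => u"Köpfe"     (suffix table: -opf => -öpfe)
#     pluralize(u"Hund")    # => u"Hunde"     (default rule: append -e)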
#### SINGULARIZE ###################################################################################
singular_inflections = [
( "innen", "in" ), (u"täten", u"tät"), ( "ahnen", "ahn"), ( "enten", "ent"), (u"räser", "ras"),
( "hrten", "hrt"), (u"ücher", "uch"), (u"örner", "orn"), (u"änder", "and"), (u"ürmer", "urm"),
( "ahlen", "ahl"), ( "uhren", "uhr"), (u"ätter", "att"), ( "suren", "sur"), ( "chten", "cht"),
( "kuren", "kur"), ( "erzen", "erz"), (u"güter", "gut"), ( "soren", "sor"), (u"änner", "ann"),
(u"äuser", "aus"), ( "taten", "tat"), ( "isten", "ist"), (u"bäder", "bad"), (u"ämter", "amt"),
( "eiten", "eit"), ( "raten", "rat"), ( "ormen", "orm"), ( "ionen", "ion"), ( "nisse", "nis"),
(u"ölzer", "olz"), ( "ungen", "ung"), (u"läser", "las"), (u"ächer", "ach"), ( "urten", "urt"),
( "enzen", "enz"), ( "aaten", "aat"), ( "aphen", "aph"), (u"öcher", "och"), (u"türen", u"tür"),
( "sonen", "son"), (u"ühren", u"ühr"), (u"ühner", "uhn"), ( "toren", "tor"), (u"örter", "ort"),
( "anten", "ant"), (u"räder", "rad"), ( "turen", "tur"), (u"äuler", "aul"), ( u"änze", "anz"),
( "tten", "tte"), ( "mben", "mbe"), ( u"ädte", "adt"), ( "llen", "lle"), ( "ysen", "yse"),
( "rben", "rbe"), ( "hsen", "hse"), ( u"raün", "rau"), ( "rven", "rve"), ( "rken", "rke"),
( u"ünge", "ung"), ( u"üten", u"üte"), ( "usen", "use"), ( "tien", "tie"), ( u"läne", "lan"),
( "iben", "ibe"), ( "ifen", "ife"), ( "ssen", "sse"), ( "gien", "gie"), ( "eten", "ete"),
( "rden", "rde"), ( u"öhne", "ohn"), ( u"ärte", "art"), ( "ncen", "nce"), ( u"ünde", "und"),
( "uben", "ube"), ( "lben", "lbe"), ( u"üsse", "uss"), ( "agen", "age"), ( u"räge", "rag"),
( "ogen", "oge"), ( "anen", "ane"), ( "sken", "ske"), ( "eden", "ede"), ( u"össe", "oss"),
( u"ürme", "urm"), ( "ggen", "gge"), ( u"üren", u"üre"), ( "nten", "nte"), ( u"ühle", u"ühl"),
( u"änge", "ang"), ( "mmen", "mme"), ( "igen", "ige"), ( "nken", "nke"), ( u"äcke", "ack"),
( "oden", "ode"), ( "oben", "obe"), ( u"ähne", "ahn"), ( u"änke", "ank"), ( "inen", "ine"),
( "seen", "see"), ( u"äfte", "aft"), ( "ulen", "ule"), ( u"äste", "ast"), ( "hren", "hre"),
( u"öcke", "ock"), ( "aben", "abe"), ( u"öpfe", "opf"), ( "ugen", "uge"), ( "lien", "lie"),
( u"ände", "and"), ( u"ücke", u"ück"), ( "asen", "ase"), ( "aden", "ade"), ( "dien", "die"),
( "aren", "are"), ( "tzen", "tze"), ( u"züge", "zug"), ( u"üfte", "uft"), ( "hien", "hie"),
( "nden", "nde"), ( u"älle", "all"), ( "hmen", "hme"), ( "ffen", "ffe"), ( "rmen", "rma"),
( "olen", "ole"), ( "sten", "ste"), ( "amen", "ame"), ( u"höfe", "hof"), ( u"üste", "ust"),
( "hnen", "hne"), ( u"ähte", "aht"), ( "umen", "ume"), ( "nnen", "nne"), ( "alen", "ale"),
( "mpen", "mpe"), ( "mien", "mie"), ( "rten", "rte"), ( "rien", "rie"), ( u"äute", "aut"),
( "uden", "ude"), ( "lgen", "lge"), ( "ngen", "nge"), ( "iden", "ide"), ( u"ässe", "ass"),
( "osen", "ose"), ( "lken", "lke"), ( "eren", "ere"), ( u"üche", "uch"), ( u"lüge", "lug"),
( "hlen", "hle"), ( "isen", "ise"), ( u"ären", u"äre"), ( u"töne", "ton"), ( "onen", "one"),
( "rnen", "rne"), ( u"üsen", u"üse"), ( u"haün", "hau"), ( "pien", "pie"), ( "ihen", "ihe"),
( u"ürfe", "urf"), ( "esen", "ese"), ( u"ätze", "atz"), ( "sien", "sie"), ( u"läge", "lag"),
( "iven", "ive"), ( u"ämme", "amm"), ( u"äufe", "auf"), ( "ppen", "ppe"), ( "enen", "ene"),
( "lfen", "lfe"), ( u"äume", "aum"), ( "nien", "nie"), ( "unen", "une"), ( "cken", "cke"),
( "oten", "ote"), ( "mie", "mie"), ( "rie", "rie"), ( "sis", "sen"), ( "rin", "rin"),
( "ein", "ein"), ( "age", "age"), ( "ern", "ern"), ( "ber", "ber"), ( "ion", "ion"),
( "inn", "inn"), ( "ben", "ben"), ( u"äse", u"äse"), ( "eis", "eis"), ( "hme", "hme"),
( "iss", "iss"), ( "hen", "hen"), ( "fer", "fer"), ( "gie", "gie"), ( "fen", "fen"),
( "her", "her"), ( "ker", "ker"), ( "nie", "nie"), ( "mer", "mer"), ( "ler", "ler"),
( "men", "men"), ( "ass", "ass"), ( "ner", "ner"), ( "per", "per"), ( "rer", "rer"),
( "mus", "mus"), ( "abe", "abe"), ( "ter", "ter"), ( "ser", "ser"), ( u"äle", "aal"),
( "hie", "hie"), ( "ger", "ger"), ( "tus", "tus"), ( "gen", "gen"), ( "ier", "ier"),
( "ver", "ver"), ( "zer", "zer"),
]
singular = {
u"Löwen": u"Löwe",
}
def singularize(word, pos=NOUN, gender=MALE, role=SUBJECT, custom={}):
""" Returns the singular of a given word.
The inflection is based on probability rather than gender and role.
"""
w = word.lower().capitalize()
if word in custom:
return custom[word]
if word in singular:
return singular[word]
if pos == NOUN:
for a, b in singular_inflections:
if w.endswith(a):
return w[:-len(a)] + b
# Default rule: strip known plural suffixes (baseline = 51%).
for suffix in ("nen", "en", "n", "e", "er", "s"):
if w.endswith(suffix):
w = w[:-len(suffix)]
break
# Corrections (these add about 1% accuracy):
if w.endswith(("rr", "rv", "nz")):
return w + "e"
return w
return w
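# Examples (illustrative):
#
#     singularize(u"Zeitungen") # => u"Zeitung" (suffix table: -ungen => -ung)
#     singularize(u"Hunde")     # => u"Hund"    (default rule: strip -e)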
#### VERB CONJUGATION ##############################################################################
# The verb table was trained on CELEX and contains the top 2000 most frequent verbs.
prefix_inseparable = (
"be", "emp", "ent", "er", "ge", "miss", u"über", "unter", "ver", "voll", "wider", "zer"
)
prefix_separable = (
"ab", "an", "auf", "aus", "bei", "durch", "ein", "fort", "mit", "nach", "vor", "weg",
u"zurück", "zusammen", "zu", "dabei", "daran", "da", "empor", "entgegen", "entlang",
"fehl", "fest", u"gegenüber", "gleich", "herab", "heran", "herauf", "heraus", "herum",
"her", "hinweg", "hinzu", "hin", "los", "nieder", "statt", "umher", "um", "weg",
"weiter", "wieder", "zwischen"
) + ( # There are many more...
"dort", "fertig", "frei", "gut", "heim", "hoch", "klein", "klar", "nahe", "offen", "richtig"
)
prefixes = prefix_inseparable + prefix_separable
def encode_sz(s):
return s.replace(u"ß", "ss")
def decode_sz(s):
return s.replace("ss", u"ß")
class Verbs(_Verbs):
def __init__(self):
_Verbs.__init__(self, os.path.join(MODULE, "de-verbs.txt"),
language = "de",
format = [0, 1, 2, 3, 4, 5, 8, 17, 18, 19, 20, 21, 24, 52, 54, 53, 55, 56, 58, 59, 67, 68, 70, 71],
default = {6: 4, 22: 20, 57: 55, 60: 58, 69: 67, 72: 70}
)
def find_lemma(self, verb):
""" Returns the base form of the given inflected verb, using a rule-based approach.
"""
v = verb.lower()
# Common prefixes: be-finden and emp-finden probably inflect like finden.
if not (v.startswith("ge") and v.endswith("t")): # Probably gerund.
for prefix in prefixes:
if v.startswith(prefix) and v[len(prefix):] in self.inflections:
return prefix + self.inflections[v[len(prefix):]]
# Common sufixes: setze nieder => niedersetzen.
b, suffix = " " in v and v.split()[:2] or (v, "")
# Infinitive -ln: trommeln.
if b.endswith(("ln", "rn")):
return b
# Lemmatize regular inflections.
for x in ("test", "est", "end", "ten", "tet", "en", "et", "te", "st", "e", "t"):
if b.endswith(x): b = b[:-len(x)]; break
# Subjunctive: hielte => halten, schnitte => schneiden.
for x, y in (
("ieb", "eib"), ( "ied", "eid"), ( "ief", "auf" ), ( "ieg", "eig" ), ("iel", "alt"),
("ien", "ein"), ("iess", "ass"), (u"ieß", u"aß" ), ( "iff", "eif" ), ("iss", "eiss"),
(u"iß", u"eiß"), ( "it", "eid"), ( "oss", "iess"), (u"öss", "iess")):
if b.endswith(x): b = b[:-len(x)] + y; break
b = b.replace("eeiss", "eiss")
b = b.replace("eeid", "eit")
# Subjunctive: wechselte => wechseln
if not b.endswith(("e", "l")) and not (b.endswith("er") and len(b) >= 3 and not b[-3] in VOWELS):
b = b + "e"
# abknallst != abknalln => abknallen
if b.endswith(("hl", "ll", "ul", "eil")):
b = b + "e"
# Strip ge- from (likely) gerund:
if b.startswith("ge") and v.endswith("t"):
b = b[2:]
# Corrections (these add about 1.5% accuracy):
if b.endswith(("lnde", "rnde")):
b = b[:-3]
if b.endswith(("ae", "al", u"öe", u"üe")):
b = b.rstrip("e") + "te"
if b.endswith(u"äl"):
b = b + "e"
return suffix + b + "n"
def find_lexeme(self, verb):
""" For a regular verb (base form), returns the forms using a rule-based approach.
"""
v = verb.lower()
# Stem = infinitive minus -en, -ln, -rn.
b = b0 = re.sub("en$", "", re.sub("ln$", "l", re.sub("rn$", "r", v)))
# Split common prefixes.
x, x1, x2 = "", "", ""
for prefix in prefix_separable:
if v.startswith(prefix):
b, x = b[len(prefix):], prefix
x1 = (" " + x).rstrip()
x2 = x + "ge"
break
# Present tense 1sg and subjunctive -el: handeln => ich handle, du handlest.
pl = b.endswith("el") and b[:-2]+"l" or b
# Present tense 1pl -el: handeln => wir handeln
pw = v.endswith(("ln", "rn")) and v or b+"en"
# Present tense ending in -d or -t gets -e:
pr = b.endswith(("d", "t")) and b+"e" or b
# Present tense 2sg gets -st, unless stem ends with -s or -z.
p2 = pr.endswith(("s","z")) and pr+"t" or pr+"st"
# Present participle: spiel + -end, arbeiten + -d:
pp = v.endswith(("en", "ln", "rn")) and v+"d" or v+"end"
# Past tense regular:
pt = encode_sz(pr) + "t"
# Past participle: haushalten => hausgehalten
ge = (v.startswith(prefix_inseparable) or b.endswith(("r","t"))) and pt or "ge"+pt
ge = x and x+"ge"+pt or ge
# Present subjunctive: stem + -e, -est, -en, -et:
s1 = encode_sz(pl)
# Past subjunctive: past (usually with Umlaut) + -e, -est, -en, -et:
s2 = encode_sz(pt)
# Construct the lexeme:
lexeme = a = [
v,
pl+"e"+x1, p2+x1, pr+"t"+x1, pw+x1, pr+"t"+x1, pp, # present
pt+"e"+x1, pt+"est"+x1, pt+"e"+x1, pt+"en"+x1, pt+"et"+x1, ge, # past
b+"e"+x1, pr+"t"+x1, x+pw, # imperative
s1+"e"+x1, s1+"est"+x1, s1+"en"+x1, s1+"et"+x1, # subjunctive I
s2+"e"+x1, s2+"est"+x1, s2+"en"+x1, s2+"et"+x1 # subjunctive II
]
# Encode Eszett (ß) and attempt to retrieve from the lexicon.
# Decode Eszett for present and imperative.
if encode_sz(v) in self:
a = self[encode_sz(v)]
a = [decode_sz(v) for v in a[:7]] + a[7:13] + [decode_sz(v) for v in a[13:20]] + a[20:]
# Since the lexicon does not contain imperative for all verbs, don't simply return it.
# Instead, update the rule-based lexeme with inflections from the lexicon.
return [a[i] or lexeme[i] for i in range(len(a))]
def tenses(self, verb, parse=True):
""" Returns a list of possible tenses for the given inflected verb.
"""
tenses = _Verbs.tenses(self, verb, parse)
if len(tenses) == 0:
# auswirkte => wirkte aus
for prefix in prefix_separable:
if verb.startswith(prefix):
tenses = _Verbs.tenses(self, verb[len(prefix):] + " " + prefix, parse)
break
return tenses
verbs = Verbs()
conjugate, lemma, lexeme, tenses = \
verbs.conjugate, verbs.lemma, verbs.lexeme, verbs.tenses
#### ATTRIBUTIVE & PREDICATIVE #####################################################################
# Strong inflection: no article.
adjectives_strong = {
("m", "nom"): "er", ("f", "nom"): "e" , ("n", "nom"): "es", ("p", "nom"): "e",
("m", "acc"): "en", ("f", "acc"): "e" , ("n", "acc"): "es", ("p", "acc"): "e",
("m", "dat"): "em", ("f", "dat"): "er", ("n", "dat"): "em", ("p", "dat"): "en",
("m", "gen"): "en", ("f", "gen"): "er", ("n", "gen"): "en", ("p", "gen"): "er",
}
# Mixed inflection: after indefinite article ein & kein and possessive determiners.
adjectives_mixed = {
("m", "nom"): "er", ("f", "nom"): "e" , ("n", "nom"): "es", ("p", "nom"): "en",
("m", "acc"): "en", ("f", "acc"): "e" , ("n", "acc"): "es", ("p", "acc"): "en",
("m", "dat"): "en", ("f", "dat"): "en", ("n", "dat"): "en", ("p", "dat"): "en",
("m", "gen"): "en", ("f", "gen"): "en", ("n", "gen"): "en", ("p", "gen"): "en",
}
# Weak inflection: after definite article.
adjectives_weak = {
("m", "nom"): "e", ("f", "nom"): "e" , ("n", "nom"): "e", ("p", "nom"): "en",
("m", "acc"): "en", ("f", "acc"): "e" , ("n", "acc"): "e", ("p", "acc"): "en",
("m", "dat"): "en", ("f", "dat"): "en", ("n", "dat"): "en", ("p", "dat"): "en",
("m", "gen"): "en", ("f", "gen"): "en", ("n", "gen"): "en", ("p", "gen"): "en",
}
# Uninflected + exceptions.
adjective_attributive = {
"etwas" : "etwas",
"genug" : "genug",
"viel" : "viel",
"wenig" : "wenig"
}
def attributive(adjective, gender=MALE, role=SUBJECT, article=None):
""" For a predicative adjective, returns the attributive form (lowercase).
In German, the attributive is formed with -e, -em, -en, -er or -es,
depending on gender (masculine, feminine, neuter or plural) and role
(nominative, accusative, dative, genitive).
"""
w, g, c, a = \
adjective.lower(), gender[:1].lower(), role[:3].lower(), article and article.lower() or None
if w in adjective_attributive:
return adjective_attributive[w]
if a is None \
or a in ("mir", "dir", "ihm") \
or a in ("ein", "etwas", "mehr") \
or a.startswith(("all", "mehrer", "wenig", "viel")):
return w + adjectives_strong.get((g, c), "")
if a.startswith(("ein", "kein")) \
or a.startswith(("mein", "dein", "sein", "ihr", "Ihr", "unser", "euer")):
return w + adjectives_mixed.get((g, c), "")
if a in ("arm", "alt", "all", "der", "die", "das", "den", "dem", "des") \
or a.startswith((
"derselb", "derjenig", "jed", "jeglich", "jen", "manch",
"dies", "solch", "welch")):
return w + adjectives_weak.get((g, c), "")
# Default to strong inflection.
return w + adjectives_strong.get((g, c), "")
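# Example (sketch, using explicit gender/role strings): with the definite
# article the weak table applies, without an article the strong table does:
#
#   attributive("klein", gender="m", role="nominative", article="der")  # => "kleine"
#   attributive("klein", gender="m", role="nominative")                 # => "kleiner"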
def predicative(adjective):
""" Returns the predicative adjective (lowercase).
In German, the attributive form preceding a noun is always used:
"ein kleiner Junge" => strong, masculine, nominative,
"eine schöne Frau" => mixed, feminine, nominative,
"der kleine Prinz" => weak, masculine, nominative, etc.
The predicative is useful for lemmatization.
"""
w = adjective.lower()
if len(w) > 3:
for suffix in ("em", "en", "er", "es", "e"):
if w.endswith(suffix):
b = w[:max(-len(suffix), -(len(w)-3))]
if b.endswith("bl"): # plausibles => plausibel
b = b[:-1] + "el"
if b.endswith("pr"): # propres => proper
b = b[:-1] + "er"
return b
return w
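# Example (sketch): predicative("kleiner") and predicative("kleines") both
# return "klein"; the max() guard always leaves at least three characters of
# the stem, so short forms are not over-stripped.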
#### COMPARATIVE & SUPERLATIVE #####################################################################
COMPARATIVE = "er"
SUPERLATIVE = "st"
def grade(adjective, suffix=COMPARATIVE):
""" Returns the comparative or superlative form of the given (inflected) adjective.
"""
b = predicative(adjective)
# groß => großt, schön => schönst
if suffix == SUPERLATIVE and b.endswith(("s", u"ß")):
suffix = suffix[1:]
# große => großere, schönes => schöneres
return adjective[:len(b)] + suffix + adjective[len(b):]
def comparative(adjective):
return grade(adjective, COMPARATIVE)
def superlative(adjective):
return grade(adjective, SUPERLATIVE)
#print(comparative(u"schönes"))
#print(superlative(u"schönes"))
#print(superlative(u"große"))
| bsd-3-clause |
Conflei/ATI | [ATI] Misfenterest/Frontend/env/lib/python2.6/site-packages/setuptools/archive_util.py | 520 | 6609 | """Utilities for extracting common archive formats"""
__all__ = [
"unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]
import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from pkg_resources import ensure_directory, ContextualZipFile
from distutils.errors import DistutilsError
class UnrecognizedFormat(DistutilsError):
"""Couldn't recognize the archive type"""
def default_filter(src,dst):
"""The default progress/filter callback; returns True for all files"""
return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
`progress_filter` is a function taking two arguments: a source path
internal to the archive ('/'-separated), and a filesystem path where it
will be extracted. The callback must return the desired extract path
(which may be the same as the one passed in), or else ``None`` to skip
that file or directory. The callback can thus be used to report on the
progress of the extraction, as well as to filter the items extracted or
alter their extraction paths.
`drivers`, if supplied, must be a non-empty sequence of functions with the
same signature as this function (minus the `drivers` argument), that raise
``UnrecognizedFormat`` if they do not support extracting the designated
archive type. The `drivers` are tried in sequence until one is found that
does not raise an error, or until all are exhausted (in which case
``UnrecognizedFormat`` is raised). If you do not supply a sequence of
drivers, the module's ``extraction_drivers`` constant will be used, which
means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
order.
"""
for driver in drivers or extraction_drivers:
try:
driver(filename, extract_dir, progress_filter)
except UnrecognizedFormat:
continue
else:
return
else:
raise UnrecognizedFormat(
"Not a recognized archive type: %s" % filename
)
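# Example (sketch): a progress_filter can both report progress and skip
# entries by returning None; hypothetically, extracting only Python sources:
#
#   def only_python(src, dst):
#       return dst if src.endswith('.py') else None
#
#   unpack_archive('pkg-1.0.zip', 'build/pkg', progress_filter=only_python)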
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
""""Unpack" a directory, using the same interface as for archives
Raises ``UnrecognizedFormat`` if `filename` is not a directory
"""
if not os.path.isdir(filename):
raise UnrecognizedFormat("%s is not a directory" % filename)
paths = {
filename: ('', extract_dir),
}
for base, dirs, files in os.walk(filename):
src, dst = paths[base]
for d in dirs:
paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
for f in files:
target = os.path.join(dst, f)
target = progress_filter(src + f, target)
if not target:
# skip non-files
continue
ensure_directory(target)
f = os.path.join(base, f)
shutil.copyfile(f, target)
shutil.copystat(f, target)
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack zip `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
if not zipfile.is_zipfile(filename):
raise UnrecognizedFormat("%s is not a zip file" % (filename,))
with ContextualZipFile(filename) as z:
for info in z.infolist():
name = info.filename
# don't extract absolute paths or ones with .. in them
if name.startswith('/') or '..' in name.split('/'):
continue
target = os.path.join(extract_dir, *name.split('/'))
target = progress_filter(name, target)
if not target:
continue
if name.endswith('/'):
# directory
ensure_directory(target)
else:
# file
ensure_directory(target)
data = z.read(info.filename)
with open(target, 'wb') as f:
f.write(data)
unix_attributes = info.external_attr >> 16
if unix_attributes:
os.chmod(target, unix_attributes)
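# Note on the chmod above: zipfiles built on Unix keep st_mode in the high
# 16 bits of external_attr, so a file archived with mode 0o755 satisfies
# (info.external_attr >> 16) & 0o777 == 0o755 and its permissions are restored.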
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
)
with contextlib.closing(tarobj):
# don't do any chowning!
tarobj.chown = lambda *args: None
for member in tarobj:
name = member.name
# don't extract absolute paths or ones with .. in them
if not name.startswith('/') and '..' not in name.split('/'):
prelim_dst = os.path.join(extract_dir, *name.split('/'))
                # resolve any links and extract the link targets as normal
                # files
while member is not None and (member.islnk() or member.issym()):
linkpath = member.linkname
if member.issym():
base = posixpath.dirname(member.name)
linkpath = posixpath.join(base, linkpath)
linkpath = posixpath.normpath(linkpath)
member = tarobj._getmember(linkpath)
if member is not None and (member.isfile() or member.isdir()):
final_dst = progress_filter(name, prelim_dst)
if final_dst:
if final_dst.endswith(os.sep):
final_dst = final_dst[:-1]
try:
# XXX Ugh
tarobj._extract_member(member, final_dst)
except tarfile.ExtractError:
# chown/chmod/mkfifo/mknode/makedev failed
pass
return True
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
| mit |
faynwol/gfw_whitelist | list_black.py | 7 | 4090 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import urlparse
import logging
import copy
from blacklist import ban
from blacklist import builtin
from blacklist import custom
from blacklist import tld
__all__ = ['main']
def decode_gfwlist(content):
    # decode base64 if necessary
try:
if '.' in content:
raise
return content.decode('base64')
except:
return content
def get_hostname(something):
try:
# quite enough for GFW
if not something.startswith('http:'):
something = 'http://' + something
r = urlparse.urlparse(something)
return r.hostname
except Exception as e:
logging.error(e)
return None
def add_domain_to_set(s, something):
hostname = get_hostname(something)
if hostname is not None:
if hostname.startswith('.'):
hostname = hostname.lstrip('.')
if hostname.endswith('/'):
hostname = hostname.rstrip('/')
if hostname:
s.add(hostname)
def parse_gfwlist(content):
gfwlist = content.splitlines(False)
domains = builtin.getlist()
for line in gfwlist:
if line.find('.*') >= 0:
continue
elif line.find('*') >= 0:
line = line.replace('*', '/')
if line.startswith('!'):
continue
elif line.startswith('['):
continue
elif line.startswith('@'):
# ignore white list
continue
elif line.startswith('||!--'):
continue
elif line.startswith('||'):
add_domain_to_set(domains, line.lstrip('||'))
elif line.startswith('|'):
add_domain_to_set(domains, line.lstrip('|'))
elif line.startswith('.'):
add_domain_to_set(domains, line.lstrip('.'))
else:
add_domain_to_set(domains, line)
return domains
def reduce_domains(domains):
# reduce 'www.google.com' to 'google.com'
# remove invalid domains
tlds = tld.getlist()
cuss = custom.getlist()
bans = ban.getlist()
new_domains = set()
for cus in cuss:
new_domains.add(cus)
for domain in domains:
domain_parts = domain.split('.')
last_root_domain = None
for i in xrange(0, len(domain_parts)):
root_domain = '.'.join(domain_parts[len(domain_parts) - i - 1:])
if i == 0:
if not tlds.__contains__(root_domain):
# root_domain is not a valid tld
break
last_root_domain = root_domain
if tlds.__contains__(root_domain):
continue
else:
break
if last_root_domain is not None \
and last_root_domain not in bans \
and last_root_domain not in new_domains:
same = False
for cus in new_domains:
if len(cus) < len(last_root_domain):
if cmp(cus[::-1] + '.', last_root_domain[::-1][0:len(cus)+1]) == 0 :
same = True
break
elif len(cus) > len(last_root_domain):
if cmp(last_root_domain[::-1] + '.', cus[::-1][0:len(last_root_domain)+1]) == 0 :
new_domains.remove(cus)
break
if not same :
new_domains.add(last_root_domain)
return new_domains
def obfs(url):
ret = ''
index = 0
for c in url:
if c == '.' or ((index & 3) == 0 and index > 0):
last = ord(ret[-1])
if last < 64:
ret = "%s\\%o" % (ret[:-1], last)
else:
ret = "%s\\x%x" % (ret[:-1], last)
ret += c
index += 1
return ret
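# Example (sketch): obfs() escapes the character before every dot (and before
# every 4th character) in hex or octal, presumably to obscure the generated
# domain list, e.g. obfs("g.cn") == "\\x67.cn" and obfs("?.cn") == "\\77.cn".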
def obfs_list(list_result):
ret = set()
for item in list_result:
ret.add( obfs(item) )
return ret
def get_all_list(lists):
all_list = lists
result = list()
all_list.remove('')
url_dict = {}
for item in all_list:
parts = item.split('.')
if not url_dict.has_key(parts[-1]) :
url_dict[parts[-1]] = list()
url_dict[parts[-1]].append( obfs('.'.join(parts[:-1])) )
key_comma = ''
url_dict_keys = url_dict.keys()
url_dict_keys.sort()
for key in url_dict_keys:
url_dict[key].sort()
result.append('%s"%s":{' % (key_comma, key) )
comma = ''
for item in url_dict[key]:
result.append(comma + '\n"' + item + '":1')
comma = ','
result.append('\n}')
key_comma = ','
return result
def final_list():
with open('gfwlist.txt', 'r') as f:
content = f.read()
content = decode_gfwlist(content)
#with open('gfwlist_ogn.txt', 'w') as f:
# f.write(content)
domains = parse_gfwlist(content)
list_result = get_all_list(reduce_domains(domains))
content = ''.join(list_result)
content = '{' + content + "\n}"
return content
| mit |
chjw8016/GreenOdoo7-haibao | openerp/addons/event_moodle/event_moodle.py | 50 | 13316 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import xmlrpclib
import string
import time
import random
from random import sample
from openerp.tools.translate import _
class event_moodle(osv.osv):
_name = 'event.moodle.config.wiz'
_columns = {
        'moodle_username' : fields.char('Moodle Username', 128, help="You can also connect with the username you defined when creating the token"),
'moodle_password' : fields.char('Moodle Password', 128),
'moodle_token' : fields.char('Moodle Token', 128, help="Put your token that you created in your moodle server"),
        'server_moodle': fields.char('Moodle Server', 128, required=True,help="URL where your moodle server is hosted. For example: 'http://127.0.0.1' or 'http://localhost'"),
'url': fields.char('URL to Moodle Server', size=256, help="The url that will be used for the connection with moodle in xml-rpc"),
}
_order = 'create_date desc'
_defaults = {
'server_moodle': 'http://127.0.0.1',
}
def configure_moodle(self, cr, uid, ids, context=None):
url = self.make_url(cr, uid, ids, context=context)
self.write(cr, uid, ids, {'url': url})
return {'type': 'ir.actions.act_window_close'}
def find(self, cr, uid, context=None):
"""
        Find the config wizard containing the configuration and raise an error if none is available.
"""
moodle_config_wiz_ids = self.search(cr, uid, [], context=context)
if not moodle_config_wiz_ids:
raise osv.except_osv(_('Error!'),_("First configure your moodle connection."))
return moodle_config_wiz_ids[0]
def make_url(self, cr, uid, ids, context=None):
"""
        create the proper url from the configuration information
        @return the url for the moodle connection
"""
def _encode_password(password):
for i in range(len(password)):
x = password[i]
if x not in string.ascii_letters + string.digits:
unicode_car = (hex(ord(x)))
hex_car = '%'+str(unicode_car[2:])
password = password.replace(x,hex_car)
return password
url=""
config_moodle = self.browse(cr, uid, ids[0], context=context)
if config_moodle.moodle_username and config_moodle.moodle_password:
            #connection with password and username
password = _encode_password(config_moodle.moodle_password)
url = config_moodle.server_moodle + '/moodle/webservice/xmlrpc/simpleserver.php?wsusername=' + config_moodle.moodle_username + '&wspassword=' + password
if config_moodle.moodle_token:
            #connection with token
url = config_moodle.server_moodle + '/moodle/webservice/xmlrpc/server.php?wstoken=' + config_moodle.moodle_token
return url
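    # Resulting URL shapes (sketch; server, credentials and token are placeholders):
    #   username/password: <server>/moodle/webservice/xmlrpc/simpleserver.php?wsusername=<u>&wspassword=<p>
    #   token:             <server>/moodle/webservice/xmlrpc/server.php?wstoken=<t>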
def create_moodle_user(self, cr, uid, id, dic_user, context=None):
"""
create a moodle user
@param dic_user : is a list of dictonnaries with the moodle information
@return a liste of dictonaries with the create user id
"""
#connect to moodle
url = self.browse(cr, uid, id, context=context).url
sock = xmlrpclib.ServerProxy(url)
#add user in moodle and return list of id and username
return sock.core_user_create_users(dic_user)
def create_moodle_courses(self, cr, uid, id, courses, context=None):
"""
        create a moodle course
@param courses : is a list of dictionaries with the moodle course information
        @return a list of dictionaries with the created course ids
"""
#connect to moodle
url = self.browse(cr, uid, id, context=context).url
if not url:
raise osv.except_osv(_('Error!'),_("You must configure your moodle connection."))
sock = xmlrpclib.ServerProxy(url)
return sock.core_course_create_courses(courses)
def moodle_enrolled(self, cr, uid, id, enrolled, context=None):
"""
        enrol users in a course
        @param enrolled : list of dictionaries with the course id and the user id
"""
#connect to moodle
url = self.browse(cr, uid, id, context=context).url
sock = xmlrpclib.ServerProxy(url)
#add enrolled in moodle
sock.enrol_manual_enrol_users(enrolled)
def create_password(self):
"""
create a random password
"""
rand = string.ascii_letters + string.digits
length = 8
passwd = ''.join(sample(rand, length))
passwd = passwd + '+'
return passwd
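    # Example (sketch): yields 8 characters sampled without repetition from
    # [a-zA-Z0-9] plus a trailing '+', e.g. something like "aK3m9QzL+"; the '+'
    # supplies the non-alphanumeric character Moodle's default policy expects.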
def check_email(self, email):
"""
check if email is correct
"""
if email:
            if (email.count('@') != 1 or email.count('.') < 1):
raise osv.except_osv(_('Error!'),_("Your email '%s' is wrong.") % (email))
def make_username(self, username, response_courses):
"""
        create a moodle username with a random number appended for uniqueness
@return the moodle username
"""
if username:
#remove space in the name
username = username.replace(" ","_")
#give an user name
name_user = username + "%d" % (response_courses,) + "%d" % (random.randint(1,999999),)
else:
name_user = "moodle_" + "%d" % (response_courses,) + "%d" % (random.randint(1,999999),)
return name_user
event_moodle()
class event_event(osv.osv):
_inherit = "event.event"
_columns={
'moodle_id': fields.integer('Moodle ID', help='The identifier of this event in Moodle'),
}
def check_registration_limits(self, cr, uid, ids, context=None):
"""
        create moodle courses and users and link them when an event is confirmed
        if the event_registration is not confirmed then nothing is done for it
"""
res = super(event_event, self).check_registration_limits(cr, uid, ids, context=context)
moodle_pool = self.pool.get('event.moodle.config.wiz')
moodle_config_wiz_id = moodle_pool.find(cr, uid, context=context)
list_users=[]
userid = []
for event in self.browse(cr, uid, ids, context=context):
#moodle use time() to store the date
date = time.strptime(event.date_begin, '%Y-%m-%d %H:%M:%S')
date = int (time.mktime(date))
#create the dict of values to create the course in Moodle
dic_courses= [{
'fullname': event.name,
'shortname': '',
'startdate': date,
'summary': event.note,
'categoryid':1, #the category hardcoded is 'Miscellaneous'
}]
#create a course in moodle and keep the id
response_courses = moodle_pool.create_moodle_courses(cr, uid, moodle_config_wiz_id, dic_courses, context=context)
self.write(cr, uid, event.id, {'moodle_id': response_courses[0]['id']})
moodle_uids = []
for registration in event.registration_ids:
if registration.state == 'open':
#enroll the registration in Moodle as it is confirmed
if not registration.moodle_uid:
#create a dictionary for an user
name_user = moodle_pool.make_username(registration.name, response_courses[0]['id'])
moodle_pool.check_email(registration.email)
passwd = moodle_pool.create_password()
dic_users={
'username' : name_user,
'password' : passwd,
'city' : registration.city,
'firstname' : registration.name ,
'lastname': '',
'email': registration.email
}
#create the user in moodle
response_user = moodle_pool.create_moodle_user(cr, uid, moodle_config_wiz_id, [dic_users], context=context)
for user in response_user:
self.pool.get('event.registration').write(cr,uid,[registration.id],{'moodle_uid': user['id'], 'moodle_user_password': passwd, 'moodle_username': name_user})
moodle_uids.append(user['id'])
else:
moodle_uids.append(registration.moodle_uid)
#link the course with users
enrolled = []
for moodle_user in moodle_uids:
enrolled.append({
'roleid' :'5', #mark as 'Student'
'userid' : moodle_user,
'courseid' :response_courses[0]['id']
})
moodle_pool.moodle_enrolled(cr, uid, moodle_config_wiz_id, enrolled, context=context)
return res
event_event()
class event_registration(osv.osv):
_inherit = "event.registration"
_columns={
'moodle_user_password': fields.char('Password for Moodle User', 128),
'moodle_username': fields.char('Moodle Username', 128),
'moodle_uid': fields.integer('Moodle User ID'),
}
def confirm_registration(self, cr, uid, ids, context=None):
"""
        create a user and enrol them in the course if the event is already confirmed
"""
res = super(event_registration, self).confirm_registration(cr, uid, ids, context=context)
moodle_pool = self.pool.get('event.moodle.config.wiz')
moodle_config_wiz_id = moodle_pool.find(cr, uid, context=context)
for register in self.browse(cr, uid, ids, context=context):
if register.event_id.state == 'confirm' and register.event_id.moodle_id:
if not register.moodle_uid:
#create the user in moodle
name_user = moodle_pool.make_username(register.name, register.event_id.moodle_id)
moodle_pool.check_email(register.email)
passwd = moodle_pool.create_password()
dic_users = [{
'username': name_user,
'password': passwd,
'city': register.city,
'firstname': register.name,
                    'lastname': '', #we could split register.name on ' ' but it would be inaccurate, so it seems better to leave it empty as it's not really useful
'email': register.email,
}]
response_user = moodle_pool.create_moodle_user(cr, uid, moodle_config_wiz_id, dic_users, context=context)
#write in database the password and the username
moodle_user_id = response_user[0]['id']
self.pool.get('event.registration').write(cr, uid, ids, {'moodle_uid': moodle_user_id, 'moodle_user_password': passwd, 'moodle_username': name_user})
else:
moodle_user_id = register.moodle_uid
enrolled=[{
'roleid': '5', #mark as student
'userid': moodle_user_id,
'courseid': register.event_id.moodle_id
}]
moodle_pool.moodle_enrolled(cr, uid, moodle_config_wiz_id, enrolled, context=context)
return res
def onchange_moodle_name(self, cr, uid, ids, moodle_username, context=None):
"""
        This onchange receives a moodle username as parameter and fills the moodle_uid and password fields if existing records with this username are found
@param moodle_username: the existing moodle username
"""
res = {}
reg_ids = self.search(cr, uid, [('moodle_username', '=', moodle_username)], order='create_date desc', context=context)
if reg_ids:
reg = self.browse(cr, uid, reg_ids[0], context=context)
res = {'value' :{
'moodle_uid': reg.moodle_uid,
'name': reg.name,
'email':reg.email,
'phone':reg.phone,
'city': reg.city,
'street': reg.street}}
return res
event_registration()
| mit |
fusionpig/ansible | lib/ansible/executor/task_executor.py | 1 | 22726 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import pipes
import subprocess
import sys
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.plugins import connection_loader, action_loader
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unicode import to_unicode
from ansible.utils.debug import debug
__all__ = ['TaskExecutor']
class TaskExecutor:
'''
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
# Modules that we optimize by squashing loop items into a single call to
# the module
SQUASH_ACTIONS = frozenset(('apt', 'yum', 'pkgng', 'zypper', 'dnf'))
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj):
self._host = host
self._task = task
self._job_vars = job_vars
self._play_context = play_context
self._new_stdin = new_stdin
self._loader = loader
self._shared_loader_obj = shared_loader_obj
try:
from __main__ import display
self._display = display
except ImportError:
from ansible.utils.display import Display
self._display = Display()
def run(self):
'''
The main executor entrypoint, where we determine if the specified
        task requires looping, and either runs the task once or iterates over the loop items
'''
debug("in run()")
try:
# lookup plugins need to know if this task is executing from
# a role, so that it can properly find files/templates/etc.
roledir = None
if self._task._role:
roledir = self._task._role._role_path
self._job_vars['roledir'] = roledir
items = self._get_loop_items()
if items is not None:
if len(items) > 0:
item_results = self._run_loop(items)
# loop through the item results, and remember the changed/failed
# result flags based on any item there.
changed = False
failed = False
for item in item_results:
if 'changed' in item and item['changed']:
changed = True
if 'failed' in item and item['failed']:
failed = True
# create the overall result item, and set the changed/failed
# flags there to reflect the overall result of the loop
res = dict(results=item_results)
if changed:
res['changed'] = True
if failed:
res['failed'] = True
res['msg'] = 'One or more items failed'
else:
res['msg'] = 'All items completed'
else:
res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
else:
debug("calling self._execute()")
res = self._execute()
debug("_execute() done")
# make sure changed is set in the result, if it's not present
if 'changed' not in res:
res['changed'] = False
debug("dumping result to json")
result = json.dumps(res)
debug("done dumping result, returning")
return result
except AnsibleError, e:
return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
finally:
try:
self._connection.close()
except AttributeError:
pass
except Exception, e:
debug("error closing connection: %s" % to_unicode(e))
def _get_loop_items(self):
'''
Loads a lookup plugin to handle the with_* portion of a task (if specified),
and returns the items result.
'''
# create a copy of the job vars here so that we can modify
# them temporarily without changing them too early for other
# parts of the code that might still need a pristine version
vars_copy = self._job_vars.copy()
# now we update them with the play context vars
self._play_context.update_vars(vars_copy)
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=vars_copy)
items = None
if self._task.loop:
if self._task.loop in self._shared_loader_obj.lookup_loader:
#TODO: remove convert_bare true and deprecate this in with_
try:
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
except AnsibleUndefinedVariable as e:
if 'has no attribute' in str(e):
loop_terms = []
self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
else:
raise
items = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
else:
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
return items
def _run_loop(self, items):
'''
Runs the task with the loop items specified and collates the result
into an array named 'results' which is inserted into the final result
along with the item for which the loop ran.
'''
results = []
# make copies of the job vars and task so we can add the item to
# the variables and re-validate the task with the item variable
task_vars = self._job_vars.copy()
items = self._squash_items(items, task_vars)
for item in items:
task_vars['item'] = item
try:
tmp_task = self._task.copy()
except AnsibleParserError, e:
results.append(dict(failed=True, msg=str(e)))
continue
# now we swap the internal task with the copy, execute,
# and swap them back so we can do the next iteration cleanly
(self._task, tmp_task) = (tmp_task, self._task)
res = self._execute(variables=task_vars)
(self._task, tmp_task) = (tmp_task, self._task)
# now update the result with the item info, and append the result
# to the list of results
res['item'] = item
results.append(res)
return results
def _squash_items(self, items, variables):
'''
Squash items down to a comma-separated list for certain modules which support it
(typically package management modules).
'''
if len(items) > 0 and self._task.action in self.SQUASH_ACTIONS:
final_items = []
name = self._task.args.pop('name', None) or self._task.args.pop('pkg', None)
for item in items:
variables['item'] = item
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
if self._task.evaluate_conditional(templar, variables):
if templar._contains_vars(name):
new_item = templar.template(name)
final_items.append(new_item)
else:
final_items.append(item)
joined_items = ",".join(final_items)
self._task.args['name'] = joined_items
return [joined_items]
else:
return items
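    # Illustrative sketch (editorial, not from the original source): a play like
    #
    #   - yum: name={{ item }} state=present
    #     with_items: [vim, git]
    #
    # is collapsed by _squash_items into one module call with name="vim,git",
    # so the package manager resolves a single transaction.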
def _execute(self, variables=None):
'''
The primary workhorse of the executor system, this runs the task
on the specified host (which may be the delegated_to host) and handles
the retry/until and block rescue/always execution
'''
if variables is None:
variables = self._job_vars
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
# fields set from the play/task may be based on variables, so we have to
# do the same kind of post validation step on it here before we use it.
self._play_context.post_validate(templar=templar)
# now that the play context is finalized, we can add 'magic'
# variables to the variable dictionary
self._play_context.update_vars(variables)
# Evaluate the conditional (if any) for this task, which we do before running
# the final task post-validation. We do this before the post validation due to
# the fact that the conditional may specify that the task be skipped due to a
# variable not being present which would otherwise cause validation to fail
if not self._task.evaluate_conditional(templar, variables):
debug("when evaulation failed, skipping this task")
return dict(changed=False, skipped=True, skip_reason='Conditional check failed')
# Now we do final validation on the task, which sets all fields to their final values.
# In the case of debug tasks, we save any 'var' params and restore them after validating
# so that variables are not replaced too early.
prev_var = None
if self._task.action == 'debug' and 'var' in self._task.args:
prev_var = self._task.args.pop('var')
self._task.post_validate(templar=templar)
if '_variable_params' in self._task.args:
variable_params = self._task.args.pop('_variable_params')
if isinstance(variable_params, dict):
self._display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
variable_params.update(self._task.args)
self._task.args = variable_params
if prev_var is not None:
self._task.args['var'] = prev_var
# if this task is a TaskInclude, we just return now with a success code so the
# main thread can expand the task list for the given host
if self._task.action == 'include':
include_variables = self._task.args.copy()
include_file = include_variables.get('_raw_params')
del include_variables['_raw_params']
return dict(include=include_file, include_variables=include_variables)
# get the connection and the handler for this execution
self._connection = self._get_connection(variables)
self._connection.set_host_overrides(host=self._host)
self._handler = self._get_action_handler(connection=self._connection, templar=templar)
# And filter out any fields which were set to default(omit), and got the omit token value
omit_token = variables.get('omit')
if omit_token is not None:
self._task.args = dict(filter(lambda x: x[1] != omit_token, self._task.args.iteritems()))
# Read some values from the task, so that we can modify them if need be
retries = self._task.retries
if retries <= 0:
retries = 1
delay = self._task.delay
if delay < 0:
delay = 1
# make a copy of the job vars here, in case we need to update them
# with the registered variable value later on when testing conditions
vars_copy = variables.copy()
debug("starting attempt loop")
result = None
for attempt in range(retries):
if attempt > 0:
# FIXME: this should use the callback/message passing mechanism
print("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result))
result['attempts'] = attempt + 1
debug("running the handler")
result = self._handler.run(task_vars=variables)
debug("handler run complete")
if self._task.async > 0:
# the async_wrapper module returns dumped JSON via its stdout
# response, so we parse it here and replace the result
try:
result = json.loads(result.get('stdout'))
except (TypeError, ValueError) as e:
return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))
if self._task.poll > 0:
result = self._poll_async_result(result=result, templar=templar)
# update the local copy of vars with the registered value, if specified,
# or any facts which may have been generated by the module execution
if self._task.register:
vars_copy[self._task.register] = result
if 'ansible_facts' in result:
vars_copy.update(result['ansible_facts'])
# create a conditional object to evaluate task conditions
cond = Conditional(loader=self._loader)
# FIXME: make sure until is mutually exclusive with changed_when/failed_when
if self._task.until:
cond.when = self._task.until
if cond.evaluate_conditional(templar, vars_copy):
break
elif (self._task.changed_when or self._task.failed_when) and 'skipped' not in result:
if self._task.changed_when:
cond.when = [ self._task.changed_when ]
result['changed'] = cond.evaluate_conditional(templar, vars_copy)
if self._task.failed_when:
cond.when = [ self._task.failed_when ]
failed_when_result = cond.evaluate_conditional(templar, vars_copy)
result['failed_when_result'] = result['failed'] = failed_when_result
if failed_when_result:
break
elif 'failed' not in result:
if result.get('rc', 0) != 0:
result['failed'] = True
else:
# if the result is not failed, stop trying
break
if attempt < retries - 1:
time.sleep(delay)
# do the final update of the local variables here, for both registered
# values and any facts which may have been created
if self._task.register:
variables[self._task.register] = result
if 'ansible_facts' in result:
variables.update(result['ansible_facts'])
# save the notification target in the result, if it was specified, as
# this task may be running in a loop in which case the notification
# may be item-specific, ie. "notify: service {{item}}"
if self._task.notify is not None:
result['_ansible_notify'] = self._task.notify
# and return
debug("attempt loop complete, returning result")
return result
def _poll_async_result(self, result, templar):
'''
Polls for the specified JID to be complete
'''
async_jid = result.get('ansible_job_id')
if async_jid is None:
return dict(failed=True, msg="No job id was returned by the async task")
        # Create a new pseudo-task to run the async_status module, and run
# that (with a sleep for "poll" seconds between each retry) until the
# async time limit is exceeded.
async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
# Because this is an async task, the action handler is async. However,
# we need the 'normal' action handler for the status check, so get it
# now via the action_loader
normal_handler = action_loader.get(
'normal',
task=async_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
time_left = self._task.async
while time_left > 0:
time.sleep(self._task.poll)
async_result = normal_handler.run()
if int(async_result.get('finished', 0)) == 1 or 'failed' in async_result or 'skipped' in async_result:
break
time_left -= self._task.poll
if int(async_result.get('finished', 0)) != 1:
return dict(failed=True, msg="async task did not complete within the requested time")
else:
return async_result
def _get_connection(self, variables):
'''
Reads the connection property for the host, and returns the
correct connection object from the list of connection plugins
'''
# FIXME: calculation of connection params/auth stuff should be done here
if not self._play_context.remote_addr:
self._play_context.remote_addr = self._host.ipv4_address
if self._task.delegate_to is not None:
self._compute_delegate(variables)
conn_type = self._play_context.connection
if conn_type == 'smart':
conn_type = 'ssh'
if sys.platform.startswith('darwin') and self._play_context.password:
# due to a current bug in sshpass on OSX, which can trigger
# a kernel panic even for non-privileged users, we revert to
# paramiko on that OS when a SSH password is specified
conn_type = "paramiko"
else:
# see if SSH can support ControlPersist if not use paramiko
try:
cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
if "Bad configuration option" in err:
conn_type = "paramiko"
except OSError:
conn_type = "paramiko"
connection = connection_loader.get(conn_type, self._play_context, self._new_stdin)
if not connection:
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
return connection
def _get_action_handler(self, connection, templar):
'''
        Returns the correct action plugin to handle the requested task action
'''
if self._task.action in action_loader:
if self._task.async != 0:
raise AnsibleError("async mode is not supported with the %s module" % module_name)
handler_name = self._task.action
elif self._task.async == 0:
handler_name = 'normal'
else:
handler_name = 'async'
handler = action_loader.get(
handler_name,
task=self._task,
connection=connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
if not handler:
raise AnsibleError("the handler '%s' was not found" % handler_name)
return handler
def _compute_delegate(self, variables):
# get the vars for the delegate by its name
try:
self._display.debug("Delegating to %s" % self._task.delegate_to)
this_info = variables['hostvars'][self._task.delegate_to]
# get the real ssh_address for the delegate and allow ansible_ssh_host to be templated
self._play_context.remote_addr = this_info.get('ansible_ssh_host', self._task.delegate_to)
self._play_context.remote_user = this_info.get('ansible_remote_user', self._task.remote_user)
self._play_context.port = this_info.get('ansible_ssh_port', self._play_context.port)
self._play_context.password = this_info.get('ansible_ssh_pass', self._play_context.password)
self._play_context.private_key_file = this_info.get('ansible_ssh_private_key_file', self._play_context.private_key_file)
self._play_context.become_pass = this_info.get('ansible_sudo_pass', self._play_context.become_pass)
conn = this_info.get('ansible_connection', self._task.connection)
if conn:
self._play_context.connection = conn
except Exception as e:
# make sure the inject is empty for non-inventory hosts
this_info = {}
self._display.debug("Delegate due to: %s" % str(e))
# Last chance to get private_key_file from global variables.
# this is useful if delegated host is not defined in the inventory
if self._play_context.private_key_file is None:
self._play_context.private_key_file = this_info.get('ansible_ssh_private_key_file', None)
if self._play_context.private_key_file is None:
key = this_info.get('private_key_file', None)
if key:
self._play_context.private_key_file = os.path.expanduser(key)
for i in this_info:
if i.startswith("ansible_") and i.endswith("_interpreter"):
variables[i] = this_info[i]
| gpl-3.0 |
MayOneUS/pledgeservice | testlib/setuptools/extension.py | 165 | 1731 | import sys
import re
import functools
import distutils.core
import distutils.extension
from setuptools.dist import _get_unpatched
_Extension = _get_unpatched(distutils.core.Extension)
def have_pyrex():
"""
Return True if Cython or Pyrex can be imported.
"""
pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext'
for pyrex_impl in pyrex_impls:
try:
# from (pyrex_impl) import build_ext
__import__(pyrex_impl, fromlist=['build_ext']).build_ext
return True
except Exception:
pass
return False
class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files"""
def __init__(self, *args, **kw):
_Extension.__init__(self, *args, **kw)
self._convert_pyx_sources_to_lang()
def _convert_pyx_sources_to_lang(self):
"""
Replace sources with .pyx extensions to sources with the target
language extension. This mechanism allows language authors to supply
pre-converted sources but to prefer the .pyx sources.
"""
if have_pyrex():
# the build has Cython, so allow it to compile the .pyx files
return
lang = self.language or ''
target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
sub = functools.partial(re.sub, '.pyx$', target_ext)
self.sources = list(map(sub, self.sources))
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
sys.modules['distutils.command.build_ext'].Extension = Extension
| apache-2.0 |
Godiyos/python-for-android | python3-alpha/python3-src/Lib/wsgiref/util.py | 55 | 5595 | """Miscellaneous WSGI-related Utilities"""
import posixpath
__all__ = [
'FileWrapper', 'guess_scheme', 'application_uri', 'request_uri',
'shift_path_info', 'setup_testing_defaults',
]
class FileWrapper:
"""Wrapper to convert file-like objects to iterables"""
def __init__(self, filelike, blksize=8192):
self.filelike = filelike
self.blksize = blksize
if hasattr(filelike,'close'):
self.close = filelike.close
def __getitem__(self,key):
data = self.filelike.read(self.blksize)
if data:
return data
raise IndexError
def __iter__(self):
return self
def __next__(self):
data = self.filelike.read(self.blksize)
if data:
return data
raise StopIteration
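# Example (sketch): returning a file iterably from a WSGI application; servers
# may substitute a platform-specific wsgi.file_wrapper where available:
#
#   def app(environ, start_response):
#       start_response('200 OK', [('Content-Type', 'application/octet-stream')])
#       return FileWrapper(open('data.bin', 'rb'))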
def guess_scheme(environ):
"""Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https'
"""
if environ.get("HTTPS") in ('yes','on','1'):
return 'https'
else:
return 'http'
def application_uri(environ):
"""Return the application's base URI (no PATH_INFO or QUERY_STRING)"""
url = environ['wsgi.url_scheme']+'://'
from urllib.parse import quote
if environ.get('HTTP_HOST'):
url += environ['HTTP_HOST']
else:
url += environ['SERVER_NAME']
if environ['wsgi.url_scheme'] == 'https':
if environ['SERVER_PORT'] != '443':
url += ':' + environ['SERVER_PORT']
else:
if environ['SERVER_PORT'] != '80':
url += ':' + environ['SERVER_PORT']
url += quote(environ.get('SCRIPT_NAME') or '/')
return url
def request_uri(environ, include_query=True):
"""Return the full request URI, optionally including the query string"""
url = application_uri(environ)
from urllib.parse import quote
path_info = quote(environ.get('PATH_INFO',''),safe='/;=,')
if not environ.get('SCRIPT_NAME'):
url += path_info[1:]
else:
url += path_info
if include_query and environ.get('QUERY_STRING'):
url += '?' + environ['QUERY_STRING']
return url
def shift_path_info(environ):
"""Shift a name from PATH_INFO to SCRIPT_NAME, returning it
If there are no remaining path segments in PATH_INFO, return None.
Note: 'environ' is modified in-place; use a copy if you need to keep
the original PATH_INFO or SCRIPT_NAME.
Note: when PATH_INFO is just a '/', this returns '' and appends a trailing
'/' to SCRIPT_NAME, even though empty path segments are normally ignored,
and SCRIPT_NAME doesn't normally end in a '/'. This is intentional
behavior, to ensure that an application can tell the difference between
'/x' and '/x/' when traversing to objects.
"""
path_info = environ.get('PATH_INFO','')
if not path_info:
return None
path_parts = path_info.split('/')
path_parts[1:-1] = [p for p in path_parts[1:-1] if p and p != '.']
name = path_parts[1]
del path_parts[1]
script_name = environ.get('SCRIPT_NAME','')
script_name = posixpath.normpath(script_name+'/'+name)
if script_name.endswith('/'):
script_name = script_name[:-1]
if not name and not script_name.endswith('/'):
script_name += '/'
environ['SCRIPT_NAME'] = script_name
environ['PATH_INFO'] = '/'.join(path_parts)
# Special case: '/.' on PATH_INFO doesn't get stripped,
# because we don't strip the last element of PATH_INFO
# if there's only one path part left. Instead of fixing this
# above, we fix it here so that PATH_INFO gets normalized to
# an empty string in the environ.
if name=='.':
name = None
return name
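# Example (sketch): consuming one path segment per call for routing:
#
#   environ = {'SCRIPT_NAME': '', 'PATH_INFO': '/app/users'}
#   shift_path_info(environ)   # returns 'app'
#   # environ now has SCRIPT_NAME='/app' and PATH_INFO='/users'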
def setup_testing_defaults(environ):
"""Update 'environ' with trivial defaults for testing purposes
This adds various parameters required for WSGI, including HTTP_HOST,
SERVER_NAME, SERVER_PORT, REQUEST_METHOD, SCRIPT_NAME, PATH_INFO,
and all of the wsgi.* variables. It only supplies default values,
and does not replace any existing settings for these variables.
This routine is intended to make it easier for unit tests of WSGI
servers and applications to set up dummy environments. It should *not*
be used by actual WSGI servers or applications, since the data is fake!
"""
environ.setdefault('SERVER_NAME','127.0.0.1')
environ.setdefault('SERVER_PROTOCOL','HTTP/1.0')
environ.setdefault('HTTP_HOST',environ['SERVER_NAME'])
environ.setdefault('REQUEST_METHOD','GET')
if 'SCRIPT_NAME' not in environ and 'PATH_INFO' not in environ:
environ.setdefault('SCRIPT_NAME','')
environ.setdefault('PATH_INFO','/')
environ.setdefault('wsgi.version', (1,0))
environ.setdefault('wsgi.run_once', 0)
environ.setdefault('wsgi.multithread', 0)
environ.setdefault('wsgi.multiprocess', 0)
from io import StringIO, BytesIO
environ.setdefault('wsgi.input', BytesIO())
environ.setdefault('wsgi.errors', StringIO())
environ.setdefault('wsgi.url_scheme',guess_scheme(environ))
if environ['wsgi.url_scheme']=='http':
environ.setdefault('SERVER_PORT', '80')
elif environ['wsgi.url_scheme']=='https':
environ.setdefault('SERVER_PORT', '443')
_hoppish = {
'connection':1, 'keep-alive':1, 'proxy-authenticate':1,
'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1,
'upgrade':1
}.__contains__
def is_hop_by_hop(header_name):
"""Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""
return _hoppish(header_name.lower())
| apache-2.0 |
sgraham/nope | chrome/common/extensions/docs/server2/url_constants.py | 41 | 1163 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
GITILES_BASE = 'https://chromium.googlesource.com'
GITILES_SRC_ROOT = 'chromium/src/+'
GITILES_BRANCHES_PATH = 'refs/branch-heads'
GITILES_OAUTH2_SCOPE = 'https://www.googleapis.com/auth/gerritcodereview'
GITHUB_REPOS = 'https://api.github.com/repos'
GITHUB_BASE = 'https://github.com/GoogleChrome/chrome-app-samples/tree/master/samples'
RAW_GITHUB_BASE = ('https://github.com/GoogleChrome/chrome-app-samples/raw/'
'master')
OMAHA_PROXY_URL = 'http://omahaproxy.appspot.com/json'
# TODO(kalman): Change this to http://omahaproxy.appspot.com/history.json
OMAHA_HISTORY = 'http://10.omahaproxy-hrd.appspot.com/history.json'
OMAHA_DEV_HISTORY = '%s?channel=dev&os=win&json=1' % OMAHA_HISTORY
SVN_URL = 'http://src.chromium.org/chrome'
VIEWVC_URL = 'http://src.chromium.org/viewvc/chrome'
EXTENSIONS_SAMPLES = ('http://src.chromium.org/viewvc/chrome/trunk/src/chrome/'
'common/extensions/docs/examples')
CODEREVIEW_SERVER = 'https://codereview.chromium.org'
| bsd-3-clause |
mano3m/CouchPotatoServer | couchpotato/core/plugins/subtitle/main.py | 3 | 2705 | from couchpotato import get_session
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Library, FileType
from couchpotato.environment import Env
import subliminal
import traceback
log = CPLog(__name__)
class Subtitle(Plugin):
services = ['opensubtitles', 'thesubdb', 'subswiki', 'podnapisi']
def __init__(self):
addEvent('renamer.before', self.searchSingle)
def searchLibrary(self):
# Get all active and online movies
db = get_session()
        libraries = db.query(Library).all()
        done_status = fireEvent('status.get', 'done', single = True)
        # query() returns a list of Library rows; each exposes its movies via the 'movies' relationship
        for movie in [m for library in libraries for m in library.movies]:
for release in movie.releases:
# get releases and their movie files
                if release.status_id == done_status.get('id'):
files = []
for file in release.files.filter(FileType.status.has(identifier = 'movie')).all():
files.append(file.path)
# get subtitles for those files
subliminal.list_subtitles(files, cache_dir = Env.get('cache_dir'), multi = True, languages = self.getLanguages(), services = self.services)
def searchSingle(self, group):
if self.isDisabled(): return
try:
available_languages = sum(group['subtitle_language'].itervalues(), [])
downloaded = []
files = [toUnicode(x) for x in group['files']['movie']]
log.debug('Searching for subtitles for: %s', files)
for lang in self.getLanguages():
if lang not in available_languages:
download = subliminal.download_subtitles(files, multi = True, force = False, languages = [lang], services = self.services, cache_dir = Env.get('cache_dir'))
for subtitle in download:
downloaded.extend(download[subtitle])
for d_sub in downloaded:
log.info('Found subtitle (%s): %s', (d_sub.language.alpha2, files))
group['files']['subtitle'].append(d_sub.path)
group['before_rename'].append(d_sub.path)
group['subtitle_language'][d_sub.path] = [d_sub.language.alpha2]
return True
except:
log.error('Failed searching for subtitle: %s', (traceback.format_exc()))
return False
def getLanguages(self):
return splitString(self.conf('languages'))
| gpl-3.0 |
danielward/health-tracker | tracker/tracker/settings.py | 1 | 2699 | """
Django settings for tracker project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
ROOT_DIR = os.path.realpath(os.path.join(BASE_DIR, '..'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cd$+f!jl(j$$l6tq+g_5awefyci0j#8&mu-homknx6oi_^0*^!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'accounts',
'frontend',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
)
ROOT_URLCONF = 'tracker.urls'
WSGI_APPLICATION = 'tracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
AUTH_USER_MODEL = 'accounts.User'
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-GB'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Test-specific (declared here so can be easily overridden)
SELENIUM_DRIVER = 'chrome'
TEST_RUN = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
# Template searching
TEMPLATE_DIRS = (
os.path.join(ROOT_DIR, 'TEMPLATES'),
)
# Import any overrides
try:
from settings_override import *
except ImportError:
pass
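# A minimal settings_override.py (hypothetical) next to this module could hold
# deployment-specific values, e.g.:
#
#   DEBUG = False
#   TEMPLATE_DEBUG = False
#   ALLOWED_HOSTS = ['tracker.example.com']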
| gpl-2.0 |
ryansturmer/cuttlebug | cuttlebug/settings.py | 1 | 7027 | import wx
import util
from ui.options import *
import os, pickle
from lxml import etree
session = {}
FALSE = 0
TRUE = 1
PROMPT = 2
def load_session(filename='.session'):
global session
try:
session = util.unpickle_file(filename)
except Exception, e:
print "Couldn't load session: %s" % e
session = {}
if not isinstance(session, dict):
session = {}
def save_session(filename = '.session'):
global session
util.pickle_file(session,filename)
def session_get(key):
global session
return session.get(key, None)
def session_set(key, value):
global session
session[key] = value
class Settings(util.Category):
def __init__(self):
super(self.__class__, self).__init__()
self.filename = ''
# Overview
editor = self.add_category('editor')
editor.add_item('view_whitespace', False)
editor.add_item('line_visible', False)
editor.add_item('exec_marker_color', (255,255,0))
editor.add_item('fold', True)
editor.add_item('fold_margin_size', 16)
cursor = editor.add_category('cursor')
cursor.add_item('visible', True)
cursor.add_item('period', 1000)
cursor.add_item('width', 1)
cursor.add_item('foreground_color', (0,0,0))
cursor.add_item('background_color', (255,255,255))
cursor.add_item('selection_foreground_color', (0,0,255))
cursor.add_item('selection_background_color', (255,0,0))
page = editor.add_category('page')
page.add_item('wrap', False)
page.add_item('edge_column', 1)
page.add_item('edge_mode', 0)
page.add_item('end_at_last_line', False)
page.add_item('horizontal_scrollbar', True)
page.add_item('margin_left', 0)
page.add_item('margin_right', 0)
page.add_item('show_line_numbers', True)
indent = editor.add_category('indent')
indent.add_item('backspace_unindents', True)
indent.add_item('indentation_guides', True)
indent.add_item('tab_indents', True)
indent.add_item('tab_width', 4)
indent.add_item('use_tabs', False)
debug = self.add_category('debug')
debug.add_item('jump_to_exec_location', False)
debug.add_item('run_after_download', False)
debug.add_item('load_after_build', 'no')
@staticmethod
def load(filename):
path = os.path.abspath(filename)
fp = open(path, 'r')
tree = etree.parse(fp)
settings = Settings()
settings.filename = path
settings.modified = False
def walk(current_cat, element):
for item in element:
if item.tag == "category":
name = item.get('name')
if name not in current_cat:
cat = current_cat.add_category(name)
else:
cat = current_cat[name]
walk(cat, item)
elif item.tag == "item":
name, value = item.get('name'), item.text.strip()
                    if name not in current_cat:
                        # mirror the category branch above: create when missing, assign when present
                        current_cat.add_item(name, eval(value))
                    else:
                        current_cat[name] = eval(value)
else:
pass # We could log an error here or something
walk(settings, tree.getroot())
return settings
@staticmethod
def create(filename):
settings = Settings()
settings.filename = os.path.abspath(filename)
settings.save()
return settings
def save(self):
fp = open(self.filename,'wb')
fp.write(util.settings_template.render(category=self))
fp.close()
self.modified = False
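    # Round-trip sketch (hypothetical path; Category item access assumed):
    #
    #   settings = Settings.create('.settings')
    #   settings['editor']['fold'] = False
    #   settings.save()
    #   settings = Settings.load('.settings')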
class SettingsDialog(OptionsDialog):
def __init__(self, parent, title="Settings", size=(600,400), settings=None, on_apply=None):
OptionsDialog.__init__(self, parent, title=title, size=size, data=settings, icons=["style.png"], on_apply=on_apply)
self.create_style_panels()
def create_style_panels(self):
# Main Panel
editor_panel = OptionsPanel(self, "Editor")
editor_panel.add("Misc", "Show Whitespace", CheckboxWidget, key="editor.view_whitespace")
editor_panel.add("Misc", "Code Folding", CheckboxWidget, key="editor.fold")
editor_panel.add("Misc", "Execution Marker Color", ColorWidget, key="editor.exec_marker_color")
# Cursor
cursor_panel = OptionsPanel(self, "Cursor")
cursor_panel.add("Selection", "Selection Background Color", ColorWidget, key="editor.cursor.background_color")
cursor_panel.add("Selection", "Selection Foreground Color", ColorWidget, key="editor.cursor.foreground_color")
editor_panel.add("Selection", "Hilight Current Line", CheckboxWidget, key="editor.line_visible")
cursor_panel.add("Cursor", "Cursor Visible", CheckboxWidget, key="editor.cursor.visible")
cursor_panel.add("Cursor", "Cursor Period (ms)", SpinWidget, key="editor.cursor.period")
# Page
page_panel = OptionsPanel(self, "Page")
page_panel.add("Margins", "Left Margin (px)", SpinWidget, key="editor.page.margin_left")
page_panel.add("Margins", "Right Margin (px)", SpinWidget, key="editor.page.margin_right")
page_panel.add("Margins", "Show Line Numbers", CheckboxWidget, key="editor.page.show_line_numbers")
page_panel.add("Wrap", "Wrap", CheckboxWidget, key="editor.page.wrap", label_on_right=True)
page_panel.add("Wrap", "Horizontal Scrollbar", CheckboxWidget, key="editor.page.horizontal_scrollbar", label_on_right=True)
debug_panel = OptionsPanel(self, "Debug")
debug_panel.add("Running", "Jump to Execution Location on HALT", CheckboxWidget, key="debug.jump_to_exec_location")
debug_panel.add("Running", "Run After Download", CheckboxWidget, key="debug.run_after_download")
debug_panel.add("Running", "Download After Successful Build", ComboBoxWidget(debug_panel, choices=['Yes', 'No', 'Prompt']), key="debug.load_after_build")
self.add_panel(editor_panel, icon='style.png')
self.add_panel(cursor_panel, parent=editor_panel, icon='textfield_rename.png')
self.add_panel(page_panel, parent=editor_panel, icon='page.png')
self.add_panel(debug_panel, icon='bug.png')
@staticmethod
def show(parent, settings=None, on_apply=None):
dialog = SettingsDialog(parent, settings=settings, on_apply=on_apply)
dialog.Centre()
dialog.ShowModal()
if __name__ == "__main__":
#settings = Settings.create(".settings_test")
#print settings.__dict__
settings = Settings.load(".settings_test")
settings.save()
| mit |
xavfernandez/pip | tests/functional/test_install.py | 1 | 64161 | import distutils
import glob
import os
import re
import shutil
import ssl
import sys
import textwrap
from os.path import curdir, join, pardir
import pytest
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.models.index import PyPI, TestPyPI
from pip._internal.utils.misc import rmtree
from tests.lib import (
_create_svn_repo,
_create_test_package,
create_basic_wheel_for_package,
create_test_package_with_setup,
need_bzr,
need_mercurial,
need_svn,
path_to_url,
pyversion,
pyversion_tuple,
requirements_file,
skip_if_not_python2,
skip_if_python2,
)
from tests.lib.filesystem import make_socket_file
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path
from tests.lib.server import (
file_response,
make_mock_server,
package_page,
server_running,
)
@pytest.mark.parametrize('command', ('install', 'wheel'))
@pytest.mark.parametrize('variant', ('missing_setuptools', 'bad_setuptools'))
def test_pep518_uses_build_env(script, data, common_wheels, command, variant):
if variant == 'missing_setuptools':
script.pip("uninstall", "-y", "setuptools")
elif variant == 'bad_setuptools':
setuptools_mod = script.site_packages_path.joinpath("setuptools.py")
with open(setuptools_mod, 'a') as f:
f.write('\nraise ImportError("toto")')
else:
raise ValueError(variant)
script.pip(
command, '--no-index', '-f', common_wheels, '-f', data.packages,
data.src.joinpath("pep518-3.0"),
)
def test_pep518_build_env_uses_same_pip(
script, data, pip_src, common_wheels, deprecated_python):
"""Ensure the subprocess call to pip for installing the
build dependencies is using the same version of pip.
"""
with open(script.scratch_path / 'pip.py', 'w') as fp:
fp.write('raise ImportError')
script.run(
'python', pip_src / 'src/pip', 'install', '--no-index',
'-f', common_wheels, '-f', data.packages,
data.src.joinpath("pep518-3.0"),
expect_stderr=deprecated_python,
)
def test_pep518_refuses_conflicting_requires(script, data):
create_basic_wheel_for_package(script, 'setuptools', '1.0')
create_basic_wheel_for_package(script, 'wheel', '1.0')
project_dir = data.src.joinpath("pep518_conflicting_requires")
result = script.pip_install_local('-f', script.scratch_path,
project_dir, expect_error=True)
assert (
result.returncode != 0 and
('Some build dependencies for %s conflict with PEP 517/518 supported '
'requirements: setuptools==1.0 is incompatible with '
'setuptools>=40.8.0.' % path_to_url(project_dir)) in result.stderr
), str(result)
def test_pep518_refuses_invalid_requires(script, data, common_wheels):
result = script.pip(
'install', '-f', common_wheels,
data.src.joinpath("pep518_invalid_requires"),
expect_error=True
)
assert result.returncode == 1
assert "does not comply with PEP 518" in result.stderr
def test_pep518_refuses_invalid_build_system(script, data, common_wheels):
result = script.pip(
'install', '-f', common_wheels,
data.src.joinpath("pep518_invalid_build_system"),
expect_error=True
)
assert result.returncode == 1
assert "does not comply with PEP 518" in result.stderr
def test_pep518_allows_missing_requires(script, data, common_wheels):
result = script.pip(
'install', '-f', common_wheels,
data.src.joinpath("pep518_missing_requires"),
expect_stderr=True
)
# Make sure we don't warn when this occurs.
assert "does not comply with PEP 518" not in result.stderr
# We want it to go through isolation for now.
assert "Installing build dependencies" in result.stdout, result.stdout
assert result.returncode == 0
assert result.files_created
@pytest.mark.incompatible_with_test_venv
def test_pep518_with_user_pip(script, pip_src, data, common_wheels):
"""
Check that build dependencies are installed into the build
environment without using build isolation for the pip invocation.
To ensure that we're not using build isolation when installing
the build dependencies, we install a user copy of pip in the
non-isolated environment, and break pip in the system site-packages,
so that isolated uses of pip will fail.
"""
script.pip("install", "--ignore-installed",
"-f", common_wheels, "--user", pip_src)
system_pip_dir = script.site_packages_path / 'pip'
assert not system_pip_dir.exists()
system_pip_dir.mkdir()
with open(system_pip_dir / '__init__.py', 'w') as fp:
fp.write('raise ImportError\n')
script.pip(
'wheel', '--no-index', '-f', common_wheels, '-f', data.packages,
data.src.joinpath("pep518-3.0"),
)
def test_pep518_with_extra_and_markers(script, data, common_wheels):
script.pip(
'wheel', '--no-index',
'-f', common_wheels,
'-f', data.find_links,
data.src.joinpath("pep518_with_extra_and_markers-1.0"),
)
def test_pep518_with_namespace_package(script, data, common_wheels):
script.pip(
'wheel', '--no-index',
'-f', common_wheels,
'-f', data.find_links,
data.src.joinpath("pep518_with_namespace_package-1.0"),
use_module=True,
)
@pytest.mark.timeout(60)
@pytest.mark.parametrize('command', ('install', 'wheel'))
@pytest.mark.parametrize('package', ('pep518_forkbomb',
'pep518_twin_forkbombs_first',
'pep518_twin_forkbombs_second'))
def test_pep518_forkbombs(script, data, common_wheels, command, package):
package_source = next(data.packages.glob(package + '-[0-9]*.tar.gz'))
result = script.pip(
command, '--no-index', '-v',
'-f', common_wheels,
'-f', data.find_links,
package,
expect_error=True,
)
assert '{1} is already being built: {0} from {1}'.format(
package, path_to_url(package_source),
) in result.stderr, str(result)
@pytest.mark.network
def test_pip_second_command_line_interface_works(
script, pip_src, data, common_wheels, deprecated_python):
"""
Check if ``pip<PYVERSION>`` commands behaves equally
"""
# Re-install pip so we get the launchers.
script.pip_install_local('-f', common_wheels, pip_src)
# On old versions of Python, urllib3/requests will raise a warning about
# the lack of an SSLContext.
kwargs = {'expect_stderr': deprecated_python}
if pyversion_tuple < (2, 7, 9):
kwargs['expect_stderr'] = True
args = ['pip%s' % pyversion]
args.extend(['install', 'INITools==0.2'])
args.extend(['-f', data.packages])
result = script.run(*args, **kwargs)
egg_info_folder = (
script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.site_packages / 'initools'
assert egg_info_folder in result.files_created, str(result)
assert initools_folder in result.files_created, str(result)
def test_install_exit_status_code_when_no_requirements(script):
"""
Test install exit status code when no requirements specified
"""
result = script.pip('install', expect_error=True)
assert "You must give at least one requirement to install" in result.stderr
assert result.returncode == ERROR
def test_install_exit_status_code_when_blank_requirements_file(script):
"""
Test install exit status code when blank requirements file specified
"""
script.scratch_path.joinpath("blank.txt").write_text("\n")
script.pip('install', '-r', 'blank.txt')
@pytest.mark.network
def test_basic_install_from_pypi(script):
"""
Test installing a package from PyPI.
"""
result = script.pip('install', 'INITools==0.2')
egg_info_folder = (
script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.site_packages / 'initools'
assert egg_info_folder in result.files_created, str(result)
assert initools_folder in result.files_created, str(result)
# Should not display where it's looking for files
assert "Looking in indexes: " not in result.stdout
assert "Looking in links: " not in result.stdout
# Ensure that we don't print the full URL.
# The URL should be trimmed to only the last part of the path in it,
# when installing from PyPI. The assertion here only checks for
# `https://` since that's likely to show up if we're not trimming in
# the correct circumstances.
assert "https://" not in result.stdout
def test_basic_editable_install(script):
"""
Test editable installation.
"""
result = script.pip('install', '-e', 'INITools==0.2', expect_error=True)
assert (
"INITools==0.2 is not a valid editable requirement"
in result.stderr
)
assert not result.files_created
@need_svn
def test_basic_install_editable_from_svn(script):
"""
Test checking out from svn.
"""
checkout_path = _create_test_package(script)
repo_url = _create_svn_repo(script, checkout_path)
result = script.pip(
'install',
'-e', 'svn+' + repo_url + '#egg=version-pkg'
)
result.assert_installed('version-pkg', with_files=['.svn'])
def _test_install_editable_from_git(script, tmpdir):
"""Test cloning from Git."""
pkg_path = _create_test_package(script, name='testpackage', vcs='git')
args = ['install', '-e', 'git+%s#egg=testpackage' % path_to_url(pkg_path)]
result = script.pip(*args)
result.assert_installed('testpackage', with_files=['.git'])
def test_basic_install_editable_from_git(script, tmpdir):
_test_install_editable_from_git(script, tmpdir)
def test_install_editable_from_git_autobuild_wheel(
script, tmpdir, with_wheel):
_test_install_editable_from_git(script, tmpdir)
@pytest.mark.network
def test_install_editable_uninstalls_existing(data, script, tmpdir):
"""
Test that installing an editable uninstalls a previously installed
non-editable version.
https://github.com/pypa/pip/issues/1548
https://github.com/pypa/pip/pull/1552
"""
to_install = data.packages.joinpath("pip-test-package-0.1.tar.gz")
result = script.pip_install_local(to_install)
assert 'Successfully installed pip-test-package' in result.stdout
result.assert_installed('piptestpackage', editable=False)
result = script.pip(
'install', '-e',
'%s#egg=pip-test-package' %
local_checkout(
'git+https://github.com/pypa/pip-test-package.git', tmpdir,
),
)
result.assert_installed('pip-test-package', with_files=['.git'])
assert 'Found existing installation: pip-test-package 0.1' in result.stdout
assert 'Uninstalling pip-test-package-' in result.stdout
assert 'Successfully uninstalled pip-test-package' in result.stdout
def test_install_editable_uninstalls_existing_from_path(script, data):
"""
Test that installing an editable uninstalls a previously installed
non-editable version from path
"""
to_install = data.src.joinpath('simplewheel-1.0')
result = script.pip_install_local(to_install)
assert 'Successfully installed simplewheel' in result.stdout
simple_folder = script.site_packages / 'simplewheel'
result.assert_installed('simplewheel', editable=False)
assert simple_folder in result.files_created, str(result.stdout)
result = script.pip(
'install', '-e',
to_install,
)
install_path = script.site_packages / 'simplewheel.egg-link'
assert install_path in result.files_created, str(result)
assert 'Found existing installation: simplewheel 1.0' in result.stdout
assert 'Uninstalling simplewheel-' in result.stdout
assert 'Successfully uninstalled simplewheel' in result.stdout
assert simple_folder in result.files_deleted, str(result.stdout)
@need_mercurial
def test_basic_install_editable_from_hg(script, tmpdir):
"""Test cloning and hg+file install from Mercurial."""
pkg_path = _create_test_package(script, name='testpackage', vcs='hg')
url = 'hg+{}#egg=testpackage'.format(path_to_url(pkg_path))
assert url.startswith('hg+file')
args = ['install', '-e', url]
result = script.pip(*args)
result.assert_installed('testpackage', with_files=['.hg'])
@need_mercurial
def test_vcs_url_final_slash_normalization(script, tmpdir):
"""
Test that presence or absence of final slash in VCS URL is normalized.
"""
pkg_path = _create_test_package(script, name='testpackage', vcs='hg')
args = ['install', '-e', 'hg+%s/#egg=testpackage' % path_to_url(pkg_path)]
result = script.pip(*args)
result.assert_installed('testpackage', with_files=['.hg'])
@need_bzr
def test_install_editable_from_bazaar(script, tmpdir):
"""Test checking out from Bazaar."""
pkg_path = _create_test_package(script, name='testpackage', vcs='bazaar')
args = ['install', '-e', 'bzr+%s/#egg=testpackage' % path_to_url(pkg_path)]
result = script.pip(*args)
result.assert_installed('testpackage', with_files=['.bzr'])
@pytest.mark.network
@need_bzr
def test_vcs_url_urlquote_normalization(script, tmpdir):
"""
Test that urlquoted characters are normalized for repo URL comparison.
"""
script.pip(
'install', '-e',
'%s/#egg=django-wikiapp' %
local_checkout(
'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp'
'/release-0.1',
tmpdir,
),
)
def test_basic_install_from_local_directory(script, data):
"""
Test installing from a local directory.
"""
to_install = data.packages.joinpath("FSPkg")
result = script.pip('install', to_install)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_basic_install_relative_directory(script, data):
"""
Test installing a requirement using a relative path.
"""
egg_info_file = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
egg_link_file = (
script.site_packages / 'FSPkg.egg-link'
)
package_folder = script.site_packages / 'fspkg'
# Compute relative install path to FSPkg from scratch path.
full_rel_path = Path(
os.path.relpath(data.packages.joinpath('FSPkg'), script.scratch_path)
)
full_rel_url = (
'file:' + full_rel_path.replace(os.path.sep, '/') + '#egg=FSPkg'
)
embedded_rel_path = script.scratch_path.joinpath(full_rel_path)
# For each relative path, install as either editable or not using either
# URLs with egg links or not.
for req_path in (full_rel_path, full_rel_url, embedded_rel_path):
# Regular install.
result = script.pip('install', req_path,
cwd=script.scratch_path)
assert egg_info_file in result.files_created, str(result)
assert package_folder in result.files_created, str(result)
script.pip('uninstall', '-y', 'fspkg')
# Editable install.
result = script.pip('install', '-e' + req_path,
cwd=script.scratch_path)
assert egg_link_file in result.files_created, str(result)
script.pip('uninstall', '-y', 'fspkg')
def test_install_quiet(script, data):
"""
Test that install -q is actually quiet.
"""
# Apparently if pip install -q is not actually quiet, then it breaks
# everything. See:
# https://github.com/pypa/pip/issues/3418
# https://github.com/docker-library/python/issues/83
to_install = data.packages.joinpath("FSPkg")
result = script.pip('install', '-qqq', to_install)
assert result.stdout == ""
assert result.stderr == ""
def test_hashed_install_success(script, data, tmpdir):
"""
Test that installing various sorts of requirements with correct hashes
works.
Test file URLs and index packages (which become HTTP URLs behind the
scenes).
"""
file_url = path_to_url(
(data.packages / 'simple-1.0.tar.gz').resolve())
with requirements_file(
'simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e'
'3848c2b9bbd2e8bf01db88c2c7\n'
'{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c'
'a016b42d2e6ce53619b653'.format(simple=file_url),
tmpdir) as reqs_file:
script.pip_install_local('-r', reqs_file.resolve())
def test_hashed_install_failure(script, tmpdir):
"""Test that wrong hashes stop installation.
This makes sure prepare_files() is called in the course of installation
and so has the opportunity to halt if hashes are wrong. Checks on various
kinds of hashes are in test_req.py.
"""
with requirements_file('simple2==1.0 --hash=sha256:9336af72ca661e6336eb87b'
'c7de3e8844d853e3848c2b9bbd2e8bf01db88c2c\n',
tmpdir) as reqs_file:
result = script.pip_install_local('-r',
reqs_file.resolve(),
expect_error=True)
assert len(result.files_created) == 0
def assert_re_match(pattern, text):
assert re.search(pattern, text), (
"Could not find {!r} in {!r}".format(pattern, text)
)
@pytest.mark.network
def test_hashed_install_failure_later_flag(script, tmpdir):
with requirements_file(
"blessings==1.0\n"
"tracefront==0.1 --hash=sha256:somehash\n"
"https://files.pythonhosted.org/packages/source/m/more-itertools/"
"more-itertools-1.0.tar.gz#md5=b21850c3cfa7efbb70fd662ab5413bdd\n"
"https://files.pythonhosted.org/"
"packages/source/p/peep/peep-3.1.1.tar.gz\n",
tmpdir,
) as reqs_file:
result = script.pip(
"install", "-r", reqs_file.resolve(), expect_error=True
)
assert_re_match(
r'Hashes are required in --require-hashes mode, but they are '
r'missing .*\n'
r' https://files\.pythonhosted\.org/packages/source/p/peep/peep'
r'-3\.1\.1\.tar\.gz --hash=sha256:[0-9a-f]+\n'
r' blessings==1.0 --hash=sha256:[0-9a-f]+\n'
r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
r' tracefront==0.1 .*:\n'
r' Expected sha256 somehash\n'
r' Got [0-9a-f]+',
result.stderr,
)
def test_install_from_local_directory_with_symlinks_to_directories(
script, data):
"""
Test installing from a local directory containing symlinks to directories.
"""
to_install = data.packages.joinpath("symlinks")
result = script.pip('install', to_install)
pkg_folder = script.site_packages / 'symlinks'
egg_info_folder = (
script.site_packages / 'symlinks-0.1.dev0-py%s.egg-info' % pyversion
)
assert pkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
def test_install_from_local_directory_with_socket_file(script, data, tmpdir):
"""
Test installing from a local directory containing a socket file.
"""
egg_info_file = (
script.site_packages / "FSPkg-0.1.dev0-py%s.egg-info" % pyversion
)
package_folder = script.site_packages / "fspkg"
to_copy = data.packages.joinpath("FSPkg")
to_install = tmpdir.joinpath("src")
shutil.copytree(to_copy, to_install)
# Socket file, should be ignored.
socket_file_path = os.path.join(to_install, "example")
make_socket_file(socket_file_path)
result = script.pip("install", "--verbose", to_install)
assert package_folder in result.files_created, str(result.stdout)
assert egg_info_file in result.files_created, str(result)
assert str(socket_file_path) in result.stderr
def test_install_from_local_directory_with_no_setup_py(script, data):
"""
Test installing from a local directory with no 'setup.py'.
"""
result = script.pip('install', data.root, expect_error=True)
assert not result.files_created
assert "is not installable." in result.stderr
assert "Neither 'setup.py' nor 'pyproject.toml' found." in result.stderr
def test_editable_install__local_dir_no_setup_py(
script, data, deprecated_python):
"""
Test installing in editable mode from a local directory with no setup.py.
"""
result = script.pip('install', '-e', data.root, expect_error=True)
assert not result.files_created
msg = result.stderr
if deprecated_python:
assert 'File "setup.py" not found. ' in msg
else:
assert msg.startswith('ERROR: File "setup.py" not found. ')
assert 'pyproject.toml' not in msg
def test_editable_install__local_dir_no_setup_py_with_pyproject(
script, deprecated_python):
"""
Test installing in editable mode from a local directory with no setup.py
but that does have pyproject.toml.
"""
local_dir = script.scratch_path.joinpath('temp')
local_dir.mkdir()
pyproject_path = local_dir.joinpath('pyproject.toml')
pyproject_path.write_text('')
result = script.pip('install', '-e', local_dir, expect_error=True)
assert not result.files_created
msg = result.stderr
if deprecated_python:
assert 'File "setup.py" not found. ' in msg
else:
assert msg.startswith('ERROR: File "setup.py" not found. ')
assert 'A "pyproject.toml" file was found' in msg
@skip_if_not_python2
@pytest.mark.xfail
def test_install_argparse_shadowed(script):
# When argparse is in the stdlib, we support installing it
# even though that's pretty useless because older packages did need to
# depend on it, and not having its metadata will cause pkg_resources
# requirements checks to fail // trigger easy-install, both of which are
# bad.
# XXX: Note, this test hits the outside-environment check, not the
# in-stdlib check, because our tests run in virtualenvs...
result = script.pip('install', 'argparse>=1.4')
assert "Not uninstalling argparse" in result.stdout
@pytest.mark.network
@skip_if_python2
def test_upgrade_argparse_shadowed(script):
# If argparse is installed - even if shadowed for imported - we support
# upgrading it and properly remove the older versions files.
script.pip('install', 'argparse==1.3')
result = script.pip('install', 'argparse>=1.4')
assert "Not uninstalling argparse" not in result.stdout
def test_install_curdir(script, data):
"""
Test installing current directory ('.').
"""
run_from = data.packages.joinpath("FSPkg")
# Python 2.4 Windows balks if this exists already
egg_info = join(run_from, "FSPkg.egg-info")
if os.path.isdir(egg_info):
rmtree(egg_info)
result = script.pip('install', curdir, cwd=run_from)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_pardir(script, data):
"""
Test installing parent directory ('..').
"""
run_from = data.packages.joinpath("FSPkg", "fspkg")
result = script.pip('install', pardir, cwd=run_from)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_global_option(script):
"""
Test using global distutils options.
(In particular those that disable the actual install action)
"""
result = script.pip(
'install', '--global-option=--version', "INITools==0.1",
expect_stderr=True)
assert 'INITools==0.1\n' in result.stdout
assert not result.files_created
def test_install_with_hacked_egg_info(script, data):
"""
test installing a package which defines its own egg_info class
"""
run_from = data.packages.joinpath("HackedEggInfo")
result = script.pip('install', '.', cwd=run_from)
assert 'Successfully installed hackedegginfo-0.0.0\n' in result.stdout
@pytest.mark.network
def test_install_using_install_option_and_editable(script, tmpdir):
"""
Test installing a tool using -e and --install-option
"""
folder = 'script_folder'
script.scratch_path.joinpath(folder).mkdir()
url = 'git+git://github.com/pypa/pip-test-package'
result = script.pip(
'install', '-e', '%s#egg=pip-test-package' %
local_checkout(url, tmpdir),
'--install-option=--script-dir=%s' % folder,
expect_stderr=True)
script_file = (
script.venv / 'src' / 'pip-test-package' /
folder / 'pip-test-package' + script.exe
)
assert script_file in result.files_created
@pytest.mark.network
@need_mercurial
def test_install_global_option_using_editable(script, tmpdir):
"""
Test using global distutils options, but in an editable installation
"""
url = 'hg+http://bitbucket.org/runeh/anyjson'
result = script.pip(
'install', '--global-option=--version', '-e',
'%[email protected]#egg=anyjson' % local_checkout(url, tmpdir),
expect_stderr=True)
assert 'Successfully installed anyjson' in result.stdout
@pytest.mark.network
def test_install_package_with_same_name_in_curdir(script):
"""
Test installing a package with the same name of a local folder
"""
script.scratch_path.joinpath("mock==0.6").mkdir()
result = script.pip('install', 'mock==0.6')
egg_folder = script.site_packages / 'mock-0.6.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
mock100_setup_py = textwrap.dedent('''\
from setuptools import setup
setup(name='mock',
version='100.1')''')
def test_install_folder_using_dot_slash(script):
"""
Test installing a folder using pip install ./foldername
"""
script.scratch_path.joinpath("mock").mkdir()
pkg_path = script.scratch_path / 'mock'
pkg_path.joinpath("setup.py").write_text(mock100_setup_py)
result = script.pip('install', './mock')
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_install_folder_using_slash_in_the_end(script):
r"""
Test installing a folder using pip install foldername/ or foldername\
"""
script.scratch_path.joinpath("mock").mkdir()
pkg_path = script.scratch_path / 'mock'
pkg_path.joinpath("setup.py").write_text(mock100_setup_py)
result = script.pip('install', 'mock' + os.path.sep)
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_install_folder_using_relative_path(script):
"""
Test installing a folder using pip install folder1/folder2
"""
script.scratch_path.joinpath("initools").mkdir()
script.scratch_path.joinpath("initools", "mock").mkdir()
pkg_path = script.scratch_path / 'initools' / 'mock'
pkg_path.joinpath("setup.py").write_text(mock100_setup_py)
result = script.pip('install', Path('initools') / 'mock')
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_package_which_contains_dev_in_name(script):
"""
    Test installing a package from PyPI which contains 'dev' in its name
"""
result = script.pip('install', 'django-devserver==0.0.4')
devserver_folder = script.site_packages / 'devserver'
egg_info_folder = (
script.site_packages / 'django_devserver-0.0.4-py%s.egg-info' %
pyversion
)
assert devserver_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_package_with_target(script):
"""
Test installing a package using pip install --target
"""
target_dir = script.scratch_path / 'target'
result = script.pip_install_local('-t', target_dir, "simple==1.0")
assert Path('scratch') / 'target' / 'simple' in result.files_created, (
str(result)
)
# Test repeated call without --upgrade, no files should have changed
result = script.pip_install_local(
'-t', target_dir, "simple==1.0", expect_stderr=True,
)
    assert Path('scratch') / 'target' / 'simple' not in result.files_updated
# Test upgrade call, check that new version is installed
result = script.pip_install_local('--upgrade', '-t',
target_dir, "simple==2.0")
assert Path('scratch') / 'target' / 'simple' in result.files_updated, (
str(result)
)
egg_folder = (
Path('scratch') / 'target' / 'simple-2.0-py%s.egg-info' % pyversion)
assert egg_folder in result.files_created, (
str(result)
)
# Test install and upgrade of single-module package
result = script.pip_install_local('-t', target_dir, 'singlemodule==0.0.0')
singlemodule_py = Path('scratch') / 'target' / 'singlemodule.py'
assert singlemodule_py in result.files_created, str(result)
result = script.pip_install_local('-t', target_dir, 'singlemodule==0.0.1',
'--upgrade')
assert singlemodule_py in result.files_updated, str(result)
def test_install_nonlocal_compatible_wheel(script, data):
target_dir = script.scratch_path / 'target'
# Test install with --target
result = script.pip(
'install',
'-t', target_dir,
'--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--python', '3',
'--platform', 'fakeplat',
'--abi', 'fakeabi',
'simplewheel',
)
assert result.returncode == SUCCESS
distinfo = Path('scratch') / 'target' / 'simplewheel-2.0-1.dist-info'
assert distinfo in result.files_created
# Test install without --target
result = script.pip(
'install',
'--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--python', '3',
'--platform', 'fakeplat',
'--abi', 'fakeabi',
'simplewheel',
expect_error=True
)
assert result.returncode == ERROR
def test_install_nonlocal_compatible_wheel_path(script, data):
target_dir = script.scratch_path / 'target'
# Test a full path requirement
result = script.pip(
'install',
'-t', target_dir,
'--no-index',
'--only-binary=:all:',
Path(data.packages) / 'simplewheel-2.0-py3-fakeabi-fakeplat.whl'
)
assert result.returncode == SUCCESS
distinfo = Path('scratch') / 'target' / 'simplewheel-2.0.dist-info'
assert distinfo in result.files_created
# Test a full path requirement (without --target)
result = script.pip(
'install',
'--no-index',
'--only-binary=:all:',
Path(data.packages) / 'simplewheel-2.0-py3-fakeabi-fakeplat.whl',
expect_error=True
)
assert result.returncode == ERROR
def test_install_with_target_and_scripts_no_warning(script, with_wheel):
"""
Test that installing with --target does not trigger the "script not
in PATH" warning (issue #5201)
"""
target_dir = script.scratch_path / 'target'
pkga_path = script.scratch_path / 'pkga'
pkga_path.mkdir()
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
version='0.1',
py_modules=["pkga"],
entry_points={
'console_scripts': ['pkga=pkga:main']
}
)
"""))
pkga_path.joinpath("pkga.py").write_text(textwrap.dedent("""
def main(): pass
"""))
result = script.pip('install', '--target', target_dir, pkga_path)
# This assertion isn't actually needed, if we get the script warning
# the script.pip() call will fail with "stderr not expected". But we
# leave the assertion to make the intention of the code clearer.
assert "--no-warn-script-location" not in result.stderr, str(result)
def test_install_package_with_root(script, data):
"""
Test installing a package using pip install --root
"""
root_dir = script.scratch_path / 'root'
result = script.pip(
'install', '--root', root_dir, '-f', data.find_links, '--no-index',
'simple==1.0',
)
normal_install_path = (
script.base_path / script.site_packages / 'simple-1.0-py%s.egg-info' %
pyversion
)
# use distutils to change the root exactly how the --root option does it
from distutils.util import change_root
root_path = change_root(
os.path.join(script.scratch, 'root'),
normal_install_path
)
assert root_path in result.files_created, str(result)
# Should show find-links location in output
assert "Looking in indexes: " not in result.stdout
assert "Looking in links: " in result.stdout
def test_install_package_with_prefix(script, data):
"""
Test installing a package using pip install --prefix
"""
prefix_path = script.scratch_path / 'prefix'
result = script.pip(
'install', '--prefix', prefix_path, '-f', data.find_links,
'--no-binary', 'simple', '--no-index', 'simple==1.0',
)
rel_prefix_path = script.scratch / 'prefix'
install_path = (
distutils.sysconfig.get_python_lib(prefix=rel_prefix_path) /
'simple-1.0-py{}.egg-info'.format(pyversion)
)
assert install_path in result.files_created, str(result)
def test_install_editable_with_prefix(script):
# make a dummy project
pkga_path = script.scratch_path / 'pkga'
pkga_path.mkdir()
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
version='0.1')
"""))
if hasattr(sys, "pypy_version_info"):
site_packages = os.path.join(
'prefix', 'lib', 'python{}'.format(pyversion), 'site-packages')
else:
site_packages = distutils.sysconfig.get_python_lib(prefix='prefix')
# make sure target path is in PYTHONPATH
pythonpath = script.scratch_path / site_packages
pythonpath.mkdir(parents=True)
script.environ["PYTHONPATH"] = pythonpath
# install pkga package into the absolute prefix directory
prefix_path = script.scratch_path / 'prefix'
result = script.pip(
'install', '--editable', pkga_path, '--prefix', prefix_path)
# assert pkga is installed at correct location
install_path = script.scratch / site_packages / 'pkga.egg-link'
assert install_path in result.files_created, str(result)
def test_install_package_conflict_prefix_and_user(script, data):
"""
Test installing a package using pip install --prefix --user errors out
"""
prefix_path = script.scratch_path / 'prefix'
result = script.pip(
'install', '-f', data.find_links, '--no-index', '--user',
'--prefix', prefix_path, 'simple==1.0',
expect_error=True, quiet=True,
)
assert (
"Can not combine '--user' and '--prefix'" in result.stderr
)
def test_install_package_that_emits_unicode(script, data):
"""
Install a package with a setup.py that emits UTF-8 output and then fails.
Refs https://github.com/pypa/pip/issues/326
"""
to_install = data.packages.joinpath("BrokenEmitsUTF8")
result = script.pip(
'install', to_install, expect_error=True, expect_temp=True, quiet=True,
)
assert (
'FakeError: this package designed to fail on install' in result.stderr
), 'stderr: {}'.format(result.stderr)
assert 'UnicodeDecodeError' not in result.stderr
assert 'UnicodeDecodeError' not in result.stdout
def test_install_package_with_utf8_setup(script, data):
"""Install a package with a setup.py that declares a utf-8 encoding."""
to_install = data.packages.joinpath("SetupPyUTF8")
script.pip('install', to_install)
def test_install_package_with_latin1_setup(script, data):
"""Install a package with a setup.py that declares a latin-1 encoding."""
to_install = data.packages.joinpath("SetupPyLatin1")
script.pip('install', to_install)
def test_url_req_case_mismatch_no_index(script, data):
"""
    Tarball URL requirements (with no egg fragment) that happen to have
    upper-case project names should be considered equal to later requirements
    that reference the project name using lower case.
tests/data/packages contains Upper-1.0.tar.gz and Upper-2.0.tar.gz
'requiresupper' has install_requires = ['upper']
"""
Upper = '/'.join((data.find_links, 'Upper-1.0.tar.gz'))
result = script.pip(
'install', '--no-index', '-f', data.find_links, Upper, 'requiresupper'
)
# only Upper-1.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
def test_url_req_case_mismatch_file_index(script, data):
"""
    Tarball URL requirements (with no egg fragment) that happen to have
    upper-case project names should be considered equal to later requirements
    that reference the project name using lower case.
tests/data/packages3 contains Dinner-1.0.tar.gz and Dinner-2.0.tar.gz
'requiredinner' has install_requires = ['dinner']
This test is similar to test_url_req_case_mismatch_no_index; that test
tests behaviour when using "--no-index -f", while this one does the same
test when using "--index-url". Unfortunately this requires a different
set of packages as it requires a prepared index.html file and
subdirectory-per-package structure.
"""
Dinner = '/'.join((data.find_links3, 'dinner', 'Dinner-1.0.tar.gz'))
result = script.pip(
'install', '--index-url', data.find_links3, Dinner, 'requiredinner'
)
    # only Dinner-1.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
def test_url_incorrect_case_no_index(script, data):
"""
Same as test_url_req_case_mismatch_no_index, except testing for the case
where the incorrect case is given in the name of the package to install
rather than in a requirements file.
"""
result = script.pip(
'install', '--no-index', '-f', data.find_links, "upper",
)
# only Upper-2.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_url_incorrect_case_file_index(script, data):
"""
Same as test_url_req_case_mismatch_file_index, except testing for the case
where the incorrect case is given in the name of the package to install
rather than in a requirements file.
"""
result = script.pip(
'install', '--index-url', data.find_links3, "dinner",
expect_stderr=True,
)
    # only Dinner-2.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
# Should show index-url location in output
assert "Looking in indexes: " in result.stdout
assert "Looking in links: " not in result.stdout
@pytest.mark.network
def test_compiles_pyc(script):
"""
Test installing with --compile on
"""
del script.environ["PYTHONDONTWRITEBYTECODE"]
script.pip("install", "--compile", "--no-binary=:all:", "INITools==0.2")
# There are many locations for the __init__.pyc file so attempt to find
# any of them
exists = [
os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
]
exists += glob.glob(
script.site_packages_path / "initools/__pycache__/__init__*.pyc"
)
assert any(exists)
@pytest.mark.network
def test_no_compiles_pyc(script):
"""
    Test installing with --no-compile on
"""
del script.environ["PYTHONDONTWRITEBYTECODE"]
script.pip("install", "--no-compile", "--no-binary=:all:", "INITools==0.2")
# There are many locations for the __init__.pyc file so attempt to find
# any of them
exists = [
os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
]
exists += glob.glob(
script.site_packages_path / "initools/__pycache__/__init__*.pyc"
)
assert not any(exists)
def test_install_upgrade_editable_depending_on_other_editable(script):
script.scratch_path.joinpath("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
version='0.1')
"""))
script.pip('install', '--editable', pkga_path)
result = script.pip('list', '--format=freeze')
assert "pkga==0.1" in result.stdout
script.scratch_path.joinpath("pkgb").mkdir()
pkgb_path = script.scratch_path / 'pkgb'
pkgb_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkgb',
version='0.1',
install_requires=['pkga'])
"""))
script.pip('install', '--upgrade', '--editable', pkgb_path, '--no-index')
result = script.pip('list', '--format=freeze')
assert "pkgb==0.1" in result.stdout
def test_install_subprocess_output_handling(script, data):
args = ['install', data.src.joinpath('chattymodule')]
# Regular install should not show output from the chatty setup.py
result = script.pip(*args)
assert 0 == result.stdout.count("HELLO FROM CHATTYMODULE")
script.pip("uninstall", "-y", "chattymodule")
# With --verbose we should show the output.
# Only count examples with sys.argv[1] == egg_info, because we call
# setup.py multiple times, which should not count as duplicate output.
result = script.pip(*(args + ["--verbose"]), expect_stderr=True)
assert 1 == result.stderr.count("HELLO FROM CHATTYMODULE egg_info")
script.pip("uninstall", "-y", "chattymodule")
# If the install fails, then we *should* show the output... but only once,
# even if --verbose is given.
result = script.pip(*(args + ["--global-option=--fail"]),
expect_error=True)
assert 1 == result.stderr.count("I DIE, I DIE")
result = script.pip(*(args + ["--global-option=--fail", "--verbose"]),
expect_error=True)
assert 1 == result.stderr.count("I DIE, I DIE")
def test_install_log(script, data, tmpdir):
# test that verbose logs go to "--log" file
f = tmpdir.joinpath("log.txt")
args = ['--log=%s' % f,
'install', data.src.joinpath('chattymodule')]
result = script.pip(*args)
assert 0 == result.stdout.count("HELLO FROM CHATTYMODULE")
with open(f, 'r') as fp:
# one from egg_info, one from install
assert 2 == fp.read().count("HELLO FROM CHATTYMODULE")
def test_install_topological_sort(script, data):
args = ['install', 'TopoRequires4', '--no-index', '-f', data.packages]
res = str(script.pip(*args))
order1 = 'TopoRequires, TopoRequires2, TopoRequires3, TopoRequires4'
order2 = 'TopoRequires, TopoRequires3, TopoRequires2, TopoRequires4'
assert order1 in res or order2 in res, res
def test_install_wheel_broken(script, with_wheel):
res = script.pip_install_local('wheelbroken', expect_stderr=True)
assert "Successfully installed wheelbroken-0.1" in str(res), str(res)
def test_cleanup_after_failed_wheel(script, with_wheel):
res = script.pip_install_local('wheelbrokenafter', expect_stderr=True)
# One of the effects of not cleaning up is broken scripts:
script_py = script.bin_path / "script.py"
assert script_py.exists(), script_py
shebang = open(script_py, 'r').readline().strip()
assert shebang != '#!python', shebang
# OK, assert that we *said* we were cleaning up:
# /!\ if in need to change this, also change test_pep517_no_legacy_cleanup
assert "Running setup.py clean for wheelbrokenafter" in str(res), str(res)
def test_install_builds_wheels(script, data, with_wheel):
# We need to use a subprocess to get the right value on Windows.
res = script.run('python', '-c', (
'from pip._internal.utils import appdirs; '
'print(appdirs.user_cache_dir("pip"))'
))
wheels_cache = os.path.join(res.stdout.rstrip('\n'), 'wheels')
# NB This incidentally tests a local tree + tarball inputs
# see test_install_editable_from_git_autobuild_wheel for editable
# vcs coverage.
to_install = data.packages.joinpath('requires_wheelbroken_upper')
res = script.pip(
'install', '--no-index', '-f', data.find_links,
to_install, expect_stderr=True)
expected = ("Successfully installed requires-wheelbroken-upper-0"
" upper-2.0 wheelbroken-0.1")
# Must have installed it all
assert expected in str(res), str(res)
wheels = []
for top, dirs, files in os.walk(wheels_cache):
wheels.extend(files)
# and built wheels for upper and wheelbroken
assert "Building wheel for upper" in str(res), str(res)
assert "Building wheel for wheelb" in str(res), str(res)
# Wheels are built for local directories, but not cached.
assert "Building wheel for requir" in str(res), str(res)
# wheelbroken has to run install
# into the cache
assert wheels != [], str(res)
# and installed from the wheel
assert "Running setup.py install for upper" not in str(res), str(res)
# Wheels are built for local directories, but not cached.
assert "Running setup.py install for requir" not in str(res), str(res)
# wheelbroken has to run install
assert "Running setup.py install for wheelb" in str(res), str(res)
# We want to make sure pure python wheels do not have an implementation tag
assert wheels == [
"Upper-2.0-py{}-none-any.whl".format(sys.version_info[0]),
]
def test_install_no_binary_disables_building_wheels(script, data, with_wheel):
to_install = data.packages.joinpath('requires_wheelbroken_upper')
res = script.pip(
'install', '--no-index', '--no-binary=upper', '-f', data.find_links,
to_install, expect_stderr=True)
expected = ("Successfully installed requires-wheelbroken-upper-0"
" upper-2.0 wheelbroken-0.1")
# Must have installed it all
assert expected in str(res), str(res)
# and built wheels for wheelbroken only
assert "Building wheel for wheelb" in str(res), str(res)
# Wheels are built for local directories, but not cached across runs
assert "Building wheel for requir" in str(res), str(res)
# Don't build wheel for upper which was blacklisted
assert "Building wheel for upper" not in str(res), str(res)
# Wheels are built for local directories, but not cached across runs
assert "Running setup.py install for requir" not in str(res), str(res)
    # And these two fell back to sdist-based installs.
assert "Running setup.py install for wheelb" in str(res), str(res)
assert "Running setup.py install for upper" in str(res), str(res)
@pytest.mark.network
def test_install_no_binary_builds_pep_517_wheel(script, data, with_wheel):
to_install = data.packages.joinpath('pep517_setup_and_pyproject')
res = script.pip(
'install', '--no-binary=:all:', '-f', data.find_links, to_install
)
expected = ("Successfully installed pep517-setup-and-pyproject")
# Must have installed the package
assert expected in str(res), str(res)
assert "Building wheel for pep517-setup" in str(res), str(res)
assert "Running setup.py install for pep517-set" not in str(res), str(res)
@pytest.mark.network
def test_install_no_binary_uses_local_backend(
script, data, with_wheel, tmpdir):
to_install = data.packages.joinpath('pep517_wrapper_buildsys')
script.environ['PIP_TEST_MARKER_FILE'] = marker = str(tmpdir / 'marker')
res = script.pip(
'install', '--no-binary=:all:', '-f', data.find_links, to_install
)
expected = "Successfully installed pep517-wrapper-buildsys"
# Must have installed the package
assert expected in str(res), str(res)
assert os.path.isfile(marker), "Local PEP 517 backend not used"
def test_install_no_binary_disables_cached_wheels(script, data, with_wheel):
# Seed the cache
script.pip(
'install', '--no-index', '-f', data.find_links,
'upper')
script.pip('uninstall', 'upper', '-y')
res = script.pip(
'install', '--no-index', '--no-binary=:all:', '-f', data.find_links,
'upper', expect_stderr=True)
assert "Successfully installed upper-2.0" in str(res), str(res)
# No wheel building for upper, which was blacklisted
assert "Building wheel for upper" not in str(res), str(res)
# Must have used source, not a cached wheel to install upper.
assert "Running setup.py install for upper" in str(res), str(res)
def test_install_editable_with_wrong_egg_name(script):
script.scratch_path.joinpath("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
version='0.1')
"""))
result = script.pip(
'install', '--editable', 'file://%s#egg=pkgb' % pkga_path
)
assert ("Generating metadata for package pkgb produced metadata "
"for project name pkga. Fix your #egg=pkgb "
"fragments.") in result.stderr
assert "Successfully installed pkga" in str(result), str(result)
def test_install_tar_xz(script, data):
try:
import lzma # noqa
except ImportError:
pytest.skip("No lzma support")
res = script.pip('install', data.packages / 'singlemodule-0.0.1.tar.xz')
assert "Successfully installed singlemodule-0.0.1" in res.stdout, res
def test_install_tar_lzma(script, data):
try:
import lzma # noqa
except ImportError:
pytest.skip("No lzma support")
res = script.pip('install', data.packages / 'singlemodule-0.0.1.tar.lzma')
assert "Successfully installed singlemodule-0.0.1" in res.stdout, res
def test_double_install(script):
"""
Test double install passing with two same version requirements
"""
result = script.pip('install', 'pip', 'pip')
msg = "Double requirement given: pip (already in pip, name='pip')"
assert msg not in result.stderr
def test_double_install_fail(script):
"""
Test double install failing with two different version requirements
"""
result = script.pip('install', 'pip==*', 'pip==7.1.2', expect_error=True)
msg = ("Double requirement given: pip==7.1.2 (already in pip==*, "
"name='pip')")
assert msg in result.stderr
def _get_expected_error_text():
return (
"Package 'pkga' requires a different Python: {} not in '<1.0'"
).format('.'.join(map(str, sys.version_info[:3])))
def test_install_incompatible_python_requires(script):
script.scratch_path.joinpath("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
python_requires='<1.0',
version='0.1')
"""))
result = script.pip('install', pkga_path, expect_error=True)
assert _get_expected_error_text() in result.stderr, str(result)
def test_install_incompatible_python_requires_editable(script):
script.scratch_path.joinpath("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
python_requires='<1.0',
version='0.1')
"""))
result = script.pip(
'install', '--editable=%s' % pkga_path, expect_error=True)
assert _get_expected_error_text() in result.stderr, str(result)
def test_install_incompatible_python_requires_wheel(script, with_wheel):
script.scratch_path.joinpath("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
python_requires='<1.0',
version='0.1')
"""))
script.run(
'python', 'setup.py', 'bdist_wheel', '--universal', cwd=pkga_path)
result = script.pip('install', './pkga/dist/pkga-0.1-py2.py3-none-any.whl',
expect_error=True)
assert _get_expected_error_text() in result.stderr, str(result)
def test_install_compatible_python_requires(script):
script.scratch_path.joinpath("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
python_requires='>1.0',
version='0.1')
"""))
res = script.pip('install', pkga_path)
assert "Successfully installed pkga-0.1" in res.stdout, res
@pytest.mark.network
def test_install_pep508_with_url(script):
res = script.pip(
'install', '--no-index',
'packaging@https://files.pythonhosted.org/packages/2f/2b/'
'c681de3e1dbcd469537aefb15186b800209aa1f299d933d23b48d85c9d56/'
'packaging-15.3-py2.py3-none-any.whl#sha256='
'ce1a869fe039fbf7e217df36c4653d1dbe657778b2d41709593a0003584405f4'
)
assert "Successfully installed packaging-15.3" in str(res), str(res)
@pytest.mark.network
def test_install_pep508_with_url_in_install_requires(script):
pkga_path = create_test_package_with_setup(
script, name='pkga', version='1.0',
install_requires=[
'packaging@https://files.pythonhosted.org/packages/2f/2b/'
'c681de3e1dbcd469537aefb15186b800209aa1f299d933d23b48d85c9d56/'
'packaging-15.3-py2.py3-none-any.whl#sha256='
'ce1a869fe039fbf7e217df36c4653d1dbe657778b2d41709593a0003584405f4'
],
)
res = script.pip('install', pkga_path)
assert "Successfully installed packaging-15.3" in str(res), str(res)
@pytest.mark.network
@pytest.mark.parametrize('index', (PyPI.simple_url, TestPyPI.simple_url))
def test_install_from_test_pypi_with_ext_url_dep_is_blocked(script, index):
res = script.pip(
'install',
'--index-url',
index,
'pep-508-url-deps',
expect_error=True,
)
error_message = (
"Packages installed from PyPI cannot depend on packages "
"which are not also hosted on PyPI."
)
error_cause = (
"pep-508-url-deps depends on sampleproject@ "
"https://github.com/pypa/sampleproject/archive/master.zip"
)
assert res.returncode == 1
assert error_message in res.stderr, str(res)
assert error_cause in res.stderr, str(res)
def test_installing_scripts_outside_path_prints_warning(script):
result = script.pip_install_local(
"--prefix", script.scratch_path, "script_wheel1"
)
assert "Successfully installed script-wheel1" in result.stdout, str(result)
assert "--no-warn-script-location" in result.stderr
def test_installing_scripts_outside_path_can_suppress_warning(script):
result = script.pip_install_local(
"--prefix", script.scratch_path, "--no-warn-script-location",
"script_wheel1"
)
assert "Successfully installed script-wheel1" in result.stdout, str(result)
assert "--no-warn-script-location" not in result.stderr
def test_installing_scripts_on_path_does_not_print_warning(script):
result = script.pip_install_local("script_wheel1")
assert "Successfully installed script-wheel1" in result.stdout, str(result)
assert "--no-warn-script-location" not in result.stderr
def test_installed_files_recorded_in_deterministic_order(script, data):
"""
Ensure that we record the files installed by a package in a deterministic
order, to make installs reproducible.
"""
to_install = data.packages.joinpath("FSPkg")
result = script.pip('install', to_install)
fspkg_folder = script.site_packages / 'fspkg'
egg_info = 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
installed_files_path = (
script.site_packages / egg_info / 'installed-files.txt'
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert installed_files_path in result.files_created, str(result)
installed_files_path = result.files_created[installed_files_path].full
installed_files_lines = [
p for p in Path(installed_files_path).read_text().split('\n') if p
]
assert installed_files_lines == sorted(installed_files_lines)
def test_install_conflict_results_in_warning(script, data):
pkgA_path = create_test_package_with_setup(
script,
name='pkgA', version='1.0', install_requires=['pkgb == 1.0'],
)
pkgB_path = create_test_package_with_setup(
script,
name='pkgB', version='2.0',
)
# Install pkgA without its dependency
result1 = script.pip('install', '--no-index', pkgA_path, '--no-deps')
assert "Successfully installed pkgA-1.0" in result1.stdout, str(result1)
# Then install an incorrect version of the dependency
result2 = script.pip(
'install', '--no-index', pkgB_path, allow_stderr_error=True,
)
assert "pkga 1.0 has requirement pkgb==1.0" in result2.stderr, str(result2)
assert "Successfully installed pkgB-2.0" in result2.stdout, str(result2)
def test_install_conflict_warning_can_be_suppressed(script, data):
pkgA_path = create_test_package_with_setup(
script,
name='pkgA', version='1.0', install_requires=['pkgb == 1.0'],
)
pkgB_path = create_test_package_with_setup(
script,
name='pkgB', version='2.0',
)
# Install pkgA without its dependency
result1 = script.pip('install', '--no-index', pkgA_path, '--no-deps')
assert "Successfully installed pkgA-1.0" in result1.stdout, str(result1)
# Then install an incorrect version of the dependency; suppressing warning
result2 = script.pip(
'install', '--no-index', pkgB_path, '--no-warn-conflicts'
)
assert "Successfully installed pkgB-2.0" in result2.stdout, str(result2)
def test_target_install_ignores_distutils_config_install_prefix(script):
prefix = script.scratch_path / 'prefix'
distutils_config = Path(os.path.expanduser('~'),
'pydistutils.cfg' if sys.platform == 'win32'
else '.pydistutils.cfg')
distutils_config.write_text(textwrap.dedent(
'''
[install]
prefix=%s
''' % str(prefix)))
target = script.scratch_path / 'target'
result = script.pip_install_local('simplewheel', '-t', target)
assert "Successfully installed simplewheel" in result.stdout
relative_target = os.path.relpath(target, script.base_path)
relative_script_base = os.path.relpath(prefix, script.base_path)
assert relative_target in result.files_created
assert relative_script_base not in result.files_created
@pytest.mark.incompatible_with_test_venv
def test_user_config_accepted(script):
    # The 'user' option set in the config file is parsed as 0/1 instead of
    # True/False.
# Check that this doesn't cause a problem.
config_file = script.scratch_path / 'pip.conf'
script.environ['PIP_CONFIG_FILE'] = str(config_file)
config_file.write_text("[install]\nuser = true")
result = script.pip_install_local('simplewheel')
assert "Successfully installed simplewheel" in result.stdout
relative_user = os.path.relpath(script.user_site_path, script.base_path)
assert join(relative_user, 'simplewheel') in result.files_created
@pytest.mark.parametrize(
'install_args, expected_message', [
([], 'Requirement already satisfied: pip in'),
(['--upgrade'], 'Requirement already up-to-date: pip in'),
]
)
@pytest.mark.parametrize("use_module", [True, False])
def test_install_pip_does_not_modify_pip_when_satisfied(
script, install_args, expected_message, use_module):
"""
    Test that pip doesn't upgrade itself if the installed version already
    satisfies the requirement.
"""
result = script.pip_install_local(
'pip', *install_args, use_module=use_module
)
assert expected_message in result.stdout, str(result)
def test_ignore_yanked_file(script, data):
"""
    Test ignoring a "yanked" file.
"""
result = script.pip(
'install', 'simple',
'--index-url', data.index_url('yanked'),
)
# Make sure a "yanked" release is ignored
assert 'Successfully installed simple-2.0\n' in result.stdout, str(result)
def test_install_yanked_file_and_print_warning(script, data):
"""
    Test installing a "yanked" file and printing a warning.
Yanked files are always ignored, unless they are the only file that
matches a version specifier that "pins" to an exact version (PEP 592).
"""
result = script.pip(
'install', 'simple==3.0',
'--index-url', data.index_url('yanked'),
expect_stderr=True,
)
expected_warning = 'Reason for being yanked: test reason message'
assert expected_warning in result.stderr, str(result)
# Make sure a "yanked" release is installed
assert 'Successfully installed simple-3.0\n' in result.stdout, str(result)
@pytest.mark.parametrize("install_args", [
(),
("--trusted-host", "localhost"),
])
def test_install_sends_client_cert(install_args, script, cert_factory, data):
cert_path = cert_factory()
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.load_cert_chain(cert_path, cert_path)
ctx.load_verify_locations(cafile=cert_path)
ctx.verify_mode = ssl.CERT_REQUIRED
server = make_mock_server(ssl_context=ctx)
server.mock.side_effect = [
package_page({
"simple-3.0.tar.gz": "/files/simple-3.0.tar.gz",
}),
file_response(str(data.packages / "simple-3.0.tar.gz")),
]
url = "https://{}:{}/simple".format(server.host, server.port)
args = ["install", "-vvv", "--cert", cert_path, "--client-cert", cert_path]
args.extend(["--index-url", url])
args.extend(install_args)
args.append("simple")
with server_running(server):
script.pip(*args)
assert server.mock.call_count == 2
for call_args in server.mock.call_args_list:
environ, _ = call_args.args
assert "SSL_CLIENT_CERT" in environ
assert environ["SSL_CLIENT_CERT"]
| mit |
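The two yanked-file tests above encode the PEP 592 selection rule: a "yanked" release is skipped during candidate selection unless it is the only file matching an exact version pin, in which case it is installed with a warning. A minimal sketch of that rule, assuming a simplified Candidate record (the names are illustrative, not pip's internal resolver API):

from collections import namedtuple

Candidate = namedtuple("Candidate", "version yanked")

def pick_candidate(candidates, pinned=False):
    # Prefer non-yanked files; versions are assumed directly comparable here.
    not_yanked = [c for c in candidates if not c.yanked]
    if not_yanked:
        return max(not_yanked, key=lambda c: c.version)
    # Fall back to a yanked file only for an exact pin such as 'simple==3.0'
    # (PEP 592); the caller is expected to print the yank-reason warning.
    if pinned and candidates:
        return candidates[0]
    return None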
kurtrwall/wagtail | wagtail/wagtailcore/permission_policies/base.py | 7 | 16143 | from __future__ import absolute_import, unicode_literals
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldDoesNotExist, ImproperlyConfigured
from django.db.models import Q
from django.utils.functional import cached_property
from wagtail.utils.compat import user_is_authenticated
class BasePermissionPolicy(object):
"""
A 'permission policy' is an object that handles all decisions about the actions
users are allowed to perform on a given model. The mechanism by which it does this
is arbitrary, and may or may not involve the django.contrib.auth Permission model;
it could be as simple as "allow all users to do everything".
In this way, admin apps can change their permission-handling logic just by swapping
to a different policy object, rather than having that logic spread across numerous
view functions.
BasePermissionPolicy is an abstract class that all permission policies inherit from.
The only method that subclasses need to implement is users_with_any_permission;
all other methods can be derived from that (but in practice, subclasses will probably
want to override additional methods, either for efficiency or to implement more
fine-grained permission logic).
"""
def __init__(self, model):
self.model = model
# Basic user permission tests. Most policies are expected to override these,
# since the default implementation is to query the set of permitted users
# (which is pretty inefficient).
def user_has_permission(self, user, action):
"""
Return whether the given user has permission to perform the given action
on some or all instances of this model
"""
return (user in self.users_with_permission(action))
def user_has_any_permission(self, user, actions):
"""
Return whether the given user has permission to perform any of the given actions
on some or all instances of this model
"""
return any(self.user_has_permission(user, action) for action in actions)
# Operations for retrieving a list of users matching the permission criteria.
# All policies must implement, at minimum, users_with_any_permission.
def users_with_any_permission(self, actions):
"""
Return a queryset of users who have permission to perform any of the given actions
on some or all instances of this model
"""
raise NotImplementedError
def users_with_permission(self, action):
"""
Return a queryset of users who have permission to perform the given action on
some or all instances of this model
"""
return self.users_with_any_permission([action])
# Per-instance permission tests. In the simplest cases - corresponding to the
# basic Django permission model - permissions are enforced on a per-model basis
# and so these methods can simply defer to the per-model tests. Policies that
# require per-instance permission logic must override, at minimum:
# user_has_permission_for_instance
# instances_user_has_any_permission_for
# users_with_any_permission_for_instance
def user_has_permission_for_instance(self, user, action, instance):
"""
Return whether the given user has permission to perform the given action on the
given model instance
"""
return self.user_has_permission(user, action)
def user_has_any_permission_for_instance(self, user, actions, instance):
"""
Return whether the given user has permission to perform any of the given actions
on the given model instance
"""
return any(
self.user_has_permission_for_instance(user, action, instance)
for action in actions
)
def instances_user_has_any_permission_for(self, user, actions):
"""
Return a queryset of all instances of this model for which the given user has
permission to perform any of the given actions
"""
if self.user_has_any_permission(user, actions):
return self.model.objects.all()
else:
return self.model.objects.none()
def instances_user_has_permission_for(self, user, action):
"""
Return a queryset of all instances of this model for which the given user has
permission to perform the given action
"""
return self.instances_user_has_any_permission_for(user, [action])
def users_with_any_permission_for_instance(self, actions, instance):
"""
Return a queryset of all users who have permission to perform any of the given
actions on the given model instance
"""
return self.users_with_any_permission(actions)
def users_with_permission_for_instance(self, action, instance):
return self.users_with_any_permission_for_instance([action], instance)
class BlanketPermissionPolicy(BasePermissionPolicy):
"""
A permission policy that gives everyone (including anonymous users)
full permission over the given model
"""
def user_has_permission(self, user, action):
return True
def user_has_any_permission(self, user, actions):
return True
def users_with_any_permission(self, actions):
# Here we filter out inactive users from the results, even though inactive users
# - and for that matter anonymous users - still have permission according to the
# user_has_permission method. This is appropriate because, for most applications,
# setting is_active=False is equivalent to deleting the user account; you would
# not want these accounts to appear in, for example, a dropdown of users to
# assign a task to. The result here could never be completely logically correct
# (because it will not include anonymous users), so as the next best thing we
# return the "least surprise" result.
return get_user_model().objects.filter(is_active=True)
def users_with_permission(self, action):
return get_user_model().objects.filter(is_active=True)
class AuthenticationOnlyPermissionPolicy(BasePermissionPolicy):
"""
A permission policy that gives all active authenticated users
full permission over the given model
"""
def user_has_permission(self, user, action):
return user_is_authenticated(user) and user.is_active
def user_has_any_permission(self, user, actions):
return user_is_authenticated(user) and user.is_active
def users_with_any_permission(self, actions):
return get_user_model().objects.filter(is_active=True)
def users_with_permission(self, action):
return get_user_model().objects.filter(is_active=True)
class BaseDjangoAuthPermissionPolicy(BasePermissionPolicy):
"""
Extends BasePermissionPolicy with helper methods useful for policies that need to
perform lookups against the django.contrib.auth permission model
"""
def __init__(self, model, auth_model=None):
# `auth_model` specifies the model to be used for permission record lookups;
# usually this will match `model` (which specifies the type of instances that
# `instances_user_has_permission_for` will return), but this may differ when
# swappable models are in use - for example, an interface for editing user
# records might use a custom User model but will typically still refer to the
# permission records for auth.user.
super(BaseDjangoAuthPermissionPolicy, self).__init__(model)
self.auth_model = auth_model or self.model
self.app_label = self.auth_model._meta.app_label
self.model_name = self.auth_model._meta.model_name
@cached_property
def _content_type(self):
return ContentType.objects.get_for_model(self.auth_model)
def _get_permission_name(self, action):
"""
Get the full app-label-qualified permission name (as required by
user.has_perm(...) ) for the given action on this model
"""
return '%s.%s_%s' % (self.app_label, action, self.model_name)
def _get_users_with_any_permission_codenames_filter(self, permission_codenames):
"""
Given a list of permission codenames, return a filter expression which
will find all users which have any of those permissions - either
through group permissions, user permissions, or implicitly through
being a superuser.
"""
permissions = Permission.objects.filter(
content_type=self._content_type,
codename__in=permission_codenames
)
return (
Q(is_superuser=True) |
Q(user_permissions__in=permissions) |
Q(groups__permissions__in=permissions)
) & Q(is_active=True)
def _get_users_with_any_permission_codenames(self, permission_codenames):
"""
Given a list of permission codenames, return a queryset of users which
have any of those permissions - either through group permissions, user
permissions, or implicitly through being a superuser.
"""
filter_expr = self._get_users_with_any_permission_codenames_filter(permission_codenames)
return get_user_model().objects.filter(filter_expr).distinct()
class ModelPermissionPolicy(BaseDjangoAuthPermissionPolicy):
"""
A permission policy that enforces permissions at the model level, by consulting
the standard django.contrib.auth permission model directly
"""
def user_has_permission(self, user, action):
return user.has_perm(self._get_permission_name(action))
def users_with_any_permission(self, actions):
permission_codenames = [
'%s_%s' % (action, self.model_name)
for action in actions
]
return self._get_users_with_any_permission_codenames(permission_codenames)
class OwnershipPermissionPolicy(BaseDjangoAuthPermissionPolicy):
"""
A permission policy for objects that support a concept of 'ownership', where
the owner is typically the user who created the object.
This policy piggybacks off 'add' and 'change' permissions defined through the
django.contrib.auth Permission model, as follows:
* any user with 'add' permission can create instances, and ALSO edit instances
that they own
* any user with 'change' permission can edit instances regardless of ownership
* ability to edit also implies ability to delete
Besides 'add', 'change' and 'delete', no other actions are recognised or permitted
(unless the user is an active superuser, in which case they can do everything).
"""
def __init__(self, model, auth_model=None, owner_field_name='owner'):
super(OwnershipPermissionPolicy, self).__init__(model, auth_model=auth_model)
self.owner_field_name = owner_field_name
# make sure owner_field_name is a field that exists on the model
try:
self.model._meta.get_field(self.owner_field_name)
except FieldDoesNotExist:
raise ImproperlyConfigured(
"%s has no field named '%s'. To use this model with OwnershipPermissionPolicy, "
"you must specify a valid field name as owner_field_name."
% (self.model, self.owner_field_name)
)
def user_has_permission(self, user, action):
if action == 'add':
return user.has_perm(self._get_permission_name('add'))
elif action == 'change' or action == 'delete':
return (
# having 'add' permission means that there are *potentially*
# some instances they can edit (namely: ones they own),
# which is sufficient for returning True here
user.has_perm(self._get_permission_name('add')) or
user.has_perm(self._get_permission_name('change'))
)
else:
# unrecognised actions are only allowed for active superusers
return user.is_active and user.is_superuser
def users_with_any_permission(self, actions):
if 'change' in actions or 'delete' in actions:
# either 'add' or 'change' permission means that there are *potentially*
# some instances they can edit
permission_codenames = [
'add_%s' % self.model_name,
'change_%s' % self.model_name
]
elif 'add' in actions:
permission_codenames = [
'add_%s' % self.model_name,
]
else:
# none of the actions passed in here are ones that we recognise, so only
# allow them for active superusers
return get_user_model().objects.filter(is_active=True, is_superuser=True)
return self._get_users_with_any_permission_codenames(permission_codenames)
def user_has_permission_for_instance(self, user, action, instance):
return self.user_has_any_permission_for_instance(user, [action], instance)
def user_has_any_permission_for_instance(self, user, actions, instance):
if 'change' in actions or 'delete' in actions:
if user.has_perm(self._get_permission_name('change')):
return True
elif (
user.has_perm(self._get_permission_name('add')) and
getattr(instance, self.owner_field_name) == user
):
return True
else:
return False
else:
# 'change' and 'delete' are the only actions that are well-defined
# for specific instances. Other actions are only available to
# active superusers.
return user.is_active and user.is_superuser
def instances_user_has_any_permission_for(self, user, actions):
if user.is_active and user.is_superuser:
# active superusers can perform any action (including unrecognised ones)
# on any instance
return self.model.objects.all()
elif 'change' in actions or 'delete' in actions:
if user.has_perm(self._get_permission_name('change')):
# user can edit all instances
return self.model.objects.all()
elif user.has_perm(self._get_permission_name('add')):
# user can edit their own instances
return self.model.objects.filter(**{self.owner_field_name: user})
else:
# user has no permissions at all on this model
return self.model.objects.none()
else:
# action is either not recognised, or is the 'add' action which is
# not meaningful for existing instances. As such, non-superusers
# cannot perform it on any existing instances.
return self.model.objects.none()
def users_with_any_permission_for_instance(self, actions, instance):
if 'change' in actions or 'delete' in actions:
# get filter expression for users with 'change' permission
filter_expr = self._get_users_with_any_permission_codenames_filter([
'change_%s' % self.model_name
])
# add on the item's owner, if they still have 'add' permission
# (and the owner field isn't blank)
owner = getattr(instance, self.owner_field_name)
if owner is not None and owner.has_perm(self._get_permission_name('add')):
filter_expr = filter_expr | Q(pk=owner.pk)
# return the filtered queryset
return get_user_model().objects.filter(filter_expr).distinct()
else:
# action is either not recognised, or is the 'add' action which is
# not meaningful for existing instances. As such, the action is only
# available to superusers
return get_user_model().objects.filter(is_active=True, is_superuser=True)
| bsd-3-clause |
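A minimal usage sketch for the policies above, assuming a model with an 'owner' field and the standard add/change/delete permissions; the Document model and the view are hypothetical:

from django.core.exceptions import PermissionDenied
from wagtail.wagtailcore.permission_policies.base import OwnershipPermissionPolicy
from myapp.models import Document  # hypothetical model with an 'owner' foreign key

policy = OwnershipPermissionPolicy(Document, owner_field_name='owner')

def edit_document(request, doc_id):
    doc = Document.objects.get(pk=doc_id)
    # Owners holding 'add' permission pass, as does anyone with 'change'.
    if not policy.user_has_permission_for_instance(request.user, 'change', doc):
        raise PermissionDenied
    return doc  # stand-in for rendering the actual edit view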
popazerty/SDG-e2 | lib/python/Components/FanControl.py | 61 | 3723 | import os
from Components.config import config, ConfigSubList, ConfigSubsection, ConfigSlider
from Tools.BoundFunction import boundFunction
import NavigationInstance
from enigma import iRecordableService
class FanControl:
# ATM there's only support for one fan
def __init__(self):
if os.path.exists("/proc/stb/fp/fan_vlt") or os.path.exists("/proc/stb/fp/fan_pwm") or os.path.exists("/proc/stb/fp/fan_speed"):
self.fancount = 1
else:
self.fancount = 0
self.createConfig()
config.misc.standbyCounter.addNotifier(self.standbyCounterChanged, initial_call = False)
def setVoltage_PWM(self):
for fanid in range(self.getFanCount()):
cfg = self.getConfig(fanid)
self.setVoltage(fanid, cfg.vlt.value)
self.setPWM(fanid, cfg.pwm.value)
print "[FanControl]: setting fan values: fanid = %d, voltage = %d, pwm = %d" % (fanid, cfg.vlt.value, cfg.pwm.value)
def setVoltage_PWM_Standby(self):
for fanid in range(self.getFanCount()):
cfg = self.getConfig(fanid)
self.setVoltage(fanid, cfg.vlt_standby.value)
self.setPWM(fanid, cfg.pwm_standby.value)
print "[FanControl]: setting fan values (standby mode): fanid = %d, voltage = %d, pwm = %d" % (fanid, cfg.vlt_standby.value, cfg.pwm_standby.value)
def getRecordEvent(self, recservice, event):
recordings = len(NavigationInstance.instance.getRecordings())
if event == iRecordableService.evEnd:
if recordings == 0:
self.setVoltage_PWM_Standby()
elif event == iRecordableService.evStart:
if recordings == 1:
self.setVoltage_PWM()
def leaveStandby(self):
NavigationInstance.instance.record_event.remove(self.getRecordEvent)
recordings = NavigationInstance.instance.getRecordings()
if not recordings:
self.setVoltage_PWM()
def standbyCounterChanged(self, configElement):
from Screens.Standby import inStandby
inStandby.onClose.append(self.leaveStandby)
recordings = NavigationInstance.instance.getRecordings()
NavigationInstance.instance.record_event.append(self.getRecordEvent)
if not recordings:
self.setVoltage_PWM_Standby()
def createConfig(self):
def setVlt(fancontrol, fanid, configElement):
fancontrol.setVoltage(fanid, configElement.value)
def setPWM(fancontrol, fanid, configElement):
fancontrol.setPWM(fanid, configElement.value)
config.fans = ConfigSubList()
for fanid in range(self.getFanCount()):
fan = ConfigSubsection()
fan.vlt = ConfigSlider(default = 15, increment = 5, limits = (0, 255))
fan.pwm = ConfigSlider(default = 0, increment = 5, limits = (0, 255))
fan.vlt_standby = ConfigSlider(default = 5, increment = 5, limits = (0, 255))
fan.pwm_standby = ConfigSlider(default = 0, increment = 5, limits = (0, 255))
fan.vlt.addNotifier(boundFunction(setVlt, self, fanid))
fan.pwm.addNotifier(boundFunction(setPWM, self, fanid))
config.fans.append(fan)
def getConfig(self, fanid):
return config.fans[fanid]
def getFanCount(self):
return self.fancount
def hasRPMSensor(self, fanid):
return os.path.exists("/proc/stb/fp/fan_speed")
def hasFanControl(self, fanid):
return os.path.exists("/proc/stb/fp/fan_vlt") or os.path.exists("/proc/stb/fp/fan_pwm")
def getFanSpeed(self, fanid):
return int(open("/proc/stb/fp/fan_speed", "r").readline().strip()[:-4])
def getVoltage(self, fanid):
return int(open("/proc/stb/fp/fan_vlt", "r").readline().strip(), 16)
def setVoltage(self, fanid, value):
if value > 255:
return
open("/proc/stb/fp/fan_vlt", "w").write("%x" % value)
def getPWM(self, fanid):
return int(open("/proc/stb/fp/fan_pwm", "r").readline().strip(), 16)
def setPWM(self, fanid, value):
if value > 255:
return
open("/proc/stb/fp/fan_pwm", "w").write("%x" % value)
fancontrol = FanControl()
| gpl-2.0 |
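An illustrative sketch of driving the singleton above from a setup screen, assuming the /proc/stb/fp interfaces exist on the receiver (Python 2, like the module itself):

from Components.FanControl import fancontrol

for fanid in range(fancontrol.getFanCount()):
    cfg = fancontrol.getConfig(fanid)
    cfg.vlt.value = 20  # the notifier installed in createConfig writes fan_vlt
    cfg.vlt.save()
    if fancontrol.hasRPMSensor(fanid):
        print "fan %d: %d rpm" % (fanid, fancontrol.getFanSpeed(fanid))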
abelg/virtual_io_acceleration | tools/perf/util/setup.py | 220 | 1407 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='[email protected]',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
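This script is normally driven by perf's Makefile, which exports the environment variables read above; a hedged sketch of an equivalent manual invocation (the paths are illustrative):

import os, subprocess

env = dict(os.environ)
env.update(PYTHON_EXTBUILD_LIB='/tmp/perf-ext/lib/',
           PYTHON_EXTBUILD_TMP='/tmp/perf-ext/tmp/',
           LIBTRACEEVENT='../lib/traceevent/libtraceevent.a')
subprocess.check_call(['python2', 'util/setup.py', '--quiet', 'build_ext'],
                      cwd='tools/perf', env=env)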
oracleyue/rubber-for-latex | src/converters/latex.py | 1 | 39183 | # This file is part of Rubber and thus covered by the GPL
# (c) Emmanuel Beffara, 2002--2006
# (c) Sebastian Kapfer 2015
# vim: noet:ts=4
"""
LaTeX document building system for Rubber.
This module contains all the code in Rubber that actually does the job of
building a LaTeX document from start to finish.
"""
import os, os.path, sys, imp
import re
import string
from rubber import _
from rubber.util import *
import rubber.depend
import rubber.latex_modules
import rubber.module_interface
from rubber.tex import EOF, OPEN, SPACE, END_LINE
#---- Module handler ----{{{1
class Modules:
"""
This class gathers all operations related to the management of modules.
The modules are searched for first in the current directory, then as
scripts in the 'modules' directory in the program's data directory, then
as a Python module in the package `rubber.latex'.
"""
def __init__ (self, env):
self.env = env
self.objects = {}
self.commands = {}
def __getitem__ (self, name):
"""
Return the module object of the given name.
"""
return self.objects[name]
def __contains__ (self, other):
return other in self.objects
def has_key (self, name):
"""
Check if a given module is loaded.
"""
return self.objects.has_key(name)
def register (self, name, context={}):
"""
Attempt to register a module with the specified name. If the module is
already loaded, do nothing. If it is found and not yet loaded, then
load it, initialise it (using the context passed as optional argument)
and run any delayed commands for it.
"""
if self.has_key(name):
msg.debug(_("module %s already registered") % name, pkg='latex')
return 2
assert name != ''
# First look for a script
mod = None
rub_searchpath = [
"", # working dir
rubber.latex_modules.__path__[0], # builtin rubber modules
# these are different from pre-1.4 search paths to avoid pulling
# in old modules from previous installs.
"/usr/local/share/rubber/latex_modules",
"/usr/share/rubber/latex_modules",
# FIXME allow the user to configure this, e.g. via RUBINPUTS
]
for path in rub_searchpath:
file = os.path.join(path, name + ".rub")
if os.path.exists(file):
mod = ScriptModule(self.env, file)
msg.log(_("script module %s registered") % name, pkg='latex')
break
# Then look for a Python module
if not mod:
f = None # so finally works even if find_module raises an exception
try:
(f, path, (suffix, mode, file_type)) = imp.find_module (
name,
rubber.latex_modules.__path__)
if f == None or suffix != ".py" or file_type != imp.PY_SOURCE:
raise ImportError
source = imp.load_module (name, f, path, (suffix, mode, file_type))
except ImportError:
msg.debug(_("no support found for %s") % name, pkg='latex')
return 0
finally:
if f != None:
f.close ()
if not (hasattr (source, "Module")
and issubclass (source.Module, rubber.module_interface.Module)):
## oracleyue: fix annoying error msg for nabib
# msg.error (_("{}.Module must subclass rubber.module_interface.Module".format (name)))
return 0
mod = source.Module (document=self.env, context=context)
msg.log (_("built-in module %s registered") % name, pkg='latex')
# Run any delayed commands.
if self.commands.has_key(name):
for (cmd, args, vars) in self.commands[name]:
msg.push_pos(vars)
try:
# put the variables as they were when the directive was
# found
saved_vars = self.env.vars
self.env.vars = vars
try:
# call the command
mod.command(cmd, args)
finally:
# restore the variables to their current state
self.env.vars = saved_vars
except AttributeError:
msg.warn(_("unknown directive '%s.%s'") % (name, cmd))
except TypeError:
msg.warn(_("wrong syntax for '%s.%s'") % (name, cmd))
msg.pop_pos()
del self.commands[name]
self.objects[name] = mod
return 1
def command (self, mod, cmd, args):
"""
Send a command to a particular module. If this module is not loaded,
store the command so that it will be sent when the module is registered.
"""
if self.objects.has_key(mod):
self.objects[mod].command(cmd, args)
else:
if not self.commands.has_key(mod):
self.commands[mod] = []
self.commands[mod].append((cmd, args, self.env.vars))
#---- Log parser ----{{{1
re_loghead = re.compile("This is [0-9a-zA-Z-]*")
re_file = re.compile("(\\((?P<file>[^ \n\t(){}]*)|\\))")
re_badbox = re.compile(r"(Ov|Und)erfull \\[hv]box ")
re_line = re.compile(r"(l\.(?P<line>[0-9]+)( (?P<code>.*))?$|<\*>)")
re_cseq = re.compile(r".*(?P<seq>(\\|\.\.\.)[^ ]*) ?$")
re_macro = re.compile(r"^(?P<macro>\\.*) ->")
re_page = re.compile("\[(?P<num>[0-9]+)\]")
re_atline = re.compile(
"( detected| in paragraph)? at lines? (?P<line>[0-9]*)(--(?P<last>[0-9]*))?")
re_reference = re.compile("LaTeX Warning: Reference `(?P<ref>.*)' \
on page (?P<page>[0-9]*) undefined on input line (?P<line>[0-9]*)\\.$")
re_label = re.compile("LaTeX Warning: (?P<text>Label .*)$")
re_warning = re.compile(
"(LaTeX|Package)( (?P<pkg>.*))? Warning: (?P<text>.*)$")
re_online = re.compile("(; reported)? on input line (?P<line>[0-9]*)")
re_ignored = re.compile("; all text was ignored after line (?P<line>[0-9]*).$")
class LogCheck (object):
"""
This class performs all the extraction of information from the log file.
For efficiency, the instances contain the whole file as a list of strings
so that it can be read several times with no disk access.
"""
#-- Initialization {{{2
def __init__ (self):
self.lines = None
def readlog (self, name, limit):
"""
Read the specified log file, checking that it was produced by the
right compiler. Returns False if the log file is invalid or does not
exist.
"""
self.lines = None
try:
with open (name) as fp:
line = fp.readline ()
if not line or not re_loghead.match (line):
msg.log (_('empty log'), pkg='latex')
return False
# do not read the whole log unconditionally
whole_file = fp.read (limit)
self.lines = whole_file.split ('\n')
if fp.read (1) != '':
# more data to be read
msg.warn (_('log file is very long, and will not be read completely.'), pkg='latex')
return True
except IOError:
msg.log (_('IO Error with log'), pkg='latex')
return False
#-- Process information {{{2
def errors (self):
"""
Returns true if there was an error during the compilation.
"""
skipping = 0
for line in self.lines:
if line.strip() == "":
skipping = 0
continue
if skipping:
continue
m = re_badbox.match(line)
if m:
skipping = 1
continue
if line[0] == "!":
# We check for the substring "pdfTeX warning" because pdfTeX
# sometimes issues warnings (like undefined references) in the
# form of errors...
if string.find(line, "pdfTeX warning") == -1:
return 1
return 0
#-- Information extraction {{{2
def continued (self, line):
"""
Check if a line in the log is continued on the next line. This is
needed because TeX breaks messages at 79 characters per line. We make
this into a method because the test is slightly different in Metapost.
"""
return len(line) == 79
def parse (self, errors=0, boxes=0, refs=0, warnings=0):
"""
Parse the log file for relevant information. The named arguments are
booleans that indicate which information should be extracted:
- errors: all errors
- boxes: bad boxes
- refs: warnings about references
- warnings: all other warnings
The function returns a generator. Each generated item is a dictionary
that contains (some of) the following entries:
- kind: the kind of information ("error", "box", "ref", "warning")
- text: the text of the error or warning
- code: the piece of code that caused an error
- file, line, last, pkg: as used by Message.format_pos.
"""
if not self.lines:
return
last_file = None
pos = [last_file]
page = 1
parsing = 0 # 1 if we are parsing an error's text
skipping = 0 # 1 if we are skipping text until an empty line
something = 0 # 1 if some error was found
prefix = None # the prefix for warning messages from packages
accu = "" # accumulated text from the previous line
macro = None # the macro in which the error occurs
cseqs = {} # undefined control sequences so far
for line in self.lines:
# TeX breaks messages at 79 characters, just to make parsing
# trickier...
if not parsing and self.continued(line):
accu += line
continue
line = accu + line
accu = ""
# Text that should be skipped (from bad box messages)
if prefix is None and line == "":
skipping = 0
continue
if skipping:
continue
# Errors (including aborted compilation)
if parsing:
if error == "Undefined control sequence.":
# This is a special case in order to report which control
# sequence is undefined.
m = re_cseq.match(line)
if m:
seq = m.group("seq")
if cseqs.has_key(seq):
error = None
else:
cseqs[seq] = None
error = "Undefined control sequence %s." % m.group("seq")
m = re_macro.match(line)
if m:
macro = m.group("macro")
m = re_line.match(line)
if m:
parsing = 0
skipping = 1
pdfTeX = string.find(line, "pdfTeX warning") != -1
if error is not None and ((pdfTeX and warnings) or (errors and not pdfTeX)):
if pdfTeX:
d = {
"kind": "warning",
"pkg": "pdfTeX",
"text": error[error.find(":")+2:]
}
else:
d = {
"kind": "error",
"text": error
}
d.update( m.groupdict() )
m = re_ignored.search(error)
if m:
d["file"] = last_file
if d.has_key("code"):
del d["code"]
d.update( m.groupdict() )
elif pos[-1] is None:
d["file"] = last_file
else:
d["file"] = pos[-1]
if macro is not None:
d["macro"] = macro
macro = None
yield d
elif line[0] == "!":
error = line[2:]
elif line[0:3] == "***":
parsing = 0
skipping = 1
if errors:
yield {
"kind": "abort",
"text": error,
"why" : line[4:],
"file": last_file
}
elif line[0:15] == "Type X to quit ":
parsing = 0
skipping = 0
if errors:
yield {
"kind": "error",
"text": error,
"file": pos[-1]
}
continue
if len(line) > 0 and line[0] == "!":
error = line[2:]
parsing = 1
continue
if line == "Runaway argument?":
error = line
parsing = 1
continue
if line[:17] == "Output written on":
continue
# Long warnings
if prefix is not None:
if line[:len(prefix)] == prefix:
text.append(string.strip(line[len(prefix):]))
else:
text = " ".join(text)
m = re_online.search(text)
if m:
info["line"] = m.group("line")
text = text[:m.start()] + text[m.end():]
if warnings:
info["text"] = text
d = { "kind": "warning" }
d.update( info )
yield d
prefix = None
continue
# Undefined references
m = re_reference.match(line)
if m:
if refs:
d = {
"kind": "warning",
"text": _("Reference `%s' undefined.") % m.group("ref"),
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
m = re_label.match(line)
if m:
if refs:
d = {
"kind": "warning",
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
# Other warnings
if line.find("Warning") != -1:
m = re_warning.match(line)
if m:
info = m.groupdict()
info["file"] = pos[-1]
info["page"] = page
if info["pkg"] is None:
del info["pkg"]
prefix = ""
else:
prefix = ("(%s)" % info["pkg"])
prefix = prefix.ljust(m.start("text"))
text = [info["text"]]
continue
# Bad box messages
m = re_badbox.match(line)
if m:
if boxes:
mpos = { "file": pos[-1], "page": page }
m = re_atline.search(line)
if m:
md = m.groupdict()
for key in "line", "last":
if md[key]: mpos[key] = md[key]
line = line[:m.start()]
d = {
"kind": "warning",
"text": line
}
d.update( mpos )
yield d
skipping = 1
continue
# If there is no message, track source names and page numbers.
last_file = self.update_file(line, pos, last_file)
page = self.update_page(line, page)
def get_errors (self):
return self.parse(errors=1)
def get_boxes (self):
return self.parse(boxes=1)
def get_references (self):
return self.parse(refs=1)
def get_warnings (self):
return self.parse(warnings=1)
def update_file (self, line, stack, last):
"""
Parse the given line of log file for file openings and closings and
update the list `stack'. Newly opened files are at the end, therefore
stack[1] is the main source while stack[-1] is the current one. The
first element, stack[0], contains the value None for errors that may
happen outside the source. Return the last file from which text was
read (the new stack top, or the one before the last closing
parenthesis).
"""
m = re_file.search(line)
while m:
if line[m.start()] == '(':
last = m.group("file")
stack.append(last)
else:
last = stack[-1]
del stack[-1]
line = line[m.end():]
m = re_file.search(line)
return last
def update_page (self, line, before):
"""
Parse the given line and return the number of the page that is being
built after that line, assuming the current page before the line was
`before'.
"""
ms = re_page.findall(line)
if ms == []:
return before
return int(ms[-1]) + 1
#---- Parsing and compiling ----{{{1
re_command = re.compile("%[% ]*rubber: *(?P<cmd>[^ ]*) *(?P<arg>.*).*")
class SourceParser (rubber.tex.Parser):
"""
Extends the general-purpose TeX parser to handle Rubber directives in the
comment lines.
"""
def __init__ (self, file, dep):
super (SourceParser, self).__init__(file)
self.latex_dep = dep
def read_line (self):
while rubber.tex.Parser.read_line(self):
match = re_command.match(self.line.strip())
if match is None:
return True
vars = dict(self.latex_dep.vars.items())
vars['line'] = self.pos_line
args = parse_line(match.group("arg"), vars)
self.latex_dep.command(match.group("cmd"), args, vars)
return False
def skip_until (self, expr):
regexp = re.compile(expr)
while rubber.tex.Parser.read_line(self):
match = regexp.match(self.line)
if match is None:
continue
self.line = self.line[match.end():]
self.pos_char += match.end()
return
class EndDocument:
""" This is the exception raised when \\end{document} is found. """
pass
class EndInput:
""" This is the exception raised when \\endinput is found. """
pass
class LaTeXDep (rubber.depend.Node):
"""
This class represents dependency nodes for LaTeX compilation. It handles
the cyclic LaTeX compilation until a stable output, including actual
compilation (with a parametrable executable) and possible processing of
compilation results (e.g. running BibTeX).
Before building (or cleaning) the document, the method `parse' must be
called to load and configure all required modules. Text lines are read
from the files and parsed to extract LaTeX macro calls. When such a macro
is found, a handler is searched for in the `hooks' dictionary. Handlers
are called with one argument: the dictionary for the regular expression
that matches the macro call.
"""
#-- Initialization {{{2
def __init__ (self, env, src, job):
"""
Initialize the environment. This prepares the processing steps for the
given file (all steps are initialized empty) and sets the regular
expressions and the hook dictionary.
"""
super (LaTeXDep, self).__init__(env.depends)
self.env = env
self.log = LogCheck()
self.modules = Modules(self)
self.vars = Variables(env.vars, {
"program": "latex",
"engine": "TeX",
"paper": "",
"arguments": [],
"src-specials": "",
"source": None,
"target": None,
"path": None,
"base": None,
"ext": None,
"job": None,
"logfile_limit": 1000000,
"graphics_suffixes" : [] })
self.cmdline = ["\\nonstopmode", "\\input{%s}"]
# the initial hooks:
self.hooks = {
"begin": ("a", self.h_begin),
"end": ("a", self.h_end),
"pdfoutput": ("", self.h_pdfoutput),
"input" : ("", self.h_input),
"include" : ("a", self.h_include),
"includeonly": ("a", self.h_includeonly),
"usepackage" : ("oa", self.h_usepackage),
"RequirePackage" : ("oa", self.h_usepackage),
"documentclass" : ("oa", self.h_documentclass),
"LoadClass" : ("oa", self.h_documentclass),
# LoadClassWithOptions doesn't take optional arguments, but
# we recycle the same handler
"LoadClassWithOptions" : ("oa", self.h_documentclass),
"tableofcontents" : ("", self.h_tableofcontents),
"listoffigures" : ("", self.h_listoffigures),
"listoftables" : ("", self.h_listoftables),
"bibliography" : ("a", self.h_bibliography),
"bibliographystyle" : ("a", self.h_bibliographystyle),
"endinput" : ("", self.h_endinput)
}
self.begin_hooks = {
"verbatim": self.h_begin_verbatim,
"verbatim*": lambda loc: self.h_begin_verbatim(loc, env="verbatim\\*")
}
self.end_hooks = {
"document": self.h_end_document
}
self.hooks_changed = True
self.include_only = {}
# FIXME interim solution for BibTeX module -- rewrite it.
self.aux_files = []
# description of the building process:
self.onchange_md5 = {}
self.onchange_cmd = {}
self.removed_files = []
# state of the builder:
self.processed_sources = {}
self.failed_module = None
self.set_source (src, job)
def set_source (self, path, jobname):
"""
Specify the main source for the document. The exact path and file name
are determined, and the source building process is updated if needed,
according to the source file's extension. The optional argument
'jobname' can be used to set the job name to something other than
the base of the file name.
"""
assert os.path.exists(path)
self.sources = []
self.vars['source'] = path
(src_path, name) = os.path.split(path)
self.vars['path'] = src_path
# derive jobname, which latex uses as the basename for all output
(job, self.vars['ext']) = os.path.splitext(name)
if jobname is None:
self.set_job = 0
else:
self.set_job = 1
job = jobname
self.vars['job'] = job
if src_path == "":
src_path = "."
self.vars['base'] = job
else:
self.env.path.append(src_path)
self.vars['base'] = os.path.join(src_path, job)
source = path
prefix = os.path.join(self.vars["cwd"], "")
if source[:len(prefix)] == prefix:
comp_name = source[len(prefix):]
else:
comp_name = source
if comp_name.find('"') >= 0:
msg.error(_("The filename contains \", latex cannot handle this."))
return 1
for c in " \n\t()":
if source.find(c) >= 0:
msg.warn(_("Source path uses special characters, error tracking might get confused."))
break
self.add_product (self.basename (with_suffix=".dvi"))
self.add_product (self.basename (with_suffix=".log"))
# always expect a primary aux file
self.new_aux_file (self.basename (with_suffix=".aux"))
self.add_product (self.basename (with_suffix=".synctex.gz"))
return 0
def basename (self, with_suffix=""):
return self.vars["job"] + with_suffix
def set_primary_product_suffix (self, suffix=".dvi"):
"""Change the suffix of the primary product"""
del self.set[self.products[0]]
self.products[0] = self.basename (with_suffix=suffix)
self.add_product (self.products[0])
def new_aux_file (self, aux_file):
"""Register a new latex .aux file"""
self.add_source (aux_file, track_contents=True)
self.add_product (aux_file)
self.aux_files.append (aux_file)
def includeonly (self, files):
"""
Use partial compilation, by appending a call to \\includeonly to the
command line used for compilation.
"""
if self.vars["engine"] == "VTeX":
msg.error(_("I don't know how to do partial compilation on VTeX."))
return
if self.cmdline[-2][:13] == "\\includeonly{":
self.cmdline[-2] = "\\includeonly{" + ",".join(files) + "}"
else:
self.cmdline.insert(-1, "\\includeonly{" + ",".join(files) + "}")
for f in files:
self.include_only[f] = None
def source (self):
"""
Return the main source's complete filename.
"""
return self.vars['source']
#-- LaTeX source parsing {{{2
def parse (self):
"""
Parse the source for packages and supported macros.
"""
try:
self.process(self.source())
except EndDocument:
pass
msg.log(_("dependencies: %r") % self.sources, pkg='latex')
def parse_file (self, file):
"""
Process a LaTeX source. The file must be open, it is read to the end
calling the handlers for the macro calls. This recursively processes
the included sources.
"""
parser = SourceParser(file, self)
parser.set_hooks(self.hooks.keys())
self.hooks_changed = False
while True:
if self.hooks_changed:
parser.set_hooks(self.hooks.keys())
self.hooks_changed = False
token = parser.next_hook()
if token.cat == EOF:
break
format, function = self.hooks[token.val]
args = []
for arg in format:
if arg == '*':
args.append(parser.get_latex_star())
elif arg == 'a':
args.append(parser.get_argument_text())
elif arg == 'o':
args.append(parser.get_latex_optional_text())
self.parser = parser
self.vars['line'] = parser.pos_line
function(self.vars, *args)
def process (self, path):
"""
This method is called when an included file is processed. The argument
must be a valid file name.
"""
if self.processed_sources.has_key(path):
msg.debug(_("%s already parsed") % path, pkg='latex')
return
self.processed_sources[path] = None
if path not in self.sources:
self.add_source(path)
try:
saved_vars = self.vars
try:
msg.log(_("parsing %s") % path, pkg='latex')
self.vars = Variables(saved_vars,
{ "file": path, "line": None })
file = open(path)
try:
self.parse_file(file)
finally:
file.close()
finally:
self.vars = saved_vars
msg.debug(_("end of %s") % path, pkg='latex')
except EndInput:
pass
def input_file (self, name, loc={}):
"""
Treat the given name as a source file to be read. If this source can
be the result of some conversion, then the conversion is performed,
otherwise the source is parsed. The returned value is a couple
(name,dep) where `name' is the actual LaTeX source and `dep' is
its dependency node. The return value is (None,None) if the source
could neither be read nor built.
"""
if name.find("\\") >= 0 or name.find("#") >= 0:
return None, None
for path in self.env.path:
pname = os.path.join(path, name)
dep = self.env.convert(pname, suffixes=[".tex",""], context=self.vars)
if dep:
file = dep.products[0]
else:
file = self.env.find_file(name, ".tex")
if not file:
continue
dep = None
self.add_source(file)
if dep is None or dep.is_leaf():
self.process(file)
if dep is None:
return file, self.set[file]
else:
return file, dep
return None, None
#-- Directives {{{2
def command (self, cmd, args, pos=None):
"""
Execute the rubber command 'cmd' with arguments 'args'. This is called
when a command is found in the source file or in a configuration file.
A command name of the form 'foo.bar' is considered to be a command
'bar' for module 'foo'. The argument 'pos' describes the position
(file and line) where the command occurs.
"""
if pos is None:
pos = self.vars
# Calls to this method are actually translated into calls to "do_*"
# methods, except for calls to module directives.
lst = string.split(cmd, ".", 1)
#try:
if len(lst) > 1:
self.modules.command(lst[0], lst[1], args)
elif not hasattr(self, "do_" + cmd):
msg.warn(_("unknown directive '%s'") % cmd, **pos)
else:
msg.log(_("directive: %s") % ' '.join([cmd]+args), pkg='latex')
getattr(self, "do_" + cmd)(*args)
#except TypeError:
# msg.warn(_("wrong syntax for '%s'") % cmd, **pos)
def do_alias (self, name, val):
if self.hooks.has_key(val):
self.hooks[name] = self.hooks[val]
self.hooks_changed = True
def do_clean (self, *args):
for file in args:
self.removed_files.append(file)
def do_depend (self, *args):
for arg in args:
file = self.env.find_file(arg)
if file:
self.add_source(file)
else:
msg.warn(_("dependency '%s' not found") % arg, **self.vars)
def do_make (self, file, *args):
vars = { "target": file }
while len(args) > 1:
if args[0] == "from":
vars["source"] = args[1]
elif args[0] == "with":
vars["name"] = args[1]
else:
break
args = args[2:]
if len(args) != 0:
msg.error(_("invalid syntax for 'make'"), **self.vars)
return
self.env.conv_set(file, vars)
def do_module (self, mod, opt=None):
self.modules.register (mod, context = {'arg':mod, 'opt':opt})
def do_onchange (self, file, cmd):
self.onchange_cmd[file] = cmd
self.onchange_md5[file] = md5_file(file)
def do_paper (self, arg):
self.vars["paper"] = arg
def do_path (self, name):
self.env.path.append(name)
def do_read (self, name):
saved_vars = self.vars
try:
self.vars = Variables (self.vars,
{ "file": name, "line": None })
with open(name) as file:
lineno = 0
for line in file:
lineno += 1
line = line.strip()
if line == "" or line[0] == "%":
continue
self.vars["line"] = lineno
lst = parse_line(line, self.vars)
self.command(lst[0], lst[1:])
except IOError:
msg.warn(_("cannot read option file %s") % name, **self.vars)
finally:
self.vars = saved_vars
def do_rules (self, file):
name = self.env.find_file(file)
if name is None:
msg.warn(_("cannot read rule file %s") % file, **self.vars)
else:
self.env.converter.read_ini(name)
def do_set (self, name, val):
try:
if type (self.vars[name]) is list:
msg.warn (_("cannot set list-type variable to scalar: set %s %s (ignored; use setlist, not set)") % (name, val))
return
if type (self.vars[name]) is int:
try:
val = int (val)
except:
msg.warn (_("cannot set int variable %s to value %s (ignored)") % (name, val))
return
self.vars[name] = val
except KeyError:
msg.warn(_("unknown variable: %s") % name, **self.vars)
def do_shell_escape (self):
self.env.doc_requires_shell_ = True
def do_synctex (self):
self.env.synctex = True
def do_setlist (self, name, *val):
try:
self.vars[name] = list(val)
except KeyError:
msg.warn(_("unknown variable: %s") % name, **self.vars)
def do_produce (self, *args):
for arg in args:
self.add_product(arg)
def do_watch (self, *args):
for arg in args:
self.watch_file(arg)
#-- Macro handling {{{2
def hook_macro (self, name, format, fun):
self.hooks[name] = (format, fun)
self.hooks_changed = True
def hook_begin (self, name, fun):
self.begin_hooks[name] = fun
def hook_end (self, name, fun):
self.end_hooks[name] = fun
# Now the macro handlers:
def h_begin (self, loc, env):
if self.begin_hooks.has_key(env):
self.begin_hooks[env](loc)
def h_end (self, loc, env):
if self.end_hooks.has_key(env):
self.end_hooks[env](loc)
def h_pdfoutput (self, loc):
"""
Called when \\pdfoutput is found. Tries to guess if it is a definition
that asks for the output to be in PDF or DVI.
"""
parser = self.parser
token = parser.get_token()
if token.raw == '=':
token2 = parser.get_token()
if token2.raw == '0':
mode = 0
elif token2.raw == '1':
mode = 1
else:
parser.put_token(token2)
return
elif token.raw == '0':
mode = 0
elif token.raw == '1':
mode = 1
else:
parser.put_token(token)
return
if mode == 0:
if 'pdftex' in self.modules:
self.modules['pdftex'].mode_dvi()
else:
self.modules.register('pdftex', {'opt': 'dvi'})
else:
if 'pdftex' in self.modules:
self.modules['pdftex'].mode_pdf()
else:
self.modules.register('pdftex')
def h_input (self, loc):
"""
Called when an \\input macro is found. This calls the `process' method
if the included file is found.
"""
token = self.parser.get_token()
if token.cat == OPEN:
file = self.parser.get_group_text()
else:
file = ""
while token.cat not in (EOF, SPACE, END_LINE):
file += token.raw
token = self.parser.get_token()
self.input_file(file, loc)
def h_include (self, loc, filename):
"""
Called when an \\include macro is found. This includes files into the
source in a way very similar to \\input, except that LaTeX also
creates .aux files for them, so we have to notice this.
"""
if self.include_only and not self.include_only.has_key(filename):
return
file, _ = self.input_file(filename, loc)
if file:
self.new_aux_file (filename + ".aux")
def h_includeonly (self, loc, files):
"""
Called when the macro \\includeonly is found. It records the
comma-separated list of files that should be included, so that the
other \\include macros are ignored.
"""
self.include_only = {}
for name in files.split(","):
name = name.strip()
if name != "":
self.include_only[name] = None
def h_documentclass (self, loc, opt, name):
"""
Called when the macro \\documentclass is found. It almost has the same
effect as `usepackage': if the source's directory contains the class
file, that file is treated as an input; otherwise a
module is searched for to support the class.
"""
file = self.env.find_file(name + ".cls")
if file:
self.process(file)
else:
self.modules.register (name,
context = Variables (self.vars, {'opt': opt}))
def h_usepackage (self, loc, opt, names):
"""
Called when a \\usepackage macro is found. If there is a package in the
directory of the source file, then it is treated as an include file
unless there is a supporting module in the current directory,
otherwise it is treated as a package.
"""
for name in string.split(names, ","):
name = name.strip()
if name == '': continue # \usepackage{a,}
file = self.env.find_file(name + ".sty")
if file and not os.path.exists(name + ".py"):
self.process(file)
else:
self.modules.register (name,
context = Variables (self.vars, {'opt':opt}))
def h_tableofcontents (self, loc):
self.add_product(self.basename (with_suffix=".toc"))
self.add_source(self.basename (with_suffix=".toc"), track_contents=True)
def h_listoffigures (self, loc):
self.add_product(self.basename (with_suffix=".lof"))
self.add_source(self.basename (with_suffix=".lof"), track_contents=True)
def h_listoftables (self, loc):
self.add_product(self.basename (with_suffix=".lot"))
self.add_source(self.basename (with_suffix=".lot"), track_contents=True)
def h_bibliography (self, loc, names):
"""
Called when the macro \\bibliography is found. This method actually
registers the module bibtex (if not already done) and registers the
databases.
"""
self.modules.register ("bibtex")
# This registers the actual hooks, so that subsequent occurrences of
# \bibliography and \bibliographystyle will be caught by the module.
# However, the first time, we have to call the hooks from here. The
# line below assumes that the new hook has the same syntax.
self.hooks['bibliography'][1](loc, names)
def h_bibliographystyle (self, loc, name):
"""
Called when \\bibliographystyle is found. This registers the module
bibtex (if not already done) and calls the method set_style() of the
module.
"""
self.modules.register ("bibtex")
# The same remark as in 'h_bibliography' applies here.
self.hooks['bibliographystyle'][1](loc, name)
def h_begin_verbatim (self, loc, env="verbatim"):
"""
Called when \\begin{verbatim} is found. This disables all macro
handling and comment parsing until the end of the environment. The
optional argument 'end' specifies the end marker, by default it is
"\\end{verbatim}".
"""
self.parser.skip_until(r"[ \t]*\\end\{%s\}.*" % env)
def h_endinput (self, loc):
"""
Called when \\endinput is found. This stops the processing of the
current input file, thus ignoring any code that appears afterwards.
"""
raise EndInput
def h_end_document (self, loc):
"""
Called when \\end{document} is found. This stops the processing of any
input file, thus ignoring any code that appears afterwards.
"""
raise EndDocument
#-- Compilation steps {{{2
def compile (self):
"""
Run one LaTeX compilation on the source. Return true on success or
false if errors occurred.
"""
msg.progress(_("compiling %s") % msg.simplify(self.source()))
file = self.source()
prefix = os.path.join(self.vars["cwd"], "")
if file[:len(prefix)] == prefix:
file = file[len(prefix):]
if file.find(" ") >= 0:
file = '"%s"' % file
cmd = [self.vars["program"]]
if self.set_job:
if self.vars["engine"] == "VTeX":
msg.error(_("I don't know how set the job name with %s.")
% self.vars["engine"])
else:
cmd.append("-jobname=" + self.vars["job"])
specials = self.vars["src-specials"]
if specials != "":
if self.vars["engine"] == "VTeX":
msg.warn(_("I don't know how to make source specials with %s.")
% self.vars["engine"])
self.vars["src-specials"] = ""
elif specials == "yes":
cmd.append("-src-specials")
else:
cmd.append("-src-specials=" + specials)
if self.env.is_in_unsafe_mode_:
cmd += [ '--shell-escape' ]
elif self.env.doc_requires_shell_:
msg.error (_("the document tries to run external programs which could be dangerous. use rubber --unsafe if the document is trusted."))
if self.env.synctex:
cmd += [ "-synctex=1" ]
# make sure the arguments actually are a list, otherwise the
# characters of the string might be passed as individual arguments
assert type (self.vars["arguments"]) is list
# arguments inserted by the document allowed only in unsafe mode, since
# this could do arbitrary things such as enable shell escape (write18)
if self.env.is_in_unsafe_mode_:
cmd += self.vars["arguments"]
elif len (self.vars["arguments"]) > 0:
msg.error (_("the document tries to modify the LaTeX command line which could be dangerous. use rubber --unsafe if the document is trusted."))
cmd += [x.replace("%s",file) for x in self.cmdline]
# Remove the CWD from elements in the path, to avoid potential problems
# with special characters if there are any (except that ':' in paths
# is not handled).
prefix = self.env.vars["cwd"]
prefix_ = os.path.join(prefix, "")
paths = []
for p in self.env.path:
if p == prefix:
paths.append(".")
elif p[:len(prefix_)] == prefix_:
paths.append("." + p[len(prefix):])
else:
paths.append(p)
inputs = string.join(paths, ":")
if inputs == "":
env = {}
else:
inputs = inputs + ":" + os.getenv("TEXINPUTS", "")
env = {"TEXINPUTS": inputs}
self.env.execute(cmd, env, kpse=1)
if not self.parse_log ():
msg.error(_("Running %s failed.") % cmd[0])
return False
if self.log.errors():
return False
if not os.access(self.products[0], os.F_OK):
msg.error(_("Output file `%s' was not produced.") %
msg.simplify(self.products[0]))
return False
return True
def parse_log (self):
logfile_name = self.basename (with_suffix=".log")
logfile_limit = self.vars["logfile_limit"]
return self.log.readlog (logfile_name, logfile_limit)
def pre_compile (self):
"""
Prepare the source for compilation using package-specific functions.
This function must return False on failure.
"""
msg.log(_("building additional files..."), pkg='latex')
for mod in self.modules.objects.values():
if not mod.pre_compile():
self.failed_module = mod
return False
return True
def post_compile (self):
"""
Run the package-specific operations that are to be performed after
each compilation of the main source. Returns true on success or false
on failure.
"""
msg.log(_("running post-compilation scripts..."), pkg='latex')
for file, md5 in self.onchange_md5.items():
new = md5_file(file)
if md5 != new:
self.onchange_md5[file] = new
if new != None:
msg.progress(_("running %s") % self.onchange_cmd[file])
# FIXME portability issue: explicit reference to shell
self.env.execute(["sh", "-c", self.onchange_cmd[file]])
for mod in self.modules.objects.values():
if not mod.post_compile():
self.failed_module = mod
return False
return True
def clean (self):
"""
Remove all files that are produced by compilation.
"""
super (LaTeXDep, self).clean ()
for file in self.removed_files:
rubber.util.verbose_remove (file, pkg = "latex")
msg.log(_("cleaning additional files..."), pkg='latex')
for mod in self.modules.objects.values():
mod.clean()
#-- Building routine {{{2
def run (self):
"""
Run the building process until the last compilation, or stop on error.
This method supposes that the inputs were parsed to register packages
and that the LaTeX source is ready; at least one
compilation is always performed. As specified by the
parent class, the method returns True on success and False on
failure.
"""
if not self.pre_compile():
return False
# If an error occurs after this point, it will be while LaTeXing.
self.failed_dep = self
self.failed_module = None
if not self.compile():
return False
if not self.post_compile():
return False
# Finally there was no error.
self.failed_dep = None
return True
#-- Utility methods {{{2
def get_errors (self):
if self.failed_module is None:
return self.log.get_errors()
else:
return self.failed_module.get_errors()
def watch_file (self, filename):
"""
Register the given file (typically "jobname.toc" or such) to be
watched. When the file changes during a compilation, it means that
another compilation has to be done.
"""
self.add_source (filename, track_contents=True)
def remove_suffixes (self, list):
"""
Remove all files derived from the main source with one of the
specified suffixes.
"""
for suffix in list:
file = self.basename (with_suffix=suffix)
rubber.util.verbose_remove (file, pkg = "latex")
class ScriptModule (rubber.module_interface.Module):
# TODO: the constructor is not conformant with the one of the parent class.
"""
This class represents modules that are defined as Rubber scripts.
"""
def __init__ (self, env, filename):
vars = Variables(env.vars, {
'file': filename,
'line': None })
lineno = 0
with open(filename) as file:
for line in file:
line = line.strip()
lineno = lineno + 1
if line == "" or line[0] == "%":
continue
vars['line'] = lineno
lst = parse_line(line, vars)
env.command(lst[0], lst[1:], vars)
| gpl-2.0 |
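The LogCheck class above can also be used on its own to inspect an existing log file. A minimal sketch, assuming the module is importable as rubber.converters.latex (inferred from this file's path in the tree) and that paper.log exists (Python 2, like the module itself):

from rubber.converters.latex import LogCheck

log = LogCheck()
if log.readlog('paper.log', 1000000):
    for err in log.get_errors():
        print '%s:%s: %s' % (err.get('file'), err.get('line'), err['text'])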
VinceZK/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py | 122 | 14927 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import logging
from .bug import Bug
from .attachment import Attachment
from webkitpy.common.config.committers import CommitterList, Reviewer
_log = logging.getLogger(__name__)
def _id_to_object_dictionary(*objects):
dictionary = {}
for thing in objects:
dictionary[thing["id"]] = thing
return dictionary
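# e.g. _id_to_object_dictionary({"id": 1}, {"id": 2}) returns
# {1: {"id": 1}, 2: {"id": 2}}; used below to key mock bugs and attachments.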
# Testing
_patch1 = {
"id": 10000,
"bug_id": 50000,
"url": "http://example.com/10000",
"name": "Patch1",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"reviewer_email": "[email protected]",
"commit-queue": "+",
"committer_email": "[email protected]",
"attacher_email": "Contributer1",
}
_patch2 = {
"id": 10001,
"bug_id": 50000,
"url": "http://example.com/10001",
"name": "Patch2",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"reviewer_email": "[email protected]",
"commit-queue": "+",
"committer_email": "[email protected]",
"attacher_email": "[email protected]",
}
_patch3 = {
"id": 10002,
"bug_id": 50001,
"url": "http://example.com/10002",
"name": "Patch3",
"is_obsolete": False,
"is_patch": True,
"review": "?",
"commit-queue": "-",
"attacher_email": "[email protected]",
"attach_date": datetime.datetime.today(),
}
_patch4 = {
"id": 10003,
"bug_id": 50003,
"url": "http://example.com/10002",
"name": "Patch3",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"commit-queue": "?",
"reviewer_email": "[email protected]",
"attacher_email": "Contributer2",
}
_patch5 = {
"id": 10004,
"bug_id": 50003,
"url": "http://example.com/10002",
"name": "Patch5",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"reviewer_email": "[email protected]",
"attacher_email": "[email protected]",
}
_patch6 = { # Valid committer, but no reviewer.
"id": 10005,
"bug_id": 50003,
"url": "http://example.com/10002",
"name": "ROLLOUT of r3489",
"is_obsolete": False,
"is_patch": True,
"commit-queue": "+",
"committer_email": "[email protected]",
"attacher_email": "[email protected]",
}
_patch7 = { # Valid review, patch is marked obsolete.
"id": 10006,
"bug_id": 50002,
"url": "http://example.com/10002",
"name": "Patch7",
"is_obsolete": True,
"is_patch": True,
"review": "+",
"reviewer_email": "[email protected]",
"attacher_email": "[email protected]",
}
# This matches one of Bug.unassigned_emails
_unassigned_email = "[email protected]"
# This is needed for the FlakyTestReporter to believe the bug
# was filed by one of the webkitpy bots.
_commit_queue_email = "[email protected]"
_bug1 = {
"id": 50000,
"title": "Bug with two r+'d and cq+'d patches, one of which has an "
"invalid commit-queue setter.",
"reporter_email": "[email protected]",
"assigned_to_email": _unassigned_email,
"cc_emails": [],
"attachments": [_patch1, _patch2],
"bug_status": "UNCONFIRMED",
"comments": [],
}
_bug2 = {
"id": 50001,
"title": "Bug with a patch needing review.",
"reporter_email": "[email protected]",
"assigned_to_email": "[email protected]",
"cc_emails": ["[email protected]", ],
"attachments": [_patch3],
"bug_status": "ASSIGNED",
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "[email protected]",
"text": "Message1.\nCommitted r35: <http://trac.webkit.org/changeset/35>",
},
],
}
_bug3 = {
"id": 50002,
"title": "The third bug",
"reporter_email": "[email protected]",
"assigned_to_email": _unassigned_email,
"cc_emails": [],
"attachments": [_patch7],
"bug_status": "NEW",
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "[email protected]",
"text": "Committed r30: <http://trac.webkit.org/changeset/30>",
},
{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "[email protected]",
"text": "Committed r31: <http://trac.webkit.org/changeset/31>",
},
],
}
_bug4 = {
"id": 50003,
"title": "The fourth bug",
"reporter_email": "[email protected]",
"assigned_to_email": "[email protected]",
"cc_emails": [],
"attachments": [_patch4, _patch5, _patch6],
"bug_status": "REOPENED",
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "[email protected]",
"text": "Committed r25: <http://trac.webkit.org/changeset/30>",
},
{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "[email protected]",
"text": "Rolled out in <http://trac.webkit.org/changeset/26",
},
],
}
_bug5 = {
"id": 50004,
"title": "The fifth bug",
"reporter_email": _commit_queue_email,
"assigned_to_email": "[email protected]",
"cc_emails": [],
"attachments": [],
"bug_status": "RESOLVED",
"dup_id": 50002,
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "[email protected]",
"text": "Committed r15: <http://trac.webkit.org/changeset/15>",
},
],
}
class MockBugzillaQueries(object):
def __init__(self, bugzilla):
self._bugzilla = bugzilla
def _all_bugs(self):
return map(lambda bug_dictionary: Bug(bug_dictionary, self._bugzilla),
self._bugzilla.bug_cache.values())
def fetch_bug_ids_from_commit_queue(self):
bugs_with_commit_queued_patches = filter(
lambda bug: bug.commit_queued_patches(),
self._all_bugs())
return map(lambda bug: bug.id(), bugs_with_commit_queued_patches)
def fetch_attachment_ids_from_review_queue(self):
unreviewed_patches = sum([bug.unreviewed_patches()
for bug in self._all_bugs()], [])
return map(lambda patch: patch.id(), unreviewed_patches)
def fetch_patches_from_commit_queue(self):
return sum([bug.commit_queued_patches()
for bug in self._all_bugs()], [])
def fetch_bug_ids_from_pending_commit_list(self):
bugs_with_reviewed_patches = filter(lambda bug: bug.reviewed_patches(),
self._all_bugs())
bug_ids = map(lambda bug: bug.id(), bugs_with_reviewed_patches)
# NOTE: This manual hack here is to allow testing logging in
        # test_assign_to_committer; the real pending-commit query on bugzilla
        # will return bugs with patches which have r+ but are also obsolete.
return bug_ids + [50002]
def fetch_bugs_from_review_queue(self, cc_email=None):
unreviewed_bugs = [bug for bug in self._all_bugs() if bug.unreviewed_patches()]
if cc_email:
return [bug for bug in unreviewed_bugs if cc_email in bug.cc_emails()]
return unreviewed_bugs
def fetch_patches_from_pending_commit_list(self):
return sum([bug.reviewed_patches() for bug in self._all_bugs()], [])
def fetch_bugs_matching_search(self, search_string):
return [self._bugzilla.fetch_bug(50004), self._bugzilla.fetch_bug(50003)]
def fetch_bugs_matching_quicksearch(self, search_string):
return [self._bugzilla.fetch_bug(50001), self._bugzilla.fetch_bug(50002),
self._bugzilla.fetch_bug(50003), self._bugzilla.fetch_bug(50004)]
_mock_reviewers = [Reviewer("Foo Bar", "[email protected]"),
Reviewer("Reviewer2", "[email protected]")]
# FIXME: Bugzilla is the wrong Mock-point. Once we have a BugzillaNetwork
# class we should mock that instead.
# Most of this class is just copy/paste from Bugzilla.
class MockBugzilla(object):
bug_server_url = "http://example.com"
bug_cache = _id_to_object_dictionary(_bug1, _bug2, _bug3, _bug4, _bug5)
attachment_cache = _id_to_object_dictionary(_patch1,
_patch2,
_patch3,
_patch4,
_patch5,
_patch6,
_patch7)
def __init__(self):
self.queries = MockBugzillaQueries(self)
# FIXME: This should move onto the Host object, and we should use a MockCommitterList
self.committers = CommitterList(reviewers=_mock_reviewers)
self.username = None
self._override_patch = None
def authenticate(self):
self.username = "[email protected]"
def create_bug(self,
bug_title,
bug_description,
component=None,
diff=None,
patch_description=None,
cc=None,
blocked=None,
mark_for_review=False,
mark_for_commit_queue=False):
_log.info("MOCK create_bug")
_log.info("bug_title: %s" % bug_title)
_log.info("bug_description: %s" % bug_description)
if component:
_log.info("component: %s" % component)
if cc:
_log.info("cc: %s" % cc)
if blocked:
_log.info("blocked: %s" % blocked)
return 60001
def quips(self):
return ["Good artists copy. Great artists steal. - Pablo Picasso"]
def fetch_bug(self, bug_id):
return Bug(self.bug_cache.get(int(bug_id)), self)
def set_override_patch(self, patch):
self._override_patch = patch
def fetch_attachment(self, attachment_id):
if self._override_patch:
return self._override_patch
attachment_dictionary = self.attachment_cache.get(attachment_id)
if not attachment_dictionary:
print "MOCK: fetch_attachment: %s is not a known attachment id" % attachment_id
return None
bug = self.fetch_bug(attachment_dictionary["bug_id"])
for attachment in bug.attachments(include_obsolete=True):
if attachment.id() == int(attachment_id):
return attachment
def bug_url_for_bug_id(self, bug_id):
return "%s/%s" % (self.bug_server_url, bug_id)
def fetch_bug_dictionary(self, bug_id):
return self.bug_cache.get(bug_id)
def attachment_url_for_id(self, attachment_id, action="view"):
action_param = ""
if action and action != "view":
action_param = "&action=%s" % action
return "%s/%s%s" % (self.bug_server_url, attachment_id, action_param)
def reassign_bug(self, bug_id, assignee=None, comment_text=None):
_log.info("MOCK reassign_bug: bug_id=%s, assignee=%s" % (bug_id, assignee))
if comment_text:
_log.info("-- Begin comment --")
_log.info(comment_text)
_log.info("-- End comment --")
def set_flag_on_attachment(self,
attachment_id,
flag_name,
flag_value,
comment_text=None):
_log.info("MOCK setting flag '%s' to '%s' on attachment '%s' with comment '%s'" % (
flag_name, flag_value, attachment_id, comment_text))
def post_comment_to_bug(self, bug_id, comment_text, cc=None):
_log.info("MOCK bug comment: bug_id=%s, cc=%s\n--- Begin comment ---\n%s\n--- End comment ---\n" % (
bug_id, cc, comment_text))
def add_attachment_to_bug(self, bug_id, file_or_string, description, filename=None, comment_text=None, mimetype=None):
_log.info("MOCK add_attachment_to_bug: bug_id=%s, description=%s filename=%s mimetype=%s" %
(bug_id, description, filename, mimetype))
if comment_text:
_log.info("-- Begin comment --")
_log.info(comment_text)
_log.info("-- End comment --")
def add_patch_to_bug(self,
bug_id,
diff,
description,
comment_text=None,
mark_for_review=False,
mark_for_commit_queue=False,
mark_for_landing=False):
_log.info("MOCK add_patch_to_bug: bug_id=%s, description=%s, mark_for_review=%s, mark_for_commit_queue=%s, mark_for_landing=%s" %
(bug_id, description, mark_for_review, mark_for_commit_queue, mark_for_landing))
if comment_text:
_log.info("-- Begin comment --")
_log.info(comment_text)
_log.info("-- End comment --")
def add_cc_to_bug(self, bug_id, ccs):
pass
def obsolete_attachment(self, attachment_id, message=None):
pass
def reopen_bug(self, bug_id, message):
_log.info("MOCK reopen_bug %s with comment '%s'" % (bug_id, message))
def close_bug_as_fixed(self, bug_id, message):
pass
def clear_attachment_flags(self, attachment_id, message):
pass
| bsd-3-clause |
tillahoffmann/tensorflow | tensorflow/python/ops/tensor_array_grad.py | 71 | 9083 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in tensor_array_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import tensor_array_ops
# TODO(b/31222613): These ops may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable("TensorArray")
ops.NotDifferentiable("TensorArrayGrad")
ops.NotDifferentiable("TensorArraySize")
ops.NotDifferentiable("TensorArrayClose")
ops.NotDifferentiable("TensorArrayV2")
ops.NotDifferentiable("TensorArrayGradV2")
ops.NotDifferentiable("TensorArraySizeV2")
ops.NotDifferentiable("TensorArrayCloseV2")
ops.NotDifferentiable("TensorArrayV3")
ops.NotDifferentiable("TensorArrayGradV3")
ops.NotDifferentiable("TensorArraySizeV3")
ops.NotDifferentiable("TensorArrayCloseV3")
def _GetGradSource(op_or_tensor):
"""Identify which call to tf.gradients created this gradient op or tensor.
TensorArray gradient calls use an accumulator TensorArray object. If
multiple gradients are calculated and run in the same session, the multiple
  gradient nodes may accidentally flow through the same accumulator TensorArray.
This double counting breaks the TensorArray gradient flow.
The solution is to identify which gradient call this particular
TensorArray*Grad is being called in, by looking at the input gradient
tensor's name, and create or lookup an accumulator gradient TensorArray
associated with this specific call. This solves any confusion and ensures
different gradients from the same forward graph get their own accumulators.
This function creates the unique label associated with the tf.gradients call
that is used to create the gradient TensorArray.
Args:
op_or_tensor: `Tensor` or `Operation` which is an input to a
TensorArray*Grad call.
Returns:
A python string, the unique label associated with this particular
gradients calculation.
Raises:
ValueError: If not called within a gradients calculation.
"""
name_tokens = op_or_tensor.name.split("/")
grad_pos = [i for i, x in enumerate(name_tokens) if x.startswith("gradients")]
if not grad_pos:
raise ValueError(
"Expected op/tensor name to start with gradients (excluding scope)"
", got: %s" % op_or_tensor.name)
return "/".join(name_tokens[:grad_pos[-1] + 1])
@ops.RegisterGradient("TensorArrayRead")
@ops.RegisterGradient("TensorArrayReadV2")
@ops.RegisterGradient("TensorArrayReadV3")
def _TensorArrayReadGrad(op, grad):
"""Gradient for TensorArrayRead.
Args:
op: Forward TensorArrayRead op.
grad: Gradient `Tensor` to TensorArrayRead.
Returns:
A flow `Tensor`, which can be used in control dependencies to
force the write of `grad` to the gradient `TensorArray`.
"""
# Note: the forward flow dependency in the call to grad() is necessary for
# the case of dynamic sized TensorArrays. When creating the gradient
# TensorArray, the final size of the forward array must be known.
# For this we need to wait until it has been created by depending on
# the input flow of the original op.
handle = op.inputs[0]
index = op.inputs[1]
flow = op.inputs[2]
dtype = op.get_attr("dtype")
grad_source = _GetGradSource(grad)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
w_g = g.write(index, grad)
return [None, None, w_g.flow]
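# Note the duality with _TensorArrayWriteGrad below: the gradient of a forward
# read is a write of `grad` into the accumulator at the same index, and the
# gradient of a forward write is a read from it; both sides look up the same
# accumulator via the grad_source label computed above.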
@ops.RegisterGradient("TensorArrayWrite")
@ops.RegisterGradient("TensorArrayWriteV2")
@ops.RegisterGradient("TensorArrayWriteV3")
def _TensorArrayWriteGrad(op, flow):
"""Gradient for TensorArrayWrite.
Args:
op: Forward TensorArrayWrite op.
flow: Gradient `Tensor` flow to TensorArrayWrite.
Returns:
A grad `Tensor`, the gradient created in an upstream ReadGrad or PackGrad.
"""
# handle is the output store_handle of TensorArrayReadGrad or
  # the handle output of TensorArrayWriteGrad; we must use this one.
handle = op.inputs[0]
index = op.inputs[1]
dtype = op.get_attr("T")
grad_source = _GetGradSource(flow)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
grad = g.read(index)
return [None, None, grad, flow]
@ops.RegisterGradient("TensorArrayGather")
@ops.RegisterGradient("TensorArrayGatherV2")
@ops.RegisterGradient("TensorArrayGatherV3")
def _TensorArrayGatherGrad(op, grad):
"""Gradient for TensorArrayGather.
Args:
op: Forward TensorArrayGather op.
grad: Gradient `Tensor` to TensorArrayGather.
Returns:
A flow `Tensor`, which can be used in control dependencies to
force the write of `grad` to the gradient `TensorArray`.
"""
# Note: the forward flow dependency in the call to grad() is necessary for
# the case of dynamic sized TensorArrays. When creating the gradient
# TensorArray, the final size of the forward array must be known.
# For this we need to wait until it has been created by depending on
# the input flow of the original op.
handle = op.inputs[0]
indices = op.inputs[1]
flow = op.inputs[2]
dtype = op.get_attr("dtype")
grad_source = _GetGradSource(grad)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
u_g = g.scatter(indices, grad)
return [None, None, u_g.flow]
@ops.RegisterGradient("TensorArrayScatter")
@ops.RegisterGradient("TensorArrayScatterV2")
@ops.RegisterGradient("TensorArrayScatterV3")
def _TensorArrayScatterGrad(op, flow):
"""Gradient for TensorArrayScatter.
Args:
op: Forward TensorArrayScatter op.
flow: Gradient `Tensor` flow to TensorArrayScatter.
Returns:
A grad `Tensor`, the gradient created in upstream ReadGrads or PackGrad.
"""
handle = op.inputs[0]
indices = op.inputs[1]
dtype = op.get_attr("T")
grad_source = _GetGradSource(flow)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
grad = g.gather(indices)
return [None, None, grad, flow]
@ops.RegisterGradient("TensorArrayConcat")
@ops.RegisterGradient("TensorArrayConcatV2")
@ops.RegisterGradient("TensorArrayConcatV3")
def _TensorArrayConcatGrad(op, grad, unused_lengths_grad):
"""Gradient for TensorArrayConcat.
Args:
op: Forward TensorArrayConcat op.
grad: Gradient `Tensor` to TensorArrayConcat.
Returns:
A flow `Tensor`, which can be used in control dependencies to
force the write of `grad` to the gradient `TensorArray`.
"""
# Note: the forward flow dependency in the call to grad() is necessary for
# the case of dynamic sized TensorArrays. When creating the gradient
# TensorArray, the final size of the forward array must be known.
# For this we need to wait until it has been created by depending on
# the input flow of the original op.
handle = op.inputs[0]
flow = op.inputs[1]
lengths = op.outputs[1]
dtype = op.get_attr("dtype")
grad_source = _GetGradSource(grad)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
u_g = g.split(grad, lengths=lengths)
# handle, flow_in
return [None, u_g.flow]
@ops.RegisterGradient("TensorArraySplit")
@ops.RegisterGradient("TensorArraySplitV2")
@ops.RegisterGradient("TensorArraySplitV3")
def _TensorArraySplitGrad(op, flow):
"""Gradient for TensorArraySplit.
Args:
op: Forward TensorArraySplit op.
flow: Gradient `Tensor` flow to TensorArraySplit.
Returns:
A grad `Tensor`, the gradient created in upstream ReadGrads or PackGrad.
"""
handle = op.inputs[0]
dtype = op.get_attr("T")
grad_source = _GetGradSource(flow)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
grad = g.concat()
# handle, value, lengths, flow_in
return [None, grad, None, flow]
| apache-2.0 |
norayr/unisubs | libs/markdown/extensions/html_tidy.py | 123 | 2093 | #!/usr/bin/env python
"""
HTML Tidy Extension for Python-Markdown
=======================================
Runs [HTML Tidy][] on the output of Python-Markdown using the [uTidylib][]
Python wrapper. Both libtidy and uTidylib must be installed on your system.
Note that any Tidy [options] can be passed in as extension configs. So,
for example, to output HTML rather than XHTML, set ``output_xhtml=0``. To
indent the output, set ``indent=auto`` and to have Tidy wrap the output in
``<html>`` and ``<body>`` tags, set ``show_body_only=0``.
[HTML Tidy]: http://tidy.sourceforge.net/
[uTidylib]: http://utidylib.berlios.de/
[options]: http://tidy.sourceforge.net/docs/quickref.html
Copyright (c)2008 [Waylan Limberg](http://achinghead.com)
License: [BSD](http://www.opensource.org/licenses/bsd-license.php)
Dependencies:
* [Python2.3+](http://python.org)
* [Markdown 2.0+](http://www.freewisdom.org/projects/python-markdown/)
* [HTML Tidy](http://utidylib.berlios.de/)
* [uTidylib](http://utidylib.berlios.de/)
"""
import markdown
import tidy
class TidyExtension(markdown.Extension):
def __init__(self, configs):
# Set defaults to match typical markdown behavior.
self.config = dict(output_xhtml=1,
show_body_only=1,
)
        # Merge in user-defined configs, overriding any present if necessary.
for c in configs:
self.config[c[0]] = c[1]
def extendMarkdown(self, md, md_globals):
# Save options to markdown instance
md.tidy_options = self.config
# Add TidyProcessor to postprocessors
md.postprocessors['tidy'] = TidyProcessor(md)
class TidyProcessor(markdown.postprocessors.Postprocessor):
def run(self, text):
# Pass text to Tidy. As Tidy does not accept unicode we need to encode
# it and decode its return value.
return unicode(tidy.parseString(text.encode('utf-8'),
**self.markdown.tidy_options))
def makeExtension(configs=None):
return TidyExtension(configs=configs)
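# Usage sketch (assumes Markdown 2.x with libtidy/uTidylib installed and this
# module importable as "html_tidy"; the input text and config value are
# illustrative):
#
#   import markdown
#   html = markdown.markdown(
#       "# Title", extensions=['html_tidy'],
#       extension_configs={'html_tidy': [('indent', 'auto')]})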
| agpl-3.0 |
pratikmallya/hue | desktop/core/ext-py/Mako-0.8.1/mako/lookup.py | 38 | 13054 | # mako/lookup.py
# Copyright (C) 2006-2012 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import os, stat, posixpath, re
from mako import exceptions, util
from mako.template import Template
try:
import threading
except:
import dummy_threading as threading
class TemplateCollection(object):
"""Represent a collection of :class:`.Template` objects,
identifiable via URI.
A :class:`.TemplateCollection` is linked to the usage of
all template tags that address other templates, such
as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
The ``file`` attribute of each of those tags refers
to a string URI that is passed to that :class:`.Template`
object's :class:`.TemplateCollection` for resolution.
:class:`.TemplateCollection` is an abstract class,
with the usual default implementation being :class:`.TemplateLookup`.
"""
def has_template(self, uri):
"""Return ``True`` if this :class:`.TemplateLookup` is
capable of returning a :class:`.Template` object for the
given ``uri``.
:param uri: String URI of the template to be resolved.
"""
try:
self.get_template(uri)
return True
except exceptions.TemplateLookupException:
return False
def get_template(self, uri, relativeto=None):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
The default implementation raises
:class:`.NotImplementedError`. Implementations should
raise :class:`.TemplateLookupException` if the given ``uri``
cannot be resolved.
:param uri: String URI of the template to be resolved.
:param relativeto: if present, the given ``uri`` is assumed to
be relative to this URI.
"""
raise NotImplementedError()
def filename_to_uri(self, uri, filename):
"""Convert the given ``filename`` to a URI relative to
this :class:`.TemplateCollection`."""
return uri
def adjust_uri(self, uri, filename):
"""Adjust the given ``uri`` based on the calling ``filename``.
When this method is called from the runtime, the
``filename`` parameter is taken directly to the ``filename``
attribute of the calling template. Therefore a custom
:class:`.TemplateCollection` subclass can place any string
identifier desired in the ``filename`` parameter of the
:class:`.Template` objects it constructs and have them come back
here.
"""
return uri
class TemplateLookup(TemplateCollection):
"""Represent a collection of templates that locates template source files
from the local filesystem.
The primary argument is the ``directories`` argument, the list of
directories to search:
.. sourcecode:: python
lookup = TemplateLookup(["/path/to/templates"])
some_template = lookup.get_template("/index.html")
The :class:`.TemplateLookup` can also be given :class:`.Template` objects
    programmatically using :meth:`.put_string` or :meth:`.put_template`:
.. sourcecode:: python
lookup = TemplateLookup()
lookup.put_string("base.html", '''
<html><body>${self.next()}</body></html>
''')
lookup.put_string("hello.html", '''
<%include file='base.html'/>
Hello, world !
''')
:param directories: A list of directory names which will be
searched for a particular template URI. The URI is appended
to each directory and the filesystem checked.
:param collection_size: Approximate size of the collection used
to store templates. If left at its default of ``-1``, the size
is unbounded, and a plain Python dictionary is used to
relate URI strings to :class:`.Template` instances.
Otherwise, a least-recently-used cache object is used which
will maintain the size of the collection approximately to
the number given.
:param filesystem_checks: When at its default value of ``True``,
each call to :meth:`.TemplateLookup.get_template()` will
compare the filesystem last modified time to the time in
which an existing :class:`.Template` object was created.
This allows the :class:`.TemplateLookup` to regenerate a
new :class:`.Template` whenever the original source has
been updated. Set this to ``False`` for a very minor
performance increase.
:param modulename_callable: A callable which, when present,
is passed the path of the source file as well as the
requested URI, and then returns the full path of the
generated Python module file. This is used to inject
alternate schemes for Python module location. If left at
its default of ``None``, the built in system of generation
based on ``module_directory`` plus ``uri`` is used.
All other keyword parameters available for
:class:`.Template` are mirrored here. When new
:class:`.Template` objects are created, the keywords
established with this :class:`.TemplateLookup` are passed on
to each new :class:`.Template`.
"""
def __init__(self,
directories=None,
module_directory=None,
filesystem_checks=True,
collection_size=-1,
format_exceptions=False,
error_handler=None,
disable_unicode=False,
bytestring_passthrough=False,
output_encoding=None,
encoding_errors='strict',
cache_args=None,
cache_impl='beaker',
cache_enabled=True,
cache_type=None,
cache_dir=None,
cache_url=None,
modulename_callable=None,
module_writer=None,
default_filters=None,
buffer_filters=(),
strict_undefined=False,
imports=None,
future_imports=None,
enable_loop=True,
input_encoding=None,
preprocessor=None,
lexer_cls=None):
self.directories = [posixpath.normpath(d) for d in
util.to_list(directories, ())
]
self.module_directory = module_directory
self.modulename_callable = modulename_callable
self.filesystem_checks = filesystem_checks
self.collection_size = collection_size
if cache_args is None:
cache_args = {}
# transfer deprecated cache_* args
if cache_dir:
cache_args.setdefault('dir', cache_dir)
if cache_url:
cache_args.setdefault('url', cache_url)
if cache_type:
cache_args.setdefault('type', cache_type)
self.template_args = {
'format_exceptions':format_exceptions,
'error_handler':error_handler,
'disable_unicode':disable_unicode,
'bytestring_passthrough':bytestring_passthrough,
'output_encoding':output_encoding,
'cache_impl':cache_impl,
'encoding_errors':encoding_errors,
'input_encoding':input_encoding,
'module_directory':module_directory,
'module_writer':module_writer,
'cache_args':cache_args,
'cache_enabled':cache_enabled,
'default_filters':default_filters,
'buffer_filters':buffer_filters,
'strict_undefined':strict_undefined,
'imports':imports,
'future_imports':future_imports,
'enable_loop':enable_loop,
'preprocessor':preprocessor,
'lexer_cls':lexer_cls
}
if collection_size == -1:
self._collection = {}
self._uri_cache = {}
else:
self._collection = util.LRUCache(collection_size)
self._uri_cache = util.LRUCache(collection_size)
self._mutex = threading.Lock()
def get_template(self, uri):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
.. note:: The ``relativeto`` argument is not supported here at the moment.
"""
try:
if self.filesystem_checks:
return self._check(uri, self._collection[uri])
else:
return self._collection[uri]
except KeyError:
u = re.sub(r'^\/+', '', uri)
for dir in self.directories:
srcfile = posixpath.normpath(posixpath.join(dir, u))
if os.path.isfile(srcfile):
return self._load(srcfile, uri)
else:
raise exceptions.TopLevelLookupException(
"Cant locate template for uri %r" % uri)
def adjust_uri(self, uri, relativeto):
"""Adjust the given ``uri`` based on the given relative URI."""
key = (uri, relativeto)
if key in self._uri_cache:
return self._uri_cache[key]
if uri[0] != '/':
if relativeto is not None:
v = self._uri_cache[key] = posixpath.join(
posixpath.dirname(relativeto), uri)
else:
v = self._uri_cache[key] = '/' + uri
else:
v = self._uri_cache[key] = uri
return v
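    # Illustrative behaviour (URIs are hypothetical):
    #   adjust_uri("widgets.html", "/admin/index.html") -> "/admin/widgets.html"
    #   adjust_uri("widgets.html", None)                 -> "/widgets.html"
    #   adjust_uri("/widgets.html", "/admin/index.html") -> "/widgets.html"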
def filename_to_uri(self, filename):
"""Convert the given ``filename`` to a URI relative to
this :class:`.TemplateCollection`."""
try:
return self._uri_cache[filename]
except KeyError:
value = self._relativeize(filename)
self._uri_cache[filename] = value
return value
def _relativeize(self, filename):
"""Return the portion of a filename that is 'relative'
to the directories in this lookup.
"""
filename = posixpath.normpath(filename)
for dir in self.directories:
if filename[0:len(dir)] == dir:
return filename[len(dir):]
else:
return None
def _load(self, filename, uri):
self._mutex.acquire()
try:
try:
# try returning from collection one
# more time in case concurrent thread already loaded
return self._collection[uri]
except KeyError:
pass
try:
if self.modulename_callable is not None:
module_filename = self.modulename_callable(filename, uri)
else:
module_filename = None
self._collection[uri] = template = Template(
uri=uri,
filename=posixpath.normpath(filename),
lookup=self,
module_filename=module_filename,
**self.template_args)
return template
except:
# if compilation fails etc, ensure
# template is removed from collection,
# re-raise
self._collection.pop(uri, None)
raise
finally:
self._mutex.release()
def _check(self, uri, template):
if template.filename is None:
return template
try:
template_stat = os.stat(template.filename)
if template.module._modified_time < \
template_stat[stat.ST_MTIME]:
self._collection.pop(uri, None)
return self._load(template.filename, uri)
else:
return template
except OSError:
self._collection.pop(uri, None)
raise exceptions.TemplateLookupException(
"Cant locate template for uri %r" % uri)
def put_string(self, uri, text):
"""Place a new :class:`.Template` object into this
:class:`.TemplateLookup`, based on the given string of
``text``.
"""
self._collection[uri] = Template(
text,
lookup=self,
uri=uri,
**self.template_args)
def put_template(self, uri, template):
"""Place a new :class:`.Template` object into this
:class:`.TemplateLookup`, based on the given
:class:`.Template` object.
"""
self._collection[uri] = template
| apache-2.0 |
hernandito/SickRage | lib/sqlalchemy/testing/plugin/plugin_base.py | 76 | 14817 | # plugin/plugin_base.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Testing extensions.
this module is designed to work as a testing-framework-agnostic library,
so that we can continue to support nose and also begin adding new functionality
via py.test.
"""
from __future__ import absolute_import
try:
# unitttest has a SkipTest also but pytest doesn't
# honor it unless nose is imported too...
from nose import SkipTest
except ImportError:
from _pytest.runner import Skipped as SkipTest
import sys
import re
py3k = sys.version_info >= (3, 0)
if py3k:
import configparser
else:
import ConfigParser as configparser
# late imports
fixtures = None
engines = None
exclusions = None
warnings = None
profiling = None
assertions = None
requirements = None
config = None
testing = None
util = None
file_config = None
logging = None
db_opts = {}
options = None
def setup_options(make_option):
make_option("--log-info", action="callback", type="string", callback=_log,
help="turn on info logging for <LOG> (multiple OK)")
make_option("--log-debug", action="callback", type="string", callback=_log,
help="turn on debug logging for <LOG> (multiple OK)")
make_option("--db", action="append", type="string", dest="db",
help="Use prefab database uri. Multiple OK, "
"first one is run by default.")
make_option('--dbs', action='callback', callback=_list_dbs,
help="List available prefab dbs")
make_option("--dburi", action="append", type="string", dest="dburi",
help="Database uri. Multiple OK, first one is run by default.")
make_option("--dropfirst", action="store_true", dest="dropfirst",
help="Drop all tables in the target database first")
make_option("--backend-only", action="store_true", dest="backend_only",
help="Run only tests marked with __backend__")
make_option("--mockpool", action="store_true", dest="mockpool",
help="Use mock pool (asserts only one connection used)")
make_option("--low-connections", action="store_true", dest="low_connections",
help="Use a low number of distinct connections - i.e. for Oracle TNS"
)
make_option("--reversetop", action="store_true", dest="reversetop", default=False,
help="Use a random-ordering set implementation in the ORM (helps "
"reveal dependency issues)")
make_option("--requirements", action="callback", type="string",
callback=_requirements_opt,
help="requirements class for testing, overrides setup.cfg")
make_option("--with-cdecimal", action="store_true", dest="cdecimal", default=False,
help="Monkeypatch the cdecimal library into Python 'decimal' for all tests")
make_option("--serverside", action="callback", callback=_server_side_cursors,
help="Turn on server side cursors for PG")
make_option("--mysql-engine", action="store", dest="mysql_engine", default=None,
help="Use the specified MySQL storage engine for all tables, default is "
"a db-default/InnoDB combo.")
make_option("--tableopts", action="append", dest="tableopts", default=[],
help="Add a dialect-specific table option, key=value")
make_option("--write-profiles", action="store_true", dest="write_profiles", default=False,
help="Write/update profiling data.")
def read_config():
global file_config
file_config = configparser.ConfigParser()
file_config.read(['setup.cfg', 'test.cfg'])
def pre_begin(opt):
"""things to set up early, before coverage might be setup."""
global options
options = opt
for fn in pre_configure:
fn(options, file_config)
def set_coverage_flag(value):
options.has_coverage = value
def post_begin():
"""things to set up later, once we know coverage is running."""
# Lazy setup of other options (post coverage)
for fn in post_configure:
fn(options, file_config)
# late imports, has to happen after config as well
# as nose plugins like coverage
global util, fixtures, engines, exclusions, \
assertions, warnings, profiling,\
config, testing
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, engines, exclusions, \
assertions, warnings, profiling, config
from sqlalchemy import util
def _log(opt_str, value, parser):
global logging
if not logging:
import logging
logging.basicConfig()
if opt_str.endswith('-info'):
logging.getLogger(value).setLevel(logging.INFO)
elif opt_str.endswith('-debug'):
logging.getLogger(value).setLevel(logging.DEBUG)
def _list_dbs(*args):
print("Available --db options (use --dburi to override)")
for macro in sorted(file_config.options('db')):
print("%20s\t%s" % (macro, file_config.get('db', macro)))
sys.exit(0)
def _server_side_cursors(opt_str, value, parser):
db_opts['server_side_cursors'] = True
def _requirements_opt(opt_str, value, parser):
_setup_requirements(value)
pre_configure = []
post_configure = []
def pre(fn):
pre_configure.append(fn)
return fn
def post(fn):
post_configure.append(fn)
return fn
@pre
def _setup_options(opt, file_config):
global options
options = opt
@pre
def _monkeypatch_cdecimal(options, file_config):
if options.cdecimal:
import cdecimal
sys.modules['decimal'] = cdecimal
@post
def _engine_uri(options, file_config):
from sqlalchemy.testing import engines, config
from sqlalchemy import testing
if options.dburi:
db_urls = list(options.dburi)
else:
db_urls = []
if options.db:
for db_token in options.db:
for db in re.split(r'[,\s]+', db_token):
if db not in file_config.options('db'):
raise RuntimeError(
"Unknown URI specifier '%s'. Specify --dbs for known uris."
% db)
else:
db_urls.append(file_config.get('db', db))
if not db_urls:
db_urls.append(file_config.get('db', 'default'))
for db_url in db_urls:
eng = engines.testing_engine(db_url, db_opts)
eng.connect().close()
config.Config.register(eng, db_opts, options, file_config, testing)
config.db_opts = db_opts
@post
def _engine_pool(options, file_config):
if options.mockpool:
from sqlalchemy import pool
db_opts['poolclass'] = pool.AssertionPool
@post
def _requirements(options, file_config):
requirement_cls = file_config.get('sqla_testing', "requirement_cls")
_setup_requirements(requirement_cls)
def _setup_requirements(argument):
from sqlalchemy.testing import config
from sqlalchemy import testing
if config.requirements is not None:
return
modname, clsname = argument.split(":")
# importlib.import_module() only introduced in 2.7, a little
# late
mod = __import__(modname)
for component in modname.split(".")[1:]:
mod = getattr(mod, component)
req_cls = getattr(mod, clsname)
config.requirements = testing.requires = req_cls()
@post
def _prep_testing_database(options, file_config):
from sqlalchemy.testing import config
from sqlalchemy import schema, inspect
if options.dropfirst:
for cfg in config.Config.all_configs():
e = cfg.db
inspector = inspect(e)
try:
view_names = inspector.get_view_names()
except NotImplementedError:
pass
else:
for vname in view_names:
e.execute(schema._DropView(schema.Table(vname, schema.MetaData())))
if config.requirements.schemas.enabled_for_config(cfg):
try:
view_names = inspector.get_view_names(schema="test_schema")
except NotImplementedError:
pass
else:
for vname in view_names:
e.execute(schema._DropView(
schema.Table(vname,
schema.MetaData(), schema="test_schema")))
for tname in reversed(inspector.get_table_names(order_by="foreign_key")):
e.execute(schema.DropTable(schema.Table(tname, schema.MetaData())))
if config.requirements.schemas.enabled_for_config(cfg):
for tname in reversed(inspector.get_table_names(
order_by="foreign_key", schema="test_schema")):
e.execute(schema.DropTable(
schema.Table(tname, schema.MetaData(), schema="test_schema")))
@post
def _set_table_options(options, file_config):
from sqlalchemy.testing import schema
table_options = schema.table_options
for spec in options.tableopts:
key, value = spec.split('=')
table_options[key] = value
if options.mysql_engine:
table_options['mysql_engine'] = options.mysql_engine
@post
def _reverse_topological(options, file_config):
if options.reversetop:
from sqlalchemy.orm.util import randomize_unitofwork
randomize_unitofwork()
@post
def _post_setup_options(opt, file_config):
from sqlalchemy.testing import config
config.options = options
config.file_config = file_config
@post
def _setup_profiling(options, file_config):
from sqlalchemy.testing import profiling
profiling._profile_stats = profiling.ProfileStatsFile(
file_config.get('sqla_testing', 'profile_file'))
def want_class(cls):
if not issubclass(cls, fixtures.TestBase):
return False
elif cls.__name__.startswith('_'):
return False
elif config.options.backend_only and not getattr(cls, '__backend__', False):
return False
else:
return True
def generate_sub_tests(cls, module):
if getattr(cls, '__backend__', False):
for cfg in config.Config.all_configs():
name = "%s_%s_%s" % (cls.__name__, cfg.db.name, cfg.db.driver)
subcls = type(
name,
(cls, ),
{
"__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)),
"__backend__": False}
)
setattr(module, name, subcls)
yield subcls
else:
yield cls
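# Illustrative result (class and backend names are hypothetical): a FooTest
# with __backend__ = True and two configured databases yields
# FooTest_sqlite_pysqlite and FooTest_postgresql_psycopg2, each pinned to its
# backend via __only_on__ so it runs against exactly one engine.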
def start_test_class(cls):
_do_skips(cls)
_setup_engine(cls)
def stop_test_class(cls):
engines.testing_reaper._stop_test_ctx()
if not options.low_connections:
assertions.global_cleanup_assertions()
_restore_engine()
def _restore_engine():
config._current.reset(testing)
def _setup_engine(cls):
if getattr(cls, '__engine_options__', None):
eng = engines.testing_engine(options=cls.__engine_options__)
config._current.push_engine(eng, testing)
def before_test(test, test_module_name, test_class, test_name):
# like a nose id, e.g.:
# "test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause"
name = test_class.__name__
suffix = "_%s_%s" % (config.db.name, config.db.driver)
if name.endswith(suffix):
name = name[0:-(len(suffix))]
id_ = "%s.%s.%s" % (test_module_name, name, test_name)
warnings.resetwarnings()
profiling._current_test = id_
def after_test(test):
engines.testing_reaper._after_test_ctx()
warnings.resetwarnings()
def _do_skips(cls):
all_configs = set(config.Config.all_configs())
reasons = []
if hasattr(cls, '__requires__'):
requirements = config.requirements
for config_obj in list(all_configs):
for requirement in cls.__requires__:
check = getattr(requirements, requirement)
if check.predicate(config_obj):
all_configs.remove(config_obj)
if check.reason:
reasons.append(check.reason)
break
if hasattr(cls, '__prefer_requires__'):
non_preferred = set()
requirements = config.requirements
for config_obj in list(all_configs):
for requirement in cls.__prefer_requires__:
check = getattr(requirements, requirement)
if check.predicate(config_obj):
non_preferred.add(config_obj)
if all_configs.difference(non_preferred):
all_configs.difference_update(non_preferred)
if cls.__unsupported_on__:
spec = exclusions.db_spec(*cls.__unsupported_on__)
for config_obj in list(all_configs):
if spec(config_obj):
all_configs.remove(config_obj)
if getattr(cls, '__only_on__', None):
spec = exclusions.db_spec(*util.to_list(cls.__only_on__))
for config_obj in list(all_configs):
if not spec(config_obj):
all_configs.remove(config_obj)
if getattr(cls, '__skip_if__', False):
for c in getattr(cls, '__skip_if__'):
if c():
raise SkipTest("'%s' skipped by %s" % (
cls.__name__, c.__name__)
)
for db_spec, op, spec in getattr(cls, '__excluded_on__', ()):
for config_obj in list(all_configs):
if exclusions.skip_if(
exclusions.SpecPredicate(db_spec, op, spec)
).predicate(config_obj):
all_configs.remove(config_obj)
if not all_configs:
raise SkipTest(
"'%s' unsupported on DB implementation %s%s" % (
cls.__name__,
", ".join("'%s' = %s" % (
config_obj.db.name,
config_obj.db.dialect.server_version_info)
for config_obj in config.Config.all_configs()
),
", ".join(reasons)
)
)
elif hasattr(cls, '__prefer_backends__'):
non_preferred = set()
spec = exclusions.db_spec(*util.to_list(cls.__prefer_backends__))
for config_obj in all_configs:
if not spec(config_obj):
non_preferred.add(config_obj)
if all_configs.difference(non_preferred):
all_configs.difference_update(non_preferred)
if config._current not in all_configs:
_setup_config(all_configs.pop(), cls)
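# Illustrative flow (requirement and backend names are hypothetical): a class
# declaring __requires__ = ('sequences',) and __only_on__ = ('postgresql',)
# keeps only configs running PostgreSQL with sequence support; if none remain,
# the whole class is skipped with the collected reasons.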
def _setup_config(config_obj, ctx):
config._current.push(config_obj, testing)
| gpl-3.0 |
CydarLtd/ansible | lib/ansible/modules/database/misc/riak.py | 70 | 8254 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, James Martin <[email protected]>, Drew Kerrigan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: riak
short_description: This module handles some common Riak operations
description:
- This module can be used to join nodes to a cluster, check
the status of the cluster.
version_added: "1.2"
author:
- "James Martin (@jsmartin)"
- "Drew Kerrigan (@drewkerrigan)"
options:
command:
description:
- The command you would like to perform against the cluster.
required: false
default: null
choices: ['ping', 'kv_test', 'join', 'plan', 'commit']
config_dir:
description:
- The path to the riak configuration directory
required: false
default: /etc/riak
http_conn:
description:
- The ip address and port that is listening for Riak HTTP queries
required: false
default: 127.0.0.1:8098
target_node:
description:
- The target node for certain operations (join, ping)
required: false
default: [email protected]
wait_for_handoffs:
description:
- Number of seconds to wait for handoffs to complete.
required: false
default: null
wait_for_ring:
description:
- Number of seconds to wait for all nodes to agree on the ring.
required: false
default: null
wait_for_service:
description:
- Waits for a riak service to come online before continuing.
required: false
    default: null
choices: ['kv']
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
'''
EXAMPLES = '''
# Join's a Riak node to another node
- riak:
command: join
target_node: [email protected]
# Wait for handoffs to finish. Use with async and poll.
- riak:
wait_for_handoffs: yes
# Wait for riak_kv service to startup
- riak:
wait_for_service: kv
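# Wait up to ten minutes for handoffs in the background (values illustrative;
# "async" and "poll" are standard task keywords, not module options).
- riak:
    wait_for_handoffs: 600
  async: 700
  poll: 10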
'''
import time
import socket
import sys
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
def ring_check(module, riak_admin_bin):
cmd = '%s ringready' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0 and 'TRUE All nodes agree on the ring' in out:
return True
else:
return False
def main():
module = AnsibleModule(
argument_spec=dict(
command=dict(required=False, default=None, choices=[
'ping', 'kv_test', 'join', 'plan', 'commit']),
config_dir=dict(default='/etc/riak', type='path'),
http_conn=dict(required=False, default='127.0.0.1:8098'),
target_node=dict(default='[email protected]', required=False),
wait_for_handoffs=dict(default=False, type='int'),
wait_for_ring=dict(default=False, type='int'),
wait_for_service=dict(
required=False, default=None, choices=['kv']),
validate_certs = dict(default='yes', type='bool'))
)
command = module.params.get('command')
config_dir = module.params.get('config_dir')
http_conn = module.params.get('http_conn')
target_node = module.params.get('target_node')
wait_for_handoffs = module.params.get('wait_for_handoffs')
wait_for_ring = module.params.get('wait_for_ring')
wait_for_service = module.params.get('wait_for_service')
validate_certs = module.params.get('validate_certs')
#make sure riak commands are on the path
riak_bin = module.get_bin_path('riak')
riak_admin_bin = module.get_bin_path('riak-admin')
timeout = time.time() + 120
while True:
if time.time() > timeout:
module.fail_json(msg='Timeout, could not fetch Riak stats.')
(response, info) = fetch_url(module, 'http://%s/stats' % (http_conn), force=True, timeout=5)
if info['status'] == 200:
stats_raw = response.read()
break
time.sleep(5)
# here we attempt to load those stats,
try:
stats = json.loads(stats_raw)
except:
module.fail_json(msg='Could not parse Riak stats.')
node_name = stats['nodename']
nodes = stats['ring_members']
ring_size = stats['ring_creation_size']
rc, out, err = module.run_command([riak_bin, 'version'] )
version = out.strip()
result = dict(node_name=node_name,
nodes=nodes,
ring_size=ring_size,
version=version)
if command == 'ping':
cmd = '%s ping %s' % ( riak_bin, target_node )
rc, out, err = module.run_command(cmd)
if rc == 0:
result['ping'] = out
else:
module.fail_json(msg=out)
elif command == 'kv_test':
cmd = '%s test' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0:
result['kv_test'] = out
else:
module.fail_json(msg=out)
elif command == 'join':
if nodes.count(node_name) == 1 and len(nodes) > 1:
result['join'] = 'Node is already in cluster or staged to be in cluster.'
else:
cmd = '%s cluster join %s' % (riak_admin_bin, target_node)
rc, out, err = module.run_command(cmd)
if rc == 0:
result['join'] = out
result['changed'] = True
else:
module.fail_json(msg=out)
elif command == 'plan':
cmd = '%s cluster plan' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0:
result['plan'] = out
if 'Staged Changes' in out:
result['changed'] = True
else:
module.fail_json(msg=out)
elif command == 'commit':
cmd = '%s cluster commit' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0:
result['commit'] = out
result['changed'] = True
else:
module.fail_json(msg=out)
# this could take a while, recommend to run in async mode
if wait_for_handoffs:
timeout = time.time() + wait_for_handoffs
while True:
cmd = '%s transfers' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if 'No transfers active' in out:
result['handoffs'] = 'No transfers active.'
break
time.sleep(10)
if time.time() > timeout:
module.fail_json(msg='Timeout waiting for handoffs.')
if wait_for_service:
cmd = [riak_admin_bin, 'wait_for_service', 'riak_%s' % wait_for_service, node_name ]
rc, out, err = module.run_command(cmd)
result['service'] = out
if wait_for_ring:
timeout = time.time() + wait_for_ring
while True:
if ring_check(module, riak_admin_bin):
break
time.sleep(10)
if time.time() > timeout:
module.fail_json(msg='Timeout waiting for nodes to agree on ring.')
result['ring_ready'] = ring_check(module, riak_admin_bin)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |
arpitparmar5739/youtube-dl | youtube_dl/extractor/fourtube.py | 65 | 3914 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
)
from ..utils import (
parse_duration,
parse_iso8601,
str_to_int,
)
class FourTubeIE(InfoExtractor):
IE_NAME = '4tube'
_VALID_URL = r'https?://(?:www\.)?4tube\.com/videos/(?P<id>\d+)'
_TEST = {
'url': 'http://www.4tube.com/videos/209733/hot-babe-holly-michaels-gets-her-ass-stuffed-by-black',
'md5': '6516c8ac63b03de06bc8eac14362db4f',
'info_dict': {
'id': '209733',
'ext': 'mp4',
'title': 'Hot Babe Holly Michaels gets her ass stuffed by black',
'uploader': 'WCP Club',
'uploader_id': 'wcp-club',
'upload_date': '20131031',
'timestamp': 1383263892,
'duration': 583,
'view_count': int,
'like_count': int,
'categories': list,
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = self._html_search_meta('name', webpage)
timestamp = parse_iso8601(self._html_search_meta(
'uploadDate', webpage))
thumbnail = self._html_search_meta('thumbnailUrl', webpage)
uploader_id = self._html_search_regex(
r'<a class="img-avatar" href="[^"]+/channels/([^/"]+)" title="Go to [^"]+ page">',
webpage, 'uploader id')
uploader = self._html_search_regex(
r'<a class="img-avatar" href="[^"]+/channels/[^/"]+" title="Go to ([^"]+) page">',
webpage, 'uploader')
categories_html = self._search_regex(
r'(?s)><i class="icon icon-tag"></i>\s*Categories / Tags\s*.*?<ul class="list">(.*?)</ul>',
webpage, 'categories', fatal=False)
categories = None
if categories_html:
categories = [
c.strip() for c in re.findall(
r'(?s)<li><a.*?>(.*?)</a>', categories_html)]
view_count = str_to_int(self._search_regex(
r'<meta itemprop="interactionCount" content="UserPlays:([0-9,]+)">',
webpage, 'view count', fatal=False))
like_count = str_to_int(self._search_regex(
r'<meta itemprop="interactionCount" content="UserLikes:([0-9,]+)">',
webpage, 'like count', fatal=False))
duration = parse_duration(self._html_search_meta('duration', webpage))
params_js = self._search_regex(
r'\$\.ajax\(url,\ opts\);\s*\}\s*\}\)\(([0-9,\[\] ]+)\)',
webpage, 'initialization parameters'
)
params = self._parse_json('[%s]' % params_js, video_id)
media_id = params[0]
sources = ['%s' % p for p in params[2]]
token_url = 'http://tkn.4tube.com/{0}/desktop/{1}'.format(
media_id, '+'.join(sources))
headers = {
b'Content-Type': b'application/x-www-form-urlencoded',
b'Origin': b'http://www.4tube.com',
}
token_req = compat_urllib_request.Request(token_url, b'{}', headers)
tokens = self._download_json(token_req, video_id)
formats = [{
'url': tokens[format]['token'],
'format_id': format + 'p',
'resolution': format + 'p',
'quality': int(format),
} for format in sources]
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'categories': categories,
'thumbnail': thumbnail,
'uploader': uploader,
'uploader_id': uploader_id,
'timestamp': timestamp,
'like_count': like_count,
'view_count': view_count,
'duration': duration,
'age_limit': 18,
}
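# Usage sketch (illustrative; reuses the _TEST fixture URL above, which may
# no longer resolve):
#
#   from youtube_dl import YoutubeDL
#   with YoutubeDL() as ydl:
#       info = ydl.extract_info(
#           'http://www.4tube.com/videos/209733/'
#           'hot-babe-holly-michaels-gets-her-ass-stuffed-by-black',
#           download=False)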
| unlicense |
felliott/waterbutler | tests/auth/osf/test_handler.py | 2 | 8673 | from unittest import mock
import pytest
import tornado
from tests import utils
from tests.server.api.v1.utils import ServerTestCase
from waterbutler.auth.osf import settings
from waterbutler.core.auth import AuthType
from waterbutler.auth.osf.handler import OsfAuthHandler
from waterbutler.core.exceptions import (UnsupportedHTTPMethodError,
UnsupportedActionError)
class TestOsfAuthHandler(ServerTestCase):
def setUp(self):
super().setUp()
self.handler = OsfAuthHandler()
self.request = tornado.httputil.HTTPServerRequest(uri=settings.API_URL)
mock_auth = utils.MockCoroutine(return_value={'auth': {}, 'callback_url': 'test.com'})
self.mock_auth_patcher = mock.patch(
'waterbutler.auth.osf.handler.OsfAuthHandler.make_request',
mock_auth
)
self.mock_auth_patcher.start()
def tearDown(self):
self.mock_auth_patcher.stop()
super().tearDown()
@tornado.testing.gen_test
async def test_supported_and_unsupported_methods(self):
supported_methods = ['put', 'get', 'head', 'delete']
post_actions = ['copy', 'rename', 'move']
unsupported_actions = ['ma1f0rmed', 'put', 'get', 'head', 'delete']
unsupported_methods = ['post', 'trace', 'connect', 'patch', 'ma1f0rmed']
resource = 'test'
provider = 'test'
assert all(method in self.handler.ACTION_MAP.keys() for method in supported_methods)
for auth_type in AuthType:
for action in post_actions:
self.request.method = 'post'
await self.handler.get(resource, provider,
self.request, action=action, auth_type=auth_type)
for method in supported_methods:
self.request.method = method
await self.handler.get(resource, provider, self.request)
for method in unsupported_methods:
self.request.method = method
with pytest.raises(UnsupportedHTTPMethodError):
await self.handler.get(resource, provider, self.request)
for action in unsupported_actions:
self.request.method = 'post'
with pytest.raises(UnsupportedActionError):
await self.handler.get(resource, provider, self.request, action=action)
@tornado.testing.gen_test
async def test_permissions_post_copy_source_destination(self):
resource = 'test'
provider = 'test'
action = 'copy'
self.request.method = 'post'
self.handler.build_payload = mock.Mock()
for auth_type in AuthType:
await self.handler.get(resource, provider, self.request, action=action, auth_type=auth_type)
if auth_type is AuthType.SOURCE:
self.handler.build_payload.assert_called_with({
'nid': 'test',
'provider': 'test',
'action': 'download',
'intent': 'copyfrom',
'path': '',
'version': None,
'metrics': {
'referrer': None,
'origin': None,
'uri': settings.API_URL,
'user_agent': None
}
}, cookie=None, view_only=None)
else:
self.handler.build_payload.assert_called_with({
'nid': 'test',
'provider': 'test',
'action': 'upload',
'intent': 'copyto',
'path': '',
'version': None,
'metrics': {
'referrer': None,
'origin': None,
'uri': settings.API_URL,
'user_agent': None
}
}, cookie=None, view_only=None)
class TestActionMapping:
@pytest.mark.asyncio
@pytest.mark.parametrize('method, action, path, auth_type, headers, query_args, exp_perm_action, exp_intent', [
['post', 'copy', '/folder/', AuthType.SOURCE, None, None, 'download', 'copyfrom'],
['post', 'copy', '/folder/', AuthType.DESTINATION, None, None, 'upload', 'copyto'],
['post', 'move', '/folder/', AuthType.SOURCE, None, None, 'delete', 'movefrom'],
['post', 'move', '/folder/', AuthType.DESTINATION, None, None, 'upload', 'moveto'],
['post', 'rename', '/folder/', AuthType.SOURCE, None, None, 'upload', 'rename'],
['post', 'rename', '/folder/', AuthType.DESTINATION, None, None, 'upload', 'rename'],
['post', 'copy', '/file', AuthType.SOURCE, None, None, 'download', 'copyfrom'],
['post', 'copy', '/file', AuthType.DESTINATION, None, None, 'upload', 'copyto'],
['post', 'move', '/file', AuthType.SOURCE, None, None, 'delete', 'movefrom'],
['post', 'move', '/file', AuthType.DESTINATION, None, None, 'upload', 'moveto'],
['post', 'rename', '/file', AuthType.SOURCE, None, None, 'upload', 'rename'],
['post', 'rename', '/file', AuthType.DESTINATION, None, None, 'upload', 'rename'],
['head', None, '/folder/', None, {settings.MFR_ACTION_HEADER: 'render'}, None, 'render', 'render'],
['head', None, '/folder/', None, {settings.MFR_ACTION_HEADER: 'export'}, None, 'export', 'export'],
['head', None, '/file', None, {settings.MFR_ACTION_HEADER: 'render'}, None, 'render', 'render'],
['head', None, '/file', None, {settings.MFR_ACTION_HEADER: 'export'}, None, 'export', 'export'],
['get', None, '/folder/', None, None, None, 'metadata', 'metadata'],
['head', None, '/folder/', None, None, None, 'metadata', 'metadata'],
['put', None, '/folder/', None, None, {'kind': 'folder'}, 'upload', 'create_dir'],
['put', None, '/folder/', None, None, {'kind': 'file'}, 'upload', 'create_file'],
['delete', None, '/folder/', None, None, None, 'delete', 'delete'],
['get', None, '/folder/', None, None, {'meta': 1}, 'metadata', 'metadata'],
['get', None, '/folder/', None, None, {'revisions': 1}, 'metadata', 'metadata'],
['get', None, '/folder/', None, None, {'zip': 1}, 'download', 'daz'],
['get', None, '/file', None, None, None, 'download', 'download'],
['head', None, '/file', None, None, None, 'metadata', 'metadata'],
['put', None, '/file', None, None, None, 'upload', 'update_file'],
['delete', None, '/file', None, None, None, 'delete', 'delete'],
['get', None, '/file', None, None, {'meta': 1}, 'metadata', 'metadata'],
['get', None, '/file', None, None, {'revisions': 1}, 'revisions', 'revisions'],
['get', None, '/file', None, None, {'zip': 1}, 'download', 'download'],
['get', None, '/file', None, None, {'meta': 1, 'revisions': 1}, 'metadata', 'metadata'],
])
async def test_action_type(self, method, action, path, auth_type, headers, query_args,
exp_perm_action, exp_intent):
handler = OsfAuthHandler()
request = mock.Mock()
request.method = method
request.headers = headers if headers is not None else {}
request.query_arguments = query_args if query_args is not None else {}
request.cookies = {}
kwargs = {'path': path}
if action is not None:
kwargs['action'] = action
if auth_type is not None:
kwargs['auth_type'] = auth_type
handler.build_payload = mock.Mock()
handler.make_request = utils.MockCoroutine(
return_value={'auth': {}, 'callback_url': 'dummy'}
)
await handler.get('test', 'test', request, **kwargs)
        handler.build_payload.assert_called_once()
args, _ = handler.build_payload.call_args
assert args[0]['action'] == exp_perm_action
assert args[0]['intent'] == exp_intent
@pytest.mark.asyncio
async def test_unhandled_mfr_action(self):
handler = OsfAuthHandler()
request = mock.Mock()
request.method = 'head'
request.headers = {settings.MFR_ACTION_HEADER: 'bad-action'}
with pytest.raises(UnsupportedActionError):
await handler.get('test', 'test', request)
| apache-2.0 |
sommergis/gblog | arcgis/restSample.py | 1 | 2014 | # -*- coding: latin-1 -*-
# Reads the layer IDs and the names of an ArcGIS REST service
# and prints them.
# Johannes Sommer, 12.01.2011
try:
import simplejson as json
except ImportError:
    print 'Required Python module for this script is:\n'+\
          ' - Simplejson\n'
# Standard Python Module
import urllib2
URL = "http://server.arcgisonline.com/ArcGIS/rest/services/Demographics/"
def getLayerInfoFromREST(url, _serviceName):
    ''' Reads the layer IDs of the ArcGIS REST service at
        the given URL and returns a dictionary mapping the
        layer IDs (ints) to the layer names.
        Required URL format:
        http://<host>/ArcGIS/rest/services/<folder>/'''
serviceName = _serviceName
url = url + serviceName + '/MapServer/?f=pjson'
response = urllib2.urlopen(url).read()
jsonObj = json.loads(response)
resultDict = {}
for key in jsonObj.iterkeys():
        # 'layers' corresponds to the data frame name of the
        # underlying ArcMap document and may need to be
        # adjusted
if key == 'layers':
root = jsonObj.get(key)
for layerInfo in root:
nameString = layerInfo.get('name')
id = int(layerInfo.get('id'))
parentId = int(layerInfo.get('parentLayerId'))
                # Take only the top-level layers (without sub-elements)
                #if parentId == -1:
                    ## Conversion from UTF-8 to Latin-1
                    #utfString = unicode(nameString)
                    #latinString = utfString.encode('latin-1')
                # Take all layers
                # Conversion from UTF-8 to Latin-1
utfString = unicode(nameString)
latinString = utfString.encode('latin-1')
resultDict[id] = latinString
return resultDict
# Function call
resultDict = getLayerInfoFromREST(URL, "USA_1990-2000_Population_Change")
for item in resultDict.iteritems():
print item
| gpl-3.0 |
pgandhi999/spark | examples/src/main/python/mllib/fpgrowth_example.py | 158 | 1280 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $example on$
from pyspark.mllib.fpm import FPGrowth
# $example off$
from pyspark import SparkContext
if __name__ == "__main__":
sc = SparkContext(appName="FPGrowth")
# $example on$
data = sc.textFile("data/mllib/sample_fpgrowth.txt")
transactions = data.map(lambda line: line.strip().split(' '))
model = FPGrowth.train(transactions, minSupport=0.2, numPartitions=10)
result = model.freqItemsets().collect()
for fi in result:
print(fi)
# $example off$
| apache-2.0 |
courtarro/gnuradio | docs/doxygen/doxyxml/base.py | 333 | 6794 | #
# Copyright 2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
A base class is created.
Classes based upon this are used to make more user-friendly interfaces
to the doxygen xml docs than the generated classes provide.
"""
import os
import pdb
from xml.parsers.expat import ExpatError
from generated import compound
class Base(object):
class Duplicate(StandardError):
pass
class NoSuchMember(StandardError):
pass
class ParsingError(StandardError):
pass
def __init__(self, parse_data, top=None):
self._parsed = False
self._error = False
self._parse_data = parse_data
self._members = []
self._dict_members = {}
self._in_category = {}
self._data = {}
if top is not None:
self._xml_path = top._xml_path
# Set up holder of references
else:
top = self
self._refs = {}
self._xml_path = parse_data
self.top = top
@classmethod
def from_refid(cls, refid, top=None):
""" Instantiate class from a refid rather than parsing object. """
# First check to see if its already been instantiated.
if top is not None and refid in top._refs:
return top._refs[refid]
# Otherwise create a new instance and set refid.
inst = cls(None, top=top)
inst.refid = refid
inst.add_ref(inst)
return inst
@classmethod
def from_parse_data(cls, parse_data, top=None):
refid = getattr(parse_data, 'refid', None)
if refid is not None and top is not None and refid in top._refs:
return top._refs[refid]
inst = cls(parse_data, top=top)
if refid is not None:
inst.refid = refid
inst.add_ref(inst)
return inst
def add_ref(self, obj):
if hasattr(obj, 'refid'):
self.top._refs[obj.refid] = obj
mem_classes = []
def get_cls(self, mem):
for cls in self.mem_classes:
if cls.can_parse(mem):
return cls
raise StandardError(("Did not find a class for object '%s'." \
% (mem.get_name())))
def convert_mem(self, mem):
try:
cls = self.get_cls(mem)
converted = cls.from_parse_data(mem, self.top)
if converted is None:
raise StandardError('No class matched this object.')
self.add_ref(converted)
return converted
except StandardError, e:
print e
@classmethod
def includes(cls, inst):
return isinstance(inst, cls)
@classmethod
def can_parse(cls, obj):
return False
def _parse(self):
self._parsed = True
def _get_dict_members(self, cat=None):
"""
For given category a dictionary is returned mapping member names to
members of that category. For names that are duplicated the name is
mapped to None.
"""
self.confirm_no_error()
if cat not in self._dict_members:
new_dict = {}
for mem in self.in_category(cat):
if mem.name() not in new_dict:
new_dict[mem.name()] = mem
else:
new_dict[mem.name()] = self.Duplicate
self._dict_members[cat] = new_dict
return self._dict_members[cat]
def in_category(self, cat):
self.confirm_no_error()
if cat is None:
return self._members
if cat not in self._in_category:
self._in_category[cat] = [mem for mem in self._members
if cat.includes(mem)]
return self._in_category[cat]
def get_member(self, name, cat=None):
self.confirm_no_error()
# Check if it's in a namespace or class.
bits = name.split('::')
first = bits[0]
rest = '::'.join(bits[1:])
member = self._get_dict_members(cat).get(first, self.NoSuchMember)
# Raise any errors that are returned.
if member in set([self.NoSuchMember, self.Duplicate]):
raise member()
if rest:
return member.get_member(rest, cat=cat)
return member
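    # Illustrative lookup (hypothetical names): get_member('gr::blocks::foo')
    # resolves 'gr' in this object's members, then asks that member for
    # 'blocks::foo', recursing until the final member is found or
    # NoSuchMember/Duplicate is raised.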
def has_member(self, name, cat=None):
try:
mem = self.get_member(name, cat=cat)
return True
except self.NoSuchMember:
return False
def data(self):
self.confirm_no_error()
return self._data
def members(self):
self.confirm_no_error()
return self._members
def process_memberdefs(self):
mdtss = []
for sec in self._retrieved_data.compounddef.sectiondef:
mdtss += sec.memberdef
# At the moment we lose all information associated with sections.
# Sometimes a memberdef is in several sectiondef.
# We make sure we don't get duplicates here.
uniques = set([])
for mem in mdtss:
converted = self.convert_mem(mem)
pair = (mem.name, mem.__class__)
if pair not in uniques:
uniques.add(pair)
self._members.append(converted)
def retrieve_data(self):
filename = os.path.join(self._xml_path, self.refid + '.xml')
try:
self._retrieved_data = compound.parse(filename)
except ExpatError:
print('Error in xml in file %s' % filename)
self._error = True
self._retrieved_data = None
def check_parsed(self):
if not self._parsed:
self._parse()
def confirm_no_error(self):
self.check_parsed()
if self._error:
raise self.ParsingError()
def error(self):
self.check_parsed()
return self._error
def name(self):
# first see if we can do it without processing.
if self._parse_data is not None:
return self._parse_data.name
self.check_parsed()
return self._retrieved_data.compounddef.name
| gpl-3.0 |
Mirantis/tempest | tempest/api/data_processing/test_data_sources.py | 2 | 5748 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.data_processing import base as dp_base
from tempest.common.utils import data_utils
from tempest import test
class DataSourceTest(dp_base.BaseDataProcessingTest):
@classmethod
def setUpClass(cls):
super(DataSourceTest, cls).setUpClass()
cls.swift_data_source_with_creds = {
'url': 'swift://sahara-container.sahara/input-source',
'description': 'Test data source',
'credentials': {
'user': cls.os.credentials.username,
'password': cls.os.credentials.password
},
'type': 'swift'
}
cls.swift_data_source = cls.swift_data_source_with_creds.copy()
del cls.swift_data_source['credentials']
cls.local_hdfs_data_source = {
'url': 'input-source',
'description': 'Test data source',
'type': 'hdfs'
}
cls.external_hdfs_data_source = {
'url': 'hdfs://172.18.168.2:8020/usr/hadoop/input-source',
'description': 'Test data source',
'type': 'hdfs'
}
def _create_data_source(self, source_body, source_name=None):
"""Creates Data Source with optional name specified.
It creates a link to input-source file (it may not exist), ensures
source name and response body. Returns id and name of created source.
"""
if not source_name:
# generate random name if it's not specified
source_name = data_utils.rand_name('sahara-data-source')
# create data source
resp_body = self.create_data_source(source_name, **source_body)
# ensure that source created successfully
self.assertEqual(source_name, resp_body['name'])
if source_body['type'] == 'swift':
source_body = self.swift_data_source
self.assertDictContainsSubset(source_body, resp_body)
return resp_body['id'], source_name
def _list_data_sources(self, source_info):
# check for data source in list
_, sources = self.client.list_data_sources()
sources_info = [(source['id'], source['name']) for source in sources]
self.assertIn(source_info, sources_info)
def _get_data_source(self, source_id, source_name, source_body):
# check data source fetch by id
_, source = self.client.get_data_source(source_id)
self.assertEqual(source_name, source['name'])
self.assertDictContainsSubset(source_body, source)
@test.attr(type='smoke')
def test_swift_data_source_create(self):
self._create_data_source(self.swift_data_source_with_creds)
@test.attr(type='smoke')
def test_swift_data_source_list(self):
source_info = (
self._create_data_source(self.swift_data_source_with_creds))
self._list_data_sources(source_info)
@test.attr(type='smoke')
def test_swift_data_source_get(self):
source_id, source_name = (
self._create_data_source(self.swift_data_source_with_creds))
self._get_data_source(source_id, source_name, self.swift_data_source)
@test.attr(type='smoke')
def test_swift_data_source_delete(self):
source_id, _ = (
self._create_data_source(self.swift_data_source_with_creds))
# delete the data source by id
self.client.delete_data_source(source_id)
@test.attr(type='smoke')
def test_local_hdfs_data_source_create(self):
self._create_data_source(self.local_hdfs_data_source)
@test.attr(type='smoke')
def test_local_hdfs_data_source_list(self):
source_info = self._create_data_source(self.local_hdfs_data_source)
self._list_data_sources(source_info)
@test.attr(type='smoke')
def test_local_hdfs_data_source_get(self):
source_id, source_name = (
self._create_data_source(self.local_hdfs_data_source))
self._get_data_source(
source_id, source_name, self.local_hdfs_data_source)
@test.attr(type='smoke')
def test_local_hdfs_data_source_delete(self):
source_id, _ = self._create_data_source(self.local_hdfs_data_source)
# delete the data source by id
self.client.delete_data_source(source_id)
@test.attr(type='smoke')
def test_external_hdfs_data_source_create(self):
self._create_data_source(self.external_hdfs_data_source)
@test.attr(type='smoke')
def test_external_hdfs_data_source_list(self):
source_info = self._create_data_source(self.external_hdfs_data_source)
self._list_data_sources(source_info)
@test.attr(type='smoke')
def test_external_hdfs_data_source_get(self):
source_id, source_name = (
self._create_data_source(self.external_hdfs_data_source))
self._get_data_source(
source_id, source_name, self.external_hdfs_data_source)
@test.attr(type='smoke')
def test_external_hdfs_data_source_delete(self):
source_id, _ = self._create_data_source(self.external_hdfs_data_source)
# delete the data source by id
self.client.delete_data_source(source_id)
| apache-2.0 |
vitan/hue | desktop/core/ext-py/Django-1.6.10/tests/context_processors/tests.py | 63 | 1328 | """
Tests for Django's bundled context processors.
"""
from django.test import TestCase
class RequestContextProcessorTests(TestCase):
"""
Tests for the ``django.core.context_processors.request`` processor.
"""
urls = 'context_processors.urls'
def test_request_attributes(self):
"""
Test that the request object is available in the template and that its
attributes can't be overridden by GET and POST parameters (#3828).
"""
url = '/request_attrs/'
# We should have the request object in the template.
response = self.client.get(url)
self.assertContains(response, 'Have request')
# Test is_secure.
response = self.client.get(url)
self.assertContains(response, 'Not secure')
response = self.client.get(url, {'is_secure': 'blah'})
self.assertContains(response, 'Not secure')
response = self.client.post(url, {'is_secure': 'blah'})
self.assertContains(response, 'Not secure')
# Test path.
response = self.client.get(url)
self.assertContains(response, url)
response = self.client.get(url, {'path': '/blah/'})
self.assertContains(response, url)
response = self.client.post(url, {'path': '/blah/'})
self.assertContains(response, url)
| apache-2.0 |
felixfontein/ansible | test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py | 29 | 6251 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Matt Martz <[email protected]>
# Copyright (C) 2016 Rackspace US, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import runpy
import json
import os
import subprocess
import sys
from contextlib import contextmanager
from ansible.executor.powershell.module_manifest import PSModuleDepFinder
from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS
from ansible.module_utils.six import reraise
from ansible.module_utils._text import to_bytes, to_text
from .utils import CaptureStd, find_executable, get_module_name_from_filename
class AnsibleModuleCallError(RuntimeError):
pass
class AnsibleModuleImportError(ImportError):
pass
class AnsibleModuleNotInitialized(Exception):
pass
class _FakeAnsibleModuleInit:
def __init__(self):
self.args = tuple()
self.kwargs = {}
self.called = False
def __call__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.called = True
raise AnsibleModuleCallError('AnsibleModuleCallError')
def _fake_load_params():
pass
@contextmanager
def setup_env(filename):
# Used to clean up imports later
pre_sys_modules = list(sys.modules.keys())
fake = _FakeAnsibleModuleInit()
module = __import__('ansible.module_utils.basic').module_utils.basic
_original_init = module.AnsibleModule.__init__
_original_load_params = module._load_params
setattr(module.AnsibleModule, '__init__', fake)
setattr(module, '_load_params', _fake_load_params)
try:
yield fake
finally:
setattr(module.AnsibleModule, '__init__', _original_init)
setattr(module, '_load_params', _original_load_params)
# Clean up imports to prevent issues with mutable data being used in modules
for k in list(sys.modules.keys()):
# It's faster if we limit to items in ansible.module_utils
# But if this causes problems later, we should remove it
if k not in pre_sys_modules and k.startswith('ansible.module_utils.'):
del sys.modules[k]
def get_ps_argument_spec(filename, collection):
fqc_name = get_module_name_from_filename(filename, collection)
pwsh = find_executable('pwsh')
if not pwsh:
raise FileNotFoundError('Required program for PowerShell arg spec inspection "pwsh" not found.')
module_path = os.path.join(os.getcwd(), filename)
b_module_path = to_bytes(module_path, errors='surrogate_or_strict')
with open(b_module_path, mode='rb') as module_fd:
b_module_data = module_fd.read()
ps_dep_finder = PSModuleDepFinder()
ps_dep_finder.scan_module(b_module_data, fqn=fqc_name)
# For ps_argspec.ps1 to compile Ansible.Basic it also needs the AddType module_util.
ps_dep_finder._add_module((b"Ansible.ModuleUtils.AddType", ".psm1", None), wrapper=False)
util_manifest = json.dumps({
        'module_path': to_text(module_path, errors='surrogate_or_strict'),
'ansible_basic': ps_dep_finder.cs_utils_module["Ansible.Basic"]['path'],
'ps_utils': dict([(name, info['path']) for name, info in ps_dep_finder.ps_modules.items()]),
})
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
proc = subprocess.Popen([script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=False)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise AnsibleModuleImportError("STDOUT:\n%s\nSTDERR:\n%s" % (stdout.decode('utf-8'), stderr.decode('utf-8')))
kwargs = json.loads(stdout)
# the validate-modules code expects the options spec to be under the argument_spec key not options as set in PS
kwargs['argument_spec'] = kwargs.pop('options', {})
return kwargs['argument_spec'], (), kwargs
def get_py_argument_spec(filename, collection):
name = get_module_name_from_filename(filename, collection)
with setup_env(filename) as fake:
try:
with CaptureStd():
runpy.run_module(name, run_name='__main__', alter_sys=True)
except AnsibleModuleCallError:
pass
except BaseException as e:
# we want to catch all exceptions here, including sys.exit
reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2])
if not fake.called:
raise AnsibleModuleNotInitialized()
try:
# for ping kwargs == {'argument_spec':{'data':{'type':'str','default':'pong'}}, 'supports_check_mode':True}
if 'argument_spec' in fake.kwargs:
argument_spec = fake.kwargs['argument_spec']
else:
argument_spec = fake.args[0]
# If add_file_common_args is truish, add options from FILE_COMMON_ARGUMENTS when not present.
# This is the only modification to argument_spec done by AnsibleModule itself, and which is
# not caught by setup_env's AnsibleModule replacement
if fake.kwargs.get('add_file_common_args'):
for k, v in FILE_COMMON_ARGUMENTS.items():
if k not in argument_spec:
argument_spec[k] = v
return argument_spec, fake.args, fake.kwargs
except (TypeError, IndexError):
return {}, (), {}
def get_argument_spec(filename, collection):
if filename.endswith('.py'):
return get_py_argument_spec(filename, collection)
else:
return get_ps_argument_spec(filename, collection)
| gpl-3.0 |
HGeerlings/eratosthenes | neigh/alt/traverse.py | 2 | 3052 | """
Exploring the use of plot.ly for visualizing correctness of a naive nearest-neighbor algorithm.
"""
from numpy.linalg import norm
import numpy as np
import operator
import random
import time
import plotly.plotly as py
import plotly.tools as tls
from plotly.graph_objs import *
from neigh_perf import get_tsne_mapping
mapping = get_tsne_mapping()
def naive_nn(pt, pts):
values = (norm(pt-other) for other in pts)
# returns (min_index, min_value)
return min(enumerate(values), key=operator.itemgetter(1))
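# A minimal illustration (made-up points): with pt = np.array([0, 0]) and
# pts = [np.array([3, 4]), np.array([1, 1])], naive_nn returns
# (1, 1.4142...), i.e. the index of the nearest point and its distance.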
def random_naive_neighbor_walk():
"""Simply to get a feel for timing of naive nearest-neighbor search.
Sample without replacement from the mapping and find the naive_nn
from all remaining points. Takes ~6 seconds.
"""
indices = range(len(mapping))
random.shuffle(indices)
pts = [mapping[i]['pt'] for i in indices]
while len(pts) > 1:
naive_nn(pts.pop(), pts)
def init_progress_plot():
stream_ids = tls.get_credentials_file()['stream_ids']
unseen_pts = np.asarray([m['pt'] for m in mapping if 'seen' not in m])
x, y = unseen_pts[:,0], unseen_pts[:,1]
unseen_trace = Scatter(x=x, y=y, mode='markers',
stream=Stream(token=stream_ids[0]))
seen_pts = np.asarray([m['pt'] for m in mapping if 'seen' in m])
x, y = (seen_pts[:,0], seen_pts[:,1]) if len(seen_pts) else ([], [])
seen_trace = Scatter(x=x, y=y, mode='markers',
stream=Stream(token=stream_ids[1]))
data = Data([unseen_trace, seen_trace])
layout = Layout(width=600, height=600)
plot = py.plot(Figure(data=data, layout=layout),
filename='erato-traversal-progress')
stream = {'unseen': py.Stream(stream_ids[0]),
'seen': py.Stream(stream_ids[1])}
for v in stream.values():
v.open()
return stream
# XXX: Mutates `mapping`, so you'll want to reload with
# `mapping = get_tsne_mapping()` on a new traverse().
def traverse(plotting=True, stream=None, plotting_delay=5):
pt = random.choice(mapping)['pt']
for i in range(len(mapping)):
unseen = [m for m in mapping if 'seen' not in m]
unseen_pts = np.asarray([m['pt'] for m in unseen])
if plotting:
# Update plot trace for unseen
x, y = unseen_pts[:,0], unseen_pts[:,1]
stream['unseen'].write(dict(x=x, y=y))
nn_idx, nn_val = naive_nn(pt, unseen_pts) # find neighbor
unseen[nn_idx]['seen'] = True # mark as seen
pt = unseen[nn_idx]['pt'] # update current point
if plotting:
# Update plot trace for seen
seen_pts = np.asarray([m['pt'] for m in mapping if 'seen' in m])
x, y = (seen_pts[:,0], seen_pts[:,1]) if len(seen_pts) else ([], [])
stream['seen'].write(dict(x=x, y=y))
# Space out iterations so you can visually verify nearest-neighbor
# selection
time.sleep(plotting_delay)
if __name__ == "__main__":
traverse(stream=init_progress_plot())
| bsd-2-clause |
gmacchi93/serverInfoParaguay | apps/venv/lib/python2.7/site-packages/django/contrib/gis/utils/srs.py | 450 | 3123 | from django.contrib.gis.gdal import SpatialReference
from django.db import DEFAULT_DB_ALIAS, connections
def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
database=None):
"""
This function takes a GDAL SpatialReference system and adds its information
to the `spatial_ref_sys` table of the spatial backend. Doing this enables
database-level spatial transformations for the backend. Thus, this utility
is useful for adding spatial reference systems not included by default with
the backend:
>>> from django.contrib.gis.utils import add_srs_entry
>>> add_srs_entry(3857)
Keyword Arguments:
auth_name:
This keyword may be customized with the value of the `auth_name` field.
Defaults to 'EPSG'.
auth_srid:
This keyword may be customized with the value of the `auth_srid` field.
Defaults to the SRID determined by GDAL.
ref_sys_name:
For SpatiaLite users only, sets the value of the `ref_sys_name` field.
Defaults to the name determined by GDAL.
database:
The name of the database connection to use; the default is the value
of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
is 'default').
"""
if not database:
database = DEFAULT_DB_ALIAS
connection = connections[database]
if not hasattr(connection.ops, 'spatial_version'):
raise Exception('The `add_srs_entry` utility only works '
'with spatial backends.')
if not connection.features.supports_add_srs_entry:
raise Exception('This utility does not support your database backend.')
SpatialRefSys = connection.ops.spatial_ref_sys()
# If argument is not a `SpatialReference` instance, use it as parameter
# to construct a `SpatialReference` instance.
if not isinstance(srs, SpatialReference):
srs = SpatialReference(srs)
if srs.srid is None:
raise Exception('Spatial reference requires an SRID to be '
'compatible with the spatial backend.')
# Initializing the keyword arguments dictionary for both PostGIS
# and SpatiaLite.
kwargs = {'srid': srs.srid,
'auth_name': auth_name,
'auth_srid': auth_srid or srs.srid,
'proj4text': srs.proj4,
}
# Backend-specific fields for the SpatialRefSys model.
srs_field_names = {f.name for f in SpatialRefSys._meta.get_fields()}
if 'srtext' in srs_field_names:
kwargs['srtext'] = srs.wkt
if 'ref_sys_name' in srs_field_names:
# Spatialite specific
kwargs['ref_sys_name'] = ref_sys_name or srs.name
# Creating the spatial_ref_sys model.
try:
# Try getting via SRID only, because using all kwargs may
# differ from exact wkt/proj in database.
SpatialRefSys.objects.using(database).get(srid=srs.srid)
except SpatialRefSys.DoesNotExist:
SpatialRefSys.objects.using(database).create(**kwargs)
# Alias is for backwards-compatibility purposes.
add_postgis_srs = add_srs_entry
| apache-2.0 |
stonegithubs/odoo | addons/account_voucher/__init__.py | 358 | 1098 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_voucher
import invoice
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
beast-2e/vlla | VideoDisplay_24bit/tests/high_frame_rate_24bit.py | 1 | 1090 | # Usage:
# python high_frame_rate_24bit.py > /dev/ttyACM1
import time
import random
fpscolor = [
[255, 250, 0],
[255, 150, 0],
[255, 0, 150],
[150, 0, 255]
]
while True:
    # Pattern 1: scrolling vertical stripes (10 px on / 10 px off) across the
    # 60-column frame; each frame is one 255 sync byte plus 60*16 RGB triples.
    for f, fps in enumerate([30, 60, 120, 240]):
for i in range(3*fps):
print bytearray([255]+map(lambda x: (fpscolor[f][x%3] if (((x/3)%60)+240*i/(fps))%20 < 10 else 0), range(60 * 16 * 3)))
time.sleep(1.0/fps)
    # Pattern 2: flash the whole frame on even frames, blank on odd frames.
    for f, fps in enumerate([30, 60, 120, 240]):
for i in range(3*fps):
print bytearray([255]+map(lambda x: (fpscolor[f][x%3] if i%2==0 else 0), range(60 * 16 * 3)))
time.sleep(1.0/fps)
    # Pattern 3: a wipe that fills the frame in proportion to elapsed time
    # over each 3-second run.
    for f, fps in enumerate([30, 60, 120, 240]):
for i in range(3*fps):
print bytearray([255]+map(lambda x: (fpscolor[f][x%3] if (1.0*x/3/16/60) < 1.0*i/(3*fps) else 0), range(60 * 16 * 3)))
time.sleep(1.0/fps)
# for fps in [30, 60, 120, 240]:
# for i in range(3*fps):
# print bytearray([255]+map(lambda x: random.randrange(0, 255), range(60 * 16 * 3)))
# time.sleep(1.0/fps)
| mit |
wriggityWrecked/WebScraping | slackReceiver.py | 1 | 6847 | from slackclient import SlackClient
from datetime import timedelta
from subprocess import *
from websocket import *
import os
import datetime
import logging
import logging.handlers
import time
import traceback
import sys
import threading
from utils import robotTextResponses
#todo import from tools
tokenFilePathAndName = os.path.join( os.path.dirname( __file__ ), 'utils/slackToken' )
logDirectory = './data/slackReceiverLog'
logFileName = logDirectory + '/slackReceiverLog_' + datetime.datetime.now().strftime( "%Y-%m-%dT%H:%M:%S" ) + '.log'
commandKey = 'bash'
commandChannel = 'C4UC35TLN'
dbId = 'U4SDBCXBJ'
debugSlackChannel = 'robot_comms'
scrapeKey = 'scrape'
#scrapeOptionsMap = { 'knl' : scrapeKnl, 'etre' : scrapeEtre, 'bh' : scrapeBelgianHappiness, 'biab' : scrapeBiab }
# Placeholder so the module loads while the scraper imports above are still
# todo; populate this dict with the scraper modules to enable manual scraping.
scrapeOptionsMap = {}
helpKey1 = 'help'
helpKey2 = '?'
maxKeepAlive = 60
if not os.path.isdir( logDirectory ):
print( 'creating ' + logDirectory )
os.makedirs( logDirectory )
logger = logging.getLogger( __name__ )
logging.basicConfig( filename=logFileName, filemode='w', level=logging.INFO, format='%(asctime)s:%(levelname)s:%(message)s', datefmt='%Y-%m-%dT%H:%M:%S' )
handler = logging.handlers.RotatingFileHandler( logFileName, maxBytes=5000, backupCount=5 )
logger.addHandler( handler )
logger.addHandler( logging.StreamHandler(sys.stdout) )
logger.setLevel( logging.INFO )
#todo global thread dictionary to keep track of manual scrapes
#this should be a lib (for fun text responses)
def handleText( text, channel, user ):
logger.info( 'handleText: ' + text + ' ' + channel + ' ' + user )
if not text:
logger.warn( 'ignoring empty message' )
return ''
#todo CPU
#todo diskspace
#split on spaces and grab the first word
key = text.split(" ")
if len( key ) == 0:
logger.warn( 'empty key!' )
return ''
key = key[0]
#handle scraping
if key.lower() == scrapeKey.lower():
return handleScrape( text.replace( scrapeKey + " ", "" ) )
#handle commands
if key.lower() == commandKey.lower():
#there can only be one
if user != dbId:
return 'Sorry, you do not have permission do execute that command.'
return handleCommand( text.replace( commandKey + " ", "" ) )
if key.lower() == helpKey1 or key.lower() == helpKey2:
return handleHelp()
#no commands, let's have fun with parsing text
parsed, response = robotTextResponses.handleTextInput( text )
if parsed:
return response
return ''
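# Dispatch sketch (hypothetical inputs): handleText('scrape knl', ch, user)
# routes to handleScrape('knl'); handleText('bash ls', ch, dbId) routes to
# handleCommand('ls'); any other text falls through to robotTextResponses.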
def handleCommand( command ):
logger.info( 'handleCommand: ' + str( command ) )
try:
p = Popen( command, shell=True, stdout=PIPE )
stdout, stderr = p.communicate()
logger.debug( stdout )
logger.debug( stderr )
return str( stdout ) + '\n' + str( stderr )
    except Exception as e:
        exc_type, exc_value, exc_tb = sys.exc_info()
        # Return a plain string (not a tuple) so the reply can be sent as-is.
        return 'Caught ' + str( "".join( traceback.format_exception( exc_type, exc_value, exc_tb ) ) )
def handleScrape( command ):
command = command.strip().lower()
logger.info( 'handleScrape: ' + command )
if command in scrapeOptionsMap:
#todo scrape ALLLLLLLL
target = scrapeOptionsMap[ command ]
#check if already running
if not getattr( target, 'isRunning')():
#not foolproof, someone might have beat us to the punch
#todo hang onto the thread to kill if taking too long, etc
t = threading.Thread( target=getattr( target, 'run' ), args=() )
#t.daemon = True
t.start()
return 'Acknowledged command, started manual ' + command + ' scraping'
else:
return 'Already running ' + str( target ) + '.\nPlease wait for completion.'
#invalid scrape option
return "Invalid scrape option: " + command + '\nAvailable options: ' + ', '.join( scrapeOptionsMap.keys() )
def handleHelp():
helpMessage = "Don't Panic\n\n" + "Available commands:\n\n" + "scrape ["
helpMessage += ' | '.join( scrapeOptionsMap.keys() )+ ']\n\n' + 'bash myBashCommand (only if you are theLostWizard)\n\n'
return helpMessage
def sendReply( sc, ts, channelId, replyText ):
"""http://slackapi.github.io/python-slackclient/real_time_messaging.html#connecting-to-the-real-time-messaging-api"""
logger.debug( ts + ' ' + channelId + ' ' + replyText )
#sc.rtm_send_message(replyText, channelId, ts, True) -- fails
output = sc.api_call(
'chat.postMessage',
ts = ts,
channel = channelId,
text = replyText,
reply_broadcast=True
)
logger.debug( output )
def main():
sleepTimeSeconds = 60
allowableTimeDeltaSeconds = 3 * sleepTimeSeconds
slackToken = "";
with open( tokenFilePathAndName ) as f:
slackToken = str( f.read() ).strip()
print slackToken
sc = SlackClient( slackToken )
#todo post hello
if sc.rtm_connect():
print sc.server
keepAliveCount = 0
while True:
try:
r = sc.rtm_read()
logger.debug( str( time.time() ) + ' ' + str( r ) )
#check time, if it's the time delta is greater than our polling then
if len( r ) > 0:
keepAliveCount = 0
response = r[0]
#check for relevant time key
if 'ts' in response:
ts = response['ts']
elapsedTimeSeconds = time.time() - float( ts )
if elapsedTimeSeconds < allowableTimeDeltaSeconds:
if 'channel' in response and 'user' in response:
#get the message
text = ''
if 'text' in response:
text = response['text']
replyText = handleText( text, response['channel'], response['user'] )
if replyText:
keepAliveCount = 0
sendReply( sc, ts, response['channel'], replyText )
else:
logger.debug( 'replyText is empty!' )
if 'channel' not in response:
logger.debug( 'No Channel in response!' )
if 'user' not in response:
logger.debug( 'No user in response!' )
else:
logger.warn( 'ignoring stale response, elapsedTimeSeconds=' + str( timedelta( seconds=( elapsedTimeSeconds ) ) ) )
if not r and keepAliveCount > maxKeepAlive:
logger.info( 'Sleeping ' + str( sleepTimeSeconds ) + 's' )
time.sleep( sleepTimeSeconds )
keepAliveCount = 0
else:
time.sleep( 2 )
keepAliveCount += 1
except WebSocketConnectionClosedException:
exc_type, exc_value, exc_tb = sys.exc_info()
logger.warn( 'Caught WebSocketConnectionClosedException:' + str( "".join( traceback.format_exception( exc_type, exc_value, exc_tb ) ) ) )
#try to re-connect
sc.rtm_connect()
except Exception as e:
exc_type, exc_value, exc_tb = sys.exc_info()
logger.error( 'Caught ' + str( "".join( traceback.format_exception( exc_type, exc_value, exc_tb ) ) ) )
else:
logger.error( "Connection Failed, invalid token?" )
#todo try and post with sc "goodbye"
if __name__ == "__main__":
main() | apache-2.0 |
seanballais/SAElections | node_modules/grunt-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/ninja.py | 372 | 89149 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_PREFIX': 'lib',
# Gyp expects the following variables to be expandable by the build
# system to the appropriate locations. Ninja prefers paths to be
# known at gyp time. To resolve this, introduce special
# variables starting with $! and $| (which begin with a $ so gyp knows it
# should be treated specially, but is otherwise an invalid
# ninja/shell variable) that are passed to gyp here but expanded
# before writing out into the target .ninja files; see
# ExpandSpecial.
# $! is used for variables that represent a path and that can only appear at
# the start of a string, while $| is used for variables that can appear
# anywhere in a string.
'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
'PRODUCT_DIR': '$!PRODUCT_DIR',
'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
# Special variables that may be used by gyp 'rule' targets.
# We generate definitions for these variables on the fly when processing a
# rule.
'RULE_INPUT_ROOT': '${root}',
'RULE_INPUT_DIRNAME': '${dirname}',
'RULE_INPUT_PATH': '${source}',
'RULE_INPUT_EXT': '${ext}',
'RULE_INPUT_NAME': '${name}',
}
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
# TODO: figure out how to not build extra host objects in the non-cross-compile
# case when this is enabled, and enable unconditionally.
generator_supports_multiple_toolsets = (
os.environ.get('GYP_CROSSCOMPILE') or
os.environ.get('AR_host') or
os.environ.get('CC_host') or
os.environ.get('CXX_host') or
os.environ.get('AR_target') or
os.environ.get('CC_target') or
os.environ.get('CXX_target'))
def StripPrefix(arg, prefix):
if arg.startswith(prefix):
return arg[len(prefix):]
return arg
def QuoteShellArgument(arg, flavor):
"""Quote a string such that it will be interpreted as a single argument
by the shell."""
# Rather than attempting to enumerate the bad shell characters, just
# whitelist common OK ones and quote anything else.
if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
return arg # No quoting necessary.
if flavor == 'win':
return gyp.msvs_emulation.QuoteForRspFile(arg)
return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
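# For example (illustrative): QuoteShellArgument('foo=bar', 'linux') returns
# the string unchanged, while QuoteShellArgument('a b', 'linux') returns
# "'a b'" so the shell treats it as a single argument.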
def Define(d, flavor):
"""Takes a preprocessor define and returns a -D parameter that's ninja- and
shell-escaped."""
if flavor == 'win':
    # cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
d = d.replace('#', '\\%03o' % ord('#'))
return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
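# For example (illustrative): Define('ENABLE_FOO=1', 'linux') yields
# '-DENABLE_FOO=1'; on 'win' a literal '#' in the define is first
# octal-encoded ('#' becomes \043) before Rsp-file quoting is applied.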
def AddArch(output, arch):
"""Adds an arch string to an output path."""
output, extension = os.path.splitext(output)
return '%s.%s%s' % (output, arch, extension)
class Target:
"""Target represents the paths used within a single gyp target.
Conceptually, building a single target A is a series of steps:
1) actions/rules/copies generates source/resources/etc.
2) compiles generates .o files
3) link generates a binary (library/executable)
4) bundle merges the above in a mac bundle
(Any of these steps can be optional.)
From a build ordering perspective, a dependent target B could just
depend on the last output of this series of steps.
But some dependent commands sometimes need to reach inside the box.
For example, when linking B it needs to get the path to the static
library generated by A.
This object stores those paths. To keep things simple, member
variables only store concrete paths to single files, while methods
compute derived values like "the last output of the target".
"""
def __init__(self, type):
# Gyp type ("static_library", etc.) of this target.
self.type = type
# File representing whether any input dependencies necessary for
# dependent actions have completed.
self.preaction_stamp = None
# File representing whether any input dependencies necessary for
# dependent compiles have completed.
self.precompile_stamp = None
# File representing the completion of actions/rules/copies, if any.
self.actions_stamp = None
# Path to the output of the link step, if any.
self.binary = None
# Path to the file representing the completion of building the bundle,
# if any.
self.bundle = None
# On Windows, incremental linking requires linking against all the .objs
# that compose a .lib (rather than the .lib itself). That list is stored
# here.
self.component_objs = None
# Windows only. The import .lib is the output of a build step, but
# because dependents only link against the lib (not both the lib and the
# dll) we keep track of the import library here.
self.import_lib = None
def Linkable(self):
"""Return true if this is a target that can be linked against."""
return self.type in ('static_library', 'shared_library')
def UsesToc(self, flavor):
"""Return true if the target should produce a restat rule based on a TOC
file."""
# For bundles, the .TOC should be produced for the binary, not for
# FinalOutput(). But the naive approach would put the TOC file into the
# bundle, so don't do this for bundles for now.
if flavor == 'win' or self.bundle:
return False
return self.type in ('shared_library', 'loadable_module')
def PreActionInput(self, flavor):
"""Return the path, if any, that should be used as a dependency of
any dependent action step."""
if self.UsesToc(flavor):
return self.FinalOutput() + '.TOC'
return self.FinalOutput() or self.preaction_stamp
def PreCompileInput(self):
"""Return the path, if any, that should be used as a dependency of
any dependent compile step."""
return self.actions_stamp or self.precompile_stamp
def FinalOutput(self):
"""Return the last output of the target, which depends on all prior
steps."""
return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
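# A concrete sketch (hypothetical target): for a gyp file foo/bar.gyp, a
# build dir out/Debug and a target 'targ', GypPathToNinja('baz/a.cc')
# yields '../../foo/baz/a.cc', while GypPathToUniqueOutput('baz/a.o')
# yields 'obj/foo/baz/targ.a.o'.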
class NinjaWriter:
def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
output_file, toplevel_build, output_file_name, flavor,
toplevel_dir=None):
"""
base_dir: path from source root to directory containing this gyp file,
by gyp semantics, all input paths are relative to this
build_dir: path from source root to build output
toplevel_dir: path to the toplevel directory
"""
self.qualified_target = qualified_target
self.target_outputs = target_outputs
self.base_dir = base_dir
self.build_dir = build_dir
self.ninja = ninja_syntax.Writer(output_file)
self.toplevel_build = toplevel_build
self.output_file_name = output_file_name
self.flavor = flavor
self.abs_build_dir = None
if toplevel_dir is not None:
self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
build_dir))
self.obj_ext = '.obj' if flavor == 'win' else '.o'
if flavor == 'win':
# See docstring of msvs_emulation.GenerateEnvironmentFiles().
self.win_env = {}
for arch in ('x86', 'x64'):
self.win_env[arch] = 'environment.' + arch
# Relative path from build output dir to base dir.
build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
self.build_to_base = os.path.join(build_to_top, base_dir)
# Relative path from base dir to build dir.
base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
self.base_to_build = os.path.join(base_to_top, build_dir)
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = '$!PRODUCT_DIR'
if PRODUCT_DIR in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
path = path.replace(PRODUCT_DIR + '/', '')
path = path.replace(PRODUCT_DIR + '\\', '')
path = path.replace(PRODUCT_DIR, '.')
INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
if INTERMEDIATE_DIR in path:
int_dir = self.GypPathToUniqueOutput('gen')
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
path = path.replace(INTERMEDIATE_DIR,
os.path.join(product_dir or '', int_dir))
CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
path = path.replace(CONFIGURATION_NAME, self.config_name)
return path
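  # Illustrative expansions: ExpandSpecial('$!PRODUCT_DIR/gen/x') with no
  # product_dir yields 'gen/x' (the cwd is assumed to be the product dir),
  # while product_dir='out/Debug' yields 'out/Debug/gen/x'.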
def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
if self.flavor == 'win':
path = self.msvs_settings.ConvertVSMacros(
path, config=self.config_name)
path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
dirname)
path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
return path
def GypPathToNinja(self, path, env=None):
"""Translate a gyp path to a ninja path, optionally expanding environment
variable references in |path| with |env|.
See the above discourse on path conversions."""
if env:
if self.flavor == 'mac':
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
elif self.flavor == 'win':
path = gyp.msvs_emulation.ExpandMacros(path, env)
if path.startswith('$!'):
expanded = self.ExpandSpecial(path)
if self.flavor == 'win':
expanded = os.path.normpath(expanded)
return expanded
if '$|' in path:
path = self.ExpandSpecial(path)
assert '$' not in path, path
return os.path.normpath(os.path.join(self.build_to_base, path))
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith('$'), path
# Translate the path following this scheme:
# Input: foo/bar.gyp, target targ, references baz/out.o
# Output: obj/foo/baz/targ.out.o (if qualified)
# obj/foo/baz/out.o (otherwise)
# (and obj.host instead of obj for cross-compiles)
#
# Why this scheme and not some other one?
# 1) for a given input, you can compute all derived outputs by matching
# its path, even if the input is brought via a gyp file with '..'.
# 2) simple files like libraries and stamps have a simple filename.
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
path_dir, path_basename = os.path.split(path)
if qualified:
path_basename = self.name + '.' + path_basename
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
def WriteCollapsedDependencies(self, name, targets):
"""Given a list of targets, return a path for a single file
representing the result of building all the targets or None.
Uses a stamp file if necessary."""
assert targets == filter(None, targets), targets
if len(targets) == 0:
return None
if len(targets) > 1:
stamp = self.GypPathToUniqueOutput(name + '.stamp')
targets = self.ninja.build(stamp, 'stamp', targets)
self.ninja.newline()
return targets[0]
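  # Illustrative collapse: given ['a.stamp', 'b.stamp'], a single 'stamp'
  # edge is emitted producing one qualified <name>.stamp output, and that
  # single path is returned.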
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
return '%s.%s.ninja' % (output_file_base, arch)
def WriteSpec(self, spec, config_name, generator_flags):
"""The main entry point for NinjaWriter: write the build rules for a spec.
Returns a Target object, which represents the output paths for this spec.
Returns None if there are no outputs (e.g. a settings-only 'none' type
target)."""
self.config_name = config_name
self.name = spec['target_name']
self.toolset = spec['toolset']
config = spec['configurations'][config_name]
self.target = Target(spec['type'])
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
# Track if this target contains any C++ files, to decide if gcc or g++
# should be used for linking.
self.uses_cpp = False
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
self.xcode_settings = self.msvs_settings = None
if self.flavor == 'mac':
self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
if self.flavor == 'win':
self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
generator_flags)
arch = self.msvs_settings.GetArch(config_name)
self.ninja.variable('arch', self.win_env[arch])
self.ninja.variable('cc', '$cl_' + arch)
self.ninja.variable('cxx', '$cl_' + arch)
if self.flavor == 'mac':
self.archs = self.xcode_settings.GetActiveArchs(config_name)
if len(self.archs) > 1:
self.arch_subninjas = dict(
(arch, ninja_syntax.Writer(
OpenOutput(os.path.join(self.toplevel_build,
self._SubninjaNameForArch(arch)),
'w')))
for arch in self.archs)
# Compute predepends for all rules.
# actions_depends is the dependencies this target depends on before running
# any of its action/rule/copy steps.
# compile_depends is the dependencies this target depends on before running
# any of its compile steps.
actions_depends = []
compile_depends = []
# TODO(evan): it is rather confusing which things are lists and which
# are strings. Fix these.
if 'dependencies' in spec:
for dep in spec['dependencies']:
if dep in self.target_outputs:
target = self.target_outputs[dep]
actions_depends.append(target.PreActionInput(self.flavor))
compile_depends.append(target.PreCompileInput())
actions_depends = filter(None, actions_depends)
compile_depends = filter(None, compile_depends)
actions_depends = self.WriteCollapsedDependencies('actions_depends',
actions_depends)
compile_depends = self.WriteCollapsedDependencies('compile_depends',
compile_depends)
self.target.preaction_stamp = actions_depends
self.target.precompile_stamp = compile_depends
# Write out actions, rules, and copies. These must happen before we
# compile any sources, so compute a list of predependencies for sources
# while we do it.
extra_sources = []
mac_bundle_depends = []
self.target.actions_stamp = self.WriteActionsRulesCopies(
spec, extra_sources, actions_depends, mac_bundle_depends)
# If we have actions/rules/copies, we depend directly on those, but
# otherwise we depend on dependent target's actions/rules/copies etc.
# We never need to explicitly depend on previous target's link steps,
# because no compile ever depends on them.
compile_depends_stamp = (self.target.actions_stamp or compile_depends)
# Write out the compilation steps, if any.
link_deps = []
sources = extra_sources + spec.get('sources', [])
if sources:
if self.flavor == 'mac' and len(self.archs) > 1:
# Write subninja file containing compile and link commands scoped to
# a single arch if a fat binary is being built.
for arch in self.archs:
self.ninja.subninja(self._SubninjaNameForArch(arch))
pch = None
if self.flavor == 'win':
gyp.msvs_emulation.VerifyMissingSources(
sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
pch = gyp.msvs_emulation.PrecompiledHeader(
self.msvs_settings, config_name, self.GypPathToNinja,
self.GypPathToUniqueOutput, self.obj_ext)
else:
pch = gyp.xcode_emulation.MacPrefixHeader(
self.xcode_settings, self.GypPathToNinja,
lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
link_deps = self.WriteSources(
self.ninja, config_name, config, sources, compile_depends_stamp, pch,
spec)
# Some actions/rules output 'sources' that are already object files.
obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
if obj_outputs:
if self.flavor != 'mac' or len(self.archs) == 1:
link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
else:
print "Warning: Actions/rules writing object files don't work with " \
"multiarch targets, dropping. (target %s)" % spec['target_name']
if self.flavor == 'win' and self.target.type == 'static_library':
self.target.component_objs = link_deps
# Write out a link step, if needed.
output = None
is_empty_bundle = not link_deps and not mac_bundle_depends
if link_deps or self.target.actions_stamp or actions_depends:
output = self.WriteTarget(spec, config_name, config, link_deps,
self.target.actions_stamp or actions_depends)
if self.is_mac_bundle:
mac_bundle_depends.append(output)
# Bundle all of the above together, if needed.
if self.is_mac_bundle:
output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
if not output:
return None
assert self.target.FinalOutput(), output
return self.target
def _WinIdlRule(self, source, prebuild, outputs):
"""Handle the implicit VS .idl rule for one source file. Fills |outputs|
with files that are generated."""
outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
source, self.config_name)
outdir = self.GypPathToNinja(outdir)
def fix_path(path, rel=None):
path = os.path.join(outdir, path)
dirname, basename = os.path.split(source)
root, ext = os.path.splitext(basename)
path = self.ExpandRuleVariables(
path, root, dirname, source, ext, basename)
if rel:
path = os.path.relpath(path, rel)
return path
vars = [(name, fix_path(value, outdir)) for name, value in vars]
output = [fix_path(p) for p in output]
vars.append(('outdir', outdir))
vars.append(('idlflags', flags))
input = self.GypPathToNinja(source)
self.ninja.build(output, 'idl', input,
variables=vars, order_only=prebuild)
outputs.extend(output)
def WriteWinIdlFiles(self, spec, prebuild):
"""Writes rules to match MSVS's implicit idl handling."""
assert self.flavor == 'win'
if self.msvs_settings.HasExplicitIdlRules(spec):
return []
outputs = []
for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
self._WinIdlRule(source, prebuild, outputs)
return outputs
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
mac_bundle_depends):
"""Write out the Actions, Rules, and Copies steps. Return a path
representing the outputs of these steps."""
outputs = []
if self.is_mac_bundle:
mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
else:
mac_bundle_resources = []
extra_mac_bundle_resources = []
if 'actions' in spec:
outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
extra_mac_bundle_resources)
if 'rules' in spec:
outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
mac_bundle_resources,
extra_mac_bundle_resources)
if 'copies' in spec:
outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
if 'sources' in spec and self.flavor == 'win':
outputs += self.WriteWinIdlFiles(spec, prebuild)
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
self.WriteMacBundleResources(
extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
self.WriteMacInfoPlist(mac_bundle_depends)
return stamp
def GenerateDescription(self, verb, message, fallback):
"""Generate and return a description of a build step.
|verb| is the short summary, e.g. ACTION or RULE.
|message| is a hand-written description, or None if not available.
|fallback| is the gyp-level name of the step, usable as a fallback.
"""
if self.toolset != 'target':
verb += '(%s)' % self.toolset
if message:
return '%s %s' % (verb, self.ExpandSpecial(message))
else:
return '%s %s: %s' % (verb, self.name, fallback)
def WriteActions(self, actions, extra_sources, prebuild,
extra_mac_bundle_resources):
# Actions cd into the base directory.
env = self.GetSortedXcodeEnv()
if self.flavor == 'win':
env = self.msvs_settings.GetVSMacroEnv(
'$!PRODUCT_DIR', config=self.config_name)
all_outputs = []
for action in actions:
# First write out a rule for the action.
name = '%s_%s' % (action['action_name'],
hashlib.md5(self.qualified_target).hexdigest())
description = self.GenerateDescription('ACTION',
action.get('message', None),
name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
if self.flavor == 'win' else False)
args = action['action']
rule_name, _ = self.WriteNewNinjaRule(name, args, description,
is_cygwin, env=env)
inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
if int(action.get('process_outputs_as_sources', False)):
extra_sources += action['outputs']
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += action['outputs']
outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
# Then write out an edge using the rule.
self.ninja.build(outputs, rule_name, inputs,
order_only=prebuild)
all_outputs += outputs
self.ninja.newline()
return all_outputs
def WriteRules(self, rules, extra_sources, prebuild,
mac_bundle_resources, extra_mac_bundle_resources):
env = self.GetSortedXcodeEnv()
all_outputs = []
for rule in rules:
# First write out a rule for the rule action.
name = '%s_%s' % (rule['rule_name'],
hashlib.md5(self.qualified_target).hexdigest())
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
args = rule['action']
description = self.GenerateDescription(
'RULE',
rule.get('message', None),
('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
if self.flavor == 'win' else False)
rule_name, args = self.WriteNewNinjaRule(
name, args, description, is_cygwin, env=env)
# TODO: if the command references the outputs directly, we should
# simplify it to just use $out.
# Rules can potentially make use of some special variables which
# must vary per source file.
# Compute the list of variables we'll need to provide.
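      # e.g. a rule argument like '${dirname}/${root}.h' forces 'dirname'
      # and 'root' to be recomputed for every source file below.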
special_locals = ('source', 'root', 'dirname', 'ext', 'name')
needed_variables = set(['source'])
for argument in args:
for var in special_locals:
if ('${%s}' % var) in argument:
needed_variables.add(var)
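      # Cygwin's bash sub-shell expects POSIX path separators, so flip
      # backslashes to forward slashes when the rule runs under cygwin.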
def cygwin_munge(path):
if is_cygwin:
return path.replace('\\', '/')
return path
# For each source file, write an edge that generates all the outputs.
for source in rule.get('rule_sources', []):
source = os.path.normpath(source)
dirname, basename = os.path.split(source)
root, ext = os.path.splitext(basename)
# Gather the list of inputs and outputs, expanding $vars if possible.
outputs = [self.ExpandRuleVariables(o, root, dirname,
source, ext, basename)
for o in rule['outputs']]
inputs = [self.ExpandRuleVariables(i, root, dirname,
source, ext, basename)
for i in rule.get('inputs', [])]
if int(rule.get('process_outputs_as_sources', False)):
extra_sources += outputs
was_mac_bundle_resource = source in mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += outputs
# Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
# items in a set and remove them all in a single pass if this becomes
# a performance issue.
if was_mac_bundle_resource:
mac_bundle_resources.remove(source)
extra_bindings = []
for var in needed_variables:
if var == 'root':
extra_bindings.append(('root', cygwin_munge(root)))
elif var == 'dirname':
# '$dirname' is a parameter to the rule action, which means
# it shouldn't be converted to a Ninja path. But we don't
# want $!PRODUCT_DIR in there either.
dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
elif var == 'source':
# '$source' is a parameter to the rule action, which means
# it shouldn't be converted to a Ninja path. But we don't
# want $!PRODUCT_DIR in there either.
source_expanded = self.ExpandSpecial(source, self.base_to_build)
extra_bindings.append(('source', cygwin_munge(source_expanded)))
elif var == 'ext':
extra_bindings.append(('ext', ext))
elif var == 'name':
extra_bindings.append(('name', cygwin_munge(basename)))
else:
assert var == None, repr(var)
inputs = [self.GypPathToNinja(i, env) for i in inputs]
outputs = [self.GypPathToNinja(o, env) for o in outputs]
extra_bindings.append(('unique_name',
hashlib.md5(outputs[0]).hexdigest()))
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs,
order_only=prebuild,
variables=extra_bindings)
all_outputs.extend(outputs)
return all_outputs
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
env = self.GetSortedXcodeEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
path = os.path.normpath(path)
basename = os.path.split(path)[1]
src = self.GypPathToNinja(path, env)
dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
env)
outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
if self.is_mac_bundle:
# gyp has mac_bundle_resources to copy things into a bundle's
# Resources folder, but there's no built-in way to copy files to other
# places in the bundle. Hence, some targets use copies for this. Check
# if this file is copied into the current bundle, and if so add it to
# the bundle depends so that dependent targets get rebuilt if the copy
# input changes.
if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
mac_bundle_depends.append(dst)
return outputs
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource')])
bundle_depends.append(output)
def WriteMacInfoPlist(self, bundle_depends):
"""Write build rules for bundle Info.plist files."""
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, self.GypPathToNinja)
if not info_plist:
return
out = self.ExpandSpecial(out)
if defines:
# Create an intermediate file to store preprocessed results.
intermediate_plist = self.GypPathToUniqueOutput(
os.path.basename(info_plist))
defines = ' '.join([Define(d, self.flavor) for d in defines])
info_plist = self.ninja.build(
intermediate_plist, 'preprocess_infoplist', info_plist,
variables=[('defines',defines)])
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
env = self.ComputeExportEnvString(env)
keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
keys = QuoteShellArgument(json.dumps(keys), self.flavor)
self.ninja.build(out, 'copy_infoplist', info_plist,
variables=[('env', env), ('keys', keys)])
bundle_depends.append(out)
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
precompiled_header, spec):
"""Write build rules to compile all of |sources|."""
if self.toolset == 'host':
self.ninja.variable('ar', '$ar_host')
self.ninja.variable('cc', '$cc_host')
self.ninja.variable('cxx', '$cxx_host')
self.ninja.variable('ld', '$ld_host')
self.ninja.variable('ldxx', '$ldxx_host')
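    # On mac with multiple archs this returns a dict of per-arch link deps;
    # otherwise it returns a single list.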
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteSourcesForArch(
self.ninja, config_name, config, sources, predepends,
precompiled_header, spec)
else:
return dict((arch, self.WriteSourcesForArch(
self.arch_subninjas[arch], config_name, config, sources, predepends,
precompiled_header, spec, arch=arch))
for arch in self.archs)
def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
predepends, precompiled_header, spec, arch=None):
"""Write build rules to compile all of |sources|."""
extra_defines = []
if self.flavor == 'mac':
cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
cflags_c = self.xcode_settings.GetCflagsC(config_name)
cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
cflags_objc = ['$cflags_c'] + \
self.xcode_settings.GetCflagsObjC(config_name)
cflags_objcc = ['$cflags_cc'] + \
self.xcode_settings.GetCflagsObjCC(config_name)
elif self.flavor == 'win':
cflags = self.msvs_settings.GetCflags(config_name)
cflags_c = self.msvs_settings.GetCflagsC(config_name)
cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
extra_defines = self.msvs_settings.GetComputedDefines(config_name)
      # See the comment at cc_command for why there are two .pdb files.
pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
config_name, self.ExpandSpecial)
if not pdbpath_c:
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
pdbpath_c = pdbpath + '.c.pdb'
pdbpath_cc = pdbpath + '.cc.pdb'
self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
else:
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cc = config.get('cflags_cc', [])
# Respect environment variables related to build, but target-specific
# flags can still override them.
if self.toolset == 'target':
cflags_c = (os.environ.get('CPPFLAGS', '').split() +
os.environ.get('CFLAGS', '').split() + cflags_c)
cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
os.environ.get('CXXFLAGS', '').split() + cflags_cc)
defines = config.get('defines', []) + extra_defines
self.WriteVariableList(ninja_file, 'defines',
[Define(d, self.flavor) for d in defines])
if self.flavor == 'win':
self.WriteVariableList(ninja_file, 'rcflags',
[QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
for f in self.msvs_settings.GetRcflags(config_name,
self.GypPathToNinja)])
include_dirs = config.get('include_dirs', [])
env = self.GetSortedXcodeEnv()
if self.flavor == 'win':
env = self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
config=config_name)
include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
config_name)
self.WriteVariableList(ninja_file, 'includes',
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
for i in include_dirs])
pch_commands = precompiled_header.GetPchBuildCommands(arch)
if self.flavor == 'mac':
# Most targets use no precompiled headers, so only write these if needed.
for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
include = precompiled_header.GetInclude(ext, arch)
if include: ninja_file.variable(var, include)
self.WriteVariableList(ninja_file, 'cflags',
map(self.ExpandSpecial, cflags))
self.WriteVariableList(ninja_file, 'cflags_c',
map(self.ExpandSpecial, cflags_c))
self.WriteVariableList(ninja_file, 'cflags_cc',
map(self.ExpandSpecial, cflags_cc))
if self.flavor == 'mac':
self.WriteVariableList(ninja_file, 'cflags_objc',
map(self.ExpandSpecial, cflags_objc))
self.WriteVariableList(ninja_file, 'cflags_objcc',
map(self.ExpandSpecial, cflags_objcc))
ninja_file.newline()
outputs = []
has_rc_source = False
for source in sources:
filename, ext = os.path.splitext(source)
ext = ext[1:]
obj_ext = self.obj_ext
if ext in ('cc', 'cpp', 'cxx'):
command = 'cxx'
self.uses_cpp = True
elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
command = 'cc'
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
command = 'cc_s'
elif (self.flavor == 'win' and ext == 'asm' and
self.msvs_settings.GetArch(config_name) == 'x86' and
not self.msvs_settings.HasExplicitAsmRules(spec)):
# Asm files only get auto assembled for x86 (not x64).
command = 'asm'
# Add the _asm suffix as msvs is capable of handling .cc and
# .asm files of the same name without collision.
obj_ext = '_asm.obj'
elif self.flavor == 'mac' and ext == 'm':
command = 'objc'
elif self.flavor == 'mac' and ext == 'mm':
command = 'objcxx'
self.uses_cpp = True
elif self.flavor == 'win' and ext == 'rc':
command = 'rc'
obj_ext = '.res'
has_rc_source = True
else:
# Ignore unhandled extensions.
continue
input = self.GypPathToNinja(source)
output = self.GypPathToUniqueOutput(filename + obj_ext)
if arch is not None:
output = AddArch(output, arch)
implicit = precompiled_header.GetObjDependencies([input], [output], arch)
variables = []
if self.flavor == 'win':
variables, output, implicit = precompiled_header.GetFlagsModifications(
input, output, implicit, command, cflags_c, cflags_cc,
self.ExpandSpecial)
ninja_file.build(output, command, input,
implicit=[gch for _, _, gch in implicit],
order_only=predepends, variables=variables)
outputs.append(output)
if has_rc_source:
resource_include_dirs = config.get('resource_include_dirs', include_dirs)
self.WriteVariableList(ninja_file, 'resource_includes',
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
for i in resource_include_dirs])
self.WritePchTargets(ninja_file, pch_commands)
ninja_file.newline()
return outputs
def WritePchTargets(self, ninja_file, pch_commands):
"""Writes ninja rules to compile prefix headers."""
if not pch_commands:
return
for gch, lang_flag, lang, input in pch_commands:
var_name = {
'c': 'cflags_pch_c',
'cc': 'cflags_pch_cc',
'm': 'cflags_pch_objc',
'mm': 'cflags_pch_objcc',
}[lang]
map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
cmd = map.get(lang)
ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
"""Write out a link step. Fills out target.binary. """
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteLinkForArch(
self.ninja, spec, config_name, config, link_deps)
else:
output = self.ComputeOutput(spec)
inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
config_name, config, link_deps[arch],
arch=arch)
for arch in self.archs]
extra_bindings = []
if not self.is_mac_bundle:
self.AppendPostbuildVariable(extra_bindings, spec, output, output)
self.ninja.build(output, 'lipo', inputs, variables=extra_bindings)
return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
link_deps, arch=None):
"""Write out a link step. Fills out target.binary. """
command = {
'executable': 'link',
'loadable_module': 'solink_module',
'shared_library': 'solink',
}[spec['type']]
command_suffix = ''
implicit_deps = set()
solibs = set()
if 'dependencies' in spec:
# Two kinds of dependencies:
# - Linkable dependencies (like a .a or a .so): add them to the link line.
# - Non-linkable dependencies (like a rule that generates a file
# and writes a stamp file): add them to implicit_deps
extra_link_deps = set()
for dep in spec['dependencies']:
target = self.target_outputs.get(dep)
if not target:
continue
linkable = target.Linkable()
if linkable:
new_deps = []
if (self.flavor == 'win' and
target.component_objs and
self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
new_deps = target.component_objs
elif self.flavor == 'win' and target.import_lib:
new_deps = [target.import_lib]
elif target.UsesToc(self.flavor):
solibs.add(target.binary)
implicit_deps.add(target.binary + '.TOC')
else:
new_deps = [target.binary]
for new_dep in new_deps:
if new_dep not in extra_link_deps:
extra_link_deps.add(new_dep)
link_deps.append(new_dep)
final_output = target.FinalOutput()
if not linkable or final_output != target.binary:
implicit_deps.add(final_output)
extra_bindings = []
if self.uses_cpp and self.flavor != 'win':
extra_bindings.append(('ld', '$ldxx'))
output = self.ComputeOutput(spec, arch)
if arch is None and not self.is_mac_bundle:
self.AppendPostbuildVariable(extra_bindings, spec, output, output)
is_executable = spec['type'] == 'executable'
# The ldflags config key is not used on mac or win. On those platforms
# linker flags are set via xcode_settings and msvs_settings, respectively.
env_ldflags = os.environ.get('LDFLAGS', '').split()
if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(config_name,
self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
self.GypPathToNinja, arch)
ldflags = env_ldflags + ldflags
elif self.flavor == 'win':
manifest_base_name = self.GypPathToUniqueOutput(
self.ComputeOutputFileName(spec))
ldflags, intermediate_manifest, manifest_files = \
self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
self.ExpandSpecial, manifest_base_name,
output, is_executable,
self.toplevel_build)
ldflags = env_ldflags + ldflags
self.WriteVariableList(ninja_file, 'manifests', manifest_files)
implicit_deps = implicit_deps.union(manifest_files)
if intermediate_manifest:
self.WriteVariableList(
ninja_file, 'intermediatemanifest', [intermediate_manifest])
command_suffix = _GetWinLinkRuleNameSuffix(
self.msvs_settings.IsEmbedManifest(config_name))
def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
if def_file:
implicit_deps.add(def_file)
else:
# Respect environment variables related to build, but target-specific
# flags can still override them.
ldflags = env_ldflags + config.get('ldflags', [])
if is_executable and len(solibs):
rpath = 'lib/'
if self.toolset != 'target':
rpath += self.toolset
ldflags.append('-Wl,-rpath=\$$ORIGIN/%s' % rpath)
ldflags.append('-Wl,-rpath-link=%s' % rpath)
self.WriteVariableList(ninja_file, 'ldflags',
gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))
library_dirs = config.get('library_dirs', [])
if self.flavor == 'win':
library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
for l in library_dirs]
library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
self.flavor)
for l in library_dirs]
else:
library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
self.flavor)
for l in library_dirs]
libraries = gyp.common.uniquer(map(self.ExpandSpecial,
spec.get('libraries', [])))
if self.flavor == 'mac':
libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
elif self.flavor == 'win':
libraries = self.msvs_settings.AdjustLibraries(libraries)
self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
linked_binary = output
if command in ('solink', 'solink_module'):
extra_bindings.append(('soname', os.path.split(output)[1]))
extra_bindings.append(('lib',
gyp.common.EncodePOSIXShellArgument(output)))
if self.flavor == 'win':
extra_bindings.append(('binary', output))
if '/NOENTRY' not in ldflags:
self.target.import_lib = output + '.lib'
extra_bindings.append(('implibflag',
'/IMPLIB:%s' % self.target.import_lib))
pdbname = self.msvs_settings.GetPDBName(
config_name, self.ExpandSpecial, output + '.pdb')
output = [output, self.target.import_lib]
if pdbname:
output.append(pdbname)
elif not self.is_mac_bundle:
output = [output, output + '.TOC']
else:
command = command + '_notoc'
elif self.flavor == 'win':
extra_bindings.append(('binary', output))
pdbname = self.msvs_settings.GetPDBName(
config_name, self.ExpandSpecial, output + '.pdb')
if pdbname:
output = [output, pdbname]
if len(solibs):
extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
ninja_file.build(output, command + command_suffix, link_deps,
implicit=list(implicit_deps),
variables=extra_bindings)
return linked_binary
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
extra_link_deps = any(self.target_outputs.get(dep).Linkable()
for dep in spec.get('dependencies', [])
if dep in self.target_outputs)
if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
# TODO(evan): don't call this function for 'none' target types, as
# it doesn't do anything, and we fake out a 'binary' with a stamp file.
self.target.binary = compile_deps
self.target.type = 'none'
elif spec['type'] == 'static_library':
self.target.binary = self.ComputeOutput(spec)
if (self.flavor not in ('mac', 'openbsd', 'win') and not
self.is_standalone_static_library):
self.ninja.build(self.target.binary, 'alink_thin', link_deps,
order_only=compile_deps)
else:
variables = []
if self.xcode_settings:
libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
if libtool_flags:
variables.append(('libtool_flags', libtool_flags))
if self.msvs_settings:
libflags = self.msvs_settings.GetLibFlags(config_name,
self.GypPathToNinja)
variables.append(('libflags', libflags))
if self.flavor != 'mac' or len(self.archs) == 1:
self.AppendPostbuildVariable(variables, spec,
self.target.binary, self.target.binary)
self.ninja.build(self.target.binary, 'alink', link_deps,
order_only=compile_deps, variables=variables)
else:
inputs = []
for arch in self.archs:
output = self.ComputeOutput(spec, arch)
self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
order_only=compile_deps,
variables=variables)
inputs.append(output)
# TODO: It's not clear if libtool_flags should be passed to the alink
# call that combines single-arch .a files into a fat .a file.
self.AppendPostbuildVariable(variables, spec,
self.target.binary, self.target.binary)
self.ninja.build(self.target.binary, 'alink', inputs,
# FIXME: test proving order_only=compile_deps isn't
# needed.
variables=variables)
else:
self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
assert self.is_mac_bundle
package_framework = spec['type'] in ('shared_library', 'loadable_module')
output = self.ComputeMacBundleOutput()
if is_empty:
output += '.stamp'
variables = []
self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
is_command_start=not package_framework)
if package_framework and not is_empty:
variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
self.ninja.build(output, 'package_framework', mac_bundle_depends,
variables=variables)
else:
self.ninja.build(output, 'stamp', mac_bundle_depends,
variables=variables)
self.target.bundle = output
return output
def GetSortedXcodeEnv(self, additional_settings=None):
"""Returns the variables Xcode would set for build steps."""
assert self.abs_build_dir
abs_build_dir = self.abs_build_dir
return gyp.xcode_emulation.GetSortedXcodeEnv(
self.xcode_settings, abs_build_dir,
os.path.join(abs_build_dir, self.build_to_base), self.config_name,
additional_settings)
def GetSortedXcodePostbuildEnv(self):
"""Returns the variables Xcode would set for postbuild steps."""
postbuild_settings = {}
# CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
# TODO(thakis): It would be nice to have some general mechanism instead.
strip_save_file = self.xcode_settings.GetPerTargetSetting(
'CHROMIUM_STRIP_SAVE_FILE')
if strip_save_file:
postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
def AppendPostbuildVariable(self, variables, spec, output, binary,
is_command_start=False):
"""Adds a 'postbuild' variable if there is a postbuild for |output|."""
postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
if postbuild:
variables.append(('postbuilds', postbuild))
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
"""Returns a shell command that runs all the postbuilds, and removes
|output| if any of them fails. If |is_command_start| is False, then the
returned string will start with ' && '."""
if not self.xcode_settings or spec['type'] == 'none' or not output:
return ''
output = QuoteShellArgument(output, self.flavor)
postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
if output_binary is not None:
postbuilds = self.xcode_settings.AddImplicitPostbuilds(
self.config_name,
os.path.normpath(os.path.join(self.base_to_build, output)),
QuoteShellArgument(
os.path.normpath(os.path.join(self.base_to_build, output_binary)),
self.flavor),
postbuilds, quiet=True)
if not postbuilds:
return ''
# Postbuilds expect to be run in the gyp file's directory, so insert an
# implicit postbuild to cd to there.
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
['cd', self.build_to_base]))
env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
    # G will be non-zero (the failing exit status) if any postbuild fails.
    # Run all postbuilds in a subshell.
commands = env + ' (' + \
' && '.join([ninja_syntax.escape(command) for command in postbuilds])
command_string = (commands + '); G=$$?; '
# Remove the final output if any postbuild failed.
'((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
if is_command_start:
return '(' + command_string + ' && '
else:
return '$ && (' + command_string
def ComputeExportEnvString(self, env):
"""Given an environment, returns a string looking like
    'export FOO=foo; export BAR="${FOO} bar";'
that exports |env| to the shell."""
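    # For example, [('FOO', 'foo'), ('BAR', 'a b')] yields (roughly)
    # "export FOO=foo; export BAR='a b';".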
export_str = []
for k, v in env:
export_str.append('export %s=%s;' %
(k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
return ' '.join(export_str)
def ComputeMacBundleOutput(self):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = generator_default_variables['PRODUCT_DIR']
return self.ExpandSpecial(
os.path.join(path, self.xcode_settings.GetWrapperName()))
def ComputeOutputFileName(self, spec, type=None):
"""Compute the filename of the final output for the current target."""
if not type:
type = spec['type']
default_variables = copy.copy(generator_default_variables)
CalculateVariables(default_variables, {'flavor': self.flavor})
# Compute filename prefix: the product prefix, or a default for
# the product type.
DEFAULT_PREFIX = {
'loadable_module': default_variables['SHARED_LIB_PREFIX'],
'shared_library': default_variables['SHARED_LIB_PREFIX'],
'static_library': default_variables['STATIC_LIB_PREFIX'],
'executable': default_variables['EXECUTABLE_PREFIX'],
}
prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
# Compute filename extension: the product extension, or a default
# for the product type.
DEFAULT_EXTENSION = {
'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
'shared_library': default_variables['SHARED_LIB_SUFFIX'],
'static_library': default_variables['STATIC_LIB_SUFFIX'],
'executable': default_variables['EXECUTABLE_SUFFIX'],
}
extension = spec.get('product_extension')
if extension:
extension = '.' + extension
else:
extension = DEFAULT_EXTENSION.get(type, '')
if 'product_name' in spec:
# If we were given an explicit name, use that.
target = spec['product_name']
else:
# Otherwise, derive a name from the target name.
target = spec['target_name']
if prefix == 'lib':
# Snip out an extra 'lib' from libs if appropriate.
target = StripPrefix(target, 'lib')
if type in ('static_library', 'loadable_module', 'shared_library',
'executable'):
return '%s%s%s' % (prefix, target, extension)
elif type == 'none':
return '%s.stamp' % target
else:
raise Exception('Unhandled output type %s' % type)
def ComputeOutput(self, spec, arch=None):
"""Compute the path for the final output of the spec."""
type = spec['type']
if self.flavor == 'win':
override = self.msvs_settings.GetOutputName(self.config_name,
self.ExpandSpecial)
if override:
return override
if arch is None and self.flavor == 'mac' and type in (
'static_library', 'executable', 'shared_library', 'loadable_module'):
filename = self.xcode_settings.GetExecutablePath()
else:
filename = self.ComputeOutputFileName(spec, type)
if arch is None and 'product_dir' in spec:
path = os.path.join(spec['product_dir'], filename)
return self.ExpandSpecial(path)
# Some products go into the output root, libraries go into shared library
# dir, and everything else goes into the normal place.
type_in_output_root = ['executable', 'loadable_module']
if self.flavor == 'mac' and self.toolset == 'target':
type_in_output_root += ['shared_library', 'static_library']
elif self.flavor == 'win' and self.toolset == 'target':
type_in_output_root += ['shared_library']
if arch is not None:
# Make sure partial executables don't end up in a bundle or the regular
# output directory.
archdir = 'arch'
if self.toolset != 'target':
archdir = os.path.join('arch', '%s' % self.toolset)
return os.path.join(archdir, AddArch(filename, arch))
elif type in type_in_output_root or self.is_standalone_static_library:
return filename
elif type == 'shared_library':
libdir = 'lib'
if self.toolset != 'target':
libdir = os.path.join('lib', '%s' % self.toolset)
return os.path.join(libdir, filename)
else:
return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, ninja_file, var, values):
assert not isinstance(values, str)
if values is None:
values = []
ninja_file.variable(var, ' '.join(values))
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env):
"""Write out a new ninja "rule" statement for a given command.
Returns the name of the new rule, and a copy of |args| with variables
expanded."""
if self.flavor == 'win':
args = [self.msvs_settings.ConvertVSMacros(
arg, self.base_to_build, config=self.config_name)
for arg in args]
description = self.msvs_settings.ConvertVSMacros(
description, config=self.config_name)
elif self.flavor == 'mac':
# |env| is an empty list on non-mac.
args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
description = gyp.xcode_emulation.ExpandEnvVars(description, env)
# TODO: we shouldn't need to qualify names; we do it because
# currently the ninja rule namespace is global, but it really
# should be scoped to the subninja.
rule_name = self.name
if self.toolset == 'target':
rule_name += '.' + self.toolset
rule_name += '.' + name
rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
# Remove variable references, but not if they refer to the magic rule
# variables. This is not quite right, as it also protects these for
# actions, not just for rules where they are valid. Good enough.
protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
description = re.sub(protect + r'\$', '_', description)
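    # e.g. 'Copy ${source} to $(OutDir)' becomes 'Copy ${source} to
    # _(OutDir)': the protected ${source} survives, the stray '$' does not.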
# gyp dictates that commands are run from the base directory.
# cd into the directory before running, and adjust paths in
# the arguments to point to the proper locations.
rspfile = None
rspfile_content = None
args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
if self.flavor == 'win':
rspfile = rule_name + '.$unique_name.rsp'
# The cygwin case handles this inside the bash sub-shell.
run_in = '' if is_cygwin else ' ' + self.build_to_base
if is_cygwin:
rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
args, self.build_to_base)
else:
rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
rspfile + run_in)
else:
env = self.ComputeExportEnvString(env)
command = gyp.common.EncodePOSIXShellList(args)
command = 'cd %s; ' % self.build_to_base + env + command
# GYP rules/actions express being no-ops by not touching their outputs.
# Avoid executing downstream dependencies in this case by specifying
# restat=1 to ninja.
self.ninja.rule(rule_name, command, description, restat=True,
rspfile=rspfile, rspfile_content=rspfile_content)
self.ninja.newline()
return rule_name, args
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
global generator_additional_non_configuration_keys
global generator_additional_path_sections
flavor = gyp.common.GetFlavor(params)
if flavor == 'mac':
default_variables.setdefault('OS', 'mac')
default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
default_variables.setdefault('SHARED_LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
default_variables.setdefault('LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
# Copy additional generator configuration data from Xcode, which is shared
# by the Mac Ninja generator.
import gyp.generator.xcode as xcode_generator
generator_additional_non_configuration_keys = getattr(xcode_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(xcode_generator,
'generator_additional_path_sections', [])
global generator_extra_sources_for_rules
generator_extra_sources_for_rules = getattr(xcode_generator,
'generator_extra_sources_for_rules', [])
elif flavor == 'win':
default_variables.setdefault('OS', 'win')
default_variables['EXECUTABLE_SUFFIX'] = '.exe'
default_variables['STATIC_LIB_PREFIX'] = ''
default_variables['STATIC_LIB_SUFFIX'] = '.lib'
default_variables['SHARED_LIB_PREFIX'] = ''
default_variables['SHARED_LIB_SUFFIX'] = '.dll'
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
else:
operating_system = flavor
if flavor == 'android':
operating_system = 'linux' # Keep this legacy behavior for now.
default_variables.setdefault('OS', operating_system)
default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
default_variables.setdefault('SHARED_LIB_DIR',
os.path.join('$!PRODUCT_DIR', 'lib'))
default_variables.setdefault('LIB_DIR',
os.path.join('$!PRODUCT_DIR', 'obj'))
def ComputeOutputDir(params):
"""Returns the path from the toplevel_dir to the build output directory."""
# generator_dir: relative path from pwd to where make puts build files.
  # This makes migrating from make to ninja easier; ninja itself doesn't put
  # anything here.
generator_dir = os.path.relpath(params['options'].generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
# Relative path from source root to our output files. e.g. "out"
return os.path.normpath(os.path.join(generator_dir, output_dir))
def CalculateGeneratorInputInfo(params):
"""Called by __init__ to initialize generator values based on params."""
# E.g. "out/gypfiles"
toplevel = params['options'].toplevel_dir
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, ComputeOutputDir(params), 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': toplevel,
'qualified_out_dir': qualified_out_dir,
}
def OpenOutput(path, mode='w'):
"""Open |path| for writing, creating directories if necessary."""
gyp.common.EnsureDirExists(path)
return open(path, mode)
def CommandWithWrapper(cmd, wrappers, prog):
wrapper = wrappers.get(cmd, '')
if wrapper:
return wrapper + ' ' + prog
return prog
def GetDefaultConcurrentLinks():
"""Returns a best-guess for a number of concurrent links."""
if sys.platform in ('win32', 'cygwin'):
import ctypes
class MEMORYSTATUSEX(ctypes.Structure):
_fields_ = [
("dwLength", ctypes.c_ulong),
("dwMemoryLoad", ctypes.c_ulong),
("ullTotalPhys", ctypes.c_ulonglong),
("ullAvailPhys", ctypes.c_ulonglong),
("ullTotalPageFile", ctypes.c_ulonglong),
("ullAvailPageFile", ctypes.c_ulonglong),
("ullTotalVirtual", ctypes.c_ulonglong),
("ullAvailVirtual", ctypes.c_ulonglong),
("sullAvailExtendedVirtual", ctypes.c_ulonglong),
]
stat = MEMORYSTATUSEX()
stat.dwLength = ctypes.sizeof(stat)
ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB
hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
# return min(mem_limit, hard_cap)
# TODO(scottmg): Temporary speculative fix for OOM on builders
# See http://crbug.com/333000.
return 2
elif sys.platform.startswith('linux'):
with open("/proc/meminfo") as meminfo:
memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
for line in meminfo:
match = memtotal_re.match(line)
if not match:
continue
        # Allow 8 GB per link on Linux because the Gold linker is quite
        # memory-hungry.
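        # e.g. a machine with 32 GB RAM reports MemTotal of ~33554432 kB,
        # which yields 4 concurrent links.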
return max(1, int(match.group(1)) / (8 * (2 ** 20)))
return 1
elif sys.platform == 'darwin':
try:
avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
# A static library debug build of Chromium's unit_tests takes ~2.7GB, so
# 4GB per ld process allows for some more bloat.
return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
except:
return 1
else:
# TODO(scottmg): Implement this for other platforms.
return 1
def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on
whether the manifest embedding is enabled."""
return '_embed' if embed_manifest else ''
def _AddWinLinkRules(master_ninja, embed_manifest):
"""Adds link rules for Windows platform to |master_ninja|."""
def FullLinkCommand(ldcmd, out, binary_type):
resource_name = {
'exe': '1',
'dll': '2',
}[binary_type]
return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
'%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
'$manifests' % {
'python': sys.executable,
'out': out,
'ldcmd': ldcmd,
'resname': resource_name,
'embed': embed_manifest }
rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
use_separate_mspdbsrv = (
int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
'$ld /nologo $implibflag /DLL /OUT:$binary '
'@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
master_ninja.rule('solink' + rule_name_suffix,
description=dlldesc, command=dllcmd,
rspfile='$binary.rsp',
rspfile_content='$libs $in_newline $ldflags',
restat=True,
pool='link_pool')
master_ninja.rule('solink_module' + rule_name_suffix,
description=dlldesc, command=dllcmd,
rspfile='$binary.rsp',
rspfile_content='$libs $in_newline $ldflags',
restat=True,
pool='link_pool')
# Note that ldflags goes at the end so that it has the option of
# overriding default settings earlier in the command line.
exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
'$ld /nologo /OUT:$binary @$binary.rsp' %
(sys.executable, use_separate_mspdbsrv))
exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
master_ninja.rule('link' + rule_name_suffix,
description='LINK%s $binary' % rule_name_suffix.upper(),
command=exe_cmd,
rspfile='$binary.rsp',
rspfile_content='$in_newline $libs $ldflags',
pool='link_pool')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name):
options = params['options']
flavor = gyp.common.GetFlavor(params)
generator_flags = params.get('generator_flags', {})
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.normpath(
os.path.join(ComputeOutputDir(params), config_name))
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
# Put build-time support tools in out/{config_name}.
gyp.common.CopyTool(flavor, toplevel_build)
# Grab make settings for CC/CXX.
  # The rules are:
  # - The priority, from lowest to highest, is: the gcc/g++ defaults, the
  #   'make_global_settings' in gyp, then the environment variables.
  # - If there is neither a 'make_global_settings' for CC.host/CXX.host nor a
  #   'CC_host'/'CXX_host' environment variable, cc_host/cxx_host fall back
  #   to cc/cxx.
if flavor == 'win':
# Overridden by local arch choice in the use_deps case.
    # Chromium's ffmpeg c99conv.py currently looks for a 'cc =' line in
    # build.ninja, so it needs something valid here. http://crbug.com/233985
cc = 'cl.exe'
cxx = 'cl.exe'
ld = 'link.exe'
ld_host = '$ld'
else:
cc = 'cc'
cxx = 'c++'
ld = '$cc'
ldxx = '$cxx'
ld_host = '$cc_host'
ldxx_host = '$cxx_host'
cc_host = None
cxx_host = None
cc_host_global_setting = None
cxx_host_global_setting = None
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings = data[build_file].get('make_global_settings', [])
build_to_root = gyp.common.InvertRelativePath(build_dir,
options.toplevel_dir)
wrappers = {}
for key, value in make_global_settings:
if key == 'CC':
cc = os.path.join(build_to_root, value)
if key == 'CXX':
cxx = os.path.join(build_to_root, value)
if key == 'CC.host':
cc_host = os.path.join(build_to_root, value)
cc_host_global_setting = value
if key == 'CXX.host':
cxx_host = os.path.join(build_to_root, value)
cxx_host_global_setting = value
if key.endswith('_wrapper'):
wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
# Support wrappers from environment variables too.
for key, value in os.environ.iteritems():
if key.lower().endswith('_wrapper'):
key_prefix = key[:-len('_wrapper')]
key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
wrappers[key_prefix] = os.path.join(build_to_root, value)
if flavor == 'win':
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
toplevel_build, generator_flags, OpenOutput)
for arch, path in cl_paths.iteritems():
master_ninja.variable(
'cl_' + arch, CommandWithWrapper('CC', wrappers,
QuoteShellArgument(path, flavor)))
cc = GetEnvironFallback(['CC_target', 'CC'], cc)
master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
if flavor == 'win':
master_ninja.variable('ld', ld)
master_ninja.variable('idl', 'midl.exe')
master_ninja.variable('ar', 'lib.exe')
master_ninja.variable('rc', 'rc.exe')
master_ninja.variable('asm', 'ml.exe')
master_ninja.variable('mt', 'mt.exe')
else:
master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))
if generator_supports_multiple_toolsets:
if not cc_host:
cc_host = cc
if not cxx_host:
cxx_host = cxx
master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
cc_host = GetEnvironFallback(['CC_host'], cc_host)
cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
# The environment variable could be used in 'make_global_settings', like
# ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
if '$(CC)' in cc_host and cc_host_global_setting:
cc_host = cc_host_global_setting.replace('$(CC)', cc)
if '$(CXX)' in cxx_host and cxx_host_global_setting:
cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
master_ninja.variable('cc_host',
CommandWithWrapper('CC.host', wrappers, cc_host))
master_ninja.variable('cxx_host',
CommandWithWrapper('CXX.host', wrappers, cxx_host))
if flavor == 'win':
master_ninja.variable('ld_host', ld_host)
else:
master_ninja.variable('ld_host', CommandWithWrapper(
'LINK', wrappers, ld_host))
master_ninja.variable('ldxx_host', CommandWithWrapper(
'LINK', wrappers, ldxx_host))
master_ninja.newline()
master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
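  # ninja runs at most |depth| link steps from this pool at once, keeping
  # memory-hungry linkers from swamping the machine.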
master_ninja.newline()
deps = 'msvc' if flavor == 'win' else 'gcc'
if flavor != 'win':
master_ninja.rule(
'cc',
description='CC $out',
command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
'$cflags_pch_c -c $in -o $out'),
depfile='$out.d',
deps=deps)
master_ninja.rule(
'cc_s',
description='CC $out',
command=('$cc $defines $includes $cflags $cflags_c '
'$cflags_pch_c -c $in -o $out'))
master_ninja.rule(
'cxx',
description='CXX $out',
command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
'$cflags_pch_cc -c $in -o $out'),
depfile='$out.d',
deps=deps)
else:
# TODO(scottmg) Separate pdb names is a test to see if it works around
# http://crbug.com/142362. It seems there's a race between the creation of
# the .pdb by the precompiled header step for .cc and the compilation of
# .c files. This should be handled by mspdbsrv, but rarely errors out with
# c1xx : fatal error C1033: cannot open program database
# By making the rules target separate pdb files this might be avoided.
cc_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cc /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
cxx_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cxx /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
master_ninja.rule(
'cc',
description='CC $out',
command=cc_command,
rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_c',
deps=deps)
master_ninja.rule(
'cxx',
description='CXX $out',
command=cxx_command,
rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_cc',
deps=deps)
master_ninja.rule(
'idl',
description='IDL $in',
command=('%s gyp-win-tool midl-wrapper $arch $outdir '
'$tlb $h $dlldata $iid $proxy $in '
'$idlflags' % sys.executable))
master_ninja.rule(
'rc',
description='RC $in',
# Note: $in must be last otherwise rc.exe complains.
command=('%s gyp-win-tool rc-wrapper '
'$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
sys.executable))
master_ninja.rule(
'asm',
description='ASM $in',
command=('%s gyp-win-tool asm-wrapper '
'$arch $asm $defines $includes /c /Fo $out $in' %
sys.executable))
if flavor != 'mac' and flavor != 'win':
master_ninja.rule(
'alink',
description='AR $out',
command='rm -f $out && $ar rcs $out $in')
master_ninja.rule(
'alink_thin',
description='AR $out',
command='rm -f $out && $ar rcsT $out $in')
# This allows targets that only need to depend on $lib's API to declare an
# order-only dependency on $lib.TOC and avoid relinking such downstream
# dependencies when $lib changes only in non-public ways.
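  # (For instance, adding a file-local static function changes $lib but not
  # its exported symbols, so ${lib}.TOC is unchanged and dependents are not
  # relinked.)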
  # The resulting string leaves an uninterpolated %(suffix)s placeholder,
  # which is filled in by the final substitutions below.
mtime_preserving_solink_base = (
'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then '
'%(solink)s && %(extract_toc)s > ${lib}.TOC; else '
'%(solink)s && %(extract_toc)s > ${lib}.tmp && '
'if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; '
'fi; fi'
% { 'solink':
'$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
'extract_toc':
('{ readelf -d ${lib} | grep SONAME ; '
'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')})
master_ninja.rule(
'solink',
description='SOLINK $lib',
restat=True,
command=(mtime_preserving_solink_base % {
'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
'$libs'}),
pool='link_pool')
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib',
restat=True,
command=(mtime_preserving_solink_base % {
'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group '
'$libs'}),
pool='link_pool')
master_ninja.rule(
'link',
description='LINK $out',
command=('$ld $ldflags -o $out '
'-Wl,--start-group $in $solibs -Wl,--end-group $libs'),
pool='link_pool')
elif flavor == 'win':
master_ninja.rule(
'alink',
description='LIB $out',
command=('%s gyp-win-tool link-wrapper $arch False '
'$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
sys.executable),
rspfile='$out.rsp',
rspfile_content='$in_newline $libflags')
_AddWinLinkRules(master_ninja, embed_manifest=True)
_AddWinLinkRules(master_ninja, embed_manifest=False)
else:
master_ninja.rule(
'objc',
description='OBJC $out',
command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
'$cflags_pch_objc -c $in -o $out'),
depfile='$out.d',
deps=deps)
master_ninja.rule(
'objcxx',
description='OBJCXX $out',
command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
'$cflags_pch_objcc -c $in -o $out'),
depfile='$out.d',
deps=deps)
master_ninja.rule(
'alink',
description='LIBTOOL-STATIC $out, POSTBUILDS',
command='rm -f $out && '
'./gyp-mac-tool filter-libtool libtool $libtool_flags '
'-static -o $out $in'
'$postbuilds')
master_ninja.rule(
'lipo',
description='LIPO $out, POSTBUILDS',
command='rm -f $out && lipo -create $in -output $out$postbuilds')
# Record the public interface of $lib in $lib.TOC. See the corresponding
# comment in the posix section above for details.
solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
mtime_preserving_solink_base = (
'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
# Always force dependent targets to relink if this library
# reexports something. Handling this correctly would require
# recursive TOC dumping but this is rare in practice, so punt.
'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
'%(solink)s && %(extract_toc)s > ${lib}.TOC; '
'else '
'%(solink)s && %(extract_toc)s > ${lib}.tmp && '
'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
'mv ${lib}.tmp ${lib}.TOC ; '
'fi; '
'fi'
% { 'solink': solink_base,
'extract_toc':
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
solink_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=mtime_preserving_solink_base % {'suffix': solink_suffix,
'type': '-shared'},
pool='link_pool')
master_ninja.rule(
'solink_notoc',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
pool='link_pool')
solink_module_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
command=mtime_preserving_solink_base % {'suffix': solink_module_suffix,
'type': '-bundle'},
pool='link_pool')
master_ninja.rule(
'solink_module_notoc',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
command=solink_base % {'suffix': solink_module_suffix, 'type': '-bundle'},
pool='link_pool')
master_ninja.rule(
'link',
description='LINK $out, POSTBUILDS',
command=('$ld $ldflags -o $out '
'$in $solibs $libs$postbuilds'),
pool='link_pool')
master_ninja.rule(
'preprocess_infoplist',
description='PREPROCESS INFOPLIST $out',
command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
'plutil -convert xml1 $out $out'))
master_ninja.rule(
'copy_infoplist',
description='COPY INFOPLIST $in',
command='$env ./gyp-mac-tool copy-info-plist $in $out $keys')
master_ninja.rule(
'mac_tool',
description='MACTOOL $mactool_cmd $in',
command='$env ./gyp-mac-tool $mactool_cmd $in $out')
master_ninja.rule(
'package_framework',
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
command='./gyp-mac-tool package-framework $out $version$postbuilds '
'&& touch $out')
if flavor == 'win':
master_ninja.rule(
'stamp',
description='STAMP $out',
command='%s gyp-win-tool stamp $out' % sys.executable)
master_ninja.rule(
'copy',
description='COPY $in $out',
command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
else:
master_ninja.rule(
'stamp',
description='STAMP $out',
command='${postbuilds}touch $out')
master_ninja.rule(
'copy',
description='COPY $in $out',
command='rm -rf $out && cp -af $in $out')
master_ninja.newline()
all_targets = set()
for build_file in params['build_files']:
for target in gyp.common.AllTargets(target_list,
target_dicts,
os.path.normpath(build_file)):
all_targets.add(target)
all_outputs = set()
# target_outputs is a map from qualified target name to a Target object.
target_outputs = {}
# target_short_names is a map from target short name to a list of Target
# objects.
target_short_names = {}
for qualified_target in target_list:
# qualified_target is like: third_party/icu/icu.gyp:icui18n#target
build_file, name, toolset = \
gyp.common.ParseQualifiedTarget(qualified_target)
this_make_global_settings = data[build_file].get('make_global_settings', [])
assert make_global_settings == this_make_global_settings, (
"make_global_settings needs to be the same for all targets. %s vs. %s" %
(this_make_global_settings, make_global_settings))
spec = target_dicts[qualified_target]
if flavor == 'mac':
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)
base_path = os.path.dirname(build_file)
obj = 'obj'
if toolset != 'target':
obj += '.' + toolset
output_file = os.path.join(obj, base_path, name + '.ninja')
ninja_output = StringIO()
writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
ninja_output,
toplevel_build, output_file,
flavor, toplevel_dir=options.toplevel_dir)
target = writer.WriteSpec(spec, config_name, generator_flags)
if ninja_output.tell() > 0:
      # Only write a .ninja file to disk when it actually has contents.
with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
ninja_file.write(ninja_output.getvalue())
ninja_output.close()
master_ninja.subninja(output_file)
if target:
if name != target.FinalOutput() and spec['toolset'] == 'target':
target_short_names.setdefault(name, []).append(target)
target_outputs[qualified_target] = target
if qualified_target in all_targets:
all_outputs.add(target.FinalOutput())
if target_short_names:
# Write a short name to build this target. This benefits both the
# "build chrome" case as well as the gyp tests, which expect to be
# able to run actions and build libraries by their short name.
master_ninja.newline()
master_ninja.comment('Short names for targets.')
for short_name in target_short_names:
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
target_short_names[short_name]])
if all_outputs:
master_ninja.newline()
master_ninja.build('all', 'phony', list(all_outputs))
master_ninja.default(generator_flags.get('default_target', 'all'))
master_ninja_file.close()
def PerformBuild(data, configurations, params):
options = params['options']
for config in configurations:
builddir = os.path.join(options.toplevel_dir, 'out', config)
arguments = ['ninja', '-C', builddir]
print 'Building [%s]: %s' % (config, arguments)
subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
# Ignore the interrupt signal so that the parent process catches it and
# kills all multiprocessing children.
signal.signal(signal.SIGINT, signal.SIG_IGN)
(target_list, target_dicts, data, params, config_name) = arglist
GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
def GenerateOutput(target_list, target_dicts, data, params):
# Update target_dicts for iOS device builds.
target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
target_dicts)
user_config = params.get('generator_flags', {}).get('config', None)
if gyp.common.GetFlavor(params) == 'win':
target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
target_list, target_dicts, generator_default_variables)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config)
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
if params['parallel']:
try:
pool = multiprocessing.Pool(len(config_names))
arglists = []
for config_name in config_names:
arglists.append(
(target_list, target_dicts, data, params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
except KeyboardInterrupt, e:
pool.terminate()
raise e
else:
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name)
| mit |
giggsey/SickRage | lib/unidecode/x0ae.py | 253 | 4875 | data = (
'geul', # 0x00
'geulg', # 0x01
'geulm', # 0x02
'geulb', # 0x03
'geuls', # 0x04
'geult', # 0x05
'geulp', # 0x06
'geulh', # 0x07
'geum', # 0x08
'geub', # 0x09
'geubs', # 0x0a
'geus', # 0x0b
'geuss', # 0x0c
'geung', # 0x0d
'geuj', # 0x0e
'geuc', # 0x0f
'geuk', # 0x10
'geut', # 0x11
'geup', # 0x12
'geuh', # 0x13
'gyi', # 0x14
'gyig', # 0x15
'gyigg', # 0x16
'gyigs', # 0x17
'gyin', # 0x18
'gyinj', # 0x19
'gyinh', # 0x1a
'gyid', # 0x1b
'gyil', # 0x1c
'gyilg', # 0x1d
'gyilm', # 0x1e
'gyilb', # 0x1f
'gyils', # 0x20
'gyilt', # 0x21
'gyilp', # 0x22
'gyilh', # 0x23
'gyim', # 0x24
'gyib', # 0x25
'gyibs', # 0x26
'gyis', # 0x27
'gyiss', # 0x28
'gying', # 0x29
'gyij', # 0x2a
'gyic', # 0x2b
'gyik', # 0x2c
'gyit', # 0x2d
'gyip', # 0x2e
'gyih', # 0x2f
'gi', # 0x30
'gig', # 0x31
'gigg', # 0x32
'gigs', # 0x33
'gin', # 0x34
'ginj', # 0x35
'ginh', # 0x36
'gid', # 0x37
'gil', # 0x38
'gilg', # 0x39
'gilm', # 0x3a
'gilb', # 0x3b
'gils', # 0x3c
'gilt', # 0x3d
'gilp', # 0x3e
'gilh', # 0x3f
'gim', # 0x40
'gib', # 0x41
'gibs', # 0x42
'gis', # 0x43
'giss', # 0x44
'ging', # 0x45
'gij', # 0x46
'gic', # 0x47
'gik', # 0x48
'git', # 0x49
'gip', # 0x4a
'gih', # 0x4b
'gga', # 0x4c
'ggag', # 0x4d
'ggagg', # 0x4e
'ggags', # 0x4f
'ggan', # 0x50
'gganj', # 0x51
'gganh', # 0x52
'ggad', # 0x53
'ggal', # 0x54
'ggalg', # 0x55
'ggalm', # 0x56
'ggalb', # 0x57
'ggals', # 0x58
'ggalt', # 0x59
'ggalp', # 0x5a
'ggalh', # 0x5b
'ggam', # 0x5c
'ggab', # 0x5d
'ggabs', # 0x5e
'ggas', # 0x5f
'ggass', # 0x60
'ggang', # 0x61
'ggaj', # 0x62
'ggac', # 0x63
'ggak', # 0x64
'ggat', # 0x65
'ggap', # 0x66
'ggah', # 0x67
'ggae', # 0x68
'ggaeg', # 0x69
'ggaegg', # 0x6a
'ggaegs', # 0x6b
'ggaen', # 0x6c
'ggaenj', # 0x6d
'ggaenh', # 0x6e
'ggaed', # 0x6f
'ggael', # 0x70
'ggaelg', # 0x71
'ggaelm', # 0x72
'ggaelb', # 0x73
'ggaels', # 0x74
'ggaelt', # 0x75
'ggaelp', # 0x76
'ggaelh', # 0x77
'ggaem', # 0x78
'ggaeb', # 0x79
'ggaebs', # 0x7a
'ggaes', # 0x7b
'ggaess', # 0x7c
'ggaeng', # 0x7d
'ggaej', # 0x7e
'ggaec', # 0x7f
'ggaek', # 0x80
'ggaet', # 0x81
'ggaep', # 0x82
'ggaeh', # 0x83
'ggya', # 0x84
'ggyag', # 0x85
'ggyagg', # 0x86
'ggyags', # 0x87
'ggyan', # 0x88
'ggyanj', # 0x89
'ggyanh', # 0x8a
'ggyad', # 0x8b
'ggyal', # 0x8c
'ggyalg', # 0x8d
'ggyalm', # 0x8e
'ggyalb', # 0x8f
'ggyals', # 0x90
'ggyalt', # 0x91
'ggyalp', # 0x92
'ggyalh', # 0x93
'ggyam', # 0x94
'ggyab', # 0x95
'ggyabs', # 0x96
'ggyas', # 0x97
'ggyass', # 0x98
'ggyang', # 0x99
'ggyaj', # 0x9a
'ggyac', # 0x9b
'ggyak', # 0x9c
'ggyat', # 0x9d
'ggyap', # 0x9e
'ggyah', # 0x9f
'ggyae', # 0xa0
'ggyaeg', # 0xa1
'ggyaegg', # 0xa2
'ggyaegs', # 0xa3
'ggyaen', # 0xa4
'ggyaenj', # 0xa5
'ggyaenh', # 0xa6
'ggyaed', # 0xa7
'ggyael', # 0xa8
'ggyaelg', # 0xa9
'ggyaelm', # 0xaa
'ggyaelb', # 0xab
'ggyaels', # 0xac
'ggyaelt', # 0xad
'ggyaelp', # 0xae
'ggyaelh', # 0xaf
'ggyaem', # 0xb0
'ggyaeb', # 0xb1
'ggyaebs', # 0xb2
'ggyaes', # 0xb3
'ggyaess', # 0xb4
'ggyaeng', # 0xb5
'ggyaej', # 0xb6
'ggyaec', # 0xb7
'ggyaek', # 0xb8
'ggyaet', # 0xb9
'ggyaep', # 0xba
'ggyaeh', # 0xbb
'ggeo', # 0xbc
'ggeog', # 0xbd
'ggeogg', # 0xbe
'ggeogs', # 0xbf
'ggeon', # 0xc0
'ggeonj', # 0xc1
'ggeonh', # 0xc2
'ggeod', # 0xc3
'ggeol', # 0xc4
'ggeolg', # 0xc5
'ggeolm', # 0xc6
'ggeolb', # 0xc7
'ggeols', # 0xc8
'ggeolt', # 0xc9
'ggeolp', # 0xca
'ggeolh', # 0xcb
'ggeom', # 0xcc
'ggeob', # 0xcd
'ggeobs', # 0xce
'ggeos', # 0xcf
'ggeoss', # 0xd0
'ggeong', # 0xd1
'ggeoj', # 0xd2
'ggeoc', # 0xd3
'ggeok', # 0xd4
'ggeot', # 0xd5
'ggeop', # 0xd6
'ggeoh', # 0xd7
'gge', # 0xd8
'ggeg', # 0xd9
'ggegg', # 0xda
'ggegs', # 0xdb
'ggen', # 0xdc
'ggenj', # 0xdd
'ggenh', # 0xde
'gged', # 0xdf
'ggel', # 0xe0
'ggelg', # 0xe1
'ggelm', # 0xe2
'ggelb', # 0xe3
'ggels', # 0xe4
'ggelt', # 0xe5
'ggelp', # 0xe6
'ggelh', # 0xe7
'ggem', # 0xe8
'ggeb', # 0xe9
'ggebs', # 0xea
'gges', # 0xeb
'ggess', # 0xec
'ggeng', # 0xed
'ggej', # 0xee
'ggec', # 0xef
'ggek', # 0xf0
'gget', # 0xf1
'ggep', # 0xf2
'ggeh', # 0xf3
'ggyeo', # 0xf4
'ggyeog', # 0xf5
'ggyeogg', # 0xf6
'ggyeogs', # 0xf7
'ggyeon', # 0xf8
'ggyeonj', # 0xf9
'ggyeonh', # 0xfa
'ggyeod', # 0xfb
'ggyeol', # 0xfc
'ggyeolg', # 0xfd
'ggyeolm', # 0xfe
'ggyeolb', # 0xff
)
| gpl-3.0 |
gri-is/lodjob | OLD_REPOSITORY/utils/price_parsing.py | 1 | 2241 | from .data_templates import make_paid_amount_parsed, make_paid_amount_unparsed
currency_divisors = {
'dollars': [100],
'florins': [100],
'francs': [100],
'marks': [100],
'pounds': [20, 12]
}
def make_decimalizer(divisor_2=1, divisor_3=1):
def decimalize(primary=0, secondary=0, tertiary=0):
return primary + secondary/divisor_2 + tertiary/(divisor_2 * divisor_3)
return decimalize
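# Worked example (assuming Python 3 division, which this module relies on):
# with the pounds divisors [20, 12] (shillings, pence), 1 pound 2 shillings
# and 6 pence decimalizes to 1 + 2/20 + 6/(20 * 12) = 1.125, i.e.
#   make_decimalizer(20, 12)(1, 2, 6) == 1.125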
currency_decimalizers = {
currency: make_decimalizer(*currency_divisors[currency])
for currency in currency_divisors
}
def is_parsable(price, currency):
    if currency not in currency_divisors:
        # currency is not known
        return False
    units = price.split('.')
    if len(units) > 1 + len(currency_divisors[currency]):
        # more denominations than the currency supports; without this check,
        # parse_denominations would return False and that False would leak
        # into the parsed result as the paid amount value
        return False
    for unit in units:
        if not unit.isnumeric():
            return False
    return True
def parse_denominations(price, currency):
denominations = [int(denom) for denom in price.split('.')]
if len(denominations) > 1+len(currency_divisors[currency]):
return False
return currency_decimalizers[currency](*denominations)
def parse_price(record, price_field, currency_field, event):
price = record['source'][price_field]
currency = record['source'][currency_field]
if not price and not currency:
event['status'] = 'skipped'
event['message'] = 'missing price and currency values'
return
if not price:
event['status'] = 'skipped'
event['message'] = 'missing price value'
return
if not currency:
event['status'] = 'skipped'
event['message'] = 'missing currency value'
return
if not is_parsable(price, currency):
paid_amount_unparsed = make_paid_amount_unparsed()
paid_amount_unparsed['paid_amount']['label'] = '\n'.join([price, currency])
event['status'] = 'completed'
event['message'] = 'price and currency is not parsable'
return paid_amount_unparsed
price = parse_denominations(price, currency)
paid_amount_parsed = make_paid_amount_parsed()
paid_amount_parsed['paid_amount']['value'] = price
paid_amount_parsed['paid_amount']['currency']['label'] = currency
event['status'] = 'completed'
event['message'] = 'price and currency parsed'
return paid_amount_parsed
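# Minimal usage sketch (the record layout is inferred from this module; the
# surrounding pipeline is an assumption):
#   event = {}
#   result = parse_price({'source': {'price': '1.2.6', 'currency': 'pounds'}},
#                        'price', 'currency', event)
#   # event == {'status': 'completed', 'message': 'price and currency parsed'}
#   # result['paid_amount']['value'] == 1.125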
| agpl-3.0 |
wangscript/libjingle-1 | trunk/third_party/webrtc/build/generate_asm_header.py | 9 | 2819 | #!/usr/bin/env python
#
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""This script is a tool to generate special header files from input
C source files.
It first assembles the input source files to generate intermediate assembly
files (*.s). Then it parses the .s files and finds declarations of variables
whose names start with the pattern given via the --pattern option (default
'offset_'), translates the variable names and values into constant defines
and writes them into header files.
"""
import os
import re
import subprocess
import sys
from optparse import OptionParser
def main(argv):
parser = OptionParser()
usage = 'Usage: %prog [options] input_filename'
parser.set_usage(usage)
parser.add_option('--compiler', default = 'gcc', help = 'compiler name')
parser.add_option('--options', default = '-S', help = 'compiler options')
parser.add_option('--pattern', default = 'offset_', help = 'A match pattern'
' used for searching the relevant constants.')
parser.add_option('--dir', default = '.', help = 'output directory')
(options, args) = parser.parse_args()
# Generate complete intermediate and header file names.
input_filename = args[0]
output_root = (options.dir + '/' +
os.path.splitext(os.path.basename(input_filename))[0])
interim_filename = output_root + '.s'
out_filename = output_root + '.h'
# Set the shell command with the compiler and options inputs.
compiler_command = (options.compiler + " " + options.options + " " +
input_filename + " -o " + interim_filename)
# Run the shell command and generate the intermediate file.
subprocess.check_call(compiler_command, shell=True)
interim_file = open(interim_filename) # The intermediate file.
out_file = open(out_filename, 'w') # The output header file.
# Generate the output header file.
while True:
line = interim_file.readline()
if not line: break
if line.startswith(options.pattern):
# Find name of the next constant and write to the output file.
const_name = re.sub(r'^_', '', line.split(':')[0])
out_file.write('#define %s ' % const_name)
# Find value of the constant we just found and write to the output file.
line = interim_file.readline()
const_value = filter(str.isdigit, line.split(' ')[0])
      if const_value != '':
        out_file.write('%s\n' % const_value)
      else:
        # Terminate the line even when no numeric value is found, so the
        # next '#define' does not end up on the same line.
        out_file.write('\n')
interim_file.close()
out_file.close()
if __name__ == "__main__":
main(sys.argv[1:])
| bsd-3-clause |
MarcosCommunity/odoo | openerp/addons/base/tests/test_misc.py | 393 | 1111 | import unittest2
from openerp.tools import misc
class test_countingstream(unittest2.TestCase):
def test_empty_stream(self):
s = misc.CountingStream(iter([]))
self.assertEqual(s.index, -1)
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 0)
def test_single(self):
s = misc.CountingStream(xrange(1))
self.assertEqual(s.index, -1)
self.assertEqual(next(s, None), 0)
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 1)
def test_full(self):
s = misc.CountingStream(xrange(42))
for _ in s:
pass
self.assertEqual(s.index, 42)
def test_repeated(self):
""" Once the CountingStream has stopped iterating, the index should not
increase anymore (the internal state should not be allowed to change)
"""
s = misc.CountingStream(iter([]))
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 0)
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 0)
if __name__ == '__main__':
unittest2.main()
| agpl-3.0 |
punchagan/statiki | tests/test_integration.py | 1 | 6132 | # -*- coding: utf-8 -*-
# Copyright © 2014 Puneeth Chaganti and others.
# See the LICENSE file for license rights and limitations (MIT).
""" Integration tests for statiki. """
import os
from os.path import exists, join
import shlex
import shutil
import subprocess
import tempfile
import unittest
from pkg_resources import parse_version
import yaml
import statiki
TEST_REPO = 'punchagan/experiri'
class TestIntegration(unittest.TestCase):
""" Integration tests for statiki. """
#### 'TestCase' protocol ##################################################
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self._setup_env()
self.repo_dir = self._create_github_repo()
self.venv_dir = self._create_venv()
self.old_cwd = os.getcwd()
self.cache_dir = join(self.old_cwd, 'cache')
os.chdir(self.repo_dir)
self._activate_venv()
return
def tearDown(self):
os.chdir(self.old_cwd)
if exists(self.temp_dir):
shutil.rmtree(self.temp_dir)
return
#### Tests ####
def test_should_build_repo(self):
        # This test covers installing stuff into the venv, populating an
        # empty repo and building a repo. Everything is tested in a single
        # test, since each test takes so long to run!
#### test_should_run_install_steps ####
# Given
install_steps = self._get_install_steps()
# When
for command in install_steps:
subprocess.call(
shlex.split(command),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
# Then
nikola_version = subprocess.check_output(
shlex.split("python -c 'import nikola; print nikola.__version__'")
)
        self.assertGreaterEqual(
            cmp(parse_version(nikola_version.strip()), parse_version('6.4.0')),
            0)
self.assertEqual(
subprocess.call(shlex.split("python -c 'import webassets'")), 0
)
#### test_should_populate_repo ####
# Given
script = self._get_yaml_content()['script']
# When
output, error = subprocess.Popen(
shlex.split(script),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
).communicate()
# Then
self.assertIn('git push', output.splitlines()[-1])
self.assertTrue(exists(join(self.repo_dir, 'conf.py')))
self.assertTrue(exists(join(self.repo_dir, 'files', '.nojekyll')))
#### test_should_build_repo ####
# When
output, error = subprocess.Popen(
shlex.split(script),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
).communicate()
# Then
self.assertIn('git push', output.splitlines()[-1])
self.assertIn('* gh-pages', subprocess.check_output(['git', 'branch']))
self.assertTrue(exists(join(self.repo_dir, '.nojekyll')))
return
#### Private protocol #####################################################
def _activate_venv(self):
""" Activate the venv. """
bin_dir = join(self.venv_dir, 'bin')
os.environ['PATH'] = '%s:%s' % (bin_dir, os.environ.get('PATH'))
pip = subprocess.check_output(['which', 'pip'])
assert pip.startswith(bin_dir), 'venv not activated'
return
def _create_github_repo(self):
""" Create a github repository with the required files. """
repo_dir = join(self.temp_dir, 'repo')
subprocess.check_output(['git', 'init', repo_dir])
subprocess.check_output(
['git', 'config', 'user.email', os.environ['GIT_EMAIL']],
cwd=repo_dir
)
subprocess.check_output(
['git', 'config', 'user.name', os.environ['GIT_NAME']],
cwd=repo_dir
)
content = statiki.get_travis_files_content(TEST_REPO, 'BOGUS', {})
for info in content:
path = join(repo_dir, info['name'])
with open(path, 'w') as f:
f.write(info['content'])
subprocess.check_output(['git', 'add', path], cwd=repo_dir)
subprocess.check_output(
['git', 'commit', '-m', '%s' % info['message']], cwd=repo_dir
)
subprocess.check_output(
shlex.split('git remote add origin ..'), cwd=repo_dir
)
return repo_dir
def _create_venv(self):
""" Create a venv to use for running the commands in .travis.yml. """
venv_dir = join(self.temp_dir, 'venv')
subprocess.check_output(['virtualenv', venv_dir])
return venv_dir
def _fix_install_steps(self, commands):
""" Fix the install steps to reduce work done during tests. """
fixed_commands = []
        for command in commands[:]:
if command.startswith('pip'):
command += ' --download-cache=%s' % self.cache_dir
fixed_commands.append(command)
elif command.startswith('wget'):
zip_file = join(self.cache_dir, 'v2.7.zip')
command += ' -nc -O %s' % zip_file
fixed_commands.append(command)
fixed_commands.append('cp %s .' % zip_file)
else:
fixed_commands.append(command)
return fixed_commands
def _get_install_steps(self):
""" Get the install steps defined in the .travis.yml. """
content = self._get_yaml_content()
return self._fix_install_steps(content['install'])
def _get_yaml_content(self):
""" Return the contents of the .travis.yml file. """
with open('.travis.yml') as f:
content = yaml.load(f)
return content
def _setup_env(self):
""" Setup environment variables used by the fabfile. """
os.environ['GIT_NAME'] = statiki.GIT_NAME
os.environ['GIT_EMAIL'] = statiki.GIT_EMAIL
os.environ['GH_TOKEN'] = 'this-is-a-bogus-token:password'
os.environ['TRAVIS_REPO_SLUG'] = TEST_REPO
return
| mit |
kubeflow/kfserving | python/kfserving/test/test_v1alpha2_inference_service_spec.py | 1 | 1526 | # Copyright 2020 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
KFServing
Python SDK for KFServing # noqa: E501
OpenAPI spec version: v0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import kfserving
from kfserving.models.v1alpha2_inference_service_spec import V1alpha2InferenceServiceSpec # noqa: E501
from kfserving.rest import ApiException
class TestV1alpha2InferenceServiceSpec(unittest.TestCase):
"""V1alpha2InferenceServiceSpec unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1alpha2InferenceServiceSpec(self):
"""Test V1alpha2InferenceServiceSpec"""
# FIXME: construct object with mandatory attributes with example values
# model = kfserving.models.v1alpha2_inference_service_spec.V1alpha2InferenceServiceSpec() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
colloquium/spacewalk | client/rhel/yum-rhn-plugin/rhnplugin.py | 1 | 23715 | """
Yum plugin for RHN access.
This plugin provides access to Red Hat Network to yum via up2date modules
and XMLRPC calls.
"""
import os
import sys
import urllib
from yum.plugins import TYPE_CORE
from yum.yumRepo import YumRepository
import yum.Errors
from urlgrabber.grabber import URLGrabber
from urlgrabber.grabber import URLGrabError
try:
from urlgrabber.grabber import pycurl
except:
pycurl = None
from iniparse import INIConfig
import gettext
_ = gettext.gettext
# TODO: Get the up2date stuff that we need in a better place,
# so we don't have to do path magic.
sys.path.append("/usr/share/rhn/")
import up2date_client.up2dateAuth as up2dateAuth
from up2date_client import config
from up2date_client import rhnChannel
from up2date_client import rhnPackageInfo
from up2date_client import up2dateErrors
import rhn.transports
__revision__ = "$Rev$"
requires_api_version = '2.5'
plugin_type = TYPE_CORE
pcklAuthFileName = "/var/spool/up2date/loginAuth.pkl"
rhn_enabled = True
COMMUNICATION_ERROR = _("There was an error communicating with RHN.")
from M2Crypto.SSL import SSLError, Connection
def bypass_m2crypto_ssl_connection_check(*args, **kw):
"""This needs to return True, it's used to bypass a check in
M2Crypto.SSL.Connection
"""
return True
def init_hook(conduit):
"""
Plugin initialization hook. We setup the RHN channels here.
We get a list of RHN channels from the server, then make a repo object for
each one. This list of repos is then added to yum's list of repos via the
conduit.
"""
global rhn_enabled
RHN_DISABLED = _("RHN support will be disabled.")
#####bz 332011
#This will bypass a check in M2Crypto.SSL.Connection.
    #The check we are bypassing was causing an exception on multi-homed machines
    #when the SSL cert "commonName" did not match the name used to connect to
    #the host. This behavior differed from RHEL4; the check is bypassed to
    #preserve the RHEL4 behavior.
setattr(Connection, "clientPostConnectionCheck", bypass_m2crypto_ssl_connection_check)
    if os.geteuid() != 0:
# If non-root notify user RHN repo not accessible
conduit.error(0, _("*Note* Red Hat Network repositories are not listed below. You must run this command as root to access RHN repositories."))
rhn_enabled = False
return
up2date_cfg = config.initUp2dateConfig()
try:
proxy_url = get_proxy_url(up2date_cfg)
proxy_dict = {}
if up2date_cfg['useNoSSLForPackages']:
proxy_dict = {'HTTP' : proxy_url}
else:
proxy_dict = {'HTTPS' : proxy_url}
except BadProxyConfig:
rhn_enabled = False
PROXY_ERROR = _("There was an error parsing the RHN proxy settings.")
conduit.error(0, PROXY_ERROR + "\n" + RHN_DISABLED)
return
# We might not have an opt parser (ie in pirut)
opt_parser = conduit.getOptParser()
if opt_parser:
(opts, commands) = opt_parser.parse_args()
if len(commands) > 0 and commands[0] == 'clean':
formReposForClean(conduit)
conduit.info(10, _("Cleaning") +
"\n" + RHN_DISABLED)
return
if (hasattr(opts,'version') and opts.version) or (len(commands) == 0):
rhn_enabled = False
conduit.info(10, _("Either --version, or no commands entered") +
"\n" + RHN_DISABLED)
return
try:
login_info = up2dateAuth.getLoginInfo()
except up2dateErrors.RhnServerException, e:
rewordError(e)
conduit.error(0, COMMUNICATION_ERROR + "\n" + RHN_DISABLED + "\n" +
str(e))
rhn_enabled = False
return
if not login_info:
conduit.error(0, _("This system is not registered with RHN.") + "\n" +
RHN_DISABLED)
rhn_enabled = False
return
CHANNELS_DISABLED = _("RHN channel support will be disabled.")
try:
svrChannels = rhnChannel.getChannelDetails()
except up2dateErrors.NoChannelsError:
conduit.error(0, _("This system is not subscribed to any channels.") +
"\n" + CHANNELS_DISABLED)
return
except up2dateErrors.NoSystemIdError:
conduit.error(0, _("This system may not be a registered to RHN. SystemId could not be acquired.\n") +
RHN_DISABLED)
rhn_enabled = False
return
except up2dateErrors.RhnServerException, e:
conduit.error(0, COMMUNICATION_ERROR + "\n" + CHANNELS_DISABLED +
"\n" + str(e))
return
repos = conduit.getRepos()
cachedir = conduit.getConf().cachedir
default_gpgcheck = conduit.getConf().gpgcheck
gpgcheck = conduit.confBool('main', 'gpgcheck', default_gpgcheck)
sslcacert = get_ssl_ca_cert(up2date_cfg)
enablegroups = conduit.getConf().enablegroups
for channel in svrChannels:
if channel['version']:
repo = RhnRepo(channel)
repo.basecachedir = cachedir
repo.gpgcheck = gpgcheck
repo.proxy = proxy_url
repo.sslcacert = sslcacert
repo.enablegroups = enablegroups
repoOptions = getRHNRepoOptions(conduit, repo.id)
if repoOptions:
for o in repoOptions:
setattr(repo, o[0], o[1])
conduit.info(5, "Repo '%s' setting option '%s' = '%s'" %
(repo.id, o[0], o[1]))
repos.add(repo)
#bz226151,441265
#Allows a "yum clean all" to succeed without communicating
#to backend. Creating a set of dummy repos which mimic the dirs stored locally
#This gives yum the dir info it needs to peform a clean
#
def formReposForClean(conduit):
repos = conduit.getRepos()
cachedir = conduit.getConf().cachedir
try:
dir_list = os.listdir(cachedir)
except Exception, e:
raise yum.Errors.RepoError(str(e))
urls = ["http://dummyvalue"]
for dir in dir_list:
if dir[0] == ".":
continue
if os.path.isdir(os.path.join(cachedir,dir)):
repo = YumRepository(dir)
repo.basecachedir = cachedir
repo.baseurl = urls
repo.urls = repo.baseurl
repo.enable()
if not repos.findRepos(repo.id):
repos.add(repo)
# cleanup cached login info
if os.path.exists(pcklAuthFileName):
os.unlink(pcklAuthFileName)
def posttrans_hook(conduit):
""" Post rpm transaction hook. We update the RHN profile here. """
global rhn_enabled
if rhn_enabled:
up2date_cfg = config.initUp2dateConfig()
if up2date_cfg.has_key('writeChangesToLog') and up2date_cfg['writeChangesToLog'] == 1:
ts_info = conduit.getTsInfo()
delta = make_package_delta(ts_info)
rhnPackageInfo.logDeltaPackages(delta)
try:
rhnPackageInfo.updatePackageProfile()
except up2dateErrors.RhnServerException, e:
conduit.error(0, COMMUNICATION_ERROR + "\n" +
_("Package profile information could not be sent.") + "\n" +
str(e))
def rewordError(e):
#This is compensating for hosted/satellite returning back an error
#message instructing RHEL5 clients to run "up2date --register"
#bz: 438175
replacedText = _("Error Message:") + "\n\t" + \
_("Please run rhn_register as root on this client")
index = e.errmsg.find(": 9\n")
if index == -1:
return
if e.errmsg.find("up2date", 0, index) == -1:
return
#Find where the "Error Class Code" text begins, to account
#for different languages, looking for new line character
#preceeding the Error Class Code
indexB = e.errmsg.rfind("\n", 0, index)
e.errmsg = "\n" + replacedText + e.errmsg[indexB:]
class RhnRepo(YumRepository):
"""
Repository object for Red Hat Network.
This, along with the RhnPackageSack, adapts up2date for use with
yum.
"""
rhn_needed_headers = ['X-RHN-Server-Id',
'X-RHN-Auth-User-Id',
'X-RHN-Auth',
'X-RHN-Auth-Server-Time',
'X-RHN-Auth-Expire-Offset']
def __init__(self, channel):
YumRepository.__init__(self, channel['label'])
self.name = channel['name']
self.label = channel['label']
self._callbacks_changed = False
# support failover urls, #232567
urls = []
if type(channel['url']) == list:
for url in channel['url']:
urls.append(url + '/GET-REQ/' + self.id)
else:
urls.append(channel['url'] + '/GET-REQ/' + self.id)
self.baseurl = urls
self.urls = self.baseurl
self.failovermethod = 'priority'
self.keepalive = 0
self.bandwidth = 0
self.retries = 1
self.throttle = 0
self.timeout = 60.0
self.http_caching = True
self.gpgkey = []
self.gpgcheck = False
try:
self.gpgkey = get_gpg_key_urls(channel['gpg_key_url'])
except InvalidGpgKeyLocation:
#TODO: Warn about this or log it
pass
self.enable()
def setupRhnHttpHeaders(self):
""" Set up self.http_headers with needed RHN X-RHN-blah headers """
try:
li = up2dateAuth.getLoginInfo()
except up2dateErrors.RhnServerException, e:
raise yum.Errors.RepoError(str(e))
        # TODO: do evaluation on li auth times to see if we need to obtain a
# new session...
for header in RhnRepo.rhn_needed_headers:
if not li.has_key(header):
error = _("Missing required login information for RHN: %s") \
% header
raise yum.Errors.RepoError(error)
self.http_headers[header] = li[header]
# Set the redirect flag
self.http_headers['X-RHN-Transport-Capability'] = "follow-redirects=3"
# Override the 'private' __get method so we can do our auth stuff.
def _getFile(self, url=None, relative=None, local=None,
start=None, end=None, copy_local=0, checkfunc=None, text=None,
reget='simple', cache=True, size=None):
try:
try:
return self._noExceptionWrappingGet(url, relative, local,
start, end, copy_local, checkfunc, text, reget, cache, size)
except URLGrabError, e:
try:
up2dateAuth.updateLoginInfo()
except up2dateErrors.RhnServerException, e:
raise yum.Errors.RepoError(str(e))
return self._noExceptionWrappingGet(url, relative, local,
start, end, copy_local, checkfunc, text, reget, cache, size)
except URLGrabError, e:
raise yum.Errors.RepoError, \
"failed to retrieve %s from %s\nerror was %s" % (relative,
self.id, e)
except SSLError, e:
raise yum.Errors.RepoError(str(e))
except up2dateErrors.InvalidRedirectionError, e:
raise up2dateErrors.InvalidRedirectionError(e)
_YumRepository__get = _getFile
# This code is copied from yum, we should get the original code to
# provide more detail in its exception, so we don't have to cut n' paste
def _noExceptionWrappingGet(self, url=None, relative=None, local=None,
start=None, end=None, copy_local=0, checkfunc=None, text=None,
reget='simple', cache=True, size=None):
"""retrieve file from the mirrorgroup for the repo
relative to local, optionally get range from
start to end, also optionally retrieve from a specific baseurl"""
# if local or relative is None: raise an exception b/c that shouldn't happen
# return the path to the local file
self.setupRhnHttpHeaders()
if pycurl:
# pycurl/libcurl workaround: in libcurl setting an empty HTTP header means
# remove that header from the list
# but we have to send and empty X-RHN-Auth-User-Id ...
AuthUserH = 'X-RHN-Auth-User-Id'
if (AuthUserH in self.http_headers and not self.http_headers[AuthUserH]):
self.http_headers[AuthUserH] = "\nX-libcurl-Empty-Header-Workaround: *"
# Turn our dict into a list of 2-tuples
headers = YumRepository._YumRepository__headersListFromDict(self)
# We will always prefer to send no-cache.
if not (cache or self.http_headers.has_key('Pragma')):
headers.append(('Pragma', 'no-cache'))
headers = tuple(headers)
if local is None or relative is None:
raise yum.Errors.RepoError, \
"get request for Repo %s, gave no source or dest" % self.id
if self.failure_obj:
(f_func, f_args, f_kwargs) = self.failure_obj
self.failure_obj = (f_func, f_args, f_kwargs)
if self.cache == 1:
if os.path.exists(local): # FIXME - we should figure out a way
return local # to run the checkfunc from here
else: # ain't there - raise
raise yum.Errors.RepoError, \
"Caching enabled but no local cache of %s from %s" % (local,
self)
if url is not None:
remote = url + '/' + relative
result = self.grab.urlgrab(remote, local,
text = text,
range = (start, end),
copy_local=copy_local,
reget = reget,
checkfunc=checkfunc,
http_headers=headers,
ssl_ca_cert = self.sslcacert,
timeout=self.timeout,
size = size
)
return result
result = None
urlException = None
for server in self.baseurl:
#force to http if configured
up2date_cfg = config.initUp2dateConfig()
if up2date_cfg['useNoSSLForPackages'] == 1:
server = force_http(server)
#Sanity check the url
check_url(server)
# Construct the full url string
remote = server + '/' + relative
try:
result = self.grab.urlgrab(remote, local,
text = text,
range = (start, end),
copy_local=copy_local,
reget = reget,
checkfunc=checkfunc,
http_headers=headers,
ssl_ca_cert = self.sslcacert,
timeout=self.timeout,
size = size
)
return result
except URLGrabError, e:
urlException = e
continue
        if result is None:
raise urlException
return result
def _setupGrab(self):
"""sets up the grabber functions. We don't want to use mirrors."""
headers = tuple(YumRepository._YumRepository__headersListFromDict(self))
self._grabfunc = URLGrabber(keepalive=self.keepalive,
bandwidth=self.bandwidth,
retry=self.retries,
throttle=self.throttle,
progress_obj=self.callback,
proxies = self.proxy_dict,
interrupt_callback=self.interrupt_callback,
timeout=self.timeout,
http_headers=headers,
reget='simple')
#bz453690 ensure that user-agent header matches for communication from
#up2date library calls, as well as yum-rhn-plugin calls
self._grabfunc.opts.user_agent = rhn.transports.Transport.user_agent
self._grab = self._grabfunc
setupGrab = _setupGrab
def _getgrabfunc(self):
if not self._grabfunc or self._callbacks_changed:
self._setupGrab()
self._callbacks_changed = False
return self._grabfunc
def _getgrab(self):
if not self._grab or self._callbacks_changed:
self._setupGrab()
self._callbacks_changed = False
return self._grab
grabfunc = property(lambda self: self._getgrabfunc())
grab = property(lambda self: self._getgrab())
def _setChannelEnable(self, value=1):
""" Enable or disable channel in file rhnplugin.conf.
channel is label of channel and value should be 1 or 0.
"""
cfg = INIConfig(file('/etc/yum/pluginconf.d/rhnplugin.conf'))
        # we cannot use cfg[self.label]['enabled'] directly, because
        # if that section does not exist it raises an error
func=getattr(cfg, self.label)
func.enabled=value
f=open('/etc/yum/pluginconf.d/rhnplugin.conf', 'w')
print >>f, cfg
f.close()
def enablePersistent(self):
"""
Persistently enable channel in rhnplugin.conf
"""
self._setChannelEnable(1)
self.enable()
def disablePersistent(self):
"""
Persistently disable channel in rhnplugin.conf
"""
self._setChannelEnable(0)
self.disable()
def _getRepoXML(self):
import yum.Errors
try:
return YumRepository._getRepoXML(self)
except yum.Errors.RepoError, e:
# Refresh our loginInfo then try again
# possibly it's out of date
up2dateAuth.updateLoginInfo()
return YumRepository._getRepoXML(self)
def make_package_delta(ts_info):
"""
Construct an RHN style package delta from a yum TransactionData object.
Return a hash containing two keys: added and removed.
Each key's value is a list of RHN style package tuples.
"""
delta = {}
delta["added"] = []
delta["removed"] = []
# Make sure the transaction data has the packages in nice lists.
ts_info.makelists()
for ts_member in ts_info.installed:
package = ts_member.po
pkgtup = __rhn_pkg_tup_from_po(package)
delta["added"].append(pkgtup)
for ts_member in ts_info.depinstalled:
package = ts_member.po
pkgtup = __rhn_pkg_tup_from_po(package)
delta["added"].append(pkgtup)
for ts_member in ts_info.updated:
package = ts_member.po
pkgtup = __rhn_pkg_tup_from_po(package)
delta["added"].append(pkgtup)
for ts_member in ts_info.depupdated:
package = ts_member.po
pkgtup = __rhn_pkg_tup_from_po(package)
delta["added"].append(pkgtup)
for ts_member in ts_info.removed:
package = ts_member.po
pkgtup = __rhn_pkg_tup_from_po(package)
delta["removed"].append(pkgtup)
for ts_member in ts_info.depremoved:
package = ts_member.po
pkgtup = __rhn_pkg_tup_from_po(package)
delta["removed"].append(pkgtup)
return delta
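# Shape of the returned delta (package values are illustrative):
#   {'added':   [('foo', '1.0', '1', '0', 'x86_64')],
#    'removed': [('bar', '2.3', '5', '0', 'noarch')]}
# where each tuple is (name, version, release, epoch, arch) as produced by
# __rhn_pkg_tup_from_po() below.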
def __rhn_pkg_tup_from_po(package):
""" Construct an rhn-style package tuple from a yum package object. """
name = package.returnSimple('name')
epoch = package.returnSimple('epoch')
version = package.returnSimple('version')
release = package.returnSimple('release')
arch = package.returnSimple('arch')
return (name, version, release, epoch, arch)
class BadConfig(Exception):
pass
class BadProxyConfig(BadConfig):
pass
class BadSslCaCertConfig(BadConfig):
pass
def get_proxy_url(up2date_cfg):
if not up2date_cfg['enableProxy']:
return None
proxy_url = ""
if up2date_cfg['useNoSSLForPackages']:
proxy_url = 'http://'
else:
proxy_url = 'https://'
if up2date_cfg['enableProxyAuth']:
if not up2date_cfg.has_key('proxyUser') or \
up2date_cfg['proxyUser'] == '':
raise BadProxyConfig
if not up2date_cfg.has_key('proxyPassword') or \
up2date_cfg['proxyPassword'] == '':
raise BadProxyConfig
proxy_url = proxy_url + up2date_cfg['proxyUser']
proxy_url = proxy_url + ':'
proxy_url = proxy_url + urllib.quote(up2date_cfg['proxyPassword'])
proxy_url = proxy_url + '@'
netloc = up2date_cfg['httpProxy']
if netloc == '':
raise BadProxyConfig
# Check if a protocol is supplied. We'll ignore it.
proto_split = netloc.split('://')
if len(proto_split) > 1:
netloc = proto_split[1]
return proxy_url + netloc
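# Illustrative result (values are made up): with enableProxy=1,
# enableProxyAuth=1, proxyUser='u', proxyPassword='p w' and
# httpProxy='proxy.example.com:3128', this returns
# 'https://u:p%20w@proxy.example.com:3128' (an 'http://' url instead when
# useNoSSLForPackages is set).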
class InvalidGpgKeyLocation(Exception):
pass
def is_valid_gpg_key_url(key_url):
proto_split = key_url.split('://')
if len(proto_split) != 2:
return False
proto, path = proto_split
if proto.lower() != 'file':
return False
path = os.path.normpath(path)
if not path.startswith('/etc/pki/rpm-gpg/'):
return False
return True
def get_gpg_key_urls(key_url_string):
"""
Parse the key urls and validate them.
    key_url_string is a space separated list of gpg key urls that must be
    located in /etc/pki/rpm-gpg/.
Return a list of strings containing the key urls.
Raises InvalidGpgKeyLocation if any of the key urls are invalid.
"""
key_urls = key_url_string.split()
for key_url in key_urls:
if not is_valid_gpg_key_url(key_url):
raise InvalidGpgKeyLocation
return key_urls
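# e.g. get_gpg_key_urls('file:///etc/pki/rpm-gpg/RPM-GPG-KEY-a '
#                       'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-b')
# returns both urls; any key outside /etc/pki/rpm-gpg/, or one using a scheme
# other than file://, raises InvalidGpgKeyLocation.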
def get_ssl_ca_cert(up2date_cfg):
if not (up2date_cfg.has_key('sslCACert') and up2date_cfg['sslCACert']):
raise BadSslCaCertConfig
ca_certs = up2date_cfg['sslCACert']
if type(ca_certs) == list:
return ca_certs[0]
return ca_certs
def check_url(serverurl):
typ, uri = urllib.splittype(serverurl)
    if typ is not None:
        typ = typ.lower()
up2date_cfg = config.initUp2dateConfig()
if up2date_cfg['useNoSSLForPackages']:
        if typ.strip() not in ("http",):
            raise up2dateErrors.InvalidProtocolError("You specified an invalid "
                                "protocol. Option useNoSSLForPackages requires http")
elif typ.strip() not in ("http", "https"):
raise up2dateErrors.InvalidProtocolError("You specified an invalid "
"protocol. Only https and "
"http are allowed.")
def force_http(serverurl):
"""
Returns a url using http
"""
httpUrl = serverurl
typ, uri = urllib.splittype(serverurl)
    if typ is not None:
        typ = typ.lower().strip()
        if typ not in ("http",):
            httpUrl = "http:" + uri
return httpUrl
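# e.g. force_http('https://xmlrpc.rhn.example.com/XMLRPC') returns
# 'http://xmlrpc.rhn.example.com/XMLRPC'; urls already using http, or urls
# with no scheme at all, are returned unchanged.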
def getRHNRepoOptions(conduit, repoid):
from ConfigParser import NoSectionError
conduit.info(5, "Looking for repo options for [%s]" % (repoid))
try:
if conduit:
if hasattr(conduit, "_conf") and hasattr(conduit._conf, "items"):
return conduit._conf.items(repoid)
except NoSectionError, e:
pass
return None
| gpl-2.0 |
bollu/vispy | vispy/scene/cameras/turntable.py | 20 | 5029 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from __future__ import division
import numpy as np
from .perspective import Base3DRotationCamera
class TurntableCamera(Base3DRotationCamera):
""" 3D camera class that orbits around a center point while
maintaining a view on a center point.
For this camera, the ``scale_factor`` indicates the zoom level, and
the ``center`` indicates the position to put at the center of the
view.
Parameters
----------
fov : float
Field of view. Zero (default) means orthographic projection.
elevation : float
Elevation angle in degrees. Positive angles place the camera
        above the center point, negative angles place the camera below
the center point.
azimuth : float
Azimuth angle in degrees. Zero degrees places the camera on the
positive x-axis, pointing in the negative x direction.
roll : float
Roll angle in degrees
distance : float | None
The distance of the camera from the rotation point (only makes sense
if fov > 0). If None (default) the distance is determined from the
scale_factor and fov.
**kwargs : dict
Keyword arguments to pass to `BaseCamera`.
Notes
-----
Interaction:
* LMB: orbits the view around its center point.
* RMB or scroll: change scale_factor (i.e. zoom level)
* SHIFT + LMB: translate the center point
* SHIFT + RMB: change FOV
"""
_state_props = Base3DRotationCamera._state_props + ('elevation',
'azimuth', 'roll')
def __init__(self, fov=0.0, elevation=30.0, azimuth=30.0, roll=0.0,
distance=None, **kwargs):
super(TurntableCamera, self).__init__(fov=fov, **kwargs)
# Set camera attributes
self.azimuth = azimuth
self.elevation = elevation
self.roll = roll # interaction not implemented yet
self.distance = distance # None means auto-distance
@property
def elevation(self):
""" The angle of the camera in degrees above the horizontal (x, z)
plane.
"""
return self._elevation
@elevation.setter
def elevation(self, elev):
elev = float(elev)
self._elevation = min(90, max(-90, elev))
self.view_changed()
@property
def azimuth(self):
""" The angle of the camera in degrees around the y axis. An angle of
0 places the camera within the (y, z) plane.
"""
return self._azimuth
@azimuth.setter
def azimuth(self, azim):
azim = float(azim)
while azim < -180:
azim += 360
while azim > 180:
azim -= 360
self._azimuth = azim
self.view_changed()
@property
def roll(self):
""" The angle of the camera in degrees around the z axis. An angle of
        0 puts the camera upright.
"""
return self._roll
@roll.setter
def roll(self, roll):
roll = float(roll)
while roll < -180:
roll += 360
while roll > 180:
roll -= 360
self._roll = roll
self.view_changed()
def orbit(self, azim, elev):
""" Orbits the camera around the center position.
Parameters
----------
azim : float
Angle in degrees to rotate horizontally around the center point.
elev : float
Angle in degrees to rotate vertically around the center point.
"""
self.azimuth += azim
self.elevation = np.clip(self.elevation + elev, -90, 90)
self.view_changed()
def _update_rotation(self, event):
"""Update rotation parmeters based on mouse movement"""
p1 = event.mouse_event.press_event.pos
p2 = event.mouse_event.pos
if self._event_value is None:
self._event_value = self.azimuth, self.elevation
self.azimuth = self._event_value[0] - (p2 - p1)[0] * 0.5
self.elevation = self._event_value[1] + (p2 - p1)[1] * 0.5
def _rotate_tr(self):
"""Rotate the transformation matrix based on camera parameters"""
up, forward, right = self._get_dim_vectors()
self.transform.rotate(self.elevation, -right)
self.transform.rotate(self.azimuth, up)
def _dist_to_trans(self, dist):
"""Convert mouse x, y movement into x, y, z translations"""
rae = np.array([self.roll, self.azimuth, self.elevation]) * np.pi / 180
sro, saz, sel = np.sin(rae)
cro, caz, cel = np.cos(rae)
dx = (+ dist[0] * (cro * caz + sro * sel * saz)
+ dist[1] * (sro * caz - cro * sel * saz))
dy = (+ dist[0] * (cro * saz - sro * sel * caz)
+ dist[1] * (sro * saz + cro * sel * caz))
dz = (- dist[0] * sro * cel + dist[1] * cro * cel)
return dx, dy, dz
| bsd-3-clause |
YongseopKim/crosswalk-test-suite | wrt/tct-sp01-wrt-tests/inst.xpk.py | 30 | 6769 | #!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
SRC_DIR = ""
PKG_SRC_DIR = ""
def doCMD(cmd):
# Do not need handle timeout in this short script, let tool do it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
        if output_line == '' and cmd_return_code is not None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
def updateCMD(cmd=None):
if "pkgcmd" in cmd:
cmd = "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
return cmd
def getUSERID():
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell id -u %s" % (
PARAMETERS.device, PARAMETERS.user)
else:
cmd = "ssh %s \"id -u %s\"" % (
            PARAMETERS.device, PARAMETERS.user)
return doCMD(cmd)
def getPKGID(pkg_name=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
else:
cmd = "ssh %s \"%s\"" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
(return_code, output) = doCMD(cmd)
if return_code != 0:
return None
test_pkg_id = None
for line in output:
if line.find("[" + pkg_name + "]") != -1:
pkgidIndex = line.split().index("pkgid")
test_pkg_id = line.split()[pkgidIndex+1].strip("[]")
break
return test_pkg_id
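# Example 'pkgcmd -l' output line this parses (format assumed):
#   pkg_type [xpk]  pkgid [abcdefgh01]  pkg_name [sample-app]  ...
# getPKGID('sample-app') matches '[sample-app]' in the line and returns the
# token following 'pkgid' with the brackets stripped, i.e. 'abcdefgh01'.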
def doRemoteCMD(cmd=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
else:
cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
return doCMD(cmd)
def doRemoteCopy(src=None, dest=None):
if PARAMETERS.mode == "SDB":
cmd_prefix = "sdb -s %s push" % PARAMETERS.device
cmd = "%s %s %s" % (cmd_prefix, src, dest)
else:
cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
(return_code, output) = doCMD(cmd)
doRemoteCMD("sync")
    if return_code != 0:
        # a non-zero exit status means the copy failed; callers treat a
        # False return as failure
        return False
    return True
def uninstPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
for file in files:
if file.endswith(".xpk"):
pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
if not pkg_id:
action_status = False
continue
(return_code, output) = doRemoteCMD(
"pkgcmd -u -t xpk -q -n %s" % pkg_id)
for line in output:
if "Failure" in line:
action_status = False
break
(return_code, output) = doRemoteCMD(
"rm -rf %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
return action_status
def instPKGs():
action_status = True
(return_code, output) = doRemoteCMD(
"mkdir -p %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
# for file in files:
# if file.endswith(".xpk"):
# if not doRemoteCopy(os.path.join(root, file), "%s/%s" % (SRC_DIR, file)):
# action_status = False
# (return_code, output) = doRemoteCMD(
# "pkgcmd -i -t xpk -q -p %s/%s" % (SRC_DIR, file))
# doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
# for line in output:
# if "Failure" in line:
# action_status = False
# break
# Do some special copy/delete... steps
'''
(return_code, output) = doRemoteCMD(
"mkdir -p %s/tests" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
action_status = False
'''
return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-m", dest="mode", action="store", help="Specify mode")
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
opts_parser.add_option(
"-a", dest="user", action="store", help="User name")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
except Exception, e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.user:
PARAMETERS.user = "app"
global SRC_DIR, PKG_SRC_DIR
SRC_DIR = "/home/%s/content" % PARAMETERS.user
PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
if not PARAMETERS.mode:
PARAMETERS.mode = "SDB"
if PARAMETERS.mode == "SDB":
if not PARAMETERS.device:
(return_code, output) = doCMD("sdb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
else:
PARAMETERS.mode = "SSH"
if not PARAMETERS.device:
print "No device provided"
sys.exit(1)
user_info = getUSERID()
re_code = user_info[0]
    if re_code == 0:
        global XW_ENV
        userid = user_info[1][0]
        XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket" % str(userid)
    else:
        print "[Error] cmd commands error : %s" % str(user_info[1])
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
| bsd-3-clause |
akshara775/PerfKitBenchmarker-master-2 | tests/virtual_machine_test.py | 1 | 3034 | # Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for perfkitbenchmarker.virtual_machine."""
import unittest
from perfkitbenchmarker import errors
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker.configs import option_decoders
class TestVmSpec(virtual_machine.BaseVmSpec):
CLOUD = 'test_cloud'
@classmethod
def _GetOptionDecoderConstructions(cls):
result = super(TestVmSpec, cls)._GetOptionDecoderConstructions()
result['required_string'] = (option_decoders.StringDecoder, {})
result['required_int'] = (option_decoders.IntDecoder, {})
return result
class BaseVmSpecTestCase(unittest.TestCase):
def testDefaults(self):
spec = virtual_machine.BaseVmSpec()
self.assertEqual(spec.image, None)
self.assertEqual(spec.install_packages, True)
self.assertEqual(spec.machine_type, None)
self.assertEqual(spec.zone, None)
def testProvidedValid(self):
spec = virtual_machine.BaseVmSpec(
image='test_image', install_packages=False,
machine_type='test_machine_type', zone='test_zone')
self.assertEqual(spec.image, 'test_image')
self.assertEqual(spec.install_packages, False)
self.assertEqual(spec.machine_type, 'test_machine_type')
self.assertEqual(spec.zone, 'test_zone')
def testUnrecognizedOptions(self):
with self.assertRaises(errors.Config.UnrecognizedOption) as cm:
virtual_machine.BaseVmSpec(color='red', flavor='cherry', texture=None)
self.assertEqual(str(cm.exception), (
'Unrecognized options were found in a VM config: color, flavor, '
'texture.'))
def testMissingOptions(self):
with self.assertRaises(errors.Config.MissingOption) as cm:
TestVmSpec()
self.assertEqual(str(cm.exception), (
'Required options were missing from a test_cloud VM config: '
'required_int, required_string.'))
def testInvalidImage(self):
with self.assertRaises(errors.Config.InvalidValue):
virtual_machine.BaseVmSpec(image=0)
def testInvalidInstallPackages(self):
with self.assertRaises(errors.Config.InvalidValue):
virtual_machine.BaseVmSpec(install_packages='yes')
def testInvalidMachineType(self):
with self.assertRaises(errors.Config.InvalidValue):
virtual_machine.BaseVmSpec(machine_type=True)
def testInvalidZone(self):
with self.assertRaises(errors.Config.InvalidValue):
virtual_machine.BaseVmSpec(zone=0)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
takeshineshiro/cinder | cinder/keymgr/key_mgr.py | 27 | 4244 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Key manager API
"""
import abc
from oslo_config import cfg
import six
encryption_opts = [
cfg.StrOpt('encryption_auth_url',
default='http://localhost:5000/v3',
help='Authentication url for encryption service.'),
cfg.StrOpt('encryption_api_url',
default='http://localhost:9311/v1',
help='Url for encryption service.'),
]
CONF = cfg.CONF
CONF.register_opts(encryption_opts, 'keymgr')
@six.add_metaclass(abc.ABCMeta)
class KeyManager(object):
"""Base Key Manager Interface
    A Key Manager is responsible for managing encryption keys for volumes:
    creating, reading, and deleting them.
"""
@abc.abstractmethod
def create_key(self, ctxt, algorithm='AES', length=256, expiration=None,
**kwargs):
"""Creates a key.
This method creates a key and returns the key's UUID. If the specified
context does not permit the creation of keys, then a NotAuthorized
exception should be raised.
"""
pass
@abc.abstractmethod
def store_key(self, ctxt, key, expiration=None, **kwargs):
"""Stores (i.e., registers) a key with the key manager.
This method stores the specified key and returns its UUID that
identifies it within the key manager. If the specified context does
not permit the creation of keys, then a NotAuthorized exception should
be raised.
"""
pass
@abc.abstractmethod
def copy_key(self, ctxt, key_id, **kwargs):
"""Copies (i.e., clones) a key stored by the key manager.
This method copies the specified key and returns the copy's UUID. If
the specified context does not permit copying keys, then a
NotAuthorized error should be raised.
Implementation note: This method should behave identically to
store_key(context, get_key(context, <encryption key UUID>))
although it is preferable to perform this operation within the key
manager to avoid unnecessary handling of the key material.
"""
pass
@abc.abstractmethod
def get_key(self, ctxt, key_id, **kwargs):
"""Retrieves the specified key.
Implementations should verify that the caller has permissions to
retrieve the key by checking the context object passed in as ctxt. If
the user lacks permission then a NotAuthorized exception is raised.
If the specified key does not exist, then a KeyError should be raised.
Implementations should preclude users from discerning the UUIDs of
keys that belong to other users by repeatedly calling this method.
That is, keys that belong to other users should be considered "non-
existent" and completely invisible.
"""
pass
@abc.abstractmethod
def delete_key(self, ctxt, key_id, **kwargs):
"""Deletes the specified key.
Implementations should verify that the caller has permission to delete
the key by checking the context object (ctxt). A NotAuthorized
exception should be raised if the caller lacks permission.
If the specified key does not exist, then a KeyError should be raised.
Implementations should preclude users from discerning the UUIDs of
keys that belong to other users by repeatedly calling this method.
That is, keys that belong to other users should be considered "non-
existent" and completely invisible.
"""
pass
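# Minimal sketch of a conforming backend (illustrative only; not part of the
# Cinder API). A real implementation must also enforce the context checks
# described in the docstrings above:
#
#   class InMemoryKeyManager(KeyManager):
#       def __init__(self):
#           self._keys = {}
#
#       def create_key(self, ctxt, algorithm='AES', length=256,
#                      expiration=None, **kwargs):
#           key_id = str(uuid.uuid4())
#           self._keys[key_id] = os.urandom(length // 8)
#           return key_id
#
#       def store_key(self, ctxt, key, expiration=None, **kwargs):
#           key_id = str(uuid.uuid4())
#           self._keys[key_id] = key
#           return key_id
#
#       def copy_key(self, ctxt, key_id, **kwargs):
#           return self.store_key(ctxt, self.get_key(ctxt, key_id))
#
#       def get_key(self, ctxt, key_id, **kwargs):
#           return self._keys[key_id]    # raises KeyError if missing
#
#       def delete_key(self, ctxt, key_id, **kwargs):
#           del self._keys[key_id]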
| apache-2.0 |
40123142/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/site-packages/pygame/SDL.py | 603 | 1813 | from browser import document
SDL_INIT_VIDEO=0
SDL_GL_DOUBLEBUFFER=1
SDL_GL_DEPTH_SIZE=2
SDL_DOUBLEBUF=3
SDL_ANYFORMAT=4
SDL_ACTIVEEVENT=5
SDL_ALLEVENTS=5
SDL_KEYDOWN=6
SDL_KEYUP=7
SDL_MOUSEMOTION=8
SDL_MOUSEBUTTONDOWN=9
SDL_MOUSEBUTTONUP=10
SDL_JOYAXISMOTION=11
SDL_JOYBALLMOTION=12
SDL_JOYHATMOTION=13
SDL_JOYBUTTONUP=14
SDL_JOYBUTTONDOWN=15
SDL_QUIT=16
SDL_SYSWMEVENT=17
SDL_VIDEORESIZE=18
SDL_VIDEOEXPOSE=19
SDL_NOEVENT=20
SDL_GETEVENT=21
SDL_OPENGL=False
def SDL_WasInit(var):
return True
_attrs={}
_wm={}
def SDL_PeepEvents(num, event, mask):
pass
def SDL_GL_SetAttribute(variable, value):
_attrs[variable]=value
def SDL_GL_GetAttribute(variable):
return _attrs.getvalue(variable, None)
def SDL_GL_SetVideoMode(width, height, depth, flags):
pass
def SDL_WM_SetCaption(title, icontitle):
_wm['title']=title
_wm['icontitle']=icontitle
def SDL_PumpEvents():
pass
def SDL_SetVideoMode(width, height, depth, flags):
pass
def SDL_SetColorKey(surface, key, value):
pass
def SDL_WM_GetCaption():
return _wm.get('title', ''), _wm.get('icontitle', '')
def SDL_UpdateRect(screen, x1, y1, x2, y2):
screen.canvas.style.width=screen.canvas.style.width
def SDL_UpdateRects(screen, rects):
for _rect in rects:
SDL_UpdateRect(screen, _rect)
def SDL_GetVideoSurface():
return _Screen
def SDL_GetVideoInfo():
return
def SDL_VideoModeOK(width, height, depth, flags):
pass
def SDL_SetPalette(surface, sdl_var, colors, flag):
pass
class Screen:
def __init__(self):
self.flags=0
@property
def canvas(self):
return document.get(selector='canvas')[0]
_Screen=Screen()
class SDL_Rect:
def __init__(self, x, y, w, h):
self.x=x
self.y=y
self.w=w
self.h=h
def SDL_Flip(screen):
pass
| gpl-2.0 |
benjystanton/F2D-Prototype | node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/easy_xml_test.py | 2698 | 3270 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(['test']),
'<?xml version="1.0" encoding="utf-8"?><test/>')
self.assertEqual(
easy_xml.XmlToString(['test'], encoding='Windows-1252'),
'<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
def test_EasyXml_escaping(self):
original = '<test>\'"\r&\nfoo'
converted = '<test>\'"
&
foo'
converted_apos = converted.replace("'", ''')
self.assertEqual(
easy_xml.XmlToString(['test3', {'a': original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
(converted, converted_apos))
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
['test3',
['GrandParent',
['Parent1',
['Child']
],
['Parent2']
]
],
pretty=True),
'<?xml version="1.0" encoding="utf-8"?>\n'
'<test3>\n'
' <GrandParent>\n'
' <Parent1>\n'
' <Child/>\n'
' </Parent1>\n'
' <Parent2/>\n'
' </GrandParent>\n'
'</test3>\n')
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup '
'Condition="\'$(Configuration)|$(Platform)\'=='
'\'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.XmlToString(
['Project',
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(xml, target)
if __name__ == '__main__':
unittest.main()
| mit |
trudikampfschaf/flask-microblog | flask/lib/python2.7/site-packages/openid/dh.py | 168 | 1608 | from openid import cryptutil
from openid import oidutil
def strxor(x, y):
if len(x) != len(y):
raise ValueError('Inputs to strxor must have the same length')
xor = lambda (a, b): chr(ord(a) ^ ord(b))
return "".join(map(xor, zip(x, y)))
class DiffieHellman(object):
DEFAULT_MOD = 155172898181473697471232257763715539915724801966915404479707795314057629378541917580651227423698188993727816152646631438561595825688188889951272158842675419950341258706556549803580104870537681476726513255747040765857479291291572334510643245094715007229621094194349783925984760375594985848253359305585439638443L
DEFAULT_GEN = 2
def fromDefaults(cls):
return cls(cls.DEFAULT_MOD, cls.DEFAULT_GEN)
fromDefaults = classmethod(fromDefaults)
def __init__(self, modulus, generator):
self.modulus = long(modulus)
self.generator = long(generator)
self._setPrivate(cryptutil.randrange(1, modulus - 1))
def _setPrivate(self, private):
"""This is here to make testing easier"""
self.private = private
self.public = pow(self.generator, self.private, self.modulus)
def usingDefaultValues(self):
return (self.modulus == self.DEFAULT_MOD and
self.generator == self.DEFAULT_GEN)
def getSharedSecret(self, composite):
return pow(composite, self.private, self.modulus)
def xorSecret(self, composite, secret, hash_func):
dh_shared = self.getSharedSecret(composite)
hashed_dh_shared = hash_func(cryptutil.longToBinary(dh_shared))
return strxor(secret, hashed_dh_shared)
| bsd-3-clause |
randynobx/ansible | test/units/modules/network/vyos/test_vyos_command.py | 62 | 4089 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.vyos import vyos_command
from .vyos_module import TestVyosModule, load_fixture, set_module_args
class TestVyosCommandModule(TestVyosModule):
module = vyos_command
def setUp(self):
self.mock_run_commands = patch('ansible.modules.network.vyos.vyos_command.run_commands')
self.run_commands = self.mock_run_commands.start()
def tearDown(self):
self.mock_run_commands.stop()
def load_fixtures(self, commands=None):
def load_from_file(*args, **kwargs):
module, commands = args
output = list()
for item in commands:
try:
obj = json.loads(item)
command = obj['command']
except ValueError:
command = item
filename = str(command).replace(' ', '_')
output.append(load_fixture(filename))
return output
self.run_commands.side_effect = load_from_file
def test_vyos_command_simple(self):
set_module_args(dict(commands=['show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 1)
self.assertTrue(result['stdout'][0].startswith('Version: VyOS'))
def test_vyos_command_multiple(self):
set_module_args(dict(commands=['show version', 'show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 2)
self.assertTrue(result['stdout'][0].startswith('Version: VyOS'))
def test_vyos_command_wait_for(self):
wait_for = 'result[0] contains "VyOS maintainers"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module()
def test_vyos_command_wait_for_fails(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 10)
def test_vyos_command_retries(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for, retries=2))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 2)
def test_vyos_command_match_any(self):
wait_for = ['result[0] contains "VyOS maintainers"',
'result[0] contains "test string"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='any'))
self.execute_module()
def test_vyos_command_match_all(self):
wait_for = ['result[0] contains "VyOS maintainers"',
'result[0] contains "[email protected]"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='all'))
self.execute_module()
def test_vyos_command_match_all_failure(self):
wait_for = ['result[0] contains "VyOS maintainers"',
'result[0] contains "test string"']
commands = ['show version', 'show version']
set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
self.execute_module(failed=True)
| gpl-3.0 |
mkuron/espresso | src/python/espressomd/virtual_sites.py | 1 | 2520 | # Copyright (C) 2010-2018 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .__init__ import has_features
from .script_interface import ScriptInterfaceHelper, script_interface_register
if has_features("VIRTUAL_SITES"):
@script_interface_register
class ActiveVirtualSitesHandle(ScriptInterfaceHelper):
"""Handle for the virtual sites implementation active in the core
This should not be used directly.
Attributes
----------
implementation : instance of a virtual sites implementation
"""
_so_name = "VirtualSites::ActiveVirtualSitesHandle"
@script_interface_register
class VirtualSitesOff(ScriptInterfaceHelper):
"""Virtual sites implementation which does nothing (default)"""
_so_name = "VirtualSites::VirtualSitesOff"
if has_features("VIRTUAL_SITES_INERTIALESS_TRACERS"):
@script_interface_register
class VirtualSitesInertialessTracers(ScriptInterfaceHelper):
"""Virtual sites which are advected with an lb fluid without inertia. Forces are on them are transferred to the fluid instantly.
"""
_so_name = "VirtualSites::VirtualSitesInertialessTracers"
if has_features("VIRTUAL_SITES_RELATIVE"):
@script_interface_register
class VirtualSitesRelative(ScriptInterfaceHelper):
"""Virtual sites implementation placing virtual sites relative to other particles.
See :ref:`Rigid arrangements of particles` for details.
Attributes
----------
have_velocity : :obj:`bool`
Determines whether the velocity of the virtual sites is calculated.
This carries a performance cost.
Attributes can be set on the instance or passed to the constructor as
keyword arguments.
"""
_so_name = "VirtualSites::VirtualSitesRelative"
| gpl-3.0 |
Mayurji/Machine-Learning | Email Classification Using ML/Email_Classifiers.py | 1 | 10287 | from sklearn.cross_validation import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer,TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
import scipy as sp
import nltk.stem
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import KFold
english_stemmer = nltk.stem.SnowballStemmer('english')
from sklearn.metrics import accuracy_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import MultinomialNB  # needed by testEmailBody(); missing from the original imports
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.linear_model import LogisticRegression
from DateTime import DateTime
import csv
import pandas as pd
import os
import re
from os import path
from Email_Classifier import GeneratingDataSet
from Email_Classifier import fetchEmails
gds = GeneratingDataSet()
global bodyword
global subjectword
global X_trained_
global datetimevalue
global time
global formattedDateTime
time = []
subjectword = []
datetimevalue = []
formattedDateTime = []
# def convertDateTimeFormat(timestamp):
# print(timestamp[0])
# for i in range(len(timestamp)):
# if '-' in timestamp[0][i]:
# splitTZ = timestamp[0][i].split('-')
# y = DateTime(splitTZ[0])
# x = [y.parts()]
# dateval = int(str(x[0][0])+str(x[0][1])+str(x[0][2])+str(x[0][3]))#+str(x[0][4])+str(x[0][5]))
# datetimevalue.append(dateval)
# elif '+' in timestamp[0][i]:
# splitTZ = timestamp[0][i].split('+')
# y = DateTime(splitTZ[0])
# x = [y.parts()]
# dateval = int(str(x[0][0])+str(x[0][1])+str(x[0][2])+str(x[0][3]))#+str(x[0][4])+str(x[0][5]))
# datetimevalue.append(dateval)
# else:
# datetimevalue.append(timestamp)
# return datetimevalue
def DateTimeFormat(time):
dayhours = ['0','1','2','3','4','5','6','7','8','9']
months = ['1','2','3','4','5','6','7','8','9']
days = ['1','2','3','4','5','6','7','8','9']
minus = '-'
addition = '+'
for i in range(len(time)):
print(time[i])
if '-' in time[i]:
splitTZ = time[i].split('-')
y = DateTime(splitTZ[0])
x = [y.parts()]
month = str(x[0][1])
day = str(x[0][2])
hours = str(x[0][3])
if month in months:
month = '0'+month
#return month
if day in days:
day = '0'+day
#return day
if hours in dayhours:
hours = '0'+hours
formattedDateTime.append(int(str(x[0][0])+month+day+hours))
elif '+' in time[i]:
splitTZ = time[i].split('+')
y = DateTime(splitTZ[0])
x = [y.parts()]
month = str(x[0][1])
day = str(x[0][2])
hours = str(x[0][3])
if month in months:
month = '0'+month
#return month
if day in days:
day = '0'+day
#return day
if hours in dayhours:
hours = '0'+hours
formattedDateTime.append(int(str(x[0][0])+month+day+hours))
return formattedDateTime
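# Editor's note: a hedged standard-library sketch, not part of the original
# script, of the zero-padded YYYYMMDDHH integer that DateTimeFormat() builds
# by hand above (the sample timestamp is an illustrative assumption):
def _datetime_format_sketch():
    from datetime import datetime
    stamp = '2017-08-03 09:15:00+0530'.split('+')[0].strip()
    parsed = datetime.strptime(stamp, '%Y-%m-%d %H:%M:%S')
    return int(parsed.strftime('%Y%m%d%H'))  # -> 2017080309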
def distMatrix(v1,v2):
v1_norm = v1/sp.linalg.norm(v1.toarray())
v2_norm = v2/sp.linalg.norm(v2.toarray())
delta = v1_norm - v2_norm
return sp.linalg.norm(delta.toarray())
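# Editor's note: a small sketch, not part of the original script, showing
# distMatrix() on two sparse row vectors (values are illustrative):
def _dist_matrix_sketch():
    from scipy.sparse import csr_matrix
    v1 = csr_matrix([[1.0, 0.0, 1.0]])
    v2 = csr_matrix([[0.0, 1.0, 1.0]])
    # Both rows are L2-normalised first, so the distance lies in [0, 2].
    return distMatrix(v1, v2)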
def similarityMatrixBody(bodyList):
vec = TfidfVectorizer(min_df=1,stop_words = 'english',decode_error='ignore',analyzer='word',ngram_range=(1, 2),token_pattern=r'\b\w+\b')
    X_train = vec.fit_transform(bodyList).toarray()  # fixed: use the bodyList parameter (the original referenced the global bodyword)
transformer = TfidfTransformer(smooth_idf=False)
print(transformer)
tfidf = transformer.fit_transform(X_train).toarray()
print(tfidf.shape)
return X_train
def testEmailBody(newEmailBody):
    # NOTE: this helper relies on module-level `vec`, `X_train`, `bodyVector`
    # and `emailClassification` being populated elsewhere; it is never called
    # from main() as written.
    classifier = MultinomialNB()
new_vec = vec.transform(newEmailBody)
#print(new_vec)
for i in range(len(emailClassification['SNIPPET'])):
bodyVec = bodyVector[i]
if bodyVec == newEmailBody:
continue
post_vec = X_train.getrow(i)
d = distMatrix(post_vec,new_vec)
print("Post %i with distance = %.2f: %s"%(i,d,bodyVec))
def similarityMatrixSubject(subjectList):
subvec = TfidfVectorizer(min_df=1,stop_words = 'english',decode_error='ignore',analyzer='word',ngram_range=(1, 2),token_pattern=r'\b\w+\b')
X_train = subvec.fit_transform(subjectList).toarray()
transformer = TfidfTransformer(smooth_idf=False)
tfidf = transformer.fit_transform(X_train).toarray()
return X_train
def testEmailSubject(newEmailSubject):
    # NOTE: likewise depends on module-level `Subvec`, `X_train`,
    # `SubjectVector` and `emailClassification`; never called from main().
    new_vec = Subvec.transform(newEmailSubject)
#print(new_vec)
for i in range(len(emailClassification['SUBJECT'])):
subVec = SubjectVector[i]
if subVec == newEmailSubject:
continue
post_vec = X_train.getrow(i)
d = distMatrix(post_vec,new_vec)
print("Post %i with distance = %.2f: %s"%(i,d,subVec))
def main():
PATH = '/Users/XXXXXXXXX/Documents/Google Gmail API /EmailClass.csv'
global newDateTime
global newSpam
global newSnippet
global newMailID
global newSubject
global getDateTime
getDateTime = []
newDateTime = []
newSpam = []
newSnippet = []
newMailID = []
newSubject = []
if os.path.isfile(PATH):
#print('True')
with open('/Users/XXXXXXXXX/Documents/Google Gmail API /EmailClass.csv','a') as f:
w = csv.writer(f)
getDateTimeCSV = pd.read_csv(PATH,header='infer',sep=',',encoding='ISO-8859-1')
getDateTime = getDateTimeCSV['Date']
global latestDateTime
latestDateTime = max(getDateTime)
times = fetchEmails()
#updatedRecords = DateTimeFormat(times[0])
x = 0
while True:
print(times[0][x])
print(latestDateTime)
if times[0][x] > int(latestDateTime):
newDateTime.append(times[0][x])
newSpam.append(times[1][x])
newSnippet.append(times[2][x])
newMailID.append(times[3][x])
newSubject.append(times[4][x])
x = x + 1
if times[0][x] <= latestDateTime:
break
else:
break
w.writerows(zip(newDateTime,newSpam,newSnippet,newMailID,newSubject))
f.close() #times = DateTimeFormat(fetchEmails())
else:
print('File Not Found')
########################################### MACHINE LEARNING ALGORITHM STARTS ##########################################################
emailClassification = pd.read_csv('/Users/XXXXXXXXX/Documents/Google Gmail API /EmailClass.csv',sep=',',header='infer',encoding='ISO-8859-1')
emailClassification['SNIPPET'] = emailClassification['SNIPPET'].fillna("UNKNOWN")
emailClassification['SUBJECT'] = emailClassification['SUBJECT'].fillna("UNKNOWN")
classifiers = {'Adaptive Boosting Classifier':AdaBoostClassifier(),'Linear Discriminant Analysis':LinearDiscriminantAnalysis(),'Random Forest Classifier': RandomForestClassifier(),'Decision Tree Classifier':DecisionTreeClassifier()}
    # CLASSIFYING EMAIL WITH BOTH FEATURES TOGETHER (SUBJECT AND SNIPPET) GIVES BETTER ACCURACY !!!
    # Classifying the Email Based on Subject and SNIPPET together
X_ = emailClassification[['SUBJECT', 'SNIPPET']].apply(lambda x: ''.join(x), axis=1)
X_trained_= similarityMatrixSubject(X_)
email_Classifier = pd.DataFrame(X_trained_[:,:],columns=X_trained_[0,:])
y_trained = emailClassification['SPAM']
rs = StratifiedShuffleSplit(n_splits=1, test_size=0.2,random_state=0)
for Name,Classify in classifiers.items():
for train_index,test_index in rs.split(email_Classifier,y_trained):
X__tr,X__test = email_Classifier.iloc[train_index], email_Classifier.iloc[test_index]
y,y_test = y_trained.iloc[train_index], y_trained.iloc[test_index]
clf1 = Classify
clf1.fit(X__tr,y)
y_pred1 = clf1.predict(X__test)
print("EMAIL vs SPAM - Accuracy of Classifier "+Name,accuracy_score(y_pred1,y_test))
print(y_test[:30].as_matrix())
print(y_pred1[:30])
    # Classifying the Email Based on Subject and SNIPPET Separately
X_trained_SNIPPET = emailClassification['SNIPPET']
X_trained_SUBJECT = emailClassification['SUBJECT']
X_trained_NP_SUB = similarityMatrixSubject(X_trained_SUBJECT)
X_trained_NP_SNI = similarityMatrixSubject(X_trained_SNIPPET)
X_trained_SNI = pd.DataFrame(X_trained_NP_SNI[:,:],columns=X_trained_NP_SNI[0,:])
X_trained_SUB = pd.DataFrame(X_trained_NP_SUB[:,:],columns=X_trained_NP_SUB[0,:])
for Name,Classify in classifiers.items():
for train_index,test_index in rs.split(X_trained_SNI,y_trained):
X_SNI_tr,X_SNI_test = X_trained_SNI.iloc[train_index], X_trained_SNI.iloc[test_index]
y,y_test = y_trained.iloc[train_index], y_trained.iloc[test_index]
clf1 = Classify
clf1.fit(X_SNI_tr,y)
y_pred1 = clf1.predict(X_SNI_test)
print("SNIPPET vs SPAM - Accuracy of Classifier "+Name,accuracy_score(y_pred1,y_test))
print(y_test[:10].as_matrix())
print(y_pred1[:10])
for train_index,test_index in rs.split(X_trained_SUB,y_trained):
X_SUB_tr,X_SUB_test = X_trained_SUB.iloc[train_index], X_trained_SUB.iloc[test_index]
y,y_test = y_trained.iloc[train_index], y_trained.iloc[test_index]
clf2 = Classify
clf2.fit(X_SUB_tr,y)
y_pred2 = clf2.predict(X_SUB_test)
print("SUBJECT vs SPAM - Accuracy of Classifier "+Name,accuracy_score(y_pred2,y_test))
print(y_test[:10].as_matrix())
print(y_pred2[:10])
if __name__ == '__main__':
main() | gpl-3.0 |
nicky-ji/edx-nicky | common/djangoapps/xmodule_django/models.py | 24 | 6326 | from django.db import models
from django.core.exceptions import ValidationError
from opaque_keys.edx.locations import SlashSeparatedCourseKey, Location
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locator import Locator
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^xmodule_django\.models\.CourseKeyField"])
add_introspection_rules([], ["^xmodule_django\.models\.LocationKeyField"])
class NoneToEmptyManager(models.Manager):
"""
A :class:`django.db.models.Manager` that has a :class:`NoneToEmptyQuerySet`
as its `QuerySet`, initialized with a set of specified `field_names`.
"""
def __init__(self):
"""
Args:
field_names: The list of field names to initialize the :class:`NoneToEmptyQuerySet` with.
"""
super(NoneToEmptyManager, self).__init__()
def get_query_set(self):
return NoneToEmptyQuerySet(self.model, using=self._db)
class NoneToEmptyQuerySet(models.query.QuerySet):
"""
A :class:`django.db.query.QuerySet` that replaces `None` values passed to `filter` and `exclude`
with the corresponding `Empty` value for all fields with an `Empty` attribute.
This is to work around Django automatically converting `exact` queries for `None` into
    `isnull` queries before the field has a chance to convert them to queries for its own
empty value.
"""
def _filter_or_exclude(self, *args, **kwargs):
for name in self.model._meta.get_all_field_names():
field_object, _model, direct, _m2m = self.model._meta.get_field_by_name(name)
if direct and hasattr(field_object, 'Empty'):
for suffix in ('', '_exact'):
key = '{}{}'.format(name, suffix)
if key in kwargs and kwargs[key] is None:
kwargs[key] = field_object.Empty
return super(NoneToEmptyQuerySet, self)._filter_or_exclude(*args, **kwargs)
def _strip_object(key):
"""
Strips branch and version info if the given key supports those attributes.
"""
if hasattr(key, 'version_agnostic') and hasattr(key, 'for_branch'):
return key.for_branch(None).version_agnostic()
else:
return key
def _strip_value(value, lookup='exact'):
"""
Helper function to remove the branch and version information from the given value,
which could be a single object or a list.
"""
if lookup == 'in':
stripped_value = [_strip_object(el) for el in value]
else:
stripped_value = _strip_object(value)
return stripped_value
class CourseKeyField(models.CharField):
description = "A SlashSeparatedCourseKey object, saved to the DB in the form of a string"
__metaclass__ = models.SubfieldBase
Empty = object()
def to_python(self, value):
if value is self.Empty or value is None:
return None
assert isinstance(value, (basestring, CourseKey))
if value == '':
# handle empty string for models being created w/o fields populated
return None
if isinstance(value, basestring):
return SlashSeparatedCourseKey.from_deprecated_string(value)
else:
return value
def get_prep_lookup(self, lookup, value):
if lookup == 'isnull':
raise TypeError('Use CourseKeyField.Empty rather than None to query for a missing CourseKeyField')
return super(CourseKeyField, self).get_prep_lookup(
lookup,
# strip key before comparing
_strip_value(value, lookup)
)
def get_prep_value(self, value):
if value is self.Empty or value is None:
return '' # CharFields should use '' as their empty value, rather than None
assert isinstance(value, CourseKey)
return unicode(_strip_value(value))
def validate(self, value, model_instance):
"""Validate Empty values, otherwise defer to the parent"""
# raise validation error if the use of this field says it can't be blank but it is
if not self.blank and value is self.Empty:
raise ValidationError(self.error_messages['blank'])
else:
return super(CourseKeyField, self).validate(value, model_instance)
def run_validators(self, value):
"""Validate Empty values, otherwise defer to the parent"""
if value is self.Empty:
return
return super(CourseKeyField, self).run_validators(value)
class LocationKeyField(models.CharField):
description = "A Location object, saved to the DB in the form of a string"
__metaclass__ = models.SubfieldBase
Empty = object()
def to_python(self, value):
if value is self.Empty or value is None:
return value
assert isinstance(value, (basestring, UsageKey))
if value == '':
return None
if isinstance(value, basestring):
return Location.from_deprecated_string(value)
else:
return value
def get_prep_lookup(self, lookup, value):
if lookup == 'isnull':
raise TypeError('Use LocationKeyField.Empty rather than None to query for a missing LocationKeyField')
# remove version and branch info before comparing keys
return super(LocationKeyField, self).get_prep_lookup(
lookup,
# strip key before comparing
_strip_value(value, lookup)
)
def get_prep_value(self, value):
if value is self.Empty:
return ''
assert isinstance(value, UsageKey)
return unicode(_strip_value(value))
def validate(self, value, model_instance):
"""Validate Empty values, otherwise defer to the parent"""
# raise validation error if the use of this field says it can't be blank but it is
if not self.blank and value is self.Empty:
raise ValidationError(self.error_messages['blank'])
else:
return super(LocationKeyField, self).validate(value, model_instance)
def run_validators(self, value):
"""Validate Empty values, otherwise defer to the parent"""
if value is self.Empty:
return
return super(LocationKeyField, self).run_validators(value)
| agpl-3.0 |
Dineshs91/youtube-dl | youtube_dl/extractor/ringtv.py | 124 | 1970 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
class RingTVIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?ringtv\.craveonline\.com/(?P<type>news|videos/video)/(?P<id>[^/?#]+)'
_TEST = {
"url": "http://ringtv.craveonline.com/news/310833-luis-collazo-says-victor-ortiz-better-not-quit-on-jan-30",
"md5": "d25945f5df41cdca2d2587165ac28720",
"info_dict": {
'id': '857645',
'ext': 'mp4',
"title": 'Video: Luis Collazo says Victor Ortiz "better not quit on Jan. 30" - Ring TV',
"description": 'Luis Collazo is excited about his Jan. 30 showdown with fellow former welterweight titleholder Victor Ortiz at Barclays Center in his hometown of Brooklyn. The SuperBowl week fight headlines a Golden Boy Live! card on Fox Sports 1.',
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id').split('-')[0]
webpage = self._download_webpage(url, video_id)
if mobj.group('type') == 'news':
video_id = self._search_regex(
r'''(?x)<iframe[^>]+src="http://cms\.springboardplatform\.com/
embed_iframe/[0-9]+/video/([0-9]+)/''',
webpage, 'real video ID')
title = self._og_search_title(webpage)
description = self._html_search_regex(
r'addthis:description="([^"]+)"',
webpage, 'description', fatal=False)
final_url = "http://ringtv.craveonline.springboardplatform.com/storage/ringtv.craveonline.com/conversion/%s.mp4" % video_id
thumbnail_url = "http://ringtv.craveonline.springboardplatform.com/storage/ringtv.craveonline.com/snapshots/%s.jpg" % video_id
return {
'id': video_id,
'url': final_url,
'title': title,
'thumbnail': thumbnail_url,
'description': description,
}
| unlicense |
KaranToor/MA450 | google-cloud-sdk/.install/.backup/lib/third_party/httplib2/__init__.py | 2 | 70781 | from __future__ import generators
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 2.3 or later
Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer ([email protected])",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.9.2"
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import urllib
import base64
import os
import copy
import calendar
import time
import random
import errno
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
# prior to Python 2.5, these were separate modules
import sha
import md5
_sha = sha.new
_md5 = md5.new
import hmac
from gettext import gettext as _
import socket
try:
from httplib2 import socks
except ImportError:
try:
import socks
except (ImportError, AttributeError):
socks = None
# Build the appropriate socket wrapper for ssl
try:
import ssl # python 2.6
ssl_SSLError = ssl.SSLError
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if disable_validation:
cert_reqs = ssl.CERT_NONE
else:
cert_reqs = ssl.CERT_REQUIRED
# We should be specifying SSL version 3 or TLS v1, but the ssl module
# doesn't expose the necessary knobs. So we need to go with the default
# of SSLv23.
return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
ssl_SSLError = None
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if not disable_validation:
raise CertificateValidationUnsupported(
"SSL certificate validation is not supported without "
"the ssl module installed. To avoid this error, install "
"the ssl module, or explicity disable validation.")
ssl_sock = socket.ssl(sock, key_file, cert_file)
return httplib.FakeSocket(sock, ssl_sock)
if sys.version_info >= (2,3):
from iri2uri import iri2uri
else:
def iri2uri(uri):
return uri
def has_timeout(timeout): # python 2.6
if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
return (timeout is not None)
__all__ = [
'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
'RedirectLimit', 'FailedToDecompressContent',
'UnimplementedDigestAuthOptionError',
'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'ProxiesUnavailableError']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2
# Python 2.3 support
if sys.version_info < (2,4):
def sorted(seq):
seq.sort()
return seq
# Python 2.3 support
def HTTPResponse__getheaders(self):
"""Return list of (header, value) tuples."""
if self.msg is None:
raise httplib.ResponseNotReady()
return self.msg.items()
if not hasattr(httplib.HTTPResponse, 'getheaders'):
httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
def __init__(self, desc, response, content):
self.response = response
self.content = content
HttpLib2Error.__init__(self, desc)
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class ProxiesUnavailableError(HttpLib2Error): pass
class CertificateValidationUnsupported(HttpLib2Error): pass
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
def __init__(self, desc, host, cert):
HttpLib2Error.__init__(self, desc)
self.host = host
self.cert = cert
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
try:
# Users can optionally provide a module that tells us where the CA_CERTS
# are located.
import ca_certs_locater
CA_CERTS = ca_certs_locater.get()
except ImportError:
# Default CA certificates file bundled with httplib2.
CA_CERTS = os.path.join(
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in response.keys() if header not in hopbyhop]
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
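# Editor's note: an illustrative example, not part of the original module.
# parse_uri() returns the five RFC 3986 components:
#
#   parse_uri("http://example.com/path?q=1#frag")
#   -> ('http', 'example.com', '/path', 'q=1', 'frag')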
def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
authority = authority.lower()
scheme = scheme.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
scheme = scheme.lower()
defrag_uri = scheme + "://" + authority + request_uri
return scheme, authority, request_uri, defrag_uri
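# Editor's note: an illustrative example, not part of the original module.
# urlnorm() lower-cases the scheme and authority and supplies a default path:
#
#   urlnorm("HTTP://Example.COM")
#   -> ('http', 'example.com', '/', 'http://example.com/')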
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')
def safename(filename):
"""Return a filename suitable for the cache.
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
try:
if re_url_scheme.match(filename):
if isinstance(filename,str):
filename = filename.decode('utf-8')
filename = filename.encode('idna')
else:
filename = filename.encode('idna')
except UnicodeError:
pass
if isinstance(filename,unicode):
filename=filename.encode('utf-8')
filemd5 = _md5(filename).hexdigest()
filename = re_url_scheme.sub("", filename)
filename = re_slash.sub(",", filename)
# limit length of filename
if len(filename)>200:
filename=filename[:200]
return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
    # Collapse linear whitespace in header values to a single space. (The
    # original passed the re.sub() arguments in reverse order, substituting
    # into the string ' ' instead of into the header value.)
    return dict([ (key.lower(), NORMALIZE_SPACE.sub(' ', value).strip())  for (key, value) in headers.iteritems()])
def _parse_cache_control(headers):
retval = {}
if headers.has_key('cache-control'):
parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
return retval
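# Editor's note: an illustrative example, not part of the original module.
# Directives with arguments keep their string value; bare directives map to 1:
#
#   _parse_cache_control({'cache-control': 'max-age=3600, no-cache'})
#   -> {'max-age': '3600', 'no-cache': 1}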
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
"""Returns a dictionary of dictionaries, one dict
per auth_scheme."""
retval = {}
if headers.has_key(headername):
try:
authenticate = headers[headername].strip()
www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
while authenticate:
# Break off the scheme at the beginning of the line
if headername == 'authentication-info':
(auth_scheme, the_rest) = ('digest', authenticate)
else:
(auth_scheme, the_rest) = authenticate.split(" ", 1)
# Now loop over all the key value pairs that come after the scheme,
# being careful not to roll into the next scheme
match = www_auth.search(the_rest)
auth_params = {}
while match:
if match and len(match.groups()) == 3:
(key, value, the_rest) = match.groups()
auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
match = www_auth.search(the_rest)
retval[auth_scheme.lower()] = auth_params
authenticate = the_rest.strip()
except ValueError:
raise MalformedHeader("WWW-Authenticate")
return retval
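# Editor's note: an illustrative example, not part of the original module.
# The result holds one dict of auth-params per (lower-cased) scheme:
#
#   _parse_www_authenticate({'www-authenticate': 'Basic realm="example"'})
#   -> {'basic': {'realm': 'example'}}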
def _entry_disposition(response_headers, request_headers):
"""Determine freshness from the Date, Expires and Cache-Control headers.
We don't handle the following:
1. Cache-Control: max-stale
2. Age: headers are not used in the calculations.
    Note that this algorithm is simpler than you might think
    because we are operating as a private (non-shared) cache.
    This lets us ignore 's-maxage'. We can also ignore
    'proxy-invalidate' since we aren't a proxy.
    As a design decision we will never return a stale document
    as fresh, hence the non-implementation of 'max-stale'. This
    also lets us safely ignore 'must-revalidate' since we operate
    as if every server has sent 'must-revalidate'.
Since we are private we get to ignore both 'public' and
'private' parameters. We also ignore 'no-transform' since
we don't do any transformations.
The 'no-store' parameter is handled at a higher level.
So the only Cache-Control parameters we look at are:
no-cache
only-if-cached
max-age
min-fresh
"""
retval = "STALE"
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
retval = "TRANSPARENT"
if 'cache-control' not in request_headers:
request_headers['cache-control'] = 'no-cache'
elif cc.has_key('no-cache'):
retval = "TRANSPARENT"
elif cc_response.has_key('no-cache'):
retval = "STALE"
elif cc.has_key('only-if-cached'):
retval = "FRESH"
elif response_headers.has_key('date'):
date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
now = time.time()
current_age = max(0, now - date)
if cc_response.has_key('max-age'):
try:
freshness_lifetime = int(cc_response['max-age'])
except ValueError:
freshness_lifetime = 0
elif response_headers.has_key('expires'):
expires = email.Utils.parsedate_tz(response_headers['expires'])
if None == expires:
freshness_lifetime = 0
else:
freshness_lifetime = max(0, calendar.timegm(expires) - date)
else:
freshness_lifetime = 0
if cc.has_key('max-age'):
try:
freshness_lifetime = int(cc['max-age'])
except ValueError:
freshness_lifetime = 0
if cc.has_key('min-fresh'):
try:
min_fresh = int(cc['min-fresh'])
except ValueError:
min_fresh = 0
current_age += min_fresh
if freshness_lifetime > current_age:
retval = "FRESH"
return retval
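# Editor's note: a hedged sketch, not part of the original module, exercising
# the freshness logic above with a just-issued response:
def _entry_disposition_sketch():
    response = {'date': email.Utils.formatdate(time.time()),
                'cache-control': 'max-age=3600'}
    return _entry_disposition(response, {})  # -> "FRESH" (age ~0 < 3600)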
def _decompressContent(response, new_content):
content = new_content
try:
encoding = response.get('content-encoding', None)
if encoding in ['gzip', 'deflate']:
if encoding == 'gzip':
content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
if encoding == 'deflate':
content = zlib.decompress(content)
response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way that won't interfere.
response['-content-encoding'] = response['content-encoding']
del response['content-encoding']
except IOError:
content = ""
raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
return content
def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey:
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if cc.has_key('no-store') or cc_response.has_key('no-store'):
cache.delete(cachekey)
else:
info = email.Message.Message()
for key, value in response_headers.iteritems():
if key not in ['status','content-encoding','transfer-encoding']:
info[key] = value
# Add annotations to the cache to indicate what headers
# are variant for this request.
vary = response_headers.get('vary', None)
if vary:
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
try:
info[key] = request_headers[header]
except KeyError:
pass
status = response_headers.status
if status == 304:
status = 200
status_header = 'status: %d\r\n' % status
header_str = info.as_string()
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
text = "".join([status_header, header_str, content])
cache.set(cachekey, text)
def _cnonce():
dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
return dig[:16]
def _wsse_username_token(cnonce, iso_now, password):
return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
# For credentials we need two things: first,
# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
class Authentication(object):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
self.path = path
self.host = host
self.credentials = credentials
self.http = http
def depth(self, request_uri):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return request_uri[len(self.path):].count("/")
def inscope(self, host, request_uri):
# XXX Should we normalize the request_uri?
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return (host == self.host) and path.startswith(self.path)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header. Over-ride this in sub-classes."""
pass
def response(self, response, content):
"""Gives us a chance to update with new nonces
or such returned from the last authorized response.
        Over-ride this in sub-classes if necessary.
        Return TRUE if the request is to be retried, for
        example Digest may return stale=true.
"""
return False
class BasicAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
class DigestAuthentication(Authentication):
"""Only do qop='auth' and MD5, since that
is all Apache currently implements"""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['digest']
qop = self.challenge.get('qop', 'auth')
self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
if self.challenge['qop'] is None:
raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
if self.challenge['algorithm'] != 'MD5':
raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
self.challenge['nc'] = 1
def request(self, method, request_uri, headers, content, cnonce = None):
"""Modify the request headers"""
H = lambda x: _md5(x).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
self.challenge['cnonce'] = cnonce or _cnonce()
request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
self.challenge['nonce'],
'%08x' % self.challenge['nc'],
self.challenge['cnonce'],
self.challenge['qop'], H(A2)))
headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['nonce'],
request_uri,
self.challenge['algorithm'],
request_digest,
self.challenge['qop'],
self.challenge['nc'],
self.challenge['cnonce'])
if self.challenge.get('opaque'):
headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
self.challenge['nc'] += 1
def response(self, response, content):
if not response.has_key('authentication-info'):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
if 'true' == challenge.get('stale'):
self.challenge['nonce'] = challenge['nonce']
self.challenge['nc'] = 1
return True
else:
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
if updated_challenge.has_key('nextnonce'):
self.challenge['nonce'] = updated_challenge['nextnonce']
self.challenge['nc'] = 1
return False
class HmacDigestAuthentication(Authentication):
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
__author__ = "Thomas Broyer ([email protected])"
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['hmacdigest']
# TODO: self.challenge['domain']
self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
if self.challenge['reason'] not in ['unauthorized', 'integrity']:
self.challenge['reason'] = 'unauthorized'
self.challenge['salt'] = self.challenge.get('salt', '')
if not self.challenge.get('snonce'):
raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
if self.challenge['algorithm'] == 'HMAC-MD5':
self.hashmod = _md5
else:
self.hashmod = _sha
if self.challenge['pw-algorithm'] == 'MD5':
self.pwhashmod = _md5
else:
self.pwhashmod = _sha
self.key = "".join([self.credentials[0], ":",
self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
":", self.challenge['realm']])
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
"""Modify the request headers"""
keys = _get_end2end_headers(headers)
keylist = "".join(["%s " % k for k in keys])
headers_val = "".join([headers[k] for k in keys])
created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
cnonce = _cnonce()
request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['snonce'],
cnonce,
request_uri,
created,
request_digest,
keylist)
def response(self, response, content):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
if challenge.get('reason') in ['integrity', 'stale']:
return True
return False
class WsseAuthentication(Authentication):
"""This is thinly tested and should not be relied upon.
At this time there isn't any third party server to test against.
Blogger and TypePad implemented this algorithm at one point
but Blogger has since switched to Basic over HTTPS and
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
self.credentials[0],
password_digest,
cnonce,
iso_now)
class GoogleLoginAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
from urllib import urlencode
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
service = challenge['googlelogin'].get('service', 'xapi')
        # Blogger actually returns the service in the challenge
# For the rest we guess based on the URI
if service == 'xapi' and request_uri.find("calendar") > 0:
service = "cl"
# No point in guessing Base or Spreadsheet
#elif request_uri.find("spreadsheets") > 0:
# service = "wise"
auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
lines = content.split('\n')
d = dict([tuple(line.split("=", 1)) for line in lines if line])
if resp.status == 403:
self.Auth = ""
else:
self.Auth = d['Auth']
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
AUTH_SCHEME_CLASSES = {
"basic": BasicAuthentication,
"wsse": WsseAuthentication,
"digest": DigestAuthentication,
"hmacdigest": HmacDigestAuthentication,
"googlelogin": GoogleLoginAuthentication
}
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
"""Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
if not os.path.exists(cache):
os.makedirs(self.cache)
def get(self, key):
retval = None
cacheFullPath = os.path.join(self.cache, self.safe(key))
try:
f = file(cacheFullPath, "rb")
retval = f.read()
f.close()
except IOError:
pass
return retval
def set(self, key, value):
cacheFullPath = os.path.join(self.cache, self.safe(key))
f = file(cacheFullPath, "wb")
f.write(value)
f.close()
def delete(self, key):
cacheFullPath = os.path.join(self.cache, self.safe(key))
if os.path.exists(cacheFullPath):
os.remove(cacheFullPath)
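# Editor's note: a minimal usage sketch, not part of the original module; the
# directory name is an illustrative assumption.
def _file_cache_sketch():
    cache = FileCache('.cache_demo')
    cache.set('http://example.com/', 'status: 200\r\n\r\nbody')
    assert cache.get('http://example.com/') == 'status: 200\r\n\r\nbody'
    cache.delete('http://example.com/')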
class Credentials(object):
def __init__(self):
self.credentials = []
def add(self, name, password, domain=""):
self.credentials.append((domain.lower(), name, password))
def clear(self):
self.credentials = []
def iter(self, domain):
for (cdomain, name, password) in self.credentials:
if cdomain == "" or domain == cdomain:
yield (name, password)
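# Editor's note: an illustrative example, not part of the original module.
# Credentials added with an empty domain are offered to every host:
#
#   c = Credentials()
#   c.add('joe', 'secret')                  # domain '' matches everywhere
#   c.add('ann', 'hunter2', 'example.com')  # only offered to example.com
#   list(c.iter('example.com'))  # -> [('joe', 'secret'), ('ann', 'hunter2')]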
class KeyCerts(Credentials):
"""Identical to Credentials except that
name/password are mapped to key/cert."""
pass
class AllHosts(object):
pass
class ProxyInfo(object):
"""Collect information required to use a proxy."""
bypass_hosts = ()
def __init__(self, proxy_type, proxy_host, proxy_port,
proxy_rdns=True, proxy_user=None, proxy_pass=None):
"""
Args:
proxy_type: The type of proxy server. This must be set to one of
socks.PROXY_TYPE_XXX constants. For example:
p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
proxy_host='localhost', proxy_port=8000)
proxy_host: The hostname or IP address of the proxy server.
proxy_port: The port that the proxy server is running on.
proxy_rdns: If True (default), DNS queries will not be performed
locally, and instead, handed to the proxy to resolve. This is useful
if the network does not allow resolution of non-local names. In
httplib2 0.9 and earlier, this defaulted to False.
proxy_user: The username used to authenticate with the proxy server.
proxy_pass: The password used to authenticate with the proxy server.
"""
self.proxy_type = proxy_type
self.proxy_host = proxy_host
self.proxy_port = proxy_port
self.proxy_rdns = proxy_rdns
self.proxy_user = proxy_user
self.proxy_pass = proxy_pass
def astuple(self):
return (self.proxy_type, self.proxy_host, self.proxy_port,
self.proxy_rdns, self.proxy_user, self.proxy_pass)
def isgood(self):
return (self.proxy_host != None) and (self.proxy_port != None)
def applies_to(self, hostname):
return not self.bypass_host(hostname)
def bypass_host(self, hostname):
"""Has this host been excluded from the proxy config"""
if self.bypass_hosts is AllHosts:
return True
bypass = False
for domain in self.bypass_hosts:
if hostname.endswith(domain):
bypass = True
return bypass
def proxy_info_from_environment(method='http'):
"""
Read proxy info from the environment variables.
"""
if method not in ['http', 'https']:
return
env_var = method + '_proxy'
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url:
return
pi = proxy_info_from_url(url, method)
no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
bypass_hosts = []
if no_proxy:
bypass_hosts = no_proxy.split(',')
# special case, no_proxy=* means all hosts bypassed
if no_proxy == '*':
bypass_hosts = AllHosts
pi.bypass_hosts = bypass_hosts
return pi
def proxy_info_from_url(url, method='http'):
"""
Construct a ProxyInfo from a URL (such as http_proxy env var)
"""
url = urlparse.urlparse(url)
username = None
password = None
port = None
if '@' in url[1]:
ident, host_port = url[1].split('@', 1)
if ':' in ident:
username, password = ident.split(':', 1)
else:
password = ident
else:
host_port = url[1]
if ':' in host_port:
host, port = host_port.split(':', 1)
else:
host = host_port
if port:
port = int(port)
else:
port = dict(https=443, http=80)[method]
proxy_type = 3 # socks.PROXY_TYPE_HTTP
return ProxyInfo(
proxy_type = proxy_type,
proxy_host = host,
proxy_port = port,
proxy_user = username or None,
proxy_pass = password or None,
)
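# Editor's note: an illustrative example, not part of the original module.
# Credentials, host and port are split out of the URL; the port defaults to
# 80/443 by method when absent:
#
#   pi = proxy_info_from_url('http://joe:secret@proxy.example.com:3128')
#   # pi.proxy_host == 'proxy.example.com', pi.proxy_port == 3128,
#   # pi.proxy_user == 'joe', pi.proxy_pass == 'secret'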
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
"""
HTTPConnection subclass that supports timeouts
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
httplib.HTTPConnection.__init__(self, host, port, strict)
self.timeout = timeout
self.proxy_info = proxy_info
def connect(self):
"""Connect to the host and port specified in __init__."""
# Mostly verbatim from httplib.py.
if self.proxy_info and socks is None:
raise ProxiesUnavailableError(
'Proxy support missing but proxy use was requested!')
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
host = proxy_host
port = proxy_port
else:
use_proxy = False
host = self.host
port = self.port
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
if use_proxy:
self.sock = socks.socksocket(af, socktype, proto)
self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
else:
self.sock = socket.socket(af, socktype, proto)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Different from httplib: support timeouts.
if has_timeout(self.timeout):
self.sock.settimeout(self.timeout)
# End of difference from httplib.
if self.debuglevel > 0:
print "connect: (%s, %s) ************" % (self.host, self.port)
if use_proxy:
print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
self.sock.connect((self.host, self.port) + sa[2:])
except socket.error, msg:
if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
"""
This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None,
ca_certs=None, disable_ssl_certificate_validation=False):
httplib.HTTPSConnection.__init__(self, host, port=port,
key_file=key_file,
cert_file=cert_file, strict=strict)
self.timeout = timeout
self.proxy_info = proxy_info
if ca_certs is None:
ca_certs = CA_CERTS
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# The following two methods were adapted from https_wrapper.py, released
# with the Google Appengine SDK at
# http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
# under the following license:
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def _GetValidHostsForCert(self, cert):
"""Returns a list of valid host globs for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
Returns:
list: A list of valid host globs.
"""
if 'subjectAltName' in cert:
return [x[1] for x in cert['subjectAltName']
if x[0].lower() == 'dns']
else:
return [x[0][1] for x in cert['subject']
if x[0][0].lower() == 'commonname']
def _ValidateCertificateHostname(self, cert, hostname):
"""Validates that a given hostname is valid for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
hostname: The hostname to test.
Returns:
bool: Whether or not the hostname is valid for this certificate.
"""
hosts = self._GetValidHostsForCert(cert)
for host in hosts:
host_re = host.replace('.', '\.').replace('*', '[^.]*')
if re.search('^%s$' % (host_re,), hostname, re.I):
return True
return False
def connect(self):
"Connect to a host on a given (SSL) port."
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
host = proxy_host
port = proxy_port
else:
use_proxy = False
host = self.host
port = self.port
address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
for family, socktype, proto, canonname, sockaddr in address_info:
try:
if use_proxy:
sock = socks.socksocket(family, socktype, proto)
sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
else:
sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if has_timeout(self.timeout):
sock.settimeout(self.timeout)
sock.connect((self.host, self.port))
self.sock =_ssl_wrap_socket(
sock, self.key_file, self.cert_file,
self.disable_ssl_certificate_validation, self.ca_certs)
if self.debuglevel > 0:
print "connect: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if not self.disable_ssl_certificate_validation:
cert = self.sock.getpeercert()
                    hostname = self.host.split(':')[0]  # fixed: the original used maxsplit=0, which performs no split
if not self._ValidateCertificateHostname(cert, hostname):
raise CertificateHostnameMismatch(
'Server presented certificate that does not match '
'host %s: %s' % (hostname, cert), hostname, cert)
except ssl_SSLError, e:
if sock:
sock.close()
if self.sock:
self.sock.close()
self.sock = None
# Unfortunately the ssl module doesn't seem to provide any way
# to get at more detailed error information, in particular
# whether the error is due to certificate validation or
# something else (such as SSL protocol mismatch).
if e.errno == ssl.SSL_ERROR_SSL:
raise SSLHandshakeError(e)
else:
raise
except (socket.timeout, socket.gaierror):
raise
except socket.error, msg:
if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
SCHEME_TO_CONNECTION = {
'http': HTTPConnectionWithTimeout,
'https': HTTPSConnectionWithTimeout
}
# Use a different connection object for Google App Engine
try:
from google.appengine.api import apiproxy_stub_map
if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
raise ImportError # Bail out; we're not actually running on App Engine.
from google.appengine.api.urlfetch import fetch
from google.appengine.api.urlfetch import InvalidURLError
def _new_fixed_fetch(validate_certificate):
def fixed_fetch(url, payload=None, method="GET", headers={},
allow_truncated=False, follow_redirects=True,
deadline=None):
if deadline is None:
deadline = socket.getdefaulttimeout() or 5
return fetch(url, payload=payload, method=method, headers=headers,
allow_truncated=allow_truncated,
follow_redirects=follow_redirects, deadline=deadline,
validate_certificate=validate_certificate)
return fixed_fetch
class AppEngineHttpConnection(httplib.HTTPConnection):
"""Use httplib on App Engine, but compensate for its weirdness.
The parameters key_file, cert_file, proxy_info, ca_certs, and
disable_ssl_certificate_validation are all dropped on the ground.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
httplib.HTTPConnection.__init__(self, host, port=port,
strict=strict, timeout=timeout)
class AppEngineHttpsConnection(httplib.HTTPSConnection):
"""Same as AppEngineHttpConnection, but for HTTPS URIs."""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
httplib.HTTPSConnection.__init__(self, host, port=port,
key_file=key_file,
cert_file=cert_file, strict=strict,
timeout=timeout)
self._fetch = _new_fixed_fetch(
not disable_ssl_certificate_validation)
    # Update the connection classes to use the Google App Engine specific ones.
SCHEME_TO_CONNECTION = {
'http': AppEngineHttpConnection,
'https': AppEngineHttpsConnection
}
except (ImportError, AttributeError):
pass
class Http(object):
"""An HTTP client that handles:
- all methods
- caching
- ETags
- compression
- HTTPS
- Basic
- Digest
- WSSE
and more.
"""
def __init__(self, cache=None, timeout=None,
proxy_info=proxy_info_from_environment,
ca_certs=None, disable_ssl_certificate_validation=False):
"""If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the
same interface as FileCache.
All timeouts are in seconds. If None is passed for timeout
then Python's default timeout for sockets will be used. See
for example the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
`proxy_info` may be:
- a callable that takes the http scheme ('http' or 'https') and
returns a ProxyInfo instance per request. By default, uses
            proxy_info_from_environment.
- a ProxyInfo instance (static proxy config).
- None (proxy disabled).
ca_certs is the path of a file containing root CA certificates for SSL
server certificate validation. By default, a CA cert file bundled with
httplib2 is used.
If disable_ssl_certificate_validation is true, SSL cert validation will
not be performed.
"""
self.proxy_info = proxy_info
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# Map domain name to an httplib connection
self.connections = {}
# The location of the cache, for now a directory
# where cached responses are held.
if cache and isinstance(cache, basestring):
self.cache = FileCache(cache)
else:
self.cache = cache
# Name/password
self.credentials = Credentials()
# Key/cert
self.certificates = KeyCerts()
# authorization objects
self.authorizations = []
# If set to False then no redirects are followed, even safe ones.
self.follow_redirects = True
# Which HTTP methods do we apply optimistic concurrency to, i.e.
# which methods get an "if-match:" etag header added to them.
self.optimistic_concurrency_methods = ["PUT", "PATCH"]
# If 'follow_redirects' is True, and this is set to True then
        # all redirects are followed, including unsafe ones.
self.follow_all_redirects = False
self.ignore_etag = False
self.force_exception_to_status_code = False
self.timeout = timeout
# Keep Authorization: headers on a redirect.
self.forward_authorization_headers = False
def __getstate__(self):
state_dict = copy.copy(self.__dict__)
# In case request is augmented by some foreign object such as
# credentials which handle auth
if 'request' in state_dict:
del state_dict['request']
if 'connections' in state_dict:
del state_dict['connections']
return state_dict
def __setstate__(self, state):
self.__dict__.update(state)
self.connections = {}
def _auth_from_challenge(self, host, request_uri, headers, response, content):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
challenges = _parse_www_authenticate(response, 'www-authenticate')
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if challenges.has_key(scheme):
yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
def add_credentials(self, name, password, domain=""):
"""Add a name and password that will be used
any time a request requires authentication."""
self.credentials.add(name, password, domain)
def add_certificate(self, key, cert, domain):
"""Add a key and cert that will be used
any time a request requires authentication."""
self.certificates.add(key, cert, domain)
def clear_credentials(self):
"""Remove all the names and passwords
that are used for authentication"""
self.credentials.clear()
self.authorizations = []
def _conn_request(self, conn, request_uri, method, body, headers):
i = 0
seen_bad_status_line = False
while i < RETRIES:
i += 1
try:
if hasattr(conn, 'sock') and conn.sock is None:
conn.connect()
conn.request(method, request_uri, body, headers)
except socket.timeout:
raise
except socket.gaierror:
conn.close()
raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
except ssl_SSLError:
conn.close()
raise
except socket.error, e:
err = 0
if hasattr(e, 'args'):
err = getattr(e, 'args')[0]
else:
err = e.errno
if err == errno.ECONNREFUSED: # Connection refused
raise
if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
continue # retry on potentially transient socket errors
except httplib.HTTPException:
# Just because the server closed the connection doesn't apparently mean
# that the server didn't send a response.
if hasattr(conn, 'sock') and conn.sock is None:
if i < RETRIES-1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
if i < RETRIES-1:
conn.close()
conn.connect()
continue
try:
response = conn.getresponse()
except httplib.BadStatusLine:
# If we get a BadStatusLine on the first try then that means
# the connection just went stale, so retry regardless of the
# number of RETRIES set.
if not seen_bad_status_line and i == 1:
i = 0
seen_bad_status_line = True
conn.close()
conn.connect()
continue
else:
conn.close()
raise
except (socket.error, httplib.HTTPException):
if i < RETRIES-1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
else:
content = ""
if method == "HEAD":
conn.close()
else:
content = response.read()
response = Response(response)
if method != "HEAD":
content = _decompressContent(response, content)
break
return (response, content)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
"""Do the actual request using the connection object
and also follow one level of redirects if necessary"""
auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
auth = auths and sorted(auths)[0][1] or None
if auth:
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers )
response._stale_digest = 1
if response.status == 401:
for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
authorization.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers, )
if response.status != 401:
self.authorizations.append(authorization)
authorization.response(response, body)
break
if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
if not response.has_key('location') and response.status != 300:
                        raise RedirectMissingLocation("Redirected but the response is missing a Location: header.", response, content)
# Fix-up relative redirects (which violate an RFC 2616 MUST)
if response.has_key('location'):
location = response['location']
(scheme, authority, path, query, fragment) = parse_uri(location)
                        if authority is None:
response['location'] = urlparse.urljoin(absolute_uri, location)
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
if headers.has_key('if-none-match'):
del headers['if-none-match']
if headers.has_key('if-modified-since'):
del headers['if-modified-since']
if 'authorization' in headers and not self.forward_authorization_headers:
del headers['authorization']
if response.has_key('location'):
location = response['location']
old_response = copy.deepcopy(response)
if not old_response.has_key('content-location'):
old_response['content-location'] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
redirect_method = "GET"
body = None
(response, content) = self.request(
location, method=redirect_method,
body=body, headers=headers,
redirections=redirections - 1)
response.previous = old_response
else:
raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
def _normalize_headers(self, headers):
return _normalize_headers(headers)
# Need to catch and rebrand some exceptions
# Then need to optionally turn all exceptions into status codes
# including all socket.* and httplib.* exceptions.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
""" Performs a single HTTP request.
The 'uri' is the URI of the HTTP resource and can begin with either
'http' or 'https'. The value of 'uri' must be an absolute URI.
The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
etc. There is no restriction on the methods allowed.
The 'body' is the entity body to be sent with the request. It is a
string object.
Any extra headers that are to be sent with the request should be
provided in the 'headers' dictionary.
        The maximum number of redirects to follow before raising an
        exception is 'redirections'. The default is 5.
The return value is a tuple of (response, content), the first
        being an instance of the 'Response' class, the second being
a string that contains the response entity body.
"""
try:
if headers is None:
headers = {}
else:
headers = self._normalize_headers(headers)
if not headers.has_key('user-agent'):
headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
domain_port = authority.split(":")[0:2]
if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
scheme = 'https'
authority = domain_port[0]
proxy_info = self._get_proxy_info(scheme, authority)
conn_key = scheme+":"+authority
if conn_key in self.connections:
conn = self.connections[conn_key]
else:
if not connection_type:
connection_type = SCHEME_TO_CONNECTION[scheme]
certs = list(self.certificates.iter(authority))
if scheme == 'https':
if certs:
conn = self.connections[conn_key] = connection_type(
authority, key_file=certs[0][0],
cert_file=certs[0][1], timeout=self.timeout,
proxy_info=proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=proxy_info)
conn.set_debuglevel(debuglevel)
if 'range' not in headers and 'accept-encoding' not in headers:
headers['accept-encoding'] = 'gzip, deflate'
info = email.Message.Message()
cached_value = None
if self.cache:
cachekey = defrag_uri.encode('utf-8')
cached_value = self.cache.get(cachekey)
if cached_value:
# info = email.message_from_string(cached_value)
#
# Need to replace the line above with the kludge below
# to fix the non-existent bug not fixed in this
# bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
try:
info, content = cached_value.split('\r\n\r\n', 1)
feedparser = email.FeedParser.FeedParser()
feedparser.feed(info)
info = feedparser.close()
feedparser._parse = None
except (IndexError, ValueError):
self.cache.delete(cachekey)
cachekey = None
cached_value = None
else:
cachekey = None
if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
# http://www.w3.org/1999/04/Editing/
headers['if-match'] = info['etag']
if method not in ["GET", "HEAD"] and self.cache and cachekey:
# RFC 2616 Section 13.10
self.cache.delete(cachekey)
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
if method in ['GET', 'HEAD'] and 'vary' in info:
vary = info['vary']
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
value = info[key]
if headers.get(header, None) != value:
cached_value = None
break
if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
if info.has_key('-x-permanent-redirect-url'):
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
(response, new_content) = self.request(
info['-x-permanent-redirect-url'], method='GET',
headers=headers, redirections=redirections - 1)
response.previous = Response(info)
response.previous.fromcache = True
else:
# Determine our course of action:
# Is the cached entry fresh or stale?
# Has the client requested a non-cached response?
#
# There seems to be three possible answers:
# 1. [FRESH] Return the cache entry w/o doing a GET
# 2. [STALE] Do the GET (but add in cache validators if available)
# 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
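                    # Editor's note (illustrative, not from the source): a request
                    # carrying "Cache-Control: no-cache" yields TRANSPARENT, a cached
                    # entry still within its freshness lifetime yields FRESH, and an
                    # expired entry yields STALE (cache validators are added below).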
entry_disposition = _entry_disposition(info, headers)
if entry_disposition == "FRESH":
if not cached_value:
info['status'] = '504'
content = ""
response = Response(info)
if cached_value:
response.fromcache = True
return (response, content)
if entry_disposition == "STALE":
if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
headers['if-none-match'] = info['etag']
if info.has_key('last-modified') and not 'last-modified' in headers:
headers['if-modified-since'] = info['last-modified']
elif entry_disposition == "TRANSPARENT":
pass
(response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
# Take all headers that are in response
# and overwrite their values in info.
# unless they are hop-by-hop, or are listed in the connection header.
for key in _get_end2end_headers(response):
info[key] = response[key]
merged_response = Response(info)
if hasattr(response, "_stale_digest"):
merged_response._stale_digest = response._stale_digest
_updateCache(headers, merged_response, content, self.cache, cachekey)
response = merged_response
response.status = 200
response.fromcache = True
elif response.status == 200:
content = new_content
else:
self.cache.delete(cachekey)
content = new_content
else:
cc = _parse_cache_control(headers)
if cc.has_key('only-if-cached'):
info['status'] = '504'
response = Response(info)
content = ""
else:
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
except Exception, e:
if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse):
response = e.response
content = e.content
response.status = 500
response.reason = str(e)
elif isinstance(e, socket.timeout):
content = "Request Timeout"
response = Response({
"content-type": "text/plain",
"status": "408",
"content-length": len(content)
})
response.reason = "Request Timeout"
else:
content = str(e)
response = Response({
"content-type": "text/plain",
"status": "400",
"content-length": len(content)
})
response.reason = "Bad Request"
else:
raise
return (response, content)
def _get_proxy_info(self, scheme, authority):
"""Return a ProxyInfo instance (or None) based on the scheme
and authority.
"""
hostname, port = urllib.splitport(authority)
proxy_info = self.proxy_info
if callable(proxy_info):
proxy_info = proxy_info(scheme)
if (hasattr(proxy_info, 'applies_to')
and not proxy_info.applies_to(hostname)):
proxy_info = None
return proxy_info
class Response(dict):
"""An object more like email.Message than httplib.HTTPResponse."""
"""Is this response from our local cache"""
fromcache = False
"""HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
version = 11
"Status code returned by server. "
status = 200
"""Reason phrase returned by server."""
reason = "Ok"
previous = None
def __init__(self, info):
# info is either an email.Message or
# an httplib.HTTPResponse object.
if isinstance(info, httplib.HTTPResponse):
for key, value in info.getheaders():
self[key.lower()] = value
self.status = info.status
self['status'] = str(self.status)
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.Message.Message):
for key, value in info.items():
self[key.lower()] = value
self.status = int(self['status'])
else:
for key, value in info.iteritems():
self[key.lower()] = value
self.status = int(self.get('status', self.status))
self.reason = self.get('reason', self.reason)
def __getattr__(self, name):
if name == 'dict':
return self
else:
raise AttributeError, name
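# Editor's sketch (not part of httplib2): minimal use of the Http client and
# Response objects defined above. The URL, cache directory and credentials are
# illustrative assumptions; the network call only runs when executed as a script.
if __name__ == "__main__":
    h = Http(cache=".cache")                   # string cache -> on-disk FileCache
    h.add_credentials("name", "password")      # used if a request needs auth
    response, content = h.request("http://example.org/", "GET")
    print response.status, response.fromcache  # e.g. 200 False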
| apache-2.0 |
dreardon/crits | crits/certificates/views.py | 16 | 5343 | import json
from django.contrib.auth.decorators import user_passes_test
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from crits.core import form_consts
from crits.core.user_tools import user_can_view_data
from crits.core.user_tools import user_is_admin
from crits.certificates.forms import UploadCertificateForm
from crits.certificates.handlers import handle_cert_file
from crits.certificates.handlers import delete_cert, get_certificate_details
from crits.certificates.handlers import generate_cert_jtable, generate_cert_csv
@user_passes_test(user_can_view_data)
def certificates_listing(request, option=None):
"""
Generate Certificate Listing template.
:param request: Django request object (Required)
:type request: :class:`django.http.HttpRequest`
:param option: Whether or not we should generate a CSV (yes if option is "csv")
:type option: str
:returns: :class:`django.http.HttpResponse`
"""
if option == "csv":
return generate_cert_csv(request)
return generate_cert_jtable(request, option)
@user_passes_test(user_can_view_data)
def certificate_details(request, md5):
"""
Generate Certificate Details template.
:param request: Django request object (Required)
:type request: :class:`django.http.HttpRequest`
:param md5: The MD5 of the Certificate.
:type md5: str
:returns: :class:`django.http.HttpResponse`
"""
template = 'certificate_details.html'
analyst = request.user.username
(new_template, args) = get_certificate_details(md5, analyst)
if new_template:
template = new_template
return render_to_response(template,
args,
RequestContext(request))
@user_passes_test(user_can_view_data)
def upload_certificate(request):
"""
Add a new Certificate to CRITs.
:param request: Django request object (Required)
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST':
form = UploadCertificateForm(request.user, request.POST, request.FILES)
if form.is_valid():
filedata = request.FILES['filedata']
filename = filedata.name
data = filedata.read() # XXX: Should be using chunks here.
source = form.cleaned_data.get('source')
user = request.user.username
description = form.cleaned_data.get('description', '')
related = form.cleaned_data.get('related_id', '')
related_type = form.cleaned_data.get('related_type', '')
bucket_list = form.cleaned_data.get(form_consts.Common.BUCKET_LIST_VARIABLE_NAME)
ticket = form.cleaned_data.get(form_consts.Common.TICKET_VARIABLE_NAME)
method = form.cleaned_data.get('method', '') or 'Upload'
reference = form.cleaned_data.get('reference', '')
status = handle_cert_file(filename, data, source, user, description,
related_id=related, related_type=related_type,
method=method, reference=reference,
bucket_list=bucket_list, ticket=ticket)
if status['success']:
return render_to_response('file_upload_response.html',
{'response': json.dumps({
'message': 'Certificate uploaded successfully! <a href="%s">View Certificate</a>'
% reverse('crits.certificates.views.certificate_details',
args=[status['md5']]), 'success': True})},
RequestContext(request))
else:
            return render_to_response('file_upload_response.html',
                                      {'response': json.dumps({'success': False,
                                                               'message': status['message']})},
                                      RequestContext(request))
else:
return render_to_response('file_upload_response.html',
{'response': json.dumps({'success': False,
'form': form.as_table()})},
RequestContext(request))
else:
return render_to_response('error.html',
{'error': "Expected POST."},
RequestContext(request))
@user_passes_test(user_is_admin)
def remove_certificate(request, md5):
"""
Remove a Certificate from CRITs.
:param request: Django request object (Required)
:type request: :class:`django.http.HttpRequest`
:param md5: The MD5 of the Certificate.
:type md5: str
:returns: :class:`django.http.HttpResponse`
"""
result = delete_cert(md5, '%s' % request.user.username)
if result:
return HttpResponseRedirect(reverse('crits.certificates.views.certificates_listing'))
else:
return render_to_response('error.html',
{'error': "Could not delete certificate"})
| mit |
nuagenetworks/tempest | tempest/tests/common/test_admin_available.py | 3 | 4749 | # Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslotest import mockpatch
from tempest.common import credentials_factory as credentials
from tempest import config
from tempest.tests import base
from tempest.tests import fake_config
class TestAdminAvailable(base.TestCase):
identity_version = 'v2'
def setUp(self):
super(TestAdminAvailable, self).setUp()
self.useFixture(fake_config.ConfigFixture())
self.stubs.Set(config, 'TempestConfigPrivate', fake_config.FakePrivate)
def run_test(self, dynamic_creds, use_accounts_file, admin_creds):
cfg.CONF.set_default('use_dynamic_credentials',
dynamic_creds, group='auth')
if use_accounts_file:
accounts = [{'username': 'u1',
'tenant_name': 't1',
'password': 'p'},
{'username': 'u2',
'tenant_name': 't2',
'password': 'p'}]
if admin_creds == 'role':
accounts.append({'username': 'admin',
'tenant_name': 'admin',
'password': 'p',
'roles': ['admin']})
elif admin_creds == 'type':
accounts.append({'username': 'admin',
'tenant_name': 'admin',
'password': 'p',
'types': ['admin']})
self.useFixture(mockpatch.Patch(
'tempest.common.preprov_creds.read_accounts_yaml',
return_value=accounts))
cfg.CONF.set_default('test_accounts_file',
use_accounts_file, group='auth')
self.useFixture(mockpatch.Patch('os.path.isfile',
return_value=True))
else:
self.useFixture(mockpatch.Patch('os.path.isfile',
return_value=False))
if admin_creds:
username = 'u'
tenant = 't'
password = 'p'
domain = 'd'
else:
username = None
tenant = None
password = None
domain = None
cfg.CONF.set_default('admin_username', username, group='auth')
cfg.CONF.set_default('admin_tenant_name', tenant, group='auth')
cfg.CONF.set_default('admin_password', password, group='auth')
cfg.CONF.set_default('admin_domain_name', domain, group='auth')
expected = admin_creds is not None or dynamic_creds
observed = credentials.is_admin_available(
identity_version=self.identity_version)
self.assertEqual(expected, observed)
# Dynamic credentials implies admin so only one test case for True
def test__dynamic_creds__accounts_file__no_admin(self):
self.run_test(dynamic_creds=True,
use_accounts_file=True,
admin_creds=None)
def test__no_dynamic_creds__accounts_file__no_admin(self):
self.run_test(dynamic_creds=False,
use_accounts_file=True,
admin_creds=None)
def test__no_dynamic_creds__accounts_file__admin_role(self):
self.run_test(dynamic_creds=False,
use_accounts_file=True,
admin_creds='role')
def test__no_dynamic_creds__accounts_file__admin_type(self):
self.run_test(dynamic_creds=False,
use_accounts_file=True,
admin_creds='type')
def test__no_dynamic_creds__no_accounts_file__no_admin(self):
self.run_test(dynamic_creds=False,
use_accounts_file=False,
admin_creds=None)
def test__no_dynamic_creds__no_accounts_file__admin(self):
self.run_test(dynamic_creds=False,
use_accounts_file=False,
admin_creds='role')
class TestAdminAvailableV3(TestAdminAvailable):
identity_version = 'v3'
| apache-2.0 |
bjarniegill/Cordova-Survey | csv_parser/.env/lib/python2.7/site-packages/setuptools/command/upload.py | 248 | 1172 | import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
self.username = (
self.username or
getpass.getuser()
)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
return keyring.get_password(self.repository, self.username)
except Exception:
pass
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
try:
return getpass.getpass()
except (Exception, KeyboardInterrupt):
pass
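# Editor's note (not part of setuptools): the assignment in finalize_options
# short-circuits, stopping at the first truthy source, roughly:
#   password = configured_password or keyring.get_password(repository, username) or getpass.getpass()
# where configured_password stands for whatever distutils already loaded from
# its config (an assumption about the usual source, e.g. a .pypirc file).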
| mit |
coderbone/SickRage | sickbeard/trakt.py | 3 | 5762 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Author: Nic Wolfe <[email protected]>
# URL: https://sickrage.tv/
# Git: https://github.com/SiCKRAGETV/SickRage.git
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import logging
import requests
import certifi
import json
import time
import sickbeard
class TraktAPI():
def __init__(self, ssl_verify=True, timeout=30):
self.session = requests.Session()
self.verify = certifi.where() if ssl_verify else False
self.timeout = timeout if timeout else None
self.auth_url = sickbeard.TRAKT_OAUTH_URL
self.api_url = sickbeard.TRAKT_API_URL
self.headers = {
'Content-Type': 'application/json',
'trakt-api-version': '2',
'trakt-api-key': sickbeard.TRAKT_API_KEY
}
def traktToken(self, trakt_pin=None, refresh=False, count=0):
if count > 3:
sickbeard.TRAKT_ACCESS_TOKEN = ''
return False
elif count > 0:
time.sleep(2)
data = {
'client_id': sickbeard.TRAKT_API_KEY,
'client_secret': sickbeard.TRAKT_API_SECRET,
'redirect_uri': 'urn:ietf:wg:oauth:2.0:oob'
}
if refresh:
data['grant_type'] = 'refresh_token'
data['refresh_token'] = sickbeard.TRAKT_REFRESH_TOKEN
else:
data['grant_type'] = 'authorization_code'
            if trakt_pin is not None:
data['code'] = trakt_pin
headers = {
'Content-Type': 'application/json'
}
resp = self.traktRequest('oauth/token', data=data, headers=headers, url=self.auth_url , method='POST', count=count)
if 'access_token' in resp:
sickbeard.TRAKT_ACCESS_TOKEN = resp['access_token']
if 'refresh_token' in resp:
sickbeard.TRAKT_REFRESH_TOKEN = resp['refresh_token']
return True
return False
def validateAccount(self):
resp = self.traktRequest('users/settings')
if 'account' in resp:
return True
return False
def traktRequest(self, path, data=None, headers=None, url=None, method='GET', count=0):
        if url is None:
            url = self.api_url
        count = count + 1
        if headers is None:
            headers = self.headers
        if sickbeard.TRAKT_ACCESS_TOKEN is None:
            logging.warning('You must get a Trakt TOKEN. Check your Trakt settings')
            return {}
headers['Authorization'] = 'Bearer ' + sickbeard.TRAKT_ACCESS_TOKEN
try:
resp = self.session.request(method, url + path, headers=headers, timeout=self.timeout,
data=json.dumps(data) if data else [], verify=self.verify)
# check for http errors and raise if any are present
resp.raise_for_status()
# convert response to json
resp = resp.json()
except requests.RequestException as e:
code = getattr(e.response, 'status_code', None)
if not code:
                if 'timed out' in str(e):
logging.warning('Timeout connecting to Trakt. Try to increase timeout value in Trakt settings')
# This is pretty much a fatal error if there is no status_code
# It means there basically was no response at all
else:
logging.debug('Could not connect to Trakt. Error: {0}'.format(e))
elif code == 502:
# Retry the request, cloudflare had a proxying issue
logging.debug('Retrying trakt api request: %s' % path)
return self.traktRequest(path, data, headers, url, method)
elif code == 401:
if self.traktToken(refresh=True, count=count):
return self.traktRequest(path, data, headers, url, method)
else:
logging.warning('Unauthorized. Please check your Trakt settings')
            elif code in (500, 501, 503, 504, 520, 521, 522):
#http://docs.trakt.apiary.io/#introduction/status-codes
logging.debug('Trakt may have some issues and it\'s unavailable. Try again later please')
elif code == 404:
logging.debug('Trakt error (404) the resource does not exist: %s' % url + path)
else:
logging.error('Could not connect to Trakt. Code error: {0}'.format(code))
return {}
# check and confirm trakt call did not fail
if isinstance(resp, dict) and resp.get('status', False) == 'failure':
if 'message' in resp:
raise traktException(resp['message'])
if 'error' in resp:
raise traktException(resp['error'])
else:
raise traktException('Unknown Error')
return resp
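# Editor's sketch (not part of SickRage): a typical client flow; the PIN value
# below is an illustrative assumption.
#   api = TraktAPI(ssl_verify=True, timeout=30)
#   api.traktToken(trakt_pin='12345678')   # exchange the PIN for access/refresh tokens
#   if api.validateAccount():
#       settings = api.traktRequest('users/settings')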
class traktException(Exception):
pass
class traktAuthException(traktException):
pass
class traktServerBusy(traktException):
    pass
| gpl-3.0 |
Jgarcia-IAS/localizacion | openerp/addons-extra/odoo-pruebas/odoo-server/addons/stock_account/stock.py | 13 | 13118 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class stock_location_path(osv.osv):
_inherit = "stock.location.path"
_columns = {
'invoice_state': fields.selection([
("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")], "Invoice Status",),
}
_defaults = {
'invoice_state': '',
}
def _prepare_push_apply(self, cr, uid, rule, move, context=None):
res = super(stock_location_path, self)._prepare_push_apply(cr, uid, rule, move, context=context)
res['invoice_state'] = rule.invoice_state or 'none'
return res
#----------------------------------------------------------
# Procurement Rule
#----------------------------------------------------------
class procurement_rule(osv.osv):
_inherit = 'procurement.rule'
_columns = {
'invoice_state': fields.selection([
("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")], "Invoice Status",),
}
_defaults = {
'invoice_state': '',
}
#----------------------------------------------------------
# Procurement Order
#----------------------------------------------------------
class procurement_order(osv.osv):
_inherit = "procurement.order"
_columns = {
'invoice_state': fields.selection([("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")
], "Invoice Control"),
}
def _run_move_create(self, cr, uid, procurement, context=None):
res = super(procurement_order, self)._run_move_create(cr, uid, procurement, context=context)
res.update({'invoice_state': procurement.rule_id.invoice_state or procurement.invoice_state or 'none'})
return res
_defaults = {
'invoice_state': ''
}
#----------------------------------------------------------
# Move
#----------------------------------------------------------
class stock_move(osv.osv):
_inherit = "stock.move"
_columns = {
'invoice_state': fields.selection([("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")], "Invoice Control",
select=True, required=True, track_visibility='onchange',
states={'draft': [('readonly', False)]}),
}
_defaults = {
'invoice_state': lambda *args, **argv: 'none'
}
def _get_master_data(self, cr, uid, move, company, context=None):
        ''' returns a tuple (browse_record(res.partner), ID(res.users), ID(res.currency)) '''
currency = company.currency_id.id
partner = move.picking_id and move.picking_id.partner_id
if partner:
code = self.get_code_from_locs(cr, uid, move, context=context)
if partner.property_product_pricelist and code == 'outgoing':
currency = partner.property_product_pricelist.currency_id.id
return partner, uid, currency
def _create_invoice_line_from_vals(self, cr, uid, move, invoice_line_vals, context=None):
return self.pool.get('account.invoice.line').create(cr, uid, invoice_line_vals, context=context)
def _get_price_unit_invoice(self, cr, uid, move_line, type, context=None):
""" Gets price unit for invoice
@param move_line: Stock move lines
@param type: Type of invoice
@return: The price unit for the move line
"""
if context is None:
context = {}
if type in ('in_invoice', 'in_refund'):
return move_line.price_unit
else:
# If partner given, search price in its sale pricelist
if move_line.partner_id and move_line.partner_id.property_product_pricelist:
pricelist_obj = self.pool.get("product.pricelist")
pricelist = move_line.partner_id.property_product_pricelist.id
price = pricelist_obj.price_get(cr, uid, [pricelist],
move_line.product_id.id, move_line.product_uom_qty, move_line.partner_id.id, {
'uom': move_line.product_uom.id,
'date': move_line.date,
})[pricelist]
if price:
return price
return move_line.product_id.list_price
def _get_invoice_line_vals(self, cr, uid, move, partner, inv_type, context=None):
fp_obj = self.pool.get('account.fiscal.position')
# Get account_id
if inv_type in ('out_invoice', 'out_refund'):
account_id = move.product_id.property_account_income.id
if not account_id:
account_id = move.product_id.categ_id.property_account_income_categ.id
else:
account_id = move.product_id.property_account_expense.id
if not account_id:
account_id = move.product_id.categ_id.property_account_expense_categ.id
fiscal_position = partner.property_account_position
account_id = fp_obj.map_account(cr, uid, fiscal_position, account_id)
# set UoS if it's a sale and the picking doesn't have one
uos_id = move.product_uom.id
quantity = move.product_uom_qty
if move.product_uos:
uos_id = move.product_uos.id
quantity = move.product_uos_qty
return {
'name': move.name,
'account_id': account_id,
'product_id': move.product_id.id,
'uos_id': uos_id,
'quantity': quantity,
'price_unit': self._get_price_unit_invoice(cr, uid, move, inv_type),
'discount': 0.0,
'account_analytic_id': False,
}
#----------------------------------------------------------
# Picking
#----------------------------------------------------------
class stock_picking(osv.osv):
_inherit = 'stock.picking'
def __get_invoice_state(self, cr, uid, ids, name, arg, context=None):
result = {}
for pick in self.browse(cr, uid, ids, context=context):
result[pick.id] = 'none'
for move in pick.move_lines:
if move.invoice_state == 'invoiced':
result[pick.id] = 'invoiced'
elif move.invoice_state == '2binvoiced':
result[pick.id] = '2binvoiced'
break
return result
    def __get_picking_move(self, cr, uid, ids, context=None):
res = []
for move in self.pool.get('stock.move').browse(cr, uid, ids, context=context):
if move.picking_id:
res.append(move.picking_id.id)
return res
def _set_inv_state(self, cr, uid, picking_id, name, value, arg, context=None):
pick = self.browse(cr, uid, picking_id, context=context)
moves = [x.id for x in pick.move_lines]
        move_obj = self.pool.get("stock.move")
move_obj.write(cr, uid, moves, {'invoice_state': pick.invoice_state}, context=context)
_columns = {
'invoice_state': fields.function(__get_invoice_state, type='selection', selection=[
("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")
], string="Invoice Control", required=True,
fnct_inv = _set_inv_state,
store={
'stock.picking': (lambda self, cr, uid, ids, c={}: ids, ['state'], 10),
'stock.move': (__get_picking_move, ['picking_id', 'invoice_state'], 10),
},
),
}
_defaults = {
'invoice_state': lambda *args, **argv: 'none'
}
def _create_invoice_from_picking(self, cr, uid, picking, vals, context=None):
        ''' This function simply creates the invoice from the given values. It is overridden in the delivery module to add the delivery costs.
'''
invoice_obj = self.pool.get('account.invoice')
return invoice_obj.create(cr, uid, vals, context=context)
def _get_partner_to_invoice(self, cr, uid, picking, context=None):
""" Gets the partner that will be invoiced
Note that this function is inherited in the sale and purchase modules
@param picking: object of the picking for which we are selecting the partner to invoice
@return: object of the partner to invoice
"""
return picking.partner_id and picking.partner_id.id
def action_invoice_create(self, cr, uid, ids, journal_id, group=False, type='out_invoice', context=None):
""" Creates invoice based on the invoice state selected for picking.
@param journal_id: Id of journal
@param group: Whether to create a group invoice or not
@param type: Type invoice to be created
@return: Ids of created invoices for the pickings
"""
context = context or {}
todo = {}
for picking in self.browse(cr, uid, ids, context=context):
partner = self._get_partner_to_invoice(cr, uid, picking, context)
#grouping is based on the invoiced partner
if group:
key = partner
else:
key = picking.id
for move in picking.move_lines:
if move.invoice_state == '2binvoiced':
if (move.state != 'cancel') and not move.scrapped:
todo.setdefault(key, [])
todo[key].append(move)
invoices = []
for moves in todo.values():
invoices += self._invoice_create_line(cr, uid, moves, journal_id, type, context=context)
return invoices
def _get_invoice_vals(self, cr, uid, key, inv_type, journal_id, origin, context=None):
if context is None:
context = {}
partner, currency_id, company_id, user_id = key
if inv_type in ('out_invoice', 'out_refund'):
account_id = partner.property_account_receivable.id
payment_term = partner.property_payment_term.id or False
else:
account_id = partner.property_account_payable.id
payment_term = partner.property_supplier_payment_term.id or False
return {
'origin': origin,
'date_invoice': context.get('date_inv', False),
'user_id': user_id,
'partner_id': partner.id,
'account_id': account_id,
'payment_term': payment_term,
'type': inv_type,
'fiscal_position': partner.property_account_position.id,
'company_id': company_id,
'currency_id': currency_id,
'journal_id': journal_id,
}
def _invoice_create_line(self, cr, uid, moves, journal_id, inv_type='out_invoice', context=None):
invoice_obj = self.pool.get('account.invoice')
move_obj = self.pool.get('stock.move')
invoices = {}
for move in moves:
company = move.company_id
origin = move.picking_id.name
partner, user_id, currency_id = move_obj._get_master_data(cr, uid, move, company, context=context)
key = (partner, currency_id, company.id, user_id)
if key not in invoices:
# Get account and payment terms
invoice_vals = self._get_invoice_vals(cr, uid, key, inv_type, journal_id, origin, context=context)
invoice_id = self._create_invoice_from_picking(cr, uid, move.picking_id, invoice_vals, context=context)
invoices[key] = invoice_id
invoice_line_vals = move_obj._get_invoice_line_vals(cr, uid, move, partner, inv_type, context=context)
invoice_line_vals['invoice_id'] = invoices[key]
invoice_line_vals['origin'] = origin
move_obj._create_invoice_line_from_vals(cr, uid, move, invoice_line_vals, context=context)
move_obj.write(cr, uid, move.id, {'invoice_state': 'invoiced'}, context=context)
invoice_obj.button_compute(cr, uid, invoices.values(), context=context, set_total=(inv_type in ('in_invoice', 'in_refund')))
return invoices.values()
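# Editor's sketch (not part of the module): invoicing pickings through the ORM;
# the ids and journal_id values below are illustrative placeholders.
#   picking_obj = self.pool.get('stock.picking')
#   invoice_ids = picking_obj.action_invoice_create(
#       cr, uid, ids, journal_id, group=True, type='out_invoice')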
| agpl-3.0 |
ThomasA/pywt | pywt/thresholding.py | 2 | 3453 | # -*- coding: utf-8 -*-
# Copyright (c) 2006-2012 Filip Wasilewski <http://en.ig.ma/>
# See COPYING for license details.
"""
The thresholding helper module implements the most popular signal thresholding
functions.
"""
from __future__ import division, print_function, absolute_import
__all__ = ['threshold']
import numpy as np
def soft(data, value, substitute=0):
data = np.asarray(data)
magnitude = np.absolute(data)
sign = np.sign(data)
thresholded = (magnitude - value).clip(0) * sign
cond = np.less(magnitude, value)
return np.where(cond, substitute, thresholded)
def hard(data, value, substitute=0):
data = np.asarray(data)
cond = np.less(np.absolute(data), value)
return np.where(cond, substitute, data)
def greater(data, value, substitute=0):
data = np.asarray(data)
return np.where(np.less(data, value), substitute, data)
def less(data, value, substitute=0):
data = np.asarray(data)
return np.where(np.greater(data, value), substitute, data)
thresholding_options = {'soft': soft,
'hard': hard,
'greater': greater,
'less': less}
def threshold(data, value, mode='soft', substitute=0):
"""
Thresholds the input data depending on the mode argument.
In ``soft`` thresholding, the data values where their absolute value is
less than the value param are replaced with substitute. From the data
values with absolute value greater or equal to the thresholding value,
a quantity of ``(signum * value)`` is subtracted.
In ``hard`` thresholding, the data values where their absolute value is
less than the value param are replaced with substitute. Data values with
absolute value greater or equal to the thresholding value stay untouched.
In ``greater`` thresholding, the data is replaced with substitute where
data is below the thresholding value. Greater data values pass untouched.
In ``less`` thresholding, the data is replaced with substitute where data
is above the thresholding value. Less data values pass untouched.
Parameters
----------
data : array_like
Numeric data.
value : scalar
Thresholding value.
mode : {'soft', 'hard', 'greater', 'less'}
Decides the type of thresholding to be applied on input data. Default
is 'soft'.
substitute : float, optional
Substitute value (default: 0).
Returns
-------
output : array
Thresholded array.
Examples
--------
>>> import pywt
>>> data = np.linspace(1, 4, 7)
>>> data
array([ 1. , 1.5, 2. , 2.5, 3. , 3.5, 4. ])
>>> pywt.threshold(data, 2, 'soft')
array([ 0. , 0. , 0. , 0.5, 1. , 1.5, 2. ])
>>> pywt.threshold(data, 2, 'hard')
array([ 0. , 0. , 2. , 2.5, 3. , 3.5, 4. ])
>>> pywt.threshold(data, 2, 'greater')
array([ 0. , 0. , 2. , 2.5, 3. , 3.5, 4. ])
>>> pywt.threshold(data, 2, 'less')
array([ 1. , 1.5, 2. , 0. , 0. , 0. , 0. ])
"""
try:
return thresholding_options[mode](data, value, substitute)
except KeyError:
# Make sure error is always identical by sorting keys
keys = ("'{0}'".format(key) for key in
sorted(thresholding_options.keys()))
raise ValueError("The mode parameter only takes values from: {0}."
.format(', '.join(keys)))
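# Editor's sketch (not part of pywt): a runnable comparison of the four modes
# on the same sample data used in the docstring above.
if __name__ == "__main__":
    data = np.linspace(1, 4, 7)
    for mode in ('soft', 'hard', 'greater', 'less'):
        print('%-7s -> %s' % (mode, threshold(data, 2, mode)))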
| mit |
larsbergstrom/servo | tests/wpt/web-platform-tests/xhr/resources/access-control-preflight-denied.py | 23 | 1608 | def main(request, response):
def fail(message):
response.content = "FAIL: " + str(message)
response.status = 400
def getState(token):
server_state = request.server.stash.take(token)
if not server_state:
return "Uninitialized"
return server_state
def setState(token, state):
request.server.stash.put(token, state)
def resetState(token):
setState(token, "")
response.headers.set("Cache-Control", "no-store")
response.headers.set("Access-Control-Allow-Origin", request.headers.get("origin"))
response.headers.set("Access-Control-Max-Age", 1)
token = request.GET.first("token", None)
state = getState(token)
command = request.GET.first("command", None)
if command == "reset":
if request.method == "GET":
resetState(token)
response.content = "Server state reset"
else:
fail("Invalid Method.")
elif state == "Uninitialized":
if request.method == "OPTIONS":
response.content = "This request should not be displayed."
setState(token, "Denied")
else:
fail(state)
elif state == "Denied":
if request.method == "GET" and command == "complete":
resetState(token)
response.content = "Request successfully blocked."
else:
setState("Deny Ignored")
fail("The request was not denied.")
elif state == "Deny Ignored":
resetState(token)
fail(state)
else:
resetState(token)
fail("Unknown Error.")
| mpl-2.0 |
lokirius/python-for-android | python3-alpha/python3-src/Tools/msi/uisample.py | 89 | 147976 |
import msilib, os
dirname = os.path.dirname(__file__)
AdminExecuteSequence = [
(u'InstallValidate', None, 1400),
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'InstallFiles', None, 4000),
(u'InstallAdminPackage', None, 3900),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
]
AdminUISequence = [
(u'AdminWelcomeDlg', None, 1230),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'ExecuteAction', None, 1300),
(u'ExitDialog', None, -1),
(u'FatalError', None, -3),
(u'PrepareDlg', None, 140),
(u'ProgressDlg', None, 1280),
(u'UserExit', None, -2),
]
AdvtExecuteSequence = [
(u'InstallValidate', None, 1400),
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'CreateShortcuts', None, 4500),
(u'PublishComponents', None, 6200),
(u'PublishFeatures', None, 6300),
(u'PublishProduct', None, 6400),
(u'RegisterClassInfo', None, 4600),
(u'RegisterExtensionInfo', None, 4700),
(u'RegisterMIMEInfo', None, 4900),
(u'RegisterProgIdInfo', None, 4800),
]
BBControl = [
]
Billboard = [
]
Binary = [
(u'bannrbmp', msilib.Binary(os.path.join(dirname,"bannrbmp.bin"))),
(u'completi', msilib.Binary(os.path.join(dirname,"completi.bin"))),
(u'custicon', msilib.Binary(os.path.join(dirname,"custicon.bin"))),
(u'dlgbmp', msilib.Binary(os.path.join(dirname,"dlgbmp.bin"))),
(u'exclamic', msilib.Binary(os.path.join(dirname,"exclamic.bin"))),
(u'info', msilib.Binary(os.path.join(dirname,"info.bin"))),
(u'insticon', msilib.Binary(os.path.join(dirname,"insticon.bin"))),
(u'New', msilib.Binary(os.path.join(dirname,"New.bin"))),
(u'removico', msilib.Binary(os.path.join(dirname,"removico.bin"))),
(u'repairic', msilib.Binary(os.path.join(dirname,"repairic.bin"))),
(u'Up', msilib.Binary(os.path.join(dirname,"Up.bin"))),
]
CheckBox = [
]
Property = [
(u'BannerBitmap', u'bannrbmp'),
(u'IAgree', u'No'),
(u'ProductID', u'none'),
(u'ARPHELPLINK', u'http://www.microsoft.com/management'),
(u'ButtonText_Back', u'< &Back'),
(u'ButtonText_Browse', u'Br&owse'),
(u'ButtonText_Cancel', u'Cancel'),
(u'ButtonText_Exit', u'&Exit'),
(u'ButtonText_Finish', u'&Finish'),
(u'ButtonText_Ignore', u'&Ignore'),
(u'ButtonText_Install', u'&Install'),
(u'ButtonText_Next', u'&Next >'),
(u'ButtonText_No', u'&No'),
(u'ButtonText_OK', u'OK'),
(u'ButtonText_Remove', u'&Remove'),
(u'ButtonText_Repair', u'&Repair'),
(u'ButtonText_Reset', u'&Reset'),
(u'ButtonText_Resume', u'&Resume'),
(u'ButtonText_Retry', u'&Retry'),
(u'ButtonText_Return', u'&Return'),
(u'ButtonText_Yes', u'&Yes'),
(u'CompleteSetupIcon', u'completi'),
(u'ComponentDownload', u'ftp://[email protected]/components/'),
(u'CustomSetupIcon', u'custicon'),
(u'DefaultUIFont', u'DlgFont8'),
(u'DialogBitmap', u'dlgbmp'),
(u'DlgTitleFont', u'{&DlgFontBold8}'),
(u'ErrorDialog', u'ErrorDlg'),
(u'ExclamationIcon', u'exclamic'),
(u'InfoIcon', u'info'),
(u'InstallerIcon', u'insticon'),
(u'INSTALLLEVEL', u'3'),
(u'InstallMode', u'Typical'),
(u'PIDTemplate', u'12345<###-%%%%%%%>@@@@@'),
#(u'ProductLanguage', u'1033'),
(u'Progress1', u'Installing'),
(u'Progress2', u'installs'),
(u'PROMPTROLLBACKCOST', u'P'),
(u'RemoveIcon', u'removico'),
(u'RepairIcon', u'repairic'),
(u'Setup', u'Setup'),
(u'ShowUserRegistrationDlg', u'1'),
(u'Wizard', u'Setup Wizard'),
]
ComboBox = [
]
Control = [
(u'AdminWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'AdminWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'AdminWelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will create a server image of [ProductName], at a specified network location. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'AdminWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'AdminWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'ExitDialog', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'ExitDialog', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ExitDialog', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'ExitDialog', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'ExitDialog', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Completing the [ProductName] [Wizard]', None, None),
(u'ExitDialog', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'ExitDialog', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'FatalError', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'FatalError', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'FatalError', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'FatalError', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] ended prematurely', None, None),
(u'FatalError', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'FatalError', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'FatalError', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.', None, None),
(u'FatalError', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'PrepareDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Cancel', None),
(u'PrepareDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'PrepareDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'PrepareDlg', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Please wait while the [Wizard] prepares to guide you through the installation.', None, None),
(u'PrepareDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'PrepareDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', None, None),
(u'PrepareDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', None, None),
(u'PrepareDlg', u'ActionData', u'Text', 135, 125, 220, 30, 196611, None, None, None, None),
(u'PrepareDlg', u'ActionText', u'Text', 135, 100, 220, 20, 196611, None, None, None, None),
(u'ProgressDlg', u'Text', u'Text', 35, 65, 300, 20, 3, None, u'Please wait while the [Wizard] [Progress2] [ProductName]. This may take several minutes.', None, None),
(u'ProgressDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'ProgressDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'ProgressDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ProgressDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'ProgressDlg', u'Title', u'Text', 20, 15, 200, 15, 196611, None, u'[DlgTitleFont][Progress1] [ProductName]', None, None),
(u'ProgressDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'ProgressDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'ProgressDlg', u'ActionText', u'Text', 70, 100, 265, 10, 3, None, None, None, None),
(u'ProgressDlg', u'ProgressBar', u'ProgressBar', 35, 115, 300, 10, 65537, None, u'Progress done', None, None),
(u'ProgressDlg', u'StatusLabel', u'Text', 35, 100, 35, 10, 3, None, u'Status:', None, None),
(u'UserExit', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'UserExit', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'UserExit', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'UserExit', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] was interrupted', None, None),
(u'UserExit', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'UserExit', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'UserExit', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup was interrupted. Your system has not been modified. To install this program at a later time, please run the installation again.', None, None),
(u'UserExit', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'AdminBrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'),
(u'AdminBrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None),
(u'AdminBrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 17, 3, u'TARGETDIR', None, u'OK', None),
(u'AdminBrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminBrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminBrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None),
(u'AdminBrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None),
(u'AdminBrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 458755, u'TARGETDIR', None, u'Up', None),
(u'AdminBrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None),
(u'AdminBrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 7, u'TARGETDIR', None, u'PathLabel', None),
(u'AdminBrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None),
(u'AdminBrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'),
(u'AdminBrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None),
(u'AdminBrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None),
(u'AdminInstallPointDlg', u'Text', u'Text', 25, 80, 320, 10, 3, None, u'&Enter a new network location or click Browse to browse to one.', u'PathEdit', None),
(u'AdminInstallPointDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Text', None),
(u'AdminInstallPointDlg', u'PathEdit', u'PathEdit', 25, 93, 320, 18, 3, u'TARGETDIR', None, u'Browse', None),
(u'AdminInstallPointDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminInstallPointDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminInstallPointDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'AdminInstallPointDlg', u'Description', u'Text', 25, 20, 280, 20, 196611, None, u'Please specify a network location for the server image of the [ProductName] product', None, None),
(u'AdminInstallPointDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Network Location', None, None),
(u'AdminInstallPointDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminInstallPointDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'AdminInstallPointDlg', u'Browse', u'PushButton', 289, 119, 56, 17, 3, None, u'[ButtonText_Browse]', u'Back', None),
(u'AdminRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OrganizationLabel', None),
(u'AdminRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'AdminRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your company information', None, None),
(u'AdminRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Company Information', None, None),
(u'AdminRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 65539, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'AdminRegistrationDlg', u'OrganizationLabel', u'Text', 45, 71, 285, 30, 3, None, u'&Please enter the name of your organization in the box below. This will be used as default company name for subsequent installations of [ProductName]:', u'OrganizationEdit', None),
(u'AdminRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 143, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None),
(u'AdminRegistrationDlg', u'CDKeyLabel', u'Text', 45, 130, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None),
(u'AdminRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 105, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None),
(u'BrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'),
(u'BrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None),
(u'BrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 18, 11, u'_BrowseProperty', None, u'OK', None),
(u'BrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'BrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'BrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None),
(u'BrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None),
(u'BrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 393227, u'_BrowseProperty', None, u'Up', None),
(u'BrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None),
(u'BrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 15, u'_BrowseProperty', None, u'PathLabel', None),
(u'BrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None),
(u'BrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'),
(u'BrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None),
(u'BrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None),
(u'CancelDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Are you sure you want to cancel [ProductName] installation?', None, None),
(u'CancelDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'),
(u'CancelDlg', u'No', u'PushButton', 132, 57, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None),
(u'CancelDlg', u'Yes', u'PushButton', 72, 57, 56, 17, 3, None, u'[ButtonText_Yes]', u'No', None),
(u'CustomizeDlg', u'Text', u'Text', 25, 55, 320, 20, 3, None, u'Click on the icons in the tree below to change the way features will be installed.', None, None),
(u'CustomizeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Tree', None),
(u'CustomizeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'CustomizeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'CustomizeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'CustomizeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Select the way you want features to be installed.', None, None),
(u'CustomizeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Custom Setup', None, None),
(u'CustomizeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'CustomizeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'CustomizeDlg', u'Browse', u'PushButton', 304, 200, 56, 17, 3, None, u'[ButtonText_Browse]', u'Reset', None),
(u'CustomizeDlg', u'Tree', u'SelectionTree', 25, 85, 175, 95, 7, u'_BrowseProperty', u'Tree of selections', u'Browse', None),
(u'CustomizeDlg', u'Box', u'GroupBox', 210, 81, 140, 98, 1, None, None, None, None),
(u'CustomizeDlg', u'Reset', u'PushButton', 42, 243, 56, 17, 3, None, u'[ButtonText_Reset]', u'DiskCost', None),
(u'CustomizeDlg', u'DiskCost', u'PushButton', 111, 243, 56, 17, 3, None, u'Disk &Usage', u'Back', None),
(u'CustomizeDlg', u'ItemDescription', u'Text', 215, 90, 131, 30, 3, None, u'Multiline description of the currently selected item.', None, None),
(u'CustomizeDlg', u'ItemSize', u'Text', 215, 130, 131, 45, 3, None, u'The size of the currently selected item.', None, None),
(u'CustomizeDlg', u'Location', u'Text', 75, 200, 215, 20, 3, None, u"<The selection's path>", None, None),
(u'CustomizeDlg', u'LocationLabel', u'Text', 25, 200, 50, 10, 3, None, u'Location:', None, None),
(u'DiskCostDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes (if any) do not have enough disk space available for the currently selected features. You can remove some files from the highlighted volumes, choose to install fewer features onto local drive(s), or select different destination drive(s).', None, None),
(u'DiskCostDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None),
(u'DiskCostDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'DiskCostDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'DiskCostDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'The disk space required for the installation of the selected features.', None, None),
(u'DiskCostDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None),
(u'DiskCostDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Disk Space Requirements', None, None),
(u'DiskCostDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None),
(u'ErrorDlg', u'Y', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Yes]', None, None),
(u'ErrorDlg', u'A', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None),
(u'ErrorDlg', u'C', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None),
(u'ErrorDlg', u'ErrorIcon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'),
(u'ErrorDlg', u'ErrorText', u'Text', 48, 15, 205, 60, 3, None, u'Information text', None, None),
(u'ErrorDlg', u'I', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Ignore]', None, None),
(u'ErrorDlg', u'N', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_No]', None, None),
(u'ErrorDlg', u'O', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_OK]', None, None),
(u'ErrorDlg', u'R', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Retry]', None, None),
(u'FilesInUse', u'Text', u'Text', 20, 55, 330, 30, 3, None, u'The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.', None, None),
(u'FilesInUse', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Retry', None),
(u'FilesInUse', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'FilesInUse', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'FilesInUse', u'Description', u'Text', 20, 23, 280, 20, 196611, None, u'Some files that need to be updated are currently in use.', None, None),
(u'FilesInUse', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Files in Use', None, None),
(u'FilesInUse', u'Retry', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Retry]', u'Ignore', None),
(u'FilesInUse', u'Exit', u'PushButton', 166, 243, 56, 17, 3, None, u'[ButtonText_Exit]', u'BannerBitmap', None),
(u'FilesInUse', u'Ignore', u'PushButton', 235, 243, 56, 17, 3, None, u'[ButtonText_Ignore]', u'Exit', None),
(u'FilesInUse', u'List', u'ListBox', 20, 87, 330, 130, 7, u'FileInUseProcess', None, None, None),
(u'LicenseAgreementDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'AgreementText', None),
(u'LicenseAgreementDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'LicenseAgreementDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'LicenseAgreementDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'LicenseAgreementDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please read the following license agreement carefully', None, None),
(u'LicenseAgreementDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]End-User License Agreement', None, None),
(u'LicenseAgreementDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'LicenseAgreementDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'LicenseAgreementDlg', u'AgreementText', u'ScrollableText', 20, 60, 330, 120, 7, None, u'{\\rtf1\\ansi\\ansicpg1252\\deff0\\deftab720{\\fonttbl{\\f0\\froman\\fprq2 Times New Roman;}}{\\colortbl\\red0\\green0\\blue0;} \\deflang1033\\horzdoc{\\*\\fchars }{\\*\\lchars }\\pard\\plain\\f0\\fs20 <Your license agreement should go here.>\\par }', u'Buttons', None),
(u'LicenseAgreementDlg', u'Buttons', u'RadioButtonGroup', 20, 187, 330, 40, 3, u'IAgree', None, u'Back', None),
(u'MaintenanceTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'ChangeLabel', None),
(u'MaintenanceTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'MaintenanceTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'MaintenanceTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'MaintenanceTypeDlg', u'Description', u'Text', 25, 23, 280, 20, 196611, None, u'Select the operation you wish to perform.', None, None),
(u'MaintenanceTypeDlg', u'Title', u'Text', 15, 6, 240, 15, 196611, None, u'[DlgTitleFont]Modify, Repair or Remove installation', None, None),
(u'MaintenanceTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'MaintenanceTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'MaintenanceTypeDlg', u'ChangeLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Modify', u'ChangeButton', None),
(u'MaintenanceTypeDlg', u'ChangeButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'RepairLabel', u'Modify Installation|'),
(u'MaintenanceTypeDlg', u'RepairLabel', u'Text', 105, 114, 100, 10, 3, None, u'[DlgTitleFont]Re&pair', u'RepairButton', None),
(u'MaintenanceTypeDlg', u'ChangeText', u'Text', 105, 78, 230, 20, 3, None, u'Allows users to change the way features are installed.', None, None),
(u'MaintenanceTypeDlg', u'RemoveButton', u'PushButton', 50, 163, 38, 38, 5767171, None, u'[RemoveIcon]', u'Back', u'Remove Installation|'),
(u'MaintenanceTypeDlg', u'RemoveLabel', u'Text', 105, 163, 100, 10, 3, None, u'[DlgTitleFont]&Remove', u'RemoveButton', None),
(u'MaintenanceTypeDlg', u'RemoveText', u'Text', 105, 176, 230, 20, 3, None, u'Removes [ProductName] from your computer.', None, None),
(u'MaintenanceTypeDlg', u'RepairButton', u'PushButton', 50, 114, 38, 38, 5767171, None, u'[RepairIcon]', u'RemoveLabel', u'Repair Installation|'),
(u'MaintenanceTypeDlg', u'RepairText', u'Text', 105, 127, 230, 30, 3, None, u'Repairs errors in the most recent installation state - fixes missing or corrupt files, shortcuts and registry entries.', None, None),
(u'MaintenanceWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'MaintenanceWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'MaintenanceWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'MaintenanceWelcomeDlg', u'Description', u'Text', 135, 70, 220, 60, 196611, None, u'The [Wizard] will allow you to change the way [ProductName] features are installed on your computer or even to remove [ProductName] from your computer. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'MaintenanceWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'MaintenanceWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'MaintenanceWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'OutOfDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. You can remove some files from the highlighted volumes, choose to install fewer features onto local drive(s), or select different destination drive(s).', None, None),
(u'OutOfDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None),
(u'OutOfDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'OutOfDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'OutOfDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None),
(u'OutOfDiskDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None),
(u'OutOfDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None),
(u'OutOfDiskDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None),
(u'OutOfRbDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. You can remove some files from the highlighted volumes, choose to install fewer features onto local drive(s), or select different destination drive(s).', None, None),
(u'OutOfRbDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'No', None),
(u'OutOfRbDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'OutOfRbDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'OutOfRbDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None),
(u'OutOfRbDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None),
(u'OutOfRbDiskDlg', u'No', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None),
(u'OutOfRbDiskDlg', u'Yes', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Yes]', u'BannerBitmap', None),
(u'OutOfRbDiskDlg', u'VolumeList', u'VolumeCostList', 20, 140, 330, 80, 4587527, None, u'{120}{70}{70}{70}{70}', None, None),
(u'OutOfRbDiskDlg', u'Text2', u'Text', 20, 94, 330, 40, 3, None, u"Alternatively, you may choose to disable the installer's rollback functionality. Rollback allows the installer to restore your computer's original state should the installation be interrupted in any way. Click Yes if you wish to accept that risk and disable rollback.", None, None),
(u'ResumeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'ResumeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ResumeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'ResumeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will complete the installation of [ProductName] on your computer. Click Install to continue or Cancel to exit the [Wizard].', None, None),
(u'ResumeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Resuming the [ProductName] [Wizard]', None, None),
(u'ResumeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Install', None),
(u'ResumeDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None),
(u'SetupTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'TypicalLabel', None),
(u'SetupTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'SetupTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'SetupTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'SetupTypeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Choose the setup type that best suits your needs', None, None),
(u'SetupTypeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Choose Setup Type', None, None),
(u'SetupTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'SetupTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'SetupTypeDlg', u'TypicalLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Typical', u'TypicalButton', None),
(u'SetupTypeDlg', u'CompleteButton', u'PushButton', 50, 171, 38, 38, 5767171, None, u'[CompleteSetupIcon]', u'Back', u'Complete Installation|'),
(u'SetupTypeDlg', u'CompleteLabel', u'Text', 105, 171, 100, 10, 3, None, u'[DlgTitleFont]C&omplete', u'CompleteButton', None),
(u'SetupTypeDlg', u'CompleteText', u'Text', 105, 184, 230, 20, 3, None, u'All program features will be installed. (Requires most disk space)', None, None),
(u'SetupTypeDlg', u'CustomButton', u'PushButton', 50, 118, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'CompleteLabel', u'Custom Installation|'),
(u'SetupTypeDlg', u'CustomLabel', u'Text', 105, 118, 100, 10, 3, None, u'[DlgTitleFont]C&ustom', u'CustomButton', None),
(u'SetupTypeDlg', u'CustomText', u'Text', 105, 131, 230, 30, 3, None, u'Allows users to choose which program features will be installed and where they will be installed. Recommended for advanced users.', None, None),
(u'SetupTypeDlg', u'TypicalButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[InstallerIcon]', u'CustomLabel', u'Typical Installation|'),
(u'SetupTypeDlg', u'TypicalText', u'Text', 105, 78, 230, 20, 3, None, u'Installs the most common program features. Recommended for most users.', None, None),
(u'UserRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'NameLabel', None),
(u'UserRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'UserRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'UserRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'UserRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your customer information', None, None),
(u'UserRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Customer Information', None, None),
(u'UserRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'UserRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'UserRegistrationDlg', u'OrganizationLabel', u'Text', 45, 110, 100, 15, 3, None, u'&Organization:', u'OrganizationEdit', None),
(u'UserRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 159, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None),
(u'UserRegistrationDlg', u'CDKeyLabel', u'Text', 45, 147, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None),
(u'UserRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 122, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None),
(u'UserRegistrationDlg', u'NameLabel', u'Text', 45, 73, 100, 15, 3, None, u'&User Name:', u'NameEdit', None),
(u'UserRegistrationDlg', u'NameEdit', u'Edit', 45, 85, 220, 18, 3, u'USERNAME', u'{80}', u'OrganizationLabel', None),
(u'VerifyReadyDlg', u'Text', u'Text', 25, 70, 320, 20, 3, None, u'Click Install to begin the installation. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyReadyDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyReadyDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyReadyDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyReadyDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyReadyDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the [InstallMode] installation', None, None),
(u'VerifyReadyDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Ready to Install', None, None),
(u'VerifyReadyDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Install', None),
(u'VerifyReadyDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None),
(u'VerifyRemoveDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Remove to remove [ProductName] from your computer. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyRemoveDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyRemoveDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyRemoveDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyRemoveDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyRemoveDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'You have chosen to remove the program from your computer.', None, None),
(u'VerifyRemoveDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Remove [ProductName]', None, None),
(u'VerifyRemoveDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Remove', None),
(u'VerifyRemoveDlg', u'Remove', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Remove]', u'Cancel', None),
(u'VerifyRepairDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Repair to repair the installation of [ProductName]. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyRepairDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyRepairDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyRepairDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyRepairDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyRepairDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the repair of [ProductName].', None, None),
(u'VerifyRepairDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Repair [ProductName]', None, None),
(u'VerifyRepairDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Repair', None),
(u'VerifyRepairDlg', u'Repair', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Repair]', u'Cancel', None),
(u'WaitForCostingDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Please wait while the installer finishes determining your disk space requirements.', None, None),
(u'WaitForCostingDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[ExclamationIcon]', None, u'Exclamation icon|'),
(u'WaitForCostingDlg', u'Return', u'PushButton', 102, 57, 56, 17, 3, None, u'[ButtonText_Return]', None, None),
(u'WelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'WelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'WelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'WelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will install [ProductName] on your computer. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'WelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'WelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'WelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
]
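# The rows above follow the Windows Installer Control table schema:
# (Dialog_, Control, Type, X, Y, Width, Height, Attributes, Property,
# Text, Control_Next, Help), with coordinates in dialog units.
# ListBox table: (Property, Order, Value, Text). Empty, since none of
# these dialogs uses a ListBox control.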
ListBox = [
]
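# ActionText table: (Action, Description, Template). Description is the
# status text shown while the action runs; Template formats the action's
# progress messages, with [1], [2], ... substituted from the ActionData
# record.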
ActionText = [
(u'InstallValidate', u'Validating install', None),
(u'InstallFiles', u'Copying new files', u'File: [1], Directory: [9], Size: [6]'),
(u'InstallAdminPackage', u'Copying network install files', u'File: [1], Directory: [9], Size: [6]'),
(u'FileCost', u'Computing space requirements', None),
(u'CostInitialize', u'Computing space requirements', None),
(u'CostFinalize', u'Computing space requirements', None),
(u'CreateShortcuts', u'Creating shortcuts', u'Shortcut: [1]'),
(u'PublishComponents', u'Publishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'PublishFeatures', u'Publishing Product Features', u'Feature: [1]'),
(u'PublishProduct', u'Publishing product information', None),
(u'RegisterClassInfo', u'Registering Class servers', u'Class Id: [1]'),
(u'RegisterExtensionInfo', u'Registering extension servers', u'Extension: [1]'),
(u'RegisterMIMEInfo', u'Registering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'RegisterProgIdInfo', u'Registering program identifiers', u'ProgId: [1]'),
(u'AllocateRegistrySpace', u'Allocating registry space', u'Free space: [1]'),
(u'AppSearch', u'Searching for installed applications', u'Property: [1], Signature: [2]'),
(u'BindImage', u'Binding executables', u'File: [1]'),
(u'CCPSearch', u'Searching for qualifying products', None),
(u'CreateFolders', u'Creating folders', u'Folder: [1]'),
(u'DeleteServices', u'Deleting services', u'Service: [1]'),
(u'DuplicateFiles', u'Creating duplicate files', u'File: [1], Directory: [9], Size: [6]'),
(u'FindRelatedProducts', u'Searching for related applications', u'Found application: [1]'),
(u'InstallODBC', u'Installing ODBC components', None),
(u'InstallServices', u'Installing new services', u'Service: [2]'),
(u'LaunchConditions', u'Evaluating launch conditions', None),
(u'MigrateFeatureStates', u'Migrating feature states from related applications', u'Application: [1]'),
(u'MoveFiles', u'Moving files', u'File: [1], Directory: [9], Size: [6]'),
(u'PatchFiles', u'Patching files', u'File: [1], Directory: [2], Size: [3]'),
(u'ProcessComponents', u'Updating component registration', None),
(u'RegisterComPlus', u'Registering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2], Users: [3], RSN: [4]}}'),
(u'RegisterFonts', u'Registering fonts', u'Font: [1]'),
(u'RegisterProduct', u'Registering product', u'[1]'),
(u'RegisterTypeLibraries', u'Registering type libraries', u'LibID: [1]'),
(u'RegisterUser', u'Registering user', u'[1]'),
(u'RemoveDuplicateFiles', u'Removing duplicated files', u'File: [1], Directory: [9]'),
(u'RemoveEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'),
(u'RemoveExistingProducts', u'Removing applications', u'Application: [1], Command line: [2]'),
(u'RemoveFiles', u'Removing files', u'File: [1], Directory: [9]'),
(u'RemoveFolders', u'Removing folders', u'Folder: [1]'),
(u'RemoveIniValues', u'Removing INI files entries', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'RemoveODBC', u'Removing ODBC components', None),
(u'RemoveRegistryValues', u'Removing system registry values', u'Key: [1], Name: [2]'),
(u'RemoveShortcuts', u'Removing shortcuts', u'Shortcut: [1]'),
(u'RMCCPSearch', u'Searching for qualifying products', None),
(u'SelfRegModules', u'Registering modules', u'File: [1], Folder: [2]'),
(u'SelfUnregModules', u'Unregistering modules', u'File: [1], Folder: [2]'),
(u'SetODBCFolders', u'Initializing ODBC directories', None),
(u'StartServices', u'Starting services', u'Service: [1]'),
(u'StopServices', u'Stopping services', u'Service: [1]'),
(u'UnpublishComponents', u'Unpublishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'UnpublishFeatures', u'Unpublishing Product Features', u'Feature: [1]'),
(u'UnregisterClassInfo', u'Unregister Class servers', u'Class Id: [1]'),
(u'UnregisterComPlus', u'Unregistering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2]}}'),
(u'UnregisterExtensionInfo', u'Unregistering extension servers', u'Extension: [1]'),
(u'UnregisterFonts', u'Unregistering fonts', u'Font: [1]'),
(u'UnregisterMIMEInfo', u'Unregistering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'UnregisterProgIdInfo', u'Unregistering program identifiers', u'ProgId: [1]'),
(u'UnregisterTypeLibraries', u'Unregistering type libraries', u'LibID: [1]'),
(u'WriteEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'),
(u'WriteIniValues', u'Writing INI files values', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'WriteRegistryValues', u'Writing system registry values', u'Key: [1], Name: [2], Value: [3]'),
(u'Advertise', u'Advertising application', None),
(u'GenerateScript', u'Generating script operations for action:', u'[1]'),
(u'InstallSFPCatalogFile', u'Installing system catalog', u'File: [1], Dependencies: [2]'),
(u'MsiPublishAssemblies', u'Publishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'MsiUnpublishAssemblies', u'Unpublishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'Rollback', u'Rolling back action:', u'[1]'),
(u'RollbackCleanup', u'Removing backup files', u'File: [1]'),
(u'UnmoveFiles', u'Removing moved files', u'File: [1], Directory: [9]'),
(u'UnpublishProduct', u'Unpublishing product information', None),
]
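# ControlCondition table: (Dialog_, Control_, Action, Condition). When
# Condition evaluates true, the given Action (Default, Disable, Enable,
# Hide or Show) is applied to the named control.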
ControlCondition = [
(u'CustomizeDlg', u'Browse', u'Hide', u'Installed'),
(u'CustomizeDlg', u'Location', u'Hide', u'Installed'),
(u'CustomizeDlg', u'LocationLabel', u'Hide', u'Installed'),
(u'LicenseAgreementDlg', u'Next', u'Disable', u'IAgree <> "Yes"'),
(u'LicenseAgreementDlg', u'Next', u'Enable', u'IAgree = "Yes"'),
]
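# ControlEvent table: (Dialog_, Control_, Event, Argument, Condition,
# Ordering). Events fire when the control is activated and run in
# Ordering order when several share a control; an Event written in
# [brackets] sets that property to Argument instead (e.g. [InstallMode]).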
ControlEvent = [
(u'AdminWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminWelcomeDlg', u'Next', u'NewDialog', u'AdminRegistrationDlg', u'1', 2),
(u'AdminWelcomeDlg', u'Next', u'[InstallMode]', u'Server Image', u'1', 1),
(u'ExitDialog', u'Finish', u'EndDialog', u'Return', u'1', None),
(u'FatalError', u'Finish', u'EndDialog', u'Exit', u'1', None),
(u'PrepareDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'ProgressDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'UserExit', u'Finish', u'EndDialog', u'Exit', u'1', None),
(u'AdminBrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None),
(u'AdminBrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1),
(u'AdminBrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2),
(u'AdminBrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None),
(u'AdminBrowseDlg', u'OK', u'EndDialog', u'Return', u'1', 2),
(u'AdminBrowseDlg', u'OK', u'SetTargetPath', u'TARGETDIR', u'1', 1),
(u'AdminInstallPointDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminInstallPointDlg', u'Back', u'NewDialog', u'AdminRegistrationDlg', u'1', None),
(u'AdminInstallPointDlg', u'Next', u'SetTargetPath', u'TARGETDIR', u'1', 1),
(u'AdminInstallPointDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', 2),
(u'AdminInstallPointDlg', u'Browse', u'SpawnDialog', u'AdminBrowseDlg', u'1', None),
(u'AdminRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminRegistrationDlg', u'Back', u'NewDialog', u'AdminWelcomeDlg', u'1', None),
(u'AdminRegistrationDlg', u'Next', u'NewDialog', u'AdminInstallPointDlg', u'ProductID', 2),
(u'AdminRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1),
(u'BrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None),
(u'BrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1),
(u'BrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2),
(u'BrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None),
(u'BrowseDlg', u'OK', u'EndDialog', u'Return', u'1', 2),
(u'BrowseDlg', u'OK', u'SetTargetPath', u'[_BrowseProperty]', u'1', 1),
(u'CancelDlg', u'No', u'EndDialog', u'Return', u'1', None),
(u'CancelDlg', u'Yes', u'EndDialog', u'Exit', u'1', None),
(u'CustomizeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'CustomizeDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Change"', None),
(u'CustomizeDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Custom"', None),
(u'CustomizeDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', None),
(u'CustomizeDlg', u'Browse', u'SelectionBrowse', u'BrowseDlg', u'1', None),
(u'CustomizeDlg', u'Reset', u'Reset', u'0', u'1', None),
(u'CustomizeDlg', u'DiskCost', u'SpawnDialog', u'DiskCostDlg', u'1', 2),
(u'DiskCostDlg', u'OK', u'EndDialog', u'Return', u'1', None),
(u'ErrorDlg', u'Y', u'EndDialog', u'ErrorYes', u'1', None),
(u'ErrorDlg', u'A', u'EndDialog', u'ErrorAbort', u'1', None),
(u'ErrorDlg', u'C', u'EndDialog', u'ErrorCancel', u'1', None),
(u'ErrorDlg', u'I', u'EndDialog', u'ErrorIgnore', u'1', None),
(u'ErrorDlg', u'N', u'EndDialog', u'ErrorNo', u'1', None),
(u'ErrorDlg', u'O', u'EndDialog', u'ErrorOk', u'1', None),
(u'ErrorDlg', u'R', u'EndDialog', u'ErrorRetry', u'1', None),
(u'FilesInUse', u'Retry', u'EndDialog', u'Retry', u'1', None),
(u'FilesInUse', u'Exit', u'EndDialog', u'Exit', u'1', None),
(u'FilesInUse', u'Ignore', u'EndDialog', u'Ignore', u'1', None),
(u'LicenseAgreementDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'LicenseAgreementDlg', u'Back', u'NewDialog', u'WelcomeDlg', u'1', None),
(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg <> 1', 3),
(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'UserRegistrationDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg = 1', 1),
(u'LicenseAgreementDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2),
(u'MaintenanceTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'MaintenanceTypeDlg', u'Back', u'NewDialog', u'MaintenanceWelcomeDlg', u'1', None),
(u'MaintenanceTypeDlg', u'ChangeButton', u'NewDialog', u'CustomizeDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[InstallMode]', u'Change', u'1', 1),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress1]', u'Changing', u'1', 2),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress2]', u'changes', u'1', 3),
(u'MaintenanceTypeDlg', u'RemoveButton', u'NewDialog', u'VerifyRemoveDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[InstallMode]', u'Remove', u'1', 1),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress1]', u'Removing', u'1', 2),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress2]', u'removes', u'1', 3),
(u'MaintenanceTypeDlg', u'RepairButton', u'NewDialog', u'VerifyRepairDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'RepairButton', u'[InstallMode]', u'Repair', u'1', 1),
(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress1]', u'Repairing', u'1', 2),
(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress2]', u'repairs', u'1', 3),
(u'MaintenanceWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'MaintenanceWelcomeDlg', u'Next', u'NewDialog', u'MaintenanceTypeDlg', u'1', 2),
(u'MaintenanceWelcomeDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1),
(u'OutOfDiskDlg', u'OK', u'EndDialog', u'Return', u'1', None),
(u'OutOfRbDiskDlg', u'No', u'EndDialog', u'Return', u'1', None),
(u'OutOfRbDiskDlg', u'Yes', u'EndDialog', u'Return', u'1', 2),
(u'OutOfRbDiskDlg', u'Yes', u'EnableRollback', u'False', u'1', 1),
(u'ResumeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2),
(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6),
(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3),
(u'ResumeDlg', u'Install', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1),
(u'ResumeDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'SetupTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'SetupTypeDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'ShowUserRegistrationDlg <> 1', None),
(u'SetupTypeDlg', u'Back', u'NewDialog', u'UserRegistrationDlg', u'ShowUserRegistrationDlg = 1', None),
(u'SetupTypeDlg', u'CompleteButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3),
(u'SetupTypeDlg', u'CompleteButton', u'[InstallMode]', u'Complete', u'1', 1),
(u'SetupTypeDlg', u'CompleteButton', u'SetInstallLevel', u'1000', u'1', 2),
(u'SetupTypeDlg', u'CustomButton', u'NewDialog', u'CustomizeDlg', u'1', 2),
(u'SetupTypeDlg', u'CustomButton', u'[InstallMode]', u'Custom', u'1', 1),
(u'SetupTypeDlg', u'TypicalButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3),
(u'SetupTypeDlg', u'TypicalButton', u'[InstallMode]', u'Typical', u'1', 1),
(u'SetupTypeDlg', u'TypicalButton', u'SetInstallLevel', u'3', u'1', 2),
(u'UserRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'UserRegistrationDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'1', None),
(u'UserRegistrationDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'ProductID', 3),
(u'UserRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1),
(u'UserRegistrationDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2),
(u'VerifyReadyDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'AdminInstallPointDlg', u'InstallMode = "Server Image"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'CustomizeDlg', u'InstallMode = "Custom" OR InstallMode = "Change"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Repair"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Typical" OR InstallMode = "Complete"', None),
(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 3),
(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 1),
(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 5),
(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 2),
(u'VerifyReadyDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'VerifyRemoveDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyRemoveDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None),
(u'VerifyRemoveDlg', u'Remove', u'Remove', u'All', u'OutOfDiskSpace <> 1', 1),
(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2),
(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6),
(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3),
(u'VerifyRemoveDlg', u'Remove', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'VerifyRepairDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyRepairDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None),
(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 3),
(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 7),
(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 4),
(u'VerifyRepairDlg', u'Repair', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 6),
(u'VerifyRepairDlg', u'Repair', u'Reinstall', u'All', u'OutOfDiskSpace <> 1', 2),
(u'VerifyRepairDlg', u'Repair', u'ReinstallMode', u'ecmus', u'OutOfDiskSpace <> 1', 1),
(u'WaitForCostingDlg', u'Return', u'EndDialog', u'Exit', u'1', None),
(u'WelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'WelcomeDlg', u'Next', u'NewDialog', u'LicenseAgreementDlg', u'1', None),
]
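# Dialog table: (Dialog, HCentering, VCentering, Width, Height,
# Attributes, Title, Control_First, Control_Default, Control_Cancel).
# The last three columns name the control with initial focus, the
# default button and the cancel button.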
Dialog = [
(u'AdminWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
(u'ExitDialog', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'FatalError', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'PrepareDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'),
(u'ProgressDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'),
(u'UserExit', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'AdminBrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'),
(u'AdminInstallPointDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Text', u'Next', u'Cancel'),
(u'AdminRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OrganizationLabel', u'Next', u'Cancel'),
(u'BrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'),
(u'CancelDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'),
(u'CustomizeDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Tree', u'Next', u'Cancel'),
(u'DiskCostDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'),
(u'ErrorDlg', 50, 10, 270, 105, 65539, u'Installer Information', u'ErrorText', None, None),
(u'FilesInUse', 50, 50, 370, 270, 19, u'[ProductName] [Setup]', u'Retry', u'Retry', u'Retry'),
(u'LicenseAgreementDlg', 50, 50, 370, 270, 3, u'[ProductName] License Agreement', u'Buttons', u'Next', u'Cancel'),
(u'MaintenanceTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'ChangeLabel', u'ChangeButton', u'Cancel'),
(u'MaintenanceWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
(u'OutOfDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'),
(u'OutOfRbDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'),
(u'ResumeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'),
(u'SetupTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'TypicalLabel', u'TypicalButton', u'Cancel'),
(u'UserRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'NameLabel', u'Next', u'Cancel'),
(u'VerifyReadyDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'),
(u'VerifyRemoveDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Back', u'Back', u'Cancel'),
(u'VerifyRepairDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Repair', u'Repair', u'Cancel'),
(u'WaitForCostingDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'Return', u'Return', u'Return'),
(u'WelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
]
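# EventMapping table: (Dialog_, Control_, Event, Attribute). Subscribes
# a control attribute to an installer event; e.g. the ProgressDlg
# progress bar below tracks SetProgress.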
EventMapping = [
(u'PrepareDlg', u'ActionData', u'ActionData', u'Text'),
(u'PrepareDlg', u'ActionText', u'ActionText', u'Text'),
(u'ProgressDlg', u'ActionText', u'ActionText', u'Text'),
(u'ProgressDlg', u'ProgressBar', u'SetProgress', u'Progress'),
(u'AdminBrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'),
(u'BrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'),
(u'CustomizeDlg', u'Next', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'Reset', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'DiskCost', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'ItemDescription', u'SelectionDescription', u'Text'),
(u'CustomizeDlg', u'ItemSize', u'SelectionSize', u'Text'),
(u'CustomizeDlg', u'Location', u'SelectionPath', u'Text'),
(u'CustomizeDlg', u'Location', u'SelectionPathOn', u'Visible'),
(u'CustomizeDlg', u'LocationLabel', u'SelectionPathOn', u'Visible'),
]
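# InstallExecuteSequence table: (Action, Condition, Sequence). Standard
# actions ordered by ascending Sequence; an action with a Condition runs
# only when it evaluates true (e.g. service actions only on VersionNT).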
InstallExecuteSequence = [
(u'InstallValidate', None, 1400),
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'InstallFiles', None, 4000),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'CreateShortcuts', None, 4500),
(u'PublishComponents', None, 6200),
(u'PublishFeatures', None, 6300),
(u'PublishProduct', None, 6400),
(u'RegisterClassInfo', None, 4600),
(u'RegisterExtensionInfo', None, 4700),
(u'RegisterMIMEInfo', None, 4900),
(u'RegisterProgIdInfo', None, 4800),
(u'ValidateProductID', None, 700),
(u'AllocateRegistrySpace', u'NOT Installed', 1550),
(u'AppSearch', None, 400),
(u'BindImage', None, 4300),
(u'CCPSearch', u'NOT Installed', 500),
(u'CreateFolders', None, 3700),
(u'DeleteServices', u'VersionNT', 2000),
(u'DuplicateFiles', None, 4210),
(u'FindRelatedProducts', None, 200),
(u'InstallODBC', None, 5400),
(u'InstallServices', u'VersionNT', 5800),
(u'LaunchConditions', None, 100),
(u'MigrateFeatureStates', None, 1200),
(u'MoveFiles', None, 3800),
(u'PatchFiles', None, 4090),
(u'ProcessComponents', None, 1600),
(u'RegisterComPlus', None, 5700),
(u'RegisterFonts', None, 5300),
(u'RegisterProduct', None, 6100),
(u'RegisterTypeLibraries', None, 5500),
(u'RegisterUser', None, 6000),
(u'RemoveDuplicateFiles', None, 3400),
(u'RemoveEnvironmentStrings', None, 3300),
(u'RemoveExistingProducts', None, 6700),
(u'RemoveFiles', None, 3500),
(u'RemoveFolders', None, 3600),
(u'RemoveIniValues', None, 3100),
(u'RemoveODBC', None, 2400),
(u'RemoveRegistryValues', None, 2600),
(u'RemoveShortcuts', None, 3200),
(u'RMCCPSearch', u'NOT Installed', 600),
(u'SelfRegModules', None, 5600),
(u'SelfUnregModules', None, 2200),
(u'SetODBCFolders', None, 1100),
(u'StartServices', u'VersionNT', 5900),
(u'StopServices', u'VersionNT', 1900),
(u'UnpublishComponents', None, 1700),
(u'UnpublishFeatures', None, 1800),
(u'UnregisterClassInfo', None, 2700),
(u'UnregisterComPlus', None, 2100),
(u'UnregisterExtensionInfo', None, 2800),
(u'UnregisterFonts', None, 2500),
(u'UnregisterMIMEInfo', None, 3000),
(u'UnregisterProgIdInfo', None, 2900),
(u'UnregisterTypeLibraries', None, 2300),
(u'WriteEnvironmentStrings', None, 5200),
(u'WriteIniValues', None, 5100),
(u'WriteRegistryValues', None, 5000),
]
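# InstallUISequence table: (Action, Condition, Sequence). Drives the
# full-UI wizard. The commented-out rows are standard actions and the
# terminal dialogs (negative sequence numbers run on exit) that this
# file apparently leaves for the consumer to schedule.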
InstallUISequence = [
#(u'FileCost', None, 900),
#(u'CostInitialize', None, 800),
#(u'CostFinalize', None, 1000),
#(u'ExecuteAction', None, 1300),
#(u'ExitDialog', None, -1),
#(u'FatalError', None, -3),
(u'PrepareDlg', None, 140),
(u'ProgressDlg', None, 1280),
#(u'UserExit', None, -2),
(u'MaintenanceWelcomeDlg', u'Installed AND NOT RESUME AND NOT Preselected', 1250),
(u'ResumeDlg', u'Installed AND (RESUME OR Preselected)', 1240),
(u'WelcomeDlg', u'NOT Installed', 1230),
#(u'AppSearch', None, 400),
#(u'CCPSearch', u'NOT Installed', 500),
#(u'FindRelatedProducts', None, 200),
#(u'LaunchConditions', None, 100),
#(u'MigrateFeatureStates', None, 1200),
#(u'RMCCPSearch', u'NOT Installed', 600),
]
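# ListView table: (Property, Order, Value, Text, Binary_). Empty, since
# none of these dialogs uses a ListView control.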
ListView = [
]
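# RadioButton table: (Property, Order, Value, X, Y, Width, Height, Text,
# Help). Coordinates are relative to the owning RadioButtonGroup; here,
# the IAgree group on LicenseAgreementDlg.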
RadioButton = [
(u'IAgree', 1, u'Yes', 5, 0, 250, 15, u'{\\DlgFont8}I &accept the terms in the License Agreement', None),
(u'IAgree', 2, u'No', 5, 20, 250, 15, u'{\\DlgFont8}I &do not accept the terms in the License Agreement', None),
]
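# TextStyle table: (TextStyle, FaceName, Size, Color, StyleBits). Styles
# are referenced from control text as {\StyleName}; StyleBits 1 = bold.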
TextStyle = [
(u'DlgFont8', u'Tahoma', 8, None, 0),
(u'DlgFontBold8', u'Tahoma', 8, None, 1),
(u'VerdanaBold13', u'Verdana', 13, None, 1),
]
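# UIText table: (Key, Text). Short localizable strings the installer UI
# looks up by key, mostly for the CustomizeDlg selection tree and the
# disk-cost lists.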
UIText = [
(u'AbsentPath', None),
(u'bytes', u'bytes'),
(u'GB', u'GB'),
(u'KB', u'KB'),
(u'MB', u'MB'),
(u'MenuAbsent', u'Entire feature will be unavailable'),
(u'MenuAdvertise', u'Feature will be installed when required'),
(u'MenuAllCD', u'Entire feature will be installed to run from CD'),
(u'MenuAllLocal', u'Entire feature will be installed on local hard drive'),
(u'MenuAllNetwork', u'Entire feature will be installed to run from network'),
(u'MenuCD', u'Will be installed to run from CD'),
(u'MenuLocal', u'Will be installed on local hard drive'),
(u'MenuNetwork', u'Will be installed to run from network'),
(u'ScriptInProgress', u'Gathering required information...'),
(u'SelAbsentAbsent', u'This feature will remain uninstalled'),
(u'SelAbsentAdvertise', u'This feature will be set to be installed when required'),
(u'SelAbsentCD', u'This feature will be installed to run from CD'),
(u'SelAbsentLocal', u'This feature will be installed on the local hard drive'),
(u'SelAbsentNetwork', u'This feature will be installed to run from the network'),
(u'SelAdvertiseAbsent', u'This feature will become unavailable'),
(u'SelAdvertiseAdvertise', u'Will be installed when required'),
(u'SelAdvertiseCD', u'This feature will be available to run from CD'),
(u'SelAdvertiseLocal', u'This feature will be installed on your local hard drive'),
(u'SelAdvertiseNetwork', u'This feature will be available to run from the network'),
(u'SelCDAbsent', u"This feature will be uninstalled completely, and you won't be able to run it from CD"),
(u'SelCDAdvertise', u'This feature will change from running from CD to being installed when required'),
(u'SelCDCD', u'This feature will remain set to run from CD'),
(u'SelCDLocal', u'This feature will change from running from CD to being installed on the local hard drive'),
(u'SelChildCostNeg', u'This feature frees up [1] on your hard drive.'),
(u'SelChildCostPos', u'This feature requires [1] on your hard drive.'),
(u'SelCostPending', u'Compiling cost for this feature...'),
(u'SelLocalAbsent', u'This feature will be completely removed'),
(u'SelLocalAdvertise', u'This feature will be removed from your local hard drive, but will be set to be installed when required'),
(u'SelLocalCD', u'This feature will be removed from your local hard drive, but will still be available to run from CD'),
(u'SelLocalLocal', u'This feature will remain on your local hard drive'),
(u'SelLocalNetwork', u'This feature will be removed from your local hard drive, but will still be available to run from the network'),
(u'SelNetworkAbsent', u"This feature will be uninstalled completely, and you won't be able to run it from the network"),
(u'SelNetworkAdvertise', u'This feature will change from running from the network to being installed when required'),
(u'SelNetworkLocal', u'This feature will change from running from the network to being installed on the local hard drive'),
(u'SelNetworkNetwork', u'This feature will remain set to run from the network'),
(u'SelParentCostNegNeg', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
(u'SelParentCostNegPos', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
(u'SelParentCostPosNeg', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
(u'SelParentCostPosPos', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
(u'TimeRemaining', u'Time remaining: {[1] minutes }{[2] seconds}'),
(u'VolumeCostAvailable', u'Available'),
(u'VolumeCostDifference', u'Difference'),
(u'VolumeCostRequired', u'Required'),
(u'VolumeCostSize', u'Disk Size'),
(u'VolumeCostVolume', u'Volume'),
]
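# _Validation table: (Table, Column, Nullable, MinValue, MaxValue,
# KeyTable, KeyColumn, Category, Set, Description), i.e. column-level
# schema metadata consumed by MSI validation (ICE) tools.
#
# A minimal sketch of how these module-level tables are typically fed
# into a database with msilib (file name and product metadata are
# hypothetical; msilib is Windows-only):
#
#   import msilib
#   db = msilib.init_database('example.msi', msilib.schema, 'Example',
#                             msilib.gen_uuid(), '1.0.0', 'Vendor')
#   msilib.add_data(db, 'Dialog', Dialog)
#   msilib.add_data(db, 'ControlEvent', ControlEvent)
#   db.Commit()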
_Validation = [
(u'AdminExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdminExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdminExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'AdminUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdminUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdminUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'Condition', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Expression evaluated to determine if Level in the Feature table is to change.'),
(u'Condition', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Reference to a Feature entry in Feature table.'),
(u'Condition', u'Level', u'N', 0, 32767, None, None, None, None, u'New selection Level to set in Feature table if Condition evaluates to TRUE.'),
(u'AdvtExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdvtExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdvtExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'BBControl', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'),
(u'BBControl', u'BBControl', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. This name must be unique within a billboard, but can repeat on different billboard.'),
(u'BBControl', u'Billboard_', u'N', None, None, u'Billboard', 1, u'Identifier', None, u'External key to the Billboard table, name of the billboard.'),
(u'BBControl', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'BBControl', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'BBControl', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'),
(u'BBControl', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'),
(u'BBControl', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'),
(u'BBControl', u'Text', u'Y', None, None, None, None, u'Text', None, u'A string used to set the initial text contained within a control (if appropriate).'),
(u'Billboard', u'Action', u'Y', None, None, None, None, u'Identifier', None, u'The name of an action. The billboard is displayed during the progress messages received from this action.'),
(u'Billboard', u'Billboard', u'N', None, None, None, None, u'Identifier', None, u'Name of the billboard.'),
(u'Billboard', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'An external key to the Feature Table. The billboard is shown only if this feature is being installed.'),
(u'Billboard', u'Ordering', u'Y', 0, 32767, None, None, None, None, u'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.'),
(u'Binary', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Unique key identifying the binary data.'),
(u'Binary', u'Data', u'N', None, None, None, None, u'Binary', None, u'The unformatted binary data.'),
(u'CheckBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to the item.'),
(u'CheckBox', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value string associated with the item.'),
(u'Property', u'Property', u'N', None, None, None, None, u'Identifier', None, u'Name of property, uppercase if settable by launcher or loader.'),
(u'Property', u'Value', u'N', None, None, None, None, u'Text', None, u'String value for property. Never null or empty.'),
(u'ComboBox', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
(u'ComboBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.'),
(u'ComboBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
(u'ComboBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'Control', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'),
(u'Control', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'Control', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'Control', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'),
(u'Control', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'),
(u'Control', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'),
(u'Control', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'A string used to set the initial text contained within a control (if appropriate).'),
(u'Control', u'Property', u'Y', None, None, None, None, u'Identifier', None, u'The name of a defined property to be linked to this control. '),
(u'Control', u'Control', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. '),
(u'Control', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'External key to the Dialog table, name of the dialog.'),
(u'Control', u'Control_Next', u'Y', None, None, u'Control', 2, u'Identifier', None, u'The name of another control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!'),
(u'Control', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional. '),
(u'Icon', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Primary key. Name of the icon file.'),
(u'Icon', u'Data', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.'),
(u'ListBox', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
(u'ListBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.'),
(u'ListBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
(u'ListBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'ActionText', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to be described.'),
(u'ActionText', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description displayed in progress dialog and log when action is executing.'),
(u'ActionText', u'Template', u'Y', None, None, None, None, u'Template', None, u'Optional localized format template used to format action data records for display during action execution.'),
(u'ControlCondition', u'Action', u'N', None, None, None, None, None, u'Default;Disable;Enable;Hide;Show', u'The desired action to be taken on the specified control.'),
(u'ControlCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions the action should be triggered.'),
(u'ControlCondition', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'),
(u'ControlCondition', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'),
(u'ControlEvent', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions an event should be triggered.'),
(u'ControlEvent', u'Ordering', u'Y', 0, 2147483647, None, None, None, None, u'An integer used to order several events tied to the same control. Can be left blank.'),
(u'ControlEvent', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'),
(u'ControlEvent', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control'),
(u'ControlEvent', u'Event', u'N', None, None, None, None, u'Formatted', None, u'An identifier that specifies the type of the event that should take place when the user interacts with control specified by the first two entries.'),
(u'ControlEvent', u'Argument', u'N', None, None, None, None, u'Formatted', None, u'A value to be used as a modifier when triggering a particular event.'),
(u'Dialog', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the dialog.'),
(u'Dialog', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the dialog.'),
(u'Dialog', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this dialog.'),
(u'Dialog', u'Title', u'Y', None, None, None, None, u'Formatted', None, u"A text string specifying the title to be displayed in the title bar of the dialog's window."),
(u'Dialog', u'Dialog', u'N', None, None, None, None, u'Identifier', None, u'Name of the dialog.'),
(u'Dialog', u'HCentering', u'N', 0, 100, None, None, None, None, u'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.'),
(u'Dialog', u'VCentering', u'N', 0, 100, None, None, None, None, u'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.'),
(u'Dialog', u'Control_First', u'N', None, None, u'Control', 2, u'Identifier', None, u'Defines the control that has the focus when the dialog is created.'),
(u'Dialog', u'Control_Default', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the default control. Hitting return is equivalent to pushing this button.'),
(u'Dialog', u'Control_Cancel', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.'),
(u'EventMapping', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the Dialog.'),
(u'EventMapping', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'),
(u'EventMapping', u'Event', u'N', None, None, None, None, u'Identifier', None, u'An identifier that specifies the type of the event that the control subscribes to.'),
(u'EventMapping', u'Attribute', u'N', None, None, None, None, u'Identifier', None, u'The name of the control attribute, that is set when this event is received.'),
(u'InstallExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'InstallExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'InstallExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'AppSearch', u'Property', u'N', None, None, None, None, u'Identifier', None, u'The property associated with a Signature'),
(u'AppSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'),
(u'BindImage', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'The index into the File table. This must be an executable file.'),
(u'BindImage', u'Path', u'Y', None, None, None, None, u'Paths', None, u'A list of ;-delimited paths that represent the paths to be searched for the import DLLs. The list is usually a list of properties, each enclosed within square brackets [].'),
(u'CCPSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'),
(u'InstallUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'InstallUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'InstallUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'ListView', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
(u'ListView', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listview.'),
(u'ListView', u'Value', u'N', None, None, None, None, u'Identifier', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
(u'ListView', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'ListView', u'Binary_', u'Y', None, None, u'Binary', 1, u'Identifier', None, u'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.'),
(u'RadioButton', u'X', u'N', 0, 32767, None, None, None, None, u'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.'),
(u'RadioButton', u'Y', u'N', 0, 32767, None, None, None, None, u'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.'),
(u'RadioButton', u'Width', u'N', 0, 32767, None, None, None, None, u'The width of the button.'),
(u'RadioButton', u'Height', u'N', 0, 32767, None, None, None, None, u'The height of the button.'),
(u'RadioButton', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible title to be assigned to the radio button.'),
(u'RadioButton', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.'),
(u'RadioButton', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this button. Selecting the button will set the associated property to this value.'),
(u'RadioButton', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'RadioButton', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional.'),
(u'TextStyle', u'TextStyle', u'N', None, None, None, None, u'Identifier', None, u'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.'),
(u'TextStyle', u'FaceName', u'N', None, None, None, None, u'Text', None, u'A string indicating the name of the font used. Required. The string must be at most 31 characters long.'),
(u'TextStyle', u'Size', u'N', 0, 32767, None, None, None, None, u'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.'),
(u'TextStyle', u'Color', u'Y', 0, 16777215, None, None, None, None, u'A long integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).'),
(u'TextStyle', u'StyleBits', u'Y', 0, 15, None, None, None, None, u'A combination of style bits.'),
(u'UIText', u'Text', u'Y', None, None, None, None, u'Text', None, u'The localized version of the string.'),
(u'UIText', u'Key', u'N', None, None, None, None, u'Identifier', None, u'A unique key that identifies the particular string.'),
(u'_Validation', u'Table', u'N', None, None, None, None, u'Identifier', None, u'Name of table'),
(u'_Validation', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of column'),
(u'_Validation', u'Column', u'N', None, None, None, None, u'Identifier', None, u'Name of column'),
(u'_Validation', u'Nullable', u'N', None, None, None, None, None, u'Y;N;@', u'Whether the column is nullable'),
(u'_Validation', u'MinValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Minimum value allowed'),
(u'_Validation', u'MaxValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Maximum value allowed'),
(u'_Validation', u'KeyTable', u'Y', None, None, None, None, u'Identifier', None, u'For foreign key, Name of table to which data must link'),
(u'_Validation', u'KeyColumn', u'Y', 1, 32, None, None, None, None, u'Column to which foreign key connects'),
(u'_Validation', u'Category', u'Y', None, None, None, None, None, u'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL', u'String category'),
(u'_Validation', u'Set', u'Y', None, None, None, None, u'Text', None, u'Set of values that are permitted'),
(u'AdvtUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdvtUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdvtUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'AppId', u'AppId', u'N', None, None, None, None, u'Guid', None, None),
(u'AppId', u'ActivateAtStorage', u'Y', 0, 1, None, None, None, None, None),
(u'AppId', u'DllSurrogate', u'Y', None, None, None, None, u'Text', None, None),
(u'AppId', u'LocalService', u'Y', None, None, None, None, u'Text', None, None),
(u'AppId', u'RemoteServerName', u'Y', None, None, None, None, u'Formatted', None, None),
(u'AppId', u'RunAsInteractiveUser', u'Y', 0, 1, None, None, None, None, None),
(u'AppId', u'ServiceParameters', u'Y', None, None, None, None, u'Text', None, None),
(u'Feature', u'Attributes', u'N', None, None, None, None, None, u'0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54', u'Feature attributes'),
(u'Feature', u'Description', u'Y', None, None, None, None, u'Text', None, u'Longer descriptive text describing a visible feature item.'),
(u'Feature', u'Title', u'Y', None, None, None, None, u'Text', None, u'Short text identifying a visible feature item.'),
(u'Feature', u'Feature', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular feature record.'),
(u'Feature', u'Directory_', u'Y', None, None, u'Directory', 1, u'UpperCase', None, u'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.'),
(u'Feature', u'Level', u'N', 0, 32767, None, None, None, None, u'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.'),
(u'Feature', u'Display', u'Y', 0, 32767, None, None, None, None, u'Numeric sort order, used to force a specific display ordering.'),
(u'Feature', u'Feature_Parent', u'Y', None, None, u'Feature', 1, u'Identifier', None, u'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.'),
(u'File', u'Sequence', u'N', 1, 32767, None, None, None, None, u'Sequence with respect to the media images; order must track cabinet order.'),
(u'File', u'Attributes', u'Y', 0, 32767, None, None, None, None, u'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)'),
(u'File', u'File', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.'),
(u'File', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file.'),
(u'File', u'FileName', u'N', None, None, None, None, u'Filename', None, u'File name used for installation, may be localized. This may contain a "short name|long name" pair.'),
(u'File', u'FileSize', u'N', 0, 2147483647, None, None, None, None, u'Size of file in bytes (long integer).'),
(u'File', u'Language', u'Y', None, None, None, None, u'Language', None, u'List of decimal language Ids, comma-separated if more than one.'),
(u'File', u'Version', u'Y', None, None, u'File', 1, u'Version', None, u'Version string for versioned files; Blank for unversioned files.'),
(u'Class', u'Attributes', u'Y', None, 32767, None, None, None, None, u'Class registration attributes.'),
(u'Class', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'),
(u'Class', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Class.'),
(u'Class', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'Optional argument for LocalServers.'),
(u'Class', u'AppId_', u'Y', None, None, u'AppId', 1, u'Guid', None, u'Optional AppID containing DCOM information for associated application (string GUID).'),
(u'Class', u'CLSID', u'N', None, None, None, None, u'Guid', None, u'The CLSID of an OLE factory.'),
(u'Class', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
(u'Class', u'Context', u'N', None, None, None, None, u'Identifier', None, u'The numeric server context for this server. CLSCTX_xxxx'),
(u'Class', u'DefInprocHandler', u'Y', None, None, None, None, u'Filename', u'1;2;3', u'Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"'),
(u'Class', u'FileTypeMask', u'Y', None, None, None, None, u'Text', None, u'Optional string containing information for the HKCR\\FileType\\(this CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...'),
(u'Class', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.'),
(u'Class', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'),
(u'Class', u'ProgId_Default', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this CLSID.'),
(u'Component', u'Condition', u'Y', None, None, None, None, u'Condition', None, u"A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component."),
(u'Component', u'Attributes', u'N', None, None, None, None, None, None, u'Remote execution option, one of irsEnum'),
(u'Component', u'Component', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular component record.'),
(u'Component', u'ComponentId', u'Y', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'),
(u'Component', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.'),
(u'Component', u'KeyPath', u'Y', None, None, u'File;Registry;ODBCDataSource', 1, u'Identifier', None, u'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.'),
(u'ProgId', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Program identifier.'),
(u'ProgId', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.'),
(u'ProgId', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'),
(u'ProgId', u'ProgId', u'N', None, None, None, None, u'Text', None, u'The Program Identifier. Primary key.'),
(u'ProgId', u'Class_', u'Y', None, None, u'Class', 1, u'Guid', None, u'The CLSID of an OLE factory corresponding to the ProgId.'),
(u'ProgId', u'ProgId_Parent', u'Y', None, None, u'ProgId', 1, u'Text', None, u'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.'),
(u'CompLocator', u'Type', u'Y', 0, 1, None, None, None, None, u'A boolean value that determines if the registry value is a filename or a directory location.'),
(u'CompLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
(u'CompLocator', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'),
(u'Complus', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the ComPlus component.'),
(u'Complus', u'ExpType', u'Y', 0, 32767, None, None, None, None, u'ComPlus component attributes.'),
(u'Directory', u'Directory', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.'),
(u'Directory', u'DefaultDir', u'N', None, None, None, None, u'DefaultDir', None, u"The default sub-path under parent's path."),
(u'Directory', u'Directory_Parent', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.'),
(u'CreateFolder', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'),
(u'CreateFolder', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Primary key, could be foreign key into the Directory table.'),
(u'CustomAction', u'Type', u'N', 1, 16383, None, None, None, None, u'The numeric custom action type, consisting of source location, code type, entry, option flags.'),
(u'CustomAction', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Primary key, name of action, normally appears in sequence table unless private use.'),
(u'CustomAction', u'Source', u'Y', None, None, None, None, u'CustomSource', None, u'The table reference of the source of the code.'),
(u'CustomAction', u'Target', u'Y', None, None, None, None, u'Formatted', None, u'Execution parameter, depends on the type of custom action'),
(u'DrLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
(u'DrLocator', u'Path', u'Y', None, None, None, None, u'AnyPath', None, u'The path on the user system. This is either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.'),
(u'DrLocator', u'Depth', u'Y', 0, 32767, None, None, None, None, u'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.'),
(u'DrLocator', u'Parent', u'Y', None, None, None, None, u'Identifier', None, u'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.'),
(u'DuplicateFile', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key referencing the source file to be duplicated.'),
(u'DuplicateFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the duplicate file.'),
(u'DuplicateFile', u'DestFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.'),
(u'DuplicateFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Filename to be given to the duplicate file.'),
(u'DuplicateFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'),
(u'Environment', u'Name', u'N', None, None, None, None, u'Text', None, u'The name of the environmental value.'),
(u'Environment', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to set in the environmental settings.'),
(u'Environment', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the environmental value.'),
(u'Environment', u'Environment', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for the environmental variable setting'),
(u'Error', u'Error', u'N', 0, 32767, None, None, None, None, u'Integer error number, obtained from header file IError(...) macros.'),
(u'Error', u'Message', u'Y', None, None, None, None, u'Template', None, u'Error formatting template, obtained from user ed. or localizers.'),
(u'Extension', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'),
(u'Extension', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
(u'Extension', u'Extension', u'N', None, None, None, None, u'Text', None, u'The extension associated with the table row.'),
(u'Extension', u'MIME_', u'Y', None, None, u'MIME', 1, u'Text', None, u'Optional Context identifier, typically "type/format" associated with the extension'),
(u'Extension', u'ProgId_', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this extension.'),
(u'MIME', u'CLSID', u'Y', None, None, None, None, u'Guid', None, u'Optional associated CLSID.'),
(u'MIME', u'ContentType', u'N', None, None, None, None, u'Text', None, u'Primary key. Context identifier, typically "type/format".'),
(u'MIME', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'Optional associated extension (without dot)'),
(u'FeatureComponents', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'),
(u'FeatureComponents', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
(u'FileSFPCatalog', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'File associated with the catalog'),
(u'FileSFPCatalog', u'SFPCatalog_', u'N', None, None, u'SFPCatalog', 1, u'Filename', None, u'Catalog associated with the file'),
(u'SFPCatalog', u'SFPCatalog', u'N', None, None, None, None, u'Filename', None, u'File name for the catalog.'),
(u'SFPCatalog', u'Catalog', u'N', None, None, None, None, u'Binary', None, u'SFP Catalog'),
(u'SFPCatalog', u'Dependency', u'Y', None, None, None, None, u'Formatted', None, u'Parent catalog - only used by SFP'),
(u'Font', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing font file.'),
(u'Font', u'FontTitle', u'Y', None, None, None, None, u'Text', None, u'Font name.'),
(u'IniFile', u'Action', u'N', None, None, None, None, None, u'0;1;3', u'The type of modification to be made, one of iifEnum'),
(u'IniFile', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value to be written.'),
(u'IniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'),
(u'IniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the .INI value.'),
(u'IniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to write the information'),
(u'IniFile', u'IniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'IniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'),
(u'IniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'),
(u'IniLocator', u'Type', u'Y', 0, 2, None, None, None, None, u'An integer value that determines if the .INI value read is a filename or a directory location or is to be used as is without interpretation.'),
(u'IniLocator', u'Key', u'N', None, None, None, None, u'Text', None, u'Key value (followed by an equals sign in INI file).'),
(u'IniLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
(u'IniLocator', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name.'),
(u'IniLocator', u'Section', u'N', None, None, None, None, u'Text', None, u'Section name within the file (within square brackets in INI file).'),
(u'IniLocator', u'Field', u'Y', 0, 32767, None, None, None, None, u'The field in the .INI line. If Field is null or 0 the entire line is read.'),
(u'IsolatedComponent', u'Component_Application', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item for application'),
(u'IsolatedComponent', u'Component_Shared', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item to be isolated'),
(u'LaunchCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'Expression which must evaluate to TRUE in order for install to commence.'),
(u'LaunchCondition', u'Description', u'N', None, None, None, None, u'Formatted', None, u'Localizable text to display when condition fails and install must abort.'),
(u'LockPermissions', u'Table', u'N', None, None, None, None, u'Identifier', u'Directory;File;Registry', u'Reference to another table name'),
(u'LockPermissions', u'Domain', u'Y', None, None, None, None, u'Formatted', None, u'Domain name for user whose permissions are being set. (usually a property)'),
(u'LockPermissions', u'LockObject', u'N', None, None, None, None, u'Identifier', None, u'Foreign key into Registry or File table'),
(u'LockPermissions', u'Permission', u'Y', -2147483647, 2147483647, None, None, None, None, u'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)'),
(u'LockPermissions', u'User', u'N', None, None, None, None, u'Formatted', None, u'User for permissions to be set. (usually a property)'),
(u'Media', u'Source', u'Y', None, None, None, None, u'Property', None, u'The property defining the location of the cabinet file.'),
(u'Media', u'Cabinet', u'Y', None, None, None, None, u'Cabinet', None, u'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.'),
(u'Media', u'DiskId', u'N', 1, 32767, None, None, None, None, u'Primary key, integer to determine sort order for table.'),
(u'Media', u'DiskPrompt', u'Y', None, None, None, None, u'Text', None, u'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.'),
(u'Media', u'LastSequence', u'N', 0, 32767, None, None, None, None, u'File sequence number for the last file for this media.'),
(u'Media', u'VolumeLabel', u'Y', None, None, None, None, u'Text', None, u'The label attributed to the volume.'),
(u'ModuleComponents', u'Component', u'N', None, None, u'Component', 1, u'Identifier', None, u'Component contained in the module.'),
(u'ModuleComponents', u'Language', u'N', None, None, u'ModuleSignature', 2, None, None, u'Default language ID for module (may be changed by transform).'),
(u'ModuleComponents', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module containing the component.'),
(u'ModuleSignature', u'Language', u'N', None, None, None, None, None, None, u'Default decimal language of module.'),
(u'ModuleSignature', u'Version', u'N', None, None, None, None, u'Version', None, u'Version of the module.'),
(u'ModuleSignature', u'ModuleID', u'N', None, None, None, None, u'Identifier', None, u'Module identifier (String.GUID).'),
(u'ModuleDependency', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module requiring the dependency.'),
(u'ModuleDependency', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'Language of module requiring the dependency.'),
(u'ModuleDependency', u'RequiredID', u'N', None, None, None, None, None, None, u'String.GUID of required module.'),
(u'ModuleDependency', u'RequiredLanguage', u'N', None, None, None, None, None, None, u'LanguageID of the required module.'),
(u'ModuleDependency', u'RequiredVersion', u'Y', None, None, None, None, u'Version', None, u'Version of the required module.'),
(u'ModuleExclusion', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'String.GUID of module with exclusion requirement.'),
(u'ModuleExclusion', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'LanguageID of module with exclusion requirement.'),
(u'ModuleExclusion', u'ExcludedID', u'N', None, None, None, None, None, None, u'String.GUID of excluded module.'),
(u'ModuleExclusion', u'ExcludedLanguage', u'N', None, None, None, None, None, None, u'Language of excluded module.'),
(u'ModuleExclusion', u'ExcludedMaxVersion', u'Y', None, None, None, None, u'Version', None, u'Maximum version of excluded module.'),
(u'ModuleExclusion', u'ExcludedMinVersion', u'Y', None, None, None, None, u'Version', None, u'Minimum version of excluded module.'),
(u'MoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry'),
(u'MoveFile', u'DestFolder', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'),
(u'MoveFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file'),
(u'MoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular MoveFile record'),
(u'MoveFile', u'Options', u'N', 0, 1, None, None, None, None, u'Integer value specifying the MoveFile operating mode, one of imfoEnum'),
(u'MoveFile', u'SourceFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the source directory'),
(u'MoveFile', u'SourceName', u'Y', None, None, None, None, u'Text', None, u"Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards."),
(u'MsiAssembly', u'Attributes', u'Y', None, None, None, None, None, None, u'Assembly attributes'),
(u'MsiAssembly', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'),
(u'MsiAssembly', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
(u'MsiAssembly', u'File_Application', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.'),
(u'MsiAssembly', u'File_Manifest', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the manifest file for the assembly.'),
(u'MsiAssemblyName', u'Name', u'N', None, None, None, None, u'Text', None, u'The name part of the name-value pairs for the assembly name.'),
(u'MsiAssemblyName', u'Value', u'N', None, None, None, None, u'Text', None, u'The value part of the name-value pairs for the assembly name.'),
(u'MsiAssemblyName', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
(u'MsiDigitalCertificate', u'CertData', u'N', None, None, None, None, u'Binary', None, u'A certificate context blob for a signer certificate'),
(u'MsiDigitalCertificate', u'DigitalCertificate', u'N', None, None, None, None, u'Identifier', None, u'A unique identifier for the row'),
(u'MsiDigitalSignature', u'Table', u'N', None, None, None, None, None, u'Media', u'Reference to another table name (only Media table is supported)'),
(u'MsiDigitalSignature', u'DigitalCertificate_', u'N', None, None, u'MsiDigitalCertificate', 1, u'Identifier', None, u'Foreign key to MsiDigitalCertificate table identifying the signer certificate'),
(u'MsiDigitalSignature', u'Hash', u'Y', None, None, None, None, u'Binary', None, u'The encoded hash blob from the digital signature'),
(u'MsiDigitalSignature', u'SignObject', u'N', None, None, None, None, u'Text', None, u'Foreign key to Media table'),
(u'MsiFileHash', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing file with this hash'),
(u'MsiFileHash', u'Options', u'N', 0, 32767, None, None, None, None, u'Various options and attributes for this hash.'),
(u'MsiFileHash', u'HashPart1', u'N', None, None, None, None, None, None, u'First 32 bits of the file hash (long integer).'),
(u'MsiFileHash', u'HashPart2', u'N', None, None, None, None, None, None, u'Second 32 bits of the file hash (long integer).'),
(u'MsiFileHash', u'HashPart3', u'N', None, None, None, None, None, None, u'Third 32 bits of the file hash (long integer).'),
(u'MsiFileHash', u'HashPart4', u'N', None, None, None, None, None, None, u'Fourth 32 bits of the file hash (long integer).'),
(u'MsiPatchHeaders', u'StreamRef', u'N', None, None, None, None, u'Identifier', None, u'Primary key. A unique identifier for the row.'),
(u'MsiPatchHeaders', u'Header', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The patch header, used for patch validation.'),
(u'ODBCAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC driver attribute'),
(u'ODBCAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC driver attribute'),
(u'ODBCAttribute', u'Driver_', u'N', None, None, u'ODBCDriver', 1, u'Identifier', None, u'Reference to ODBC driver in ODBCDriver table'),
(u'ODBCDriver', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for driver, non-localized'),
(u'ODBCDriver', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key driver file'),
(u'ODBCDriver', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
(u'ODBCDriver', u'Driver', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for driver'),
(u'ODBCDriver', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key driver setup DLL'),
(u'ODBCDataSource', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for data source'),
(u'ODBCDataSource', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
(u'ODBCDataSource', u'DataSource', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for data source'),
(u'ODBCDataSource', u'DriverDescription', u'N', None, None, None, None, u'Text', None, u'Reference to driver description, may be existing driver'),
(u'ODBCDataSource', u'Registration', u'N', 0, 1, None, None, None, None, u'Registration option: 0=machine, 1=user, others t.b.d.'),
(u'ODBCSourceAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC data source attribute'),
(u'ODBCSourceAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC data source attribute'),
(u'ODBCSourceAttribute', u'DataSource_', u'N', None, None, u'ODBCDataSource', 1, u'Identifier', None, u'Reference to ODBC data source in ODBCDataSource table'),
(u'ODBCTranslator', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for translator'),
(u'ODBCTranslator', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key translator file'),
(u'ODBCTranslator', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
(u'ODBCTranslator', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key translator setup DLL'),
(u'ODBCTranslator', u'Translator', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for translator'),
(u'Patch', u'Sequence', u'N', 0, 32767, None, None, None, None, u'Primary key, sequence with respect to the media images; order must track cabinet order.'),
(u'Patch', u'Attributes', u'N', 0, 32767, None, None, None, None, u'Integer containing bit flags representing patch attributes'),
(u'Patch', u'File_', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.'),
(u'Patch', u'Header', u'Y', None, None, None, None, u'Binary', None, u'Binary stream. The patch header, used for patch validation.'),
(u'Patch', u'PatchSize', u'N', 0, 2147483647, None, None, None, None, u'Size of patch in bytes (long integer).'),
(u'Patch', u'StreamRef_', u'Y', None, None, None, None, u'Identifier', None, u'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.'),
(u'PatchPackage', u'Media_', u'N', 0, 32767, None, None, None, None, u'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.'),
(u'PatchPackage', u'PatchId', u'N', None, None, None, None, u'Guid', None, u'A unique string GUID representing this patch.'),
(u'PublishComponent', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into the Feature table.'),
(u'PublishComponent', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'),
(u'PublishComponent', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID that represents the component id that will be requested by the alien product.'),
(u'PublishComponent', u'AppData', u'Y', None, None, None, None, u'Text', None, u'This is localizable, application-specific data that can be associated with a Qualified Component.'),
(u'PublishComponent', u'Qualifier', u'N', None, None, None, None, u'Text', None, u'This is defined only when the ComponentId column is a Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.'),
(u'Registry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
(u'Registry', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The registry value.'),
(u'Registry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
(u'Registry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the registry value.'),
(u'Registry', u'Registry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'Registry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'),
(u'RegLocator', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
(u'RegLocator', u'Type', u'Y', 0, 18, None, None, None, None, u'An integer value that determines if the registry value is a filename or a directory location or is to be used as is without interpretation.'),
(u'RegLocator', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
(u'RegLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry value refers to a directory, and Signature_ is not a foreign key.'),
(u'RegLocator', u'Root', u'N', 0, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'),
(u'RemoveFile', u'InstallMode', u'N', None, None, None, None, None, u'1;2;3', u'Installation option, one of iimEnum.'),
(u'RemoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file to be removed.'),
(u'RemoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'),
(u'RemoveFile', u'FileName', u'Y', None, None, None, None, u'WildCardFilename', None, u'Name of the file to be removed.'),
(u'RemoveFile', u'DirProperty', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.'),
(u'RemoveIniFile', u'Action', u'N', None, None, None, None, None, u'2;4', u'The type of modification to be made, one of iifEnum.'),
(u'RemoveIniFile', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to be deleted. The value is required when Action is iifIniRemoveTag'),
(u'RemoveIniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'),
(u'RemoveIniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the .INI value.'),
(u'RemoveIniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to delete the information'),
(u'RemoveIniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'),
(u'RemoveIniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'),
(u'RemoveIniFile', u'RemoveIniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'RemoveRegistry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
(u'RemoveRegistry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
(u'RemoveRegistry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the registry value.'),
(u'RemoveRegistry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum'),
(u'RemoveRegistry', u'RemoveRegistry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'ReserveCost', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reserve a specified amount of space if this component is to be installed.'),
(u'ReserveCost', u'ReserveFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'),
(u'ReserveCost', u'ReserveKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular ReserveCost record'),
(u'ReserveCost', u'ReserveLocal', u'N', 0, 2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed locally.'),
(u'ReserveCost', u'ReserveSource', u'N', 0, 2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed to run from the source location.'),
(u'SelfReg', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the module that needs to be registered.'),
(u'SelfReg', u'Cost', u'Y', 0, 32767, None, None, None, None, u'The cost of registering the module.'),
(u'ServiceControl', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Name of a service. /, \\, comma and space are invalid'),
(u'ServiceControl', u'Event', u'N', 0, 187, None, None, None, None, u'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete'),
(u'ServiceControl', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'),
(u'ServiceControl', u'ServiceControl', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'ServiceControl', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments for the service. Separate by [~].'),
(u'ServiceControl', u'Wait', u'Y', 0, 1, None, None, None, None, u'Boolean for whether to wait for the service to fully start'),
(u'ServiceInstall', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Internal Name of the Service'),
(u'ServiceInstall', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of service.'),
(u'ServiceInstall', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'),
(u'ServiceInstall', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments to include in every start of the service, passed to WinMain'),
(u'ServiceInstall', u'ServiceInstall', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'ServiceInstall', u'Dependencies', u'Y', None, None, None, None, u'Formatted', None, u'Other services this depends on to start. Separate by [~], and end with [~][~]'),
(u'ServiceInstall', u'DisplayName', u'Y', None, None, None, None, u'Formatted', None, u'External Name of the Service'),
(u'ServiceInstall', u'ErrorControl', u'N', -2147483647, 2147483647, None, None, None, None, u'Severity of error if service fails to start'),
(u'ServiceInstall', u'LoadOrderGroup', u'Y', None, None, None, None, u'Formatted', None, u'LoadOrderGroup'),
(u'ServiceInstall', u'Password', u'Y', None, None, None, None, u'Formatted', None, u'password to run service with. (with StartName)'),
(u'ServiceInstall', u'ServiceType', u'N', -2147483647, 2147483647, None, None, None, None, u'Type of the service'),
(u'ServiceInstall', u'StartName', u'Y', None, None, None, None, u'Formatted', None, u'User or object name to run service as'),
(u'ServiceInstall', u'StartType', u'N', 0, 4, None, None, None, None, u'Type of the service'),
(u'Shortcut', u'Name', u'N', None, None, None, None, u'Filename', None, u'The name of the shortcut to be created.'),
(u'Shortcut', u'Description', u'Y', None, None, None, None, u'Text', None, u'The description for the shortcut.'),
(u'Shortcut', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table denoting the component whose selection gates the shortcut creation/deletion.'),
(u'Shortcut', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Foreign key into the File table denoting the external icon file for the shortcut.'),
(u'Shortcut', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'The icon index for the shortcut.'),
(u'Shortcut', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the shortcut file is created.'),
(u'Shortcut', u'Target', u'N', None, None, None, None, u'Shortcut', None, u'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.'),
(u'Shortcut', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'The command-line arguments for the shortcut.'),
(u'Shortcut', u'Shortcut', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'Shortcut', u'Hotkey', u'Y', 0, 32767, None, None, None, None, u'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. '),
(u'Shortcut', u'ShowCmd', u'Y', None, None, None, None, None, u'1;3;7', u'The show command for the application window. The following values may be used.'),
(u'Shortcut', u'WkDir', u'Y', None, None, None, None, u'Identifier', None, u'Name of property defining location of working directory.'),
(u'Signature', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The name of the file. This may contain a "short name|long name" pair.'),
(u'Signature', u'Signature', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature represents a unique file signature.'),
(u'Signature', u'Languages', u'Y', None, None, None, None, u'Language', None, u'The languages supported by the file.'),
(u'Signature', u'MaxDate', u'Y', 0, 2147483647, None, None, None, None, u'The maximum creation date of the file.'),
(u'Signature', u'MaxSize', u'Y', 0, 2147483647, None, None, None, None, u'The maximum size of the file. '),
(u'Signature', u'MaxVersion', u'Y', None, None, None, None, u'Text', None, u'The maximum version of the file.'),
(u'Signature', u'MinDate', u'Y', 0, 2147483647, None, None, None, None, u'The minimum creation date of the file.'),
(u'Signature', u'MinSize', u'Y', 0, 2147483647, None, None, None, None, u'The minimum size of the file.'),
(u'Signature', u'MinVersion', u'Y', None, None, None, None, u'Text', None, u'The minimum version of the file.'),
(u'TypeLib', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.'),
(u'TypeLib', u'Description', u'Y', None, None, None, None, u'Text', None, None),
(u'TypeLib', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
(u'TypeLib', u'Directory_', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.'),
(u'TypeLib', u'Language', u'N', 0, 32767, None, None, None, None, u'The language of the library.'),
(u'TypeLib', u'Version', u'Y', 0, 16777215, None, None, None, None, u'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. '),
(u'TypeLib', u'Cost', u'Y', 0, 2147483647, None, None, None, None, u'The cost associated with the registration of the typelib. This column is currently optional.'),
(u'TypeLib', u'LibID', u'N', None, None, None, None, u'Guid', None, u'The GUID that represents the library.'),
(u'Upgrade', u'Attributes', u'N', 0, 2147483647, None, None, None, None, u'The attributes of this product set.'),
(u'Upgrade', u'Remove', u'Y', None, None, None, None, u'Formatted', None, u'The list of features to remove when uninstalling a product from this set. The default is "ALL".'),
(u'Upgrade', u'Language', u'Y', None, None, None, None, u'Language', None, u'A comma-separated list of languages for either products in this set or products not in this set.'),
(u'Upgrade', u'ActionProperty', u'N', None, None, None, None, u'UpperCase', None, u'The property to set when a product in this set is found.'),
(u'Upgrade', u'UpgradeCode', u'N', None, None, None, None, u'Guid', None, u'The UpgradeCode GUID belonging to the products in this set.'),
(u'Upgrade', u'VersionMax', u'Y', None, None, None, None, u'Text', None, u'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.'),
(u'Upgrade', u'VersionMin', u'Y', None, None, None, None, u'Text', None, u'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.'),
(u'Verb', u'Sequence', u'Y', 0, 32767, None, None, None, None, u'Order within the verbs for a particular extension. Also used simply to specify the default verb.'),
(u'Verb', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'Optional value for the command arguments.'),
(u'Verb', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'The extension associated with the table row.'),
(u'Verb', u'Verb', u'N', None, None, None, None, u'Text', None, u'The verb for the command.'),
(u'Verb', u'Command', u'Y', None, None, None, None, u'Formatted', None, u'The command text.'),
]
Error = [
(0, u'{{Fatal error: }}'),
(1, u'{{Error [1]. }}'),
(2, u'Warning [1]. '),
(3, None),
(4, u'Info [1]. '),
(5, u'The installer has encountered an unexpected error installing this package. This may indicate a problem with this package. The error code is [1]. {{The arguments are: [2], [3], [4]}}'),
(6, None),
(7, u'{{Disk full: }}'),
(8, u'Action [Time]: [1]. [2]'),
(9, u'[ProductName]'),
(10, u'{[2]}{, [3]}{, [4]}'),
(11, u'Message type: [1], Argument: [2]'),
(12, u'=== Logging started: [Date] [Time] ==='),
(13, u'=== Logging stopped: [Date] [Time] ==='),
(14, u'Action start [Time]: [1].'),
(15, u'Action ended [Time]: [1]. Return value [2].'),
(16, u'Time remaining: {[1] minutes }{[2] seconds}'),
(17, u'Out of memory. Shut down other applications before retrying.'),
(18, u'Installer is no longer responding.'),
(19, u'Installer stopped prematurely.'),
(20, u'Please wait while Windows configures [ProductName]'),
(21, u'Gathering required information...'),
(22, u'Removing older versions of this application...'),
(23, u'Preparing to remove older versions of this application...'),
(32, u'{[ProductName] }Setup completed successfully.'),
(33, u'{[ProductName] }Setup failed.'),
(1101, u'Error reading from file: [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'),
(1301, u"Cannot create the file '[2]'. A directory with this name already exists. Cancel the install and try installing to a different location."),
(1302, u'Please insert the disk: [2]'),
(1303, u'The installer has insufficient privileges to access this directory: [2]. The installation cannot continue. Log on as administrator or contact your system administrator.'),
(1304, u'Error writing to file: [2]. Verify that you have access to that directory.'),
(1305, u'Error reading from file [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'),
(1306, u"Another application has exclusive access to the file '[2]'. Please shut down all other applications, then click Retry."),
(1307, u'There is not enough disk space to install this file: [2]. Free some disk space and click Retry, or click Cancel to exit.'),
(1308, u'Source file not found: [2]. Verify that the file exists and that you can access it.'),
(1309, u'Error reading from file: [3]. {{ System error [2].}} Verify that the file exists and that you can access it.'),
(1310, u'Error writing to file: [3]. {{ System error [2].}} Verify that you have access to that directory.'),
(1311, u'Source file not found{{(cabinet)}}: [2]. Verify that the file exists and that you can access it.'),
(1312, u"Cannot create the directory '[2]'. A file with this name already exists. Please rename or remove the file and click retry, or click Cancel to exit."),
(1313, u'The volume [2] is currently unavailable. Please select another.'),
(1314, u"The specified path '[2]' is unavailable."),
(1315, u'Unable to write to the specified folder: [2].'),
(1316, u'A network error occurred while attempting to read from the file: [2]'),
(1317, u'An error occurred while attempting to create the directory: [2]'),
(1318, u'A network error occurred while attempting to create the directory: [2]'),
(1319, u'A network error occurred while attempting to open the source file cabinet: [2]'),
(1320, u'The specified path is too long: [2]'),
(1321, u'The Installer has insufficient privileges to modify this file: [2].'),
(1322, u"A portion of the folder path '[2]' is invalid. It is either empty or exceeds the length allowed by the system."),
(1323, u"The folder path '[2]' contains words that are not valid in folder paths."),
(1324, u"The folder path '[2]' contains an invalid character."),
(1325, u"'[2]' is not a valid short file name."),
(1326, u'Error getting file security: [3] GetLastError: [2]'),
(1327, u'Invalid Drive: [2]'),
(1328, u'Error applying patch to file [2]. It has probably been updated by other means, and can no longer be modified by this patch. For more information contact your patch vendor. {{System Error: [3]}}'),
(1329, u'A file that is required cannot be installed because the cabinet file [2] is not digitally signed. This may indicate that the cabinet file is corrupt.'),
(1330, u'A file that is required cannot be installed because the cabinet file [2] has an invalid digital signature. This may indicate that the cabinet file is corrupt.{{ Error [3] was returned by WinVerifyTrust.}}'),
(1331, u'Failed to correctly copy [2] file: CRC error.'),
(1332, u'Failed to correctly move [2] file: CRC error.'),
(1333, u'Failed to correctly patch [2] file: CRC error.'),
(1334, u"The file '[2]' cannot be installed because the file cannot be found in cabinet file '[3]'. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."),
(1335, u"The cabinet file '[2]' required for this installation is corrupt and cannot be used. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."),
(1336, u'There was an error creating a temporary file that is needed to complete this installation.{{ Folder: [3]. System error code: [2]}}'),
(1401, u'Could not create key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1402, u'Could not open key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1403, u'Could not delete value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1404, u'Could not delete key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1405, u'Could not read value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1406, u'Could not write value [2] to key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1407, u'Could not get value names for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1408, u'Could not get sub key names for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1409, u'Could not read security information for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1410, u'Could not increase the available registry space. [2] KB of free registry space is required for the installation of this application.'),
(1500, u'Another installation is in progress. You must complete that installation before continuing this one.'),
(1501, u'Error accessing secured data. Please make sure the Windows Installer is configured properly and try the install again.'),
(1502, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product. Your current install will now continue."),
(1503, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product."),
(1601, u"Out of disk space -- Volume: '[2]'; required space: [3] KB; available space: [4] KB. Free some disk space and retry."),
(1602, u'Are you sure you want to cancel?'),
(1603, u"The file [2][3] is being held in use{ by the following process: Name: [4], Id: [5], Window Title: '[6]'}. Close that application and retry."),
(1604, u"The product '[2]' is already installed, preventing the installation of this product. The two products are incompatible."),
(1605, u"There is not enough disk space on the volume '[2]' to continue the install with recovery enabled. [3] KB are required, but only [4] KB are available. Click Ignore to continue the install without saving recovery information, click Retry to check for available space again, or click Cancel to quit the installation."),
(1606, u'Could not access network location [2].'),
(1607, u'The following applications should be closed before continuing the install:'),
(1608, u'Could not find any previously installed compliant products on the machine for installing this product.'),
(1609, u"An error occurred while applying security settings. [2] is not a valid user or group. This could be a problem with the package, or a problem connecting to a domain controller on the network. Check your network connection and click Retry, or Cancel to end the install. {{Unable to locate the user's SID, system error [3]}}"),
(1701, u'The key [2] is not valid. Verify that you entered the correct key.'),
(1702, u'The installer must restart your system before configuration of [2] can continue. Click Yes to restart now or No if you plan to manually restart later.'),
(1703, u'You must restart your system for the configuration changes made to [2] to take effect. Click Yes to restart now or No if you plan to manually restart later.'),
(1704, u'An installation for [2] is currently suspended. You must undo the changes made by that installation to continue. Do you want to undo those changes?'),
(1705, u'A previous installation for this product is in progress. You must undo the changes made by that installation to continue. Do you want to undo those changes?'),
(1706, u"An installation package for the product [2] cannot be found. Try the installation again using a valid copy of the installation package '[3]'."),
(1707, u'Installation completed successfully.'),
(1708, u'Installation failed.'),
(1709, u'Product: [2] -- [3]'),
(1710, u'You may either restore your computer to its previous state or continue the install later. Would you like to restore?'),
(1711, u'An error occurred while writing installation information to disk. Check to make sure enough disk space is available, and click Retry, or Cancel to end the install.'),
(1712, u'One or more of the files required to restore your computer to its previous state could not be found. Restoration will not be possible.'),
(1713, u'[2] cannot install one of its required products. Contact your technical support group. {{System Error: [3].}}'),
(1714, u'The older version of [2] cannot be removed. Contact your technical support group. {{System Error [3].}}'),
(1715, u'Installed [2]'),
(1716, u'Configured [2]'),
(1717, u'Removed [2]'),
(1718, u'File [2] was rejected by digital signature policy.'),
(1719, u'The Windows Installer Service could not be accessed. This can occur if you are running Windows in safe mode, or if the Windows Installer is not correctly installed. Contact your support personnel for assistance.'),
(1720, u'There is a problem with this Windows Installer package. A script required for this install to complete could not be run. Contact your support personnel or package vendor. {{Custom action [2] script error [3], [4]: [5] Line [6], Column [7], [8] }}'),
(1721, u'There is a problem with this Windows Installer package. A program required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action: [2], location: [3], command: [4] }}'),
(1722, u'There is a problem with this Windows Installer package. A program run as part of the setup did not finish as expected. Contact your support personnel or package vendor. {{Action [2], location: [3], command: [4] }}'),
(1723, u'There is a problem with this Windows Installer package. A DLL required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action [2], entry: [3], library: [4] }}'),
(1724, u'Removal completed successfully.'),
(1725, u'Removal failed.'),
(1726, u'Advertisement completed successfully.'),
(1727, u'Advertisement failed.'),
(1728, u'Configuration completed successfully.'),
(1729, u'Configuration failed.'),
(1730, u'You must be an Administrator to remove this application. To remove this application, you can log on as an Administrator, or contact your technical support group for assistance.'),
(1801, u'The path [2] is not valid. Please specify a valid path.'),
(1802, u'Out of memory. Shut down other applications before retrying.'),
(1803, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to go back to the previously selected volume.'),
(1804, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to return to the browse dialog and select a different volume.'),
(1805, u'The folder [2] does not exist. Please enter a path to an existing folder.'),
(1806, u'You have insufficient privileges to read this folder.'),
(1807, u'A valid destination folder for the install could not be determined.'),
(1901, u'Error attempting to read from the source install database: [2].'),
(1902, u'Scheduling reboot operation: Renaming file [2] to [3]. Must reboot to complete operation.'),
(1903, u'Scheduling reboot operation: Deleting file [2]. Must reboot to complete operation.'),
(1904, u'Module [2] failed to register. HRESULT [3]. Contact your support personnel.'),
(1905, u'Module [2] failed to unregister. HRESULT [3]. Contact your support personnel.'),
(1906, u'Failed to cache package [2]. Error: [3]. Contact your support personnel.'),
(1907, u'Could not register font [2]. Verify that you have sufficient permissions to install fonts, and that the system supports this font.'),
(1908, u'Could not unregister font [2]. Verify that you have sufficient permissions to remove fonts.'),
(1909, u'Could not create Shortcut [2]. Verify that the destination folder exists and that you can access it.'),
(1910, u'Could not remove Shortcut [2]. Verify that the shortcut file exists and that you can access it.'),
(1911, u'Could not register type library for file [2]. Contact your support personnel.'),
(1912, u'Could not unregister type library for file [2]. Contact your support personnel.'),
(1913, u'Could not update the ini file [2][3]. Verify that the file exists and that you can access it.'),
(1914, u'Could not schedule file [2] to replace file [3] on reboot. Verify that you have write permissions to file [3].'),
(1915, u'Error removing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'),
(1916, u'Error installing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'),
(1917, u'Error removing ODBC driver: [4], ODBC error [2]: [3]. Verify that you have sufficient privileges to remove ODBC drivers.'),
(1918, u'Error installing ODBC driver: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'),
(1919, u'Error configuring ODBC data source: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'),
(1920, u"Service '[2]' ([3]) failed to start. Verify that you have sufficient privileges to start system services."),
(1921, u"Service '[2]' ([3]) could not be stopped. Verify that you have sufficient privileges to stop system services."),
(1922, u"Service '[2]' ([3]) could not be deleted. Verify that you have sufficient privileges to remove system services."),
(1923, u"Service '[2]' ([3]) could not be installed. Verify that you have sufficient privileges to install system services."),
(1924, u"Could not update environment variable '[2]'. Verify that you have sufficient privileges to modify environment variables."),
(1925, u'You do not have sufficient privileges to complete this installation for all users of the machine. Log on as administrator and then retry this installation.'),
(1926, u"Could not set file security for file '[3]'. Error: [2]. Verify that you have sufficient privileges to modify the security permissions for this file."),
(1927, u'Component Services (COM+ 1.0) are not installed on this computer. This installation requires Component Services in order to complete successfully. Component Services are available on Windows 2000.'),
(1928, u'Error registering COM+ Application. Contact your support personnel for more information.'),
(1929, u'Error unregistering COM+ Application. Contact your support personnel for more information.'),
(1930, u"The description for service '[2]' ([3]) could not be changed."),
(1931, u'The Windows Installer service cannot update the system file [2] because the file is protected by Windows. You may need to update your operating system for this program to work correctly. {{Package version: [3], OS Protected version: [4]}}'),
(1932, u'The Windows Installer service cannot update the protected Windows file [2]. {{Package version: [3], OS Protected version: [4], SFP Error: [5]}}'),
(1933, u'The Windows Installer service cannot update one or more protected Windows files. {{SFP Error: [2]. List of protected files:\\r\\n[3]}}'),
(1934, u'User installations are disabled via policy on the machine.'),
(1935, u'An error occurred during the installation of assembly component [2]. HRESULT: [3]. {{assembly interface: [4], function: [5], assembly name: [6]}}'),
]
tables=['AdminExecuteSequence', 'AdminUISequence', 'AdvtExecuteSequence', 'BBControl', 'Billboard', 'Binary', 'CheckBox', 'Property', 'ComboBox', 'Control', 'ListBox', 'ActionText', 'ControlCondition', 'ControlEvent', 'Dialog', 'EventMapping', 'InstallExecuteSequence', 'InstallUISequence', 'ListView', 'RadioButton', 'TextStyle', 'UIText', '_Validation', 'Error']
| apache-2.0 |
bchappet/dnfpy | src/dnfpy/cellular/c_implementation/test_rsdnf.py | 1 | 1471 | import numpy as np
from ctypes import *
import numpy.ctypeslib as npct
libac = npct.load_library("libac", "lib/")
PP_UBYTE = POINTER(POINTER(c_ubyte))
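# ctypes callback type matching `void (*)(unsigned char **, unsigned char **)`:
# the C stepper calls back into Python once per cell, passing the cell's data
# and its neighbourhood.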
CELL_FUNC = CFUNCTYPE(None, PP_UBYTE, PP_UBYTE)
cell_fun_c = libac.compute_cell_rsdnf
cell_fun_c.argtypes = [PP_UBYTE,PP_UBYTE]
def cell_func_py(data,neighs):
cell_fun_c(data,neighs)
cell_fun = CELL_FUNC(cell_func_py)
if __name__ == "__main__":
size = 20
depth = 5
buffs = [ np.zeros((size,size,depth),dtype=np.uint8),
np.zeros((size,size,depth),dtype=np.uint8)]
current = 0
fun = libac.synchronous_step_neumann
N = 10
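    # seed the centre cell: depth channels 1-4 carry the four transmission directions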
    buffs[current][size//2,size//2,1] = N
    buffs[current][size//2,size//2,2] = N
    buffs[current][size//2,size//2,3] = N
    buffs[current][size//2,size//2,4] = N
print("Before")
print(buffs[current][:,:,0])
fun.argtypes = [
np.ctypeslib.ndpointer(dtype=np.uint8,ndim=3,flags='C_CONTIGUOUS'),
np.ctypeslib.ndpointer(dtype=np.uint8,ndim=3,flags='C_CONTIGUOUS'),
c_int,c_int,c_int,CELL_FUNC]
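    # ping-pong between the two buffers: read from buffs[current], write into buffs[nextB]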
for i in range(10):
nextB = (current+1) % 2
print("nextB %s"%nextB)
fun(buffs[current],buffs[nextB],size,size,depth,cell_fun)
result = buffs[nextB]
current = nextB
print("After")
print(result[:,:,0])
print(np.sum(result))
| gpl-2.0 |
mrcslws/nupic.research | tests/unit/frameworks/pytorch/model_compare_test.py | 3 | 2664 | # Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
#
import copy
import unittest
import torch
import torch.nn
from nupic.research.frameworks.pytorch.model_compare import compare_models
from nupic.torch.modules import Flatten
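# Minimal throwaway networks: just enough layers to exercise compare_models.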
def simple_linear_net():
return torch.nn.Sequential(
torch.nn.Linear(32, 16),
torch.nn.ReLU(),
torch.nn.Linear(16, 2)
)
def simple_conv_net():
return torch.nn.Sequential(
torch.nn.Conv2d(1, 3, 5),
torch.nn.MaxPool2d(2),
torch.nn.ReLU(),
Flatten(),
torch.nn.Linear(588, 16),
torch.nn.ReLU(),
torch.nn.Linear(16, 2)
)
class ModelCompareTest(unittest.TestCase):
def test_identical(self):
"""Compare a network with itself"""
model = simple_linear_net()
self.assertTrue(compare_models(model, model, (32,)))
def test_almost_identical(self):
"""Compare a network with itself except for one weight"""
model1 = simple_linear_net()
model2 = copy.deepcopy(model1)
        # in-place weight edits on leaf tensors must run under no_grad
        with torch.no_grad():
            model1._modules["0"].weight[0][0] = 1.0
            model2._modules["0"].weight[0][0] = -1.0
self.assertFalse(compare_models(model1, model2, (32,)))
def test_different(self):
"""Compare two random networks"""
model1 = simple_linear_net()
model2 = simple_linear_net()
self.assertFalse(compare_models(model1, model2, (32,)))
def test_conv_identical(self):
"""Compare a conv network with itself"""
model = simple_conv_net()
self.assertTrue(compare_models(model, model, (1, 32, 32)))
def test_conv_different(self):
"""Compare two random conv networks"""
model1 = simple_conv_net()
model2 = simple_conv_net()
self.assertFalse(compare_models(model1, model2, (1, 32, 32)))
if __name__ == "__main__":
unittest.main()
| agpl-3.0 |
cjaniake/ionicweb | webapp/ionic/lib/python2.7/site-packages/wheel/pkginfo.py | 565 | 1225 | """Tools for reading and writing PKG-INFO / METADATA without caring
about the encoding."""
from email.parser import Parser
try:
unicode
_PY3 = False
except NameError:
_PY3 = True
if not _PY3:
from email.generator import Generator
def read_pkg_info_bytes(bytestr):
return Parser().parsestr(bytestr)
def read_pkg_info(path):
with open(path, "r") as headers:
message = Parser().parse(headers)
return message
def write_pkg_info(path, message):
with open(path, 'w') as metadata:
Generator(metadata, maxheaderlen=0).flatten(message)
else:
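    # Python 3: parse and emit headers as ASCII with surrogateescape so
    # arbitrary bytes round-trip losslessly.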
from email.generator import BytesGenerator
def read_pkg_info_bytes(bytestr):
headers = bytestr.decode(encoding="ascii", errors="surrogateescape")
message = Parser().parsestr(headers)
return message
def read_pkg_info(path):
with open(path, "r",
encoding="ascii",
errors="surrogateescape") as headers:
message = Parser().parse(headers)
return message
def write_pkg_info(path, message):
with open(path, "wb") as out:
BytesGenerator(out, maxheaderlen=0).flatten(message)
| gpl-2.0 |
cpcloud/numpy | numpy/f2py/auxfuncs.py | 75 | 19979 | #!/usr/bin/env python
"""
Auxiliary functions for f2py2e.
Copyright 1999,2000 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) LICENSE.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/07/24 19:01:55 $
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
import pprint
import sys
import types
from functools import reduce
from . import __version__
from . import cfuncs
f2py_version = __version__.version
errmess=sys.stderr.write
#outmess=sys.stdout.write
show=pprint.pprint
options={}
debugoptions=[]
wrapfuncs = 1
def outmess(t):
if options.get('verbose', 1):
sys.stdout.write(t)
def debugcapi(var):
return 'capi' in debugoptions
def _isstring(var):
return 'typespec' in var and var['typespec']=='character' and (not isexternal(var))
def isstring(var):
return _isstring(var) and not isarray(var)
def ischaracter(var):
return isstring(var) and 'charselector' not in var
def isstringarray(var):
return isarray(var) and _isstring(var)
def isarrayofstrings(var):
# leaving out '*' for now so that
# `character*(*) a(m)` and `character a(m,*)`
# are treated differently. Luckily `character**` is illegal.
return isstringarray(var) and var['dimension'][-1]=='(*)'
def isarray(var):
return 'dimension' in var and (not isexternal(var))
def isscalar(var):
return not (isarray(var) or isstring(var) or isexternal(var))
def iscomplex(var):
return isscalar(var) and var.get('typespec') in ['complex', 'double complex']
def islogical(var):
return isscalar(var) and var.get('typespec')=='logical'
def isinteger(var):
return isscalar(var) and var.get('typespec')=='integer'
def isreal(var):
return isscalar(var) and var.get('typespec')=='real'
def get_kind(var):
try:
return var['kindselector']['*']
except KeyError:
try:
return var['kindselector']['kind']
except KeyError:
pass
def islong_long(var):
if not isscalar(var):
return 0
if var.get('typespec') not in ['integer', 'logical']:
return 0
return get_kind(var)=='8'
def isunsigned_char(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-1'
def isunsigned_short(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-2'
def isunsigned(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-4'
def isunsigned_long_long(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-8'
def isdouble(var):
if not isscalar(var):
return 0
if not var.get('typespec')=='real':
return 0
return get_kind(var)=='8'
def islong_double(var):
if not isscalar(var):
return 0
if not var.get('typespec')=='real':
return 0
return get_kind(var)=='16'
def islong_complex(var):
if not iscomplex(var):
return 0
return get_kind(var)=='32'
def iscomplexarray(var):
return isarray(var) and var.get('typespec') in ['complex', 'double complex']
def isint1array(var):
return isarray(var) and var.get('typespec')=='integer' \
and get_kind(var)=='1'
def isunsigned_chararray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-1'
def isunsigned_shortarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-2'
def isunsignedarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-4'
def isunsigned_long_longarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-8'
def issigned_chararray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='1'
def issigned_shortarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='2'
def issigned_array(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='4'
def issigned_long_longarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='8'
def isallocatable(var):
return 'attrspec' in var and 'allocatable' in var['attrspec']
def ismutable(var):
return not (not 'dimension' in var or isstring(var))
def ismoduleroutine(rout):
return 'modulename' in rout
def ismodule(rout):
return ('block' in rout and 'module'==rout['block'])
def isfunction(rout):
return ('block' in rout and 'function'==rout['block'])
#def isfunction_wrap(rout):
# return wrapfuncs and (iscomplexfunction(rout) or isstringfunction(rout)) and (not isexternal(rout))
def isfunction_wrap(rout):
if isintent_c(rout):
return 0
return wrapfuncs and isfunction(rout) and (not isexternal(rout))
def issubroutine(rout):
return ('block' in rout and 'subroutine'==rout['block'])
def issubroutine_wrap(rout):
if isintent_c(rout):
return 0
return issubroutine(rout) and hasassumedshape(rout)
def hasassumedshape(rout):
if rout.get('hasassumedshape'):
return True
for a in rout['args']:
for d in rout['vars'].get(a, {}).get('dimension', []):
if d==':':
rout['hasassumedshape'] = True
return True
return False
def isroutine(rout):
return isfunction(rout) or issubroutine(rout)
def islogicalfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return islogical(rout['vars'][a])
return 0
def islong_longfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return islong_long(rout['vars'][a])
return 0
def islong_doublefunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return islong_double(rout['vars'][a])
return 0
def iscomplexfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return iscomplex(rout['vars'][a])
return 0
def iscomplexfunction_warn(rout):
if iscomplexfunction(rout):
outmess("""\
**************************************************************
Warning: code with a function returning complex value
may not work correctly with your Fortran compiler.
Run the following test before using it in your applications:
$(f2py install dir)/test-site/{b/runme_scalar,e/runme}
When using GNU gcc/g77 compilers, codes should work correctly.
**************************************************************\n""")
return 1
return 0
def isstringfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return isstring(rout['vars'][a])
return 0
def hasexternals(rout):
return 'externals' in rout and rout['externals']
def isthreadsafe(rout):
return 'f2pyenhancements' in rout and 'threadsafe' in rout['f2pyenhancements']
def hasvariables(rout):
return 'vars' in rout and rout['vars']
def isoptional(var):
return ('attrspec' in var and 'optional' in var['attrspec'] and 'required' not in var['attrspec']) and isintent_nothide(var)
def isexternal(var):
return ('attrspec' in var and 'external' in var['attrspec'])
def isrequired(var):
return not isoptional(var) and isintent_nothide(var)
def isintent_in(var):
if 'intent' not in var:
return 1
if 'hide' in var['intent']:
return 0
if 'inplace' in var['intent']:
return 0
if 'in' in var['intent']:
return 1
if 'out' in var['intent']:
return 0
if 'inout' in var['intent']:
return 0
if 'outin' in var['intent']:
return 0
return 1
def isintent_inout(var):
return 'intent' in var and ('inout' in var['intent'] or 'outin' in var['intent']) and 'in' not in var['intent'] and 'hide' not in var['intent'] and 'inplace' not in var['intent']
def isintent_out(var):
return 'out' in var.get('intent', [])
def isintent_hide(var):
return ('intent' in var and ('hide' in var['intent'] or ('out' in var['intent'] and 'in' not in var['intent'] and (not l_or(isintent_inout, isintent_inplace)(var)))))
def isintent_nothide(var):
return not isintent_hide(var)
def isintent_c(var):
return 'c' in var.get('intent', [])
# def isintent_f(var):
# return not isintent_c(var)
def isintent_cache(var):
return 'cache' in var.get('intent', [])
def isintent_copy(var):
return 'copy' in var.get('intent', [])
def isintent_overwrite(var):
return 'overwrite' in var.get('intent', [])
def isintent_callback(var):
return 'callback' in var.get('intent', [])
def isintent_inplace(var):
return 'inplace' in var.get('intent', [])
def isintent_aux(var):
return 'aux' in var.get('intent', [])
def isintent_aligned4(var):
return 'aligned4' in var.get('intent', [])
def isintent_aligned8(var):
return 'aligned8' in var.get('intent', [])
def isintent_aligned16(var):
return 'aligned16' in var.get('intent', [])
isintent_dict = {isintent_in: 'INTENT_IN', isintent_inout: 'INTENT_INOUT',
isintent_out: 'INTENT_OUT', isintent_hide: 'INTENT_HIDE',
isintent_cache: 'INTENT_CACHE',
isintent_c: 'INTENT_C', isoptional: 'OPTIONAL',
isintent_inplace: 'INTENT_INPLACE',
isintent_aligned4: 'INTENT_ALIGNED4',
isintent_aligned8: 'INTENT_ALIGNED8',
isintent_aligned16: 'INTENT_ALIGNED16',
}
def isprivate(var):
return 'attrspec' in var and 'private' in var['attrspec']
def hasinitvalue(var):
return '=' in var
def hasinitvalueasstring(var):
if not hasinitvalue(var):
return 0
return var['='][0] in ['"', "'"]
def hasnote(var):
return 'note' in var
def hasresultnote(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return hasnote(rout['vars'][a])
return 0
def hascommon(rout):
return 'common' in rout
def containscommon(rout):
if hascommon(rout):
return 1
if hasbody(rout):
for b in rout['body']:
if containscommon(b):
return 1
return 0
def containsmodule(block):
if ismodule(block):
return 1
if not hasbody(block):
return 0
for b in block['body']:
if containsmodule(b):
return 1
return 0
def hasbody(rout):
return 'body' in rout
def hascallstatement(rout):
return getcallstatement(rout) is not None
def istrue(var):
return 1
def isfalse(var):
return 0
class F2PYError(Exception):
pass
class throw_error:
def __init__(self, mess):
self.mess = mess
def __call__(self, var):
mess = '\n\n var = %s\n Message: %s\n' % (var, self.mess)
raise F2PYError(mess)
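# l_and/l_or/l_not build predicate combinators: the eval'd lambda binds each
# predicate as a default argument so any number of checks can be composed and
# applied later to a variable dict.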
def l_and(*f):
l, l2='lambda v', []
for i in range(len(f)):
l='%s,f%d=f[%d]'%(l, i, i)
l2.append('f%d(v)'%(i))
return eval('%s:%s'%(l, ' and '.join(l2)))
def l_or(*f):
l, l2='lambda v', []
for i in range(len(f)):
l='%s,f%d=f[%d]'%(l, i, i)
l2.append('f%d(v)'%(i))
return eval('%s:%s'%(l, ' or '.join(l2)))
def l_not(f):
return eval('lambda v,f=f:not f(v)')
def isdummyroutine(rout):
try:
return rout['f2pyenhancements']['fortranname']==''
except KeyError:
return 0
def getfortranname(rout):
try:
name = rout['f2pyenhancements']['fortranname']
if name=='':
raise KeyError
if not name:
errmess('Failed to use fortranname from %s\n'%(rout['f2pyenhancements']))
raise KeyError
except KeyError:
name = rout['name']
return name
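# f2py multiline blocks are delimited by ''' quotes; `counter` selects one
# entry when the same block name occurs several times.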
def getmultilineblock(rout,blockname,comment=1,counter=0):
try:
r = rout['f2pyenhancements'].get(blockname)
except KeyError:
return
if not r: return
if counter > 0 and isinstance(r, str):
return
if isinstance(r, list):
if counter>=len(r): return
r = r[counter]
if r[:3]=="'''":
if comment:
r = '\t/* start ' + blockname + ' multiline ('+repr(counter)+') */\n' + r[3:]
else:
r = r[3:]
if r[-3:]=="'''":
if comment:
r = r[:-3] + '\n\t/* end multiline ('+repr(counter)+')*/'
else:
r = r[:-3]
else:
errmess("%s multiline block should end with `'''`: %s\n" \
% (blockname, repr(r)))
return r
def getcallstatement(rout):
return getmultilineblock(rout, 'callstatement')
def getcallprotoargument(rout,cb_map={}):
r = getmultilineblock(rout, 'callprotoargument', comment=0)
if r: return r
if hascallstatement(rout):
outmess('warning: callstatement is defined without callprotoargument\n')
return
from .capi_maps import getctype
arg_types, arg_types2 = [], []
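    # hidden string-length (size_t) arguments are collected separately and
    # appended after the regular argument list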
if l_and(isstringfunction, l_not(isfunction_wrap))(rout):
arg_types.extend(['char*', 'size_t'])
for n in rout['args']:
var = rout['vars'][n]
if isintent_callback(var):
continue
if n in cb_map:
ctype = cb_map[n]+'_typedef'
else:
ctype = getctype(var)
if l_and(isintent_c, l_or(isscalar, iscomplex))(var):
pass
elif isstring(var):
pass
#ctype = 'void*'
else:
ctype = ctype+'*'
if isstring(var) or isarrayofstrings(var):
arg_types2.append('size_t')
arg_types.append(ctype)
proto_args = ','.join(arg_types+arg_types2)
if not proto_args:
proto_args = 'void'
#print proto_args
return proto_args
def getusercode(rout):
return getmultilineblock(rout, 'usercode')
def getusercode1(rout):
return getmultilineblock(rout, 'usercode', counter=1)
def getpymethoddef(rout):
return getmultilineblock(rout, 'pymethoddef')
def getargs(rout):
sortargs, args=[], []
if 'args' in rout:
args=rout['args']
if 'sortvars' in rout:
for a in rout['sortvars']:
if a in args: sortargs.append(a)
for a in args:
if a not in sortargs:
sortargs.append(a)
else: sortargs=rout['args']
return args, sortargs
def getargs2(rout):
sortargs, args=[], rout.get('args', [])
auxvars = [a for a in rout['vars'].keys() if isintent_aux(rout['vars'][a])\
and a not in args]
args = auxvars + args
if 'sortvars' in rout:
for a in rout['sortvars']:
if a in args: sortargs.append(a)
for a in args:
if a not in sortargs:
sortargs.append(a)
else: sortargs=auxvars + rout['args']
return args, sortargs
def getrestdoc(rout):
if 'f2pymultilines' not in rout:
return None
k = None
if rout['block']=='python module':
k = rout['block'], rout['name']
return rout['f2pymultilines'].get(k, None)
def gentitle(name):
l=(80-len(name)-6)//2
return '/*%s %s %s*/'%(l*'*', name, l*'*')
def flatlist(l):
if isinstance(l, list):
return reduce(lambda x,y,f=flatlist:x+f(y), l, [])
return [l]
def stripcomma(s):
if s and s[-1]==',': return s[:-1]
return s
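# Template substitution: each '#key#' placeholder in str is replaced with
# d[key], joining list values with a separator.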
def replace(str,d,defaultsep=''):
if isinstance(d, list):
return [replace(str, _m, defaultsep) for _m in d]
if isinstance(str, list):
return [replace(_m, d, defaultsep) for _m in str]
for k in 2*list(d.keys()):
if k=='separatorsfor':
continue
if 'separatorsfor' in d and k in d['separatorsfor']:
sep=d['separatorsfor'][k]
else:
sep=defaultsep
if isinstance(d[k], list):
str=str.replace('#%s#'%(k), sep.join(flatlist(d[k])))
else:
str=str.replace('#%s#'%(k), d[k])
return str
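# Merge rule dict ar into rd, promoting scalars to lists so values for
# repeated keys accumulate; nested dicts merge recursively.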
def dictappend(rd, ar):
if isinstance(ar, list):
for a in ar:
rd=dictappend(rd, a)
return rd
for k in ar.keys():
if k[0]=='_':
continue
if k in rd:
if isinstance(rd[k], str):
rd[k]=[rd[k]]
if isinstance(rd[k], list):
if isinstance(ar[k], list):
rd[k]=rd[k]+ar[k]
else:
rd[k].append(ar[k])
elif isinstance(rd[k], dict):
if isinstance(ar[k], dict):
if k=='separatorsfor':
for k1 in ar[k].keys():
if k1 not in rd[k]:
rd[k][k1]=ar[k][k1]
else:
rd[k]=dictappend(rd[k], ar[k])
else:
rd[k]=ar[k]
return rd
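# Apply a rule set to replacement dict d: string rules are template-substituted,
# list rules recurse, and dict rules are gated by predicate keys built with
# l_and/l_or above.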
def applyrules(rules,d,var={}):
ret={}
if isinstance(rules, list):
for r in rules:
rr=applyrules(r, d, var)
ret=dictappend(ret, rr)
if '_break' in rr:
break
return ret
if '_check' in rules and (not rules['_check'](var)):
return ret
if 'need' in rules:
res = applyrules({'needs':rules['need']}, d, var)
if 'needs' in res:
cfuncs.append_needs(res['needs'])
for k in rules.keys():
if k=='separatorsfor':
ret[k]=rules[k]; continue
if isinstance(rules[k], str):
ret[k]=replace(rules[k], d)
elif isinstance(rules[k], list):
ret[k]=[]
for i in rules[k]:
ar=applyrules({k:i}, d, var)
if k in ar:
ret[k].append(ar[k])
elif k[0]=='_':
continue
elif isinstance(rules[k], dict):
ret[k]=[]
for k1 in rules[k].keys():
if isinstance(k1, types.FunctionType) and k1(var):
if isinstance(rules[k][k1], list):
for i in rules[k][k1]:
if isinstance(i, dict):
res=applyrules({'supertext':i}, d, var)
if 'supertext' in res:
i=res['supertext']
else: i=''
ret[k].append(replace(i, d))
else:
i=rules[k][k1]
if isinstance(i, dict):
res=applyrules({'supertext':i}, d)
if 'supertext' in res:
i=res['supertext']
else: i=''
ret[k].append(replace(i, d))
else:
errmess('applyrules: ignoring rule %s.\n'%repr(rules[k]))
if isinstance(ret[k], list):
if len(ret[k])==1:
ret[k]=ret[k][0]
if ret[k]==[]:
del ret[k]
return ret
| bsd-3-clause |
gskachkov/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/handshake/__init__.py | 658 | 4406 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebSocket opening handshake processor. This class try to apply available
opening handshake processors for each protocol version until a connection is
successfully established.
"""
import logging
from mod_pywebsocket import common
from mod_pywebsocket.handshake import hybi00
from mod_pywebsocket.handshake import hybi
# Export AbortedByUserException, HandshakeException, and VersionException
# symbol from this module.
from mod_pywebsocket.handshake._base import AbortedByUserException
from mod_pywebsocket.handshake._base import HandshakeException
from mod_pywebsocket.handshake._base import VersionException
_LOGGER = logging.getLogger(__name__)
def do_handshake(request, dispatcher, allowDraft75=False, strict=False):
"""Performs WebSocket handshake.
Args:
request: mod_python request.
dispatcher: Dispatcher (dispatch.Dispatcher).
allowDraft75: obsolete argument. ignored.
strict: obsolete argument. ignored.
Handshaker will add attributes such as ws_resource in performing
handshake.
"""
_LOGGER.debug('Client\'s opening handshake resource: %r', request.uri)
# To print mimetools.Message as escaped one-line string, we converts
# headers_in to dict object. Without conversion, if we use %r, it just
# prints the type and address, and if we use %s, it prints the original
# header string as multiple lines.
#
# Both mimetools.Message and MpTable_Type of mod_python can be
# converted to dict.
#
# mimetools.Message.__str__ returns the original header string.
# dict(mimetools.Message object) returns the map from header names to
# header values. While MpTable_Type doesn't have such __str__ but just
# __repr__ which formats itself as well as dictionary object.
_LOGGER.debug(
'Client\'s opening handshake headers: %r', dict(request.headers_in))
handshakers = []
handshakers.append(
('RFC 6455', hybi.Handshaker(request, dispatcher)))
handshakers.append(
('HyBi 00', hybi00.Handshaker(request, dispatcher)))
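    # try the newest protocol first; re-raise immediately only when a
    # handshaker reports a definite HTTP status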
for name, handshaker in handshakers:
_LOGGER.debug('Trying protocol version %s', name)
try:
handshaker.do_handshake()
_LOGGER.info('Established (%s protocol)', name)
return
except HandshakeException, e:
_LOGGER.debug(
'Failed to complete opening handshake as %s protocol: %r',
name, e)
if e.status:
raise e
except AbortedByUserException, e:
raise
except VersionException, e:
raise
# TODO(toyoshim): Add a test to cover the case all handshakers fail.
raise HandshakeException(
'Failed to complete opening handshake for all available protocols',
status=common.HTTP_STATUS_BAD_REQUEST)
# vi:sts=4 sw=4 et
| bsd-3-clause |
geotagx/geotagx-pybossa-archive | test/test_stats.py | 1 | 8132 | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
import datetime
import time
from base import web, model, db, Fixtures
import pybossa.stats as stats
class TestStats:
def setUp(self):
self.app = web.app
model.rebuild_db()
Fixtures.create()
def tearDown(self):
db.session.remove()
@classmethod
def teardown_class(cls):
model.rebuild_db()
# Tests
# Fixtures will create 10 tasks and will need 10 answers per task, so
# the app will be completed when 100 tasks have been submitted
# Only 10 task_runs are saved in the DB
def test_00_avg_n_tasks(self):
"""Test STATS avg and n of tasks method works"""
with self.app.test_request_context('/'):
avg, n_tasks = stats.get_avg_n_tasks(1)
err_msg = "The average number of answer per task is wrong"
assert avg == 10, err_msg
err_msg = "The n of tasks is wrong"
assert n_tasks == 10, err_msg
def test_01_stats_dates(self):
"""Test STATS dates method works"""
today = unicode(datetime.date.today())
with self.app.test_request_context('/'):
dates, dates_n_tasks, dates_anon, dates_auth = stats.stats_dates(1)
err_msg = "There should be 10 answers today"
assert dates[today] == 10, err_msg
err_msg = "There should be 100 answers per day"
assert dates_n_tasks[today] == 100, err_msg
err_msg = "The SUM of answers from anon and auth users should be 10"
assert (dates_anon[today] + dates_auth[today]) == 10, err_msg
def test_02_stats_hours(self):
"""Test STATS hours method works"""
hour = unicode(datetime.datetime.utcnow().strftime('%H'))
with self.app.test_request_context('/'):
hours, hours_anon, hours_auth, max_hours,\
max_hours_anon, max_hours_auth = stats.stats_hours(1)
print hours
for i in range(0, 24):
# There should be only 10 answers at current hour
if str(i).zfill(2) == hour:
err_msg = "At time %s there should be 10 answers" \
"but there are %s" % (str(i).zfill(2),
hours[str(i).zfill(2)])
assert hours[str(i).zfill(2)] == 10, "There should be 10 answers"
else:
err_msg = "At time %s there should be 0 answers" \
"but there are %s" % (str(i).zfill(2),
hours[str(i).zfill(2)])
assert hours[str(i).zfill(2)] == 0, err_msg
if str(i).zfill(2) == hour:
tmp = (hours_anon[hour] + hours_auth[hour])
assert tmp == 10, "There should be 10 answers"
else:
tmp = (hours_anon[str(i).zfill(2)] + hours_auth[str(i).zfill(2)])
assert tmp == 0, "There should be 0 answers"
err_msg = "It should be 10, as all answers are submitted in the same hour"
tr = db.session.query(model.TaskRun).all()
for t in tr:
print t.finish_time
assert max_hours == 10, err_msg
assert (max_hours_anon + max_hours_auth) == 10, err_msg
def test_03_stats(self):
"""Test STATS stats method works"""
today = unicode(datetime.date.today())
hour = int(datetime.datetime.utcnow().strftime('%H'))
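        # the stats layer reports dates as epoch milliseconds, hence the * 1000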
date_ms = time.mktime(time.strptime(today, "%Y-%m-%d")) * 1000
anon = 0
auth = 0
with self.app.test_request_context('/'):
dates_stats, hours_stats, user_stats = stats.get_stats(1)
for item in dates_stats:
if item['label'] == 'Anon + Auth':
assert item['values'][0][0] == date_ms, item['values'][0][0]
assert item['values'][0][1] == 10, "There should be 10 answers"
if item['label'] == 'Anonymous':
assert item['values'][0][0] == date_ms, item['values'][0][0]
anon = item['values'][0][1]
if item['label'] == 'Authenticated':
assert item['values'][0][0] == date_ms, item['values'][0][0]
auth = item['values'][0][1]
if item['label'] == 'Total':
assert item['values'][0][0] == date_ms, item['values'][0][0]
assert item['values'][0][1] == 10, "There should be 10 answers"
if item['label'] == 'Expected Answers':
assert item['values'][0][0] == date_ms, item['values'][0][0]
for i in item['values']:
assert i[1] == 100, "Each date should have 100 answers"
assert item['values'][0][1] == 100, "There should be 10 answers"
if item['label'] == 'Estimation':
assert item['values'][0][0] == date_ms, item['values'][0][0]
v = 10
for i in item['values']:
assert i[1] == v, "Each date should have 10 extra answers"
v = v + 10
assert auth + anon == 10, "date stats sum of auth and anon should be 10"
max_hours = 0
for item in hours_stats:
if item['label'] == 'Anon + Auth':
max_hours = item['max']
print item
assert item['max'] == 10, item['max']
assert item['max'] == 10, "Max hours value should be 10"
for i in item['values']:
if i[0] == hour:
assert i[1] == 10, "There should be 10 answers"
assert i[2] == 5, "The size of the bubble should be 5"
else:
assert i[1] == 0, "There should be 0 answers"
                        assert i[2] == 0, "The size of the bubble should be 0"
if item['label'] == 'Anonymous':
anon = item['max']
for i in item['values']:
if i[0] == hour:
assert i[1] == anon, "There should be anon answers"
assert i[2] == (anon * 5) / max_hours, "The size of the bubble should be 5"
else:
assert i[1] == 0, "There should be 0 answers"
                        assert i[2] == 0, "The size of the bubble should be 0"
if item['label'] == 'Authenticated':
auth = item['max']
for i in item['values']:
if i[0] == hour:
assert i[1] == auth, "There should be anon answers"
assert i[2] == (auth * 5) / max_hours, "The size of the bubble should be 5"
else:
assert i[1] == 0, "There should be 0 answers"
                        assert i[2] == 0, "The size of the bubble should be 0"
assert auth + anon == 10, "date stats sum of auth and anon should be 10"
err_msg = "date stats sum of auth and anon should be 10"
assert user_stats['n_anon'] + user_stats['n_auth'], err_msg
| agpl-3.0 |
Azure/azure-sdk-for-python | sdk/identity/azure-identity/tests/test_device_code_credential.py | 1 | 11509 | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import datetime
from azure.core.exceptions import ClientAuthenticationError
from azure.core.pipeline.policies import SansIOHTTPPolicy
from azure.identity import AuthenticationRequiredError, DeviceCodeCredential
from azure.identity._internal.user_agent import USER_AGENT
import pytest
from helpers import (
build_aad_response,
build_id_token,
get_discovery_response,
id_token_claims,
mock_response,
Request,
validating_transport,
)
try:
from unittest.mock import Mock, patch
except ImportError: # python < 3.3
from mock import Mock, patch # type: ignore
def test_tenant_id_validation():
"""The credential should raise ValueError when given an invalid tenant_id"""
valid_ids = {"c878a2ab-8ef4-413b-83a0-199afb84d7fb", "contoso.onmicrosoft.com", "organizations", "common"}
for tenant in valid_ids:
DeviceCodeCredential(tenant_id=tenant)
invalid_ids = {"my tenant", "my_tenant", "/", "\\", '"my-tenant"', "'my-tenant'"}
for tenant in invalid_ids:
with pytest.raises(ValueError):
DeviceCodeCredential(tenant_id=tenant)
def test_no_scopes():
"""The credential should raise when get_token is called with no scopes"""
credential = DeviceCodeCredential("client_id")
with pytest.raises(ValueError):
credential.get_token()
def test_authenticate():
client_id = "client-id"
environment = "localhost"
issuer = "https://" + environment
tenant_id = "some-tenant"
authority = issuer + "/" + tenant_id
access_token = "***"
scope = "scope"
# mock AAD response with id token
object_id = "object-id"
home_tenant = "home-tenant-id"
username = "[email protected]"
id_token = build_id_token(aud=client_id, iss=issuer, object_id=object_id, tenant_id=home_tenant, username=username)
auth_response = build_aad_response(
uid=object_id, utid=home_tenant, access_token=access_token, refresh_token="**", id_token=id_token
)
transport = validating_transport(
requests=[Request(url_substring=issuer)] * 4,
responses=[get_discovery_response(authority)] * 2 # instance and tenant discovery
+ [
mock_response( # start device code flow
json_payload={
"device_code": "_",
"user_code": "user-code",
"verification_uri": "verification-uri",
"expires_in": 42,
}
),
mock_response(json_payload=dict(auth_response, scope=scope)), # poll for completion
],
)
credential = DeviceCodeCredential(
client_id,
prompt_callback=Mock(), # prevent credential from printing to stdout
transport=transport,
authority=environment,
tenant_id=tenant_id,
)
record = credential.authenticate(scopes=(scope,))
assert record.authority == environment
assert record.home_account_id == object_id + "." + home_tenant
assert record.tenant_id == home_tenant
assert record.username == username
# credential should have a cached access token for the scope used in authenticate
token = credential.get_token(scope)
assert token.token == access_token
def test_disable_automatic_authentication():
"""When configured for strict silent auth, the credential should raise when silent auth fails"""
transport = Mock(send=Mock(side_effect=Exception("no request should be sent")))
credential = DeviceCodeCredential("client-id", disable_automatic_authentication=True, transport=transport)
with pytest.raises(AuthenticationRequiredError):
credential.get_token("scope")
def test_policies_configurable():
policy = Mock(spec_set=SansIOHTTPPolicy, on_request=Mock())
client_id = "client-id"
transport = validating_transport(
requests=[Request()] * 3,
responses=[
# expected requests: discover tenant, start device code flow, poll for completion
get_discovery_response(),
mock_response(
json_payload={
"device_code": "_",
"user_code": "user-code",
"verification_uri": "verification-uri",
"expires_in": 42,
}
),
mock_response(
json_payload=dict(
build_aad_response(access_token="**", id_token=build_id_token(aud=client_id)), scope="scope"
)
),
],
)
credential = DeviceCodeCredential(
client_id=client_id, prompt_callback=Mock(), policies=[policy], transport=transport
)
credential.get_token("scope")
assert policy.on_request.called
def test_user_agent():
client_id = "client-id"
transport = validating_transport(
requests=[Request()] * 2 + [Request(required_headers={"User-Agent": USER_AGENT})],
responses=[
get_discovery_response(),
mock_response(
json_payload={
"device_code": "_",
"user_code": "user-code",
"verification_uri": "verification-uri",
"expires_in": 42,
}
),
mock_response(
json_payload=dict(
build_aad_response(access_token="**", id_token=build_id_token(aud=client_id)), scope="scope"
)
),
],
)
credential = DeviceCodeCredential(client_id=client_id, prompt_callback=Mock(), transport=transport)
credential.get_token("scope")
def test_device_code_credential():
client_id = "client-id"
expected_token = "access-token"
user_code = "user-code"
verification_uri = "verification-uri"
expires_in = 42
transport = validating_transport(
requests=[Request()] * 3, # not validating requests because they're formed by MSAL
responses=[
# expected requests: discover tenant, start device code flow, poll for completion
mock_response(json_payload={"authorization_endpoint": "https://a/b", "token_endpoint": "https://a/b"}),
mock_response(
json_payload={
"device_code": "_",
"user_code": user_code,
"verification_uri": verification_uri,
"expires_in": expires_in,
}
),
mock_response(
json_payload=dict(
build_aad_response(
access_token=expected_token,
expires_in=expires_in,
refresh_token="_",
id_token=build_id_token(aud=client_id),
),
scope="scope",
),
),
],
)
callback = Mock()
credential = DeviceCodeCredential(
client_id=client_id, prompt_callback=callback, transport=transport, instance_discovery=False,
)
now = datetime.datetime.utcnow()
token = credential.get_token("scope")
assert token.token == expected_token
# prompt_callback should have been called as documented
assert callback.call_count == 1
uri, code, expires_on = callback.call_args[0]
assert uri == verification_uri
assert code == user_code
# validating expires_on exactly would require depending on internals of the credential and
# patching time, so we'll be satisfied if expires_on is a datetime at least expires_in
# seconds later than our call to get_token
assert isinstance(expires_on, datetime.datetime)
assert expires_on - now >= datetime.timedelta(seconds=expires_in)
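# Illustrative sketch (not part of the original test suite): a custom
# prompt_callback must accept the (verification_uri, user_code, expires_on)
# triple exercised above. A minimal implementation could look like this:
def _example_prompt_callback(verification_uri, user_code, expires_on):
    # Direct the user to complete the device code flow in a browser.
    print("Visit {0} and enter code {1} (valid until {2})".format(
        verification_uri, user_code, expires_on))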
def test_timeout():
transport = validating_transport(
requests=[Request()] * 3, # not validating requests because they're formed by MSAL
responses=[
# expected requests: discover tenant, start device code flow, poll for completion
mock_response(json_payload={"authorization_endpoint": "https://a/b", "token_endpoint": "https://a/b"}),
mock_response(json_payload={"device_code": "_", "user_code": "_", "verification_uri": "_"}),
mock_response(json_payload={"error": "authorization_pending"}),
],
)
credential = DeviceCodeCredential(
client_id="_", prompt_callback=Mock(), transport=transport, timeout=0.01, instance_discovery=False,
)
with pytest.raises(ClientAuthenticationError) as ex:
credential.get_token("scope")
assert "timed out" in ex.value.message.lower()
def test_client_capabilities():
"""the credential should configure MSAL for capability CP1 unless AZURE_IDENTITY_DISABLE_CP1 is set"""
transport = Mock(send=Mock(side_effect=Exception("this test mocks MSAL, so no request should be sent")))
with patch("msal.PublicClientApplication") as PublicClientApplication:
DeviceCodeCredential(transport=transport)._get_app()
assert PublicClientApplication.call_count == 1
_, kwargs = PublicClientApplication.call_args
assert kwargs["client_capabilities"] == ["CP1"]
with patch.dict("os.environ", {"AZURE_IDENTITY_DISABLE_CP1": "true"}):
with patch("msal.PublicClientApplication") as PublicClientApplication:
DeviceCodeCredential(transport=transport)._get_app()
assert PublicClientApplication.call_count == 1
_, kwargs = PublicClientApplication.call_args
assert kwargs["client_capabilities"] is None
def test_claims_challenge():
"""get_token and authenticate should pass any claims challenge to MSAL token acquisition APIs"""
msal_acquire_token_result = dict(
build_aad_response(access_token="**", id_token=build_id_token()),
id_token_claims=id_token_claims("issuer", "subject", "audience", upn="upn"),
)
expected_claims = '{"access_token": {"essential": "true"}'
transport = Mock(send=Mock(side_effect=Exception("this test mocks MSAL, so no request should be sent")))
credential = DeviceCodeCredential(transport=transport)
with patch.object(DeviceCodeCredential, "_get_app") as get_mock_app:
msal_app = get_mock_app()
msal_app.initiate_device_flow.return_value = {"message": "it worked"}
msal_app.acquire_token_by_device_flow.return_value = msal_acquire_token_result
credential.authenticate(scopes=["scope"], claims=expected_claims)
assert msal_app.acquire_token_by_device_flow.call_count == 1
args, kwargs = msal_app.acquire_token_by_device_flow.call_args
assert kwargs["claims_challenge"] == expected_claims
credential.get_token("scope", claims=expected_claims)
assert msal_app.acquire_token_by_device_flow.call_count == 2
args, kwargs = msal_app.acquire_token_by_device_flow.call_args
assert kwargs["claims_challenge"] == expected_claims
msal_app.get_accounts.return_value = [{"home_account_id": credential._auth_record.home_account_id}]
msal_app.acquire_token_silent_with_error.return_value = msal_acquire_token_result
credential.get_token("scope", claims=expected_claims)
assert msal_app.acquire_token_silent_with_error.call_count == 1
args, kwargs = msal_app.acquire_token_silent_with_error.call_args
assert kwargs["claims_challenge"] == expected_claims
| mit |
rackerlabs/onmetal-scripts | onmetal_scripts/fix_noheartbeat.py | 1 | 6080 | #!/usr/bin/env python
# Copyright 2015 Rackspace, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from onmetal_scripts.lib import ipmi
from onmetal_scripts.lib import script
from onmetal_scripts.lib import states
import datetime
import re
import socket
import time
import traceback
"""
Fixes nodes in a no heartbeat state.
1. Set maintenance mode
2. Remove instance ports
3. Add decom ports
4. Put into decom, decom_target_state none
5. Fix bmc force boot agent
6. Unmaintenance (release lock)
This will leave nodes in maintenance mode should anything fail.
"""
class FixNoHeartbeat(script.TeethScript):
use_ironic = True
use_neutron = True
use_ipmi = True
def __init__(self):
super(FixNoHeartbeat, self).__init__(
'Utility for fixing nodes in a neutron failed state.')
self.add_argument('--force', action='store_true',
help='Do not sanity-check environment first.')
self.add_argument('--time_since_heartbeat', default=3600)
self.add_argument('--ipmi_password', default=None)
def _dt_to_ut(self, t):
# Ugly, but python timezone handling is crappy in this case
return datetime.datetime.strptime(t.replace("+00:00", ""),
"%Y-%m-%dT%H:%M:%S")
def _time_since(self, t):
# T can be a datetime string or a timestamp
if isinstance(t, basestring):
return (datetime.datetime.now() -
self._dt_to_ut(t)).total_seconds()
else:
return time.time() - t
def _is_no_heartbeat(self, ironic_node):
timeout = self.get_argument('time_since_heartbeat')
time_since_heartbeat = self._time_since(
ironic_node['driver_internal_info'].get('agent_last_heartbeat', 0))
time_since_prov = self._time_since(
ironic_node['provision_updated_at'] or 0)
if (ironic_node['maintenance'] is True and
ironic_node['maintenance_reason'] != 'NoHeartbeat'):
return False
if ironic_node['reservation']:
return False
return ((time_since_heartbeat > timeout and
ironic_node['provision_state'] == states.AVAILABLE) or
(time_since_heartbeat > timeout and
ironic_node['provision_state'] == states.CLEANING and
time_since_prov > timeout))
def _get_no_heartbeat_nodes(self):
print("getting nodes that are not heartbeating...")
return [node for node in self.ironic_client.list_nodes() if
self._is_no_heartbeat(node)]
def _format_node(self, node):
node_info = {
'uuid': node['uuid'],
'last_hb': self._time_since(node['driver_info'].get(
'agent_last_heartbeat', 0)),
'reservation': node['reservation'],
'prov_state': node['provision_state'],
'decom_step': node['driver_info'].get('decommission_target_state'),
'bmc_ip': node['driver_info'].get('ipmi_address')
}
return ("{uuid} agent_last_heartbeat: {last_hb} reservation: "
"{reservation} provision_state: {prov_state} "
"decom step: {decom_step} bmc ip: {bmc_ip}".format(
**node_info))
def _dry_run(self):
nodes = self._get_no_heartbeat_nodes()
print("dry_run is set. Would execute on the following nodes:")
for node in nodes:
print(self._format_node(node))
def run(self):
if not self.get_argument('force'):
if not re.search(r'conductor', socket.gethostname()):
raise EnvironmentError(
'This script must be run on a conductor')
ipmi_password = self.get_argument('ipmi_password', check_env=True)
for node in self._get_no_heartbeat_nodes():
print("fixing node: {}".format(self._format_node(node)))
try:
# Get node again and reverify to avoid some race
node = self.ironic_client.get_node(node['uuid'])
if not self._is_no_heartbeat(node):
print(
"node {} is no longer in a no heartbeat state, "
"skipping".format(
node['uuid']))
continue
                # maintenance the node; we filter only unmaintenanced nodes,
                # so this acts as a lock and protects us should this
                # fail halfway
self.ironic_client.set_maintenance(node, "True", 'NoHeartbeat')
# remove any existing neutron ports, and add decom ports
self.neutron_client.delete_ports_for_node(node)
self.neutron_client.add_decom_port(node)
# force reboot the node and boot from PXE
self.ipmi_client.force_reboot(node, ipmi.IPMIBootDev.PXE,
ipmi_password)
# unmaintenance the now fixed node
self.ironic_client.set_maintenance(node, "False")
except Exception as e:
print(traceback.print_exc())
raise Exception(
"node {} failed to be reset because: {}. Exiting".format(
node.get('uuid'), str(e)))
else:
print("node {} successfully fixed.".format(node['uuid']))
if __name__ == "__main__":
FixNoHeartbeat().run()
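# Example invocation (values are hypothetical):
#   python fix_noheartbeat.py --force --time_since_heartbeat 7200
# Any node that fails mid-fix is left in maintenance mode for inspection.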
| apache-2.0 |
BtbN/xbmc | tools/EventClients/lib/python/bt/bt.py | 181 | 2535 | # -*- coding: utf-8 -*-
# Copyright (C) 2008-2013 Team XBMC
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
BLUEZ=0
try:
import bluetooth
BLUEZ=1
except:
try:
import lightblue
except:
print "ERROR: You need to have either LightBlue or PyBluez installed\n"\
" in order to use this program."
print "- PyBluez (Linux / Windows XP) http://org.csail.mit.edu/pybluez/"
print "- LightBlue (Mac OS X / Linux) http://lightblue.sourceforge.net/"
exit()
def bt_create_socket():
if BLUEZ:
sock = bluetooth.BluetoothSocket(bluetooth.L2CAP)
else:
sock = lightblue.socket(lightblue.L2CAP)
return sock
def bt_create_rfcomm_socket():
if BLUEZ:
sock = bluetooth.BluetoothSocket( bluetooth.RFCOMM )
sock.bind(("",bluetooth.PORT_ANY))
else:
sock = lightblue.socket(lightblue.RFCOMM)
sock.bind(("",0))
return sock
def bt_discover_devices():
if BLUEZ:
nearby = bluetooth.discover_devices()
else:
nearby = lightblue.finddevices()
return nearby
def bt_lookup_name(bdaddr):
if BLUEZ:
bname = bluetooth.lookup_name( bdaddr )
else:
bname = bdaddr[1]
return bname
def bt_lookup_addr(bdaddr):
if BLUEZ:
return bdaddr
else:
return bdaddr[0]
def bt_advertise(name, uuid, socket):
if BLUEZ:
bluetooth.advertise_service( socket, name,
service_id = uuid,
service_classes = [ uuid, bluetooth.SERIAL_PORT_CLASS ],
profiles = [ bluetooth.SERIAL_PORT_PROFILE ] )
else:
lightblue.advertise(name, socket, lightblue.RFCOMM)
def bt_stop_advertising(socket):
if BLUEZ:
        bluetooth.stop_advertising(socket)
else:
lightblue.stopadvertise(socket)
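# Minimal usage sketch (illustrative; assumes a discoverable device is in
# range). It relies only on the wrappers above, so it works with either
# the PyBluez or the LightBlue backend:
def _example_scan():
    for device in bt_discover_devices():
        print "%s (%s)" % (bt_lookup_name(device), bt_lookup_addr(device))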
| gpl-2.0 |
jdobes/cobbler | cobbler/modules/authn_spacewalk.py | 1 | 5150 | """
Authentication module that uses Spacewalk's auth system.
Any org_admin or kickstart_admin can get in.
Copyright 2007-2008, Red Hat, Inc
Michael DeHaan <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import distutils.sysconfig
import sys
import xmlrpclib
plib = distutils.sysconfig.get_python_lib()
mod_path="%s/cobbler" % plib
sys.path.insert(0, mod_path)
def register():
"""
The mandatory cobbler module registration hook.
"""
return "authn"
def __looks_like_a_token(password):
# what spacewalk sends us could be an internal token or it could be a password
# if it's long and lowercase hex, it's /likely/ a token, and we should try to treat
# it as a token first, if not, we should treat it as a password. All of this
# code is there to avoid extra XMLRPC calls, which are slow.
# we can't use binascii.unhexlify here as it's an "odd length string"
if password.lower() != password:
# tokens are always lowercase, this isn't a token
return False
#try:
# #data = binascii.unhexlify(password)
# return True # looks like a token, but we can't be sure
#except:
# return False # definitely not a token
return (len(password) > 45)
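# Illustration of the heuristic (sample values, not real credentials):
#
#   __looks_like_a_token("deadbeef" * 5)  -> False (length 40, not > 45)
#   __looks_like_a_token("deadbeef" * 6)  -> True  (48 lowercase hex chars)
#   __looks_like_a_token("Secret123")     -> False (contains uppercase)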
def authenticate(api_handle,username,password):
"""
Validate a username/password combo, returning True/False
This will pass the username and password back to Spacewalk
to see if this authentication request is valid.
See also: http://www.redhat.com/spacewalk/documentation/api/0.4/
"""
if api_handle is not None:
server = api_handle.settings().redhat_management_server
user_enabled = api_handle.settings().redhat_management_permissive
else:
server = "columbia.devel.redhat.com"
user_enabled = True
if server == "xmlrpc.rhn.redhat.com":
return False # emergency fail, don't bother RHN!
spacewalk_url = "http://%s/rpc/api" % server
client = xmlrpclib.Server(spacewalk_url, verbose=0)
if __looks_like_a_token(password) or username == 'taskomatic_user':
# The tokens
# are lowercase hex, but a password can also be lowercase hex,
# so we have to try it as both a token and then a password if
# we are unsure. We do it this way to be faster but also to avoid
# any login failed stuff in the logs that we don't need to send.
try:
valid = client.auth.checkAuthToken(username,password)
except:
# if the token is not a token this will raise an exception
# rather than return an integer.
valid = 0
# problem at this point, 0xdeadbeef is valid as a token but if that
# fails, it's also a valid password, so we must try auth system #2
if valid != 1:
# first API code returns 1 on success
# the second uses exceptions for login failed.
#
# so... token check failed, but maybe the username/password
# is just a simple username/pass!
if user_enabled == 0:
# this feature must be explicitly enabled.
return False
session = ""
try:
session = client.auth.login(username,password)
except:
# FIXME: should log exceptions that are not excepted
# as we could detect spacewalk java errors here that
# are not login related.
return False
# login success by username, role must also match
roles = client.user.listRoles(session, username)
if not ("config_admin" in roles or "org_admin" in roles):
return False
return True
else:
# it's an older version of spacewalk, so just try the username/pass
# OR: we know for sure it's not a token because it's not lowercase hex.
if user_enabled == 0:
# this feature must be explicitly enabled.
return False
session = ""
try:
session = client.auth.login(username,password)
except:
return False
# login success by username, role must also match
roles = client.user.listRoles(session, username)
if not ("config_admin" in roles or "org_admin" in roles):
return False
return True
if __name__ == "__main__":
print authenticate(None,"admin","redhat")
| gpl-2.0 |
snim2/rcsp | _pytest/hookspec.py | 2 | 8732 | """ hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
# -------------------------------------------------------------------------
# Initialization
# -------------------------------------------------------------------------
def pytest_addhooks(pluginmanager):
"""called at plugin load time to allow adding new hooks via a call to
pluginmanager.registerhooks(module)."""
def pytest_namespace():
"""return dict of name->object to be made globally available in
the py.test/pytest namespace. This hook is called before command
line options are parsed.
"""
def pytest_cmdline_parse(pluginmanager, args):
"""return initialized config object, parsing the specified args. """
pytest_cmdline_parse.firstresult = True
def pytest_cmdline_preparse(config, args):
"""modify command line arguments before option parsing. """
def pytest_addoption(parser):
"""add optparse-style options and ini-style config values via calls
to ``parser.addoption`` and ``parser.addini(...)``.
"""
def pytest_cmdline_main(config):
""" called for performing the main command line action. The default
implementation will invoke the configure hooks and runtest_mainloop. """
pytest_cmdline_main.firstresult = True
def pytest_configure(config):
""" called after command line options have been parsed.
and all plugins and initial conftest files been loaded.
"""
def pytest_unconfigure(config):
""" called before test process is exited. """
def pytest_runtestloop(session):
""" called for performing the main runtest loop
(after collection finished). """
pytest_runtestloop.firstresult = True
# -------------------------------------------------------------------------
# collection hooks
# -------------------------------------------------------------------------
def pytest_collection(session):
""" perform the collection protocol for the given session. """
pytest_collection.firstresult = True
def pytest_collection_modifyitems(session, config, items):
""" called after collection has been performed, may filter or re-order
the items in-place."""
def pytest_collection_finish(session):
""" called after collection has been performed and modified. """
def pytest_ignore_collect(path, config):
""" return True to prevent considering this path for collection.
This hook is consulted for all files and directories prior to calling
more specific hooks.
"""
pytest_ignore_collect.firstresult = True
def pytest_collect_directory(path, parent):
""" called before traversing a directory for collection files. """
pytest_collect_directory.firstresult = True
def pytest_collect_file(path, parent):
""" return collection Node or None for the given path. Any new node
needs to have the specified ``parent`` as a parent."""
# logging hooks for collection
def pytest_collectstart(collector):
""" collector starts collecting. """
def pytest_itemcollected(item):
""" we just collected a test item. """
def pytest_collectreport(report):
""" collector finished collecting. """
def pytest_deselected(items):
""" called for test items deselected by keyword. """
def pytest_make_collect_report(collector):
""" perform ``collector.collect()`` and return a CollectReport. """
pytest_make_collect_report.firstresult = True
# -------------------------------------------------------------------------
# Python test function related hooks
# -------------------------------------------------------------------------
def pytest_pycollect_makemodule(path, parent):
""" return a Module collector or None for the given path.
This hook will be called for each matching test module path.
The pytest_collect_file hook needs to be used if you want to
create test modules for files that do not match as a test module.
"""
pytest_pycollect_makemodule.firstresult = True
def pytest_pycollect_makeitem(collector, name, obj):
""" return custom item/collector for a python object in a module, or None. """
pytest_pycollect_makeitem.firstresult = True
def pytest_pyfunc_call(pyfuncitem):
""" call underlying test function. """
pytest_pyfunc_call.firstresult = True
def pytest_generate_tests(metafunc):
""" generate (multiple) parametrized calls to a test function."""
# -------------------------------------------------------------------------
# generic runtest related hooks
# -------------------------------------------------------------------------
def pytest_itemstart(item, node=None):
""" (deprecated, use pytest_runtest_logstart). """
def pytest_runtest_protocol(item, nextitem):
""" implements the runtest_setup/call/teardown protocol for
the given test item, including capturing exceptions and calling
reporting hooks.
:arg item: test item for which the runtest protocol is performed.
:arg nexitem: the scheduled-to-be-next test item (or None if this
is the end my friend). This argument is passed on to
:py:func:`pytest_runtest_teardown`.
:return boolean: True if no further hook implementations should be invoked.
"""
pytest_runtest_protocol.firstresult = True
def pytest_runtest_logstart(nodeid, location):
""" signal the start of running a single test item. """
def pytest_runtest_setup(item):
""" called before ``pytest_runtest_call(item)``. """
def pytest_runtest_call(item):
""" called to execute the test ``item``. """
def pytest_runtest_teardown(item, nextitem):
""" called after ``pytest_runtest_call``.
    :arg nextitem: the scheduled-to-be-next test item (None if no further
test item is scheduled). This argument can be used to
perform exact teardowns, i.e. calling just enough finalizers
so that nextitem only needs to call setup-functions.
"""
def pytest_runtest_makereport(item, call):
""" return a :py:class:`_pytest.runner.TestReport` object
for the given :py:class:`pytest.Item` and
:py:class:`_pytest.runner.CallInfo`.
"""
pytest_runtest_makereport.firstresult = True
def pytest_runtest_logreport(report):
""" process a test setup/call/teardown report relating to
the respective phase of executing a test. """
# -------------------------------------------------------------------------
# test session related hooks
# -------------------------------------------------------------------------
def pytest_sessionstart(session):
""" before session.main() is called. """
def pytest_sessionfinish(session, exitstatus):
""" whole test run finishes. """
# -------------------------------------------------------------------------
# hooks for customising the assert methods
# -------------------------------------------------------------------------
def pytest_assertrepr_compare(config, op, left, right):
"""return explanation for comparisons in failing assert expressions.
Return None for no custom explanation, otherwise return a list
of strings. The strings will be joined by newlines but any newlines
*in* a string will be escaped. Note that all but the first line will
    be indented slightly, the intention is for the first line to be a summary.
"""
# -------------------------------------------------------------------------
# hooks for influencing reporting (invoked from _pytest_terminal)
# -------------------------------------------------------------------------
def pytest_report_header(config):
""" return a string to be displayed as header info for terminal reporting."""
def pytest_report_teststatus(report):
""" return result-category, shortletter and verbose word for reporting."""
pytest_report_teststatus.firstresult = True
def pytest_terminal_summary(terminalreporter):
""" add additional section in terminal summary reporting. """
# -------------------------------------------------------------------------
# doctest hooks
# -------------------------------------------------------------------------
def pytest_doctest_prepare_content(content):
""" return processed content for a given doctest"""
pytest_doctest_prepare_content.firstresult = True
# -------------------------------------------------------------------------
# error handling and internal debugging hooks
# -------------------------------------------------------------------------
def pytest_plugin_registered(plugin, manager):
""" a new py lib plugin got registered. """
def pytest_plugin_unregistered(plugin):
""" a py lib plugin got unregistered. """
def pytest_internalerror(excrepr):
""" called for internal errors. """
def pytest_keyboard_interrupt(excinfo):
""" called for keyboard interrupt. """
| gpl-2.0 |
CiscoSystems/avos | openstack_dashboard/dashboards/admin/aggregates/urls.py | 46 | 1252 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.admin.aggregates \
import views
urlpatterns = patterns(
'openstack_dashboard.dashboards.admin.aggregates.views',
url(r'^$',
views.IndexView.as_view(), name='index'),
url(r'^create/$',
views.CreateView.as_view(), name='create'),
url(r'^(?P<id>[^/]+)/update/$',
views.UpdateView.as_view(), name='update'),
url(r'^(?P<id>[^/]+)/update_metadata/$',
views.UpdateMetadataView.as_view(), name='update_metadata'),
url(r'^(?P<id>[^/]+)/manage_hosts/$',
views.ManageHostsView.as_view(), name='manage_hosts'),
)
| apache-2.0 |
koobonil/Boss2D | Boss2D/addon/tensorflow-1.2.1_for_boss/tensorflow/contrib/keras/python/keras/regularizers.py | 58 | 2778 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras built-in regularizers.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras.utils.generic_utils import deserialize_keras_object
from tensorflow.contrib.keras.python.keras.utils.generic_utils import serialize_keras_object
class Regularizer(object):
"""Regularizer base class.
"""
def __call__(self, x):
return 0.
@classmethod
def from_config(cls, config):
return cls(**config)
class L1L2(Regularizer):
"""Regularizer for L1 and L2 regularization.
Arguments:
l1: Float; L1 regularization factor.
l2: Float; L2 regularization factor.
"""
def __init__(self, l1=0., l2=0.): # pylint: disable=redefined-outer-name
self.l1 = K.cast_to_floatx(l1)
self.l2 = K.cast_to_floatx(l2)
def __call__(self, x):
regularization = 0.
if self.l1:
regularization += K.sum(self.l1 * K.abs(x))
if self.l2:
regularization += K.sum(self.l2 * K.square(x))
return regularization
def get_config(self):
return {'l1': float(self.l1), 'l2': float(self.l2)}
# Aliases.
def l1(l=0.01):
return L1L2(l1=l)
def l2(l=0.01):
return L1L2(l2=l)
def l1_l2(l1=0.01, l2=0.01): # pylint: disable=redefined-outer-name
return L1L2(l1=l1, l2=l2)
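# Minimal usage sketch (illustrative): regularizers are built via the
# factories above and can be round-tripped through their config dicts:
#
#   reg = l1_l2(l1=0.01, l2=0.001)  # combined L1/L2 penalty
#   cfg = serialize(reg)            # -> {'class_name': 'L1L2', 'config': {...}}
#   same = deserialize(cfg)         # rebuilds an equivalent L1L2 instance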
def serialize(regularizer):
return serialize_keras_object(regularizer)
def deserialize(config, custom_objects=None):
return deserialize_keras_object(
config,
module_objects=globals(),
custom_objects=custom_objects,
printable_module_name='regularizer')
def get(identifier):
if identifier is None:
return None
if isinstance(identifier, dict):
return deserialize(identifier)
elif isinstance(identifier, six.string_types):
config = {'class_name': str(identifier), 'config': {}}
return deserialize(config)
elif callable(identifier):
return identifier
else:
raise ValueError('Could not interpret regularizer identifier:', identifier)
| mit |
sunqm/pyscf | pyscf/pbc/cc/test/test_eom_kuccsd_diag.py | 2 | 4547 | import unittest
import numpy as np
from pyscf.lib import finger
from pyscf.pbc import gto as pbcgto
from pyscf.pbc import scf as pbcscf
from pyscf.pbc import df as pbc_df
import pyscf.cc
import pyscf.pbc.cc as pbcc
import pyscf.pbc.tools.make_test_cell as make_test_cell
from pyscf.pbc.lib import kpts_helper
import pyscf.pbc.cc.kccsd_uhf as kccsd
import pyscf.pbc.cc.eom_kccsd_uhf as kccsd_uhf
cell = pbcgto.Cell()
cell.atom = '''
He 0.000000000000 0.000000000000 0.000000000000
He 1.685068664391 1.685068664391 1.685068664391
'''
cell.basis = [[0, (1., 1.)], [0, (.5, 1.)]]
cell.a = '''
0.000000000, 3.370137329, 3.370137329
3.370137329, 0.000000000, 3.370137329
3.370137329, 3.370137329, 0.000000000'''
cell.unit = 'B'
cell.build()
KGCCSD_TEST_THRESHOLD = 1e-8
def tearDownModule():
global cell, KGCCSD_TEST_THRESHOLD
del cell, KGCCSD_TEST_THRESHOLD
class KnownValues(unittest.TestCase):
def _test_ip_diag(self,kmf,kshift=0):
cc = kccsd.KUCCSD(kmf)
Ecc = cc.kernel()[0]
eom = kccsd_uhf.EOMIP(cc)
imds = eom.make_imds()
t1a,t1b = imds.t1
nkpts, nocc_a, nvir_a = t1a.shape
nkpts, nocc_b, nvir_b = t1b.shape
nocc = nocc_a + nocc_b
diag = kccsd_uhf.ipccsd_diag(eom,kshift,imds=imds)
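        # Build the full EOM-IP Hamiltonian column by column by applying the
        # matvec to unit vectors; its explicit diagonal is then compared
        # against the vectorized diag routine computed above.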
I = np.identity(diag.shape[0],dtype=complex)
indices = np.arange(diag.shape[0])
H = np.zeros((I.shape[0],len(indices)),dtype=complex)
for j,idx in enumerate(indices):
H[:,j] = kccsd_uhf.ipccsd_matvec(eom,I[:,idx],kshift,imds=imds)
diag_ref = np.zeros(len(indices),dtype=complex)
diag_out = np.zeros(len(indices),dtype=complex)
for j,idx in enumerate(indices):
diag_ref[j] = H[idx,j]
diag_out[j] = diag[idx]
diff = np.linalg.norm(diag_ref - diag_out)
self.assertTrue(abs(diff) < KGCCSD_TEST_THRESHOLD,"Difference in IP diag: {}".format(diff))
def _test_ea_diag(self,kmf,kshift=0):
cc = kccsd.KUCCSD(kmf)
Ecc = cc.kernel()[0]
eom = kccsd_uhf.EOMEA(cc)
imds = eom.make_imds()
t1a,t1b = imds.t1
nkpts, nocc_a, nvir_a = t1a.shape
nkpts, nocc_b, nvir_b = t1b.shape
nocc = nocc_a + nocc_b
nvir = nvir_a + nvir_b
diag = kccsd_uhf.eaccsd_diag(eom,kshift,imds=imds)
I = np.identity(diag.shape[0],dtype=complex)
indices = np.arange(diag.shape[0])
H = np.zeros((I.shape[0],len(indices)),dtype=complex)
for j,idx in enumerate(indices):
H[:,j] = kccsd_uhf.eaccsd_matvec(eom,I[:,idx],kshift,imds=imds)
diag_ref = np.zeros(len(indices),dtype=complex)
diag_out = np.zeros(len(indices),dtype=complex)
for j,idx in enumerate(indices):
diag_ref[j] = H[idx,j]
diag_out[j] = diag[idx]
diff = np.linalg.norm(diag_ref - diag_out)
self.assertTrue(abs(diff) < KGCCSD_TEST_THRESHOLD,"Difference in EA diag: {}".format(diff))
def test_he_112_ip_diag(self):
kpts = cell.make_kpts([1,1,2])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ip_diag(kmf)
def test_he_112_ip_diag_shift(self):
kpts = cell.make_kpts([1,1,2])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ip_diag(kmf,kshift=1)
def test_he_212_ip_diag_high_cost(self):
kpts = cell.make_kpts([2,1,2])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ip_diag(kmf)
def test_he_131_ip_diag(self):
kpts = cell.make_kpts([1,3,1])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ip_diag(kmf)
def test_he_112_ea_diag(self):
kpts = cell.make_kpts([1,1,2])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ea_diag(kmf)
def test_he_112_ea_diag_shift(self):
kpts = cell.make_kpts([1,1,2])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ea_diag(kmf,kshift=1)
def test_he_212_ea_diag_high_cost(self):
kpts = cell.make_kpts([2,1,2])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ea_diag(kmf)
def test_he_131_ea_diag(self):
kpts = cell.make_kpts([1,3,1])
kmf = pbcscf.KUHF(cell, kpts, exxdiv=None)
Escf = kmf.scf()
self._test_ea_diag(kmf)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
openhatch/oh-mainline | vendor/packages/twill/twill/other_packages/_mechanize_dist/_rfc3986.py | 20 | 7560 | """RFC 3986 URI parsing and relative reference resolution / absolutization.
(aka splitting and joining)
Copyright 2006 John J. Lee <[email protected]>
This code is free software; you can redistribute it and/or modify it under
the terms of the BSD or ZPL 2.1 licenses (see the file COPYING.txt
included with the distribution).
"""
# XXX Wow, this is ugly. Overly-direct translation of the RFC ATM.
import sys, re, posixpath, urllib
## def chr_range(a, b):
## return "".join(map(chr, range(ord(a), ord(b)+1)))
## UNRESERVED_URI_CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZ"
## "abcdefghijklmnopqrstuvwxyz"
## "0123456789"
## "-_.~")
## RESERVED_URI_CHARS = "!*'();:@&=+$,/?#[]"
## URI_CHARS = RESERVED_URI_CHARS+UNRESERVED_URI_CHARS+'%'
# this re matches any character that's not in URI_CHARS
BAD_URI_CHARS_RE = re.compile("[^A-Za-z0-9\-_.~!*'();:@&=+$,/?%#[\]]")
def clean_url(url, encoding):
# percent-encode illegal URI characters
    # Trying to come up with test cases for this gave me a headache; revisit
    # when we switch to unicode.
# Somebody else's comments (lost the attribution):
## - IE will return you the url in the encoding you send it
## - Mozilla/Firefox will send you latin-1 if there's no non latin-1
## characters in your link. It will send you utf-8 however if there are...
if type(url) == type(""):
url = url.decode(encoding, "replace")
url = url.strip()
# for second param to urllib.quote(), we want URI_CHARS, minus the
# 'always_safe' characters that urllib.quote() never percent-encodes
return urllib.quote(url.encode(encoding), "!*'();:@&=+$,/?%#[]~")
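# For instance (illustrative): surrounding whitespace is stripped and
# characters outside the allowed set are percent-encoded:
#
#   clean_url(" http://example.com/a b ", "utf-8")
#   -> 'http://example.com/a%20b'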
def is_clean_uri(uri):
"""
>>> is_clean_uri("ABC!")
True
>>> is_clean_uri(u"ABC!")
True
>>> is_clean_uri("ABC|")
False
>>> is_clean_uri(u"ABC|")
False
>>> is_clean_uri("http://example.com/0")
True
>>> is_clean_uri(u"http://example.com/0")
True
"""
    # note module re treats bytestrings as though they were decoded as latin-1
# so this function accepts both unicode and bytestrings
return not bool(BAD_URI_CHARS_RE.search(uri))
SPLIT_MATCH = re.compile(
r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?").match
def urlsplit(absolute_uri):
"""Return scheme, authority, path, query, fragment."""
match = SPLIT_MATCH(absolute_uri)
if match:
g = match.groups()
return g[1], g[3], g[4], g[6], g[8]
def urlunsplit(parts):
scheme, authority, path, query, fragment = parts
r = []
append = r.append
if scheme is not None:
append(scheme)
append(":")
if authority is not None:
append("//")
append(authority)
append(path)
if query is not None:
append("?")
append(query)
if fragment is not None:
append("#")
append(fragment)
return "".join(r)
def urljoin(base_uri, uri_reference):
return urlunsplit(urljoin_parts(urlsplit(base_uri),
urlsplit(uri_reference)))
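# Example (illustrative): relative references are resolved per RFC 3986:
#
#   urljoin("http://example.com/a/b", "../c")  -> 'http://example.com/c'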
# oops, this doesn't do the same thing as the literal translation
# from the RFC below
## def urljoin_parts(base_parts, reference_parts):
## scheme, authority, path, query, fragment = base_parts
## rscheme, rauthority, rpath, rquery, rfragment = reference_parts
## # compute target URI path
## if rpath == "":
## tpath = path
## else:
## tpath = rpath
## if not tpath.startswith("/"):
## tpath = merge(authority, path, tpath)
## tpath = posixpath.normpath(tpath)
## if rscheme is not None:
## return (rscheme, rauthority, tpath, rquery, rfragment)
## elif rauthority is not None:
## return (scheme, rauthority, tpath, rquery, rfragment)
## elif rpath == "":
## if rquery is not None:
## tquery = rquery
## else:
## tquery = query
## return (scheme, authority, tpath, tquery, rfragment)
## else:
## return (scheme, authority, tpath, rquery, rfragment)
def urljoin_parts(base_parts, reference_parts):
scheme, authority, path, query, fragment = base_parts
rscheme, rauthority, rpath, rquery, rfragment = reference_parts
if rscheme == scheme:
rscheme = None
if rscheme is not None:
tscheme, tauthority, tpath, tquery = (
rscheme, rauthority, remove_dot_segments(rpath), rquery)
else:
if rauthority is not None:
tauthority, tpath, tquery = (
rauthority, remove_dot_segments(rpath), rquery)
else:
if rpath == "":
tpath = path
if rquery is not None:
tquery = rquery
else:
tquery = query
else:
if rpath.startswith("/"):
tpath = remove_dot_segments(rpath)
else:
tpath = merge(authority, path, rpath)
tpath = remove_dot_segments(tpath)
tquery = rquery
tauthority = authority
tscheme = scheme
tfragment = rfragment
return (tscheme, tauthority, tpath, tquery, tfragment)
# um, something *vaguely* like this is what I want, but I have to generate
# lots of test cases first, if only to understand what it is that
# remove_dot_segments really does...
## def remove_dot_segments(path):
## if path == '':
## return ''
## comps = path.split('/')
## new_comps = []
## for comp in comps:
## if comp in ['.', '']:
## if not new_comps or new_comps[-1]:
## new_comps.append('')
## continue
## if comp != '..':
## new_comps.append(comp)
## elif new_comps:
## new_comps.pop()
## return '/'.join(new_comps)
def remove_dot_segments(path):
r = []
while path:
# A
if path.startswith("../"):
path = path[3:]
continue
if path.startswith("./"):
path = path[2:]
continue
# B
if path.startswith("/./"):
path = path[2:]
continue
if path == "/.":
path = "/"
continue
# C
if path.startswith("/../"):
path = path[3:]
if r:
r.pop()
continue
if path == "/..":
path = "/"
if r:
r.pop()
continue
# D
if path == ".":
path = path[1:]
continue
if path == "..":
path = path[2:]
continue
# E
start = 0
if path.startswith("/"):
start = 1
ii = path.find("/", start)
if ii < 0:
ii = None
r.append(path[:ii])
if ii is None:
break
path = path[ii:]
return "".join(r)
def merge(base_authority, base_path, ref_path):
# XXXX Oddly, the sample Perl implementation of this by Roy Fielding
# doesn't even take base_authority as a parameter, despite the wording in
# the RFC suggesting otherwise. Perhaps I'm missing some obvious identity.
#if base_authority is not None and base_path == "":
if base_path == "":
return "/" + ref_path
ii = base_path.rfind("/")
if ii >= 0:
return base_path[:ii+1] + ref_path
return ref_path
if __name__ == "__main__":
import doctest
doctest.testmod()
| agpl-3.0 |
mandeepdhami/nova | nova/volume/encryptors/nop.py | 61 | 1348 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from nova.volume.encryptors import base
LOG = logging.getLogger(__name__)
class NoOpEncryptor(base.VolumeEncryptor):
"""A VolumeEncryptor that does nothing.
This class exists solely to wrap regular (i.e., unencrypted) volumes so
that they do not require special handling with respect to an encrypted
volume. This implementation performs no action when a volume is attached
or detached.
"""
def __init__(self, connection_info, **kwargs):
super(NoOpEncryptor, self).__init__(connection_info, **kwargs)
def attach_volume(self, context):
pass
def detach_volume(self):
pass
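# Illustrative only: a NoOpEncryptor can stand in wherever an encryptor is
# expected (the connection_info contents below are hypothetical):
#
#   encryptor = NoOpEncryptor(connection_info={'data': {}})
#   encryptor.attach_volume(context=None)  # no-op
#   encryptor.detach_volume()              # no-op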
| apache-2.0 |
jenalgit/django | tests/indexes/tests.py | 321 | 3037 | from unittest import skipUnless
from django.db import connection
from django.test import TestCase
from .models import Article, ArticleTranslation, IndexTogetherSingleList
class SchemaIndexesTests(TestCase):
"""
Test index handling by the db.backends.schema infrastructure.
"""
def test_index_name_hash(self):
"""
Index names should be deterministic.
"""
with connection.schema_editor() as editor:
index_name = editor._create_index_name(
model=Article,
column_names=("c1", "c2", "c3"),
suffix="123",
)
self.assertEqual(index_name, "indexes_article_c1_7ce4cc86123")
def test_index_together(self):
editor = connection.schema_editor()
index_sql = editor._model_indexes_sql(Article)
self.assertEqual(len(index_sql), 1)
# Ensure the index name is properly quoted
self.assertIn(
connection.ops.quote_name(
editor._create_index_name(Article, ['headline', 'pub_date'], suffix='_idx')
),
index_sql[0]
)
def test_index_together_single_list(self):
# Test for using index_together with a single list (#22172)
index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList)
self.assertEqual(len(index_sql), 1)
@skipUnless(connection.vendor == 'postgresql',
"This is a postgresql-specific issue")
def test_postgresql_text_indexes(self):
"""Test creation of PostgreSQL-specific text indexes (#12234)"""
from .models import IndexedArticle
index_sql = connection.schema_editor()._model_indexes_sql(IndexedArticle)
self.assertEqual(len(index_sql), 5)
self.assertIn('("headline" varchar_pattern_ops)', index_sql[2])
self.assertIn('("body" text_pattern_ops)', index_sql[3])
# unique=True and db_index=True should only create the varchar-specific
# index (#19441).
self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
@skipUnless(connection.vendor == 'postgresql',
"This is a postgresql-specific issue")
def test_postgresql_virtual_relation_indexes(self):
"""Test indexes are not created for related objects"""
index_sql = connection.schema_editor()._model_indexes_sql(Article)
self.assertEqual(len(index_sql), 1)
@skipUnless(connection.vendor == 'mysql', "This is a mysql-specific issue")
def test_no_index_for_foreignkey(self):
"""
MySQL on InnoDB already creates indexes automatically for foreign keys.
(#14180).
"""
storage = connection.introspection.get_storage_engine(
connection.cursor(), ArticleTranslation._meta.db_table
)
if storage != "InnoDB":
            self.skipTest("This test only applies to the InnoDB storage engine")
index_sql = connection.schema_editor()._model_indexes_sql(ArticleTranslation)
self.assertEqual(index_sql, [])
| bsd-3-clause |
midma101/m0du1ar | .venv/lib/python2.7/site-packages/sqlalchemy/dialects/maxdb/base.py | 17 | 42818 | # maxdb/base.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the MaxDB database.
.. note::
The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**,
pending development efforts to bring it up-to-date.
Overview
--------
The ``maxdb`` dialect is **experimental** and has only been tested on 7.6.03.007
and 7.6.00.037. Of these, **only 7.6.03.007 will work** with SQLAlchemy's ORM.
The earlier version has severe ``LEFT JOIN`` limitations and will return
incorrect results from even very simple ORM queries.
Only the native Python DB-API is currently supported. ODBC driver support
is a future enhancement.
Connecting
----------
The username is case-sensitive. If you usually connect to the
database with sqlcli and other tools in lower case, you likely need to
use upper case for DB-API.
Implementation Notes
--------------------
With the 7.6.00.37 driver and Python 2.5, it seems that all DB-API
generated exceptions are broken and can cause Python to crash.
For 'somecol.in_([])' to work, the IN operator's generation must be changed
to cast 'NULL' to a numeric, i.e. NUM(NULL). The DB-API doesn't accept a
bind parameter there, so that particular generation must inline the NULL value,
which depends on [ticket:807].
The DB-API is very picky about where bind params may be used in queries.
Bind params for some functions (e.g. MOD) need type information supplied.
The dialect does not yet do this automatically.
Max will occasionally throw up 'bad sql, compile again' exceptions for
perfectly valid SQL. The dialect does not currently handle these, more
research is needed.
MaxDB 7.5 and Sap DB <= 7.4 reportedly do not support schemas. A very
slightly different version of this dialect would be required to support
those versions, and can easily be added if there is demand. Some other
required components such as a Max-aware 'old oracle style' join compiler
(thetas with (+) outer indicators) are already done and available for
integration- email the devel list if you're interested in working on
this.
Versions tested: 7.6.03.07 and 7.6.00.37, native Python DB-API
* MaxDB has severe limitations on OUTER JOINs, which are essential to ORM
eager loading. And rather than raise an error if a SELECT can't be serviced,
the database simply returns incorrect results.
* Version 7.6.03.07 seems to JOIN properly, however the docs do not show the
OUTER restrictions being lifted (as of this writing), and no changelog is
available to confirm either. If you are using a different server version and
your tasks require the ORM or any semi-advanced SQL through the SQL layer,
running the SQLAlchemy test suite against your database is HIGHLY
recommended before you begin.
* Version 7.6.00.37 is LHS/RHS sensitive in `FROM lhs LEFT OUTER JOIN rhs ON
lhs.col=rhs.col` vs `rhs.col=lhs.col`!
* Version 7.6.00.37 is confused by `SELECT DISTINCT col as alias FROM t ORDER
BY col` - these aliased, DISTINCT, ordered queries need to be re-written to
order by the alias name.
* Version 7.6.x supports creating a SAVEPOINT but not its RELEASE.
* MaxDB supports autoincrement-style columns (DEFAULT SERIAL) and independent
sequences. When including a DEFAULT SERIAL column in an insert, 0 needs to
be inserted rather than NULL to generate a value.
* MaxDB supports ANSI and "old Oracle style" theta joins with (+) outer join
indicators.
* The SQLAlchemy dialect is schema-aware and probably won't function correctly
on server versions (pre-7.6?). Support for schema-less server versions could
be added if there's call.
* ORDER BY is not supported in subqueries. LIMIT is not supported in
subqueries. In 7.6.00.37, TOP does work in subqueries, but without limit not
so useful. OFFSET does not work in 7.6 despite being in the docs. Row number
tricks in WHERE via ROWNO may be possible but it only seems to allow
less-than comparison!
* Version 7.6.03.07 can't LIMIT if a derived table is in FROM: `SELECT * FROM
(SELECT * FROM a) LIMIT 2`
* MaxDB does not support sql's CAST and can only usefully cast two types.
There isn't much implicit type conversion, so be precise when creating
`PassiveDefaults` in DDL generation: `'3'` and `3` aren't the same.
sapdb.dbapi
^^^^^^^^^^^
* As of 2007-10-22 the Python 2.4 and 2.5 compatible versions of the DB-API
are no longer available. A forum posting at SAP states that the Python
driver will be available again "in the future". The last release from MySQL
AB works if you can find it.
* sequence.NEXTVAL skips every other value!
* No rowcount for executemany()
* If an INSERT into a table with a DEFAULT SERIAL column inserts the results
of a function `INSERT INTO t VALUES (LENGTH('foo'))`, the cursor won't have
the serial id. It needs to be manually yanked from tablename.CURRVAL.
* Super-duper picky about where bind params can be placed. Not smart about
converting Python types for some functions, such as `MOD(5, ?)`.
* LONG (text, binary) values in result sets are read-once. The dialect uses a
caching RowProxy when these types are present.
* Connection objects seem like they want to be either `close()`d or garbage
collected, but not both. There's a warning issued but it seems harmless.
"""
import datetime, itertools, re
from sqlalchemy import exc, schema, sql, util, processors
from sqlalchemy.sql import operators as sql_operators, expression as sql_expr
from sqlalchemy.sql import compiler, visitors
from sqlalchemy.engine import base as engine_base, default, reflection
from sqlalchemy import types as sqltypes
class _StringType(sqltypes.String):
_type = None
def __init__(self, length=None, encoding=None, **kw):
super(_StringType, self).__init__(length=length, **kw)
self.encoding = encoding
def bind_processor(self, dialect):
if self.encoding == 'unicode':
return None
else:
def process(value):
if isinstance(value, unicode):
return value.encode(dialect.encoding)
else:
return value
return process
def result_processor(self, dialect, coltype):
#XXX: this code is probably very slow and one should try (if at all
# possible) to determine the correct code path on a per-connection
# basis (ie, here in result_processor, instead of inside the processor
# function itself) and probably also use a few generic
# processors, or possibly per query (though there is no mechanism
# for that yet).
def process(value):
while True:
if value is None:
return None
elif isinstance(value, unicode):
return value
elif isinstance(value, str):
if self.convert_unicode or dialect.convert_unicode:
return value.decode(dialect.encoding)
else:
return value
elif hasattr(value, 'read'):
# some sort of LONG, snarf and retry
value = value.read(value.remainingLength())
continue
else:
# unexpected type, return as-is
return value
return process
class MaxString(_StringType):
_type = 'VARCHAR'
class MaxUnicode(_StringType):
_type = 'VARCHAR'
def __init__(self, length=None, **kw):
kw['encoding'] = 'unicode'
super(MaxUnicode, self).__init__(length=length, **kw)
class MaxChar(_StringType):
_type = 'CHAR'
class MaxText(_StringType):
_type = 'LONG'
def __init__(self, length=None, **kw):
super(MaxText, self).__init__(length, **kw)
def get_col_spec(self):
spec = 'LONG'
if self.encoding is not None:
spec = ' '.join((spec, self.encoding))
elif self.convert_unicode:
spec = ' '.join((spec, 'UNICODE'))
return spec
class MaxNumeric(sqltypes.Numeric):
"""The FIXED (also NUMERIC, DECIMAL) data type."""
def __init__(self, precision=None, scale=None, **kw):
kw.setdefault('asdecimal', True)
super(MaxNumeric, self).__init__(scale=scale, precision=precision,
**kw)
def bind_processor(self, dialect):
return None
class MaxTimestamp(sqltypes.DateTime):
def bind_processor(self, dialect):
def process(value):
if value is None:
return None
elif isinstance(value, basestring):
return value
elif dialect.datetimeformat == 'internal':
ms = getattr(value, 'microsecond', 0)
return value.strftime("%Y%m%d%H%M%S" + ("%06u" % ms))
elif dialect.datetimeformat == 'iso':
ms = getattr(value, 'microsecond', 0)
return value.strftime("%Y-%m-%d %H:%M:%S." + ("%06u" % ms))
else:
raise exc.InvalidRequestError(
"datetimeformat '%s' is not supported." % (
dialect.datetimeformat,))
return process
def result_processor(self, dialect, coltype):
if dialect.datetimeformat == 'internal':
def process(value):
if value is None:
return None
else:
return datetime.datetime(
*[int(v)
for v in (value[0:4], value[4:6], value[6:8],
value[8:10], value[10:12], value[12:14],
value[14:])])
elif dialect.datetimeformat == 'iso':
def process(value):
if value is None:
return None
else:
return datetime.datetime(
*[int(v)
for v in (value[0:4], value[5:7], value[8:10],
value[11:13], value[14:16], value[17:19],
value[20:])])
else:
raise exc.InvalidRequestError(
"datetimeformat '%s' is not supported." %
dialect.datetimeformat)
return process
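# For reference (illustrative): with datetimeformat='internal' the value
# '20070101123055000000' decodes to datetime(2007, 1, 1, 12, 30, 55), and
# the bind processor emits the same 20-digit YYYYMMDDHHMMSSffffff form.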
class MaxDate(sqltypes.Date):
def bind_processor(self, dialect):
def process(value):
if value is None:
return None
elif isinstance(value, basestring):
return value
elif dialect.datetimeformat == 'internal':
return value.strftime("%Y%m%d")
elif dialect.datetimeformat == 'iso':
return value.strftime("%Y-%m-%d")
else:
raise exc.InvalidRequestError(
"datetimeformat '%s' is not supported." % (
dialect.datetimeformat,))
return process
def result_processor(self, dialect, coltype):
if dialect.datetimeformat == 'internal':
def process(value):
if value is None:
return None
else:
return datetime.date(int(value[0:4]), int(value[4:6]),
int(value[6:8]))
elif dialect.datetimeformat == 'iso':
def process(value):
if value is None:
return None
else:
return datetime.date(int(value[0:4]), int(value[5:7]),
int(value[8:10]))
else:
raise exc.InvalidRequestError(
"datetimeformat '%s' is not supported." %
dialect.datetimeformat)
return process
class MaxTime(sqltypes.Time):
def bind_processor(self, dialect):
def process(value):
if value is None:
return None
elif isinstance(value, basestring):
return value
elif dialect.datetimeformat == 'internal':
return value.strftime("%H%M%S")
elif dialect.datetimeformat == 'iso':
return value.strftime("%H-%M-%S")
else:
raise exc.InvalidRequestError(
"datetimeformat '%s' is not supported." % (
dialect.datetimeformat,))
return process
def result_processor(self, dialect, coltype):
if dialect.datetimeformat == 'internal':
def process(value):
if value is None:
return None
else:
                    # "%H%M%S" layout, matching the bind format above
                    return datetime.time(int(value[0:2]), int(value[2:4]),
                                         int(value[4:6]))
elif dialect.datetimeformat == 'iso':
def process(value):
if value is None:
return None
else:
                    # "%H-%M-%S" layout, matching the bind format above
                    return datetime.time(int(value[0:2]), int(value[3:5]),
                                         int(value[6:8]))
else:
raise exc.InvalidRequestError(
"datetimeformat '%s' is not supported." %
dialect.datetimeformat)
return process
class MaxBlob(sqltypes.LargeBinary):
def bind_processor(self, dialect):
return processors.to_str
def result_processor(self, dialect, coltype):
def process(value):
if value is None:
return None
else:
return value.read(value.remainingLength())
return process
class MaxDBTypeCompiler(compiler.GenericTypeCompiler):
def _string_spec(self, string_spec, type_):
if type_.length is None:
spec = 'LONG'
else:
spec = '%s(%s)' % (string_spec, type_.length)
if getattr(type_, 'encoding'):
spec = ' '.join([spec, getattr(type_, 'encoding').upper()])
return spec
def visit_text(self, type_):
spec = 'LONG'
if getattr(type_, 'encoding', None):
spec = ' '.join((spec, type_.encoding))
elif type_.convert_unicode:
spec = ' '.join((spec, 'UNICODE'))
return spec
def visit_char(self, type_):
return self._string_spec("CHAR", type_)
def visit_string(self, type_):
return self._string_spec("VARCHAR", type_)
def visit_large_binary(self, type_):
return "LONG BYTE"
def visit_numeric(self, type_):
if type_.scale and type_.precision:
return 'FIXED(%s, %s)' % (type_.precision, type_.scale)
elif type_.precision:
return 'FIXED(%s)' % type_.precision
else:
return 'INTEGER'
def visit_BOOLEAN(self, type_):
return "BOOLEAN"
colspecs = {
sqltypes.Numeric: MaxNumeric,
sqltypes.DateTime: MaxTimestamp,
sqltypes.Date: MaxDate,
sqltypes.Time: MaxTime,
sqltypes.String: MaxString,
    sqltypes.Unicode: MaxUnicode,
    sqltypes.LargeBinary: MaxBlob,
    sqltypes.Text: MaxText,
    sqltypes.CHAR: MaxChar,
    sqltypes.TIMESTAMP: MaxTimestamp,
    sqltypes.BLOB: MaxBlob,
}
ischema_names = {
'boolean': sqltypes.BOOLEAN,
'char': sqltypes.CHAR,
'character': sqltypes.CHAR,
'date': sqltypes.DATE,
'fixed': sqltypes.Numeric,
'float': sqltypes.FLOAT,
'int': sqltypes.INT,
'integer': sqltypes.INT,
'long binary': sqltypes.BLOB,
'long unicode': sqltypes.Text,
    'long': sqltypes.Text,
'smallint': sqltypes.SmallInteger,
'time': sqltypes.Time,
'timestamp': sqltypes.TIMESTAMP,
'varchar': sqltypes.VARCHAR,
}
# TODO: migrate this to sapdb.py
class MaxDBExecutionContext(default.DefaultExecutionContext):
def post_exec(self):
# DB-API bug: if there were any functions as values,
# then do another select and pull CURRVAL from the
# autoincrement column's implicit sequence... ugh
if self.compiled.isinsert and not self.executemany:
table = self.compiled.statement.table
index, serial_col = _autoserial_column(table)
if serial_col and (not self.compiled._safeserial or
not(self._last_inserted_ids) or
self._last_inserted_ids[index] in (None, 0)):
if table.schema:
sql = "SELECT %s.CURRVAL FROM DUAL" % (
self.compiled.preparer.format_table(table))
else:
sql = "SELECT CURRENT_SCHEMA.%s.CURRVAL FROM DUAL" % (
self.compiled.preparer.format_table(table))
rs = self.cursor.execute(sql)
id = rs.fetchone()[0]
if not self._last_inserted_ids:
# This shouldn't ever be > 1? Right?
self._last_inserted_ids = \
[None] * len(table.primary_key.columns)
self._last_inserted_ids[index] = id
super(MaxDBExecutionContext, self).post_exec()
def get_result_proxy(self):
if self.cursor.description is not None:
for column in self.cursor.description:
if column[1] in ('Long Binary', 'Long', 'Long Unicode'):
return MaxDBResultProxy(self)
return engine_base.ResultProxy(self)
@property
def rowcount(self):
if hasattr(self, '_rowcount'):
return self._rowcount
else:
return self.cursor.rowcount
def fire_sequence(self, seq):
if seq.optional:
return None
return self._execute_scalar("SELECT %s.NEXTVAL FROM DUAL" % (
self.dialect.identifier_preparer.format_sequence(seq)))
class MaxDBCachedColumnRow(engine_base.RowProxy):
"""A RowProxy that only runs result_processors once per column."""
def __init__(self, parent, row):
super(MaxDBCachedColumnRow, self).__init__(parent, row)
self.columns = {}
self._row = row
self._parent = parent
def _get_col(self, key):
if key not in self.columns:
self.columns[key] = self._parent._get_col(self._row, key)
return self.columns[key]
def __iter__(self):
for i in xrange(len(self._row)):
yield self._get_col(i)
def __repr__(self):
return repr(list(self))
def __eq__(self, other):
return ((other is self) or
(other == tuple([self._get_col(key)
for key in xrange(len(self._row))])))
def __getitem__(self, key):
if isinstance(key, slice):
indices = key.indices(len(self._row))
return tuple([self._get_col(i) for i in xrange(*indices)])
else:
return self._get_col(key)
def __getattr__(self, name):
try:
return self._get_col(name)
except KeyError:
raise AttributeError(name)
class MaxDBResultProxy(engine_base.ResultProxy):
_process_row = MaxDBCachedColumnRow
class MaxDBCompiler(compiler.SQLCompiler):
function_conversion = {
'CURRENT_DATE': 'DATE',
'CURRENT_TIME': 'TIME',
'CURRENT_TIMESTAMP': 'TIMESTAMP',
}
# These functions must be written without parens when called with no
# parameters. e.g. 'SELECT DATE FROM DUAL' not 'SELECT DATE() FROM DUAL'
bare_functions = set([
'CURRENT_SCHEMA', 'DATE', 'FALSE', 'SYSDBA', 'TIME', 'TIMESTAMP',
'TIMEZONE', 'TRANSACTION', 'TRUE', 'USER', 'UID', 'USERGROUP',
'UTCDATE', 'UTCDIFF'])
def visit_mod(self, binary, **kw):
return "mod(%s, %s)" % \
(self.process(binary.left), self.process(binary.right))
def default_from(self):
return ' FROM DUAL'
def for_update_clause(self, select):
clause = select.for_update
if clause is True:
return " WITH LOCK EXCLUSIVE"
elif clause is None:
return ""
elif clause == "read":
return " WITH LOCK"
elif clause == "ignore":
return " WITH LOCK (IGNORE) EXCLUSIVE"
elif clause == "nowait":
return " WITH LOCK (NOWAIT) EXCLUSIVE"
elif isinstance(clause, basestring):
return " WITH LOCK %s" % clause.upper()
elif not clause:
return ""
else:
return " WITH LOCK EXCLUSIVE"
def function_argspec(self, fn, **kw):
if fn.name.upper() in self.bare_functions:
return ""
elif len(fn.clauses) > 0:
return compiler.SQLCompiler.function_argspec(self, fn, **kw)
else:
return ""
def visit_function(self, fn, **kw):
transform = self.function_conversion.get(fn.name.upper(), None)
if transform:
fn = fn._clone()
fn.name = transform
return super(MaxDBCompiler, self).visit_function(fn, **kw)
def visit_cast(self, cast, **kwargs):
# MaxDB only supports casts * to NUMERIC, * to VARCHAR or
# date/time to VARCHAR. Casts of LONGs will fail.
if isinstance(cast.type, (sqltypes.Integer, sqltypes.Numeric)):
return "NUM(%s)" % self.process(cast.clause)
elif isinstance(cast.type, sqltypes.String):
return "CHR(%s)" % self.process(cast.clause)
else:
return self.process(cast.clause)
def visit_sequence(self, sequence):
if sequence.optional:
return None
else:
return (
self.dialect.identifier_preparer.format_sequence(sequence) +
".NEXTVAL")
class ColumnSnagger(visitors.ClauseVisitor):
def __init__(self):
self.count = 0
self.column = None
def visit_column(self, column):
self.column = column
self.count += 1
def _find_labeled_columns(self, columns, use_labels=False):
labels = {}
for column in columns:
if isinstance(column, basestring):
continue
snagger = self.ColumnSnagger()
snagger.traverse(column)
if snagger.count == 1:
if isinstance(column, sql_expr._Label):
labels[unicode(snagger.column)] = column.name
elif use_labels:
labels[unicode(snagger.column)] = column._label
return labels
def order_by_clause(self, select, **kw):
order_by = self.process(select._order_by_clause, **kw)
# ORDER BY clauses in DISTINCT queries must reference aliased
# inner columns by alias name, not true column name.
if order_by and getattr(select, '_distinct', False):
labels = self._find_labeled_columns(select.inner_columns,
select.use_labels)
if labels:
for needs_alias in labels.keys():
r = re.compile(r'(^| )(%s)(,| |$)' %
re.escape(needs_alias))
order_by = r.sub((r'\1%s\3' % labels[needs_alias]),
order_by)
# No ORDER BY in subqueries.
if order_by:
if self.is_subquery():
# It's safe to simply drop the ORDER BY if there is no
# LIMIT. Right? Other dialects seem to get away with
# dropping order.
if select._limit:
raise exc.CompileError(
"MaxDB does not support ORDER BY in subqueries")
else:
return ""
return " ORDER BY " + order_by
else:
return ""
def get_select_precolumns(self, select):
# Convert a subquery's LIMIT to TOP
sql = select._distinct and 'DISTINCT ' or ''
if self.is_subquery() and select._limit:
if select._offset:
raise exc.InvalidRequestError(
'MaxDB does not support LIMIT with an offset.')
sql += 'TOP %s ' % select._limit
return sql
def limit_clause(self, select):
# The docs say offsets are supported with LIMIT. But they're not.
# TODO: maybe emulate by adding a ROWNO/ROWNUM predicate?
# TODO: does MaxDB support bind params for LIMIT / TOP ?
if self.is_subquery():
# sub queries need TOP
return ''
elif select._offset:
raise exc.InvalidRequestError(
'MaxDB does not support LIMIT with an offset.')
else:
return ' \n LIMIT %s' % (select._limit,)
def visit_insert(self, insert):
self.isinsert = True
self._safeserial = True
colparams = self._get_colparams(insert)
for value in (insert.parameters or {}).itervalues():
if isinstance(value, sql_expr.Function):
self._safeserial = False
break
return ''.join(('INSERT INTO ',
self.preparer.format_table(insert.table),
' (',
', '.join([self.preparer.format_column(c[0])
for c in colparams]),
') VALUES (',
', '.join([c[1] for c in colparams]),
')'))
class MaxDBIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([
'abs', 'absolute', 'acos', 'adddate', 'addtime', 'all', 'alpha',
'alter', 'any', 'ascii', 'asin', 'atan', 'atan2', 'avg', 'binary',
'bit', 'boolean', 'byte', 'case', 'ceil', 'ceiling', 'char',
'character', 'check', 'chr', 'column', 'concat', 'constraint', 'cos',
'cosh', 'cot', 'count', 'cross', 'curdate', 'current', 'curtime',
'database', 'date', 'datediff', 'day', 'dayname', 'dayofmonth',
'dayofweek', 'dayofyear', 'dec', 'decimal', 'decode', 'default',
'degrees', 'delete', 'digits', 'distinct', 'double', 'except',
'exists', 'exp', 'expand', 'first', 'fixed', 'float', 'floor', 'for',
'from', 'full', 'get_objectname', 'get_schema', 'graphic', 'greatest',
'group', 'having', 'hex', 'hextoraw', 'hour', 'ifnull', 'ignore',
'index', 'initcap', 'inner', 'insert', 'int', 'integer', 'internal',
'intersect', 'into', 'join', 'key', 'last', 'lcase', 'least', 'left',
'length', 'lfill', 'list', 'ln', 'locate', 'log', 'log10', 'long',
'longfile', 'lower', 'lpad', 'ltrim', 'makedate', 'maketime',
'mapchar', 'max', 'mbcs', 'microsecond', 'min', 'minute', 'mod',
'month', 'monthname', 'natural', 'nchar', 'next', 'no', 'noround',
'not', 'now', 'null', 'num', 'numeric', 'object', 'of', 'on',
'order', 'packed', 'pi', 'power', 'prev', 'primary', 'radians',
'real', 'reject', 'relative', 'replace', 'rfill', 'right', 'round',
'rowid', 'rowno', 'rpad', 'rtrim', 'second', 'select', 'selupd',
'serial', 'set', 'show', 'sign', 'sin', 'sinh', 'smallint', 'some',
'soundex', 'space', 'sqrt', 'stamp', 'statistics', 'stddev',
'subdate', 'substr', 'substring', 'subtime', 'sum', 'sysdba',
'table', 'tan', 'tanh', 'time', 'timediff', 'timestamp', 'timezone',
'to', 'toidentifier', 'transaction', 'translate', 'trim', 'trunc',
'truncate', 'ucase', 'uid', 'unicode', 'union', 'update', 'upper',
'user', 'usergroup', 'using', 'utcdate', 'utcdiff', 'value', 'values',
'varchar', 'vargraphic', 'variance', 'week', 'weekofyear', 'when',
'where', 'with', 'year', 'zoned' ])
def _normalize_name(self, name):
if name is None:
return None
if name.isupper():
lc_name = name.lower()
if not self._requires_quotes(lc_name):
return lc_name
return name
def _denormalize_name(self, name):
if name is None:
return None
elif (name.islower() and
not self._requires_quotes(name)):
return name.upper()
else:
return name
def _maybe_quote_identifier(self, name):
if self._requires_quotes(name):
return self.quote_identifier(name)
else:
return name
class MaxDBDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kw):
colspec = [self.preparer.format_column(column),
self.dialect.type_compiler.process(column.type)]
if not column.nullable:
colspec.append('NOT NULL')
default = column.default
default_str = self.get_column_default_string(column)
# No DDL default for columns specified with non-optional sequence-
# this defaulting behavior is entirely client-side. (And as a
# consequence, non-reflectable.)
if (default and isinstance(default, schema.Sequence) and
not default.optional):
pass
# Regular default
elif default_str is not None:
colspec.append('DEFAULT %s' % default_str)
# Assign DEFAULT SERIAL heuristically
elif column.primary_key and column.autoincrement:
# For SERIAL on a non-primary key member, use
# DefaultClause(text('SERIAL'))
try:
first = [c for c in column.table.primary_key.columns
if (c.autoincrement and
(isinstance(c.type, sqltypes.Integer) or
(isinstance(c.type, MaxNumeric) and
c.type.precision)) and
not c.foreign_keys)].pop(0)
if column is first:
colspec.append('DEFAULT SERIAL')
except IndexError:
pass
return ' '.join(colspec)
def get_column_default_string(self, column):
if isinstance(column.server_default, schema.DefaultClause):
            if isinstance(column.server_default.arg, basestring):
                if isinstance(column.type, sqltypes.Integer):
                    return str(column.server_default.arg)
                else:
                    return "'%s'" % column.server_default.arg
            else:
                return unicode(self._compile(column.server_default.arg, None))
else:
return None
def visit_create_sequence(self, create):
"""Creates a SEQUENCE.
TODO: move to module doc?
start
With an integer value, set the START WITH option.
increment
An integer value to increment by. Default is the database default.
maxdb_minvalue
maxdb_maxvalue
With an integer value, sets the corresponding sequence option.
maxdb_no_minvalue
maxdb_no_maxvalue
Defaults to False. If true, sets the corresponding sequence option.
maxdb_cycle
Defaults to False. If true, sets the CYCLE option.
maxdb_cache
With an integer value, sets the CACHE option.
maxdb_no_cache
Defaults to False. If true, sets NOCACHE.
"""
sequence = create.element
if (not sequence.optional and
(not self.checkfirst or
not self.dialect.has_sequence(self.connection, sequence.name))):
ddl = ['CREATE SEQUENCE',
self.preparer.format_sequence(sequence)]
            if sequence.increment is None:
                sequence.increment = 1
            if sequence.increment is not None:
ddl.extend(('INCREMENT BY', str(sequence.increment)))
if sequence.start is not None:
ddl.extend(('START WITH', str(sequence.start)))
opts = dict([(pair[0][6:].lower(), pair[1])
for pair in sequence.kwargs.items()
if pair[0].startswith('maxdb_')])
if 'maxvalue' in opts:
ddl.extend(('MAXVALUE', str(opts['maxvalue'])))
elif opts.get('no_maxvalue', False):
ddl.append('NOMAXVALUE')
if 'minvalue' in opts:
ddl.extend(('MINVALUE', str(opts['minvalue'])))
elif opts.get('no_minvalue', False):
ddl.append('NOMINVALUE')
if opts.get('cycle', False):
ddl.append('CYCLE')
if 'cache' in opts:
ddl.extend(('CACHE', str(opts['cache'])))
elif opts.get('no_cache', False):
ddl.append('NOCACHE')
return ' '.join(ddl)
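# A hedged usage sketch for the maxdb_* options documented above; the
# sequence name and option values are illustrative only:
#
#     from sqlalchemy import Sequence
#
#     seq = Sequence('order_id_seq', start=1000, maxdb_cache=20,
#                    maxdb_cycle=True)
#     # visit_create_sequence() renders this roughly as:
#     #   CREATE SEQUENCE order_id_seq INCREMENT BY 1 START WITH 1000
#     #   CYCLE CACHE 20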
class MaxDBDialect(default.DefaultDialect):
name = 'maxdb'
supports_alter = True
supports_unicode_statements = True
max_identifier_length = 32
supports_sane_rowcount = True
supports_sane_multi_rowcount = False
preparer = MaxDBIdentifierPreparer
statement_compiler = MaxDBCompiler
ddl_compiler = MaxDBDDLCompiler
execution_ctx_cls = MaxDBExecutionContext
ported_sqla_06 = False
colspecs = colspecs
ischema_names = ischema_names
# MaxDB-specific
datetimeformat = 'internal'
def __init__(self, _raise_known_sql_errors=False, **kw):
super(MaxDBDialect, self).__init__(**kw)
self._raise_known = _raise_known_sql_errors
if self.dbapi is None:
self.dbapi_type_map = {}
else:
self.dbapi_type_map = {
'Long Binary': MaxBlob(),
'Long byte_t': MaxBlob(),
'Long Unicode': MaxText(),
'Timestamp': MaxTimestamp(),
'Date': MaxDate(),
'Time': MaxTime(),
datetime.datetime: MaxTimestamp(),
datetime.date: MaxDate(),
datetime.time: MaxTime(),
}
def do_execute(self, cursor, statement, parameters, context=None):
res = cursor.execute(statement, parameters)
if isinstance(res, int) and context is not None:
context._rowcount = res
def do_release_savepoint(self, connection, name):
# Does MaxDB truly support RELEASE SAVEPOINT <id>? All my attempts
# produce "SUBTRANS COMMIT/ROLLBACK not allowed without SUBTRANS
# BEGIN SQLSTATE: I7065"
# Note that ROLLBACK TO works fine. In theory, a RELEASE should
# just free up some transactional resources early, before the overall
# COMMIT/ROLLBACK so omitting it should be relatively ok.
pass
def _get_default_schema_name(self, connection):
return self.identifier_preparer._normalize_name(
connection.execute(
'SELECT CURRENT_SCHEMA FROM DUAL').scalar())
def has_table(self, connection, table_name, schema=None):
denormalize = self.identifier_preparer._denormalize_name
bind = [denormalize(table_name)]
if schema is None:
sql = ("SELECT tablename FROM TABLES "
"WHERE TABLES.TABLENAME=? AND"
" TABLES.SCHEMANAME=CURRENT_SCHEMA ")
else:
sql = ("SELECT tablename FROM TABLES "
"WHERE TABLES.TABLENAME = ? AND"
" TABLES.SCHEMANAME=? ")
bind.append(denormalize(schema))
rp = connection.execute(sql, bind)
return bool(rp.first())
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
if schema is None:
sql = (" SELECT TABLENAME FROM TABLES WHERE "
" SCHEMANAME=CURRENT_SCHEMA ")
rs = connection.execute(sql)
else:
sql = (" SELECT TABLENAME FROM TABLES WHERE "
" SCHEMANAME=? ")
matchname = self.identifier_preparer._denormalize_name(schema)
rs = connection.execute(sql, matchname)
normalize = self.identifier_preparer._normalize_name
return [normalize(row[0]) for row in rs]
def reflecttable(self, connection, table, include_columns):
denormalize = self.identifier_preparer._denormalize_name
normalize = self.identifier_preparer._normalize_name
st = ('SELECT COLUMNNAME, MODE, DATATYPE, CODETYPE, LEN, DEC, '
' NULLABLE, "DEFAULT", DEFAULTFUNCTION '
'FROM COLUMNS '
'WHERE TABLENAME=? AND SCHEMANAME=%s '
'ORDER BY POS')
fk = ('SELECT COLUMNNAME, FKEYNAME, '
' REFSCHEMANAME, REFTABLENAME, REFCOLUMNNAME, RULE, '
' (CASE WHEN REFSCHEMANAME = CURRENT_SCHEMA '
' THEN 1 ELSE 0 END) AS in_schema '
'FROM FOREIGNKEYCOLUMNS '
'WHERE TABLENAME=? AND SCHEMANAME=%s '
'ORDER BY FKEYNAME ')
params = [denormalize(table.name)]
if not table.schema:
st = st % 'CURRENT_SCHEMA'
fk = fk % 'CURRENT_SCHEMA'
else:
st = st % '?'
fk = fk % '?'
params.append(denormalize(table.schema))
rows = connection.execute(st, params).fetchall()
if not rows:
raise exc.NoSuchTableError(table.fullname)
include_columns = set(include_columns or [])
for row in rows:
(name, mode, col_type, encoding, length, scale,
nullable, constant_def, func_def) = row
name = normalize(name)
if include_columns and name not in include_columns:
continue
type_args, type_kw = [], {}
if col_type == 'FIXED':
type_args = length, scale
# Convert FIXED(10) DEFAULT SERIAL to our Integer
if (scale == 0 and
func_def is not None and func_def.startswith('SERIAL')):
col_type = 'INTEGER'
type_args = length,
            elif col_type == 'FLOAT':
type_args = length,
elif col_type in ('CHAR', 'VARCHAR'):
type_args = length,
type_kw['encoding'] = encoding
elif col_type == 'LONG':
type_kw['encoding'] = encoding
try:
type_cls = ischema_names[col_type.lower()]
type_instance = type_cls(*type_args, **type_kw)
except KeyError:
util.warn("Did not recognize type '%s' of column '%s'" %
(col_type, name))
type_instance = sqltypes.NullType
col_kw = {'autoincrement': False}
col_kw['nullable'] = (nullable == 'YES')
col_kw['primary_key'] = (mode == 'KEY')
if func_def is not None:
if func_def.startswith('SERIAL'):
if col_kw['primary_key']:
# No special default- let the standard autoincrement
# support handle SERIAL pk columns.
col_kw['autoincrement'] = True
else:
# strip current numbering
col_kw['server_default'] = schema.DefaultClause(
sql.text('SERIAL'))
col_kw['autoincrement'] = True
else:
col_kw['server_default'] = schema.DefaultClause(
sql.text(func_def))
elif constant_def is not None:
col_kw['server_default'] = schema.DefaultClause(sql.text(
"'%s'" % constant_def.replace("'", "''")))
table.append_column(schema.Column(name, type_instance, **col_kw))
fk_sets = itertools.groupby(connection.execute(fk, params),
lambda row: row.FKEYNAME)
for fkeyname, fkey in fk_sets:
fkey = list(fkey)
if include_columns:
key_cols = set([r.COLUMNNAME for r in fkey])
if key_cols != include_columns:
continue
columns, referants = [], []
quote = self.identifier_preparer._maybe_quote_identifier
for row in fkey:
columns.append(normalize(row.COLUMNNAME))
if table.schema or not row.in_schema:
referants.append('.'.join(
[quote(normalize(row[c]))
for c in ('REFSCHEMANAME', 'REFTABLENAME',
'REFCOLUMNNAME')]))
else:
referants.append('.'.join(
[quote(normalize(row[c]))
for c in ('REFTABLENAME', 'REFCOLUMNNAME')]))
constraint_kw = {'name': fkeyname.lower()}
if fkey[0].RULE is not None:
rule = fkey[0].RULE
if rule.startswith('DELETE '):
rule = rule[7:]
constraint_kw['ondelete'] = rule
table_kw = {}
if table.schema or not row.in_schema:
table_kw['schema'] = normalize(fkey[0].REFSCHEMANAME)
ref_key = schema._get_table_key(normalize(fkey[0].REFTABLENAME),
table_kw.get('schema'))
if ref_key not in table.metadata.tables:
schema.Table(normalize(fkey[0].REFTABLENAME),
table.metadata,
autoload=True, autoload_with=connection,
**table_kw)
constraint = schema.ForeignKeyConstraint(
columns, referants, link_to_name=True,
**constraint_kw)
table.append_constraint(constraint)
def has_sequence(self, connection, name):
# [ticket:726] makes this schema-aware.
denormalize = self.identifier_preparer._denormalize_name
sql = ("SELECT sequence_name FROM SEQUENCES "
"WHERE SEQUENCE_NAME=? ")
rp = connection.execute(sql, denormalize(name))
return bool(rp.first())
def _autoserial_column(table):
"""Finds the effective DEFAULT SERIAL column of a Table, if any."""
for index, col in enumerate(table.primary_key.columns):
if (isinstance(col.type, (sqltypes.Integer, sqltypes.Numeric)) and
col.autoincrement):
if isinstance(col.default, schema.Sequence):
if col.default.optional:
return index, col
elif (col.default is None or
(not isinstance(col.server_default, schema.DefaultClause))):
return index, col
return None, None
| mit |
niavlys/kivy | kivy/core/video/video_pygst.py | 8 | 5989 | '''
Video PyGst
===========
Implementation of a VideoBase using PyGST. This module is compatible only with
Python 2.
'''
#
# Important notes: you must take care of glib event + python. If you connect()
# directly an event to a python object method, the object will be ref, and will
# be never unref.
# To prevent memory leak, you must connect() to a func, and you might want to
# pass the referenced object with weakref()
#
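#
# A small sketch of that pattern; the names below are illustrative, but the
# shape is the one used by _gst_new_buffer/_on_gst_eos further down:
#
#     from functools import partial
#     from weakref import ref
#
#     def _on_signal(obj_ref, *largs):
#         obj = obj_ref()          # None once the video object was collected
#         if obj is not None:
#             obj.handle_signal()
#
#     element.connect('signal-name', partial(_on_signal, ref(video)))
#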
import pygst
if not hasattr(pygst, '_gst_already_checked'):
found = False
for version in ('1.0', '0.10'):
try:
pygst.require(version)
found = True
break
except:
continue
if found:
pygst._gst_already_checked = True
else:
raise Exception('Unable to find a valid Gstreamer version to use')
import gst
from functools import partial
from os import path
from threading import Lock
from urllib import pathname2url
from weakref import ref
from kivy.core.video import VideoBase
from kivy.graphics.texture import Texture
from kivy.logger import Logger
from kivy.support import install_gobject_iteration
install_gobject_iteration()
def _gst_new_buffer(obj, appsink):
obj = obj()
if not obj:
return
with obj._buffer_lock:
obj._buffer = obj._appsink.emit('pull-buffer')
def _on_gst_message(bus, message):
Logger.trace('VideoPyGst: (bus) %s' % str(message))
# log all error messages
if message.type == gst.MESSAGE_ERROR:
error, debug = list(map(str, message.parse_error()))
Logger.error('VideoPyGst: %s' % error)
Logger.debug('VideoPyGst: %s' % debug)
def _on_gst_eos(obj, *largs):
obj = obj()
if not obj:
return
obj._do_eos()
class VideoPyGst(VideoBase):
def __init__(self, **kwargs):
self._buffer_lock = Lock()
self._buffer = None
self._texture = None
self._gst_init()
super(VideoPyGst, self).__init__(**kwargs)
def _gst_init(self):
# self._appsink will receive the buffers so we can upload them to GPU
self._appsink = gst.element_factory_make('appsink', '')
self._appsink.set_property('caps', gst.Caps(
'video/x-raw-rgb,red_mask=(int)0xff0000,'
'green_mask=(int)0x00ff00,blue_mask=(int)0x0000ff'))
self._appsink.set_property('async', True)
self._appsink.set_property('drop', True)
self._appsink.set_property('qos', True)
self._appsink.set_property('emit-signals', True)
self._appsink.connect('new-buffer', partial(
_gst_new_buffer, ref(self)))
# playbin, takes care of all, loading, playing, etc.
# XXX playbin2 have some issue when playing some video or streaming :/
self._playbin = gst.element_factory_make('playbin', 'playbin')
self._playbin.set_property('video-sink', self._appsink)
# gstreamer bus, to attach and listen to gst messages
self._bus = self._playbin.get_bus()
self._bus.add_signal_watch()
self._bus.connect('message', _on_gst_message)
self._bus.connect('message::eos', partial(
_on_gst_eos, ref(self)))
def _update_texture(self, buf):
# texture will be updated with newest buffer/frame
size = None
caps = buf.get_caps()
_s = caps.get_structure(0)
size = _s['width'], _s['height']
if not self._texture:
# texture is not allocated yet, so create it first
self._texture = Texture.create(size=size, colorfmt='rgb')
self._texture.flip_vertical()
self.dispatch('on_load')
# upload texture data to GPU
self._texture.blit_buffer(buf.data, size=size, colorfmt='rgb')
def _update(self, dt):
buf = None
with self._buffer_lock:
buf = self._buffer
self._buffer = None
if buf is not None:
self._update_texture(buf)
self.dispatch('on_frame')
def unload(self):
self._playbin.set_state(gst.STATE_NULL)
self._buffer = None
self._texture = None
def load(self):
Logger.debug('VideoPyGst: Load <%s>' % self._filename)
self._playbin.set_state(gst.STATE_NULL)
self._playbin.set_property('uri', self._get_uri())
self._playbin.set_state(gst.STATE_READY)
def stop(self):
'''.. versionchanged:: 1.4.0'''
self._state = ''
self._playbin.set_state(gst.STATE_PAUSED)
def pause(self):
'''.. versionadded:: 1.4.0'''
self._state = 'paused'
self._playbin.set_state(gst.STATE_PAUSED)
def play(self):
self._state = 'playing'
self._playbin.set_state(gst.STATE_PLAYING)
def seek(self, percent):
        # GStreamer clock times are in nanoseconds (10e8 == 1e9)
        seek_t = percent * self._get_duration() * 10e8
seek_format = gst.FORMAT_TIME
seek_flags = gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_KEY_UNIT
self._playbin.seek_simple(seek_format, seek_flags, seek_t)
#if pipeline is not playing, we need to pull pre-roll to update frame
if not self._state == 'playing':
with self._buffer_lock:
self._buffer = self._appsink.emit('pull-preroll')
def _get_uri(self):
uri = self.filename
if not uri:
return
        if '://' not in uri:
uri = 'file:' + pathname2url(path.realpath(uri))
return uri
def _get_position(self):
try:
value, fmt = self._appsink.query_position(gst.FORMAT_TIME)
return value / 10e8
except:
return -1
def _get_duration(self):
try:
return self._playbin.query_duration(gst.FORMAT_TIME)[0] / 10e8
except:
return -1
def _get_volume(self):
self._volume = self._playbin.get_property('volume')
return self._volume
def _set_volume(self, volume):
self._playbin.set_property('volume', volume)
self._volume = volume
| mit |
xpansa/account-financial-tools | account_tax_update/model/__init__.py | 46 | 1149 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2012 Therp BV (<http://therp.nl>).
# This module copyright (C) 2013 Camptocamp (<http://www.camptocamp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import update_tax_config
from . import select_taxes
from . import account_tax
| agpl-3.0 |
DylannCordel/django-cms | cms/south_migrations/0056_auto__del_field_page_published_languages.py | 63 | 18273 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Page.published_languages'
db.delete_column(u'cms_page', 'published_languages')
def backwards(self, orm):
# Adding field 'Page.published_languages'
db.add_column(u'cms_page', 'published_languages',
self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
keep_default=False)
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id', 'lft')", 'unique_together': "(('publisher_is_draft', 'application_namespace'),)", 'object_name': 'Page'},
'application_namespace': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_home': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'languages': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'revision_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_pages'", 'to': u"orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'INHERIT'", 'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [u'auth.User']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': u"orm['auth.User']"}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': [u'auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': u"orm['auth.User']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cms.placeholderreference': {
'Meta': {'object_name': 'PlaceholderReference', 'db_table': "u'cmsplugin_placeholderreference'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'placeholder_ref': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'})
},
'cms.staticplaceholder': {
'Meta': {'object_name': 'StaticPlaceholder'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'blank': 'True'}),
'creation_method': ('django.db.models.fields.CharField', [], {'default': "'code'", 'max_length': '20', 'blank': 'True'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'draft': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_draft'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'public': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_public'", 'null': 'True', 'to': "orm['cms.Placeholder']"})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '155', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Title']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'cms.usersettings': {
'Meta': {'object_name': 'UserSettings'},
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_usersettings'", 'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms'] | bsd-3-clause |
CiuffysHub/MITMf | mitmflib-0.18.4/mitmflib/impacket/dhcp.py | 2 | 8466 | # Copyright (c) 2003-2015 CORE Security Technologies
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
import pcapy
import socket
import time
from random import randint
from mitmflib.impacket import structure, ImpactDecoder
class BootpPacket(structure.Structure):
commonHdr = (
('op','b'),
('htype','b=1'), # 1 = Ether
('hlen','b=len(chaddr)'),
('hops','b=0'),
('xid','!L=0'),
('secs','!H=0'),
('flags','!H=0'),
('ciaddr','!L=0'),
('yiaddr','!L=0'),
('siaddr','!L=0'),
('giaddr','!L=0'),
('_chaddr','16s=chaddr'),
('chaddr','_','_chaddr[:hlen]'),
('sname','64s=""'),
('file','128s=""'))
#def __init__(self, data = None, alignment = 0):
# structure.Structure.__init__(self, data, alignment)
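# Quick legend for the Structure mini-language used above (a descriptive
# note, not from the original source): each tuple is (field_name, format).
# 'b' is a signed byte, '!H'/'!L' are network-order 16/32-bit integers,
# '16s=chaddr' packs 16 bytes whose value defaults to the chaddr field, and
# the virtual field ('chaddr','_','_chaddr[:hlen]') is recomputed from the
# other fields when a packet is unpacked.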
class DhcpPacket(BootpPacket):
# DHCP: http://www.faqs.org/rfcs/rfc2131.html
# DHCP Options: http://www.faqs.org/rfcs/rfc1533.html
# good list of options: http://www.networksorcery.com/enp/protocol/bootp/options.htm
BOOTREQUEST = 1
BOOTREPLY = 2
DHCPDISCOVER= 1
DHCPOFFER = 2
DHCPREQUEST = 3
DHCPDECLINE = 4
DHCPACK = 5
DHCPNAK = 6
DHCPRELEASE = 7
DHCPINFORM = 8
options = {
# 3. Vendor Extensions
'pad':(0,'_'),
'subnet-mask':(1,'!L'),
'time-offset':(2,'!L'),
'router':(3,'*!L'),
'time-server':(4,'*!L'),
'name-server':(5,'*!L'),
'domain-name-server':(6,'*!L'),
'log-server':(7,'*!L'),
'cookie-server':(8,'*!L'),
'lpr-server':(9,'*!L'),
'impress-server':(10,'*!L'),
'resource-locator-server':(11,'*!L'),
'host-name':(12,':'),
'boot-file-size':(13,'!H'),
'merit-dump-file':(14,':'),
'domain-name':(15,':'),
'swap-server':(16,':'),
'root-path':(17,':'),
'extensions-path':(18,':'),
# 4. IP Layer Parameters per Host
'ip-forwarding':(19,'B'),
'non-local-source-routing':(20,'B'),
'policy-filter':(21,'*!L'),
'maximum-datagram-reassembly-size':(22,'!H'),
'default-ip-ttl':(23,'B'),
'path-mtu-aging-timeout':(24,'!L'),
'path-mtu-plateau-table':(25,'*!H'),
# 5. IP Layer Parameters per Interface
'interface-mtu':(26,'!H'),
'all-subnets-are-local':(27,'B'),
'broadcast-address':(28,'!L'),
'perform-mask-discovery':(29,'B'),
'mask-supplier':(30,'B'),
'perform-router-discovery':(31,'B'),
'router-solicitation-address':(32,'!L'),
'static-route':(33,'*!L'),
# 6. Link Layer Parameters per Interface
'trailer-encapsulation':(34,'B'),
'arp-cache-timeout':(35,'!L'),
'ethernet-encapsulation':(36,'B'),
# 7. TCP parameters
'tcp-default-ttl':(37,'B'),
'tcp-keepalive-interval':(38,'!L'),
'tcp-keepalive-garbage':(39,'B'),
# 8. Application and Service parameters
'nis-domain':(40,':'),
'nis-servers':(41,'*!L'),
'ntp-servers':(42,'*!L'),
'vendor-specific':(43,':'),
'netbios-name-server':(44,'*!L'),
'netbios-datagrame-distribution-server':(45,'*!L'),
'netbios-node-type':(46,'B'),
'netbios-scope':(47,':'),
'x11-font-server':(48,'*!L'),
'x11-display-manager':(49,'*!L'),
# 9. DHCP Extensions
'requested-ip':(50,'!L'),
'lease-time':(51,'!L'),
'option-overload':(52,'B'),
'message-type':(53,'B'),
'server-id':(54,'!L'),
'parameter-request-list':(55,':'),
'message':(56,':'),
'maximum-dhcp-message-size':(57,'!H'),
'renewal-time':(58,'!L'),
'rebinding-time':(59,'!L'),
'vendor-class':(60,':'),
'client-id':(61,':'),
# other non-rfc1533 options
'slp-directory-agent':(78,':'), # http://www.ietf.org/rfc/rfc2610.txt
'slp-service-scope':(79,':'), # http://www.ietf.org/rfc/rfc2610.txt
'fully-qualified-domain-name':(81,':'), # http://www.ietf.org/rfc/rfc4702.txt
'auto-configuration':(116,'B'), # http://www.ietf.org/rfc/rfc2563.txt
'domain-search-list':(119,'B'), # http://www.ietf.org/rfc/rfc3397.txt
'classless-route-121':(121, ':'), # http://www.ietf.org/rfc/rfc3442.txt
'classless-route-249':(249, ':'), # http://support.microsoft.com/kb/121005
'proxy-autoconfig':(252,':'),
'eof':(255,'_'),
}
structure = (
('cookie','!L'),
('_options',':=self.packOptions(options)'),
('options','_','self.unpackOptions(_options)'))
#def __init__(self, data = None, alignment = 0):
# BootpPacket.__init__(self, data, alignment)
def packOptions(self, options):
# options is an array of tuples: ('name',value)
answer = ''
for name, value in options:
code,format = self.options[name]
val = self.pack(format, value)
answer += '%c%c%s' % (code, len(val), val)
return answer
def getOptionNameAndFormat(self, optionCode):
for k in self.options:
code,format = self.options[k]
if code == optionCode: return k, format
return optionCode, ':'
def unpackOptions(self, options):
# options is a string
# print '%r' % options
answer = []
i = 0
while i < len(options)-1:
name, format = self.getOptionNameAndFormat(ord(options[i]))
# size = self.calcUnpackSize(format, options[i+1:])
size = ord(options[i+1])
# print i, name, format, size
value = self.unpack(format, options[i+2:i+2+size])
answer.append((name, value))
i += 2+size
return answer
def unpackParameterRequestList(self, options):
return [self.getOptionNameAndFormat(ord(opt))[0] for opt in options]
def isAskingForProxyAutodiscovery(self):
for opt in self.fields['options']:
if opt[0] == 'parameter-request-list':
for optCode in opt[1]:
if ord(optCode) == 252:
return True
return False
def getOptionValue(self, name):
for opt in self.fields['options']:
if opt[0] == name:
return opt[1]
return None
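# A small packet round-trip sketch (assumes Python 2; the MAC address and
# host name are purely illustrative):
#
#     req = DhcpPacket()
#     req['op'] = DhcpPacket.BOOTREQUEST
#     req['xid'] = 0x12345678
#     req['chaddr'] = '\x00\x11\x22\x33\x44\x55'
#     req['cookie'] = 0x63825363
#     req['options'] = [('message-type', DhcpPacket.DHCPDISCOVER),
#                       ('host-name', 'example-host')]
#     wire = str(req)                      # options are packed as TLVs
#     parsed = DhcpPacket(wire)
#     assert parsed.getOptionValue('host-name') == 'example-host'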
class DHCPTool:
def initialize(self):
self.pcap = pcapy.open_live(pcapy.lookupdev(), -1, 1, 1)
self.pcap.setfilter("port 67", 1, 0xffffff00)
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.connect(('192.168.1.1',67))
self.decoder = ImpactDecoder.EthDecoder()
def targetRun(self):
for i in range(1,254):
self.sendDISCOVER('12345%c' % i, ip = '192.168.1.%d' % i)
self.processPacketsForOneSecond()
def finalize(self):
        self.pcap.close()
def processPacketsForOneSecond(self):
t = time.time()
while time.time()-t < 1:
p = self.pcap.next()
if p[1][2]:
pp = self.decoder.decode(p[0])
print pp
def sendDHCP(self, type, chaddr, hostname = None, ip = None, xid = None,opts = []):
p = DhcpPacket()
opt = [('message-type',type)] + list(opts)
if xid is None:
xid = randint(0,0xffffffff)
if ip:
ip = structure.unpack('!L',socket.inet_aton(ip))[0]
p['ciaddr'] = ip
opt.append(('requested-ip',ip))
if hostname is not None:
for i in range(0,len(hostname),255):
opt.append(('host-name',hostname[i:i+255]))
p['op'] = p.BOOTREQUEST
p['xid'] = xid
p['chaddr'] = chaddr
p['cookie'] = 0x63825363
p['options'] = opt
self.sock.send(str(p))
def sendDISCOVER(self, chaddr, hostname = None, ip = None,xid = 0x12345678):
print 'DHCPDISCOVER: %s' % ip
self.sendDHCP(DhcpPacket.DHCPDISCOVER, chaddr, hostname, ip, xid)
| gpl-3.0 |
zubair-arbi/edx-platform | common/djangoapps/util/tests/test_date_utils.py | 55 | 7800 | # -*- coding: utf-8 -*-
"""
Tests for util.date_utils
"""
from datetime import datetime, timedelta, tzinfo
import unittest
import ddt
from mock import patch
from nose.tools import assert_equals, assert_false # pylint: disable=no-name-in-module
from pytz import UTC
from util.date_utils import (
get_default_time_display, get_time_display, almost_same_datetime,
strftime_localized,
)
def test_get_default_time_display():
assert_equals("", get_default_time_display(None))
test_time = datetime(1992, 3, 12, 15, 3, 30, tzinfo=UTC)
assert_equals(
"Mar 12, 1992 at 15:03 UTC",
get_default_time_display(test_time))
def test_get_dflt_time_disp_notz():
test_time = datetime(1992, 3, 12, 15, 3, 30)
assert_equals(
"Mar 12, 1992 at 15:03 UTC",
get_default_time_display(test_time))
def test_get_time_disp_ret_empty():
assert_equals("", get_time_display(None))
test_time = datetime(1992, 3, 12, 15, 3, 30, tzinfo=UTC)
assert_equals("", get_time_display(test_time, ""))
def test_get_time_display():
test_time = datetime(1992, 3, 12, 15, 3, 30, tzinfo=UTC)
assert_equals("dummy text", get_time_display(test_time, 'dummy text'))
assert_equals("Mar 12 1992", get_time_display(test_time, '%b %d %Y'))
assert_equals("Mar 12 1992 UTC", get_time_display(test_time, '%b %d %Y %Z'))
assert_equals("Mar 12 15:03", get_time_display(test_time, '%b %d %H:%M'))
def test_get_time_pass_through():
test_time = datetime(1992, 3, 12, 15, 3, 30, tzinfo=UTC)
assert_equals("Mar 12, 1992 at 15:03 UTC", get_time_display(test_time))
assert_equals("Mar 12, 1992 at 15:03 UTC", get_time_display(test_time, None))
assert_equals("Mar 12, 1992 at 15:03 UTC", get_time_display(test_time, "%"))
def test_get_time_display_coerce():
test_time_standard = datetime(1992, 1, 12, 15, 3, 30, tzinfo=UTC)
test_time_daylight = datetime(1992, 7, 12, 15, 3, 30, tzinfo=UTC)
assert_equals("Jan 12, 1992 at 07:03 PST",
get_time_display(test_time_standard, None, coerce_tz="US/Pacific"))
assert_equals("Jan 12, 1992 at 15:03 UTC",
get_time_display(test_time_standard, None, coerce_tz="NONEXISTENTTZ"))
assert_equals("Jan 12 07:03",
get_time_display(test_time_standard, '%b %d %H:%M', coerce_tz="US/Pacific"))
assert_equals("Jul 12, 1992 at 08:03 PDT",
get_time_display(test_time_daylight, None, coerce_tz="US/Pacific"))
assert_equals("Jul 12, 1992 at 15:03 UTC",
get_time_display(test_time_daylight, None, coerce_tz="NONEXISTENTTZ"))
assert_equals("Jul 12 08:03",
get_time_display(test_time_daylight, '%b %d %H:%M', coerce_tz="US/Pacific"))
class NamelessTZ(tzinfo):
"""Static timezone for testing"""
def utcoffset(self, _dt):
return timedelta(hours=-3)
def dst(self, _dt):
return timedelta(0)
def test_get_default_time_display_no_tzname():
assert_equals("", get_default_time_display(None))
test_time = datetime(1992, 3, 12, 15, 3, 30, tzinfo=NamelessTZ())
assert_equals(
"Mar 12, 1992 at 15:03-0300",
get_default_time_display(test_time))
def test_almost_same_datetime():
assert almost_same_datetime(
datetime(2013, 5, 3, 10, 20, 30),
datetime(2013, 5, 3, 10, 21, 29)
)
assert almost_same_datetime(
datetime(2013, 5, 3, 11, 20, 30),
datetime(2013, 5, 3, 10, 21, 29),
timedelta(hours=1)
)
assert_false(
almost_same_datetime(
datetime(2013, 5, 3, 11, 20, 30),
datetime(2013, 5, 3, 10, 21, 29)
)
)
assert_false(
almost_same_datetime(
datetime(2013, 5, 3, 11, 20, 30),
datetime(2013, 5, 3, 10, 21, 29),
timedelta(minutes=10)
)
)
def fake_ugettext(translations):
"""
Create a fake implementation of ugettext, for testing.
"""
def _ugettext(text): # pylint: disable=missing-docstring
return translations.get(text, text)
return _ugettext
def fake_pgettext(translations):
"""
Create a fake implementation of pgettext, for testing.
"""
def _pgettext(context, text): # pylint: disable=missing-docstring
return translations.get((context, text), text)
return _pgettext
@ddt.ddt
class StrftimeLocalizedTest(unittest.TestCase):
"""
Tests for strftime_localized.
"""
@ddt.data(
("%Y", "2013"),
("%m/%d/%y", "02/14/13"),
("hello", "hello"),
(u'%Y년 %m월 %d일', u"2013년 02월 14일"),
("%a, %b %d, %Y", "Thu, Feb 14, 2013"),
("%I:%M:%S %p", "04:41:17 PM"),
("%A at %-I%P", "Thursday at 4pm"),
)
def test_usual_strftime_behavior(self, (fmt, expected)):
dtime = datetime(2013, 02, 14, 16, 41, 17)
self.assertEqual(expected, strftime_localized(dtime, fmt))
# strftime doesn't like Unicode, so do the work in UTF8.
self.assertEqual(expected, dtime.strftime(fmt.encode('utf8')).decode('utf8'))
@ddt.data(
("SHORT_DATE", "Feb 14, 2013"),
("LONG_DATE", "Thursday, February 14, 2013"),
("TIME", "04:41:17 PM"),
("DAY_AND_TIME", "Thursday at 4pm"),
("%x %X!", "Feb 14, 2013 04:41:17 PM!"),
)
def test_shortcuts(self, (fmt, expected)):
dtime = datetime(2013, 02, 14, 16, 41, 17)
self.assertEqual(expected, strftime_localized(dtime, fmt))
@patch('util.date_utils.pgettext', fake_pgettext(translations={
("abbreviated month name", "Feb"): "XXfebXX",
("month name", "February"): "XXfebruaryXX",
("abbreviated weekday name", "Thu"): "XXthuXX",
("weekday name", "Thursday"): "XXthursdayXX",
("am/pm indicator", "PM"): "XXpmXX",
}))
@ddt.data(
("SHORT_DATE", "XXfebXX 14, 2013"),
("LONG_DATE", "XXthursdayXX, XXfebruaryXX 14, 2013"),
("DATE_TIME", "XXfebXX 14, 2013 at 16:41"),
("TIME", "04:41:17 XXpmXX"),
("%x %X!", "XXfebXX 14, 2013 04:41:17 XXpmXX!"),
)
def test_translated_words(self, (fmt, expected)):
dtime = datetime(2013, 02, 14, 16, 41, 17)
self.assertEqual(expected, strftime_localized(dtime, fmt))
@patch('util.date_utils.ugettext', fake_ugettext(translations={
"SHORT_DATE_FORMAT": "date(%Y.%m.%d)",
"LONG_DATE_FORMAT": "date(%A.%Y.%B.%d)",
"DATE_TIME_FORMAT": "date(%Y.%m.%d@%H.%M)",
"TIME_FORMAT": "%Hh.%Mm.%Ss",
}))
@ddt.data(
("SHORT_DATE", "date(2013.02.14)"),
("Look: %x", "Look: date(2013.02.14)"),
("LONG_DATE", "date(Thursday.2013.February.14)"),
("DATE_TIME", "date([email protected])"),
("TIME", "16h.41m.17s"),
("The time is: %X", "The time is: 16h.41m.17s"),
("%x %X", "date(2013.02.14) 16h.41m.17s"),
)
def test_translated_formats(self, (fmt, expected)):
dtime = datetime(2013, 02, 14, 16, 41, 17)
self.assertEqual(expected, strftime_localized(dtime, fmt))
@patch('util.date_utils.ugettext', fake_ugettext(translations={
"SHORT_DATE_FORMAT": "oops date(%Y.%x.%d)",
"TIME_FORMAT": "oops %Hh.%Xm.%Ss",
}))
@ddt.data(
("SHORT_DATE", "Feb 14, 2013"),
("TIME", "04:41:17 PM"),
)
def test_recursion_protection(self, (fmt, expected)):
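        # The patched SHORT_DATE_FORMAT/TIME_FORMAT refer back to %x and %X,
        # which would recurse; strftime_localized must detect this and fall
        # back to the default formats instead.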
dtime = datetime(2013, 02, 14, 16, 41, 17)
self.assertEqual(expected, strftime_localized(dtime, fmt))
@ddt.data(
"%",
"Hello%"
"%Y/%m/%d%",
)
def test_invalid_format_strings(self, fmt):
dtime = datetime(2013, 02, 14, 16, 41, 17)
with self.assertRaises(ValueError):
strftime_localized(dtime, fmt)
| agpl-3.0 |
ruguevara/neon | neon/datasets/tests/test_cifar100.py | 7 | 1060 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import shutil
from nose.plugins.attrib import attr
from neon.datasets.cifar100 import CIFAR100
from neon.backends.cpu import CPU
class TestCIFAR100(object):
tmp_repo = os.path.join(os.path.dirname(__file__), 'repo')
def setup(self):
os.makedirs(self.tmp_repo)
def teardown(self):
shutil.rmtree(self.tmp_repo, ignore_errors=True)
@attr('slow')
def test_fine_labels(self):
data = CIFAR100(coarse=False, repo_path=self.tmp_repo)
data.backend = CPU(rng_seed=0)
data.backend.actual_batch_size = 128
data.load()
assert len(data.inputs['train']) == 50000
assert len(data.targets['train'][0]) == 100
@attr('slow')
def test_coarse_labels(self):
data = CIFAR100(coarse=True, repo_path=self.tmp_repo)
data.backend = CPU(rng_seed=0)
data.backend.actual_batch_size = 128
data.load()
assert len(data.inputs['train']) == 50000
assert len(data.targets['train'][0]) == 20
| apache-2.0 |
youprofit/webserver | admin/plugins/url_arg.py | 5 | 4080 | # Cheroke Admin
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2001-2014 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
from util import *
from consts import *
from Rule import RulePlugin
URL_APPLY = '/plugin/url_arg/apply'
NOTE_ARGUMENT = N_("Argument name")
NOTE_REGEX = N_("Regular Expression for the match")
OPTIONS = [('0', N_('Match a specific argument')),
('1', N_('Match any argument'))]
def commit():
# POST info
key = CTK.post.pop ('key', None)
vsrv_num = CTK.post.pop ('vsrv_num', None)
new_match = CTK.post.pop ('tmp!match', None)
new_any = CTK.post.pop ('tmp!match_any', "0")
new_arg = CTK.post.pop ('tmp!arg', None)
# New entry
if new_match:
next_rule, next_pre = cfg_vsrv_rule_get_next ('vserver!%s'%(vsrv_num))
CTK.cfg['%s!match'%(next_pre)] = 'url_arg'
CTK.cfg['%s!match!match'%(next_pre)] = new_match
CTK.cfg['%s!match!match_any'%(next_pre)] = new_any
if not int(new_any):
CTK.cfg['%s!match!arg'%(next_pre)] = new_arg
return {'ret': 'ok', 'redirect': '/vserver/%s/rule/%s' %(vsrv_num, next_rule)}
# Modifications
return CTK.cfg_apply_post()
class Plugin_url_arg (RulePlugin):
def __init__ (self, key, **kwargs):
RulePlugin.__init__ (self, key)
self.vsrv_num = kwargs.pop('vsrv_num', '')
if key.startswith('tmp'):
return self.GUI_new()
return self.GUI_mod()
def GUI_new (self):
any = CTK.ComboCfg('%s!match_any'%(self.key), trans_options(OPTIONS), {'class': 'noauto'})
table = CTK.PropsTable()
table.Add (_('Match type'), any, '')
table.Add (_('Argument'), CTK.TextCfg('%s!arg'%(self.key), False, {'class': 'noauto'}), _(NOTE_ARGUMENT))
table.Add (_('Regular Expression'), CTK.TextCfg('%s!match'%(self.key), False, {'class': 'noauto'}), _(NOTE_REGEX))
# Special events
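        # Show the 'Argument' row only when a specific argument is matched
        # (combo value 0); hide it when any argument may match.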
any.bind ('change', """if ($(this).val() == 0) {
$("#%(row)s").show();
} else {
$("#%(row)s").hide(); }""" %({'row': table[1].id}))
submit = CTK.Submitter (URL_APPLY)
submit += CTK.Hidden ('key', self.key)
submit += CTK.Hidden ('vsrv_num', self.vsrv_num)
submit += table
self += submit
def GUI_mod (self):
table = CTK.PropsTable()
table.Add (_('Match type'), CTK.ComboCfg('%s!match_any'%(self.key), trans_options(OPTIONS)), '')
if not int(CTK.cfg.get_val('%s!match_any'%(self.key), 0)):
table.Add (_('Argument'), CTK.TextCfg('%s!arg'%(self.key), False), _(NOTE_ARGUMENT))
table.Add (_('Regular Expression'), CTK.TextCfg('%s!match'%(self.key), False), _(NOTE_REGEX))
submit = CTK.Submitter (URL_APPLY)
submit += CTK.Hidden ('key', self.key)
submit += CTK.Hidden ('vsrv_num', self.vsrv_num)
submit += table
self += submit
def GetName (self):
match = CTK.cfg.get_val ('%s!match'%(self.key), '')
if int(CTK.cfg.get_val ('%s!match_any'%(self.key), "0")):
return "Any arg. matches ~ %s" %(match)
arg = CTK.cfg.get_val ('%s!arg'%(self.key), '')
return "Arg %s matches %s" %(arg, match)
# Validation, and Public URLs
CTK.publish ("^%s"%(URL_APPLY), commit, method="POST")
| gpl-2.0 |
auduny/home-assistant | tests/components/meraki/test_device_tracker.py | 8 | 4400 | """The tests the for Meraki device tracker."""
import asyncio
import json
import pytest
from homeassistant.components.meraki.device_tracker import (
CONF_VALIDATOR, CONF_SECRET)
from homeassistant.setup import async_setup_component
import homeassistant.components.device_tracker as device_tracker
from homeassistant.const import CONF_PLATFORM
from homeassistant.components.meraki.device_tracker import URL
@pytest.fixture
def meraki_client(loop, hass, hass_client):
"""Meraki mock client."""
assert loop.run_until_complete(
async_setup_component(
hass,
device_tracker.DOMAIN,
{
device_tracker.DOMAIN: {
CONF_PLATFORM: "meraki",
CONF_VALIDATOR: "validator",
CONF_SECRET: "secret",
}
},
)
)
yield loop.run_until_complete(hass_client())
@asyncio.coroutine
def test_invalid_or_missing_data(mock_device_tracker_conf, meraki_client):
"""Test validator with invalid or missing data."""
req = yield from meraki_client.get(URL)
text = yield from req.text()
assert req.status == 200
assert text == "validator"
req = yield from meraki_client.post(URL, data=b"invalid")
text = yield from req.json()
assert req.status == 400
assert text["message"] == "Invalid JSON"
req = yield from meraki_client.post(URL, data=b"{}")
text = yield from req.json()
assert req.status == 422
assert text["message"] == "No secret"
data = {"version": "1.0", "secret": "secret"}
req = yield from meraki_client.post(URL, data=json.dumps(data))
text = yield from req.json()
assert req.status == 422
assert text["message"] == "Invalid version"
data = {"version": "2.0", "secret": "invalid"}
req = yield from meraki_client.post(URL, data=json.dumps(data))
text = yield from req.json()
assert req.status == 422
assert text["message"] == "Invalid secret"
data = {"version": "2.0", "secret": "secret", "type": "InvalidType"}
req = yield from meraki_client.post(URL, data=json.dumps(data))
text = yield from req.json()
assert req.status == 422
assert text["message"] == "Invalid device type"
data = {
"version": "2.0",
"secret": "secret",
"type": "BluetoothDevicesSeen",
"data": {"observations": []},
}
req = yield from meraki_client.post(URL, data=json.dumps(data))
assert req.status == 200
@asyncio.coroutine
def test_data_will_be_saved(mock_device_tracker_conf, hass, meraki_client):
"""Test with valid data."""
data = {
"version": "2.0",
"secret": "secret",
"type": "DevicesSeen",
"data": {
"observations": [
{
"location": {
"lat": "51.5355157",
"lng": "21.0699035",
"unc": "46.3610585",
},
"seenTime": "2016-09-12T16:23:13Z",
"ssid": "ssid",
"os": "HA",
"ipv6": "2607:f0d0:1002:51::4/64",
"clientMac": "00:26:ab:b8:a9:a4",
"seenEpoch": "147369739",
"rssi": "20",
"manufacturer": "Seiko Epson",
},
{
"location": {
"lat": "51.5355357",
"lng": "21.0699635",
"unc": "46.3610585",
},
"seenTime": "2016-09-12T16:21:13Z",
"ssid": "ssid",
"os": "HA",
"ipv4": "192.168.0.1",
"clientMac": "00:26:ab:b8:a9:a5",
"seenEpoch": "147369750",
"rssi": "20",
"manufacturer": "Seiko Epson",
},
]
},
}
req = yield from meraki_client.post(URL, data=json.dumps(data))
assert req.status == 200
yield from hass.async_block_till_done()
state_name = hass.states.get(
"{}.{}".format("device_tracker", "00_26_ab_b8_a9_a4")
).state
assert "home" == state_name
state_name = hass.states.get(
"{}.{}".format("device_tracker", "00_26_ab_b8_a9_a5")
).state
assert "home" == state_name
| apache-2.0 |
drunknbass/plexpy | lib/unidecode/x012.py | 252 | 4318 | data = (
'ha', # 0x00
'hu', # 0x01
'hi', # 0x02
'haa', # 0x03
'hee', # 0x04
'he', # 0x05
'ho', # 0x06
'[?]', # 0x07
'la', # 0x08
'lu', # 0x09
'li', # 0x0a
'laa', # 0x0b
'lee', # 0x0c
'le', # 0x0d
'lo', # 0x0e
'lwa', # 0x0f
'hha', # 0x10
'hhu', # 0x11
'hhi', # 0x12
'hhaa', # 0x13
'hhee', # 0x14
'hhe', # 0x15
'hho', # 0x16
'hhwa', # 0x17
'ma', # 0x18
'mu', # 0x19
'mi', # 0x1a
'maa', # 0x1b
'mee', # 0x1c
'me', # 0x1d
'mo', # 0x1e
'mwa', # 0x1f
'sza', # 0x20
'szu', # 0x21
'szi', # 0x22
'szaa', # 0x23
'szee', # 0x24
'sze', # 0x25
'szo', # 0x26
'szwa', # 0x27
'ra', # 0x28
'ru', # 0x29
'ri', # 0x2a
'raa', # 0x2b
'ree', # 0x2c
're', # 0x2d
'ro', # 0x2e
'rwa', # 0x2f
'sa', # 0x30
'su', # 0x31
'si', # 0x32
'saa', # 0x33
'see', # 0x34
'se', # 0x35
'so', # 0x36
'swa', # 0x37
'sha', # 0x38
'shu', # 0x39
'shi', # 0x3a
'shaa', # 0x3b
'shee', # 0x3c
'she', # 0x3d
'sho', # 0x3e
'shwa', # 0x3f
'qa', # 0x40
'qu', # 0x41
'qi', # 0x42
'qaa', # 0x43
'qee', # 0x44
'qe', # 0x45
'qo', # 0x46
'[?]', # 0x47
'qwa', # 0x48
'[?]', # 0x49
'qwi', # 0x4a
'qwaa', # 0x4b
'qwee', # 0x4c
'qwe', # 0x4d
'[?]', # 0x4e
'[?]', # 0x4f
'qha', # 0x50
'qhu', # 0x51
'qhi', # 0x52
'qhaa', # 0x53
'qhee', # 0x54
'qhe', # 0x55
'qho', # 0x56
'[?]', # 0x57
'qhwa', # 0x58
'[?]', # 0x59
'qhwi', # 0x5a
'qhwaa', # 0x5b
'qhwee', # 0x5c
'qhwe', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'ba', # 0x60
'bu', # 0x61
'bi', # 0x62
'baa', # 0x63
'bee', # 0x64
'be', # 0x65
'bo', # 0x66
'bwa', # 0x67
'va', # 0x68
'vu', # 0x69
'vi', # 0x6a
'vaa', # 0x6b
'vee', # 0x6c
've', # 0x6d
'vo', # 0x6e
'vwa', # 0x6f
'ta', # 0x70
'tu', # 0x71
'ti', # 0x72
'taa', # 0x73
'tee', # 0x74
'te', # 0x75
'to', # 0x76
'twa', # 0x77
'ca', # 0x78
'cu', # 0x79
'ci', # 0x7a
'caa', # 0x7b
'cee', # 0x7c
'ce', # 0x7d
'co', # 0x7e
'cwa', # 0x7f
'xa', # 0x80
'xu', # 0x81
'xi', # 0x82
'xaa', # 0x83
'xee', # 0x84
'xe', # 0x85
'xo', # 0x86
'[?]', # 0x87
'xwa', # 0x88
'[?]', # 0x89
'xwi', # 0x8a
'xwaa', # 0x8b
'xwee', # 0x8c
'xwe', # 0x8d
'[?]', # 0x8e
'[?]', # 0x8f
'na', # 0x90
'nu', # 0x91
'ni', # 0x92
'naa', # 0x93
'nee', # 0x94
'ne', # 0x95
'no', # 0x96
'nwa', # 0x97
'nya', # 0x98
'nyu', # 0x99
'nyi', # 0x9a
'nyaa', # 0x9b
'nyee', # 0x9c
'nye', # 0x9d
'nyo', # 0x9e
'nywa', # 0x9f
'\'a', # 0xa0
'\'u', # 0xa1
'[?]', # 0xa2
'\'aa', # 0xa3
'\'ee', # 0xa4
'\'e', # 0xa5
'\'o', # 0xa6
'\'wa', # 0xa7
'ka', # 0xa8
'ku', # 0xa9
'ki', # 0xaa
'kaa', # 0xab
'kee', # 0xac
'ke', # 0xad
'ko', # 0xae
'[?]', # 0xaf
'kwa', # 0xb0
'[?]', # 0xb1
'kwi', # 0xb2
'kwaa', # 0xb3
'kwee', # 0xb4
'kwe', # 0xb5
'[?]', # 0xb6
'[?]', # 0xb7
'kxa', # 0xb8
'kxu', # 0xb9
'kxi', # 0xba
'kxaa', # 0xbb
'kxee', # 0xbc
'kxe', # 0xbd
'kxo', # 0xbe
'[?]', # 0xbf
'kxwa', # 0xc0
'[?]', # 0xc1
'kxwi', # 0xc2
'kxwaa', # 0xc3
'kxwee', # 0xc4
'kxwe', # 0xc5
'[?]', # 0xc6
'[?]', # 0xc7
'wa', # 0xc8
'wu', # 0xc9
'wi', # 0xca
'waa', # 0xcb
'wee', # 0xcc
'we', # 0xcd
'wo', # 0xce
'[?]', # 0xcf
'`a', # 0xd0
'`u', # 0xd1
'`i', # 0xd2
'`aa', # 0xd3
'`ee', # 0xd4
'`e', # 0xd5
'`o', # 0xd6
'[?]', # 0xd7
'za', # 0xd8
'zu', # 0xd9
'zi', # 0xda
'zaa', # 0xdb
'zee', # 0xdc
'ze', # 0xdd
'zo', # 0xde
'zwa', # 0xdf
'zha', # 0xe0
'zhu', # 0xe1
'zhi', # 0xe2
'zhaa', # 0xe3
'zhee', # 0xe4
'zhe', # 0xe5
'zho', # 0xe6
'zhwa', # 0xe7
'ya', # 0xe8
'yu', # 0xe9
'yi', # 0xea
'yaa', # 0xeb
'yee', # 0xec
'ye', # 0xed
'yo', # 0xee
'[?]', # 0xef
'da', # 0xf0
'du', # 0xf1
'di', # 0xf2
'daa', # 0xf3
'dee', # 0xf4
'de', # 0xf5
'do', # 0xf6
'dwa', # 0xf7
'dda', # 0xf8
'ddu', # 0xf9
'ddi', # 0xfa
'ddaa', # 0xfb
'ddee', # 0xfc
'dde', # 0xfd
'ddo', # 0xfe
'ddwa', # 0xff
)
| gpl-3.0 |
pwarren/AGDeviceControl | agdevicecontrol/thirdparty/site-packages/linux2/twisted/test/test_pb.py | 3 | 40740 |
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Tests for Perspective Broker module.
TODO: update protocol level tests to use new connection API, leaving
only specific tests for old API.
"""
import sys, os
from cStringIO import StringIO
from zope.interface import implements
from twisted.trial import unittest
from twisted.trial.util import suppressWarnings
dR = unittest.deferredResult
from twisted.spread import pb, util
from twisted.internet import protocol, main
from twisted.internet.app import Application
from twisted.python import failure, log
from twisted.cred import identity, authorizer
from twisted.internet import reactor, defer
class Dummy(pb.Viewable):
def view_doNothing(self, user):
if isinstance(user, DummyPerspective):
return 'hello world!'
else:
return 'goodbye, cruel world!'
class DummyPerspective(pb.Perspective):
def perspective_getDummyViewPoint(self):
return Dummy()
class DummyService(pb.Service):
"""A dummy PB service to test with.
"""
def getPerspectiveNamed(self, user):
"""
"""
# Note: I don't need to go back and forth between identity and
# perspective here, so I _never_ need to specify identityName.
p = DummyPerspective(user)
p.setService(self)
return p
class IOPump:
"""Utility to pump data between clients and servers for protocol testing.
Perhaps this is a utility worthy of being in protocol.py?
"""
def __init__(self, client, server, clientIO, serverIO):
self.client = client
self.server = server
self.clientIO = clientIO
self.serverIO = serverIO
def flush(self):
"Pump until there is no more input or output."
reactor.iterate()
while self.pump():
reactor.iterate()
reactor.iterate()
def pump(self):
"""Move data back and forth.
Returns whether any data was moved.
"""
self.clientIO.seek(0)
self.serverIO.seek(0)
cData = self.clientIO.read()
sData = self.serverIO.read()
self.clientIO.seek(0)
self.serverIO.seek(0)
self.clientIO.truncate()
self.serverIO.truncate()
self.client.transport._checkProducer()
self.server.transport._checkProducer()
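        # Feed the data one byte at a time so the protocol's buffering and
        # reassembly logic is exercised, as with a fragmenting transport.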
for byte in cData:
self.server.dataReceived(byte)
for byte in sData:
self.client.dataReceived(byte)
if cData or sData:
return 1
else:
return 0
def connectedServerAndClient():
"""Returns a 3-tuple: (client, server, pump)
"""
c = pb.Broker()
auth = authorizer.DefaultAuthorizer()
appl = Application("pb-test")
auth.setServiceCollection(appl)
ident = identity.Identity("guest", authorizer=auth)
ident.setPassword("guest")
svc = DummyService("test", appl, authorizer=auth)
ident.addKeyForPerspective(svc.getPerspectiveNamed("any"))
auth.addIdentity(ident)
svr = pb.BrokerFactory(pb.AuthRoot(auth))
s = svr.buildProtocol(('127.0.0.1',))
s.copyTags = {}
cio = StringIO()
sio = StringIO()
c.makeConnection(protocol.FileWrapper(cio))
s.makeConnection(protocol.FileWrapper(sio))
pump = IOPump(c, s, cio, sio)
# Challenge-response authentication:
pump.flush()
return c, s, pump
connectedServerAndClient = suppressWarnings(connectedServerAndClient,
("twisted.internet.app", DeprecationWarning),
("Identities", DeprecationWarning),
("Cred services", DeprecationWarning),
("This is deprecated. Use PBServerFactory.", DeprecationWarning),
("Authorizers are deprecated, switch to portals/realms/etc.", DeprecationWarning))
class SimpleRemote(pb.Referenceable):
def remote_thunk(self, arg):
self.arg = arg
return arg + 1
def remote_knuth(self, arg):
raise Exception()
class NestedRemote(pb.Referenceable):
def remote_getSimple(self):
return SimpleRemote()
class SimpleCopy(pb.Copyable):
def __init__(self):
self.x = 1
self.y = {"Hello":"World"}
self.z = ['test']
class SimpleLocalCopy(pb.RemoteCopy):
def check(self):
# checks based on above '__init__'
assert self.x == 1
assert self.y['Hello'] == 'World'
assert self.z[0] == 'test'
return 1
pb.setCopierForClass(SimpleCopy, SimpleLocalCopy)
class SimpleFactoryCopy(pb.Copyable):
allIDs = {}
def __init__(self, id):
self.id = id
SimpleFactoryCopy.allIDs[id] = self
def createFactoryCopy(state):
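    # Look the original instance up in the registry by its 'id', so the
    # "copy" received over the wire is the very object created locally.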
id = state.get("id", None)
if not id:
raise "factory copy state has no 'id' member %s" % repr(state)
if not SimpleFactoryCopy.allIDs.has_key(id):
raise "factory class has no ID: %s" % SimpleFactoryCopy.allIDs
inst = SimpleFactoryCopy.allIDs[id]
if not inst:
raise "factory method found no object with id"
return inst
pb.setFactoryForClass(SimpleFactoryCopy, createFactoryCopy)
class NestedCopy(pb.Referenceable):
def remote_getCopy(self):
return SimpleCopy()
def remote_getFactory(self, value):
return SimpleFactoryCopy(value)
class SimpleCache(pb.Cacheable):
    def __init__(self):
self.x = 1
self.y = {"Hello":"World"}
self.z = ['test']
class NestedComplicatedCache(pb.Referenceable):
def __init__(self):
self.c = VeryVeryComplicatedCacheable()
def remote_getCache(self):
return self.c
class VeryVeryComplicatedCacheable(pb.Cacheable):
def __init__(self):
self.x = 1
self.y = 2
self.foo = 3
def setFoo4(self):
self.foo = 4
self.observer.callRemote('foo',4)
def getStateToCacheAndObserveFor(self, perspective, observer):
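        # Keep a reference to the observer so later state changes
        # (setFoo4) can be pushed to the remote cache via callRemote.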
self.observer = observer
return {"x": self.x,
"y": self.y,
"foo": self.foo}
def stoppedObserving(self, perspective, observer):
log.msg("stopped observing")
observer.callRemote("end")
if observer == self.observer:
self.observer = None
class RatherBaroqueCache(pb.RemoteCache):
def observe_foo(self, newFoo):
self.foo = newFoo
def observe_end(self):
log.msg("the end of things")
def checkFoo4(self):
return (self.foo == 4)
def check(self):
# checks based on above '__init__'
assert self.x == 1
assert self.y == 2
assert self.foo == 3
return 1
pb.setCopierForClass(VeryVeryComplicatedCacheable, RatherBaroqueCache)
class SimpleLocalCache(pb.RemoteCache):
def setCopyableState(self, state):
self.__dict__.update(state)
def checkMethod(self):
return self.check
def checkSelf(self):
return self
def check(self):
# checks based on above '__init__'
assert self.x == 1
assert self.y['Hello'] == 'World'
assert self.z[0] == 'test'
return 1
pb.setCopierForClass(SimpleCache, SimpleLocalCache)
class NestedCache(pb.Referenceable):
def __init__(self):
self.x = SimpleCache()
def remote_getCache(self):
return [self.x,self.x]
def remote_putCache(self, cache):
return (self.x is cache)
class Observable(pb.Referenceable):
def __init__(self):
self.observers = []
def remote_observe(self, obs):
self.observers.append(obs)
def remote_unobserve(self, obs):
self.observers.remove(obs)
def notify(self, obj):
for observer in self.observers:
observer.callRemote('notify', self, obj)
class DeferredRemote(pb.Referenceable):
def __init__(self):
self.run = 0
def runMe(self, arg):
self.run = arg
return arg + 1
def dontRunMe(self, arg):
assert 0, "shouldn't have been run!"
def remote_doItLater(self):
d = defer.Deferred()
d.addCallbacks(self.runMe, self.dontRunMe)
self.d = d
return d
class Observer(pb.Referenceable):
notified = 0
obj = None
def remote_notify(self, other, obj):
self.obj = obj
self.notified = self.notified + 1
other.callRemote('unobserve',self)
class NewStyleCopy(pb.Copyable, pb.RemoteCopy, object):
def __init__(self, s):
self.s = s
pb.setUnjellyableForClass(NewStyleCopy, NewStyleCopy)
class NewStyleCopy2(pb.Copyable, pb.RemoteCopy, object):
allocated = 0
initialized = 0
value = 1
def __new__(self):
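        # Count allocations separately from __init__ calls: unjellying on
        # the receiving side allocates instances without running __init__.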
NewStyleCopy2.allocated += 1
inst = object.__new__(self)
inst.value = 2
return inst
def __init__(self):
NewStyleCopy2.initialized += 1
pb.setUnjellyableForClass(NewStyleCopy2, NewStyleCopy2)
class Echoer(pb.Root):
def remote_echo(self, st):
return st
class NewStyleTestCase(unittest.TestCase):
ref = None
def tearDown(self):
if self.ref:
self.ref.broker.transport.loseConnection()
return self.server.stopListening()
def testNewStyle(self):
self.server = reactor.listenTCP(0, pb.PBServerFactory(Echoer()))
f = pb.PBClientFactory()
reactor.connectTCP("localhost", self.server.getHost().port, f)
d = f.getRootObject()
d.addCallback(self._testNewStyle_1)
return d
def _testNewStyle_1(self, ref):
self.ref = ref
orig = NewStyleCopy("value")
d = ref.callRemote("echo", orig)
d.addCallback(self._testNewStyle_2, orig)
return d
def _testNewStyle_2(self, res, orig):
self.failUnless(isinstance(res, NewStyleCopy))
self.failUnlessEqual(res.s, "value")
self.failIf(res is orig) # no cheating :)
def testAlloc(self):
self.server = reactor.listenTCP(0, pb.PBServerFactory(Echoer()))
f = pb.PBClientFactory()
reactor.connectTCP("localhost", self.server.getHost().port, f)
d = f.getRootObject()
d.addCallback(self._testAlloc_1)
return d
def _testAlloc_1(self, ref):
self.ref = ref
orig = NewStyleCopy2()
self.failUnlessEqual(NewStyleCopy2.allocated, 1)
self.failUnlessEqual(NewStyleCopy2.initialized, 1)
d = ref.callRemote("echo", orig)
# sending the object creates a second one on the far side
d.addCallback(self._testAlloc_2, orig)
return d
def _testAlloc_2(self, res, orig):
# receiving the response creates a third one on the way back
self.failUnless(isinstance(res, NewStyleCopy2))
self.failUnlessEqual(res.value, 2)
self.failUnlessEqual(NewStyleCopy2.allocated, 3)
self.failUnlessEqual(NewStyleCopy2.initialized, 1)
self.failIf(res is orig) # no cheating :)
class BrokerTestCase(unittest.TestCase):
thunkResult = None
def tearDown(self):
import os
try:
os.unlink('None-None-TESTING.pub') # from RemotePublished.getFileName
except OSError:
pass
def thunkErrorBad(self, error):
assert 0, "This should cause a return value, not %s" % error
def thunkResultGood(self, result):
self.thunkResult = result
def thunkErrorGood(self, tb):
pass
def thunkResultBad(self, result):
assert 0, "This should cause an error, not %s" % result
def testReference(self):
c, s, pump = connectedServerAndClient()
class X(pb.Referenceable):
def remote_catch(self,arg):
self.caught = arg
class Y(pb.Referenceable):
def remote_throw(self, a, b):
a.callRemote('catch', b)
s.setNameForLocal("y", Y())
y = c.remoteForName("y")
x = X()
z = X()
y.callRemote('throw', x, z)
pump.pump()
pump.pump()
pump.pump()
assert x.caught is z, "X should have caught Z"
# make sure references to remote methods are equals
self.assertEquals(y.remoteMethod('throw'), y.remoteMethod('throw'))
def testResult(self):
c, s, pump = connectedServerAndClient()
for x, y in (c, s), (s, c):
# test reflexivity
foo = SimpleRemote()
x.setNameForLocal("foo", foo)
bar = y.remoteForName("foo")
self.expectedThunkResult = 8
bar.callRemote('thunk',self.expectedThunkResult - 1).addCallbacks(self.thunkResultGood, self.thunkErrorBad)
# Send question.
pump.pump()
# Send response.
pump.pump()
# Shouldn't require any more pumping than that...
assert self.thunkResult == self.expectedThunkResult,\
"result wasn't received."
def refcountResult(self, result):
self.nestedRemote = result
def testTooManyRefs(self):
l = []
e = []
c, s, pump = connectedServerAndClient()
foo = NestedRemote()
s.setNameForLocal("foo", foo)
x = c.remoteForName("foo")
for igno in xrange(pb.MAX_BROKER_REFS + 10):
if s.transport.closed or c.transport.closed:
break
x.callRemote("getSimple").addCallbacks(l.append, e.append)
pump.pump()
expected = (pb.MAX_BROKER_REFS - 1)
assert s.transport.closed, "transport was not closed"
assert len(l) == expected, "expected %s got %s" % (expected, len(l))
def testCopy(self):
c, s, pump = connectedServerAndClient()
foo = NestedCopy()
s.setNameForLocal("foo", foo)
x = c.remoteForName("foo")
x.callRemote('getCopy').addCallbacks(self.thunkResultGood, self.thunkErrorBad)
pump.pump()
pump.pump()
assert self.thunkResult.check() == 1, "check failed"
def testObserve(self):
c, s, pump = connectedServerAndClient()
# this is really testing the comparison between remote objects, to make
# sure that you can *UN*observe when you have an observer architecture.
a = Observable()
b = Observer()
s.setNameForLocal("a", a)
ra = c.remoteForName("a")
ra.callRemote('observe',b)
pump.pump()
a.notify(1)
pump.pump()
pump.pump()
a.notify(10)
pump.pump()
pump.pump()
assert b.obj is not None, "didn't notify"
assert b.obj == 1, 'notified too much'
def testDefer(self):
c, s, pump = connectedServerAndClient()
d = DeferredRemote()
s.setNameForLocal("d", d)
e = c.remoteForName("d")
pump.pump(); pump.pump()
results = []
e.callRemote('doItLater').addCallback(results.append)
pump.pump(); pump.pump()
assert not d.run, "Deferred method run too early."
d.d.callback(5)
assert d.run == 5, "Deferred method run too late."
pump.pump(); pump.pump()
assert results[0] == 6, "Incorrect result."
def testRefcount(self):
c, s, pump = connectedServerAndClient()
foo = NestedRemote()
s.setNameForLocal("foo", foo)
bar = c.remoteForName("foo")
bar.callRemote('getSimple').addCallbacks(self.refcountResult, self.thunkErrorBad)
# send question
pump.pump()
# send response
pump.pump()
# delving into internal structures here, because GC is sort of
# inherently internal.
rluid = self.nestedRemote.luid
assert s.localObjects.has_key(rluid), "Should have key."
del self.nestedRemote
# nudge the gc
if sys.hexversion >= 0x2000000 and os.name != "java":
import gc
gc.collect()
# try to nudge the GC even if we can't really
pump.pump()
pump.pump()
pump.pump()
assert not s.localObjects.has_key(rluid), "Should NOT have key."
def testCache(self):
c, s, pump = connectedServerAndClient()
obj = NestedCache()
obj2 = NestedComplicatedCache()
vcc = obj2.c
s.setNameForLocal("obj", obj)
s.setNameForLocal("xxx", obj2)
o2 = c.remoteForName("obj")
o3 = c.remoteForName("xxx")
coll = []
o2.callRemote("getCache").addCallback(coll.append).addErrback(coll.append)
o2.callRemote("getCache").addCallback(coll.append).addErrback(coll.append)
complex = []
o3.callRemote("getCache").addCallback(complex.append)
o3.callRemote("getCache").addCallback(complex.append)
pump.flush()
# `worst things first'
assert complex[0].check()
vcc.setFoo4()
pump.flush()
assert complex[0].checkFoo4(), "method was not called."
assert len(coll) == 2
cp = coll[0][0]
assert cp.checkMethod().im_self is cp, "potential refcounting issue"
assert cp.checkSelf() is cp, "other potential refcounting issue"
col2 = []
o2.callRemote('putCache',cp).addCallback(col2.append)
pump.flush()
# The objects were the same (testing lcache identity)
assert col2[0]
# test equality of references to methods
self.assertEquals(o2.remoteMethod("getCache"), o2.remoteMethod("getCache"))
        # now, refcounting (similar to testRefcount)
luid = cp.luid
baroqueLuid = complex[0].luid
assert s.remotelyCachedObjects.has_key(luid), "remote cache doesn't have it"
del coll
del cp
pump.flush()
del complex
del col2
# extra nudge...
pump.flush()
# del vcc.observer
# nudge the gc
if sys.hexversion >= 0x2000000 and os.name != "java":
import gc
gc.collect()
# try to nudge the GC even if we can't really
pump.flush()
# The GC is done with it.
assert not s.remotelyCachedObjects.has_key(luid), "Server still had it after GC"
assert not c.locallyCachedObjects.has_key(luid), "Client still had it after GC"
assert not s.remotelyCachedObjects.has_key(baroqueLuid), "Server still had complex after GC"
assert not c.locallyCachedObjects.has_key(baroqueLuid), "Client still had complex after GC"
assert vcc.observer is None, "observer was not removed"
def whatTheHell(self, obj):
print '!?!?!?!?', repr(obj)
def testViewPoint(self):
c, s, pump = connectedServerAndClient()
pump.pump()
authRef = c.remoteForName("root")
accum = []
pb.logIn(authRef, None, "test", "guest", "guest", "any").addCallbacks(accum.append, self.whatTheHell)
# ident = c.remoteForName("identity")
# ident.attach("test", "any", None).addCallback(accum.append)
pump.flush()
pump.flush()
pump.flush()
test = accum.pop() # okay, this should be our perspective...
test.callRemote('getDummyViewPoint').addCallback(accum.append)
pump.flush()
accum.pop().callRemote('doNothing').addCallback(accum.append)
pump.flush()
assert accum.pop() == 'hello world!', 'oops...'
testViewPoint = suppressWarnings(testViewPoint,
("This is deprecated. Use PBClientFactory.", DeprecationWarning),
("pb.Perspective is deprecated, please use pb.Avatar.", DeprecationWarning))
def testPublishable(self):
import os
try:
os.unlink('None-None-TESTING.pub') # from RemotePublished.getFileName
except OSError:
pass # Sometimes it's not there.
c, s, pump = connectedServerAndClient()
foo = GetPublisher()
# foo.pub.timestamp = 1.0
s.setNameForLocal("foo", foo)
bar = c.remoteForName("foo")
accum = []
bar.callRemote('getPub').addCallbacks(accum.append, self.thunkErrorBad)
pump.flush()
obj = accum.pop()
self.assertEquals(obj.activateCalled, 1)
self.assertEquals(obj.isActivated, 1)
self.assertEquals(obj.yayIGotPublished, 1)
self.assertEquals(obj._wasCleanWhenLoaded, 0) # timestamp's dirty, we don't have a cache file
c, s, pump = connectedServerAndClient()
s.setNameForLocal("foo", foo)
bar = c.remoteForName("foo")
bar.callRemote('getPub').addCallbacks(accum.append, self.thunkErrorBad)
pump.flush()
obj = accum.pop()
self.assertEquals(obj._wasCleanWhenLoaded, 1) # timestamp's clean, our cache file is up-to-date
def gotCopy(self, val):
self.thunkResult = val.id
def testFactoryCopy(self):
c, s, pump = connectedServerAndClient()
ID = 99
obj = NestedCopy()
s.setNameForLocal("foo", obj)
x = c.remoteForName("foo")
x.callRemote('getFactory', ID).addCallbacks(self.gotCopy, self.thunkResultBad)
pump.pump()
pump.pump()
pump.pump()
assert self.thunkResult == ID, "ID not correct on factory object %s" % self.thunkResult
from twisted.spread.util import Pager, StringPager, FilePager, getAllPages
bigString = "helloworld" * 50
callbackArgs = None
callbackKeyword = None
def finishedCallback(*args, **kw):
global callbackArgs, callbackKeyword
callbackArgs = args
callbackKeyword = kw
class Pagerizer(pb.Referenceable):
def __init__(self, callback, *args, **kw):
self.callback, self.args, self.kw = callback, args, kw
def remote_getPages(self, collector):
StringPager(collector, bigString, 100, self.callback, *self.args, **self.kw)
self.args = self.kw = None
class FilePagerizer(pb.Referenceable):
def __init__(self, filename, callback, *args, **kw):
self.filename = filename
self.callback, self.args, self.kw = callback, args, kw
def remote_getPages(self, collector):
FilePager(collector, file(self.filename), self.callback, *self.args, **self.kw)
self.args = self.kw = None
class PagingTestCase(unittest.TestCase):
def setUpClass(self):
self.filename = self.mktemp()
fd = file(self.filename, 'w')
fd.write(bigString)
fd.close()
def tearDownClass(self):
os.remove(self.filename)
def testPagingWithCallback(self):
c, s, pump = connectedServerAndClient()
s.setNameForLocal("foo", Pagerizer(finishedCallback, 'hello', value = 10))
x = c.remoteForName("foo")
l = []
getAllPages(x, "getPages").addCallback(l.append)
while not l:
pump.pump()
assert ''.join(l[0]) == bigString, "Pages received not equal to pages sent!"
assert callbackArgs == ('hello',), "Completed callback not invoked"
assert callbackKeyword == {'value': 10}, "Completed callback not invoked"
def testPagingWithoutCallback(self):
c, s, pump = connectedServerAndClient()
s.setNameForLocal("foo", Pagerizer(None))
x = c.remoteForName("foo")
l = []
getAllPages(x, "getPages").addCallback(l.append)
while not l:
pump.pump()
assert ''.join(l[0]) == bigString, "Pages received not equal to pages sent!"
def testFilePagingWithCallback(self):
c, s, pump = connectedServerAndClient()
s.setNameForLocal("bar", FilePagerizer(self.filename, finishedCallback,
'frodo', value = 9))
x = c.remoteForName("bar")
l = []
getAllPages(x, "getPages").addCallback(l.append)
while not l:
pump.pump()
assert ''.join(l[0]) == bigString, "Pages received not equal to pages sent!"
assert callbackArgs == ('frodo',), "Completed callback not invoked"
assert callbackKeyword == {'value': 9}, "Completed callback not invoked"
def testFilePagingWithoutCallback(self):
c, s, pump = connectedServerAndClient()
s.setNameForLocal("bar", FilePagerizer(self.filename, None))
x = c.remoteForName("bar")
l = []
getAllPages(x, "getPages").addCallback(l.append)
while not l:
pump.pump()
assert ''.join(l[0]) == bigString, "Pages received not equal to pages sent!"
from twisted.spread import publish
class DumbPublishable(publish.Publishable):
def getStateToPublish(self):
return {"yayIGotPublished": 1}
class DumbPub(publish.RemotePublished):
def activated(self):
self.activateCalled = 1
class GetPublisher(pb.Referenceable):
def __init__(self):
self.pub = DumbPublishable("TESTING")
def remote_getPub(self):
return self.pub
pb.setCopierForClass(DumbPublishable, DumbPub)
class DisconnectionTestCase(unittest.TestCase):
"""Test disconnection callbacks."""
def error(self, *args):
raise RuntimeError, "I shouldn't have been called: %s" % args
def gotDisconnected(self):
"""Called on broker disconnect."""
self.gotCallback = 1
def objectDisconnected(self, o):
"""Called on RemoteReference disconnect."""
self.assertEquals(o, self.remoteObject)
self.objectCallback = 1
def testBadSerialization(self):
c, s, pump = connectedServerAndClient()
pump.pump()
s.setNameForLocal("o", BadCopySet())
g = c.remoteForName("o")
l = []
g.callRemote("setBadCopy", BadCopyable()).addErrback(l.append)
reactor.iterate()
pump.flush()
self.assertEquals(len(l), 1)
def testDisconnection(self):
c, s, pump = connectedServerAndClient()
pump.pump()
s.setNameForLocal("o", SimpleRemote())
# get a client reference to server object
r = c.remoteForName("o")
pump.pump()
pump.pump()
pump.pump()
# register and then unregister disconnect callbacks
# making sure they get unregistered
c.notifyOnDisconnect(self.error)
self.assert_(self.error in c.disconnects)
c.dontNotifyOnDisconnect(self.error)
self.assert_(not (self.error in c.disconnects))
r.notifyOnDisconnect(self.error)
self.assert_(r._disconnected in c.disconnects)
self.assert_(self.error in r.disconnectCallbacks)
r.dontNotifyOnDisconnect(self.error)
self.assert_(not (r._disconnected in c.disconnects))
self.assert_(not (self.error in r.disconnectCallbacks))
# register disconnect callbacks
c.notifyOnDisconnect(self.gotDisconnected)
r.notifyOnDisconnect(self.objectDisconnected)
self.remoteObject = r
# disconnect
c.connectionLost(failure.Failure(main.CONNECTION_DONE))
self.assert_(self.gotCallback)
self.assert_(self.objectCallback)
class FreakOut(Exception):
pass
class BadCopyable(pb.Copyable):
def getStateToCopyFor(self, p):
raise FreakOut
class BadCopySet(pb.Referenceable):
def remote_setBadCopy(self, bc):
return None
class LocalRemoteTest(util.LocalAsRemote):
reportAllTracebacks = 0
def sync_add1(self, x):
return x + 1
def async_add(self, x=0, y=1):
return x + y
def async_fail(self):
raise RuntimeError
class SpreadUtilTestCase(unittest.TestCase):
"""Tests for twisted.spread.util"""
def testSync(self):
o = LocalRemoteTest()
self.assertEquals(o.callRemote("add1", 2), 3)
def testAsync(self):
l = []
o = LocalRemoteTest()
d = o.callRemote("add", 2, y=4)
self.assert_(isinstance(d, defer.Deferred))
d.addCallback(l.append)
reactor.iterate()
self.assertEquals(l, [6])
def testAsyncFail(self):
l = []
o = LocalRemoteTest()
d = o.callRemote("fail")
d.addErrback(l.append)
reactor.iterate()
self.assertEquals(len(l), 1)
self.assert_(isinstance(l[0], failure.Failure))
def testRemoteMethod(self):
o = LocalRemoteTest()
m = o.remoteMethod("add1")
self.assertEquals(m(3), 4)
class ReconnectOnce(pb.PBClientFactory):
reconnected = 0
def clientConnectionLost(self, connector, reason):
pb.PBClientFactory.clientConnectionLost(self, connector, reason,
reconnecting=(not self.reconnected))
if not self.reconnected:
self.reconnected = 1
connector.connect()
class ConnectionTestCase(unittest.TestCase):
def setUp(self):
c = pb.Broker()
auth = authorizer.DefaultAuthorizer()
appl = Application("pb-test")
auth.setServiceCollection(appl)
ident = identity.Identity("guest", authorizer=auth)
ident.setPassword("guest")
svc = DummyService("test", appl, authorizer=auth)
ident.addKeyForPerspective(svc.getPerspectiveNamed("any"))
auth.addIdentity(ident)
ident2 = identity.Identity("foo", authorizer=auth)
ident2.setPassword("foo")
ident2.addKeyForPerspective(svc.getPerspectiveNamed("foo"))
auth.addIdentity(ident2)
self.svr = pb.BrokerFactory(pb.AuthRoot(auth))
self.port = reactor.listenTCP(0, self.svr, interface="127.0.0.1")
self.portno = self.port.getHost().port
setUp = suppressWarnings(setUp,
("Update your backend to use PBServerFactory, and then use login().", DeprecationWarning),
("This is deprecated. Use PBServerFactory.", DeprecationWarning),
("Identities are deprecated, switch to credentialcheckers etc.", DeprecationWarning),
("Cred services are deprecated, use realms instead.", DeprecationWarning))
def tearDown(self):
self.port.stopListening()
reactor.iterate(); reactor.iterate();
def _checkRootObject(self, root):
challenge = dR(root.callRemote("username", "guest"))
self.assertEquals(len(challenge), 2)
self.assert_(isinstance(challenge[1], pb.RemoteReference))
# tests for *really* deprecated APIs:
def testGetObjectAt(self):
root = dR(pb.getObjectAt("127.0.0.1", self.portno))
self._checkRootObject(root)
root.broker.transport.loseConnection()
testGetObjectAt = suppressWarnings(testGetObjectAt,
("This is deprecated. Use PBClientFactory.", DeprecationWarning))
def testConnect(self):
p = dR(pb.connect("127.0.0.1", self.portno, "guest", "guest", "test",
perspectiveName="any"))
self.assert_(isinstance(p, pb.RemoteReference))
p.broker.transport.loseConnection()
testConnect = suppressWarnings(testConnect,
("Update your backend to use PBServerFactory, and then use login().", DeprecationWarning),
("This is deprecated. Use PBClientFactory.", DeprecationWarning))
def testIdentityConnector(self):
iConnector = pb.IdentityConnector("127.0.0.1", self.portno, "guest", "guest")
p1 = dR(iConnector.requestService("test", perspectiveName="any"))
p2 = dR(iConnector.requestService("test", perspectiveName="any"))
self.assert_(isinstance(p1, pb.RemoteReference))
self.assert_(isinstance(p2, pb.RemoteReference))
iConnector.disconnect()
testIdentityConnector = suppressWarnings(testIdentityConnector,
("This is deprecated. Use PBClientFactory.", DeprecationWarning))
# tests for new, shiny API, although getPerspective stuff is also deprecated:
def testGoodGetObject(self):
# we test getting both before and after connection
factory = pb.PBClientFactory()
d = factory.getRootObject()
reactor.connectTCP("127.0.0.1", self.portno, factory)
root = dR(d)
self._checkRootObject(root)
root = dR(factory.getRootObject())
self._checkRootObject(root)
root.broker.transport.loseConnection()
def testGoodPerspective(self):
# we test getting both before and after connection
factory = pb.PBClientFactory()
d = factory.getPerspective("guest", "guest", "test", perspectiveName="any")
reactor.connectTCP("127.0.0.1", self.portno, factory)
p = dR(d)
self.assert_(isinstance(p, pb.RemoteReference))
d = factory.getPerspective("guest", "guest", "test", perspectiveName="any")
p = dR(d)
self.assert_(isinstance(p, pb.RemoteReference))
p.broker.transport.loseConnection()
testGoodPerspective = suppressWarnings(testGoodPerspective,
("Update your backend to use PBServerFactory, and then use login().", DeprecationWarning))
def testGoodFailedConnect(self):
factory = pb.PBClientFactory()
d = factory.getPerspective("guest", "guest", "test", perspectiveName="any")
reactor.connectTCP("127.0.0.1", 69, factory)
f = unittest.deferredError(d)
from twisted.internet import error
f.trap(error.ConnectError)
testGoodFailedConnect = suppressWarnings(testGoodFailedConnect,
("Update your backend to use PBServerFactory, and then use login().", DeprecationWarning))
def testDisconnect(self):
factory = pb.PBClientFactory()
d = factory.getPerspective("guest", "guest", "test", perspectiveName="any")
reactor.connectTCP("127.0.0.1", self.portno, factory)
p = dR(d)
d = dR(p.callRemote("getDummyViewPoint")) # just to check it's working
factory.disconnect()
reactor.iterate(); reactor.iterate(); reactor.iterate()
self.assertRaises(pb.DeadReferenceError, p.callRemote, "getDummyViewPoint")
testDisconnect = suppressWarnings(testDisconnect,
("Update your backend to use PBServerFactory, and then use login().", DeprecationWarning))
def testEmptyPerspective(self):
factory = pb.PBClientFactory()
d = factory.getPerspective("foo", "foo", "test")
reactor.connectTCP("127.0.0.1", self.portno, factory)
p = dR(d)
self.assert_(isinstance(p, pb.RemoteReference))
p.broker.transport.loseConnection()
testEmptyPerspective = suppressWarnings(testEmptyPerspective,
("Update your backend to use PBServerFactory, and then use login().", DeprecationWarning))
def testReconnect(self):
factory = ReconnectOnce()
l = []
def disconnected(p):
self.assertEquals(l, [])
l.append(factory.getPerspective("guest", "guest", "test", perspectiveName="any"))
d = factory.getPerspective("guest", "guest", "test", perspectiveName="any")
reactor.connectTCP("127.0.0.1", self.portno, factory)
p = dR(d)
self.assert_(isinstance(p, pb.RemoteReference))
p.notifyOnDisconnect(disconnected)
factory.disconnect()
while not l:
reactor.iterate()
p = dR(l[0])
self.assert_(isinstance(p, pb.RemoteReference))
factory.disconnect()
testReconnect = suppressWarnings(testReconnect,
("Update your backend to use PBServerFactory, and then use login().", DeprecationWarning))
def testImmediateClose(self):
from twisted.internet import protocol
p = dR(protocol.ClientCreator(reactor, protocol.Protocol
).connectTCP("127.0.0.1", self.portno))
p.transport.loseConnection()
reactor.iterate(); reactor.iterate()
# yay new cred, everyone use this:
from twisted.cred import portal, checkers, credentials
class MyRealm:
"""A test realm."""
def __init__(self):
self.p = MyPerspective()
def requestAvatar(self, avatarId, mind, interface):
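        # The portal passes along the avatarId and the client-supplied mind
        # ("BRAINS!"); return (interface, avatar, logout callable).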
assert interface == pb.IPerspective
assert mind == "BRAINS!"
self.p.loggedIn = 1
return pb.IPerspective, self.p, self.p.logout
class MyPerspective(pb.Avatar):
implements(pb.IPerspective)
loggedIn = loggedOut = False
def __init__(self):
pass
def perspective_getViewPoint(self):
return MyView()
def logout(self):
self.loggedOut = 1
class MyView(pb.Viewable):
def view_check(self, user):
return isinstance(user, MyPerspective)
class NewCredTestCase(unittest.TestCase):
def setUp(self):
self.realm = MyRealm()
self.portal = portal.Portal(self.realm)
self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
self.checker.addUser("user", "pass")
self.portal.registerChecker(self.checker)
self.factory = pb.PBServerFactory(self.portal)
self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
self.portno = self.port.getHost().port
def tearDown(self):
self.port.stopListening()
reactor.iterate()
reactor.iterate()
def testLoginLogout(self):
factory = pb.PBClientFactory()
# NOTE: real code probably won't need anything where we have the "BRAINS!"
# argument, passing None is fine. We just do it here to test that it is
# being passed. It is used to give additional info to the realm to aid
# perspective creation, if you don't need that, ignore it.
d = factory.login(credentials.UsernamePassword("user", "pass"), "BRAINS!")
reactor.connectTCP("127.0.0.1", self.portno, factory)
p = dR(d)
self.assertEquals(self.realm.p.loggedIn, 1)
self.assert_(isinstance(p, pb.RemoteReference))
factory.disconnect()
for i in range(5):
reactor.iterate()
self.assertEquals(self.realm.p.loggedOut, 1)
def testBadLogin(self):
factory = pb.PBClientFactory()
for username, password in [("nosuchuser", "pass"), ("user", "wrongpass")]:
d = factory.login(credentials.UsernamePassword(username, password), "BRAINS!")
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
p = unittest.deferredError(d)
self.failUnless(p.check("twisted.cred.error.UnauthorizedLogin"))
c.disconnect()
reactor.iterate()
reactor.iterate()
reactor.iterate()
from twisted.cred.error import UnauthorizedLogin
log.flushErrors(UnauthorizedLogin)
def testView(self):
factory = pb.PBClientFactory()
d = factory.login(credentials.UsernamePassword("user", "pass"), "BRAINS!")
reactor.connectTCP("127.0.0.1", self.portno, factory)
p = dR(d)
v = dR(p.callRemote("getViewPoint"))
self.assertEquals(dR(v.callRemote("check")), 1)
factory.disconnect()
class NonSubclassingPerspective:
implements(pb.IPerspective)
# IPerspective implementation
def perspectiveMessageReceived(self, broker, message, args, kwargs):
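        # Echo the call straight back: unserialize the arguments in our own
        # context, then return the (message, args, kwargs) triple.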
args = broker.unserialize(args, self)
kwargs = broker.unserialize(kwargs, self)
return broker.serialize((message, args, kwargs))
# Methods required by MyRealm
def logout(self):
self.loggedOut = True
class NSPTestCase(unittest.TestCase):
def setUp(self):
self.realm = MyRealm()
self.realm.p = NonSubclassingPerspective()
self.portal = portal.Portal(self.realm)
self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
self.checker.addUser("user", "pass")
self.portal.registerChecker(self.checker)
self.factory = pb.PBServerFactory(self.portal)
self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
self.portno = self.port.getHost().port
def tearDown(self):
self.port.stopListening()
reactor.iterate()
reactor.iterate()
def testNSP(self):
factory = pb.PBClientFactory()
d = factory.login(credentials.UsernamePassword('user', 'pass'), "BRAINS!")
reactor.connectTCP('127.0.0.1', self.portno, factory)
p = dR(d)
self.assertEquals(dR(p.callRemote('ANYTHING', 'here', bar='baz')),
('ANYTHING', ('here',), {'bar': 'baz'}))
factory.disconnect()
| gpl-2.0 |