repo_name | path | copies | size | content | license
---|---|---|---|---|---|
mdublin/Brightcove-Dynamic-Ingest-App
|
ENV/lib/python2.7/site-packages/sqlalchemy/log.py
|
14
|
6796
|
# sqlalchemy/log.py
# Copyright (C) 2006-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
# Includes alterations by Vinay Sajip [email protected]
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Logging control and utilities.
Control of logging for SA can be performed from the regular python logging
module. The regular dotted module namespace is used, starting at
'sqlalchemy'. For class-level logging, the class name is appended.
The "echo" keyword parameter, available on SQLA :class:`.Engine`
and :class:`.Pool` objects, corresponds to a logger specific to that
instance only.
"""
import logging
import sys
# set initial level to WARN. This is so that
# log statements don't occur in the absence of explicit
# logging being enabled for 'sqlalchemy'.
rootlogger = logging.getLogger('sqlalchemy')
if rootlogger.level == logging.NOTSET:
rootlogger.setLevel(logging.WARN)
def _add_default_handler(logger):
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s %(name)s %(message)s'))
logger.addHandler(handler)
_logged_classes = set()
def class_logger(cls, enable=False):
logger = logging.getLogger(cls.__module__ + "." + cls.__name__)
if enable == 'debug':
logger.setLevel(logging.DEBUG)
elif enable == 'info':
logger.setLevel(logging.INFO)
cls._should_log_debug = lambda self: logger.isEnabledFor(logging.DEBUG)
cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO)
cls.logger = logger
_logged_classes.add(cls)
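# Example (illustrative, not part of the original module): class_logger on a
# hypothetical class attaches a 'module.ClassName' logger plus the
# _should_log_debug/_should_log_info predicates used throughout SQLAlchemy:
#
#     class MyPool(object):
#         pass
#
#     class_logger(MyPool, enable='debug')
#     assert MyPool.logger.name.endswith('.MyPool')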
class Identified(object):
logging_name = None
def _should_log_debug(self):
return self.logger.isEnabledFor(logging.DEBUG)
def _should_log_info(self):
return self.logger.isEnabledFor(logging.INFO)
class InstanceLogger(object):
"""A logger adapter (wrapper) for :class:`.Identified` subclasses.
This allows multiple instances (e.g. Engine or Pool instances)
to share a logger, but have its verbosity controlled on a
per-instance basis.
The basic functionality is to return a logging level
which is based on an instance's echo setting.
Default implementation is:
'debug' -> logging.DEBUG
True -> logging.INFO
False -> Effective level of underlying logger
(logging.WARNING by default)
None -> same as False
"""
# Map echo settings to logger levels
_echo_map = {
None: logging.NOTSET,
False: logging.NOTSET,
True: logging.INFO,
'debug': logging.DEBUG,
}
def __init__(self, echo, name):
self.echo = echo
self.logger = logging.getLogger(name)
# if echo flag is enabled and no handlers,
# add a handler to the list
if self._echo_map[echo] <= logging.INFO \
and not self.logger.handlers:
_add_default_handler(self.logger)
#
# Boilerplate convenience methods
#
def debug(self, msg, *args, **kwargs):
"""Delegate a debug call to the underlying logger."""
self.log(logging.DEBUG, msg, *args, **kwargs)
def info(self, msg, *args, **kwargs):
"""Delegate an info call to the underlying logger."""
self.log(logging.INFO, msg, *args, **kwargs)
def warning(self, msg, *args, **kwargs):
"""Delegate a warning call to the underlying logger."""
self.log(logging.WARNING, msg, *args, **kwargs)
warn = warning
def error(self, msg, *args, **kwargs):
"""
Delegate an error call to the underlying logger.
"""
self.log(logging.ERROR, msg, *args, **kwargs)
def exception(self, msg, *args, **kwargs):
"""Delegate an exception call to the underlying logger."""
kwargs["exc_info"] = 1
self.log(logging.ERROR, msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
"""Delegate a critical call to the underlying logger."""
self.log(logging.CRITICAL, msg, *args, **kwargs)
def log(self, level, msg, *args, **kwargs):
"""Delegate a log call to the underlying logger.
The level here is determined by the echo
flag as well as that of the underlying logger, and
logger._log() is called directly.
"""
# inline the logic from isEnabledFor(),
# getEffectiveLevel(), to avoid overhead.
if self.logger.manager.disable >= level:
return
selected_level = self._echo_map[self.echo]
if selected_level == logging.NOTSET:
selected_level = self.logger.getEffectiveLevel()
if level >= selected_level:
self.logger._log(level, msg, args, **kwargs)
def isEnabledFor(self, level):
"""Is this logger enabled for level 'level'?"""
if self.logger.manager.disable >= level:
return False
return level >= self.getEffectiveLevel()
def getEffectiveLevel(self):
"""What's the effective level for this logger?"""
level = self._echo_map[self.echo]
if level == logging.NOTSET:
level = self.logger.getEffectiveLevel()
return level
def instance_logger(instance, echoflag=None):
"""create a logger for an instance that implements :class:`.Identified`."""
if instance.logging_name:
name = "%s.%s.%s" % (instance.__class__.__module__,
instance.__class__.__name__, instance.logging_name)
else:
name = "%s.%s" % (instance.__class__.__module__,
instance.__class__.__name__)
instance._echo = echoflag
if echoflag in (False, None):
# if no echo setting or False, return a Logger directly,
# avoiding overhead of filtering
logger = logging.getLogger(name)
else:
        # if a specified echo flag, return an InstanceLogger,
        # which checks the flag and overrides normal log
        # levels by calling logger._log() directly
logger = InstanceLogger(echoflag, name)
instance.logger = logger
class echo_property(object):
__doc__ = """\
When ``True``, enable log output for this element.
This has the effect of setting the Python logging level for the namespace
of this element's class and object reference. A value of boolean ``True``
indicates that the loglevel ``logging.INFO`` will be set for the logger,
whereas the string value ``debug`` will set the loglevel to
``logging.DEBUG``.
"""
def __get__(self, instance, owner):
if instance is None:
return self
else:
return instance._echo
def __set__(self, instance, value):
instance_logger(instance, echoflag=value)
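# --- Illustrative usage sketch (not part of the original module) ---
# Shows how the echo flag feeds the effective level: echo='debug' maps to
# logging.DEBUG via InstanceLogger._echo_map even while the 'sqlalchemy'
# root logger stays at WARN.
if __name__ == '__main__':
    class _Demo(Identified):
        pass

    demo = _Demo()
    instance_logger(demo, echoflag='debug')
    assert demo.logger.getEffectiveLevel() == logging.DEBUG
    demo.logger.info('emitted because the echo flag lowers the threshold')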
|
mit
|
munkiat/libcloud
|
libcloud/test/common/test_cloudstack.py
|
45
|
7303
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
try:
import simplejson as json
except ImportError:
import json
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import b
from libcloud.utils.py3 import parse_qsl
from libcloud.common.cloudstack import CloudStackConnection
from libcloud.common.types import MalformedResponseError
from libcloud.test import MockHttpTestCase
async_delay = 0
class CloudStackMockDriver(object):
host = 'nonexistent.'
path = '/path'
async_poll_frequency = 0
name = 'fake'
async_delay = 0
class CloudStackCommonTest(unittest.TestCase):
def setUp(self):
CloudStackConnection.conn_classes = (None, CloudStackMockHttp)
self.connection = CloudStackConnection('apikey', 'secret',
host=CloudStackMockDriver.host)
self.connection.poll_interval = 0.0
self.driver = self.connection.driver = CloudStackMockDriver()
def test_sync_request_bad_response(self):
self.driver.path = '/bad/response'
try:
self.connection._sync_request('fake')
except Exception:
e = sys.exc_info()[1]
self.assertTrue(isinstance(e, MalformedResponseError))
return
        self.fail('MalformedResponseError was not raised')
def test_sync_request(self):
self.driver.path = '/sync'
self.connection._sync_request('fake')
def test_async_request_successful(self):
self.driver.path = '/async/success'
result = self.connection._async_request('fake')
self.assertEqual(result, {'fake': 'result'})
def test_async_request_unsuccessful(self):
self.driver.path = '/async/fail'
try:
self.connection._async_request('fake')
except Exception:
e = sys.exc_info()[1]
self.assertEqual(CloudStackMockHttp.ERROR_TEXT, str(e))
return
        self.fail('Expected exception was not raised')
def test_async_request_delayed(self):
global async_delay
self.driver.path = '/async/delayed'
async_delay = 2
self.connection._async_request('fake')
self.assertEqual(async_delay, 0)
def test_signature_algorithm(self):
cases = [
(
{
'command': 'listVirtualMachines'
}, 'z/a9Y7J52u48VpqIgiwaGUMCso0='
), (
{
'command': 'deployVirtualMachine',
'name': 'fred',
'displayname': 'George',
'serviceofferingid': 5,
'templateid': 17,
'zoneid': 23,
'networkids': 42
}, 'gHTo7mYmadZ+zluKHzlEKb1i/QU='
), (
{
'command': 'deployVirtualMachine',
'name': 'fred',
'displayname': 'George+Ringo',
'serviceofferingid': 5,
'templateid': 17,
'zoneid': 23,
'networkids': 42
}, 'tAgfrreI1ZvWlWLClD3gu4+aKv4='
)
]
connection = CloudStackConnection('fnord', 'abracadabra')
for case in cases:
params = connection.add_default_params(case[0])
self.assertEqual(connection._make_signature(params), b(case[1]))
class CloudStackMockHttp(MockHttpTestCase):
ERROR_TEXT = 'ERROR TEXT'
def _response(self, status, result, response):
return (status, json.dumps(result), result, response)
def _check_request(self, url):
url = urlparse.urlparse(url)
query = dict(parse_qsl(url.query))
self.assertTrue('apiKey' in query)
self.assertTrue('command' in query)
self.assertTrue('response' in query)
self.assertTrue('signature' in query)
self.assertTrue(query['response'] == 'json')
return query
def _bad_response(self, method, url, body, headers):
self._check_request(url)
result = {'success': True}
return self._response(httplib.OK, result, httplib.responses[httplib.OK])
def _sync(self, method, url, body, headers):
query = self._check_request(url)
result = {query['command'].lower() + 'response': {}}
return self._response(httplib.OK, result, httplib.responses[httplib.OK])
def _async_success(self, method, url, body, headers):
query = self._check_request(url)
if query['command'].lower() == 'queryasyncjobresult':
self.assertEqual(query['jobid'], '42')
result = {
query['command'].lower() + 'response': {
'jobstatus': 1,
'jobresult': {'fake': 'result'}
}
}
else:
result = {query['command'].lower() + 'response': {'jobid': '42'}}
return self._response(httplib.OK, result, httplib.responses[httplib.OK])
def _async_fail(self, method, url, body, headers):
query = self._check_request(url)
if query['command'].lower() == 'queryasyncjobresult':
self.assertEqual(query['jobid'], '42')
result = {
query['command'].lower() + 'response': {
'jobstatus': 2,
'jobresult': {'errortext': self.ERROR_TEXT}
}
}
else:
result = {query['command'].lower() + 'response': {'jobid': '42'}}
return self._response(httplib.OK, result, httplib.responses[httplib.OK])
def _async_delayed(self, method, url, body, headers):
global async_delay
query = self._check_request(url)
if query['command'].lower() == 'queryasyncjobresult':
self.assertEqual(query['jobid'], '42')
if async_delay == 0:
result = {
query['command'].lower() + 'response': {
'jobstatus': 1,
'jobresult': {'fake': 'result'}
}
}
else:
result = {
query['command'].lower() + 'response': {
'jobstatus': 0,
}
}
async_delay -= 1
else:
result = {query['command'].lower() + 'response': {'jobid': '42'}}
return self._response(httplib.OK, result, httplib.responses[httplib.OK])
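# --- Illustrative sketch (based on the public CloudStack signing spec; the
# exact encoding details of CloudStackConnection._make_signature may differ):
# sort the query parameters, lowercase the encoded string, HMAC-SHA1 it with
# the API secret, then base64-encode the digest.
def _example_cloudstack_signature(params, secret):
    import base64
    import hashlib
    import hmac
    pairs = sorted(params.items())
    query = '&'.join('%s=%s' % (k, v) for k, v in pairs).lower()
    digest = hmac.new(b(secret), b(query), hashlib.sha1).digest()
    return base64.b64encode(digest)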
if __name__ == '__main__':
sys.exit(unittest.main())
|
apache-2.0
|
pgodd/KOS
|
doc/ksdomain.py
|
11
|
8805
|
# -*- coding: utf-8 -*-
import re
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx import addnodes
from sphinx.roles import XRefRole
from sphinx.locale import l_, _
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.util.nodes import make_refnode
from sphinx.util.compat import Directive
from sphinx.util.docfields import Field, GroupedField, TypedField
ks_sig_re = re.compile(r'''
(?:
(?P<prefix>
[a-zA-Z][\w:]*
)
:
)?
(?P<object>
[a-zA-Z][\w]*
)
(?:
\(
(?P<args>
.*
)
\)
)?
''', re.VERBOSE)
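# Illustrative check (not part of the original extension): how ks_sig_re
# splits a KerboScript signature into its named groups.
#
#     >>> m = ks_sig_re.match('VESSEL:STAGE(1, 2)')
#     >>> m.group('prefix'), m.group('object'), m.group('args')
#     ('VESSEL', 'STAGE', '1, 2')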
class KOSObject(ObjectDescription):
def add_target_and_index(self, name, sig, signode):
targetname = self.objtype + ':' + name.upper()
if targetname not in self.state.document.ids:
signode['names'].append(targetname)
signode['ids'].append(targetname)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
objects = self.env.domaindata['ks']['objects']
key = (self.objtype, name.upper())
if key in objects:
self.env.warn(self.env.docname,
'duplicate description of %s %s, ' %
(self.objtype, name.upper()) +
'other instance in ' +
self.env.doc2path(objects[key]),
self.lineno)
objects[key] = self.env.docname
indextext = self.get_index_text(self.objtype, name)
if indextext:
self.indexnode['entries'].append(('single', indextext,
targetname, ''))
class KOSGlobal(KOSObject):
doc_field_types = [
Field('access', label=l_('Access'), has_arg=False),
Field('type' , label=l_('Type' ), has_arg=False),
]
def handle_signature(self, sig, signode):
fullname = sig
if sig.upper().startswith('CONSTANT():'):
name = sig[11:]
else:
name = sig
signode += addnodes.desc_name(name, fullname)
return name
def get_index_text(self, objectname, name):
return _('{}'.format(name))
class KOSFunction(KOSObject):
doc_field_types = [
Field('access', label=l_('Access'), has_arg=False),
TypedField('parameter', label=l_('Parameters'),
names=('param', 'parameter', 'arg', 'argument'),
typerolename='obj', typenames=('paramtype', 'type')),
Field('returnvalue', label=l_('Returns'), has_arg=False,
names=('returns', 'return')),
Field('returntype', label=l_('Return type'), has_arg=False,
names=('rtype','type')),
]
def handle_signature(self, sig, signode):
m = ks_sig_re.match(sig)
name = m.group('object')
signode += addnodes.desc_name(name,name)
args = m.group('args')
if args:
signode += addnodes.desc_parameterlist(args,args)
else:
signode += addnodes.desc_parameterlist()
return name
def get_index_text(self, objectname, name):
return _('{}()'.format(name))
class KOSStructure(KOSObject):
def handle_signature(self, sig, signode):
m = ks_sig_re.match(sig)
name = m.group('object')
signode += addnodes.desc_annotation('structure ','structure ')
signode += addnodes.desc_name(name,name)
return name
def get_index_text(self, objectname, name):
return _('{} [struct]'.format(name))
def before_content(self):
self.env.temp_data['ks:structure'] = self.names[0]
def after_content(self):
self.env.temp_data['ks:structure'] = None
class KOSAttribute(KOSObject):
doc_field_types = [
Field('access', label=l_('Access'), has_arg=False),
Field('type' , label=l_('Type' ), has_arg=False),
]
def handle_signature(self, sig, signode):
m = ks_sig_re.match(sig)
name = m.group('object')
current_struct = self.env.temp_data.get('ks:structure')
if m.group('prefix') is None:
if current_struct is not None:
struct = current_struct
fullname = current_struct + ':' + name
else:
struct = m.group('prefix').split(':')[-1]
fullname = struct + ':' + name
if struct is not None:
if struct != '':
signode += addnodes.desc_type(struct,struct+':')
signode += addnodes.desc_name(fullname, name)
return fullname
def get_index_text(self, objectname, name):
return _('{}'.format(name))
class KOSMethod(KOSObject):
doc_field_types = [
Field('access', label=l_('Access'), has_arg=False),
TypedField('parameter', label=l_('Parameters'),
names=('param', 'parameter', 'arg', 'argument'),
typerolename='obj', typenames=('paramtype', 'type')),
Field('returnvalue', label=l_('Returns'), has_arg=False,
names=('returns', 'return')),
Field('returntype', label=l_('Return type'), has_arg=False,
names=('rtype','type')),
]
def handle_signature(self, sig, signode):
m = ks_sig_re.match(sig)
name = m.group('object')
current_struct = self.env.temp_data.get('ks:structure')
        if m.group('prefix') is None:
            if current_struct is not None:
                struct = current_struct
                fullname = current_struct + ':' + name
else:
struct = m.group('prefix').split(':')[-1]
fullname = struct + ':' + name
if struct is not None:
if struct != '':
signode += addnodes.desc_type(struct,struct+':')
signode += addnodes.desc_name(fullname, name)
args = m.group('args')
if args:
signode += addnodes.desc_parameterlist(args,args)
else:
signode += addnodes.desc_parameterlist()
return fullname
def get_index_text(self, objectname, name):
return _('{}()'.format(name))
class KOSXRefRole(XRefRole):
def process_link(self, *args):
title, target = super(KOSXRefRole,self).process_link(*args)
m = ks_sig_re.match(target)
target = m.group('object')
if m.group('prefix') is not None:
struct = m.group('prefix').split(':')[-1]
target = ':'.join([struct,target])
return title, target.upper()
class KOSAttrXRefRole(XRefRole):
def process_link(self, env, *args):
title, target = super(KOSAttrXRefRole,self).process_link(env, *args)
m = ks_sig_re.match(target)
target = m.group('object')
if m.group('prefix') is None:
current_struct = env.temp_data.get('ks:structure')
if current_struct is not None:
target = ':'.join([current_struct,target])
else:
struct = m.group('prefix').split(':')[-1]
target = ':'.join([struct,target])
return title, target.upper()
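# Illustrative RST usage (assumed from the roles registered below): pages can
# write :ks:struct:`VESSEL` or :ks:attr:`VESSEL:ALTITUDE`; KOSAttrXRefRole
# falls back to the enclosing ks:structure when the prefix is omitted, and
# both roles upper-case the target before lookup.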
class KOSDomain(Domain):
name = 'ks'
label = 'KerboScript'
initial_data = {
'objects': {}, # fullname -> docname, objtype
}
object_types = {
'global' : ObjType(l_('global' ), 'global'),
'function' : ObjType(l_('function' ), 'func' ),
'structure': ObjType(l_('structure'), 'struct'),
        'attribute': ObjType(l_('attribute'), 'attr'  ),
'method' : ObjType(l_('method' ), 'meth' ),
}
directives = {
'global' : KOSGlobal,
'function' : KOSFunction,
'structure': KOSStructure,
'attribute': KOSAttribute,
'method' : KOSMethod,
}
roles = {
'global': KOSXRefRole(),
'func' : KOSXRefRole(),
'struct': KOSXRefRole(),
'attr' : KOSAttrXRefRole(),
'meth' : KOSAttrXRefRole(),
}
def resolve_xref(self, env, fromdocname, builder, typ, target, node,
contnode):
objects = self.data['objects']
objtypes = self.objtypes_for_role(typ)
for objtype in objtypes:
if (objtype, target.upper()) in objects:
return make_refnode(builder, fromdocname,
                                    objects[objtype, target.upper()],
objtype + ':' + target.upper(),
contnode, target + ' ' + objtype)
def get_objects(self):
for (typ, name), docname in self.data['objects'].iteritems():
yield name, name, typ, docname, name, 1
def setup(app):
app.add_domain(KOSDomain)
|
gpl-3.0
|
teonlamont/mne-python
|
mne/io/tests/test_raw.py
|
2
|
6009
|
# Generic tests that all raw classes should run
from os import path as op
import math
import pytest
import numpy as np
from numpy.testing import (assert_allclose, assert_array_almost_equal,
assert_equal, assert_array_equal)
from mne import concatenate_raws, create_info
from mne.datasets import testing
from mne.io import read_raw_fif, RawArray
from mne.utils import _TempDir
def _test_raw_reader(reader, test_preloading=True, **kwargs):
"""Test reading, writing and slicing of raw classes.
Parameters
----------
reader : function
Function to test.
    test_preloading : bool
        Whether the reader supports ``preload=False``. If True, the preloaded,
        non-preloaded, and memory-mapped-to-file cases are all tested.
**kwargs :
Arguments for the reader. Note: Do not use preload as kwarg.
Use ``test_preloading`` instead.
Returns
-------
raw : Instance of Raw
A preloaded Raw object.
"""
tempdir = _TempDir()
rng = np.random.RandomState(0)
if test_preloading:
raw = reader(preload=True, **kwargs)
# don't assume the first is preloaded
buffer_fname = op.join(tempdir, 'buffer')
picks = rng.permutation(np.arange(len(raw.ch_names) - 1))[:10]
picks = np.append(picks, len(raw.ch_names) - 1) # test trigger channel
bnd = min(int(round(raw.buffer_size_sec *
raw.info['sfreq'])), raw.n_times)
slices = [slice(0, bnd), slice(bnd - 1, bnd), slice(3, bnd),
slice(3, 300), slice(None), slice(1, bnd)]
if raw.n_times >= 2 * bnd: # at least two complete blocks
slices += [slice(bnd, 2 * bnd), slice(bnd, bnd + 1),
slice(0, bnd + 100)]
other_raws = [reader(preload=buffer_fname, **kwargs),
reader(preload=False, **kwargs)]
for sl_time in slices:
data1, times1 = raw[picks, sl_time]
for other_raw in other_raws:
data2, times2 = other_raw[picks, sl_time]
assert_allclose(data1, data2)
assert_allclose(times1, times2)
else:
raw = reader(**kwargs)
full_data = raw._data
assert raw.__class__.__name__ in repr(raw) # to test repr
assert raw.info.__class__.__name__ in repr(raw.info)
# Test saving and reading
out_fname = op.join(tempdir, 'test_raw.fif')
raw = concatenate_raws([raw])
raw.save(out_fname, tmax=raw.times[-1], overwrite=True, buffer_size_sec=1)
raw3 = read_raw_fif(out_fname)
assert set(raw.info.keys()) == set(raw3.info.keys())
assert_allclose(raw3[0:20][0], full_data[0:20], rtol=1e-6,
atol=1e-20) # atol is very small but > 0
assert_array_almost_equal(raw.times, raw3.times)
assert not math.isnan(raw3.info['highpass'])
assert not math.isnan(raw3.info['lowpass'])
assert not math.isnan(raw.info['highpass'])
assert not math.isnan(raw.info['lowpass'])
assert raw3.info['kit_system_id'] == raw.info['kit_system_id']
# Make sure concatenation works
first_samp = raw.first_samp
last_samp = raw.last_samp
concat_raw = concatenate_raws([raw.copy(), raw])
assert_equal(concat_raw.n_times, 2 * raw.n_times)
assert_equal(concat_raw.first_samp, first_samp)
assert_equal(concat_raw.last_samp - last_samp + first_samp, last_samp + 1)
idx = np.where(concat_raw.annotations.description == 'BAD boundary')[0]
assert_array_almost_equal([(last_samp - first_samp) / raw.info['sfreq']],
concat_raw.annotations.onset[idx], decimal=2)
if raw.info['meas_id'] is not None:
for key in ['secs', 'usecs', 'version']:
assert_equal(raw.info['meas_id'][key], raw3.info['meas_id'][key])
assert_array_equal(raw.info['meas_id']['machid'],
raw3.info['meas_id']['machid'])
return raw
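# Usage sketch (hypothetical call, not part of the original module): a
# format-specific test would invoke
#
#     raw = _test_raw_reader(read_raw_fif, fname=raw_fname)
#
# to run the preload/slice/save round-trip checks above and get back the
# preloaded Raw object for further assertions.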
def _test_concat(reader, *args):
"""Test concatenation of raw classes that allow not preloading."""
data = None
for preload in (True, False):
raw1 = reader(*args, preload=preload)
raw2 = reader(*args, preload=preload)
raw1.append(raw2)
raw1.load_data()
if data is None:
data = raw1[:, :][0]
assert_allclose(data, raw1[:, :][0])
for first_preload in (True, False):
raw = reader(*args, preload=first_preload)
data = raw[:, :][0]
for preloads in ((True, True), (True, False), (False, False)):
for last_preload in (True, False):
t_crops = raw.times[np.argmin(np.abs(raw.times - 0.5)) +
[0, 1]]
raw1 = raw.copy().crop(0, t_crops[0])
if preloads[0]:
raw1.load_data()
raw2 = raw.copy().crop(t_crops[1], None)
if preloads[1]:
raw2.load_data()
raw1.append(raw2)
if last_preload:
raw1.load_data()
assert_allclose(data, raw1[:, :][0])
@testing.requires_testing_data
def test_time_index():
"""Test indexing of raw times."""
raw_fname = op.join(op.dirname(__file__), '..', '..', 'io', 'tests',
'data', 'test_raw.fif')
raw = read_raw_fif(raw_fname)
# Test original (non-rounding) indexing behavior
orig_inds = raw.time_as_index(raw.times)
    assert len(set(orig_inds)) != len(orig_inds)
    # Test new (rounding) indexing behavior
    new_inds = raw.time_as_index(raw.times, use_rounding=True)
    assert len(set(new_inds)) == len(new_inds)
def test_annotation_property_deprecation_warning():
"""Test that assigning annotations warns and nowhere else."""
with pytest.warns(None) as w:
raw = RawArray(np.random.rand(1, 1), create_info(1, 1))
    assert len(w) == 0
with pytest.warns(DeprecationWarning, match='by assignment is deprecated'):
raw.annotations = None
|
bsd-3-clause
|
palmhold/djinn
|
djinn/errors.py
|
2
|
3285
|
# -*- coding: utf-8 -*-
#
# Copyright(c) 2014 palmhold.com
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six import PY2
from tornado import escape
from tornado.web import HTTPError
# HTTP status code
HTTP_OK = 200
ERROR_BAD_REQUEST = 400
ERROR_UNAUTHORIZED = 401
ERROR_FORBIDDEN = 403
ERROR_NOT_FOUND = 404
ERROR_METHOD_NOT_ALLOWED = 405
ERROR_INTERNAL_SERVER_ERROR = 500
# Custom error code
ERROR_WARNING = 1001
ERROR_DEPRECATED = 1002
ERROR_MAINTAINING = 1003
ERROR_UNKNOWN_ERROR = 9999
# default errors
_unknown_error = "unknow_error"
_unknown_message = "Unknown error"
_error_types = {400: "bad_request",
401: "unauthorized",
403: "forbidden",
404: "not_found",
405: "method_not_allowed",
500: "internal_server_error",
1001: "warning",
1002: "deprecated",
1003: "maintaining",
9999: _unknown_error}
ERROR_MESSAGES = {400: "Bad request",
401: "Unauthorized",
403: "Forbidden",
404: "Not found",
405: "Method not allowed",
500: "Internal server error",
1001: "Warning",
1002: "Deprecated",
1003: "Maintaining",
9999: _unknown_message}
class DjinnError(Exception):
pass
class DatastoreError(DjinnError):
pass
class TemplateContextError(DjinnError):
"""Template context variable does not exist."""
pass
class HTTPAPIError(HTTPError):
"""API error handling exception
API server always returns formatted JSON to client even there is
an internal server error.
"""
def __init__(self, status_code=ERROR_UNKNOWN_ERROR, message=None,
error=None, data=None, *args, **kwargs):
assert isinstance(data, dict) or data is None
message = message if message else ""
if PY2:
assert isinstance(message, basestring)
else:
assert isinstance(message, (str, bytes))
super(HTTPAPIError, self).__init__(int(status_code),
log_message=message, *args, **kwargs)
self.error = error if error else \
_error_types.get(self.status_code, _unknown_error)
self.message = message if message else \
ERROR_MESSAGES.get(self.status_code, _unknown_message)
self.data = data if data is not None else {}
def __str__(self):
err = {"meta": {"code": self.status_code, "error": self.error}}
if self.data:
err["data"] = self.data
if self.message:
err["meta"]["message"] = self.message % self.args
return escape.json_encode(err)
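# --- Illustrative sketch (not part of the original module) ---
# str() on the error yields the formatted JSON body described above, e.g.
# {"meta": {"code": 404, "error": "not_found", "message": "Not found"}}.
if __name__ == '__main__':
    print(HTTPAPIError(ERROR_NOT_FOUND))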
|
apache-2.0
|
apdavison/python-neo
|
neo/test/iotest/test_nsdfio.py
|
3
|
8778
|
"""
Tests of neo.io.NSDFIO
"""
import numpy as np
import quantities as pq
from datetime import datetime
import os
import unittest
from neo.io.nsdfio import HAVE_NSDF, NSDFIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.core import AnalogSignal, Segment, Block, ChannelIndex
from neo.test.tools import assert_same_attributes, assert_same_annotations, \
assert_neo_object_is_compliant
@unittest.skipUnless(HAVE_NSDF, "Requires NSDF")
class CommonTests(BaseTestIO, unittest.TestCase):
ioclass = NSDFIO
read_and_write_is_bijective = False
@unittest.skipUnless(HAVE_NSDF, "Requires NSDF")
class NSDFIOTest(unittest.TestCase):
"""
Base class for all NSDFIO tests.
    setUp and tearDown methods are responsible for setting up and cleaning up
    after tests, respectively.
All create_{object} methods create and return an example {object}.
"""
def setUp(self):
self.filename = 'nsdfio_testfile.h5'
self.io = NSDFIO(self.filename)
def tearDown(self):
os.remove(self.filename)
def create_list_of_blocks(self):
blocks = []
for i in range(2):
blocks.append(self.create_block(name='Block #{}'.format(i)))
return blocks
def create_block(self, name='Block'):
block = Block()
self._assign_basic_attributes(block, name=name)
self._assign_datetime_attributes(block)
self._assign_index_attribute(block)
self._create_block_children(block)
self._assign_annotations(block)
return block
def _create_block_children(self, block):
for i in range(3):
block.segments.append(self.create_segment(block, name='Segment #{}'.format(i)))
for i in range(3):
block.channel_indexes.append(
self.create_channelindex(block, name='ChannelIndex #{}'.format(i),
analogsignals=[seg.analogsignals[i] for seg in
block.segments]))
def create_segment(self, parent=None, name='Segment'):
segment = Segment()
segment.block = parent
self._assign_basic_attributes(segment, name=name)
self._assign_datetime_attributes(segment)
self._assign_index_attribute(segment)
self._create_segment_children(segment)
self._assign_annotations(segment)
return segment
def _create_segment_children(self, segment):
for i in range(2):
segment.analogsignals.append(self.create_analogsignal(
segment, name='Signal #{}'.format(i * 3)))
segment.analogsignals.append(self.create_analogsignal2(
segment, name='Signal #{}'.format(i * 3 + 1)))
segment.analogsignals.append(self.create_analogsignal3(
segment, name='Signal #{}'.format(i * 3 + 2)))
def create_analogsignal(self, parent=None, name='AnalogSignal1'):
signal = AnalogSignal([[1.0, 2.5], [2.2, 3.1], [3.2, 4.4]], units='mV',
sampling_rate=100 * pq.Hz, t_start=2 * pq.min)
signal.segment = parent
self._assign_basic_attributes(signal, name=name)
self._assign_annotations(signal)
return signal
def create_analogsignal2(self, parent=None, name='AnalogSignal2'):
signal = AnalogSignal([[1], [2], [3], [4], [5]], units='mA',
sampling_period=0.5 * pq.ms)
signal.segment = parent
self._assign_annotations(signal)
return signal
def create_analogsignal3(self, parent=None, name='AnalogSignal3'):
signal = AnalogSignal([[1, 2, 3], [4, 5, 6]], units='mV',
sampling_rate=2 * pq.kHz, t_start=100 * pq.s)
signal.segment = parent
self._assign_basic_attributes(signal, name=name)
return signal
def create_channelindex(self, parent=None, name='ChannelIndex', analogsignals=None):
channels_num = min([signal.shape[1] for signal in analogsignals])
channelindex = ChannelIndex(index=np.arange(channels_num),
channel_names=['Channel{}'.format(
i) for i in range(channels_num)],
channel_ids=np.arange(channels_num),
coordinates=([[1.87, -5.2, 4.0]] * channels_num) * pq.cm)
for signal in analogsignals:
channelindex.analogsignals.append(signal)
self._assign_basic_attributes(channelindex, name)
self._assign_annotations(channelindex)
return channelindex
def _assign_basic_attributes(self, object, name=None):
if name is None:
object.name = 'neo object'
else:
object.name = name
object.description = 'Example of neo object'
object.file_origin = 'datafile.pp'
def _assign_datetime_attributes(self, object):
object.file_datetime = datetime(2017, 6, 11, 14, 53, 23)
object.rec_datetime = datetime(2017, 5, 29, 13, 12, 47)
def _assign_index_attribute(self, object):
object.index = 12
def _assign_annotations(self, object):
object.annotations = {'str': 'value',
'int': 56,
'float': 5.234}
@unittest.skipUnless(HAVE_NSDF, "Requires NSDF")
class NSDFIOTestWriteThenRead(NSDFIOTest):
"""
Class for testing NSDFIO.
It first creates example neo objects, then writes them to the file,
reads the file and compares the result with the original ones.
    All test_{object} methods run a "write then read" test for an {object}.
    All compare_{object} methods check whether the second {object} is a
    proper copy of the first one.
"""
lazy_modes = [False]
def test_list_of_blocks(self, lazy=False):
blocks = self.create_list_of_blocks()
self.io.write(blocks)
for lazy in self.lazy_modes:
blocks2 = self.io.read(lazy=lazy)
self.compare_list_of_blocks(blocks, blocks2, lazy)
def test_block(self, lazy=False):
block = self.create_block()
self.io.write_block(block)
for lazy in self.lazy_modes:
block2 = self.io.read_block(lazy=lazy)
self.compare_blocks(block, block2, lazy)
def test_segment(self, lazy=False):
segment = self.create_segment()
self.io.write_segment(segment)
for lazy in self.lazy_modes:
segment2 = self.io.read_segment(lazy=lazy)
self.compare_segments(segment, segment2, lazy)
def compare_list_of_blocks(self, blocks1, blocks2, lazy=False):
assert len(blocks1) == len(blocks2)
for block1, block2 in zip(blocks1, blocks2):
self.compare_blocks(block1, block2, lazy)
def compare_blocks(self, block1, block2, lazy=False):
self._compare_objects(block1, block2)
assert block2.file_datetime == datetime.fromtimestamp(os.stat(self.filename).st_mtime)
assert_neo_object_is_compliant(block2)
self._compare_blocks_children(block1, block2, lazy=lazy)
def _compare_blocks_children(self, block1, block2, lazy):
assert len(block1.segments) == len(block2.segments)
for segment1, segment2 in zip(block1.segments, block2.segments):
self.compare_segments(segment1, segment2, lazy=lazy)
def compare_segments(self, segment1, segment2, lazy=False):
self._compare_objects(segment1, segment2)
assert segment2.file_datetime == datetime.fromtimestamp(os.stat(self.filename).st_mtime)
self._compare_segments_children(segment1, segment2, lazy=lazy)
def _compare_segments_children(self, segment1, segment2, lazy):
assert len(segment1.analogsignals) == len(segment2.analogsignals)
for signal1, signal2 in zip(segment1.analogsignals, segment2.analogsignals):
self.compare_analogsignals(signal1, signal2, lazy=lazy)
def compare_analogsignals(self, signal1, signal2, lazy=False):
if not lazy:
self._compare_objects(signal1, signal2)
else:
self._compare_objects(signal1, signal2, exclude_attr=['shape', 'signal'])
assert signal2.lazy_shape == signal1.shape
assert signal2.dtype == signal1.dtype
def _compare_objects(self, object1, object2, exclude_attr=[]):
assert object1.__class__.__name__ == object2.__class__.__name__
assert object2.file_origin == self.filename
assert_same_attributes(object1, object2, exclude=[
'file_origin',
'file_datetime'] + exclude_attr)
assert_same_annotations(object1, object2)
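# Note (illustrative, not part of the original tests): lazy_modes is [False]
# above, so only the eager branch of compare_analogsignals runs; extending it
# to [False, True] would also exercise the lazy_shape/dtype checks.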
if __name__ == "__main__":
unittest.main()
|
bsd-3-clause
|
mrquim/mrquimrepo
|
plugin.video.playlistLoader/resources/lib/chardet/langcyrillicmodel.py
|
2762
|
17725
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# KOI8-R language model
# Character Mapping Table:
KOI8R_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
)
win1251_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)
latin5_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
macCyrillic_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)
IBM855_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)
IBM866_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 97.6601%
# first 1024 sequences: 2.3389%
# rest sequences: 0.1237%
# negative sequences: 0.0009%
RussianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
)
Koi8rModel = {
'charToOrderMap': KOI8R_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "KOI8-R"
}
Win1251CyrillicModel = {
'charToOrderMap': win1251_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
Latin5CyrillicModel = {
'charToOrderMap': latin5_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
MacCyrillicModel = {
'charToOrderMap': macCyrillic_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "MacCyrillic"
}
Ibm866Model = {
'charToOrderMap': IBM866_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM866"
}
Ibm855Model = {
'charToOrderMap': IBM855_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM855"
}
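# Minimal scoring sketch (illustrative; chardet's real prober lives in
# sbcharsetprober.py, not in this module): two consecutive bytes are mapped
# to frequency orders, and orders below 64 index into RussianLangModel as a
# flattened 64x64 precedence matrix.
def _example_pair_likelihood(model, byte1, byte2):
    order1 = model['charToOrderMap'][byte1]
    order2 = model['charToOrderMap'][byte2]
    if order1 >= 64 or order2 >= 64:
        return None  # outside the 64 most frequent characters
    return model['precedenceMatrix'][order1 * 64 + order2]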
# flake8: noqa
|
gpl-2.0
|
bunjiboys/security_monkey
|
security_monkey/watchers/keypair.py
|
1
|
4346
|
# Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.watchers.keypair
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Patrick Kelley <[email protected]> @monkeysecurity
"""
from security_monkey.watcher import Watcher
from security_monkey.watcher import ChangeItem
from security_monkey.constants import TROUBLE_REGIONS
from security_monkey.exceptions import BotoConnectionIssue
from security_monkey.datastore import Account
from security_monkey import app
class Keypair(Watcher):
index = 'keypair'
i_am_singular = 'Keypair'
i_am_plural = 'Keypairs'
def __init__(self, accounts=None, debug=False):
super(Keypair, self).__init__(accounts=accounts, debug=debug)
def slurp(self):
"""
:returns: item_list - list of IAM SSH Keypairs.
:returns: exception_map - A dict where the keys are a tuple containing the
location of the exception and the value is the actual exception
"""
self.prep_for_slurp()
item_list = []
exception_map = {}
from security_monkey.common.sts_connect import connect
for account in self.accounts:
try:
account_db = Account.query.filter(Account.name == account).first()
account_number = account_db.number
ec2 = connect(account, 'ec2')
regions = ec2.get_all_regions()
except Exception as e: # EC2ResponseError
# Some Accounts don't subscribe to EC2 and will throw an exception here.
exc = BotoConnectionIssue(str(e), 'keypair', account, None)
self.slurp_exception((self.index, account), exc, exception_map, source="{}-watcher".format(self.index))
continue
for region in regions:
app.logger.debug("Checking {}/{}/{}".format(Keypair.index, account, region.name))
try:
rec2 = connect(account, 'ec2', region=region)
kps = self.wrap_aws_rate_limited_call(
rec2.get_all_key_pairs
)
except Exception as e:
if region.name not in TROUBLE_REGIONS:
exc = BotoConnectionIssue(str(e), 'keypair', account, region.name)
self.slurp_exception((self.index, account, region.name), exc, exception_map,
source="{}-watcher".format(self.index))
continue
app.logger.debug("Found {} {}".format(len(kps), Keypair.i_am_plural))
for kp in kps:
if self.check_ignore_list(kp.name):
continue
arn = 'arn:aws:ec2:{region}:{account_number}:key-pair/{name}'.format(
region=region.name,
account_number=account_number,
name=kp.name)
item_list.append(KeypairItem(region=region.name, account=account, name=kp.name, arn=arn,
config={
'fingerprint': kp.fingerprint,
'arn': arn,
'name': kp.name
}))
return item_list, exception_map
class KeypairItem(ChangeItem):
def __init__(self, region=None, account=None, name=None, arn=None, config={}):
super(KeypairItem, self).__init__(
index=Keypair.index,
region=region,
account=account,
name=name,
arn=arn,
new_config=config)
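# Usage sketch (illustrative, not part of the original watcher):
#
#     watcher = Keypair(accounts=['example-account'])
#     item_list, exception_map = watcher.slurp()
#
# item_list holds one KeypairItem per SSH keypair found; exception_map is
# keyed by (index, account) or (index, account, region) tuples as built in
# slurp() above.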
|
apache-2.0
|
ZhangXinNan/tensorflow
|
tensorflow/python/training/slot_creator_test.py
|
45
|
5256
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional test for slot_creator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import slot_creator
class SlotCreatorTest(test.TestCase):
def testCreateSlotFromVariable(self):
with self.test_session():
v = variables.Variable([1.0, 2.5], name="var")
slot = slot_creator.create_slot(v, v.initialized_value(), name="slot")
variables.global_variables_initializer().run()
self.assertEqual("var/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
self.assertEqual(dtypes.float32, slot.dtype.base_dtype)
self.assertAllEqual([1.0, 2.5], slot.eval())
def testCreateSlotFromTensor(self):
with self.test_session():
v = constant_op.constant([1.0, 2.5], name="const")
slot = slot_creator.create_slot(v, v * 2, name="slot")
variables.global_variables_initializer().run()
self.assertEqual("const/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
self.assertEqual(dtypes.float32, slot.dtype.base_dtype)
self.assertAllEqual([2.0, 5.0], slot.eval())
def testCreateZerosSlotFromVariable(self):
with self.test_session():
v = variables.Variable([1.0, 2.5], name="var")
with ops.control_dependencies(None):
slot = slot_creator.create_zeros_slot(
v, name="slot", dtype=dtypes.float64)
variables.global_variables_initializer().run()
self.assertEqual("var/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
self.assertEqual(dtypes.float64, slot.dtype.base_dtype)
self.assertAllEqual([0.0, 0.0], slot.eval())
def testCreateZerosSlotFromDynamicShapedVariable(self):
with self.test_session():
dyn_shape = constant_op.constant([2], dtype=dtypes.int32)
dyn_shape = array_ops.placeholder_with_default(dyn_shape,
shape=[None])
v = variable_scope.get_variable(
"var",
initializer=random_ops.random_uniform(dyn_shape,
dtype=dtypes.float64),
validate_shape=False)
with ops.control_dependencies(None):
slot = slot_creator.create_zeros_slot(
v, name="slot", dtype=dtypes.float64)
variables.global_variables_initializer().run()
self.assertEqual("var/slot", slot.op.name)
self.assertEqual([2], array_ops.shape(slot).eval())
self.assertEqual(dtypes.float64, slot.dtype.base_dtype)
self.assertAllEqual([0.0, 0.0], slot.eval())
def testCreateZerosSlotFromTensor(self):
with self.test_session():
v = constant_op.constant([1.0, 2.5], name="const")
with ops.control_dependencies(None):
slot = slot_creator.create_zeros_slot(v, name="slot")
variables.global_variables_initializer().run()
self.assertEqual("const/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
self.assertEqual(dtypes.float32, slot.dtype.base_dtype)
self.assertAllEqual([0.0, 0.0], slot.eval())
def testCreateZerosSlotFromDynamicShapedTensor(self):
with self.test_session():
v = random_ops.random_uniform([2], dtype=dtypes.float64)
v = array_ops.placeholder_with_default(v, shape=[None], name="const")
with ops.control_dependencies(None):
slot = slot_creator.create_zeros_slot(
v, name="slot", dtype=dtypes.float64)
variables.global_variables_initializer().run()
self.assertEqual("const/slot", slot.op.name)
self.assertEqual([2], array_ops.shape(slot).eval())
self.assertEqual(dtypes.float64, slot.dtype.base_dtype)
self.assertAllEqual([0.0, 0.0], slot.eval())
def testCreateSlotFromVariableRespectsScope(self):
# See discussion on #2740.
with self.test_session():
with variable_scope.variable_scope("scope"):
v = variables.Variable([1.0, 2.5], name="var")
slot = slot_creator.create_slot(v, v.initialized_value(), name="slot")
self.assertEqual("scope/scope/var/slot", slot.op.name)
if __name__ == "__main__":
test.main()
|
apache-2.0
|
zmike/servo
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/example/benchmark_helper_wsh.py
|
451
|
3234
|
# Copyright 2013, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Handler for benchmark.html."""
def web_socket_do_extra_handshake(request):
# Turn off compression.
request.ws_extension_processors = []
def web_socket_transfer_data(request):
data = ''
while True:
command = request.ws_stream.receive_message()
if command is None:
return
if not isinstance(command, unicode):
            raise ValueError('Invalid command data: ' + command)
commands = command.split(' ')
if len(commands) == 0:
raise ValueError('Invalid command data: ' + command)
if commands[0] == 'receive':
if len(commands) != 2:
                raise ValueError(
                    'Illegal number of arguments for receive command: ' +
                    command)
size = int(commands[1])
# Reuse data if possible.
if len(data) != size:
data = 'a' * size
request.ws_stream.send_message(data, binary=True)
elif commands[0] == 'send':
if len(commands) != 2:
                raise ValueError(
                    'Illegal number of arguments for send command: ' +
                    command)
verify_data = commands[1] == '1'
data = request.ws_stream.receive_message()
if data is None:
raise ValueError('Payload not received')
size = len(data)
if verify_data:
if data != 'a' * size:
raise ValueError('Payload verification failed')
request.ws_stream.send_message(str(size))
else:
raise ValueError('Invalid command: ' + commands[0])
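# Illustrative client-side exchange (a sketch only; it assumes the
# third-party 'websocket-client' package, which is not part of pywebsocket,
# and a hypothetical server URL):
#
#   from websocket import create_connection
#   ws = create_connection('ws://localhost/benchmark_helper')
#   ws.send('receive 1024')      # server replies with 1024 bytes of 'a'
#   payload = ws.recv()
#   ws.send('send 1')            # '1' asks the server to verify the payload
#   ws.send_binary('a' * 1024)   # server replies with the size, '1024'
#   size = ws.recv()
#   ws.close()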
# vi:sts=4 sw=4 et
|
mpl-2.0
|
takeshineshiro/nova
|
nova/db/sqlalchemy/migrate_repo/versions/250_remove_instance_groups_metadata.py
|
81
|
1198
|
# Copyright 2014 Red Hat, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table
def upgrade(migrate_engine):
"""Remove the instance_group_metadata table."""
meta = MetaData(bind=migrate_engine)
if migrate_engine.has_table('instance_group_metadata'):
group_metadata = Table('instance_group_metadata', meta, autoload=True)
group_metadata.drop()
if migrate_engine.has_table('shadow_instance_group_metadata'):
shadow_group_metadata = Table('shadow_instance_group_metadata', meta,
autoload=True)
shadow_group_metadata.drop()
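# Illustrative invocation (a sketch; in practice this migration is run by
# sqlalchemy-migrate's versioning machinery rather than called directly, and
# the database URL below is hypothetical):
#
#   from sqlalchemy import create_engine
#   upgrade(create_engine('mysql://nova:secret@localhost/nova'))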
|
apache-2.0
|
ludwig-n/fchess
|
old_code.py
|
1
|
21297
|
# def comet(mvs, pcs, recdepth, maxrecdepth):
# options = []
# curxp = xp_diff(mvs[0][0].color, pcs)
# intro = '// ' + ('-' * (recdepth * 2)) + ' COMET: '
# if DEBUG_OUTPUT:
# print(intro + '{} opts'.format(len(mvs)))
# else:
# print(' ' * recdepth + '*')
# if len(mvs) == 1:
# tmp, xp, oma = can_move_piece(mvs[0][0], mvs[0][1][0], mvs[0][1][1], pcs, aimode=True)
# if DEBUG_OUTPUT:
# print(intro + 'one opt ({} -> {}) - xp {}'.format(num_to_chess_coord(
# mvs[0][0].x,
# mvs[0][0].y),
# num_to_chess_coord(
# mvs[0][1][0],
# mvs[0][1][1]),
# xp))
# return 0, xp
# if recdepth == maxrecdepth:
# if DEBUG_OUTPUT:
# print(intro + 'unable to continue analysis; maximum recursion depth exceeded;'
# ' xp set to current xp ({})'.format(curxp))
# return 0, curxp
# for m in mvs:
# if m[0].type != KING:
# break
# else:
# # Check if possible to check or checkmate
# for n in range(len(mvs)):
# tmp, xp, oma = can_move_piece(mvs[n][0], mvs[n][1][0], mvs[n][1][1], pcs, aimode=True)
# if type(oma) == bool and oma:
# return n, xp
#
# for n in range(len(mvs)):
# tmp, xp, oma = can_move_piece(mvs[n][0], mvs[n][1][0], mvs[n][1][1], pcs, aimode=True)
# if xp - curxp >= 2:
# return n, xp
# # / Check if possible to check or checkmate /
# myking = m[0]
# for p in pcs:
# if p.color != myking.color and p.type == KING:
# otherking = p
# if myking.resist >= 6:
# mv = select_move_toward(myking, otherking, mvs)
# else:
# mv = select_move_away(myking, otherking, mvs)
# a, x, c = can_move_piece(myking, mvs[mv][1][0], mvs[mv][1][1], pcs, aimode=True)
# return mv, x
# for i in range(len(mvs)):
# tmp, xp, oma = can_move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], pcs, aimode=True)
# if type(oma) == bool and oma:
# turn = i
# if DEBUG_OUTPUT:
# print(intro + 'opt {} ({} -> {}) leads to victory, xp counted as inf; exiting analysis'.format(i,
# num_to_chess_coord(
# mvs[i][0].x,
# mvs[i][0].y),
# num_to_chess_coord(
# mvs[i][1][0],
# mvs[i][1][1])))
# xp = INF
# return i, INF
#
# elif type(oma) != bool and len(oma) == 1:
# bpcs = []
# for p in pcs:
# bpcs.append(copy.deepcopy(p))
#
# if DEBUG_OUTPUT:
# print(intro + 'analyzing our opt {} ({} -> {})...'.format(i,
# num_to_chess_coord(
# mvs[i][0].x,
# mvs[i][0].y),
# num_to_chess_coord(
# mvs[i][1][0],
# mvs[i][1][1])))
#
# move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], bpcs)
# if DEBUG_OUTPUT:
# print(intro + 'fc; one opponent opt ({} -> {})'.format(num_to_chess_coord(
# oma[0][0].x,
# oma[0][0].y),
# num_to_chess_coord(
# oma[0][1][0],
# oma[0][1][1]
# )))
# move_piece(oma[0][0], oma[0][1][0], oma[0][1][1], bpcs)
#
# newmv = get_all_moves(mvs[0][0].color, bpcs)
# if type(newmv) != bool:
# tmptmp, xp = comet(newmv, bpcs, recdepth + 1, maxrecdepth)# if maxrecdepth - recdepth >= 1 else -1, curxp
# if DEBUG_OUTPUT:
# print(intro + 'analysis of opt {} finished; xp {}'.format(i, xp))
# if xp == INF:
# if DEBUG_OUTPUT:
# print(intro + 'checkmate detected, exiting analysis')
# return i, INF
# else:
# if DEBUG_OUTPUT:
# print(intro + 'opt {} leads to defeat/stalemate, xp counted as -inf'.format(i))
# xp = -INF
#
# elif type(oma) != bool and get_piece_by_coords(oma[0][1][0], oma[0][1][1], pcs) is not None:
# bpcs = []
# for p in pcs:
# bpcs.append(copy.deepcopy(p))
#
# if DEBUG_OUTPUT:
# print(intro + 'analyzing opt {} ({} -> {})...'.format(i,
# num_to_chess_coord(
# mvs[i][0].x,
# mvs[i][0].y),
# num_to_chess_coord(
# mvs[i][1][0],
# mvs[i][1][1])))
#
# move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], bpcs)
# if DEBUG_OUTPUT:
# print(intro + 'fc; {} opponent opts'.format(len(oma)))
# xps = []
# for q in range(len(oma)):
# nbpcs = []
# for p in bpcs:
# nbpcs.append(copy.deepcopy(p))
# if DEBUG_OUTPUT:
# print(intro + 'analyzing opponent opt {} ({} -> {})'.format(q, num_to_chess_coord(
# oma[0][0].x,
# oma[0][0].y),
# num_to_chess_coord(
# oma[0][1][0],
# oma[0][1][1]
# )))
# move_piece(oma[q][0], oma[q][1][0], oma[q][1][1], nbpcs)
#
# newmv = get_all_moves(mvs[0][0].color, nbpcs)
# if type(newmv) != bool:
# if maxrecdepth - recdepth >= 1:
# t, xpn = comet(newmv, nbpcs, recdepth + 1, maxrecdepth)
# if DEBUG_OUTPUT:
# print(intro + 'analysis of opponent opt {} finished; xp {}'.format(q, xpn))
# else:
# xpn = curxp
# if DEBUG_OUTPUT:
# print(intro + 'unable to analyze opponent opt {}; maximum recursion depth exceeded;'
# ' xp set to current xp ({})'.format(q, xpn))
#
# else:
# if DEBUG_OUTPUT:
# print(intro + 'opponent opt {} leads to defeat/stalemate, xp counted as -inf'.format(q))
# xpn = -INF
#
# xps.append(xpn)
#
# xp = min(xps)
# if DEBUG_OUTPUT:
# print(intro + 'analysis of opt {} finished, final possible xps {}'.format(i, xps))
# print(intro + 'min xp {}'.format(xp))
#
# # elif type(oma) != bool and len(oma) == 2:
# # bpcs = []
# # for p in pcs:
# # bpcs.append(copy.deepcopy(p))
# #
# # if DEBUG_OUTPUT:
# # print(
# # intro + 'semi-analyzing opt {} ({} -> {})...'.format(i,
# # num_to_chess_coord(
# # mvs[
# # i][0].x,
# # mvs[
# # i][0].y),
# # num_to_chess_coord(
# # mvs[
# # i][
# # 1][
# # 0],
# # mvs[
# # i][
# # 1][
# # 1])))
# #
# # move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], bpcs)
# # t, xp = comet(oma, bpcs, -1, -1)
# # move_piece(oma[t][0], oma[t][1][0], oma[t][1][1], bpcs)
# # xp = xp_sum(mvs[0][0].color, bpcs)
# # if DEBUG_OUTPUT:
# # print(intro + 'semi-analysis of opt {} finished; xp {}'.format(i, xp))
#
# elif DEBUG_OUTPUT:
# print(intro + 'opt {} ({} -> {}) - not fc, xp {}'.format(i,
# num_to_chess_coord(mvs[i][0].x, mvs[i][0].y),
# num_to_chess_coord(mvs[i][1][0], mvs[i][1][1]),
# xp))
# options.append(xp)
# else:
# m = max(options)
# turns = [i for i in range(len(options)) if options[i] == m]
# turn = random.choice(turns)
# if DEBUG_OUTPUT:
# print(intro + 'final opts {}'.format(str(options).replace('100000000000000000000', 'inf')))
#
# if DEBUG_OUTPUT:
# print(intro + 'selected opt {}'.format(turn))
#
# return turn, max(options)
# def get_piece_by_coords(x, y, pieces):
# for piece in pieces:
# if piece.x == x and piece.y == y:
# return piece
# return None
#
# def get_index_by_coords(x, y, pieces):
# for i in range(len(pieces)):
# if pieces[i].x == x and pieces[i].y == y:
# return i
# return None
# def get_moves_by_offset(diags, x, y, board):
# stopped = [False] * 4
# ret = []
#
# for i in range(1, max(BOARD_X, BOARD_Y)):
# for d in range(4):
# if not stopped[d]:
# p = board[x + diags[d][0] * i, y + diags[d][1] * i]
# if p is not None:
# stopped[d] = True
# if p.color != self.color:
# ret.append((p.x, p.y))
# else:
# ret.append((self.x + diags[d][0] * i, self.y + diags[d][1] * i))
# return ret
# def is_check(color, pieces):
# return get_all_moves(not color, pieces, has_king_cpt=True)
#
# def is_under_attack_of(piece1, piece2, pieces):
# allm = get_all_moves(True, pieces) + get_all_moves(False, pieces)
# allm = [x for x in allm if x[0] == piece1 and x[1][0] == piece2.x and x[1][1] == piece2.y]
# return not len(allm) == 0
#
# def can_move_piece(piece, x2, y2, pieces, aimode=False):
# pieces_back = []
#
# for p in pieces:
# pieces_back.append(copy.deepcopy(p))
#
# p1 = get_index_by_coords(piece.x, piece.y, pieces_back)
# p2 = get_index_by_coords(x2, y2, pieces_back)
#
# xp = 0
#
# if p1 is None:
# raise Exception('No such piece')
# if p2 is not None:
# xp += pieces_back[p2].type.capture_price
# pieces_back.pop(p2)
# if p1 > p2:
# p1 -= 1
#
# pieces_back[p1].x = x2
# pieces_back[p1].y = y2
#
# ret = not is_check(pieces_back[p1].color, pieces_back)
#
# xp += CHECK_XP if is_check(not piece.color, pieces_back) else 0
# xp = TURN_XP if xp == 0 else xp
# pieces_back[p1].add_xp(xp)
#
# total_xp = xp_diff(piece.color, pieces_back)
#
# if aimode:
# return ret, total_xp, get_all_moves(not piece.color, pieces_back) # total_xp = difference between sum of xp of pcs
# # of this color and pieces of the opp color
# else:
# return ret
#
# def move_piece(piece, x2, y2, pieces):
# global pawn_prom
# p1 = get_index_by_coords(piece.x, piece.y, pieces)
# p2 = get_index_by_coords(x2, y2, pieces)
#
# xpsum = 0
#
# if p1 is None:
# raise Exception('No such piece')
# if p1 == p2:
# raise Exception('Can\'t move piece to previous location')
# if p2 is not None:
# xpsum += pieces[p2].type.capture_price
# pieces.pop(p2)
# if p1 > p2:
# p1 -= 1
#
# if pieces[p1].type == PAWN and pawn_prom:
# print(LANGUAGE.phrases.PROMOTION_CHOICE)
# typ = input()
# if typ == '1':
#             pieces[p1] = Piece(pieces[p1].color, ROOK, x2, y2, moved=True)
#         elif typ == '2':
#             pieces[p1] = Piece(pieces[p1].color, BISHOP, x2, y2, moved=True)
#         elif typ == '3':
#             pieces[p1] = Piece(pieces[p1].color, KNIGHT, x2, y2, moved=True)
#         else:
#             pieces[p1] = Piece(pieces[p1].color, QUEEN, x2, y2, moved=True)
# else:
# pieces[p1].x = x2
# pieces[p1].y = y2
#
# xpsum += CHECK_XP if is_check(not piece.color, pieces) else 0
# xpsum = TURN_XP if xpsum == 0 else xpsum
# pieces[p1].add_xp(xpsum)
#
# def get_all_moves(color, pieces, has_king_cpt=False, has_mob_cpt=False):
# ret = []
# captures = []
# res = []
# for p in pieces:
# if p.color == color:
# rt, cpt = p.get_moves(pieces)
# ret.extend([(p, x) for x in rt])
# captures.extend([(p, x) for x in cpt])
# for x, y in cpt:
# if get_piece_by_coords(x, y, pieces).type == KING:
# return True
# if get_piece_by_coords(x, y, pieces).type == MOB and has_mob_cpt:
# return True
#
# if has_king_cpt or has_mob_cpt:
# return False
#
# # --- Check all capture variants for checks
# popped = []
#
# for i in range(len(captures)):
# b = can_move_piece(captures[i][0], captures[i][1][0], captures[i][1][1], pieces)
# if not b:
# popped.append(captures[i])
#
# for p in popped:
# captures.remove(p)
#
# if len(captures) == 0:
# # --- Same with ret
# popped = []
#
# for i in range(len(ret)):
# b = can_move_piece(ret[i][0], ret[i][1][0], ret[i][1][1], pieces)
# if not b:
# popped.append(ret[i])
#
# for p in popped:
# ret.remove(p)
#
# res = ret
# else:
# res = captures
#
# if len(res) == 0:
# return is_check(color, pieces)
# else:
# return res
# def change_back(v):
# global curon
#
# v = False
# if v:
# curon = not curon
#
# if curon:
# print('\033[47m', end='')
# else:
# print('\033[0m', end='')
# def power(type):
# if type == PAWN:
# return 1
# if type == KNIGHT:
# return 2
# if type == BISHOP:
# return 3
# if type == ROOK:
# return 4
# if type == QUEEN:
# return 5
# if type == KING:
# return 6
# if type == AMAZON:
# return 7
#
# def select_move_toward(p1, p2, mvs):
# xd, yd = abs(p1.x - p2.x), abs(p1.y - p2.y)
# resindex = -1
# resval = -INF
# for m in range(len(mvs)):
# nx, ny = abs(mvs[m][1][0] - p2.x), abs(mvs[m][1][1] - p2.y)
# change = xd - nx + yd - ny
# if change > resval:
# resval = change
# resindex = m
#
# return resindex
#
# def select_move_away(p1, p2, mvs):
# xd, yd = abs(p1.x - p2.x), abs(p1.y - p2.y)
# resindex = -1
# resval = -INF
# for m in range(len(mvs)):
# nx, ny = abs(mvs[m][1][0] - p2.x), abs(mvs[m][1][1] - p2.y)
# change = nx - xd + ny - yd
# if change > resval:
# resval = change
# resindex = m
#
# return resindex
#
# def escape_capture_of(mob, piece1, pieces):
# variants = [(-1, 0), (1, 0), (0, -1), (0, 1)]
# ret = []
# for v in variants:
# pieces_back = []
# for p in pieces:
# pieces_back.append(copy.deepcopy(p))
# move_piece(mob, mob.x + v[0], mob.y + v[1], pieces_back)
# if not is_under_attack_of(piece1, mob, pieces_back):
# ret.append(v)
# return ret
#
# def get_all_mobs(pieces):
# return [x for x in pieces if x.type == MOB]
#
# def can_move_piece(self, p1, x, y, return_details=False):
# board_c = copy.copy(self)
#
# p2 = self[x, y]
# board_c.pieces.pop((self[p1].x, self[p1].y))
#
# xp = 0
#
# if p1 is None:
# raise Exception('No such piece')
# if p2 is not None:
# xp += board_c.pieces_l[p2].type.capture_price
# self.remove(p2)
# if p1 > p2:
# p1 -= 1
#
# board_c[p1].x = x
# board_c[p1].y = y
#
# ret = not self.is_check(board_c.pieces_l[p1].color)
#
# xp += CHECK_XP if self.is_check(not board_c.pieces_l[p1].color) else 0
# xp = TURN_XP if xp == 0 else xp
# board_c.pieces_l[p1].add_xp(xp)
#
# total_xp = board_c.xp_diff(board_c.pieces_l[p1].color)
#
# if return_details:
# mv, cpt = self.get_all_moves()
# return ret, total_xp, mv[int(not board_c.pieces_l[p1].color)], cpt[int(not board_c.pieces_l[p1].color)]
# # total_xp = difference between sum of xp of pcs
# # of this color and pieces of the opp color
# else:
# return ret
#
# def move_to_chess(piece, x, y, pieces):
# if piece.type == PAWN:
# if piece.x == x:
# ret = num_to_chess_coord(x, y)
# else:
# ret = LINES[piece.x] + LINES[x]
# else:
# if get_piece_by_coords(x, y, pieces) is None:
# ret = piece.type.abbr + num_to_chess_coord(x, y)
# else:
# ret = piece.type.abbr + ':' + num_to_chess_coord(x, y)
# return ret
#
# bool1 = len(t) != 2 or len(t[0]) != 2 or len(t[1]) != 2 or \
# t[0][0] not in LINES[:BOARD_X] or t[1][0] not in LINES[:BOARD_X] or \
# t[0][1] not in string.digits or t[1][1] not in string.digits
|
gpl-3.0
|
HybridF5/jacket
|
jacket/compute/block_device.py
|
1
|
21039
|
# Copyright 2011 Isaku Yamahata <yamahata@valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from oslo_log import log as logging
from oslo_utils import strutils
import six
import jacket.compute.conf
from jacket.compute import exception
from jacket.i18n import _
from jacket.compute import utils
from jacket.compute.virt import driver
CONF = jacket.compute.conf.CONF
LOG = logging.getLogger(__name__)
DEFAULT_ROOT_DEV_NAME = '/dev/sda1'
_DEFAULT_MAPPINGS = {'ami': 'sda1',
'ephemeral0': 'sda2',
'root': DEFAULT_ROOT_DEV_NAME,
'swap': 'sda3'}
bdm_legacy_fields = set(['device_name', 'delete_on_termination',
'virtual_name', 'snapshot_id',
'volume_id', 'volume_size', 'no_device',
'connection_info'])
bdm_new_fields = set(['source_type', 'destination_type',
'guest_format', 'device_type', 'disk_bus', 'boot_index',
'device_name', 'delete_on_termination', 'snapshot_id',
'volume_id', 'volume_size', 'image_id', 'no_device',
'connection_info'])
bdm_db_only_fields = set(['id', 'instance_uuid'])
bdm_db_inherited_fields = set(['created_at', 'updated_at',
'deleted_at', 'deleted'])
bdm_new_non_api_fields = set(['volume_id', 'snapshot_id',
'image_id', 'connection_info'])
bdm_new_api_only_fields = set(['uuid'])
bdm_new_api_fields = ((bdm_new_fields - bdm_new_non_api_fields) |
bdm_new_api_only_fields)
class BlockDeviceDict(dict):
"""Represents a Block Device Mapping in Nova."""
_fields = bdm_new_fields
_db_only_fields = (bdm_db_only_fields |
bdm_db_inherited_fields)
_required_fields = set(['source_type'])
def __init__(self, bdm_dict=None, do_not_default=None, **kwargs):
super(BlockDeviceDict, self).__init__()
bdm_dict = bdm_dict or {}
bdm_dict.update(kwargs)
do_not_default = do_not_default or set()
self._validate(bdm_dict)
if bdm_dict.get('device_name'):
bdm_dict['device_name'] = prepend_dev(bdm_dict['device_name'])
bdm_dict['delete_on_termination'] = bool(
bdm_dict.get('delete_on_termination'))
# NOTE (ndipanov): Never default db fields
self.update({field: None for field in self._fields - do_not_default})
self.update(list(six.iteritems(bdm_dict)))
def _validate(self, bdm_dict):
"""Basic data format validations."""
dict_fields = set(key for key, _ in six.iteritems(bdm_dict))
# Check that there are no bogus fields
if not (dict_fields <=
(self._fields | self._db_only_fields)):
raise exception.InvalidBDMFormat(
details=_("Some fields are invalid."))
if bdm_dict.get('no_device'):
return
# Check that all required fields are there
if (self._required_fields and
not ((dict_fields & self._required_fields) ==
self._required_fields)):
raise exception.InvalidBDMFormat(
details=_("Some required fields are missing"))
if 'delete_on_termination' in bdm_dict:
bdm_dict['delete_on_termination'] = strutils.bool_from_string(
bdm_dict['delete_on_termination'])
if bdm_dict.get('device_name') is not None:
validate_device_name(bdm_dict['device_name'])
validate_and_default_volume_size(bdm_dict)
if bdm_dict.get('boot_index'):
try:
bdm_dict['boot_index'] = int(bdm_dict['boot_index'])
except ValueError:
raise exception.InvalidBDMFormat(
details=_("Boot index is invalid."))
@classmethod
def from_legacy(cls, legacy_bdm):
copy_over_fields = bdm_legacy_fields & bdm_new_fields
copy_over_fields |= (bdm_db_only_fields |
bdm_db_inherited_fields)
# NOTE (ndipanov): These fields cannot be computed
# from legacy bdm, so do not default them
# to avoid overwriting meaningful values in the db
non_computable_fields = set(['boot_index', 'disk_bus',
'guest_format', 'device_type'])
new_bdm = {fld: val for fld, val in six.iteritems(legacy_bdm)
if fld in copy_over_fields}
virt_name = legacy_bdm.get('virtual_name')
if is_swap_or_ephemeral(virt_name):
new_bdm['source_type'] = 'blank'
new_bdm['delete_on_termination'] = True
new_bdm['destination_type'] = 'local'
if virt_name == 'swap':
new_bdm['guest_format'] = 'swap'
else:
new_bdm['guest_format'] = CONF.default_ephemeral_format
elif legacy_bdm.get('snapshot_id'):
new_bdm['source_type'] = 'snapshot'
new_bdm['destination_type'] = 'volume'
elif legacy_bdm.get('volume_id'):
new_bdm['source_type'] = 'volume'
new_bdm['destination_type'] = 'volume'
elif legacy_bdm.get('no_device'):
# NOTE (ndipanov): Just keep the BDM for now,
pass
else:
raise exception.InvalidBDMFormat(
details=_("Unrecognized legacy format."))
return cls(new_bdm, non_computable_fields)
@classmethod
def from_api(cls, api_dict, image_uuid_specified):
"""Transform the API format of data to the internally used one.
Only validate if the source_type field makes sense.
"""
if not api_dict.get('no_device'):
source_type = api_dict.get('source_type')
device_uuid = api_dict.get('uuid')
destination_type = api_dict.get('destination_type')
if source_type not in ('volume', 'image', 'snapshot', 'blank'):
raise exception.InvalidBDMFormat(
details=_("Invalid source_type field."))
elif source_type == 'blank' and device_uuid:
raise exception.InvalidBDMFormat(
details=_("Invalid device UUID."))
elif source_type != 'blank':
if not device_uuid:
raise exception.InvalidBDMFormat(
details=_("Missing device UUID."))
api_dict[source_type + '_id'] = device_uuid
if source_type == 'image' and destination_type == 'local':
try:
boot_index = int(api_dict.get('boot_index', -1))
except ValueError:
raise exception.InvalidBDMFormat(
details=_("Boot index is invalid."))
                    # if this bdm is generated from --image, then
# source_type = image and destination_type = local is allowed
if not (image_uuid_specified and boot_index == 0):
raise exception.InvalidBDMFormat(
details=_("Mapping image to local is not supported."))
api_dict.pop('uuid', None)
return cls(api_dict)
def legacy(self):
copy_over_fields = bdm_legacy_fields - set(['virtual_name'])
copy_over_fields |= (bdm_db_only_fields |
bdm_db_inherited_fields)
legacy_block_device = {field: self.get(field)
for field in copy_over_fields if field in self}
source_type = self.get('source_type')
destination_type = self.get('destination_type')
no_device = self.get('no_device')
if source_type == 'blank':
if self['guest_format'] == 'swap':
legacy_block_device['virtual_name'] = 'swap'
else:
# NOTE (ndipanov): Always label as 0, it is up to
# the calling routine to re-enumerate them
legacy_block_device['virtual_name'] = 'ephemeral0'
elif source_type in ('volume', 'snapshot') or no_device:
legacy_block_device['virtual_name'] = None
elif source_type == 'image':
if destination_type != 'volume':
# NOTE(ndipanov): Image bdms with local destination
# have no meaning in the legacy format - raise
raise exception.InvalidBDMForLegacy()
legacy_block_device['virtual_name'] = None
return legacy_block_device
def get_image_mapping(self):
drop_fields = (set(['connection_info']) |
self._db_only_fields)
mapping_dict = dict(self)
for fld in drop_fields:
mapping_dict.pop(fld, None)
return mapping_dict
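# A minimal round-trip sketch (illustrative values only): a legacy-style
# swap mapping converted to the new format and back.
#
#   legacy = {'device_name': '/dev/sdb', 'virtual_name': 'swap',
#             'volume_size': 1, 'delete_on_termination': True}
#   bdm = BlockDeviceDict.from_legacy(legacy)
#   assert bdm['source_type'] == 'blank'
#   assert bdm['guest_format'] == 'swap'
#   assert bdm.legacy()['virtual_name'] == 'swap'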
def is_safe_for_update(block_device_dict):
"""Determine if passed dict is a safe subset for update.
Safe subset in this case means a safe subset of both legacy
and new versions of data, that can be passed to an UPDATE query
without any transformation.
"""
fields = set(block_device_dict.keys())
return fields <= (bdm_new_fields |
bdm_db_inherited_fields |
bdm_db_only_fields)
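# For example (illustrative): a dict touching only new-format fields is a
# safe subset, while one carrying the legacy-only 'virtual_name' is not.
#
#   assert is_safe_for_update({'volume_size': 10, 'boot_index': 0})
#   assert not is_safe_for_update({'virtual_name': 'swap'})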
def create_image_bdm(image_ref, boot_index=0):
"""Create a block device dict based on the image_ref.
This is useful in the API layer to keep the compatibility
with having an image_ref as a field in the instance requests
"""
return BlockDeviceDict(
{'source_type': 'image',
'image_id': image_ref,
'delete_on_termination': True,
'boot_index': boot_index,
'device_type': 'disk',
'destination_type': 'local'})
def create_blank_bdm(size, guest_format=None):
return BlockDeviceDict(
{'source_type': 'blank',
'delete_on_termination': True,
'device_type': 'disk',
'boot_index': -1,
'destination_type': 'local',
'guest_format': guest_format,
'volume_size': size})
def snapshot_from_bdm(snapshot_id, template):
"""Create a basic volume snapshot BDM from a given template bdm."""
copy_from_template = ('disk_bus', 'device_type', 'boot_index',
'delete_on_termination', 'volume_size',
'device_name')
snapshot_dict = {'source_type': 'snapshot',
'destination_type': 'volume',
'snapshot_id': snapshot_id}
for key in copy_from_template:
snapshot_dict[key] = template.get(key)
return BlockDeviceDict(snapshot_dict)
def legacy_mapping(block_device_mapping):
"""Transform a list of block devices of an instance back to the
legacy data format.
"""
legacy_block_device_mapping = []
for bdm in block_device_mapping:
try:
legacy_block_device = BlockDeviceDict(bdm).legacy()
except exception.InvalidBDMForLegacy:
continue
legacy_block_device_mapping.append(legacy_block_device)
# Re-enumerate the ephemeral devices
for i, dev in enumerate(dev for dev in legacy_block_device_mapping
if dev['virtual_name'] and
is_ephemeral(dev['virtual_name'])):
dev['virtual_name'] = dev['virtual_name'][:-1] + str(i)
return legacy_block_device_mapping
def from_legacy_mapping(legacy_block_device_mapping, image_uuid='',
root_device_name=None, no_root=False):
"""Transform a legacy list of block devices to the new data format."""
new_bdms = [BlockDeviceDict.from_legacy(legacy_bdm)
for legacy_bdm in legacy_block_device_mapping]
# NOTE (ndipanov): We will not decide which device is root here - we assume
# that it will be supplied later. This is useful for having the root device
# as part of the image defined mappings that are already in the v2 format.
if no_root:
for bdm in new_bdms:
bdm['boot_index'] = -1
return new_bdms
image_bdm = None
volume_backed = False
# Try to assign boot_device
if not root_device_name and not image_uuid:
# NOTE (ndipanov): If there is no root_device, pick the first non
# blank one.
non_blank = [bdm for bdm in new_bdms if bdm['source_type'] != 'blank']
if non_blank:
non_blank[0]['boot_index'] = 0
else:
for bdm in new_bdms:
if (bdm['source_type'] in ('volume', 'snapshot', 'image') and
root_device_name is not None and
(strip_dev(bdm.get('device_name')) ==
strip_dev(root_device_name))):
bdm['boot_index'] = 0
volume_backed = True
elif not bdm['no_device']:
bdm['boot_index'] = -1
else:
bdm['boot_index'] = None
if not volume_backed and image_uuid:
image_bdm = create_image_bdm(image_uuid, boot_index=0)
return ([image_bdm] if image_bdm else []) + new_bdms
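# Sketch of the boot-device selection above (illustrative; the volume id is
# hypothetical): with a root_device_name supplied, the matching volume
# mapping is given boot_index 0.
#
#   legacy = [{'device_name': '/dev/vda', 'volume_id': 'vol-0001'}]
#   bdms = from_legacy_mapping(legacy, root_device_name='/dev/vda')
#   assert bdms[0]['boot_index'] == 0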
def properties_root_device_name(properties):
"""get root device name from image meta data.
If it isn't specified, return None.
"""
root_device_name = None
# NOTE(yamahata): see image_service.s3.s3create()
for bdm in properties.get('mappings', []):
if bdm['virtual'] == 'root':
root_device_name = bdm['device']
# NOTE(yamahata): register_image's command line can override
# <machine>.manifest.xml
if 'root_device_name' in properties:
root_device_name = properties['root_device_name']
return root_device_name
def validate_device_name(value):
try:
# NOTE (ndipanov): Do not allow empty device names
# until assigning default values
# is supported by compute.compute
utils.check_string_length(value, 'Device name',
min_length=1, max_length=255)
except exception.InvalidInput:
raise exception.InvalidBDMFormat(
details=_("Device name empty or too long."))
if ' ' in value:
raise exception.InvalidBDMFormat(
details=_("Device name contains spaces."))
def validate_and_default_volume_size(bdm):
if bdm.get('volume_size'):
try:
bdm['volume_size'] = utils.validate_integer(
bdm['volume_size'], 'volume_size', min_value=0)
except exception.InvalidInput:
# NOTE: We can remove this validation code after removing
# Nova v2.0 API code because v2.1 API validates this case
# already at its REST API layer.
raise exception.InvalidBDMFormat(
details=_("Invalid volume_size."))
_ephemeral = re.compile(r'^ephemeral(\d|[1-9]\d+)$')
def is_ephemeral(device_name):
return _ephemeral.match(device_name) is not None
def ephemeral_num(ephemeral_name):
assert is_ephemeral(ephemeral_name)
return int(_ephemeral.sub('\\1', ephemeral_name))
def is_swap_or_ephemeral(device_name):
return (device_name and
(device_name == 'swap' or is_ephemeral(device_name)))
def new_format_is_swap(bdm):
if (bdm.get('source_type') == 'blank' and
bdm.get('destination_type') == 'local' and
bdm.get('guest_format') == 'swap'):
return True
return False
def new_format_is_ephemeral(bdm):
if (bdm.get('source_type') == 'blank' and
bdm.get('destination_type') == 'local' and
bdm.get('guest_format') != 'swap'):
return True
return False
def get_root_bdm(bdms):
try:
return next(bdm for bdm in bdms if bdm.get('boot_index', -1) == 0)
except StopIteration:
return None
def get_bdms_to_connect(bdms, exclude_root_mapping=False):
"""Will return non-root mappings, when exclude_root_mapping is true.
Otherwise all mappings will be returned.
"""
return (bdm for bdm in bdms if bdm.get('boot_index', -1) != 0 or
not exclude_root_mapping)
def mappings_prepend_dev(mappings):
"""Prepend '/dev/' to 'device' entry of swap/ephemeral virtual type."""
for m in mappings:
virtual = m['virtual']
if (is_swap_or_ephemeral(virtual) and
(not m['device'].startswith('/'))):
m['device'] = '/dev/' + m['device']
return mappings
_dev = re.compile('^/dev/')
def strip_dev(device_name):
"""remove leading '/dev/'."""
return _dev.sub('', device_name) if device_name else device_name
def prepend_dev(device_name):
"""Make sure there is a leading '/dev/'."""
return device_name and '/dev/' + strip_dev(device_name)
_pref = re.compile('^((x?v|s|h)d)')
def strip_prefix(device_name):
"""remove both leading /dev/ and xvd or sd or vd or hd."""
device_name = strip_dev(device_name)
return _pref.sub('', device_name) if device_name else device_name
_nums = re.compile(r'\d+')
def get_device_letter(device_name):
letter = strip_prefix(device_name)
# NOTE(vish): delete numbers in case we have something like
# /dev/sda1
return _nums.sub('', letter) if device_name else device_name
def instance_block_mapping(instance, bdms):
root_device_name = instance['root_device_name']
# NOTE(clayg): remove this when xenapi is setting default_root_device
if root_device_name is None:
if driver.is_xenapi():
root_device_name = '/dev/xvda'
else:
return _DEFAULT_MAPPINGS
mappings = {}
mappings['ami'] = strip_dev(root_device_name)
mappings['root'] = root_device_name
default_ephemeral_device = instance.get('default_ephemeral_device')
if default_ephemeral_device:
mappings['ephemeral0'] = default_ephemeral_device
default_swap_device = instance.get('default_swap_device')
if default_swap_device:
mappings['swap'] = default_swap_device
ebs_devices = []
blanks = []
# 'ephemeralN', 'swap' and ebs
for bdm in bdms:
# ebs volume case
if bdm.destination_type == 'volume':
ebs_devices.append(bdm.device_name)
continue
if bdm.source_type == 'blank':
blanks.append(bdm)
# NOTE(yamahata): I'm not sure how ebs device should be numbered.
# Right now sort by device name for deterministic
# result.
if ebs_devices:
ebs_devices.sort()
for nebs, ebs in enumerate(ebs_devices):
mappings['ebs%d' % nebs] = ebs
swap = [bdm for bdm in blanks if bdm.guest_format == 'swap']
if swap:
mappings['swap'] = swap.pop().device_name
ephemerals = [bdm for bdm in blanks if bdm.guest_format != 'swap']
if ephemerals:
for num, eph in enumerate(ephemerals):
mappings['ephemeral%d' % num] = eph.device_name
return mappings
def match_device(device):
"""Matches device name and returns prefix, suffix."""
match = re.match("(^/dev/x{0,1}[a-z]{0,1}d{0,1})([a-z]+)[0-9]*$", device)
if not match:
return None
return match.groups()
def volume_in_mapping(mount_device, block_device_info):
block_device_list = [strip_dev(vol['mount_device'])
for vol in
driver.block_device_info_get_mapping(
block_device_info)]
swap = driver.block_device_info_get_swap(block_device_info)
if driver.swap_is_usable(swap):
block_device_list.append(strip_dev(swap['device_name']))
block_device_list += [strip_dev(ephemeral['device_name'])
for ephemeral in
driver.block_device_info_get_ephemerals(
block_device_info)]
LOG.debug("block_device_list %s", sorted(filter(None, block_device_list)))
return strip_dev(mount_device) in block_device_list
def get_bdm_ephemeral_disk_size(block_device_mappings):
return sum(bdm.get('volume_size', 0)
for bdm in block_device_mappings
if new_format_is_ephemeral(bdm))
def get_bdm_swap_list(block_device_mappings):
return [bdm for bdm in block_device_mappings
if new_format_is_swap(bdm)]
def get_bdm_local_disk_num(block_device_mappings):
return len([bdm for bdm in block_device_mappings
if bdm.get('destination_type') == 'local'])
|
apache-2.0
|
shenlong3030/asv-django-guestbook
|
django/contrib/gis/utils/geoip.py
|
13
|
15172
|
"""
This module houses the GeoIP object, a ctypes wrapper for the MaxMind GeoIP(R)
C API (http://www.maxmind.com/app/c). This is an alternative to the GPL
licensed Python GeoIP interface provided by MaxMind.
GeoIP(R) is a registered trademark of MaxMind, LLC of Boston, Massachusetts.
For IP-based geolocation, this module requires the GeoLite Country and City
datasets, in binary format (CSV will not work!). The datasets may be
downloaded from MaxMind at http://www.maxmind.com/download/geoip/database/.
Grab GeoIP.dat.gz and GeoLiteCity.dat.gz, and unzip them in the directory
corresponding to settings.GEOIP_PATH. See the GeoIP docstring and examples
below for more details.
TODO: Verify compatibility with Windows.
Example:
>>> from django.contrib.gis.utils import GeoIP
>>> g = GeoIP()
>>> g.country('google.com')
{'country_code': 'US', 'country_name': 'United States'}
>>> g.city('72.14.207.99')
{'area_code': 650,
'city': 'Mountain View',
'country_code': 'US',
'country_code3': 'USA',
'country_name': 'United States',
'dma_code': 807,
'latitude': 37.419200897216797,
'longitude': -122.05740356445312,
'postal_code': '94043',
'region': 'CA'}
>>> g.lat_lon('salon.com')
(37.789798736572266, -122.39420318603516)
>>> g.lon_lat('uh.edu')
(-95.415199279785156, 29.77549934387207)
>>> g.geos('24.124.1.80').wkt
'POINT (-95.2087020874023438 39.0392990112304688)'
"""
import os, re
from ctypes import c_char_p, c_float, c_int, Structure, CDLL, POINTER
from ctypes.util import find_library
from django.conf import settings
if not settings.configured: settings.configure()
# Creating the settings dictionary with any settings, if needed.
GEOIP_SETTINGS = dict((key, getattr(settings, key))
for key in ('GEOIP_PATH', 'GEOIP_LIBRARY_PATH', 'GEOIP_COUNTRY', 'GEOIP_CITY')
if hasattr(settings, key))
lib_path = GEOIP_SETTINGS.get('GEOIP_LIBRARY_PATH', None)
# GeoIP Exception class.
class GeoIPException(Exception): pass
# The shared library for the GeoIP C API. May be downloaded
# from http://www.maxmind.com/download/geoip/api/c/
if lib_path:
lib_name = None
else:
# TODO: Is this really the library name for Windows?
lib_name = 'GeoIP'
# Getting the path to the GeoIP library.
if lib_name: lib_path = find_library(lib_name)
if lib_path is None: raise GeoIPException('Could not find the GeoIP library (tried "%s"). '
'Try setting GEOIP_LIBRARY_PATH in your settings.' % lib_name)
lgeoip = CDLL(lib_path)
# Regular expressions for recognizing IP addresses and the GeoIP
# free database editions.
ipregex = re.compile(r'^(?P<w>\d\d?\d?)\.(?P<x>\d\d?\d?)\.(?P<y>\d\d?\d?)\.(?P<z>\d\d?\d?)$')
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')
#### GeoIP C Structure definitions ####
class GeoIPRecord(Structure):
_fields_ = [('country_code', c_char_p),
('country_code3', c_char_p),
('country_name', c_char_p),
('region', c_char_p),
('city', c_char_p),
('postal_code', c_char_p),
('latitude', c_float),
('longitude', c_float),
# TODO: In 1.4.6 this changed from `int dma_code;` to
# `union {int metro_code; int dma_code;};`. Change
                # to a `ctypes.Union` to accommodate this in the future when
# pre-1.4.6 versions are no longer distributed.
('dma_code', c_int),
('area_code', c_int),
# TODO: The following structure fields were added in 1.4.3 --
# uncomment these fields when sure previous versions are no
# longer distributed by package maintainers.
#('charset', c_int),
#('continent_code', c_char_p),
]
class GeoIPTag(Structure): pass
#### ctypes function prototypes ####
RECTYPE = POINTER(GeoIPRecord)
DBTYPE = POINTER(GeoIPTag)
# For retrieving records by name or address.
def record_output(func):
func.restype = RECTYPE
return func
rec_by_addr = record_output(lgeoip.GeoIP_record_by_addr)
rec_by_name = record_output(lgeoip.GeoIP_record_by_name)
# For opening & closing GeoIP database files.
geoip_open = lgeoip.GeoIP_open
geoip_open.restype = DBTYPE
geoip_close = lgeoip.GeoIP_delete
geoip_close.argtypes = [DBTYPE]
geoip_close.restype = None
# String output routines.
def string_output(func):
func.restype = c_char_p
return func
geoip_dbinfo = string_output(lgeoip.GeoIP_database_info)
cntry_code_by_addr = string_output(lgeoip.GeoIP_country_code_by_addr)
cntry_code_by_name = string_output(lgeoip.GeoIP_country_code_by_name)
cntry_name_by_addr = string_output(lgeoip.GeoIP_country_name_by_addr)
cntry_name_by_name = string_output(lgeoip.GeoIP_country_name_by_name)
#### GeoIP class ####
class GeoIP(object):
# The flags for GeoIP memory caching.
# GEOIP_STANDARD - read database from filesystem, uses least memory.
#
# GEOIP_MEMORY_CACHE - load database into memory, faster performance
# but uses more memory
#
# GEOIP_CHECK_CACHE - check for updated database. If database has been updated,
# reload filehandle and/or memory cache.
#
# GEOIP_INDEX_CACHE - just cache
# the most frequently accessed index portion of the database, resulting
# in faster lookups than GEOIP_STANDARD, but less memory usage than
# GEOIP_MEMORY_CACHE - useful for larger databases such as
# GeoIP Organization and GeoIP City. Note, for GeoIP Country, Region
# and Netspeed databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
#
GEOIP_STANDARD = 0
GEOIP_MEMORY_CACHE = 1
GEOIP_CHECK_CACHE = 2
GEOIP_INDEX_CACHE = 4
cache_options = dict((opt, None) for opt in (0, 1, 2, 4))
_city_file = ''
_country_file = ''
# Initially, pointers to GeoIP file references are NULL.
_city = None
_country = None
def __init__(self, path=None, cache=0, country=None, city=None):
"""
Initializes the GeoIP object, no parameters are required to use default
settings. Keyword arguments may be passed in to customize the locations
of the GeoIP data sets.
* path: Base directory to where GeoIP data is located or the full path
to where the city or country data files (*.dat) are located.
Assumes that both the city and country data sets are located in
this directory; overrides the GEOIP_PATH settings attribute.
* cache: The cache settings when opening up the GeoIP datasets,
and may be an integer in (0, 1, 2, 4) corresponding to
the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
and GEOIP_INDEX_CACHE `GeoIPOptions` C API settings,
respectively. Defaults to 0, meaning that the data is read
from the disk.
* country: The name of the GeoIP country data file. Defaults to
'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.
* city: The name of the GeoIP city data file. Defaults to
'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
"""
# Checking the given cache option.
if cache in self.cache_options:
            self._cache = cache
else:
raise GeoIPException('Invalid caching option: %s' % cache)
# Getting the GeoIP data path.
if not path:
path = GEOIP_SETTINGS.get('GEOIP_PATH', None)
if not path: raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
if not isinstance(path, basestring):
raise TypeError('Invalid path type: %s' % type(path).__name__)
if os.path.isdir(path):
# Constructing the GeoIP database filenames using the settings
# dictionary. If the database files for the GeoLite country
# and/or city datasets exist, then try and open them.
country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
if os.path.isfile(country_db):
self._country = geoip_open(country_db, cache)
self._country_file = country_db
city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
if os.path.isfile(city_db):
self._city = geoip_open(city_db, cache)
self._city_file = city_db
elif os.path.isfile(path):
# Otherwise, some detective work will be needed to figure
# out whether the given database path is for the GeoIP country
# or city databases.
ptr = geoip_open(path, cache)
info = geoip_dbinfo(ptr)
if lite_regex.match(info):
# GeoLite City database detected.
self._city = ptr
self._city_file = path
elif free_regex.match(info):
# GeoIP Country database detected.
self._country = ptr
self._country_file = path
else:
raise GeoIPException('Unable to recognize database edition: %s' % info)
else:
raise GeoIPException('GeoIP path must be a valid file or directory.')
def __del__(self):
# Cleaning any GeoIP file handles lying around.
if self._country: geoip_close(self._country)
if self._city: geoip_close(self._city)
def _check_query(self, query, country=False, city=False, city_or_country=False):
"Helper routine for checking the query and database availability."
# Making sure a string was passed in for the query.
if not isinstance(query, basestring):
raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)
# Extra checks for the existence of country and city databases.
if city_or_country and not (self._country or self._city):
raise GeoIPException('Invalid GeoIP country and city data files.')
elif country and not self._country:
raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
elif city and not self._city:
raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)
def city(self, query):
"""
Returns a dictionary of city information for the given IP address or
Fully Qualified Domain Name (FQDN). Some information in the dictionary
may be undefined (None).
"""
self._check_query(query, city=True)
if ipregex.match(query):
# If an IP address was passed in
ptr = rec_by_addr(self._city, c_char_p(query))
else:
# If a FQDN was passed in.
ptr = rec_by_name(self._city, c_char_p(query))
# Checking the pointer to the C structure, if valid pull out elements
        # into a dictionary and return.
if bool(ptr):
record = ptr.contents
return dict((tup[0], getattr(record, tup[0])) for tup in record._fields_)
else:
return None
def country_code(self, query):
"Returns the country code for the given IP Address or FQDN."
self._check_query(query, city_or_country=True)
if self._country:
if ipregex.match(query): return cntry_code_by_addr(self._country, query)
else: return cntry_code_by_name(self._country, query)
else:
return self.city(query)['country_code']
def country_name(self, query):
"Returns the country name for the given IP Address or FQDN."
self._check_query(query, city_or_country=True)
if self._country:
if ipregex.match(query): return cntry_name_by_addr(self._country, query)
else: return cntry_name_by_name(self._country, query)
else:
return self.city(query)['country_name']
def country(self, query):
"""
        Returns a dictionary with the country code and name when given an
IP address or a Fully Qualified Domain Name (FQDN). For example, both
'24.124.1.80' and 'djangoproject.com' are valid parameters.
"""
# Returning the country code and name
return {'country_code' : self.country_code(query),
'country_name' : self.country_name(query),
}
#### Coordinate retrieval routines ####
def coords(self, query, ordering=('longitude', 'latitude')):
cdict = self.city(query)
if cdict is None: return None
else: return tuple(cdict[o] for o in ordering)
def lon_lat(self, query):
"Returns a tuple of the (longitude, latitude) for the given query."
return self.coords(query)
def lat_lon(self, query):
"Returns a tuple of the (latitude, longitude) for the given query."
return self.coords(query, ('latitude', 'longitude'))
def geos(self, query):
"Returns a GEOS Point object for the given query."
ll = self.lon_lat(query)
if ll:
from django.contrib.gis.geos import Point
return Point(ll, srid=4326)
else:
return None
#### GeoIP Database Information Routines ####
def country_info(self):
"Returns information about the GeoIP country database."
if self._country is None:
ci = 'No GeoIP Country data in "%s"' % self._country_file
else:
ci = geoip_dbinfo(self._country)
return ci
country_info = property(country_info)
    def city_info(self):
        "Returns information about the GeoIP city database."
if self._city is None:
ci = 'No GeoIP City data in "%s"' % self._city_file
else:
ci = geoip_dbinfo(self._city)
return ci
city_info = property(city_info)
def info(self):
"Returns information about all GeoIP databases in use."
return 'Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info)
info = property(info)
#### Methods for compatibility w/the GeoIP-Python API. ####
@classmethod
def open(cls, full_path, cache):
return GeoIP(full_path, cache)
def _rec_by_arg(self, arg):
if self._city:
return self.city(arg)
else:
return self.country(arg)
region_by_addr = city
region_by_name = city
record_by_addr = _rec_by_arg
record_by_name = _rec_by_arg
country_code_by_addr = country_code
country_code_by_name = country_code
country_name_by_addr = country_name
country_name_by_name = country_name
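# Illustrative instantiation (a sketch; the path below is hypothetical and
# must point at a directory containing GeoIP.dat / GeoLiteCity.dat):
#
#   g = GeoIP(path='/usr/local/share/geoip', cache=GeoIP.GEOIP_MEMORY_CACHE)
#   g.country_code('djangoproject.com')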
|
bsd-3-clause
|
yanheven/keystone
|
keystone/i18n.py
|
22
|
1156
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html.
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='keystone')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
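# Typical usage (an illustrative sketch; LOG is assumed to be an oslo.log
# logger obtained elsewhere in keystone, and 'endpoint' a local variable):
#
#   LOG.warning(_LW('Failed to contact endpoint %s'), endpoint)
#   raise ValueError(_('Invalid input'))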
|
apache-2.0
|
Smarsh/django
|
django/core/files/move.py
|
403
|
2931
|
"""
Move a file in the safest way possible::
>>> from django.core.files.move import file_move_safe
>>> file_move_safe("/tmp/old_file", "/tmp/new_file")
"""
import os
from django.core.files import locks
try:
from shutil import copystat
except ImportError:
import stat
def copystat(src, dst):
"""Copy all stat info (mode bits, atime and mtime) from src to dst"""
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
if hasattr(os, 'utime'):
os.utime(dst, (st.st_atime, st.st_mtime))
if hasattr(os, 'chmod'):
os.chmod(dst, mode)
__all__ = ['file_move_safe']
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path,'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False):
"""
Moves a file from one location to another in the safest way possible.
First, tries ``os.rename``, which is simple but will break across filesystems.
If that fails, streams manually from one file to another in pure Python.
If the destination file exists and ``allow_overwrite`` is ``False``, this
function will throw an ``IOError``.
"""
# There's no reason to move if we don't have to.
if _samefile(old_file_name, new_file_name):
return
try:
os.rename(old_file_name, new_file_name)
return
except OSError:
# This will happen with os.rename if moving to another filesystem
# or when moving opened files on certain operating systems
pass
# first open the old file, so that it won't go away
old_file = open(old_file_name, 'rb')
try:
# now open the new file, not forgetting allow_overwrite
fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
(not allow_overwrite and os.O_EXCL or 0))
try:
locks.lock(fd, locks.LOCK_EX)
current_chunk = None
while current_chunk != '':
current_chunk = old_file.read(chunk_size)
os.write(fd, current_chunk)
finally:
locks.unlock(fd)
os.close(fd)
finally:
old_file.close()
copystat(old_file_name, new_file_name)
try:
os.remove(old_file_name)
except OSError, e:
# Certain operating systems (Cygwin and Windows)
# fail when deleting opened files, ignore it. (For the
# systems where this happens, temporary files will be auto-deleted
# on close anyway.)
if getattr(e, 'winerror', 0) != 32 and getattr(e, 'errno', 0) != 13:
raise
|
bsd-3-clause
|
sauloal/PiCastPy
|
sqlalchemy/engine/__init__.py
|
14
|
15527
|
# engine/__init__.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQL connections, SQL execution and high-level DB-API interface.
The engine package defines the basic components used to interface
DB-API modules with higher-level statement construction,
connection-management, execution and result contexts. The primary
"entry point" class into this package is the Engine and its public
constructor ``create_engine()``.
This package includes:
base.py
Defines interface classes and some implementation classes which
comprise the basic components used to interface between a DB-API,
constructed and plain-text statements, connections, transactions,
and results.
default.py
Contains default implementations of some of the components defined
in base.py. All current database dialects use the classes in
default.py as base classes for their own database-specific
implementations.
strategies.py
The mechanics of constructing ``Engine`` objects are represented
here. Defines the ``EngineStrategy`` class which represents how
to go from arguments specified to the ``create_engine()``
function, to a fully constructed ``Engine``, including
initialization of connection pooling, dialects, and specific
subclasses of ``Engine``.
threadlocal.py
The ``TLEngine`` class is defined here, which is a subclass of
the generic ``Engine`` and tracks ``Connection`` and
``Transaction`` objects against the identity of the current
thread. This allows certain programming patterns based around
the concept of a "thread-local connection" to be possible.
The ``TLEngine`` is created by using the "threadlocal" engine
strategy in conjunction with the ``create_engine()`` function.
url.py
Defines the ``URL`` class which represents the individual
components of a string URL passed to ``create_engine()``. Also
defines a basic module-loading strategy for the dialect specifier
within a URL.
"""
# not sure what this was used for
#import sqlalchemy.databases
from .interfaces import (
Compiled,
Connectable,
Dialect,
ExecutionContext,
TypeCompiler
)
from .base import (
Connection,
Engine,
NestedTransaction,
RootTransaction,
Transaction,
TwoPhaseTransaction,
)
from .result import (
BufferedColumnResultProxy,
BufferedColumnRow,
BufferedRowResultProxy,
FullyBufferedResultProxy,
ResultProxy,
RowProxy,
)
from .util import (
connection_memoize
)
from . import util, strategies
default_strategy = 'plain'
def create_engine(*args, **kwargs):
"""Create a new :class:`.Engine` instance.
The standard calling form is to send the URL as the
first positional argument, usually a string
that indicates database dialect and connection arguments.
Additional keyword arguments may then follow it which
establish various options on the resulting :class:`.Engine`
and its underlying :class:`.Dialect` and :class:`.Pool`
constructs.
The string form of the URL is
``dialect+driver://user:password@host/dbname[?key=value..]``, where
``dialect`` is a database name such as ``mysql``, ``oracle``,
``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
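For example::
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test")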
``**kwargs`` takes a wide variety of options which are routed
towards their appropriate components. Arguments may be specific
to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as
the :class:`.Pool`. Specific dialects also accept keyword
arguments that are unique to that dialect. Here, we describe the
parameters that are common to most :func:`.create_engine()` usage.
Once established, the newly resulting :class:`.Engine` will
request a connection from the underlying :class:`.Pool` once
:meth:`.Engine.connect` is called, or a method which depends on it
such as :meth:`.Engine.execute` is invoked. The :class:`.Pool` in turn
will establish the first actual DBAPI connection when this request
is received. The :func:`.create_engine` call itself does **not**
establish any actual DBAPI connections directly.
See also:
:doc:`/core/engines`
:ref:`connections_toplevel`
:param case_sensitive=True: if False, result column names
will match in a case-insensitive fashion, that is,
``row['SomeColumn']``.
.. versionchanged:: 0.8
By default, result row names match case-sensitively.
In version 0.7 and prior, all matches were case-insensitive.
:param connect_args: a dictionary of options which will be
passed directly to the DBAPI's ``connect()`` method as
additional keyword arguments. See the example
at :ref:`custom_dbapi_args`.
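      For instance (``connect_timeout`` here is a hypothetical driver
      keyword; consult the DBAPI's documentation for the names it actually
      accepts)::
          engine = create_engine(
              "postgresql+psycopg2://scott:tiger@localhost/test",
              connect_args={'connect_timeout': 10})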
:param convert_unicode=False: if set to True, sets
the default behavior of ``convert_unicode`` on the
:class:`.String` type to ``True``, regardless
of a setting of ``False`` on an individual
:class:`.String` type, thus causing all :class:`.String`
-based columns
to accommodate Python ``unicode`` objects. This flag
is useful as an engine-wide setting when using a
DBAPI that does not natively support Python
``unicode`` objects and raises an error when
one is received (such as pyodbc with FreeTDS).
See :class:`.String` for further details on
what this flag indicates.
:param creator: a callable which returns a DBAPI connection.
This creation function will be passed to the underlying
connection pool and will be used to create all new database
connections. Usage of this function causes connection
parameters specified in the URL argument to be bypassed.
:param echo=False: if True, the Engine will log all statements
as well as a repr() of their parameter lists to the engines
logger, which defaults to sys.stdout. The ``echo`` attribute of
``Engine`` can be modified at any time to turn logging on and
off. If set to the string ``"debug"``, result rows will be
printed to the standard output as well. This flag ultimately
controls a Python logger; see :ref:`dbengine_logging` for
information on how to configure logging directly.
:param echo_pool=False: if True, the connection pool will log
all checkouts/checkins to the logging stream, which defaults to
sys.stdout. This flag ultimately controls a Python logger; see
:ref:`dbengine_logging` for information on how to configure logging
directly.
:param encoding: Defaults to ``utf-8``. This is the string
encoding used by SQLAlchemy for string encode/decode
operations which occur within SQLAlchemy, **outside of
the DBAPI.** Most modern DBAPIs feature some degree of
direct support for Python ``unicode`` objects,
what you see in Python 2 as a string of the form
``u'some string'``. For those scenarios where the
DBAPI is detected as not supporting a Python ``unicode``
object, this encoding is used to determine the
source/destination encoding. It is **not used**
for those cases where the DBAPI handles unicode
directly.
To properly configure a system to accommodate Python
``unicode`` objects, the DBAPI should be
configured to handle unicode to the greatest
degree as is appropriate - see
the notes on unicode pertaining to the specific
target database in use at :ref:`dialect_toplevel`.
Areas where string encoding may need to be accommodated
outside of the DBAPI include zero or more of:
* the values passed to bound parameters, corresponding to
the :class:`.Unicode` type or the :class:`.String` type
when ``convert_unicode`` is ``True``;
* the values returned in result set columns corresponding
to the :class:`.Unicode` type or the :class:`.String`
type when ``convert_unicode`` is ``True``;
* the string SQL statement passed to the DBAPI's
``cursor.execute()`` method;
* the string names of the keys in the bound parameter
dictionary passed to the DBAPI's ``cursor.execute()``
as well as ``cursor.setinputsizes()`` methods;
* the string column names retrieved from the DBAPI's
``cursor.description`` attribute.
When using Python 3, the DBAPI is required to support
*all* of the above values as Python ``unicode`` objects,
which in Python 3 are just known as ``str``. In Python 2,
the DBAPI does not specify unicode behavior at all,
so SQLAlchemy must make decisions for each of the above
values on a per-DBAPI basis - implementations are
completely inconsistent in their behavior.
:param execution_options: Dictionary execution options which will
be applied to all connections. See
:meth:`~sqlalchemy.engine.Connection.execution_options`
:param implicit_returning=True: When ``True``, a RETURNING-
compatible construct, if available, will be used to
fetch newly generated primary key values when a single row
INSERT statement is emitted with no existing returning()
clause. This applies to those backends which support RETURNING
or a compatible construct, including Postgresql, Firebird, Oracle,
Microsoft SQL Server. Set this to ``False`` to disable
the automatic usage of RETURNING.
:param label_length=None: optional integer value which limits
the size of dynamically generated column labels to that many
characters. If less than 6, labels are generated as
"_(counter)". If ``None``, the value of
``dialect.max_identifier_length`` is used instead.
:param listeners: A list of one or more
:class:`~sqlalchemy.interfaces.PoolListener` objects which will
receive connection pool events.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.engine" logger. Defaults to a hexstring of the
object's id.
:param max_overflow=10: the number of connections to allow in
connection pool "overflow", that is connections that can be
opened above and beyond the pool_size setting, which defaults
      to five. This is only used with :class:`~sqlalchemy.pool.QueuePool`.
:param module=None: reference to a Python module object (the module
itself, not its string name). Specifies an alternate DBAPI module to
be used by the engine's dialect. Each sub-dialect references a
specific DBAPI which will be imported before first connect. This
parameter causes the import to be bypassed, and the given module to
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
pool will be used directly as the underlying connection pool
for the engine, bypassing whatever connection parameters are
present in the URL argument. For information on constructing
connection pools manually, see :ref:`pooling_toplevel`.
:param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
subclass, which will be used to create a connection pool
instance using the connection parameters given in the URL. Note
this differs from ``pool`` in that you don't actually
instantiate the pool in this case, you just indicate what type
of pool to be used.
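      A short sketch using :class:`~sqlalchemy.pool.NullPool` (which, as
      noted under ``pool_size`` below, disables pooling)::
          from sqlalchemy.pool import NullPool
          engine = create_engine(
              "postgresql+psycopg2://scott:tiger@localhost/test",
              poolclass=NullPool)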
:param pool_logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param pool_size=5: the number of connections to keep open
      inside the connection pool. This is used with
:class:`~sqlalchemy.pool.QueuePool` as
well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
:class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
of 0 indicates no limit; to disable pooling, set ``poolclass`` to
:class:`~sqlalchemy.pool.NullPool` instead.
:param pool_recycle=-1: this setting causes the pool to recycle
connections after the given number of seconds has passed. It
defaults to -1, or no timeout. For example, setting to 3600
means connections will be recycled after one hour. Note that
MySQL in particular will disconnect automatically if no
activity is detected on a connection for eight hours (although
      this is configurable with the MySQLdb connection itself and the
server configuration as well).
:param pool_reset_on_return='rollback': set the "reset on return"
behavior of the pool, which is whether ``rollback()``,
``commit()``, or nothing is called upon connections
being returned to the pool. See the docstring for
``reset_on_return`` at :class:`.Pool`.
.. versionadded:: 0.7.6
:param pool_timeout=30: number of seconds to wait before giving
up on getting a connection from the pool. This is only used
with :class:`~sqlalchemy.pool.QueuePool`.
:param strategy='plain': selects alternate engine implementations.
Currently available are:
* the ``threadlocal`` strategy, which is described in
:ref:`threadlocal_strategy`;
* the ``mock`` strategy, which dispatches all statement
execution to a function passed as the argument ``executor``.
See `example in the FAQ
<http://www.sqlalchemy.org/trac/wiki/FAQ#HowcanIgettheCREATETABLEDROPTABLEoutputasastring>`_.
:param executor=None: a function taking arguments
``(sql, *multiparams, **params)``, to which the ``mock`` strategy will
dispatch all statement execution. Used only by ``strategy='mock'``.
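      For example, a hypothetical executor used with the ``mock``
      strategy (in the spirit of the FAQ example referenced above)::
          def dump(sql, *multiparams, **params):
              print(sql)
          engine = create_engine('postgresql://', strategy='mock',
                                 executor=dump)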
"""
strategy = kwargs.pop('strategy', default_strategy)
strategy = strategies.strategies[strategy]
return strategy.create(*args, **kwargs)
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.
The dictionary is typically produced from a config file where keys
are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc. The
'prefix' argument indicates the prefix to be searched for.
A select set of keyword arguments will be "coerced" to their
expected type based on string values. In a future release, this
functionality will be expanded and include dialect-specific
arguments.
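    A minimal sketch (hypothetical configuration values)::
        config = {'sqlalchemy.url': 'sqlite://', 'sqlalchemy.echo': 'true'}
        engine = engine_from_config(config, prefix='sqlalchemy.')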
"""
opts = util._coerce_config(configuration, prefix)
opts.update(kwargs)
url = opts.pop('url')
return create_engine(url, **opts)
__all__ = (
'create_engine',
'engine_from_config',
)
|
mit
|
amboxer21/scrapy
|
scrapy/utils/signal.py
|
18
|
2931
|
"""Helper functions for working with signals"""
import logging
from twisted.internet.defer import maybeDeferred, DeferredList, Deferred
from twisted.python.failure import Failure
from scrapy.xlib.pydispatch.dispatcher import Any, Anonymous, liveReceivers, \
getAllReceivers, disconnect
from scrapy.xlib.pydispatch.robustapply import robustApply
from scrapy.utils.log import failure_to_exc_info
logger = logging.getLogger(__name__)
def send_catch_log(signal=Any, sender=Anonymous, *arguments, **named):
"""Like pydispatcher.robust.sendRobust but it also logs errors and returns
Failures instead of exceptions.
"""
dont_log = named.pop('dont_log', None)
spider = named.get('spider', None)
responses = []
for receiver in liveReceivers(getAllReceivers(sender, signal)):
try:
response = robustApply(receiver, signal=signal, sender=sender,
*arguments, **named)
if isinstance(response, Deferred):
logger.error("Cannot return deferreds from signal handler: %(receiver)s",
{'receiver': receiver}, extra={'spider': spider})
        except dont_log:  # dont_log is a caller-supplied exception class to be silenced
            result = Failure()
except Exception:
result = Failure()
logger.error("Error caught on signal handler: %(receiver)s",
{'receiver': receiver},
exc_info=True, extra={'spider': spider})
else:
result = response
responses.append((receiver, result))
return responses
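# Illustrative call (``my_signal``, ``my_sender`` and ``spider`` are
# hypothetical names for whatever the caller dispatches):
#     responses = send_catch_log(signal=my_signal, sender=my_sender, spider=spider)
#     for receiver, result in responses:
#         pass  # result is the handler's return value, or a Failure on error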
def send_catch_log_deferred(signal=Any, sender=Anonymous, *arguments, **named):
"""Like send_catch_log but supports returning deferreds on signal handlers.
Returns a deferred that gets fired once all signal handlers deferreds were
fired.
"""
def logerror(failure, recv):
if dont_log is None or not isinstance(failure.value, dont_log):
logger.error("Error caught on signal handler: %(receiver)s",
{'receiver': recv},
exc_info=failure_to_exc_info(failure),
extra={'spider': spider})
return failure
dont_log = named.pop('dont_log', None)
spider = named.get('spider', None)
dfds = []
for receiver in liveReceivers(getAllReceivers(sender, signal)):
d = maybeDeferred(robustApply, receiver, signal=signal, sender=sender,
*arguments, **named)
d.addErrback(logerror, receiver)
d.addBoth(lambda result: (receiver, result))
dfds.append(d)
d = DeferredList(dfds)
d.addCallback(lambda out: [x[1] for x in out])
return d
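# As above, but for handlers that may return Deferreds (hypothetical names):
#     d = send_catch_log_deferred(signal=my_signal, sender=my_sender, spider=spider)
#     d.addCallback(lambda responses: responses)  # list of (receiver, result) pairs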
def disconnect_all(signal=Any, sender=Any):
"""Disconnect all signal handlers. Useful for cleaning up after running
tests
"""
for receiver in liveReceivers(getAllReceivers(sender, signal)):
disconnect(receiver, signal=signal, sender=sender)
|
bsd-3-clause
|
OndinaHQ/Tracker
|
cherrypy/process/win32.py
|
93
|
5870
|
"""Windows service. Requires pywin32."""
import os
import win32api
import win32con
import win32event
import win32service
import win32serviceutil
from cherrypy.process import wspbus, plugins
class ConsoleCtrlHandler(plugins.SimplePlugin):
"""A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""
def __init__(self, bus):
self.is_set = False
plugins.SimplePlugin.__init__(self, bus)
def start(self):
if self.is_set:
self.bus.log('Handler for console events already set.', level=40)
return
result = win32api.SetConsoleCtrlHandler(self.handle, 1)
if result == 0:
self.bus.log('Could not SetConsoleCtrlHandler (error %r)' %
win32api.GetLastError(), level=40)
else:
self.bus.log('Set handler for console events.', level=40)
self.is_set = True
def stop(self):
if not self.is_set:
self.bus.log('Handler for console events already off.', level=40)
return
try:
result = win32api.SetConsoleCtrlHandler(self.handle, 0)
except ValueError:
# "ValueError: The object has not been registered"
result = 1
if result == 0:
self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' %
win32api.GetLastError(), level=40)
else:
self.bus.log('Removed handler for console events.', level=40)
self.is_set = False
def handle(self, event):
"""Handle console control events (like Ctrl-C)."""
if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT,
win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT,
win32con.CTRL_CLOSE_EVENT):
self.bus.log('Console event %s: shutting down bus' % event)
# Remove self immediately so repeated Ctrl-C doesn't re-call it.
try:
self.stop()
except ValueError:
pass
self.bus.exit()
# 'First to return True stops the calls'
return 1
return 0
class Win32Bus(wspbus.Bus):
"""A Web Site Process Bus implementation for Win32.
Instead of time.sleep, this bus blocks using native win32event objects.
"""
def __init__(self):
self.events = {}
wspbus.Bus.__init__(self)
def _get_state_event(self, state):
"""Return a win32event for the given state (creating it if needed)."""
try:
return self.events[state]
except KeyError:
event = win32event.CreateEvent(None, 0, 0,
"WSPBus %s Event (pid=%r)" %
(state.name, os.getpid()))
self.events[state] = event
return event
def _get_state(self):
return self._state
def _set_state(self, value):
self._state = value
event = self._get_state_event(value)
win32event.PulseEvent(event)
state = property(_get_state, _set_state)
def wait(self, state, interval=0.1, channel=None):
"""Wait for the given state(s), KeyboardInterrupt or SystemExit.
Since this class uses native win32event objects, the interval
argument is ignored.
"""
if isinstance(state, (tuple, list)):
# Don't wait for an event that beat us to the punch ;)
if self.state not in state:
events = tuple([self._get_state_event(s) for s in state])
win32event.WaitForMultipleObjects(events, 0, win32event.INFINITE)
else:
# Don't wait for an event that beat us to the punch ;)
if self.state != state:
event = self._get_state_event(state)
win32event.WaitForSingleObject(event, win32event.INFINITE)
class _ControlCodes(dict):
"""Control codes used to "signal" a service via ControlService.
User-defined control codes are in the range 128-255. We generally use
the standard Python value for the Linux signal and add 128. Example:
>>> signal.SIGUSR1
10
control_codes['graceful'] = 128 + 10
"""
def key_for(self, obj):
"""For the given value, return its corresponding key."""
for key, val in self.items():
if val is obj:
return key
raise ValueError("The given object could not be found: %r" % obj)
control_codes = _ControlCodes({'graceful': 138})
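# e.g. control_codes.key_for(control_codes['graceful']) -> 'graceful';
# an unknown value raises ValueError.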
def signal_child(service, command):
if command == 'stop':
win32serviceutil.StopService(service)
elif command == 'restart':
win32serviceutil.RestartService(service)
else:
win32serviceutil.ControlService(service, control_codes[command])
class PyWebService(win32serviceutil.ServiceFramework):
"""Python Web Service."""
_svc_name_ = "Python Web Service"
_svc_display_name_ = "Python Web Service"
_svc_deps_ = None # sequence of service names on which this depends
_exe_name_ = "pywebsvc"
_exe_args_ = None # Default to no arguments
    # Only exists on Windows 2000 or later; ignored on Windows NT
_svc_description_ = "Python Web Service"
def SvcDoRun(self):
from cherrypy import process
process.bus.start()
process.bus.block()
def SvcStop(self):
from cherrypy import process
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
process.bus.exit()
def SvcOther(self, control):
process.bus.publish(control_codes.key_for(control))
if __name__ == '__main__':
win32serviceutil.HandleCommandLine(PyWebService)
|
gpl-3.0
|
rghe/ansible
|
lib/ansible/modules/cloud/vmware/vmware_vm_facts.py
|
31
|
6619
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_vm_facts
short_description: Return basic facts pertaining to a vSphere virtual machine guest
description:
- Return basic facts pertaining to a vSphere virtual machine guest.
- Cluster name as fact is added in version 2.7.
version_added: '2.0'
author:
- Joseph Callen (@jcpowermac)
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 5.5 and vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
vm_type:
description:
- If set to C(vm), then facts are gathered for virtual machines only.
- If set to C(template), then facts are gathered for virtual machine templates only.
- If set to C(all), then facts are gathered for all virtual machines and virtual machine templates.
required: False
default: 'all'
choices: [ all, vm, template ]
version_added: 2.5
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather all registered virtual machines
vmware_vm_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
delegate_to: localhost
register: vmfacts
- debug:
var: vmfacts.virtual_machines
- name: Gather only registered virtual machine templates
vmware_vm_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
vm_type: template
delegate_to: localhost
register: template_facts
- debug:
var: template_facts.virtual_machines
- name: Gather only registered virtual machines
vmware_vm_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
vm_type: vm
delegate_to: localhost
register: vm_facts
- debug:
var: vm_facts.virtual_machines
'''
RETURN = r'''
virtual_machines:
description: dictionary of virtual machines and their facts
returned: success
type: dict
sample:
{
"ubuntu_t": {
"cluster": null,
"esxi_hostname": "10.76.33.226",
"guest_fullname": "Ubuntu Linux (64-bit)",
"ip_address": "",
"mac_address": [
"00:50:56:87:a5:9a"
],
"power_state": "poweredOff",
"uuid": "4207072c-edd8-3bd5-64dc-903fd3a0db04",
"vm_network": {}
}
}
'''
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, get_all_objs, vmware_argument_spec, _get_vm_prop
class VmwareVmFacts(PyVmomi):
def __init__(self, module):
super(VmwareVmFacts, self).__init__(module)
# https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/getallvms.py
def get_all_virtual_machines(self):
"""
Function to get all virtual machines and related configurations information
"""
virtual_machines = get_all_objs(self.content, [vim.VirtualMachine])
_virtual_machines = {}
for vm in virtual_machines:
_ip_address = ""
summary = vm.summary
if summary.guest is not None:
_ip_address = summary.guest.ipAddress
if _ip_address is None:
_ip_address = ""
_mac_address = []
all_devices = _get_vm_prop(vm, ('config', 'hardware', 'device'))
if all_devices:
for dev in all_devices:
if isinstance(dev, vim.vm.device.VirtualEthernetCard):
_mac_address.append(dev.macAddress)
net_dict = {}
vmnet = _get_vm_prop(vm, ('guest', 'net'))
if vmnet:
for device in vmnet:
net_dict[device.macAddress] = dict()
net_dict[device.macAddress]['ipv4'] = []
net_dict[device.macAddress]['ipv6'] = []
for ip_addr in device.ipAddress:
if "::" in ip_addr:
net_dict[device.macAddress]['ipv6'].append(ip_addr)
else:
net_dict[device.macAddress]['ipv4'].append(ip_addr)
esxi_hostname = None
esxi_parent = None
if summary.runtime.host:
esxi_hostname = summary.runtime.host.summary.config.name
esxi_parent = summary.runtime.host.parent
cluster_name = None
if esxi_parent and isinstance(esxi_parent, vim.ClusterComputeResource):
cluster_name = summary.runtime.host.parent.name
virtual_machine = {
summary.config.name: {
"guest_fullname": summary.config.guestFullName,
"power_state": summary.runtime.powerState,
"ip_address": _ip_address, # Kept for backward compatibility
"mac_address": _mac_address, # Kept for backward compatibility
"uuid": summary.config.uuid,
"vm_network": net_dict,
"esxi_hostname": esxi_hostname,
"cluster": cluster_name,
}
}
vm_type = self.module.params.get('vm_type')
is_template = _get_vm_prop(vm, ('config', 'template'))
if vm_type == 'vm' and not is_template:
_virtual_machines.update(virtual_machine)
elif vm_type == 'template' and is_template:
_virtual_machines.update(virtual_machine)
elif vm_type == 'all':
_virtual_machines.update(virtual_machine)
return _virtual_machines
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
vm_type=dict(type='str', choices=['vm', 'all', 'template'], default='all'),
)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=False)
vmware_vm_facts = VmwareVmFacts(module)
_virtual_machines = vmware_vm_facts.get_all_virtual_machines()
module.exit_json(changed=False, virtual_machines=_virtual_machines)
if __name__ == '__main__':
main()
|
gpl-3.0
|
AltSchool/django
|
tests/template_tests/filter_tests/test_addslashes.py
|
473
|
1202
|
from django.template.defaultfilters import addslashes
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class AddslashesTests(SimpleTestCase):
@setup({'addslashes01': '{% autoescape off %}{{ a|addslashes }} {{ b|addslashes }}{% endautoescape %}'})
def test_addslashes01(self):
output = self.engine.render_to_string('addslashes01', {"a": "<a>'", "b": mark_safe("<a>'")})
self.assertEqual(output, r"<a>\' <a>\'")
@setup({'addslashes02': '{{ a|addslashes }} {{ b|addslashes }}'})
def test_addslashes02(self):
output = self.engine.render_to_string('addslashes02', {"a": "<a>'", "b": mark_safe("<a>'")})
self.assertEqual(output, r"<a>\' <a>\'")
class FunctionTests(SimpleTestCase):
def test_quotes(self):
self.assertEqual(
addslashes('"double quotes" and \'single quotes\''),
'\\"double quotes\\" and \\\'single quotes\\\'',
)
def test_backslashes(self):
self.assertEqual(addslashes(r'\ : backslashes, too'), '\\\\ : backslashes, too')
def test_non_string_input(self):
self.assertEqual(addslashes(123), '123')
|
bsd-3-clause
|
crafty78/ansible
|
lib/ansible/plugins/action/service.py
|
16
|
3306
|
# (c) 2015, Ansible Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = False
UNUSED_PARAMS = {
'systemd': ['pattern', 'runlevel', 'sleep', 'arguments', 'args'],
}
def run(self, tmp=None, task_vars=None):
        ''' handler for service operations '''
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
module = self._task.args.get('use', 'auto').lower()
if module == 'auto':
try:
if self._task.delegate_to: # if we delegate, we should use delegated host's facts
module = self._templar.template("{{hostvars['%s']['ansible_service_mgr']}}" % self._task.delegate_to)
else:
module = self._templar.template('{{ansible_service_mgr}}')
except:
pass # could not get it from template!
if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars)
self._display.debug("Facts %s" % facts)
if 'ansible_facts' in facts and 'ansible_service_mgr' in facts['ansible_facts']:
module = facts['ansible_facts']['ansible_service_mgr']
if not module or module == 'auto' or module not in self._shared_loader_obj.module_loader:
module = 'service'
if module != 'auto':
# run the 'service' module
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']
# for backwards compatibility
if 'state' in new_module_args and new_module_args['state'] == 'running':
new_module_args['state'] = 'started'
if module in self.UNUSED_PARAMS:
for unused in self.UNUSED_PARAMS[module]:
if unused in new_module_args:
del new_module_args[unused]
self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module))
self._display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars))
else:
result['failed'] = True
result['msg'] = 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.'
return result
|
gpl-3.0
|
thinkopensolutions/tkobr-addons
|
unported/tko_partner_configuration_menus/__openerp__.py
|
2
|
1853
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Partner Entities Menus for tko_l10n_br_base',
'version': '0.007',
'category': 'Customization',
'sequence': 18,
'complexity': 'normal',
'description': ''' This module creates partner menus for tko_l10n_br_base new fields under Settings -> Technical''',
'author': 'ThinkOpen Solutions Brasil',
'website': 'http://www.tkobr.com',
'depends': [
'base',
'tko_l10n_br_base',
],
'data': [
'base_view.xml',
],
'init': [],
'demo': [],
'update': [],
'test': [], # YAML files with tests
'installable': True,
'application': False,
# If it's True, the modules will be auto-installed when all dependencies
# are installed
'auto_install': False,
'certificate': '',
}
|
agpl-3.0
|
stonek4/iperfparser
|
parseperf.py
|
1
|
1894
|
import argparse
import json
from pprint import pprint
def convertToM(number, btype):
    """Convert a value in (K)Bytes or (K)bits/sec to mega-units."""
if (btype == 'KBytes' or btype == 'Kbits/sec'):
return number/1000
elif (btype == 'Bytes' or btype == 'Bits/sec'):
return number/1000000
else:
return number
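# e.g. convertToM(2048.0, 'KBytes') -> 2.048 and convertToM(5.0e6, 'Bits/sec') -> 5.0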
def parseIPerf(file_name, json_data):
    """Parse an iperf text report and fill each point in json_data with
    transferred MBytes, bandwidth in Mbps and the packet percentage."""
data = ""
with open (file_name) as iperf_data:
data = iperf_data.read()
index = 0
data_points = data.split("\n\n")
for point in data_points:
t = 0.0
b = 0.0
p = 0.0
if (len(point.split('\n')) >= 12):
sdata = point.split('\n')[11].split(' ')
if (len(sdata) > 5):
tdata = sdata[3].lstrip().split(' ')
bdata = sdata[4].lstrip().split(' ')
t = convertToM(float(tdata[0]), tdata[1])
b = convertToM(float(bdata[0]), bdata[1])
p = float(sdata[-1].split(' ')[-1].strip('(').strip(')').strip('%'))
json_data['points'][index]['transferred_mbytes'] = t
json_data['points'][index]['bandwidth_mbps'] = b
json_data['points'][index]['packet_perc'] = p
index += 1
while index < len(json_data['points']):
json_data['points'][index]['transferred_mbytes'] = 0.0
json_data['points'][index]['bandwidth_mbps'] = 0.0
json_data['points'][index]['packet_perc'] = 0.0
index += 1
def main():
parser = argparse.ArgumentParser()
parser.add_argument("pfile", help="the iperf data file")
parser.add_argument("jfile", help="the json data file")
parser.add_argument("ofile", help="the json output file")
args = parser.parse_args()
with open(args.jfile) as json_file:
data = json.load(json_file)
parseIPerf(args.pfile, data)
with open(args.ofile, 'w+') as output_file:
json.dump(data, output_file, indent=4)
if __name__ == '__main__':
    main()
|
mit
|
mobilecosystem/walisph-bootstrap
|
test-infra/s3_cache.py
|
1700
|
3523
|
#!/usr/bin/env python2.7
from __future__ import absolute_import, unicode_literals, print_function, division
from sys import argv
from os import environ, stat, remove as _delete_file
from os.path import isfile, dirname, basename, abspath
from hashlib import sha256
from subprocess import check_call as run
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
BYTES_PER_MB = 1024 * 1024
try:
BUCKET_NAME = environ['TWBS_S3_BUCKET']
except KeyError:
raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
def _sha256_of_file(filename):
hasher = sha256()
with open(filename, 'rb') as input_file:
hasher.update(input_file.read())
file_hash = hasher.hexdigest()
print('sha256({}) = {}'.format(filename, file_hash))
return file_hash
def _delete_file_quietly(filename):
try:
_delete_file(filename)
except (OSError, IOError):
pass
def _tarball_size(directory):
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)
def _tarball_filename_for(directory):
return abspath('./{}.tar.gz'.format(basename(directory)))
def _create_tarball(directory):
print("Creating tarball of {}...".format(directory))
run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])
def _extract_tarball(directory):
print("Extracting tarball of {}...".format(directory))
run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])
def download(directory):
_delete_file_quietly(NEED_TO_UPLOAD_MARKER)
try:
print("Downloading {} tarball from S3...".format(friendly_name))
key.get_contents_to_filename(_tarball_filename_for(directory))
except S3ResponseError as err:
open(NEED_TO_UPLOAD_MARKER, 'a').close()
print(err)
raise SystemExit("Cached {} download failed!".format(friendly_name))
print("Downloaded {}.".format(_tarball_size(directory)))
_extract_tarball(directory)
print("{} successfully installed from cache.".format(friendly_name))
def upload(directory):
_create_tarball(directory)
print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
key.set_contents_from_filename(_tarball_filename_for(directory))
print("{} cache successfully updated.".format(friendly_name))
_delete_file_quietly(NEED_TO_UPLOAD_MARKER)
if __name__ == '__main__':
# Uses environment variables:
# AWS_ACCESS_KEY_ID -- AWS Access Key ID
# AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
argv.pop(0)
if len(argv) != 4:
raise SystemExit("USAGE: s3_cache.py <download | upload> <friendly name> <dependencies file> <directory>")
mode, friendly_name, dependencies_file, directory = argv
conn = S3Connection()
bucket = conn.lookup(BUCKET_NAME, validate=False)
if bucket is None:
raise SystemExit("Could not access bucket!")
dependencies_file_hash = _sha256_of_file(dependencies_file)
key = Key(bucket, dependencies_file_hash)
key.storage_class = 'REDUCED_REDUNDANCY'
if mode == 'download':
download(directory)
elif mode == 'upload':
if isfile(NEED_TO_UPLOAD_MARKER): # FIXME
upload(directory)
else:
print("No need to upload anything.")
else:
raise SystemExit("Unrecognized mode {!r}".format(mode))
|
mit
|
Tejeshwarabm/Westwood
|
examples/wireless/wifi-ap.py
|
57
|
5871
|
# -*- Mode: Python; -*-
# /*
# * Copyright (c) 2005,2006,2007 INRIA
# * Copyright (c) 2009 INESC Porto
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License version 2 as
# * published by the Free Software Foundation;
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# *
# * Authors: Mathieu Lacage <[email protected]>
# * Gustavo Carneiro <[email protected]>
# */
import sys
import ns.applications
import ns.core
import ns.internet
import ns.mobility
import ns.network
import ns.point_to_point
import ns.wifi
# void
# DevTxTrace (std::string context, Ptr<const Packet> p, Mac48Address address)
# {
# std::cout << " TX to=" << address << " p: " << *p << std::endl;
# }
# void
# DevRxTrace(std::string context, Ptr<const Packet> p, Mac48Address address)
# {
# std::cout << " RX from=" << address << " p: " << *p << std::endl;
# }
# void
# PhyRxOkTrace(std::string context, Ptr<const Packet> packet, double snr, WifiMode mode, enum WifiPreamble preamble)
# {
# std::cout << "PHYRXOK mode=" << mode << " snr=" << snr << " " << *packet << std::endl;
# }
# void
# PhyRxErrorTrace(std::string context, Ptr<const Packet> packet, double snr)
# {
# std::cout << "PHYRXERROR snr=" << snr << " " << *packet << std::endl;
# }
# void
# PhyTxTrace(std::string context, Ptr<const Packet> packet, WifiMode mode, WifiPreamble preamble, uint8_t txPower)
# {
# std::cout << "PHYTX mode=" << mode << " " << *packet << std::endl;
# }
# void
# PhyStateTrace(std::string context, Time start, Time duration, enum WifiPhy::State state)
# {
# std::cout << " state=";
# switch(state) {
# case WifiPhy::TX:
# std::cout << "tx ";
# break;
# case WifiPhy::SYNC:
# std::cout << "sync ";
# break;
# case WifiPhy::CCA_BUSY:
# std::cout << "cca-busy";
# break;
# case WifiPhy::IDLE:
# std::cout << "idle ";
# break;
# }
# std::cout << " start="<<start<<" duration="<<duration<<std::endl;
# }
def SetPosition(node, position):
mobility = node.GetObject(ns.mobility.MobilityModel.GetTypeId())
mobility.SetPosition(position)
def GetPosition(node):
mobility = node.GetObject(ns.mobility.MobilityModel.GetTypeId())
return mobility.GetPosition()
def AdvancePosition(node):
    pos = GetPosition(node)
pos.x += 5.0
if pos.x >= 210.0:
return
SetPosition(node, pos)
ns.core.Simulator.Schedule(ns.core.Seconds(1.0), AdvancePosition, node)
def main(argv):
ns.core.CommandLine().Parse(argv)
    ns.network.Packet.EnablePrinting()
# enable rts cts all the time.
ns.core.Config.SetDefault("ns3::WifiRemoteStationManager::RtsCtsThreshold", ns.core.StringValue("0"))
# disable fragmentation
ns.core.Config.SetDefault("ns3::WifiRemoteStationManager::FragmentationThreshold", ns.core.StringValue("2200"))
wifi = ns.wifi.WifiHelper.Default()
mobility = ns.mobility.MobilityHelper()
stas = ns.network.NodeContainer()
ap = ns.network.NodeContainer()
#NetDeviceContainer staDevs;
packetSocket = ns.network.PacketSocketHelper()
stas.Create(2)
ap.Create(1)
# give packet socket powers to nodes.
packetSocket.Install(stas)
packetSocket.Install(ap)
wifiPhy = ns.wifi.YansWifiPhyHelper.Default()
wifiChannel = ns.wifi.YansWifiChannelHelper.Default()
wifiPhy.SetChannel(wifiChannel.Create())
ssid = ns.wifi.Ssid("wifi-default")
wifi.SetRemoteStationManager("ns3::ArfWifiManager")
wifiMac = ns.wifi.WifiMacHelper()
# setup stas.
wifiMac.SetType("ns3::StaWifiMac",
"Ssid", ns.wifi.SsidValue(ssid),
"ActiveProbing", ns.core.BooleanValue(False))
staDevs = wifi.Install(wifiPhy, wifiMac, stas)
# setup ap.
wifiMac.SetType("ns3::ApWifiMac",
"Ssid", ns.wifi.SsidValue(ssid),
"BeaconGeneration", ns.core.BooleanValue(True),
"BeaconInterval", ns.core.TimeValue(ns.core.Seconds(2.5)))
wifi.Install(wifiPhy, wifiMac, ap)
# mobility.
mobility.Install(stas)
mobility.Install(ap)
ns.core.Simulator.Schedule(ns.core.Seconds(1.0), AdvancePosition, ap.Get(0))
socket = ns.network.PacketSocketAddress()
socket.SetSingleDevice(staDevs.Get(0).GetIfIndex())
socket.SetPhysicalAddress(staDevs.Get(1).GetAddress())
socket.SetProtocol(1)
onoff = ns.applications.OnOffHelper("ns3::PacketSocketFactory", ns.network.Address(socket))
onoff.SetConstantRate (ns.network.DataRate ("500kb/s"))
apps = onoff.Install(ns.network.NodeContainer(stas.Get(0)))
apps.Start(ns.core.Seconds(0.5))
apps.Stop(ns.core.Seconds(43.0))
ns.core.Simulator.Stop(ns.core.Seconds(44.0))
# Config::Connect("/NodeList/*/DeviceList/*/Tx", MakeCallback(&DevTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Rx", MakeCallback(&DevRxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxOk", MakeCallback(&PhyRxOkTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxError", MakeCallback(&PhyRxErrorTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/Tx", MakeCallback(&PhyTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/State", MakeCallback(&PhyStateTrace));
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
gpl-2.0
|
jeremycline/pulp
|
server/test/unit/server/db/model/test_repository.py
|
14
|
2436
|
import unittest
import mock
from pulp.server.db.model.repository import RepoContentUnit
REPOSITORY = 'pulp.server.db.model.repository'
class TestRepoContentUnit(unittest.TestCase):
def setUp(self):
self.unit = RepoContentUnit('repo1', 'unit1', 'rpm')
def test_utc_in_iso8601(self):
# make sure the ISO8601 serialization includes the UTC timezone
self.assertTrue(
self.unit.created.endswith('Z') or
self.unit.created.endswith('+00:00'))
class TestRepoContentUnitInit(unittest.TestCase):
def setUp(self):
self.patch_a = mock.patch(REPOSITORY + '.Model.__init__')
self.mock_Model__init__ = self.patch_a.start()
self.patch_b = mock.patch(REPOSITORY + '.dateutils')
self.mock_dateutils = self.patch_b.start()
self.mock_repo_id = mock.Mock()
self.mock_unit_id = mock.Mock()
self.mock_unit_type_id = mock.Mock()
self.repo_content_unit = RepoContentUnit(self.mock_repo_id, self.mock_unit_id,
self.mock_unit_type_id)
def tearDown(self):
self.patch_a.stop()
self.patch_b.stop()
def test_repo_content_unit___init___calls_super___init__(self):
self.mock_Model__init__.assert_called_once_with()
def test_repo_content_unit___init___stores_repo_id(self):
self.assertTrue(self.repo_content_unit.repo_id is self.mock_repo_id)
def test_repo_content_unit___init___stores_unit_id(self):
self.assertTrue(self.repo_content_unit.unit_id is self.mock_unit_id)
def test_repo_content_unit___init___stores_unit_type_id(self):
self.assertTrue(self.repo_content_unit.unit_type_id is self.mock_unit_type_id)
def test_repo_content_unit___init___generates_8601_utc_timestamp(self):
self.mock_dateutils.now_utc_timestamp.assert_called_once_with()
utc_timestamp = self.mock_dateutils.now_utc_timestamp.return_value
self.mock_dateutils.format_iso8601_utc_timestamp.assert_called_once_with(utc_timestamp)
def test_repo_content_unit___init___stores_created(self):
created = self.mock_dateutils.format_iso8601_utc_timestamp.return_value
self.assertTrue(self.repo_content_unit.created is created)
def test_repo_content_unit___init___stores_updated_equal_to_created(self):
self.assertTrue(self.repo_content_unit.created is self.repo_content_unit.updated)
|
gpl-2.0
|
tboyce021/home-assistant
|
tests/components/atag/__init__.py
|
9
|
2495
|
"""Tests for the Atag integration."""
from homeassistant.components.atag import DOMAIN
from homeassistant.const import CONF_EMAIL, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
USER_INPUT = {
CONF_HOST: "127.0.0.1",
CONF_EMAIL: "[email protected]",
CONF_PORT: 10000,
}
UID = "xxxx-xxxx-xxxx_xx-xx-xxx-xxx"
PAIR_REPLY = {"pair_reply": {"status": {"device_id": UID}, "acc_status": 2}}
UPDATE_REPLY = {"update_reply": {"status": {"device_id": UID}, "acc_status": 2}}
RECEIVE_REPLY = {
"retrieve_reply": {
"status": {"device_id": UID},
"report": {
"burning_hours": 1000,
"room_temp": 20,
"outside_temp": 15,
"dhw_water_temp": 30,
"ch_water_temp": 40,
"ch_water_pres": 1.8,
"ch_return_temp": 35,
"boiler_status": 0,
"tout_avg": 12,
"details": {"rel_mod_level": 0},
},
"control": {
"ch_control_mode": 0,
"ch_mode": 1,
"ch_mode_duration": 0,
"ch_mode_temp": 12,
"dhw_temp_setp": 40,
"dhw_mode": 1,
"dhw_mode_temp": 150,
"weather_status": 8,
},
"configuration": {
"download_url": "http://firmware.atag-one.com:80/R58",
"temp_unit": 0,
"dhw_max_set": 65,
"dhw_min_set": 40,
},
"acc_status": 2,
}
}
async def init_integration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
rgbw: bool = False,
skip_setup: bool = False,
) -> MockConfigEntry:
"""Set up the Atag integration in Home Assistant."""
aioclient_mock.post(
"http://127.0.0.1:10000/retrieve",
json=RECEIVE_REPLY,
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.post(
"http://127.0.0.1:10000/update",
json=UPDATE_REPLY,
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.post(
"http://127.0.0.1:10000/pair",
json=PAIR_REPLY,
headers={"Content-Type": CONTENT_TYPE_JSON},
)
entry = MockConfigEntry(domain=DOMAIN, data=USER_INPUT)
entry.add_to_hass(hass)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
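# Illustrative use inside a test (a hypothetical test body; ``hass`` and
# ``aioclient_mock`` are the fixtures this helper expects):
#     entry = await init_integration(hass, aioclient_mock)
#     assert entry.data == USER_INPUT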
|
apache-2.0
|
ikmaak/Printrun
|
printrun/injectgcode.py
|
25
|
1922
|
# This file is part of the Printrun suite.
#
# Printrun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Printrun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Printrun. If not, see <http://www.gnu.org/licenses/>.
import logging
from .gui.widgets import MacroEditor
from .utils import install_locale
install_locale('pronterface')
def injector(gcode, viz_layer, layer_idx):
cb = lambda toadd: inject(gcode, viz_layer, layer_idx, toadd)
z = gcode.all_layers[layer_idx].z
z = z if z is not None else 0
MacroEditor(_("Inject G-Code at layer %d (Z = %.03f)") % (viz_layer, z), "", cb, True)
def injector_edit(gcode, viz_layer, layer_idx):
cb = lambda toadd: rewritelayer(gcode, viz_layer, layer_idx, toadd)
layer = gcode.all_layers[layer_idx]
z = layer.z
z = z if z is not None else 0
lines = [line.raw for line in layer]
MacroEditor(_("Edit G-Code of layer %d (Z = %.03f)") % (viz_layer, z), lines, cb, True)
def inject(gcode, viz_layer, layer_idx, toadd):
# TODO: save modified gcode after injection ?
nlines = len(gcode.prepend_to_layer(toadd, layer_idx))
logging.info(_("Successfully injected %d lines at beginning of layer %d") % (nlines, viz_layer))
def rewritelayer(gcode, viz_layer, layer_idx, toadd):
# TODO: save modified gcode after edit ?
nlines = len(gcode.rewrite_layer(toadd, layer_idx))
logging.info(_("Successfully edited layer %d (which now contains %d lines)") % (viz_layer, nlines))
|
gpl-3.0
|
eugene1g/phantomjs
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/zipfileset_mock.py
|
167
|
2166
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def make_factory(ziphashes):
"""ZipFileSet factory routine that looks up zipfiles in a dict;
each zipfile should also be a dict of member names -> contents."""
class MockZipFileSet(object):
def __init__(self, url):
self._url = url
self._ziphash = ziphashes[url]
def namelist(self):
return self._ziphash.keys()
def read(self, member):
return self._ziphash[member]
def close(self):
pass
def maker(url):
# We return None because there's no tempfile to delete.
return (None, MockZipFileSet(url))
return maker
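# Illustrative usage (hypothetical URL and member names):
#     factory = make_factory({'http://example.com/x.zip': {'a.txt': 'hello'}})
#     tempfile, zipset = factory('http://example.com/x.zip')  # tempfile is None
#     assert zipset.read('a.txt') == 'hello'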
|
bsd-3-clause
|
alexander-ae/sistema-de-cotizacion
|
quoman/quotes/pdf.py
|
1
|
9706
|
import os
from io import BytesIO
from django.utils import timezone
from django.template.loader import get_template
from django.template import Context
from django.core.mail import EmailMessage
from django.conf import settings
from reportlab.pdfgen import canvas
from reportlab.platypus import Table
from reportlab.lib.pagesizes import A4
from reportlab.lib.units import cm
from reportlab.platypus import BaseDocTemplate, PageTemplate, Frame, Paragraph, Image
from reportlab.platypus import TableStyle
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
from reportlab.lib.enums import TA_CENTER, TA_RIGHT, TA_LEFT
from reportlab.lib import colors
from quoman.models import Config
from .utils import bold
FONT_FAMILY = 'Helvetica'
class CustomCanvas(canvas.Canvas):
def __init__(self, *args, **kwargs):
canvas.Canvas.__init__(self, *args, **kwargs)
self._saved_page_states = []
self.config, created = Config.objects.get_or_create(pk=1)
def showPage(self):
self._saved_page_states.append(dict(self.__dict__))
self._startPage()
def save(self):
num_pages = len(self._saved_page_states)
for state in self._saved_page_states:
self.__dict__.update(state)
self.translate(0, 29.7 * cm)
self.setFont(FONT_FAMILY, 11)
            # custom methods
self.setAuthor('quoman')
self.draw_page_number(num_pages)
self.draw_header()
self.draw_footer()
canvas.Canvas.showPage(self)
canvas.Canvas.save(self)
def draw_page_number(self, page_count):
self.setFont(FONT_FAMILY, 11)
self.drawRightString(19 * cm, - 28.5 * cm,
'Página {} de {}'.format(self._pageNumber, page_count))
def draw_header(self):
self.setStrokeColorRGB(0, 0.2, 0.4)
self.setFillColorRGB(0.2, 0.2, 0.2)
self.drawString(16 * cm, -1 * cm, self.config.razon_social)
# self.drawInlineImage(settings.INV_LOGO, 1 * cm, -1 * cm, 180, 16, preserveAspectRatio=True)
self.setLineWidth(2)
self.line(0.75 * cm, -1.20 * cm, 20 * cm, -1.20 * cm)
def draw_footer(self):
self.setStrokeColorRGB(0, 0.2, 0.4)
self.setFillColorRGB(0.2, 0.2, 0.2)
self.setLineWidth(2)
self.line(0.75 * cm, -28.00 * cm, 20 * cm, -28 * cm)
fecha_actual = timezone.now().strftime('%d/%m/%Y')
self.drawString(2 * cm, -28.5 * cm, fecha_actual)
def draw_pdf(buffer, cotizacion):
""" Genera el pdf de la cotización """
    # configuration
config, created = Config.objects.get_or_create(pk=1)
# pdf
doc = BaseDocTemplate(buffer, pagesize=A4,
rightMargin=72,
leftMargin=72,
topMargin=72,
bottomMargin=72,
title=cotizacion.codigo)
# doc.canv.setTitle()
pHeight, pWidth = doc.pagesize
myFrame = Frame(0, 0, pHeight, pWidth, 50, 60, 50, 50, id='myFrame')
mainTemplate = PageTemplate(id='mainTemplate', frames=[myFrame])
doc.addPageTemplates([mainTemplate])
elements = []
styles = getSampleStyleSheet()
styleN = styles['Normal']
    # header
logo = Image(os.path.join(settings.MEDIA_ROOT, config.logo.path))
elements.append(logo)
header_info = [
        ('Domicilio Fiscal', config.direccion),
('RUC', config.ruc),
('Teléfono', cotizacion.propietario_id.userprofile.telefono),
('Email', cotizacion.propietario_id.userprofile.email),
('Representante asignado', cotizacion.propietario_id.userprofile.full_name())
]
style_header = ParagraphStyle(name='Normal',
fontName=FONT_FAMILY,
fontSize=10,
leading=12,
spaceAfter=4,
spaceBefore=8)
for header_item in header_info:
elements.append(Paragraph('<b>{}: </b> {}'.format(header_item[0], header_item[1], ), style_header))
style_title = ParagraphStyle(name='header',
fontName=FONT_FAMILY,
fontSize=16,
spaceAfter=20,
spaceBefore=20,
alignment=TA_CENTER)
elements.append(Paragraph('<b>Cotización {}</b>'.format(cotizacion.codigo), style_title))
    # company data
info_empresa = [
('Empresa:', cotizacion.empresa_razon_social, 'RUC:', cotizacion.ruc),
('Dirección:', cotizacion.empresa_direccion, 'Fecha:', cotizacion.fecha_de_creacion.strftime('%d/%m/%Y')),
('Atención:', cotizacion.representante, 'Teléfono:', cotizacion.empresa_telefono),
('Tiempo de Entrega:', cotizacion.tiempo_de_entrega, 'Método de pago:', cotizacion.forma_de_pago)
]
data_empresa = []
styleLeft = ParagraphStyle(name='Normal',
fontName=FONT_FAMILY,
fontSize=10,
leading=12,
alignment=TA_LEFT,
wordWrap='CJK'
)
for line in info_empresa:
_p1 = Paragraph(bold(line[0]), styleN)
_p2 = Paragraph(line[1], styleLeft)
_p3 = Paragraph(bold(line[2]), styleN)
_p4 = Paragraph(line[3], styleLeft)
data_empresa.append(
(_p1, _p2, _p3, _p4)
)
tableEmpresa = Table(data_empresa, colWidths=[3 * cm, 6.5 * cm, 3.5 * cm, 4.5 * cm], spaceAfter=20, hAlign='LEFT')
styleTableEmpresa = TableStyle([
('ALIGN', (1, 0), (1, -1), 'LEFT'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
('BOTTOMPADDING', (0, 0), (-1, -1), 0),
('TOPPADDING', (0, 0), (-1, -1), 3),
])
tableEmpresa.setStyle(styleTableEmpresa)
elements.append(tableEmpresa)
    # products to quote
s = getSampleStyleSheet()
s = s['BodyText']
s.wordWrap = 'CJK'
styleR = styleN
styleR.alignment = TA_RIGHT
data = [['Item', 'Producto', 'Cantidad', 'Precio x unidad', 'Subtotal']]
for i, producto in enumerate(cotizacion.productos_a_cotizar.all()):
data.append([
str(i + 1),
Paragraph(bold(producto.nombre), s),
producto.cantidad,
'S/ {}'.format(producto.precio),
'S/ {}'.format(producto.subtotal)
])
data.append([
'',
Paragraph(producto.detalle, s),
'',
'',
''
])
data.append(['', 'Nota: Los precios no incluyen IGV', '', Paragraph('<b>SubTotal</b>', styleR),
'S/ {}'.format(cotizacion.calcula_subtotal_productos())])
data.append(
['', '', '', Paragraph('<b>Envío</b>', styleR), 'S/ {}'.format(cotizacion.costo_de_envio)])
data.append(
['', '', 'IGV', Paragraph('<b>{} %</b>'.format(cotizacion.igv), styleR),
'S/ {:.2f}'.format(cotizacion.calcula_igv())])
data.append(['', '', '', Paragraph('<b>Total</b>', styleR), 'S/ {}'.format(cotizacion.total)])
tableThatSplitsOverPages = Table(data, repeatRows=1, colWidths=[1 * cm, 8 * cm, 2 * cm, 3.5 * cm, 3 * cm])
tableThatSplitsOverPages.hAlign = 'LEFT'
tblStyle = TableStyle([('TEXTCOLOR', (0, 0), (-1, -1), colors.black),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
('ALIGN', (2, 0), (2, -1), 'CENTER'),
('ALIGN', (3, 0), (3, -1), 'RIGHT'),
('ALIGN', (4, 0), (4, -1), 'RIGHT'),
('LINEBELOW', (0, 0), (-1, -1), 1, colors.black),
('BOX', (0, 0), (-1, -1), 1, colors.black),
('GRID', (0, 0), (-1, -5), 1, colors.black),
('BACKGROUND', (0, 0), (-1, 0), colors.lightblue),
('BACKGROUND', (0, 1), (-1, -1), colors.white)
])
tableThatSplitsOverPages.setStyle(tblStyle)
elements.append(tableThatSplitsOverPages)
    # closing text
if cotizacion.aplica_detraccion:
_style = styleN
_style.fontSize = 10
_style.textColor = colors.HexColor(0x666666)
_style.spaceBefore = 20
elements.append(Paragraph(config.detraccion_texto, _style))
doc.build(elements, canvasmaker=CustomCanvas)
return doc
def envia_cotizacion(cotizacion):
config, created = Config.objects.get_or_create(pk=1)
htmly = get_template('quotes/email-quote.html')
d = Context({
'config': config,
'cotizacion': cotizacion,
'SITE_URL': settings.SITE_URL
})
html_content = htmly.render(d)
asunto = u'Cotización {}'.format(cotizacion.codigo)
mail = '{0}<{1}>'.format(settings.PROJECT_NAME, settings.DEFAULT_FROM_EMAIL)
emails_destino = cotizacion.quotereceiver_set.all().values_list('email', flat=True)
msg = EmailMessage(asunto, html_content, mail, emails_destino)
msg.content_subtype = "html"
buffer = BytesIO()
draw_pdf(buffer, cotizacion)
msg.attach('cotizacion.pdf', buffer.getvalue(), 'application/pdf')
try:
msg.send()
return {
'status_code': 200,
'str_status': 'success',
'mensaje': 'El correo ha sido enviado'
}
except:
return {
'status_code': 503,
'str_status': 'error',
'mensaje': 'El servicio de envío de correos tiene problemas'
}
|
gpl-3.0
|
stdweird/aquilon
|
lib/python2.6/aquilon/worker/commands/make_aquilon.py
|
2
|
1052
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq make aquilon`."""
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.make import CommandMake
class CommandMakeAquilon(CommandMake):
def render(self, **arguments):
arguments['archetype'] = 'aquilon'
return CommandMake.render(self, **arguments)
|
apache-2.0
|
seawaywen/vim-config
|
bundle/python-mode/pymode/libs2/rope/contrib/fixsyntax.py
|
17
|
6737
|
import rope.base.codeanalyze
import rope.base.evaluate
from rope.base import worder, exceptions, utils
from rope.base.codeanalyze import ArrayLinesAdapter, LogicalLineFinder
class FixSyntax(object):
def __init__(self, pycore, code, resource, maxfixes=1):
self.pycore = pycore
self.code = code
self.resource = resource
self.maxfixes = maxfixes
@utils.saveit
def get_pymodule(self):
"""Get a `PyModule`"""
msg = None
code = self.code
tries = 0
while True:
try:
if tries == 0 and self.resource is not None and \
self.resource.read() == code:
return self.pycore.resource_to_pyobject(self.resource,
force_errors=True)
return self.pycore.get_string_module(
code, resource=self.resource, force_errors=True)
except exceptions.ModuleSyntaxError, e:
if msg is None:
msg = '%s:%s %s' % (e.filename, e.lineno, e.message_)
if tries < self.maxfixes:
tries += 1
self.commenter.comment(e.lineno)
code = '\n'.join(self.commenter.lines)
else:
raise exceptions.ModuleSyntaxError(e.filename, e.lineno, msg)
@property
@utils.saveit
def commenter(self):
return _Commenter(self.code)
def pyname_at(self, offset):
pymodule = self.get_pymodule()
def old_pyname():
word_finder = worder.Worder(self.code, True)
expression = word_finder.get_primary_at(offset)
expression = expression.replace('\\\n', ' ').replace('\n', ' ')
lineno = self.code.count('\n', 0, offset)
scope = pymodule.get_scope().get_inner_scope_for_line(lineno)
return rope.base.evaluate.eval_str(scope, expression)
new_code = pymodule.source_code
def new_pyname():
newoffset = self.commenter.transfered_offset(offset)
return rope.base.evaluate.eval_location(pymodule, newoffset)
if new_code.startswith(self.code[:offset + 1]):
return new_pyname()
result = old_pyname()
if result is None:
return new_pyname()
return result
class _Commenter(object):
def __init__(self, code):
self.code = code
self.lines = self.code.split('\n')
self.lines.append('\n')
self.origs = range(len(self.lines) + 1)
self.diffs = [0] * (len(self.lines) + 1)
def comment(self, lineno):
start = _logical_start(self.lines, lineno, check_prev=True) - 1
# using self._get_stmt_end() instead of self._get_block_end()
# to reduce the number of lines that get commented out
end = self._get_stmt_end(start)
indents = _get_line_indents(self.lines[start])
if 0 < start:
last_lineno = self._last_non_blank(start - 1)
last_line = self.lines[last_lineno]
if last_line.rstrip().endswith(':'):
indents = _get_line_indents(last_line) + 4
self._set(start, ' ' * indents + 'pass')
for line in range(start + 1, end + 1):
self._set(line, self.lines[start])
self._fix_incomplete_try_blocks(lineno, indents)
def transfered_offset(self, offset):
lineno = self.code.count('\n', 0, offset)
diff = sum(self.diffs[:lineno])
return offset + diff
def _last_non_blank(self, start):
while start > 0 and self.lines[start].strip() == '':
start -= 1
return start
def _get_block_end(self, lineno):
end_line = lineno
base_indents = _get_line_indents(self.lines[lineno])
for i in range(lineno + 1, len(self.lines)):
if _get_line_indents(self.lines[i]) >= base_indents:
end_line = i
else:
break
return end_line
def _get_stmt_end(self, lineno):
end_line = lineno
base_indents = _get_line_indents(self.lines[lineno])
for i in range(lineno + 1, len(self.lines)):
if _get_line_indents(self.lines[i]) <= base_indents:
return i - 1
return lineno
def _fix_incomplete_try_blocks(self, lineno, indents):
block_start = lineno
last_indents = current_indents = indents
while block_start > 0:
block_start = rope.base.codeanalyze.get_block_start(
ArrayLinesAdapter(self.lines), block_start) - 1
if self.lines[block_start].strip().startswith('try:'):
indents = _get_line_indents(self.lines[block_start])
if indents > last_indents:
continue
last_indents = indents
block_end = self._find_matching_deindent(block_start)
line = self.lines[block_end].strip()
if not (line.startswith('finally:') or
line.startswith('except ') or
line.startswith('except:')):
self._insert(block_end, ' ' * indents + 'finally:')
self._insert(block_end + 1, ' ' * indents + ' pass')
def _find_matching_deindent(self, line_number):
indents = _get_line_indents(self.lines[line_number])
current_line = line_number + 1
while current_line < len(self.lines):
line = self.lines[current_line]
if not line.strip().startswith('#') and not line.strip() == '':
# HACK: We should have used logical lines here
if _get_line_indents(self.lines[current_line]) <= indents:
return current_line
current_line += 1
return len(self.lines) - 1
def _set(self, lineno, line):
self.diffs[self.origs[lineno]] += len(line) - len(self.lines[lineno])
self.lines[lineno] = line
def _insert(self, lineno, line):
self.diffs[self.origs[lineno]] += len(line) + 1
self.origs.insert(lineno, self.origs[lineno])
self.lines.insert(lineno, line)
def _logical_start(lines, lineno, check_prev=False):
logical_finder = LogicalLineFinder(ArrayLinesAdapter(lines))
if check_prev:
prev = lineno - 1
while prev > 0:
start, end = logical_finder.logical_line_in(prev)
if end is None or start <= lineno < end:
return start
if start <= prev:
break
prev -= 1
return logical_finder.logical_line_in(lineno)[0]
def _get_line_indents(line):
return rope.base.codeanalyze.count_line_indents(line)
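# A minimal, self-contained sketch (assumed standalone use; it exercises only
# the pure bookkeeping above, not the rope-dependent comment() path): once a
# broken line is rewritten, transfered_offset() maps offsets in the original
# source onto offsets in the fixed-up source.
if __name__ == '__main__':
    fixer = _Commenter('a = (1,\nb = 2\n')
    fixer._set(0, 'pass')               # stand-in for commenting out line 1
    # 'b' sat at offset 8; the rewrite shrank line 1 by 3 characters
    print fixer.transfered_offset(8)    # -> 5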
|
apache-2.0
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-sql/azure/mgmt/sql/models/metric_value.py
|
2
|
2022
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class MetricValue(Model):
"""Represents database metrics.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar count: The number of values for the metric.
:vartype count: float
:ivar average: The average value of the metric.
:vartype average: float
:ivar maximum: The max value of the metric.
:vartype maximum: float
:ivar minimum: The min value of the metric.
:vartype minimum: float
:ivar timestamp: The metric timestamp (ISO-8601 format).
:vartype timestamp: datetime
:ivar total: The total value of the metric.
:vartype total: float
"""
_validation = {
'count': {'readonly': True},
'average': {'readonly': True},
'maximum': {'readonly': True},
'minimum': {'readonly': True},
'timestamp': {'readonly': True},
'total': {'readonly': True},
}
_attribute_map = {
'count': {'key': 'count', 'type': 'float'},
'average': {'key': 'average', 'type': 'float'},
'maximum': {'key': 'maximum', 'type': 'float'},
'minimum': {'key': 'minimum', 'type': 'float'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'total': {'key': 'total', 'type': 'float'},
}
def __init__(self):
super(MetricValue, self).__init__()
self.count = None
self.average = None
self.maximum = None
self.minimum = None
self.timestamp = None
self.total = None
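# A minimal sketch (assumptions: msrest is installed, and instances normally
# arrive via the generated client rather than being built by hand) of how a
# read-only model like this gets populated -- by deserializing a payload.
if __name__ == '__main__':
    from msrest import Deserializer
    deserialize = Deserializer({'MetricValue': MetricValue})
    metric = deserialize('MetricValue', {
        'count': 4.0, 'average': 2.5, 'maximum': 5.0, 'minimum': 1.0,
        'timestamp': '2017-01-01T00:00:00Z', 'total': 10.0,
    })
    print("avg=%s ts=%s" % (metric.average, metric.timestamp))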
|
mit
|
Xarthisius/girder
|
plugins/mongo_search/plugin_tests/search_test.py
|
2
|
6228
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import bson.json_util
from tests import base
from girder.constants import AccessType
from girder.models.collection import Collection
from girder.models.folder import Folder
from girder.models.item import Item
from girder.models.user import User
def setUpModule():
base.enabledPlugins.append('mongo_search')
base.startServer()
def tearDownModule():
base.stopServer()
class MongoSearchTestCase(base.TestCase):
def testMongoSearch(self):
"""
Test resource/mongo_search endpoint
"""
# Create a bunch of searchable documents
admin = {
'email': '[email protected]',
'login': 'adminlogin',
'firstName': 'Admin',
'lastName': 'Last',
'password': 'adminpassword',
'admin': True
}
admin = User().createUser(**admin)
user = {
'email': '[email protected]',
'login': 'goodlogin',
'firstName': 'First',
'lastName': 'Last',
'password': 'goodpassword',
'admin': False
}
user = User().createUser(**user)
coll1 = {
'name': 'Test Collection',
'description': 'magic words. And more magic.',
'public': True,
'creator': admin
}
coll1 = Collection().createCollection(**coll1)
coll2 = {
'name': 'Magic collection',
'description': 'private',
'public': False,
'creator': admin
}
coll2 = Collection().createCollection(**coll2)
Collection().setUserAccess(coll2, user, level=AccessType.READ, save=True)
folder1 = {
'parent': coll1,
'parentType': 'collection',
'name': 'Public test folder'
}
folder1 = Folder().createFolder(**folder1)
Folder().setUserAccess(folder1, user, level=AccessType.READ, save=False)
Folder().setPublic(folder1, True, save=True)
folder2 = {
'parent': coll2,
'parentType': 'collection',
'name': 'Private test folder'
}
folder2 = Folder().createFolder(**folder2)
Folder().setUserAccess(folder2, user, level=AccessType.NONE, save=True)
item1 = {
'name': 'Public object',
'creator': admin,
'folder': folder1
}
item1 = Item().createItem(**item1)
item2 = {
'name': 'Secret object',
'creator': admin,
'folder': folder2
}
item2 = Item().createItem(**item2)
# Grab the default user folders
resp = self.request(
path='/folder', method='GET', user=user, params={
'parentType': 'user',
'parentId': user['_id'],
'sort': 'name',
'sortdir': 1
})
# First test all of the required parameters.
self.ensureRequiredParams(
path='/resource/search', required=['q', 'types'])
# Now test parameter validation
resp = self.request(path='/resource/mongo_search', params={
'q': 'query',
'type': 'wrong type'
})
self.assertStatus(resp, 400)
self.assertEqual('Invalid resource type: wrong type', resp.json['message'])
# Test validation of JSON input
resp = self.request(path='/resource/mongo_search', params={
'q': 'not_json',
'type': 'folder'
})
self.assertStatus(resp, 400)
self.assertEqual(resp.json['message'], 'The query parameter must be a JSON object.')
# Ensure searching respects permissions
resp = self.request(path='/resource/mongo_search', params={
'q': bson.json_util.dumps({'name': 'Private'}),
'type': 'folder'
})
self.assertStatusOk(resp)
self.assertEqual(resp.json, [])
resp = self.request(path='/resource/mongo_search', params={
'q': bson.json_util.dumps({'name': 'Private'}),
'type': 'folder'
}, user=user)
self.assertStatusOk(resp)
self.assertEqual(len(resp.json), 1)
self.assertHasKeys(resp.json[0], ('_id', 'name', 'description'))
self.assertEqual(len(resp.json[0]), 3)
# Test item search
resp = self.request(path='/resource/mongo_search', params={
'q': bson.json_util.dumps({'folderId': folder1['_id']}),
'type': 'item'
})
self.assertStatusOk(resp)
self.assertEqual(resp.json, [{
'_id': str(item1['_id']),
'name': 'Public object',
'description': '',
'folderId': str(folder1['_id'])
}])
resp = self.request(path='/resource/mongo_search', params={
'q': bson.json_util.dumps({'folderId': folder2['_id']}),
'type': 'item'
})
self.assertStatusOk(resp)
self.assertEqual(resp.json, [])
resp = self.request(path='/resource/mongo_search', params={
'q': bson.json_util.dumps({'folderId': folder2['_id']}),
'type': 'item'
}, user=admin)
self.assertStatusOk(resp)
self.assertEqual(resp.json, [{
'_id': str(item2['_id']),
'name': 'Secret object',
'description': '',
'folderId': str(folder2['_id'])
}])
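# Request-shape sketch (distilled from the calls above, not an extra test):
# `q` is a JSON-encoded MongoDB filter and `type` names the resource
# collection, e.g.
#   GET /api/v1/resource/mongo_search?q={"folderId": "<id>"}&type=item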
|
apache-2.0
|
mortada/tensorflow
|
tensorflow/contrib/keras/python/keras/datasets/reuters.py
|
15
|
4442
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Reuters newswire topic classification dataset.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import numpy as np
from six.moves import zip # pylint: disable=redefined-builtin
from tensorflow.contrib.keras.python.keras.utils.data_utils import get_file
def load_data(path='reuters.npz',
num_words=None,
skip_top=0,
maxlen=None,
test_split=0.2,
seed=113,
start_char=1,
oov_char=2,
index_from=3):
"""Loads the Reuters newswire classification dataset.
Arguments:
path: where to cache the data (relative to `~/.keras/dataset`).
num_words: max number of words to include. Words are ranked
by how often they occur (in the training set) and only
the most frequent words are kept
skip_top: skip the top N most frequently occurring words
(which may not be informative).
maxlen: truncate sequences after this length.
test_split: Fraction of the dataset to be used as test data.
seed: random seed for sample shuffling.
start_char: The start of a sequence will be marked with this character.
Set to 1 because 0 is usually the padding character.
oov_char: words that were cut out because of the `num_words`
or `skip_top` limit will be replaced with this character.
index_from: index actual words with this index and higher.
Returns:
Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
Note that the 'out of vocabulary' character is only used for
words that were present in the training set but are not included
because they're not making the `num_words` cut here.
Words that were not seen in the training set but are in the test set
have simply been skipped.
"""
path = get_file(
path, origin='https://s3.amazonaws.com/text-datasets/reuters.npz')
npzfile = np.load(path)
xs = npzfile['x']
labels = npzfile['y']
npzfile.close()
np.random.seed(seed)
np.random.shuffle(xs)
np.random.seed(seed)
np.random.shuffle(labels)
if start_char is not None:
xs = [[start_char] + [w + index_from for w in x] for x in xs]
elif index_from:
xs = [[w + index_from for w in x] for x in xs]
if maxlen:
new_xs = []
new_labels = []
for x, y in zip(xs, labels):
if len(x) < maxlen:
new_xs.append(x)
new_labels.append(y)
xs = new_xs
labels = new_labels
if not num_words:
num_words = max([max(x) for x in xs])
# by convention, use 2 as OOV word
# reserve 'index_from' (=3 by default) characters:
# 0 (padding), 1 (start), 2 (OOV)
if oov_char is not None:
xs = [[oov_char if (w >= num_words or w < skip_top) else w for w in x]
for x in xs]
else:
new_xs = []
for x in xs:
nx = []
for w in x:
if w >= num_words or w < skip_top:
nx.append(w)
new_xs.append(nx)
xs = new_xs
x_train = np.array(xs[:int(len(xs) * (1 - test_split))])
y_train = np.array(labels[:int(len(xs) * (1 - test_split))])
x_test = np.array(xs[int(len(xs) * (1 - test_split)):])
y_test = np.array(labels[int(len(xs) * (1 - test_split)):])
return (x_train, y_train), (x_test, y_test)
def get_word_index(path='reuters_word_index.json'):
"""Retrieves the dictionary mapping word indices back to words.
Arguments:
path: where to cache the data (relative to `~/.keras/dataset`).
Returns:
The word index dictionary.
"""
path = get_file(
path,
origin='https://s3.amazonaws.com/text-datasets/reuters_word_index.json')
f = open(path)
data = json.load(f)
f.close()
return data
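# A minimal usage sketch (assumes network access to the S3 URLs above):
# decode the first training newswire back into words. Indices 0-2 are
# reserved for padding/start/OOV, hence the shift by `index_from` (3).
if __name__ == '__main__':
    (x_train, y_train), (x_test, y_test) = load_data(num_words=10000)
    word_index = get_word_index()
    index_to_word = {i + 3: w for w, i in word_index.items()}
    print(' '.join(index_to_word.get(i, '?') for i in x_train[0]))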
|
apache-2.0
|
wido/cloudstack
|
test/integration/component/test_network_offering.py
|
6
|
67587
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 tests for network offering
"""
#Import Local Modules
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
#from marvin.cloudstackAPI import *
from marvin.lib.utils import (cleanup_resources,
validateList)
from marvin.lib.base import (VirtualMachine,
Account,
Network,
LoadBalancerRule,
PublicIPAddress,
FireWallRule,
NATRule,
Vpn,
ServiceOffering,
NetworkOffering)
from marvin.lib.common import (get_domain,
get_zone,
get_template)
from marvin.codes import *
class Services:
"""Test network offering Services
"""
def __init__(self):
self.services = {
"account": {
"email": "[email protected]",
"firstname": "Test",
"lastname": "User",
"username": "test",
# Random characters are appended for unique
# username
"password": "password",
},
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100, # in MHz
"memory": 128, # In MBs
},
"network_offering": {
"name": 'Network offering-VR services',
"displaytext": 'Network offering-VR services',
"guestiptype": 'Isolated',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Firewall,Lb,UserData,StaticNat',
"traffictype": 'GUEST',
"availability": 'Optional',
"serviceProviderList": {
"Dhcp": 'VirtualRouter',
"Dns": 'VirtualRouter',
"SourceNat": 'VirtualRouter',
"PortForwarding": 'VirtualRouter',
"Vpn": 'VirtualRouter',
"Firewall": 'VirtualRouter',
"Lb": 'VirtualRouter',
"UserData": 'VirtualRouter',
"StaticNat": 'VirtualRouter',
},
},
"network_offering_netscaler": {
"name": 'Network offering-netscaler',
"displaytext": 'Network offering-netscaler',
"guestiptype": 'Isolated',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Firewall,Lb,UserData,StaticNat',
"traffictype": 'GUEST',
"availability": 'Optional',
"serviceProviderList": {
"Dhcp": 'VirtualRouter',
"Dns": 'VirtualRouter',
"SourceNat": 'VirtualRouter',
"PortForwarding": 'VirtualRouter',
"Vpn": 'VirtualRouter',
"Firewall": 'VirtualRouter',
"Lb": 'Netscaler',
"UserData": 'VirtualRouter',
"StaticNat": 'VirtualRouter',
},
},
"network_offering_sourcenat" : {
"name": 'Network offering - SourceNat only',
"displaytext": 'Network offering - SourceNat only',
"guestiptype": 'Isolated',
"supportedservices": 'SourceNat,Dhcp,Dns',
"traffictype": 'GUEST',
"availability": 'Optional',
"serviceProviderList": {
"Dhcp": 'VirtualRouter',
"Dns": 'VirtualRouter',
"SourceNat": 'VirtualRouter',
},
},
"network_offering_withoutDNS" : {
"name": 'NW offering without DNS',
"displaytext": 'NW offering without DNS',
"guestiptype": 'Isolated',
"supportedservices": 'SourceNat,StaticNat,Dhcp',
"traffictype": 'GUEST',
"availability": 'Optional',
"serviceProviderList": {
"Dhcp": 'VirtualRouter',
"SourceNat": 'VirtualRouter',
"StaticNat": 'VirtualRouter',
},
},
"network": {
"name": "Test Network",
"displaytext": "Test Network",
},
"lbrule": {
"name": "SSH",
"alg": "leastconn",
# Algorithm used for load balancing
"privateport": 22,
"publicport": 2222,
"openfirewall": False,
},
"lbrule_port_2221": {
"name": "SSH",
"alg": "leastconn",
# Algorithm used for load balancing
"privateport": 22,
"publicport": 2221,
"openfirewall": False,
},
"natrule": {
"privateport": 22,
"publicport": 22,
"protocol": "TCP"
},
"natrule_port_66": {
"privateport": 22,
"publicport": 66,
"protocol": "TCP"
},
"fw_rule": {
"startport": 1,
"endport": 6000,
"cidr": '55.55.0.0/11',
# Any network (For creating FW rule)
},
"virtual_machine": {
"displayname": "Test VM",
"username": "root",
"password": "password",
"ssh_port": 22,
"hypervisor": 'XenServer',
# Hypervisor type should be same as
# hypervisor type of cluster
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"ostype": 'CentOS 5.3 (64-bit)',
# Cent OS 5.3 (64 bit)
"sleep": 60,
"timeout": 10,
}
class TestNOVirtualRouter(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestNOVirtualRouter, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
cls.services['mode'] = cls.zone.networktype
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.api_client,
cls.services["service_offering"]
)
cls._cleanup = [
cls.service_offering,
]
return
@classmethod
def tearDownClass(cls):
try:
#Cleanup resources used
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.account = Account.create(
self.apiclient,
self.services["account"],
admin=True,
domainid=self.domain.id
)
self.cleanup = []
return
def tearDown(self):
try:
self.account.delete(self.apiclient)
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(tags=["advanced"], required_hardware="false")
def test_01_network_off_without_conserve_mode(self):
"""Test Network offering with Conserve mode off and VR - All services
"""
# Validate the following
# 1. Create a Network from the above network offering and deploy a VM.
# 2. On the source NAT ipaddress, we should NOT be allowed to add LB rules
# 3. On the source NAT ipaddress, we should NOT be allowed to add PF rules
# 4. On an ipaddress that has PF rules, we should NOT be allowed to add LB rules.
# 5. On an ipaddress that has LB rules, we should NOT allow PF rules to be programmed.
# 6. We should be allowed to program multiple PF rules on the same IP address on different public ports.
# 7. We should be allowed to program multiple LB rules on the same IP address for different public port ranges.
# 8. On the source NAT ipaddress, we should be allowed to enable VPN.
# 9. On the source NAT ipaddress, we should be allowed to add firewall rules.
# Create a network offering with all virtual router services enabled
self.debug(
"Creating n/w offering with all services in VR & conserve mode:off"
)
self.network_offering = NetworkOffering.create(
self.api_client,
self.services["network_offering"],
conservemode=False
)
self.cleanup.append(self.network_offering)
self.debug("Created n/w offering with ID: %s" %
self.network_offering.id)
# Enable Network offering
self.network_offering.update(self.apiclient, state='Enabled')
# Creating network using the network offering created
self.debug("Creating network with network offering: %s" %
self.network_offering.id)
self.network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=self.network_offering.id,
zoneid=self.zone.id
)
self.debug("Created network with ID: %s" % self.network.id)
self.debug("Deploying VM in account: %s" % self.account.name)
# Spawn an instance in that network
virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(self.network.id)]
)
self.debug("Deployed VM in network: %s" % self.network.id)
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=self.network.id,
account=self.account.name,
domainid=self.account.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug("Trying to create LB rule on source NAT IP: %s" %
src_nat.ipaddress)
# Create Load Balancer rule with source NAT
with self.assertRaises(Exception):
LoadBalancerRule.create(
self.apiclient,
self.services["lbrule"],
ipaddressid=src_nat.id,
accountid=self.account.name
)
self.debug(
"Trying to create a port forwarding rule in source NAT: %s" %
src_nat.ipaddress)
#Create NAT rule
with self.assertRaises(Exception):
NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule"],
ipaddressid=src_nat.id
)
self.debug("Associating public IP for network: %s" % self.network.id)
ip_with_nat_rule = PublicIPAddress.create(
self.apiclient,
accountid=self.account.name,
zoneid=self.zone.id,
domainid=self.account.domainid,
networkid=self.network.id
)
self.debug("Associated %s with network %s" % (
ip_with_nat_rule.ipaddress,
self.network.id
))
self.debug("Creating PF rule for IP address: %s" %
ip_with_nat_rule.ipaddress)
NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule"],
ipaddressid=ip_with_nat_rule.ipaddress.id
)
self.debug("Trying to create LB rule on IP with NAT: %s" %
ip_with_nat_rule.ipaddress)
# Create Load Balancer rule on IP already having NAT rule
with self.assertRaises(Exception):
LoadBalancerRule.create(
self.apiclient,
self.services["lbrule"],
ipaddressid=ip_with_nat_rule.ipaddress.id,
accountid=self.account.name
)
self.debug("Creating PF rule with public port: 66")
nat_rule = NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule_port_66"],
ipaddressid=ip_with_nat_rule.ipaddress.id
)
# Check if NAT rule created successfully
nat_rules = NATRule.list(
self.apiclient,
id=nat_rule.id
)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT rules should return valid list"
)
self.debug("Associating public IP for network: %s" % self.network.id)
ip_with_lb_rule = PublicIPAddress.create(
self.apiclient,
accountid=self.account.name,
zoneid=self.zone.id,
domainid=self.account.domainid,
networkid=self.network.id
)
self.debug("Associated %s with network %s" % (
ip_with_lb_rule.ipaddress,
self.network.id
))
self.debug("Creating LB rule for IP address: %s" %
ip_with_lb_rule.ipaddress)
LoadBalancerRule.create(
self.apiclient,
self.services["lbrule"],
ipaddressid=ip_with_lb_rule.ipaddress.id,
accountid=self.account.name
)
self.debug("Trying to create PF rule on IP with LB rule: %s" %
ip_with_nat_rule.ipaddress)
with self.assertRaises(Exception):
NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule"],
ipaddressid=ip_with_lb_rule.ipaddress.id
)
self.debug("Creating LB rule with public port: 2221")
lb_rule = LoadBalancerRule.create(
self.apiclient,
self.services["lbrule_port_2221"],
ipaddressid=ip_with_lb_rule.ipaddress.id,
accountid=self.account.name
)
# Check if NAT rule created successfully
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=lb_rule.id
)
self.assertEqual(
isinstance(lb_rules, list),
True,
"List LB rules should return valid list"
)
self.debug("Creating firewall rule on source NAT: %s" %
src_nat.ipaddress)
#Create Firewall rule on source NAT
fw_rule = FireWallRule.create(
self.apiclient,
ipaddressid=src_nat.id,
protocol='TCP',
cidrlist=[self.services["fw_rule"]["cidr"]],
startport=self.services["fw_rule"]["startport"],
endport=self.services["fw_rule"]["endport"]
)
self.debug("Created firewall rule: %s" % fw_rule.id)
fw_rules = FireWallRule.list(
self.apiclient,
id=fw_rule.id
)
self.assertEqual(
isinstance(fw_rules, list),
True,
"List fw rules should return a valid firewall rules"
)
self.assertNotEqual(
len(fw_rules),
0,
"Length of fw rules response should not be zero"
)
return
@attr(tags=["advanced"], required_hardware="false")
def test_02_network_off_with_conserve_mode(self):
"""Test Network offering with Conserve mode ON and VR - All services
"""
# Validate the following
# 1. Create a Network from the above network offering and deploy a VM.
# 2. On the source NAT ipaddress, we should be allowed to add LB rules
# 3. On the source NAT ipaddress, we should be allowed to add PF rules
# 4. On the source NAT ipaddress, we should be allowed to add firewall rules
# 5. On an ipaddress that has LB rules, we should be allowed to program PF rules.
# 6. We should be allowed to program multiple PF rules on the same IP address on different public ports.
# 7. We should be allowed to program multiple LB rules on the same IP address for different public port ranges.
# 8. On the source NAT ipaddress, we should be allowed to enable VPN access.
# Create a network offering with all virtual router services enabled
self.debug(
"Creating n/w offering with all services in VR & conserve mode:off"
)
self.network_offering = NetworkOffering.create(
self.api_client,
self.services["network_offering"],
conservemode=True
)
self.cleanup.append(self.network_offering)
self.debug("Created n/w offering with ID: %s" %
self.network_offering.id)
# Enable Network offering
self.network_offering.update(self.apiclient, state='Enabled')
# Creating network using the network offering created
self.debug("Creating network with network offering: %s" %
self.network_offering.id)
self.network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=self.network_offering.id,
zoneid=self.zone.id
)
self.debug("Created network with ID: %s" % self.network.id)
self.debug("Deploying VM in account: %s" % self.account.name)
# Spawn an instance in that network
virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(self.network.id)]
)
self.debug("Deployed VM in network: %s" % self.network.id)
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=self.network.id,
account=self.account.name,
domainid=self.account.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug("Trying to create LB rule on source NAT IP: %s" %
src_nat.ipaddress)
# Create Load Balancer rule with source NAT
lb_rule = LoadBalancerRule.create(
self.apiclient,
self.services["lbrule"],
ipaddressid=src_nat.id,
accountid=self.account.name
)
self.debug("Created LB rule on source NAT: %s" % src_nat.ipaddress)
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=lb_rule.id
)
self.assertEqual(
isinstance(lb_rules, list),
True,
"List lb rules should return a valid lb rules"
)
self.assertNotEqual(
len(lb_rules),
0,
"Length of response from listLbRules should not be 0"
)
self.debug(
"Trying to create a port forwarding rule in source NAT: %s" %
src_nat.ipaddress)
#Create NAT rule
nat_rule = NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule"],
ipaddressid=src_nat.id
)
self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
nat_rules = NATRule.list(
self.apiclient,
id=nat_rule.id
)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT should return a valid port forwarding rules"
)
self.assertNotEqual(
len(nat_rules),
0,
"Length of response from listLbRules should not be 0"
)
self.debug("Creating firewall rule on source NAT: %s" %
src_nat.ipaddress)
#Create Firewall rule on source NAT
fw_rule = FireWallRule.create(
self.apiclient,
ipaddressid=src_nat.id,
protocol='TCP',
cidrlist=[self.services["fw_rule"]["cidr"]],
startport=self.services["fw_rule"]["startport"],
endport=self.services["fw_rule"]["endport"]
)
self.debug("Created firewall rule: %s" % fw_rule.id)
fw_rules = FireWallRule.list(
self.apiclient,
id=fw_rule.id
)
self.assertEqual(
isinstance(fw_rules, list),
True,
"List fw rules should return a valid firewall rules"
)
self.assertNotEqual(
len(fw_rules),
0,
"Length of fw rules response should not be zero"
)
self.debug("Associating public IP for network: %s" % self.network.id)
public_ip = PublicIPAddress.create(
self.apiclient,
accountid=self.account.name,
zoneid=self.zone.id,
domainid=self.account.domainid,
networkid=self.network.id
)
self.debug("Associated %s with network %s" % (
public_ip.ipaddress,
self.network.id
))
self.debug("Creating PF rule for IP address: %s" %
public_ip.ipaddress)
NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule"],
ipaddressid=public_ip.ipaddress.id
)
self.debug("Trying to create LB rule on IP with NAT: %s" %
public_ip.ipaddress)
# Create Load Balancer rule on IP already having NAT rule
lb_rule = LoadBalancerRule.create(
self.apiclient,
self.services["lbrule"],
ipaddressid=public_ip.ipaddress.id,
accountid=self.account.name
)
self.debug("Creating PF rule with public port: 66")
nat_rule = NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule_port_66"],
ipaddressid=public_ip.ipaddress.id
)
# Check if NAT rule created successfully
nat_rules = NATRule.list(
self.apiclient,
id=nat_rule.id
)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT rules should return valid list"
)
self.debug("Creating LB rule with public port: 2221")
lb_rule = LoadBalancerRule.create(
self.apiclient,
self.services["lbrule_port_2221"],
ipaddressid=public_ip.ipaddress.id,
accountid=self.account.name
)
# Check if NAT rule created successfully
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=lb_rule.id
)
self.assertEqual(
isinstance(lb_rules, list),
True,
"List LB rules should return valid list"
)
# User should be able to enable VPN on source NAT
self.debug("Created VPN with source NAT IP: %s" % src_nat.ipaddress)
# Assign VPN to source NAT
Vpn.create(
self.apiclient,
src_nat.id,
account=self.account.name,
domainid=self.account.domainid
)
vpns = Vpn.list(
self.apiclient,
publicipid=src_nat.id,
listall=True,
)
self.assertEqual(
isinstance(vpns, list),
True,
"List VPNs should return a valid VPN list"
)
self.assertNotEqual(
len(vpns),
0,
"Length of list VPN response should not be zero"
)
return
@attr(tags=["advanced"], required_hardware="false")
def test_03_network_off_CS5332(self):
"""
@Desc: Test Network offering with Custom system offering for VR
@Steps:
Step1: Create new system offering for domain router
Step2: Verify the custom system offering creation for domain router
Step3: Create new network offering with domain router system offering created in step1
Step4: Verify the network offering creation with custom system offering for VR
Step5: Enable the network offering created in step3
Step6: Create isolated guest network using network offering created in step3
Step7: Deploy guest vm in network created above
"""
#create custom system offering for VR
self.services["service_offering"]["name"] = "test_service_offering_for_router"
self.services["service_offering"]["displaytext"] = "test_service_offering_for_router"
self.services["service_offering"]["cpuspeed"] = 500
self.services["service_offering"]["memory"] = 512
self.services["service_offering"]["systemvmtype"] = "domainrouter"
self.services["service_offering"]["storagetype"] = "shared"
self.services["service_offering"]["issystem"] = "true"
vr_sys_off = ServiceOffering.create(
self.apiclient,
self.services["service_offering"],
)
self.assertIsNotNone(
vr_sys_off,
"Failed to create custom system offering for VR"
)
vr_sys_off_res = ServiceOffering.list(
self.apiclient,
id = vr_sys_off.id,
issystem = "true"
)
status = validateList(vr_sys_off_res)
self.assertEqual(
PASS,
status[0],
"Listing of VR system offering failed"
)
self.assertEqual(
len(vr_sys_off_res),
1,
"Listing more than VR system offerings created"
)
self.debug("Created system offering with id %s" % vr_sys_off.id)
# Create a network offering with all virtual router services enabled using custom system offering for VR
self.debug(
"Creating n/w offering with all services in VR & using custom system offering for VR"
)
self.network_offering = NetworkOffering.create(
self.apiclient,
self.services["network_offering"],
conservemode=False,
serviceofferingid=vr_sys_off.id
)
self.assertIsNotNone(
self.network_offering,
"Failed to create network offering with custom system offering for VR"
)
network_off_res = NetworkOffering.list(
self.apiclient,
id=self.network_offering.id
)
status = validateList(network_off_res)
self.assertEqual(
PASS,
status[0],
"Listing of network offerings failed"
)
self.assertEqual(
len(network_off_res),
1,
"More than one network offering was created"
)
self.assertEqual(
network_off_res[0].serviceofferingid,
vr_sys_off.id,
"FAIL: Network offering has been created with default system offering"
)
self.cleanup.append(self.network_offering)
self.cleanup.append(vr_sys_off)
self.debug("Created n/w offering with ID: %s" % self.network_offering.id)
# Enable Network offering
self.network_offering.update(self.apiclient, state='Enabled')
# Creating network using the network offering created
self.debug("Creating network with network offering: %s" % self.network_offering.id)
self.network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=self.network_offering.id,
zoneid=self.zone.id
)
self.assertIsNotNone(self.network,"Failed to create network")
self.debug("Created network with ID: %s" % self.network.id)
self.debug("Deploying VM in account: %s" % self.account.name)
# Spawn an instance in that network
virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(self.network.id)]
)
self.assertIsNotNone(
virtual_machine,
"VM creation failed with network %s" % self.network.id
)
self.debug("Deployed VM in network: %s" % self.network.id)
return
@attr(tags=["advanced"], required_hardware="false")
def test_04_network_without_domain_CS19303(self):
"""
@Desc: Errors editing a network without a network domain specified
@Steps:
Step1: Create a network offering with SourceNat, StaticNat and Dhcp services
Step2: Verify the network offering creation
Step3: Create an isolated network with the offering created in step1 and without a network domain specified
Step4: Verify the network creation
Step5: Edit the network and verify that updating network should not error out
"""
self.debug(
"Creating n/w offering with SourceNat,StaticNat and DHCP services in VR & conserve mode:off"
)
self.network_offering = NetworkOffering.create(
self.api_client,
self.services["network_offering_withoutDNS"],
conservemode=False
)
self.assertIsNotNone(
self.network_offering,
"Failed to create NO with Sourcenat,staticnat and dhcp only services"
)
self.cleanup.append(self.network_offering)
self.debug("Created n/w offering with ID: %s" % self.network_offering.id)
# Enable Network offering
self.network_offering.update(self.apiclient, state='Enabled')
self.debug("Creating nw without dns service using no id: %s" % self.network_offering.id)
self.network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=self.network_offering.id,
zoneid=self.zone.id
)
self.assertIsNotNone(
self.network,
"Failed to create network without DNS service and network domain"
)
self.debug("Created network with NO: %s" % self.network_offering.id)
try:
self.network_update = self.network.update(
self.apiclient,
name="NW without nw domain"
)
self.debug("Success:Network update has been successful without network domain")
except Exception as e:
self.fail("Error editing a network without network domain specified: %s" % e)
return
class TestNetworkUpgrade(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestNetworkUpgrade, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
cls.services['mode'] = cls.zone.networktype
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.api_client,
cls.services["service_offering"]
)
cls.network_offering = NetworkOffering.create(
cls.api_client,
cls.services["network_offering"],
conservemode=True
)
# Enable Network offering
cls.network_offering.update(cls.api_client, state='Enabled')
cls._cleanup = [
cls.service_offering,
cls.network_offering
]
return
@classmethod
def tearDownClass(cls):
try:
#Cleanup resources used
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.account = Account.create(
self.apiclient,
self.services["account"],
admin=True,
domainid=self.domain.id
)
self.cleanup = []
return
def tearDown(self):
try:
self.account.delete(self.apiclient)
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(speed = "slow")
@attr(tags=["advancedns"], required_hardware="true")
def test_01_nwupgrade_netscaler_conserve_on(self):
"""Test Nw upgrade to netscaler lb service and conserve mode ON
"""
# Validate the following
# 1. Upgrade a network with VR and conserve mode ON TO
# A network that has Lb provided by "Netscaler" and all other
# services provided by "VR" and Conserve mode ON
# 2. Have PF and LB rules on the same ip address. Upgrade network
# should fail.
# 3. Have SourceNat,PF and VPN on the same IP address. Upgrade of
# network should succeed.
# Creating network using the network offering created
self.debug("Creating network with network offering: %s" %
self.network_offering.id)
self.network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=self.network_offering.id,
zoneid=self.zone.id
)
self.debug("Created network with ID: %s" % self.network.id)
self.debug("Deploying VM in account: %s" % self.account.name)
# Spawn an instance in that network
virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(self.network.id)]
)
self.debug("Deployed VM in network: %s" % self.network.id)
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=self.network.id,
account=self.account.name,
domainid=self.account.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug("Trying to create LB rule on source NAT IP: %s" %
src_nat.ipaddress)
# Create Load Balancer rule with source NAT
lb_rule = LoadBalancerRule.create(
self.apiclient,
self.services["lbrule"],
ipaddressid=src_nat.id,
accountid=self.account.name
)
self.debug("Created LB rule on source NAT: %s" % src_nat.ipaddress)
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=lb_rule.id
)
self.assertEqual(
isinstance(lb_rules, list),
True,
"List lb rules should return a valid lb rules"
)
self.assertNotEqual(
len(lb_rules),
0,
"Length of response from listLbRules should not be 0"
)
self.debug(
"Trying to create a port forwarding rule in source NAT: %s" %
src_nat.ipaddress)
#Create NAT rule
nat_rule = NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule"],
ipaddressid=src_nat.id
)
self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
nat_rules = NATRule.list(
self.apiclient,
id=nat_rule.id
)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT should return a valid port forwarding rules"
)
self.assertNotEqual(
len(nat_rules),
0,
"Length of response from listLbRules should not be 0"
)
# Create a network offering with all virtual router services enabled
self.debug(
"Creating n/w offering with all services in VR & conserve mode:ON LB- Netscaler"
)
ns_lb_offering = NetworkOffering.create(
self.api_client,
self.services["network_offering_netscaler"],
conservemode=True
)
self.cleanup.append(ns_lb_offering)
ns_lb_offering.update(self.apiclient, state='Enabled')
#Stop all the VMs associated with network to update cidr
self.debug("Stopping the VM: %s" % virtual_machine.name)
virtual_machine.stop(self.apiclient)
self.debug("Updating network offering for network: %s" %
self.network.id)
with self.assertRaises(Exception):
self.network.update(
self.apiclient,
networkofferingid=ns_lb_offering.id,
changecidr=True
)
self.debug("Network upgrade failed!")
self.debug("Deleting LB Rule: %s" % lb_rule.id)
lb_rule.delete(self.apiclient)
self.debug("LB rule deleted")
# Assign VPN to source NAT
self.debug("Enabling VPN on source NAT")
Vpn.create(
self.apiclient,
src_nat.id,
account=self.account.name,
domainid=self.account.domainid
)
vpns = Vpn.list(
self.apiclient,
publicipid=src_nat.id,
listall=True,
)
self.assertEqual(
isinstance(vpns, list),
True,
"List VPNs should return a valid VPN list"
)
self.assertNotEqual(
len(vpns),
0,
"Length of list VPN response should not be zero"
)
self.debug("Upgrading the network: %s" % self.network.id)
self.network.update(
self.apiclient,
networkofferingid=ns_lb_offering.id,
changecidr=True
)
networks = Network.list(
self.apiclient,
id=self.network.id,
listall=True
)
self.assertEqual(
isinstance(networks, list),
True,
"List Networks should return a valid list for given network ID"
)
self.assertNotEqual(
len(networks),
0,
"Length of list networks should not be 0"
)
network = networks[0]
self.assertEqual(
network.networkofferingid,
ns_lb_offering.id,
"Network offering ID should match with new offering ID"
)
return
@attr(speed = "slow")
@attr(tags=["advancedns"], required_hardware="true")
def test_02_nwupgrade_netscaler_conserve_off(self):
"""Test Nw upgrade to netscaler lb service and conserve mode OFF
"""
# Validate the following
# 1. Upgrade a network with VR and conserve mode ON TO
# A network that has Lb provided by "Netscaler" and all other
# services provided by "VR" and Conserve mode OFF
# 2. Have PF and LB rules on the same ip address. Upgrade network
# should fail.
# 3. Have SourceNat,PF and VPN on the same IP address. Upgrade of
# network should fail.
# Creating network using the network offering created
self.debug("Creating network with network offering: %s" %
self.network_offering.id)
self.network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=self.network_offering.id,
zoneid=self.zone.id
)
self.debug("Created network with ID: %s" % self.network.id)
self.debug("Deploying VM in account: %s" % self.account.name)
# Spawn an instance in that network
virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(self.network.id)]
)
self.debug("Deployed VM in network: %s" % self.network.id)
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=self.network.id,
account=self.account.name,
domainid=self.account.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug("Trying to create LB rule on source NAT IP: %s" %
src_nat.ipaddress)
# Create Load Balancer rule with source NAT
lb_rule = LoadBalancerRule.create(
self.apiclient,
self.services["lbrule"],
ipaddressid=src_nat.id,
accountid=self.account.name
)
self.debug("Created LB rule on source NAT: %s" % src_nat.ipaddress)
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=lb_rule.id
)
self.assertEqual(
isinstance(lb_rules, list),
True,
"List lb rules should return a valid lb rules"
)
self.assertNotEqual(
len(lb_rules),
0,
"Length of response from listLbRules should not be 0"
)
self.debug(
"Trying to create a port forwarding rule in source NAT: %s" %
src_nat.ipaddress)
#Create NAT rule
nat_rule = NATRule.create(
self.apiclient,
virtual_machine,
self.services["natrule"],
ipaddressid=src_nat.id
)
self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
nat_rules = NATRule.list(
self.apiclient,
id=nat_rule.id
)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT should return a valid port forwarding rules"
)
self.assertNotEqual(
len(nat_rules),
0,
"Length of response from listLbRules should not be 0"
)
# Create a network offering with all virtual router services enabled
self.debug(
"Creating n/w offering with all services in VR & conserve mode:ON LB- Netscaler"
)
ns_lb_offering = NetworkOffering.create(
self.api_client,
self.services["network_offering_netscaler"],
conservemode=False
)
self.cleanup.append(ns_lb_offering)
ns_lb_offering.update(self.apiclient, state='Enabled')
#Stop all the VMs associated with network to update cidr
self.debug("Stopping the VM: %s" % virtual_machine.name)
virtual_machine.stop(self.apiclient)
self.debug("Updating network offering for network: %s" %
self.network.id)
with self.assertRaises(Exception):
self.network.update(
self.apiclient,
networkofferingid=ns_lb_offering.id,
changecidr=True
)
self.debug("Network upgrade failed!")
self.debug("Deleting LB Rule: %s" % lb_rule.id)
lb_rule.delete(self.apiclient)
self.debug("LB rule deleted")
# Assign VPN to source NAT
self.debug("Enabling VPN on source NAT")
Vpn.create(
self.apiclient,
src_nat.id,
account=self.account.name,
domainid=self.account.domainid
)
vpns = Vpn.list(
self.apiclient,
publicipid=src_nat.id,
listall=True,
)
self.assertEqual(
isinstance(vpns, list),
True,
"List VPNs should return a valid VPN list"
)
self.assertNotEqual(
len(vpns),
0,
"Length of list VPN response should not be zero"
)
self.debug("Upgrading the network: %s" % self.network.id)
with self.assertRaises(Exception):
self.network.update(
self.apiclient,
networkofferingid=ns_lb_offering.id,
changecidr=True
)
return
class TestNOWithOnlySourceNAT(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestNOWithOnlySourceNAT, cls).getClsTestClient()
cls.apiclient = cls.testClient.getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.apiclient)
cls.zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
cls.services['mode'] = cls.zone.networktype
cls.template = get_template(
cls.apiclient,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offering"]
)
cls.cleanup = [
cls.service_offering,
]
return
@classmethod
def tearDownClass(cls):
try:
#Cleanup resources used
cleanup_resources(cls.apiclient, cls.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(tags=["advanced", "advancedns"], required_hardware="false")
def test_create_network_with_snat(self):
"""Test to create a network with SourceNAT service only"""
# Validate the following
# 1. create a network offering with source nat service
# 2. create a network and deploy a vm within the network
# 3. deployment and network creation should be successful
# 4. attempt to create a fw rule; it should fail since the offering doesn't allow it
# 5. try to ping out of the guest to www.google.com to check SourceNAT is working
self.account = Account.create(
self.apiclient,
self.services["account"],
admin=False,
domainid=self.domain.id
)
self.cleanup.append(self.account)
# Create a network offering VR and only SourceNAT service
self.debug(
"creating network offering with source NAT only"
)
self.network_offering = NetworkOffering.create(
self.apiclient,
self.services["network_offering_sourcenat"]
)
# Enable Network offering
self.network_offering.update(self.apiclient, state='Enabled')
self.debug("Created n/w offering with ID: %s" %
self.network_offering.id)
# Creating network using the network offering created
self.debug("Creating network with network offering: %s" %
self.network_offering.id)
self.network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=self.network_offering.id,
zoneid=self.zone.id
)
self.debug("Created guest network with ID: %s within account %s" % (self.network.id, self.account.name))
self.debug("Deploying VM in account: %s on the network %s" % (self.account.name, self.network.id))
# Spawn an instance in that network
VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(self.network.id)]
)
self.debug("Deployed VM in network: %s" % self.network.id)
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=self.network.id,
account=self.account.name,
domainid=self.account.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug("Successfully implemented network with source NAT IP: %s" %
src_nat.ipaddress)
with self.assertRaises(Exception):
FireWallRule.create(
self.apiclient,
ipaddressid=src_nat.id,
protocol='TCP',
cidrlist=[self.services["fw_rule"]["cidr"]],
startport=self.services["fw_rule"]["startport"],
endport=self.services["fw_rule"]["endport"]
)
return
|
apache-2.0
|
figment/falloutsnip
|
Vendor/IronPython/Lib/xml/dom/pulldom.py
|
322
|
11974
|
import xml.sax
import xml.sax.handler
import types
try:
_StringTypes = [types.StringType, types.UnicodeType]
except AttributeError:
_StringTypes = [types.StringType]
START_ELEMENT = "START_ELEMENT"
END_ELEMENT = "END_ELEMENT"
COMMENT = "COMMENT"
START_DOCUMENT = "START_DOCUMENT"
END_DOCUMENT = "END_DOCUMENT"
PROCESSING_INSTRUCTION = "PROCESSING_INSTRUCTION"
IGNORABLE_WHITESPACE = "IGNORABLE_WHITESPACE"
CHARACTERS = "CHARACTERS"
class PullDOM(xml.sax.ContentHandler):
_locator = None
document = None
def __init__(self, documentFactory=None):
from xml.dom import XML_NAMESPACE
self.documentFactory = documentFactory
self.firstEvent = [None, None]
self.lastEvent = self.firstEvent
self.elementStack = []
self.push = self.elementStack.append
try:
self.pop = self.elementStack.pop
except AttributeError:
# use class' pop instead
pass
self._ns_contexts = [{XML_NAMESPACE:'xml'}] # contains uri -> prefix dicts
self._current_context = self._ns_contexts[-1]
self.pending_events = []
def pop(self):
result = self.elementStack[-1]
del self.elementStack[-1]
return result
def setDocumentLocator(self, locator):
self._locator = locator
def startPrefixMapping(self, prefix, uri):
if not hasattr(self, '_xmlns_attrs'):
self._xmlns_attrs = []
self._xmlns_attrs.append((prefix or 'xmlns', uri))
self._ns_contexts.append(self._current_context.copy())
self._current_context[uri] = prefix or None
def endPrefixMapping(self, prefix):
self._current_context = self._ns_contexts.pop()
def startElementNS(self, name, tagName, attrs):
# Retrieve xml namespace declaration attributes.
xmlns_uri = 'http://www.w3.org/2000/xmlns/'
xmlns_attrs = getattr(self, '_xmlns_attrs', None)
if xmlns_attrs is not None:
for aname, value in xmlns_attrs:
attrs._attrs[(xmlns_uri, aname)] = value
self._xmlns_attrs = []
uri, localname = name
if uri:
# When using namespaces, the reader may or may not
# provide us with the original name. If not, create
# *a* valid tagName from the current context.
if tagName is None:
prefix = self._current_context[uri]
if prefix:
tagName = prefix + ":" + localname
else:
tagName = localname
if self.document:
node = self.document.createElementNS(uri, tagName)
else:
node = self.buildDocument(uri, tagName)
else:
# When the tagname is not prefixed, it just appears as
# localname
if self.document:
node = self.document.createElement(localname)
else:
node = self.buildDocument(None, localname)
for aname,value in attrs.items():
a_uri, a_localname = aname
if a_uri == xmlns_uri:
if a_localname == 'xmlns':
qname = a_localname
else:
qname = 'xmlns:' + a_localname
attr = self.document.createAttributeNS(a_uri, qname)
node.setAttributeNodeNS(attr)
elif a_uri:
prefix = self._current_context[a_uri]
if prefix:
qname = prefix + ":" + a_localname
else:
qname = a_localname
attr = self.document.createAttributeNS(a_uri, qname)
node.setAttributeNodeNS(attr)
else:
attr = self.document.createAttribute(a_localname)
node.setAttributeNode(attr)
attr.value = value
self.lastEvent[1] = [(START_ELEMENT, node), None]
self.lastEvent = self.lastEvent[1]
self.push(node)
def endElementNS(self, name, tagName):
self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
self.lastEvent = self.lastEvent[1]
def startElement(self, name, attrs):
if self.document:
node = self.document.createElement(name)
else:
node = self.buildDocument(None, name)
for aname,value in attrs.items():
attr = self.document.createAttribute(aname)
attr.value = value
node.setAttributeNode(attr)
self.lastEvent[1] = [(START_ELEMENT, node), None]
self.lastEvent = self.lastEvent[1]
self.push(node)
def endElement(self, name):
self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
self.lastEvent = self.lastEvent[1]
def comment(self, s):
if self.document:
node = self.document.createComment(s)
self.lastEvent[1] = [(COMMENT, node), None]
self.lastEvent = self.lastEvent[1]
else:
event = [(COMMENT, s), None]
self.pending_events.append(event)
def processingInstruction(self, target, data):
if self.document:
node = self.document.createProcessingInstruction(target, data)
self.lastEvent[1] = [(PROCESSING_INSTRUCTION, node), None]
self.lastEvent = self.lastEvent[1]
else:
event = [(PROCESSING_INSTRUCTION, target, data), None]
self.pending_events.append(event)
def ignorableWhitespace(self, chars):
node = self.document.createTextNode(chars)
self.lastEvent[1] = [(IGNORABLE_WHITESPACE, node), None]
self.lastEvent = self.lastEvent[1]
def characters(self, chars):
node = self.document.createTextNode(chars)
self.lastEvent[1] = [(CHARACTERS, node), None]
self.lastEvent = self.lastEvent[1]
def startDocument(self):
if self.documentFactory is None:
import xml.dom.minidom
self.documentFactory = xml.dom.minidom.Document.implementation
def buildDocument(self, uri, tagname):
# Can't do that in startDocument, since we need the tagname
# XXX: obtain DocumentType
node = self.documentFactory.createDocument(uri, tagname, None)
self.document = node
self.lastEvent[1] = [(START_DOCUMENT, node), None]
self.lastEvent = self.lastEvent[1]
self.push(node)
# Put everything we have seen so far into the document
for e in self.pending_events:
if e[0][0] == PROCESSING_INSTRUCTION:
_,target,data = e[0]
n = self.document.createProcessingInstruction(target, data)
e[0] = (PROCESSING_INSTRUCTION, n)
elif e[0][0] == COMMENT:
n = self.document.createComment(e[0][1])
e[0] = (COMMENT, n)
else:
                raise AssertionError("Unknown pending event %r" % (e[0][0],))
self.lastEvent[1] = e
self.lastEvent = e
self.pending_events = None
return node.firstChild
def endDocument(self):
self.lastEvent[1] = [(END_DOCUMENT, self.document), None]
self.pop()
def clear(self):
"clear(): Explicitly release parsing structures"
self.document = None
class ErrorHandler:
def warning(self, exception):
print exception
def error(self, exception):
raise exception
def fatalError(self, exception):
raise exception
class DOMEventStream:
def __init__(self, stream, parser, bufsize):
self.stream = stream
self.parser = parser
self.bufsize = bufsize
if not hasattr(self.parser, 'feed'):
self.getEvent = self._slurp
self.reset()
def reset(self):
self.pulldom = PullDOM()
# This content handler relies on namespace support
self.parser.setFeature(xml.sax.handler.feature_namespaces, 1)
self.parser.setContentHandler(self.pulldom)
def __getitem__(self, pos):
rc = self.getEvent()
if rc:
return rc
raise IndexError
def next(self):
rc = self.getEvent()
if rc:
return rc
raise StopIteration
def __iter__(self):
return self
def expandNode(self, node):
event = self.getEvent()
parents = [node]
while event:
token, cur_node = event
if cur_node is node:
return
if token != END_ELEMENT:
parents[-1].appendChild(cur_node)
if token == START_ELEMENT:
parents.append(cur_node)
elif token == END_ELEMENT:
del parents[-1]
event = self.getEvent()
def getEvent(self):
# use IncrementalParser interface, so we get the desired
# pull effect
if not self.pulldom.firstEvent[1]:
self.pulldom.lastEvent = self.pulldom.firstEvent
while not self.pulldom.firstEvent[1]:
buf = self.stream.read(self.bufsize)
if not buf:
self.parser.close()
return None
self.parser.feed(buf)
rc = self.pulldom.firstEvent[1][0]
self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1]
return rc
def _slurp(self):
""" Fallback replacement for getEvent() using the
standard SAX2 interface, which means we slurp the
SAX events into memory (no performance gain, but
we are compatible to all SAX parsers).
"""
self.parser.parse(self.stream)
self.getEvent = self._emit
return self._emit()
def _emit(self):
""" Fallback replacement for getEvent() that emits
the events that _slurp() read previously.
"""
rc = self.pulldom.firstEvent[1][0]
self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1]
return rc
def clear(self):
"""clear(): Explicitly release parsing objects"""
self.pulldom.clear()
del self.pulldom
self.parser = None
self.stream = None
class SAX2DOM(PullDOM):
    def startElementNS(self, name, tagName, attrs):
PullDOM.startElementNS(self, name, tagName, attrs)
curNode = self.elementStack[-1]
parentNode = self.elementStack[-2]
parentNode.appendChild(curNode)
def startElement(self, name, attrs):
PullDOM.startElement(self, name, attrs)
curNode = self.elementStack[-1]
parentNode = self.elementStack[-2]
parentNode.appendChild(curNode)
def processingInstruction(self, target, data):
PullDOM.processingInstruction(self, target, data)
node = self.lastEvent[0][1]
parentNode = self.elementStack[-1]
parentNode.appendChild(node)
def ignorableWhitespace(self, chars):
PullDOM.ignorableWhitespace(self, chars)
node = self.lastEvent[0][1]
parentNode = self.elementStack[-1]
parentNode.appendChild(node)
def characters(self, chars):
PullDOM.characters(self, chars)
node = self.lastEvent[0][1]
parentNode = self.elementStack[-1]
parentNode.appendChild(node)
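# Editor's note: unlike PullDOM, SAX2DOM also appends each element, text and
# processing-instruction node to its parent as the events are generated, so a
# complete DOM tree is built without the caller ever calling expandNode().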
default_bufsize = (2 ** 14) - 20
def parse(stream_or_string, parser=None, bufsize=None):
if bufsize is None:
bufsize = default_bufsize
if type(stream_or_string) in _StringTypes:
stream = open(stream_or_string)
else:
stream = stream_or_string
if not parser:
parser = xml.sax.make_parser()
return DOMEventStream(stream, parser, bufsize)
def parseString(string, parser=None):
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
bufsize = len(string)
buf = StringIO(string)
if not parser:
parser = xml.sax.make_parser()
return DOMEventStream(buf, parser, bufsize)
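# --- Editor's illustrative sketch (not part of the original module) ---
# A minimal pull-parsing loop, assuming a hypothetical file 'books.xml' that
# contains <book> elements: expandNode() turns a single START_ELEMENT into a
# full DOM subtree without materializing the rest of the document.
if __name__ == '__main__':
    events = parse('books.xml')
    for event, node in events:
        if event == START_ELEMENT and node.tagName == 'book':
            events.expandNode(node)
            print node.toxml()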
|
gpl-3.0
|
simbha/mAngE-Gin
|
lib/Django 1.7/django/contrib/auth/context_processors.py
|
514
|
1938
|
# PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict(object):
def __init__(self, user, app_label):
self.user, self.app_label = user, app_label
def __repr__(self):
return str(self.user.get_all_permissions())
def __getitem__(self, perm_name):
return self.user.has_perm("%s.%s" % (self.app_label, perm_name))
def __iter__(self):
        # To fix 'item in perms.someapp' and __getitem__ interaction we need to
# define __iter__. See #18979 for details.
raise TypeError("PermLookupDict is not iterable.")
def __bool__(self):
return self.user.has_module_perms(self.app_label)
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
class PermWrapper(object):
def __init__(self, user):
self.user = user
def __getitem__(self, app_label):
return PermLookupDict(self.user, app_label)
def __iter__(self):
# I am large, I contain multitudes.
raise TypeError("PermWrapper is not iterable.")
def __contains__(self, perm_name):
"""
Lookup by "someapp" or "someapp.someperm" in perms.
"""
if '.' not in perm_name:
            # The name refers to a module.
return bool(self[perm_name])
app_label, perm_name = perm_name.split('.', 1)
return self[app_label][perm_name]
def auth(request):
"""
Returns context variables required by apps that use Django's authentication
system.
If there is no 'user' attribute in the request, uses AnonymousUser (from
django.contrib.auth).
"""
if hasattr(request, 'user'):
user = request.user
else:
from django.contrib.auth.models import AnonymousUser
user = AnonymousUser()
return {
'user': user,
'perms': PermWrapper(user),
}
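# --- Editor's illustrative sketch (not part of Django) ---
# How PermWrapper resolves template-style lookups such as
# {% if perms.polls.add_choice %}, using a hypothetical stub instead of a
# real User object; the 'polls' names are made up.
if __name__ == '__main__':
    class StubUser(object):
        def has_perm(self, perm):
            return perm == 'polls.add_choice'
        def has_module_perms(self, app_label):
            return app_label == 'polls'
        def get_all_permissions(self):
            return {'polls.add_choice'}
    perms = PermWrapper(StubUser())
    assert 'polls' in perms                # module-level lookup via __contains__
    assert 'polls.add_choice' in perms     # app.perm lookup via __contains__
    assert perms['polls']['add_choice']    # the same check, template-style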
|
mit
|
ikropotov/kops
|
vendor/k8s.io/kubernetes/hack/update_owners.py
|
40
|
7464
|
#!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import collections
import csv
import re
import json
import os
import random
import subprocess
import sys
import time
import urllib2
import zlib
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
OWNERS_PATH = os.path.abspath(
os.path.join(BASE_DIR, '..', 'test', 'test_owners.csv'))
GCS_URL_BASE = 'https://storage.googleapis.com/kubernetes-test-history/'
SKIP_MAINTAINERS = {
'a-robinson', 'aronchick', 'bgrant0607-nocc', 'david-mcmahon',
'goltermann', 'sarahnovotny'}
def normalize(name):
name = re.sub(r'\[.*?\]|\{.*?\}', '', name)
name = re.sub(r'\s+', ' ', name)
return name.strip()
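# For example (editor's note): normalize('[k8s.io] Deployment  works {Serial}')
# returns 'Deployment works' -- bracketed tags stripped, whitespace collapsed.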
def get_test_history(days_ago):
url = time.strftime(GCS_URL_BASE + 'logs/%Y-%m-%d.json',
time.gmtime(time.time() - days_ago * 24 * 60 * 60))
resp = urllib2.urlopen(url)
content = resp.read()
if resp.headers.get('content-encoding') == 'gzip':
content = zlib.decompress(content, 15 | 16)
return json.loads(content)
def get_test_names_from_test_history():
test_names = set()
for days_ago in range(4):
test_history = get_test_history(days_ago)
test_names.update(normalize(name) for name in test_history['test_names'])
return test_names
def get_test_names_from_local_files():
tests_json = subprocess.check_output(['go', 'run', 'test/list/main.go', '-json'])
tests = json.loads(tests_json)
return {normalize(t['Name'] + (' ' + t['TestName'] if 'k8s.io/' not in t['Name'] else ''))
for t in tests}
def load_owners(fname):
owners = {}
with open(fname) as f:
for n, (name, owner, random_assignment) in enumerate(csv.reader(f)):
if n == 0:
continue # header
owners[normalize(name)] = (owner, int(random_assignment))
return owners
def write_owners(fname, owners):
with open(fname, 'w') as f:
out = csv.writer(f, lineterminator='\n')
out.writerow(['name', 'owner', 'auto-assigned'])
sort_key = lambda (k, v): (k != 'DEFAULT', k) # put 'DEFAULT' first.
items = sorted(owners.items(), key=sort_key)
for name, (owner, random_assignment) in items:
out.writerow([name, owner, int(random_assignment)])
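# Editor's note: the CSV written above looks like this (owners hypothetical):
#   name,owner,auto-assigned
#   DEFAULT,rmmh,0
#   Deployment works,fejta,1
# The sort key pins the 'DEFAULT' fallback row to the top of the file.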
def get_maintainers():
# Github doesn't seem to support team membership listing without a key with
# org admin privileges. Instead, we do it manually:
# Open https://github.com/orgs/kubernetes/teams/kubernetes-maintainers
# Run this in the js console:
# [].slice.call(document.querySelectorAll('.team-member-username a')).map(
# e => e.textContent.trim())
ret = {"alex-mohr", "apelisse", "aronchick", "bgrant0607", "bgrant0607-nocc",
"bprashanth", "brendandburns", "caesarxuchao", "childsb", "cjcullen",
"david-mcmahon", "davidopp", "dchen1107", "deads2k", "derekwaynecarr",
"eparis", "erictune", "fabioy", "fejta", "fgrzadkowski", "freehan",
"gmarek", "grodrigues3", "ingvagabund", "ixdy", "janetkuo", "jbeda",
"jessfraz", "jingxu97", "jlowdermilk", "jsafrane", "jszczepkowski",
"justinsb", "kargakis", "Kashomon", "kevin-wangzefeng", "krousey",
"lavalamp", "liggitt", "luxas", "madhusudancs", "maisem", "matchstick",
"mbohlool", "mikedanese", "mml", "mtaufen", "mwielgus", "ncdc",
"nikhiljindal", "piosz", "pmorie", "pwittrock", "Q-Lee", "quinton-hoole",
"Random-Liu", "rmmh", "roberthbailey", "saad-ali", "smarterclayton",
"soltysh", "spxtr", "sttts", "thelinuxfoundation", "thockin",
"timothysc", "timstclair", "vishh", "wojtek-t", "xiang90", "yifan-gu",
"yujuhong", "zmerlynn"}
return sorted(ret - SKIP_MAINTAINERS)
def detect_github_username():
origin_url = subprocess.check_output(['git', 'config', 'remote.origin.url'])
m = re.search(r'github.com[:/](.*)/', origin_url)
if m and m.group(1) != 'kubernetes':
return m.group(1)
raise ValueError('unable to determine GitHub user from '
'`git config remote.origin.url` output, run with --user instead')
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--history', action='store_true', help='Generate test list from result history.')
parser.add_argument('--user', help='User to assign new tests to (or RANDOM, default: current GitHub user).')
parser.add_argument('--addonly', action='store_true', help='Only add missing tests, do not change existing.')
parser.add_argument('--check', action='store_true', help='Exit with a nonzero status if the test list has changed.')
options = parser.parse_args()
if options.history:
test_names = get_test_names_from_test_history()
else:
test_names = get_test_names_from_local_files()
test_names.add('DEFAULT')
test_names = sorted(test_names)
owners = load_owners(OWNERS_PATH)
outdated_tests = sorted(set(owners) - set(test_names))
new_tests = sorted(set(test_names) - set(owners))
maintainers = get_maintainers()
print '# OUTDATED TESTS (%d):' % len(outdated_tests)
print '\n'.join('%s -- %s%s' %
(t, owners[t][0], ['', ' (random)'][owners[t][1]])
for t in outdated_tests)
print '# NEW TESTS (%d):' % len(new_tests)
print '\n'.join(new_tests)
if options.check:
if new_tests or outdated_tests:
print
print 'ERROR: the test list has changed'
sys.exit(1)
sys.exit(0)
if not options.user:
options.user = detect_github_username()
for name in outdated_tests:
owners.pop(name)
if not options.addonly:
print '# UNEXPECTED MAINTAINERS ',
print '(randomly assigned, but not in kubernetes-maintainers)'
for name, (owner, random_assignment) in sorted(owners.iteritems()):
if random_assignment and owner not in maintainers:
print '%-16s %s' % (owner, name)
owners.pop(name)
print
owner_counts = collections.Counter(
owner for name, (owner, random) in owners.iteritems()
if owner in maintainers)
for test_name in set(test_names) - set(owners):
random_assignment = True
if options.user.lower() == 'random':
new_owner, _count = random.choice(owner_counts.most_common()[-4:])
else:
new_owner = options.user
random_assignment = False
owner_counts[new_owner] += 1
owners[test_name] = (new_owner, random_assignment)
if options.user.lower() == 'random':
print '# Tests per maintainer:'
for owner, count in owner_counts.most_common():
print '%-20s %3d' % (owner, count)
write_owners(OWNERS_PATH, owners)
if __name__ == '__main__':
main()
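# Editor's note: typical invocations, assuming the script is run from the
# kubernetes repo root (the flags are defined in main() above):
#   hack/update_owners.py                  # assign new tests to the current GitHub user
#   hack/update_owners.py --user RANDOM    # spread new tests across maintainers
#   hack/update_owners.py --check          # exit nonzero if the test list changed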
|
apache-2.0
|
BlogomaticProject/Blogomatic
|
opt/blog-o-matic/usr/lib/python/Bio/Nexus/Nodes.py
|
1
|
5759
|
# Copyright 2005-2008 by Frank Kauff & Cymon J. Cox. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
#
# Nodes.py
#
# Provides functionality of a linked list.
# Each node has one (or none) predecessor, and an arbitrary number of successors.
# Nodes can store arbitrary data in a NodeData class.
#
# Subclassed by Nexus.Trees to store phylogenetic trees.
#
# Bug reports to Frank Kauff ([email protected])
#
class ChainException(Exception):
pass
class NodeException(Exception):
pass
class Chain:
"""Stores a list of nodes that are linked together."""
def __init__(self):
"""Initiates a node chain: (self)."""
self.chain={}
self.id=-1
def _get_id(self):
"""Gets a new id for a node in the chain."""
self.id+=1
return self.id
def all_ids(self):
"""Return a list of all node ids."""
return self.chain.keys()
def add(self,node,prev=None):
"""Attaches node to another: (self, node, prev)."""
if prev is not None and prev not in self.chain:
raise ChainException('Unknown predecessor: '+str(prev))
else:
id=self._get_id()
node.set_id(id)
node.set_prev(prev)
if prev is not None:
self.chain[prev].add_succ(id)
self.chain[id]=node
return id
def collapse(self,id):
"""Deletes node from chain and relinks successors to predecessor: collapse(self, id)."""
if id not in self.chain:
raise ChainException('Unknown ID: '+str(id))
prev_id=self.chain[id].get_prev()
self.chain[prev_id].remove_succ(id)
succ_ids=self.chain[id].get_succ()
for i in succ_ids:
self.chain[i].set_prev(prev_id)
self.chain[prev_id].add_succ(succ_ids)
node=self.chain[id]
self.kill(id)
return node
def kill(self,id):
"""Kills a node from chain without caring to what it is connected: kill(self,id)."""
if id not in self.chain:
raise ChainException('Unknown ID: '+str(id))
else:
del self.chain[id]
def unlink(self,id):
"""Disconnects node from his predecessor: unlink(self,id)."""
if id not in self.chain:
raise ChainException('Unknown ID: '+str(id))
else:
prev_id=self.chain[id].prev
if prev_id is not None:
self.chain[prev_id].succ.pop(self.chain[prev_id].succ.index(id))
self.chain[id].prev=None
return prev_id
def link(self, parent,child):
"""Connects son to parent: link(self,son,parent)."""
if child not in self.chain:
raise ChainException('Unknown ID: '+str(child))
elif parent not in self.chain:
raise ChainException('Unknown ID: '+str(parent))
else:
self.unlink(child)
self.chain[parent].succ.append(child)
self.chain[child].set_prev(parent)
def is_parent_of(self,parent,grandchild):
"""Check if grandchild is a subnode of parent: is_parent_of(self,parent,grandchild)."""
if grandchild==parent or grandchild in self.chain[parent].get_succ():
return True
else:
for sn in self.chain[parent].get_succ():
if self.is_parent_of(sn,grandchild):
return True
else:
return False
def trace(self,start,finish):
"""Returns a list of all node_ids between two nodes (excluding start, including end): trace(start,end)."""
if start not in self.chain or finish not in self.chain:
raise NodeException('Unknown node.')
if not self.is_parent_of(start,finish) or start==finish:
return []
for sn in self.chain[start].get_succ():
if self.is_parent_of(sn,finish):
return [sn]+self.trace(sn,finish)
class Node:
"""A single node."""
def __init__(self,data=None):
"""Represents a node with one predecessor and multiple successors: (self, data=None)."""
self.id=None
self.data=data
self.prev=None
self.succ=[]
def set_id(self,id):
"""Sets the id of a node, if not set yet: (self,id)."""
if self.id is not None:
raise NodeException('Node id cannot be changed.')
self.id=id
def get_id(self):
"""Returns the node's id: (self)."""
return self.id
def get_succ(self):
"""Returns a list of the node's successors: (self)."""
return self.succ
def get_prev(self):
"""Returns the id of the node's predecessor: (self)."""
return self.prev
def add_succ(self,id):
"""Adds a node id to the node's successors: (self,id)."""
if isinstance(id,type([])):
self.succ.extend(id)
else:
self.succ.append(id)
def remove_succ(self,id):
"""Removes a node id from the node's successors: (self,id)."""
self.succ.remove(id)
def set_succ(self,new_succ):
"""Sets the node's successors: (self,new_succ)."""
if not isinstance(new_succ,type([])):
raise NodeException('Node successor must be of list type.')
self.succ=new_succ
def set_prev(self,id):
"""Sets the node's predecessor: (self,id)."""
self.prev=id
def get_data(self):
"""Returns a node's data: (self)."""
return self.data
def set_data(self,data):
"""Sets a node's data: (self,data)."""
self.data=data
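# --- Editor's illustrative sketch (not part of Biopython) ---
# Build a tiny chain root -> a -> b, trace a path, then collapse a node.
if __name__=='__main__':
    chain=Chain()
    root=chain.add(Node('root'))           # gets id 0, no predecessor
    a=chain.add(Node('a'),prev=root)
    b=chain.add(Node('b'),prev=a)
    assert chain.is_parent_of(root,b)
    assert chain.trace(root,b)==[a,b]      # excludes start, includes end
    chain.collapse(a)                      # b is relinked directly under root
    assert chain.chain[b].get_prev()==root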
|
gpl-2.0
|
tinchoss/Python_Android
|
python/src/Lib/test/test_unittest.py
|
51
|
85098
|
"""Test script for unittest.
By Collin Winter <collinw at gmail.com>
Still need testing:
TestCase.{assert,fail}* methods (some are tested implicitly)
"""
from test import test_support
import unittest
from unittest import TestCase
import types
### Support code
################################################################
class LoggingResult(unittest.TestResult):
def __init__(self, log):
self._events = log
super(LoggingResult, self).__init__()
def startTest(self, test):
self._events.append('startTest')
super(LoggingResult, self).startTest(test)
def stopTest(self, test):
self._events.append('stopTest')
super(LoggingResult, self).stopTest(test)
def addFailure(self, *args):
self._events.append('addFailure')
super(LoggingResult, self).addFailure(*args)
def addError(self, *args):
self._events.append('addError')
super(LoggingResult, self).addError(*args)
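# Editor's note: running a failing TestCase through LoggingResult records the
# result-protocol calls in order, e.g. ['startTest', 'addFailure', 'stopTest'].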
class TestEquality(object):
# Check for a valid __eq__ implementation
def test_eq(self):
for obj_1, obj_2 in self.eq_pairs:
self.assertEqual(obj_1, obj_2)
self.assertEqual(obj_2, obj_1)
# Check for a valid __ne__ implementation
def test_ne(self):
for obj_1, obj_2 in self.ne_pairs:
self.failIfEqual(obj_1, obj_2)
self.failIfEqual(obj_2, obj_1)
class TestHashing(object):
# Check for a valid __hash__ implementation
def test_hash(self):
for obj_1, obj_2 in self.eq_pairs:
try:
assert hash(obj_1) == hash(obj_2)
except KeyboardInterrupt:
raise
except AssertionError:
self.fail("%s and %s do not hash equal" % (obj_1, obj_2))
except Exception, e:
self.fail("Problem hashing %s and %s: %s" % (obj_1, obj_2, e))
for obj_1, obj_2 in self.ne_pairs:
try:
assert hash(obj_1) != hash(obj_2)
except KeyboardInterrupt:
raise
except AssertionError:
self.fail("%s and %s hash equal, but shouldn't" % (obj_1, obj_2))
except Exception, e:
self.fail("Problem hashing %s and %s: %s" % (obj_1, obj_2, e))
################################################################
### /Support code
class Test_TestLoader(TestCase):
### Tests for TestLoader.loadTestsFromTestCase
################################################################
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
def test_loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure it does the right thing even if no tests were found
def test_loadTestsFromTestCase__no_matches(self):
class Foo(unittest.TestCase):
def foo_bar(self): pass
empty_suite = unittest.TestSuite()
loader = unittest.TestLoader()
self.assertEqual(loader.loadTestsFromTestCase(Foo), empty_suite)
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# What happens if loadTestsFromTestCase() is given an object
# that isn't a subclass of TestCase? Specifically, what happens
# if testCaseClass is a subclass of TestSuite?
#
# This is checked for specifically in the code, so we better add a
# test for it.
def test_loadTestsFromTestCase__TestSuite_subclass(self):
class NotATestCase(unittest.TestSuite):
pass
loader = unittest.TestLoader()
try:
loader.loadTestsFromTestCase(NotATestCase)
except TypeError:
pass
else:
self.fail('Should raise TypeError')
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure loadTestsFromTestCase() picks up the default test method
# name (as specified by TestCase), even though the method name does
# not match the default TestLoader.testMethodPrefix string
def test_loadTestsFromTestCase__default_method_name(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
loader = unittest.TestLoader()
# This has to be false for the test to succeed
self.failIf('runTest'.startswith(loader.testMethodPrefix))
suite = loader.loadTestsFromTestCase(Foo)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [Foo('runTest')])
################################################################
### /Tests for TestLoader.loadTestsFromTestCase
### Tests for TestLoader.loadTestsFromModule
################################################################
# "This method searches `module` for classes derived from TestCase"
def test_loadTestsFromModule__TestCase_subclass(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(m)
self.failUnless(isinstance(suite, loader.suiteClass))
expected = [loader.suiteClass([MyTestCase('test')])]
self.assertEqual(list(suite), expected)
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (no TestCase instances)?
def test_loadTestsFromModule__no_TestCase_instances(self):
m = types.ModuleType('m')
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [])
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (TestCases instances, but no tests)?
def test_loadTestsFromModule__no_TestCase_tests(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [loader.suiteClass()])
# "This method searches `module` for classes derived from TestCase"s
#
# What happens if loadTestsFromModule() is given something other
# than a module?
#
# XXX Currently, it succeeds anyway. This flexibility
# should either be documented or loadTestsFromModule() should
# raise a TypeError
#
# XXX Certain people are using this behaviour. We'll add a test for it
def test_loadTestsFromModule__not_a_module(self):
class MyTestCase(unittest.TestCase):
def test(self):
pass
class NotAModule(object):
test_2 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(NotAModule)
reference = [unittest.TestSuite([MyTestCase('test')])]
self.assertEqual(list(suite), reference)
################################################################
### /Tests for TestLoader.loadTestsFromModule()
### Tests for TestLoader.loadTestsFromName()
################################################################
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Is ValueError raised in response to an empty name?
def test_loadTestsFromName__empty_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('')
except ValueError, e:
self.assertEqual(str(e), "Empty module name")
else:
self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the name contains invalid characters?
def test_loadTestsFromName__malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise ValueError or ImportError?
try:
loader.loadTestsFromName('abc () //')
except ValueError:
pass
except ImportError:
pass
else:
self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve ... to a
# module"
#
# What happens when a module by that name can't be found?
def test_loadTestsFromName__unknown_module_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('sdasfasfasdf')
except ImportError, e:
self.assertEqual(str(e), "No module named sdasfasfasdf")
else:
self.fail("TestLoader.loadTestsFromName failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module is found, but the attribute can't?
def test_loadTestsFromName__unknown_attr_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('unittest.sdasfasfasdf')
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when we provide the module, but the attribute can't be
# found?
def test_loadTestsFromName__relative_unknown_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('sdasfasfasdf', unittest)
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise ValueError when passed an empty
# name relative to a provided module?
#
# XXX Should probably raise a ValueError instead of an AttributeError
def test_loadTestsFromName__relative_empty_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('', unittest)
except AttributeError, e:
pass
else:
self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when an impossible name is given, relative to the provided
# `module`?
def test_loadTestsFromName__relative_malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise AttributeError or ValueError?
try:
loader.loadTestsFromName('abc () //', unittest)
except ValueError:
pass
except AttributeError:
pass
else:
self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise TypeError when the `module` argument
# isn't a module object?
#
    # XXX Accepts the not-a-module object, ignoring the object's type
# This should raise an exception or the method name should be changed
#
# XXX Some people are relying on this, so keep it for now
def test_loadTestsFromName__relative_not_a_module(self):
class MyTestCase(unittest.TestCase):
def test(self):
pass
class NotAModule(object):
test_2 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('test_2', NotAModule)
reference = [MyTestCase('test')]
self.assertEqual(list(suite), reference)
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
def test_loadTestsFromName__relative_bad_object(self):
m = types.ModuleType('m')
m.testcase_1 = object()
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('testcase_1', m)
except TypeError:
pass
else:
self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may
# resolve either to ... a test case class"
def test_loadTestsFromName__relative_TestCase_subclass(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('testcase_1', m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
def test_loadTestsFromName__relative_TestSuite(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testsuite = unittest.TestSuite([MyTestCase('test')])
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('testsuite', m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test method within a test case class"
def test_loadTestsFromName__relative_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('testcase_1.test', m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does loadTestsFromName() raise the proper exception when trying to
# resolve "a test method within a test case class" that doesn't exist
# for the given name (relative to a provided module)?
def test_loadTestsFromName__relative_invalid_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('testcase_1.testfoo', m)
except AttributeError, e:
self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
else:
self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
def test_loadTestsFromName__callable__TestSuite(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
testcase_2 = unittest.FunctionTestCase(lambda: None)
def return_TestSuite():
return unittest.TestSuite([testcase_1, testcase_2])
m.return_TestSuite = return_TestSuite
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('return_TestSuite', m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [testcase_1, testcase_2])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
def test_loadTestsFromName__callable__TestCase_instance(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
def return_TestCase():
return testcase_1
m.return_TestCase = return_TestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('return_TestCase', m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [testcase_1])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens if the callable returns something else?
def test_loadTestsFromName__callable__wrong_type(self):
m = types.ModuleType('m')
def return_wrong():
return 6
m.return_wrong = return_wrong
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromName('return_wrong', m)
except TypeError:
pass
else:
self.fail("TestLoader.loadTestsFromName failed to raise TypeError")
# "The specifier can refer to modules and packages which have not been
# imported; they will be imported as a side-effect"
def test_loadTestsFromName__module_not_loaded(self):
# We're going to try to load this module as a side-effect, so it
# better not be loaded before we try.
#
# Why pick audioop? Google shows it isn't used very often, so there's
# a good chance that it won't be imported when this test is run
module_name = 'audioop'
import sys
if module_name in sys.modules:
del sys.modules[module_name]
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromName(module_name)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [])
# audioop should now be loaded, thanks to loadTestsFromName()
self.failUnless(module_name in sys.modules)
finally:
if module_name in sys.modules:
del sys.modules[module_name]
################################################################
### Tests for TestLoader.loadTestsFromName()
### Tests for TestLoader.loadTestsFromNames()
################################################################
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
#
# What happens if that sequence of names is empty?
def test_loadTestsFromNames__empty_name_list(self):
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames([])
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [])
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens if that sequence of names is empty?
#
# XXX Should this raise a ValueError or just return an empty TestSuite?
def test_loadTestsFromNames__relative_empty_name_list(self):
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames([], unittest)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Is ValueError raised in response to an empty name?
def test_loadTestsFromNames__empty_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames([''])
except ValueError, e:
self.assertEqual(str(e), "Empty module name")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when presented with an impossible module name?
def test_loadTestsFromNames__malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise ValueError or ImportError?
try:
loader.loadTestsFromNames(['abc () //'])
except ValueError:
pass
except ImportError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when no module can be found for the given name?
def test_loadTestsFromNames__unknown_module_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['sdasfasfasdf'])
except ImportError, e:
self.assertEqual(str(e), "No module named sdasfasfasdf")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module can be found, but not the attribute?
def test_loadTestsFromNames__unknown_attr_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['unittest.sdasfasfasdf', 'unittest'])
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when given an unknown attribute on a specified `module`
# argument?
def test_loadTestsFromNames__unknown_name_relative_1(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['sdasfasfasdf'], unittest)
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Do unknown attributes (relative to a provided module) still raise an
# exception even in the presence of valid attribute names?
def test_loadTestsFromNames__unknown_name_relative_2(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['TestCase', 'sdasfasfasdf'], unittest)
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when faced with the empty string?
#
# XXX This currently raises AttributeError, though ValueError is probably
# more appropriate
def test_loadTestsFromNames__relative_empty_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames([''], unittest)
except AttributeError:
pass
else:
self.fail("Failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when presented with an impossible attribute name?
def test_loadTestsFromNames__relative_malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise AttributeError or ValueError?
try:
loader.loadTestsFromNames(['abc () //'], unittest)
except AttributeError:
pass
except ValueError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromNames() make sure the provided `module` is in fact
# a module?
#
# XXX This validation is currently not done. This flexibility should
# either be documented or a TypeError should be raised.
def test_loadTestsFromNames__relative_not_a_module(self):
class MyTestCase(unittest.TestCase):
def test(self):
pass
class NotAModule(object):
test_2 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['test_2'], NotAModule)
reference = [unittest.TestSuite([MyTestCase('test')])]
self.assertEqual(list(suite), reference)
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
def test_loadTestsFromNames__relative_bad_object(self):
m = types.ModuleType('m')
m.testcase_1 = object()
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['testcase_1'], m)
except TypeError:
pass
else:
self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test case class"
def test_loadTestsFromNames__relative_TestCase_subclass(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testcase_1'], m)
self.failUnless(isinstance(suite, loader.suiteClass))
expected = loader.suiteClass([MyTestCase('test')])
self.assertEqual(list(suite), [expected])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a TestSuite instance"
def test_loadTestsFromNames__relative_TestSuite(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testsuite = unittest.TestSuite([MyTestCase('test')])
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testsuite'], m)
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [m.testsuite])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
def test_loadTestsFromNames__relative_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testcase_1.test'], m)
self.failUnless(isinstance(suite, loader.suiteClass))
ref_suite = unittest.TestSuite([MyTestCase('test')])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
#
# Does the method gracefully handle names that initially look like they
# resolve to "a test method within a test case class" but don't?
def test_loadTestsFromNames__relative_invalid_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['testcase_1.testfoo'], m)
except AttributeError, e:
self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
else:
self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
def test_loadTestsFromNames__callable__TestSuite(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
testcase_2 = unittest.FunctionTestCase(lambda: None)
def return_TestSuite():
return unittest.TestSuite([testcase_1, testcase_2])
m.return_TestSuite = return_TestSuite
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['return_TestSuite'], m)
self.failUnless(isinstance(suite, loader.suiteClass))
expected = unittest.TestSuite([testcase_1, testcase_2])
self.assertEqual(list(suite), [expected])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
def test_loadTestsFromNames__callable__TestCase_instance(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
def return_TestCase():
return testcase_1
m.return_TestCase = return_TestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['return_TestCase'], m)
self.failUnless(isinstance(suite, loader.suiteClass))
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# Are staticmethods handled correctly?
def test_loadTestsFromNames__callable__call_staticmethod(self):
m = types.ModuleType('m')
class Test1(unittest.TestCase):
def test(self):
pass
testcase_1 = Test1('test')
class Foo(unittest.TestCase):
@staticmethod
def foo():
return testcase_1
m.Foo = Foo
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['Foo.foo'], m)
self.failUnless(isinstance(suite, loader.suiteClass))
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens when the callable returns something else?
def test_loadTestsFromNames__callable__wrong_type(self):
m = types.ModuleType('m')
def return_wrong():
return 6
m.return_wrong = return_wrong
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromNames(['return_wrong'], m)
except TypeError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise TypeError")
# "The specifier can refer to modules and packages which have not been
# imported; they will be imported as a side-effect"
def test_loadTestsFromNames__module_not_loaded(self):
# We're going to try to load this module as a side-effect, so it
# better not be loaded before we try.
#
# Why pick audioop? Google shows it isn't used very often, so there's
# a good chance that it won't be imported when this test is run
module_name = 'audioop'
import sys
if module_name in sys.modules:
del sys.modules[module_name]
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromNames([module_name])
self.failUnless(isinstance(suite, loader.suiteClass))
self.assertEqual(list(suite), [unittest.TestSuite()])
# audioop should now be loaded, thanks to loadTestsFromName()
self.failUnless(module_name in sys.modules)
finally:
if module_name in sys.modules:
del sys.modules[module_name]
################################################################
### /Tests for TestLoader.loadTestsFromNames()
### Tests for TestLoader.getTestCaseNames()
################################################################
# "Return a sorted sequence of method names found within testCaseClass"
#
# Test.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames(self):
class Test(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), ['test_1', 'test_2'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Does getTestCaseNames() behave appropriately if no tests are found?
def test_getTestCaseNames__no_tests(self):
class Test(unittest.TestCase):
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), [])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Are not-TestCases handled gracefully?
#
# XXX This should raise a TypeError, not return a list
#
# XXX It's too late in the 2.5 release cycle to fix this, but it should
# probably be revisited for 2.6
def test_getTestCaseNames__not_a_TestCase(self):
class BadCase(int):
def test_foo(self):
pass
loader = unittest.TestLoader()
names = loader.getTestCaseNames(BadCase)
self.assertEqual(names, ['test_foo'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Make sure inherited names are handled.
#
# TestP.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames__inheritance(self):
class TestP(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
class TestC(TestP):
def test_1(self): pass
def test_3(self): pass
loader = unittest.TestLoader()
names = ['test_1', 'test_2', 'test_3']
self.assertEqual(loader.getTestCaseNames(TestC), names)
################################################################
### /Tests for TestLoader.getTestCaseNames()
### Tests for TestLoader.testMethodPrefix
################################################################
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromModule(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = [unittest.TestSuite([Foo('foo_bar')])]
tests_2 = [unittest.TestSuite([Foo('test_1'), Foo('test_2')])]
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromName(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromNames(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([unittest.TestSuite([Foo('foo_bar')])])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
tests_2 = unittest.TestSuite([tests_2])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_2)
# "The default value is 'test'"
def test_testMethodPrefix__default_value(self):
loader = unittest.TestLoader()
self.failUnless(loader.testMethodPrefix == 'test')
################################################################
### /Tests for TestLoader.testMethodPrefix
### Tests for TestLoader.sortTestMethodsUsing
################################################################
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromTestCase(self):
def reversed_cmp(x, y):
return -cmp(x, y)
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = loader.suiteClass([Foo('test_2'), Foo('test_1')])
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromModule(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromModule(m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromName(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = loader.suiteClass([Foo('test_2'), Foo('test_1')])
self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromNames(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromNames(['Foo'], m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames()"
#
# Does it actually affect getTestCaseNames()?
def test_sortTestMethodsUsing__getTestCaseNames(self):
def reversed_cmp(x, y):
return -cmp(x, y)
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
test_names = ['test_2', 'test_1']
self.assertEqual(loader.getTestCaseNames(Foo), test_names)
# "The default value is the built-in cmp() function"
def test_sortTestMethodsUsing__default_value(self):
loader = unittest.TestLoader()
self.failUnless(loader.sortTestMethodsUsing is cmp)
# "it can be set to None to disable the sort."
#
# XXX How is this different from reassigning cmp? Are the tests returned
# in a random order or something? This behaviour should die
def test_sortTestMethodsUsing__None(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = None
test_names = ['test_2', 'test_1']
self.assertEqual(set(loader.getTestCaseNames(Foo)), set(test_names))
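    # Note: in this era's CPython, getTestCaseNames() collects names by
    # filtering dir(testCaseClass) and only sorts when sortTestMethodsUsing
    # is set, so with None the order simply falls through from dir() --
    # which is why only set equality is asserted above.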
################################################################
### /Tests for TestLoader.sortTestMethodsUsing
### Tests for TestLoader.suiteClass
################################################################
# "Callable object that constructs a test suite from a list of tests."
def test_suiteClass__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests = [Foo('test_1'), Foo('test_2')]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromModule(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [[Foo('test_1'), Foo('test_2')]]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromModule(m), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromName(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [Foo('test_1'), Foo('test_2')]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromNames(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [[Foo('test_1'), Foo('test_2')]]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests)
# "The default value is the TestSuite class"
def test_suiteClass__default_value(self):
loader = unittest.TestLoader()
self.failUnless(loader.suiteClass is unittest.TestSuite)
################################################################
### /Tests for TestLoader.suiteClass
### Support code for Test_TestSuite
################################################################
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def test_3(self): pass
def runTest(self): pass
def _mk_TestSuite(*names):
return unittest.TestSuite(Foo(n) for n in names)
################################################################
### /Support code for Test_TestSuite
class Test_TestSuite(TestCase, TestEquality):
### Set up attributes needed by inherited tests
################################################################
# Used by TestEquality.test_eq
eq_pairs = [(unittest.TestSuite(), unittest.TestSuite())
,(unittest.TestSuite(), unittest.TestSuite([]))
,(_mk_TestSuite('test_1'), _mk_TestSuite('test_1'))]
# Used by TestEquality.test_ne
ne_pairs = [(unittest.TestSuite(), _mk_TestSuite('test_1'))
,(unittest.TestSuite([]), _mk_TestSuite('test_1'))
,(_mk_TestSuite('test_1', 'test_2'), _mk_TestSuite('test_1', 'test_3'))
,(_mk_TestSuite('test_1'), _mk_TestSuite('test_2'))]
################################################################
### /Set up attributes needed by inherited tests
### Tests for TestSuite.__init__
################################################################
# "class TestSuite([tests])"
#
# The tests iterable should be optional
def test_init__tests_optional(self):
suite = unittest.TestSuite()
self.assertEqual(suite.countTestCases(), 0)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# TestSuite should deal with empty tests iterables by allowing the
# creation of an empty suite
def test_init__empty_tests(self):
suite = unittest.TestSuite([])
self.assertEqual(suite.countTestCases(), 0)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# TestSuite should allow any iterable to provide tests
def test_init__tests_from_any_iterable(self):
def tests():
yield unittest.FunctionTestCase(lambda: None)
yield unittest.FunctionTestCase(lambda: None)
suite_1 = unittest.TestSuite(tests())
self.assertEqual(suite_1.countTestCases(), 2)
suite_2 = unittest.TestSuite(suite_1)
self.assertEqual(suite_2.countTestCases(), 2)
suite_3 = unittest.TestSuite(set(suite_1))
self.assertEqual(suite_3.countTestCases(), 2)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# Does TestSuite() also allow other TestSuite() instances to be present
# in the tests iterable?
def test_init__TestSuite_instances_in_tests(self):
def tests():
ftc = unittest.FunctionTestCase(lambda: None)
yield unittest.TestSuite([ftc])
yield unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite(tests())
self.assertEqual(suite.countTestCases(), 2)
################################################################
### /Tests for TestSuite.__init__
# Container types should support the iter protocol
def test_iter(self):
test1 = unittest.FunctionTestCase(lambda: None)
test2 = unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite((test1, test2))
self.assertEqual(list(suite), [test1, test2])
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Presumably an empty TestSuite returns 0?
def test_countTestCases_zero_simple(self):
suite = unittest.TestSuite()
self.assertEqual(suite.countTestCases(), 0)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Presumably an empty TestSuite (even if it contains other empty
# TestSuite instances) returns 0?
def test_countTestCases_zero_nested(self):
class Test1(unittest.TestCase):
def test(self):
pass
suite = unittest.TestSuite([unittest.TestSuite()])
self.assertEqual(suite.countTestCases(), 0)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
def test_countTestCases_simple(self):
test1 = unittest.FunctionTestCase(lambda: None)
test2 = unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite((test1, test2))
self.assertEqual(suite.countTestCases(), 2)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Make sure this holds for nested TestSuite instances, too
def test_countTestCases_nested(self):
class Test1(unittest.TestCase):
def test1(self): pass
def test2(self): pass
test2 = unittest.FunctionTestCase(lambda: None)
test3 = unittest.FunctionTestCase(lambda: None)
child = unittest.TestSuite((Test1('test2'), test2))
parent = unittest.TestSuite((test3, child, Test1('test1')))
self.assertEqual(parent.countTestCases(), 4)
# "Run the tests associated with this suite, collecting the result into
# the test result object passed as result."
#
# And if there are no tests? What then?
def test_run__empty_suite(self):
events = []
result = LoggingResult(events)
suite = unittest.TestSuite()
suite.run(result)
self.assertEqual(events, [])
# "Note that unlike TestCase.run(), TestSuite.run() requires the
# "result object to be passed in."
def test_run__requires_result(self):
suite = unittest.TestSuite()
try:
suite.run()
except TypeError:
pass
else:
self.fail("Failed to raise TypeError")
# "Run the tests associated with this suite, collecting the result into
# the test result object passed as result."
def test_run(self):
events = []
result = LoggingResult(events)
class LoggingCase(unittest.TestCase):
def run(self, result):
events.append('run %s' % self._testMethodName)
def test1(self): pass
def test2(self): pass
tests = [LoggingCase('test1'), LoggingCase('test2')]
unittest.TestSuite(tests).run(result)
self.assertEqual(events, ['run test1', 'run test2'])
# "Add a TestCase ... to the suite"
def test_addTest__TestCase(self):
class Foo(unittest.TestCase):
def test(self): pass
test = Foo('test')
suite = unittest.TestSuite()
suite.addTest(test)
self.assertEqual(suite.countTestCases(), 1)
self.assertEqual(list(suite), [test])
# "Add a ... TestSuite to the suite"
def test_addTest__TestSuite(self):
class Foo(unittest.TestCase):
def test(self): pass
suite_2 = unittest.TestSuite([Foo('test')])
suite = unittest.TestSuite()
suite.addTest(suite_2)
self.assertEqual(suite.countTestCases(), 1)
self.assertEqual(list(suite), [suite_2])
# "Add all the tests from an iterable of TestCase and TestSuite
# instances to this test suite."
#
# "This is equivalent to iterating over tests, calling addTest() for
# each element"
def test_addTests(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
test_1 = Foo('test_1')
test_2 = Foo('test_2')
inner_suite = unittest.TestSuite([test_2])
def gen():
yield test_1
yield test_2
yield inner_suite
suite_1 = unittest.TestSuite()
suite_1.addTests(gen())
self.assertEqual(list(suite_1), list(gen()))
# "This is equivalent to iterating over tests, calling addTest() for
# each element"
suite_2 = unittest.TestSuite()
for t in gen():
suite_2.addTest(t)
self.assertEqual(suite_1, suite_2)
# "Add all the tests from an iterable of TestCase and TestSuite
# instances to this test suite."
#
# What happens if it doesn't get an iterable?
def test_addTest__noniterable(self):
suite = unittest.TestSuite()
try:
suite.addTests(5)
except TypeError:
pass
else:
self.fail("Failed to raise TypeError")
def test_addTest__noncallable(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTest, 5)
def test_addTest__casesuiteclass(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTest, Test_TestSuite)
self.assertRaises(TypeError, suite.addTest, unittest.TestSuite)
def test_addTests__string(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTests, "foo")
class Test_FunctionTestCase(TestCase):
# "Return the number of tests represented by the this test object. For
# TestCase instances, this will always be 1"
def test_countTestCases(self):
test = unittest.FunctionTestCase(lambda: None)
self.assertEqual(test.countTestCases(), 1)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if setUp() raises
# an exception.
def test_run_call_order__error_in_setUp(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
raise RuntimeError('raised by setUp')
def test():
events.append('test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'addError', 'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test raises
# an error (as opposed to a failure).
def test_run_call_order__error_in_test(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
raise RuntimeError('raised by test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addError', 'tearDown',
'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test signals
# a failure (as opposed to an error).
def test_run_call_order__failure_in_test(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
self.fail('raised by test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addFailure', 'tearDown',
'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if tearDown() raises
# an exception.
def test_run_call_order__error_in_tearDown(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
def tearDown():
events.append('tearDown')
raise RuntimeError('raised by tearDown')
expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError',
'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "Return a string identifying the specific test case."
#
# Because of the vague nature of the docs, I'm not going to lock this
# test down too much. Really all that can be asserted is that the id()
    # will be a string (either 8-bit or unicode -- again, because the docs
# just say "string")
def test_id(self):
test = unittest.FunctionTestCase(lambda: None)
self.failUnless(isinstance(test.id(), basestring))
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
def test_shortDescription__no_docstring(self):
test = unittest.FunctionTestCase(lambda: None)
self.assertEqual(test.shortDescription(), None)
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
def test_shortDescription__singleline_docstring(self):
desc = "this tests foo"
test = unittest.FunctionTestCase(lambda: None, description=desc)
self.assertEqual(test.shortDescription(), "this tests foo")
class Test_TestResult(TestCase):
# Note: there are not separate tests for TestResult.wasSuccessful(),
# TestResult.errors, TestResult.failures, TestResult.testsRun or
# TestResult.shouldStop because these only have meaning in terms of
# other TestResult methods.
#
# Accordingly, tests for the aforenamed attributes are incorporated
# in with the tests for the defining methods.
################################################################
def test_init(self):
result = unittest.TestResult()
self.failUnless(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 0)
self.assertEqual(result.shouldStop, False)
# "This method can be called to signal that the set of tests being
# run should be aborted by setting the TestResult's shouldStop
# attribute to True."
def test_stop(self):
result = unittest.TestResult()
result.stop()
self.assertEqual(result.shouldStop, True)
# "Called when the test case test is about to be run. The default
# implementation simply increments the instance's testsRun counter."
def test_startTest(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
result = unittest.TestResult()
result.startTest(test)
self.failUnless(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
result.stopTest(test)
# "Called after the test case test has been executed, regardless of
# the outcome. The default implementation does nothing."
def test_stopTest(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
result = unittest.TestResult()
result.startTest(test)
self.failUnless(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
result.stopTest(test)
# Same tests as above; make sure nothing has changed
self.failUnless(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
# "addSuccess(test)"
# ...
# "Called when the test case test succeeds"
# ...
# "wasSuccessful() - Returns True if all tests run so far have passed,
# otherwise returns False"
# ...
# "testsRun - The total number of tests run so far."
# ...
# "errors - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test which raised an
# unexpected exception. Contains formatted
# tracebacks instead of sys.exc_info() results."
# ...
# "failures - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test where a failure was
# explicitly signalled using the TestCase.fail*() or TestCase.assert*()
# methods. Contains formatted tracebacks instead
# of sys.exc_info() results."
def test_addSuccess(self):
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
result = unittest.TestResult()
result.startTest(test)
result.addSuccess(test)
result.stopTest(test)
self.failUnless(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
# "addFailure(test, err)"
# ...
# "Called when the test case test signals a failure. err is a tuple of
# the form returned by sys.exc_info(): (type, value, traceback)"
# ...
# "wasSuccessful() - Returns True if all tests run so far have passed,
# otherwise returns False"
# ...
# "testsRun - The total number of tests run so far."
# ...
# "errors - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test which raised an
# unexpected exception. Contains formatted
# tracebacks instead of sys.exc_info() results."
# ...
# "failures - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test where a failure was
# explicitly signalled using the TestCase.fail*() or TestCase.assert*()
# methods. Contains formatted tracebacks instead
# of sys.exc_info() results."
def test_addFailure(self):
import sys
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
try:
test.fail("foo")
except:
exc_info_tuple = sys.exc_info()
result = unittest.TestResult()
result.startTest(test)
result.addFailure(test, exc_info_tuple)
result.stopTest(test)
self.failIf(result.wasSuccessful())
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.failures), 1)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
test_case, formatted_exc = result.failures[0]
self.failUnless(test_case is test)
self.failUnless(isinstance(formatted_exc, str))
# "addError(test, err)"
# ...
# "Called when the test case test raises an unexpected exception err
# is a tuple of the form returned by sys.exc_info():
# (type, value, traceback)"
# ...
# "wasSuccessful() - Returns True if all tests run so far have passed,
# otherwise returns False"
# ...
# "testsRun - The total number of tests run so far."
# ...
# "errors - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test which raised an
# unexpected exception. Contains formatted
# tracebacks instead of sys.exc_info() results."
# ...
# "failures - A list containing 2-tuples of TestCase instances and
# formatted tracebacks. Each tuple represents a test where a failure was
# explicitly signalled using the TestCase.fail*() or TestCase.assert*()
# methods. Contains formatted tracebacks instead
# of sys.exc_info() results."
def test_addError(self):
import sys
class Foo(unittest.TestCase):
def test_1(self):
pass
test = Foo('test_1')
try:
raise TypeError()
except:
exc_info_tuple = sys.exc_info()
result = unittest.TestResult()
result.startTest(test)
result.addError(test, exc_info_tuple)
result.stopTest(test)
self.failIf(result.wasSuccessful())
self.assertEqual(len(result.errors), 1)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 1)
self.assertEqual(result.shouldStop, False)
test_case, formatted_exc = result.errors[0]
self.failUnless(test_case is test)
self.failUnless(isinstance(formatted_exc, str))
### Support code for Test_TestCase
################################################################
class Foo(unittest.TestCase):
def runTest(self): pass
def test1(self): pass
class Bar(Foo):
def test2(self): pass
################################################################
### /Support code for Test_TestCase
class Test_TestCase(TestCase, TestEquality, TestHashing):
### Set up attributes used by inherited tests
################################################################
# Used by TestHashing.test_hash and TestEquality.test_eq
eq_pairs = [(Foo('test1'), Foo('test1'))]
# Used by TestEquality.test_ne
ne_pairs = [(Foo('test1'), Foo('runTest'))
,(Foo('test1'), Bar('test1'))
,(Foo('test1'), Bar('test2'))]
################################################################
### /Set up attributes used by inherited tests
# "class TestCase([methodName])"
# ...
# "Each instance of TestCase will run a single test method: the
# method named methodName."
# ...
# "methodName defaults to "runTest"."
#
# Make sure it really is optional, and that it defaults to the proper
# thing.
def test_init__no_test_name(self):
class Test(unittest.TestCase):
def runTest(self): raise MyException()
def test(self): pass
self.assertEqual(Test().id()[-13:], '.Test.runTest')
# "class TestCase([methodName])"
# ...
# "Each instance of TestCase will run a single test method: the
# method named methodName."
def test_init__test_name__valid(self):
class Test(unittest.TestCase):
def runTest(self): raise MyException()
def test(self): pass
self.assertEqual(Test('test').id()[-10:], '.Test.test')
# "class TestCase([methodName])"
# ...
# "Each instance of TestCase will run a single test method: the
# method named methodName."
def test_init__test_name__invalid(self):
class Test(unittest.TestCase):
def runTest(self): raise MyException()
def test(self): pass
try:
Test('testfoo')
except ValueError:
pass
else:
self.fail("Failed to raise ValueError")
# "Return the number of tests represented by the this test object. For
# TestCase instances, this will always be 1"
def test_countTestCases(self):
class Foo(unittest.TestCase):
def test(self): pass
self.assertEqual(Foo('test').countTestCases(), 1)
# "Return the default type of test result object to be used to run this
# test. For TestCase instances, this will always be
# unittest.TestResult; subclasses of TestCase should
# override this as necessary."
def test_defaultTestResult(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
result = Foo().defaultTestResult()
self.assertEqual(type(result), unittest.TestResult)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if setUp() raises
# an exception.
def test_run_call_order__error_in_setUp(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def setUp(self):
events.append('setUp')
raise RuntimeError('raised by Foo.setUp')
def test(self):
events.append('test')
def tearDown(self):
events.append('tearDown')
Foo('test').run(result)
expected = ['startTest', 'setUp', 'addError', 'stopTest']
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test raises
# an error (as opposed to a failure).
def test_run_call_order__error_in_test(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def setUp(self):
events.append('setUp')
def test(self):
events.append('test')
raise RuntimeError('raised by Foo.test')
def tearDown(self):
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addError', 'tearDown',
'stopTest']
Foo('test').run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test signals
# a failure (as opposed to an error).
def test_run_call_order__failure_in_test(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def setUp(self):
events.append('setUp')
def test(self):
events.append('test')
self.fail('raised by Foo.test')
def tearDown(self):
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addFailure', 'tearDown',
'stopTest']
Foo('test').run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if tearDown() raises
# an exception.
def test_run_call_order__error_in_tearDown(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def setUp(self):
events.append('setUp')
def test(self):
events.append('test')
def tearDown(self):
events.append('tearDown')
raise RuntimeError('raised by Foo.tearDown')
Foo('test').run(result)
expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError',
'stopTest']
self.assertEqual(events, expected)
# "This class attribute gives the exception raised by the test() method.
# If a test framework needs to use a specialized exception, possibly to
# carry additional information, it must subclass this exception in
# order to ``play fair'' with the framework. The initial value of this
# attribute is AssertionError"
def test_failureException__default(self):
class Foo(unittest.TestCase):
def test(self):
pass
self.failUnless(Foo('test').failureException is AssertionError)
# "This class attribute gives the exception raised by the test() method.
# If a test framework needs to use a specialized exception, possibly to
# carry additional information, it must subclass this exception in
# order to ``play fair'' with the framework."
#
# Make sure TestCase.run() respects the designated failureException
def test_failureException__subclassing__explicit_raise(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def test(self):
raise RuntimeError()
failureException = RuntimeError
self.failUnless(Foo('test').failureException is RuntimeError)
Foo('test').run(result)
expected = ['startTest', 'addFailure', 'stopTest']
self.assertEqual(events, expected)
# "This class attribute gives the exception raised by the test() method.
# If a test framework needs to use a specialized exception, possibly to
# carry additional information, it must subclass this exception in
# order to ``play fair'' with the framework."
#
# Make sure TestCase.run() respects the designated failureException
def test_failureException__subclassing__implicit_raise(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def test(self):
self.fail("foo")
failureException = RuntimeError
self.failUnless(Foo('test').failureException is RuntimeError)
Foo('test').run(result)
expected = ['startTest', 'addFailure', 'stopTest']
self.assertEqual(events, expected)
# "The default implementation does nothing."
def test_setUp(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
# ... and nothing should happen
Foo().setUp()
# "The default implementation does nothing."
def test_tearDown(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
# ... and nothing should happen
Foo().tearDown()
# "Return a string identifying the specific test case."
#
# Because of the vague nature of the docs, I'm not going to lock this
# test down too much. Really all that can be asserted is that the id()
    # will be a string (either 8-bit or unicode -- again, because the docs
# just say "string")
def test_id(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
self.failUnless(isinstance(Foo().id(), basestring))
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
def test_shortDescription__no_docstring(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
self.assertEqual(Foo().shortDescription(), None)
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
def test_shortDescription__singleline_docstring(self):
class Foo(unittest.TestCase):
def runTest(self):
"this tests foo"
pass
self.assertEqual(Foo().shortDescription(), "this tests foo")
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
def test_shortDescription__multiline_docstring(self):
class Foo(unittest.TestCase):
def runTest(self):
"""this tests foo
blah, bar and baz are also tested"""
pass
self.assertEqual(Foo().shortDescription(), "this tests foo")
# "If result is omitted or None, a temporary result object is created
# and used, but is not made available to the caller"
def test_run__uses_defaultTestResult(self):
events = []
class Foo(unittest.TestCase):
def test(self):
events.append('test')
def defaultTestResult(self):
return LoggingResult(events)
# Make run() find a result object on its own
Foo('test').run()
expected = ['startTest', 'test', 'stopTest']
self.assertEqual(events, expected)
class Test_Assertions(TestCase):
def test_AlmostEqual(self):
self.failUnlessAlmostEqual(1.00000001, 1.0)
self.failIfAlmostEqual(1.0000001, 1.0)
self.assertRaises(AssertionError,
self.failUnlessAlmostEqual, 1.0000001, 1.0)
self.assertRaises(AssertionError,
self.failIfAlmostEqual, 1.00000001, 1.0)
self.failUnlessAlmostEqual(1.1, 1.0, places=0)
self.assertRaises(AssertionError,
self.failUnlessAlmostEqual, 1.1, 1.0, places=1)
self.failUnlessAlmostEqual(0, .1+.1j, places=0)
self.failIfAlmostEqual(0, .1+.1j, places=1)
self.assertRaises(AssertionError,
self.failUnlessAlmostEqual, 0, .1+.1j, places=1)
self.assertRaises(AssertionError,
self.failIfAlmostEqual, 0, .1+.1j, places=0)
######################################################################
## Main
######################################################################
def test_main():
test_support.run_unittest(Test_TestCase, Test_TestLoader,
Test_TestSuite, Test_TestResult, Test_FunctionTestCase,
Test_Assertions)
if __name__ == "__main__":
test_main()
|
apache-2.0
|
kcpawan/django
|
django/contrib/admin/views/main.py
|
327
|
16684
|
import sys
from collections import OrderedDict
from django.contrib.admin import FieldListFilter
from django.contrib.admin.exceptions import (
DisallowedModelAdminLookup, DisallowedModelAdminToField,
)
from django.contrib.admin.options import (
IS_POPUP_VAR, TO_FIELD_VAR, IncorrectLookupParameters,
)
from django.contrib.admin.utils import (
get_fields_from_path, lookup_needs_distinct, prepare_lookup_value, quote,
)
from django.core.exceptions import (
FieldDoesNotExist, ImproperlyConfigured, SuspiciousOperation,
)
from django.core.paginator import InvalidPage
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import six
from django.utils.encoding import force_text
from django.utils.http import urlencode
from django.utils.translation import ugettext
# Changelist settings
ALL_VAR = 'all'
ORDER_VAR = 'o'
ORDER_TYPE_VAR = 'ot'
PAGE_VAR = 'p'
SEARCH_VAR = 'q'
ERROR_FLAG = 'e'
IGNORED_PARAMS = (
ALL_VAR, ORDER_VAR, ORDER_TYPE_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR)
class ChangeList(object):
def __init__(self, request, model, list_display, list_display_links,
list_filter, date_hierarchy, search_fields, list_select_related,
list_per_page, list_max_show_all, list_editable, model_admin):
self.model = model
self.opts = model._meta
self.lookup_opts = self.opts
self.root_queryset = model_admin.get_queryset(request)
self.list_display = list_display
self.list_display_links = list_display_links
self.list_filter = list_filter
self.date_hierarchy = date_hierarchy
self.search_fields = search_fields
self.list_select_related = list_select_related
self.list_per_page = list_per_page
self.list_max_show_all = list_max_show_all
self.model_admin = model_admin
self.preserved_filters = model_admin.get_preserved_filters(request)
# Get search parameters from the query string.
try:
self.page_num = int(request.GET.get(PAGE_VAR, 0))
except ValueError:
self.page_num = 0
self.show_all = ALL_VAR in request.GET
self.is_popup = IS_POPUP_VAR in request.GET
to_field = request.GET.get(TO_FIELD_VAR)
if to_field and not model_admin.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
self.to_field = to_field
self.params = dict(request.GET.items())
if PAGE_VAR in self.params:
del self.params[PAGE_VAR]
if ERROR_FLAG in self.params:
del self.params[ERROR_FLAG]
if self.is_popup:
self.list_editable = ()
else:
self.list_editable = list_editable
self.query = request.GET.get(SEARCH_VAR, '')
self.queryset = self.get_queryset(request)
self.get_results(request)
if self.is_popup:
title = ugettext('Select %s')
else:
title = ugettext('Select %s to change')
self.title = title % force_text(self.opts.verbose_name)
self.pk_attname = self.lookup_opts.pk.attname
def get_filters_params(self, params=None):
"""
Returns all params except IGNORED_PARAMS
"""
if not params:
params = self.params
lookup_params = params.copy() # a dictionary of the query string
# Remove all the parameters that are globally and systematically
# ignored.
for ignored in IGNORED_PARAMS:
if ignored in lookup_params:
del lookup_params[ignored]
return lookup_params
def get_filters(self, request):
lookup_params = self.get_filters_params()
use_distinct = False
for key, value in lookup_params.items():
if not self.model_admin.lookup_allowed(key, value):
raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key)
filter_specs = []
if self.list_filter:
for list_filter in self.list_filter:
if callable(list_filter):
# This is simply a custom list filter class.
spec = list_filter(request, lookup_params,
self.model, self.model_admin)
else:
field_path = None
if isinstance(list_filter, (tuple, list)):
# This is a custom FieldListFilter class for a given field.
field, field_list_filter_class = list_filter
else:
# This is simply a field name, so use the default
# FieldListFilter class that has been registered for
# the type of the given field.
field, field_list_filter_class = list_filter, FieldListFilter.create
if not isinstance(field, models.Field):
field_path = field
field = get_fields_from_path(self.model, field_path)[-1]
spec = field_list_filter_class(field, request, lookup_params,
self.model, self.model_admin, field_path=field_path)
# Check if we need to use distinct()
use_distinct = (use_distinct or
lookup_needs_distinct(self.lookup_opts,
field_path))
if spec and spec.has_output():
filter_specs.append(spec)
# At this point, all the parameters used by the various ListFilters
# have been removed from lookup_params, which now only contains other
# parameters passed via the query string. We now loop through the
# remaining parameters both to ensure that all the parameters are valid
# fields and to determine if at least one of them needs distinct(). If
# the lookup parameters aren't real fields, then bail out.
try:
for key, value in lookup_params.items():
lookup_params[key] = prepare_lookup_value(key, value)
use_distinct = (use_distinct or
lookup_needs_distinct(self.lookup_opts, key))
return filter_specs, bool(filter_specs), lookup_params, use_distinct
except FieldDoesNotExist as e:
six.reraise(IncorrectLookupParameters, IncorrectLookupParameters(e), sys.exc_info()[2])
def get_query_string(self, new_params=None, remove=None):
if new_params is None:
new_params = {}
if remove is None:
remove = []
p = self.params.copy()
for r in remove:
for k in list(p):
if k.startswith(r):
del p[k]
for k, v in new_params.items():
if v is None:
if k in p:
del p[k]
else:
p[k] = v
return '?%s' % urlencode(sorted(p.items()))
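    # A minimal usage sketch (hypothetical values): with self.params equal
    # to {'q': 'foo', 'o': '1'}, get_query_string({'p': 2}, remove=['o'])
    # drops every key starting with 'o', adds p=2, and returns '?p=2&q=foo'.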
def get_results(self, request):
paginator = self.model_admin.get_paginator(request, self.queryset, self.list_per_page)
# Get the number of objects, with admin filters applied.
result_count = paginator.count
# Get the total number of objects, with no admin filters applied.
# Perform a slight optimization:
# full_result_count is equal to paginator.count if no filters
# were applied
if self.model_admin.show_full_result_count:
if self.get_filters_params() or self.params.get(SEARCH_VAR):
full_result_count = self.root_queryset.count()
else:
full_result_count = result_count
else:
full_result_count = None
can_show_all = result_count <= self.list_max_show_all
multi_page = result_count > self.list_per_page
# Get the list of objects to display on this page.
if (self.show_all and can_show_all) or not multi_page:
result_list = self.queryset._clone()
else:
try:
result_list = paginator.page(self.page_num + 1).object_list
except InvalidPage:
raise IncorrectLookupParameters
self.result_count = result_count
self.show_full_result_count = self.model_admin.show_full_result_count
# Admin actions are shown if there is at least one entry
# or if entries are not counted because show_full_result_count is disabled
self.show_admin_actions = not self.show_full_result_count or bool(full_result_count)
self.full_result_count = full_result_count
self.result_list = result_list
self.can_show_all = can_show_all
self.multi_page = multi_page
self.paginator = paginator
def _get_default_ordering(self):
ordering = []
if self.model_admin.ordering:
ordering = self.model_admin.ordering
elif self.lookup_opts.ordering:
ordering = self.lookup_opts.ordering
return ordering
def get_ordering_field(self, field_name):
"""
Returns the proper model field name corresponding to the given
field_name to use for ordering. field_name may either be the name of a
proper model field or the name of a method (on the admin or model) or a
callable with the 'admin_order_field' attribute. Returns None if no
proper model field name can be matched.
"""
try:
field = self.lookup_opts.get_field(field_name)
return field.name
except FieldDoesNotExist:
            # See whether field_name is the name of a non-field attribute
            # that allows sorting.
if callable(field_name):
attr = field_name
elif hasattr(self.model_admin, field_name):
attr = getattr(self.model_admin, field_name)
else:
attr = getattr(self.model, field_name)
return getattr(attr, 'admin_order_field', None)
def get_ordering(self, request, queryset):
"""
Returns the list of ordering fields for the change list.
First we check the get_ordering() method in model admin, then we check
the object's default ordering. Then, any manually-specified ordering
from the query string overrides anything. Finally, a deterministic
order is guaranteed by ensuring the primary key is used as the last
ordering field.
"""
params = self.params
ordering = list(self.model_admin.get_ordering(request)
or self._get_default_ordering())
if ORDER_VAR in params:
# Clear ordering and used params
ordering = []
order_params = params[ORDER_VAR].split('.')
for p in order_params:
try:
none, pfx, idx = p.rpartition('-')
field_name = self.list_display[int(idx)]
order_field = self.get_ordering_field(field_name)
if not order_field:
continue # No 'admin_order_field', skip it
# reverse order if order_field has already "-" as prefix
if order_field.startswith('-') and pfx == "-":
ordering.append(order_field[1:])
else:
ordering.append(pfx + order_field)
except (IndexError, ValueError):
continue # Invalid ordering specified, skip it.
# Add the given query's ordering fields, if any.
ordering.extend(queryset.query.order_by)
# Ensure that the primary key is systematically present in the list of
# ordering fields so we can guarantee a deterministic order across all
# database backends.
pk_name = self.lookup_opts.pk.name
if not (set(ordering) & {'pk', '-pk', pk_name, '-' + pk_name}):
# The two sets do not intersect, meaning the pk isn't present. So
# we add it.
ordering.append('-pk')
return ordering
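    # A worked sketch (hypothetical values): with list_display of
    # ['name', 'age'], both mapping straight to model fields, and a query
    # string of ?o=1.-0, the loop yields ['age', '-name']; assuming the
    # queryset itself carries no ordering, '-pk' is then appended since
    # neither entry references the primary key.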
def get_ordering_field_columns(self):
"""
Returns an OrderedDict of ordering field column numbers and asc/desc
"""
# We must cope with more than one column having the same underlying sort
# field, so we base things on column numbers.
ordering = self._get_default_ordering()
ordering_fields = OrderedDict()
if ORDER_VAR not in self.params:
            # For ordering specified on the ModelAdmin or model Meta we don't
            # know the exact column numbers, because more than one column may
            # be tied to a given ordering field, so we guess.
for field in ordering:
if field.startswith('-'):
field = field[1:]
order_type = 'desc'
else:
order_type = 'asc'
for index, attr in enumerate(self.list_display):
if self.get_ordering_field(attr) == field:
ordering_fields[index] = order_type
break
else:
for p in self.params[ORDER_VAR].split('.'):
none, pfx, idx = p.rpartition('-')
try:
idx = int(idx)
except ValueError:
continue # skip it
ordering_fields[idx] = 'desc' if pfx == '-' else 'asc'
return ordering_fields
def get_queryset(self, request):
# First, we collect all the declared list filters.
(self.filter_specs, self.has_filters, remaining_lookup_params,
filters_use_distinct) = self.get_filters(request)
# Then, we let every list filter modify the queryset to its liking.
qs = self.root_queryset
for filter_spec in self.filter_specs:
new_qs = filter_spec.queryset(request, qs)
if new_qs is not None:
qs = new_qs
try:
# Finally, we apply the remaining lookup parameters from the query
# string (i.e. those that haven't already been processed by the
# filters).
qs = qs.filter(**remaining_lookup_params)
except (SuspiciousOperation, ImproperlyConfigured):
# Allow certain types of errors to be re-raised as-is so that the
# caller can treat them in a special way.
raise
except Exception as e:
# Every other error is caught with a naked except, because we don't
# have any other way of validating lookup parameters. They might be
# invalid if the keyword arguments are incorrect, or if the values
            # are of the wrong type, so we might get FieldError,
# ValueError, ValidationError, or ?.
raise IncorrectLookupParameters(e)
if not qs.query.select_related:
qs = self.apply_select_related(qs)
# Set ordering.
ordering = self.get_ordering(request, qs)
qs = qs.order_by(*ordering)
# Apply search results
qs, search_use_distinct = self.model_admin.get_search_results(
request, qs, self.query)
# Remove duplicates from results, if necessary
        if filters_use_distinct or search_use_distinct:
return qs.distinct()
else:
return qs
def apply_select_related(self, qs):
if self.list_select_related is True:
return qs.select_related()
if self.list_select_related is False:
if self.has_related_field_in_list_display():
return qs.select_related()
if self.list_select_related:
return qs.select_related(*self.list_select_related)
return qs
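    # In short: True forces select_related(); False still applies it when a
    # ManyToOneRel appears in list_display (see below); a non-empty sequence
    # selects only the named relations; an empty one leaves qs untouched.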
def has_related_field_in_list_display(self):
for field_name in self.list_display:
try:
field = self.lookup_opts.get_field(field_name)
except FieldDoesNotExist:
pass
else:
if isinstance(field.remote_field, models.ManyToOneRel):
return True
return False
def url_for_result(self, result):
pk = getattr(result, self.pk_attname)
return reverse('admin:%s_%s_change' % (self.opts.app_label,
self.opts.model_name),
args=(quote(pk),),
current_app=self.model_admin.admin_site.name)
|
bsd-3-clause
|
yast/yast-python-bindings
|
examples/CheckBox3.py
|
1
|
1854
|
# encoding: utf-8
from yast import import_module
import_module('UI')
from yast import *
class CheckBox3Client:
def main(self):
# Build dialog with one check box and buttons to set its state to
# on, off or "don't care" (tri-state).
UI.OpenDialog(
VBox(
CheckBox(Id("cb"), "Format hard disk"),
HBox(
HWeight(1, PushButton(Id("setOn"), "Set on")),
HWeight(1, PushButton(Id("setOff"), "Set off")),
HWeight(1, PushButton(Id("dontCare"), "Don't care"))
),
PushButton(Id("ok"), "&OK")
)
)
# Input loop. Will be left only after 'OK' is clicked.
button = None
while True:
button = UI.UserInput()
if button == "setOn":
UI.ChangeWidget(Id("cb"), "Value", True)
elif button == "setOff":
UI.ChangeWidget(Id("cb"), "Value", False)
elif button == "dontCare":
UI.ChangeWidget(Id("cb"), "Value", None)
if button == "ok":
break
# Get the check box's value.
#
        # Notice: UI::UserInput() does NOT return this value!
# Rather, it returns the ID of the widget (normally the PushButton)
# that caused UI::UserInput() to return.
cb_val = UI.QueryWidget(Id("cb"), "Value")
# Close the dialog.
# Remember to read values from the dialog's widgets BEFORE closing it!
UI.CloseDialog()
# Convert the check box value to string.
valStr = "Don't care"
if cb_val == True:
valStr = "Yes"
if cb_val == False:
valStr = "No"
# Pop up a new dialog to echo the input.
UI.OpenDialog(
VBox(Label("Your selection:"), Label(valStr), PushButton("&OK"))
)
UI.UserInput()
UI.CloseDialog()
CheckBox3Client().main()
|
gpl-2.0
|
rcharp/toyota-flask
|
flask/debughelpers.py
|
777
|
3508
|
# -*- coding: utf-8 -*-
"""
flask.debughelpers
~~~~~~~~~~~~~~~~~~
Various helpers to make the development experience better.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from ._compat import implements_to_string
class UnexpectedUnicodeError(AssertionError, UnicodeError):
"""Raised in places where we want some better error reporting for
unexpected unicode or binary data.
"""
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
"""Raised from request.files during debugging. The idea is that it can
provide a better error message than just a generic KeyError/BadRequest.
"""
def __init__(self, request, key):
form_matches = request.form.getlist(key)
buf = ['You tried to access the file "%s" in the request.files '
'dictionary but it does not exist. The mimetype for the request '
'is "%s" instead of "multipart/form-data" which means that no '
'file contents were transmitted. To fix this error you should '
'provide enctype="multipart/form-data" in your form.' %
(key, request.mimetype)]
if form_matches:
buf.append('\n\nThe browser instead transmitted some file names. '
'This was submitted: %s' % ', '.join('"%s"' % x
for x in form_matches))
self.msg = ''.join(buf)
def __str__(self):
return self.msg
class FormDataRoutingRedirect(AssertionError):
"""This exception is raised by Flask in debug mode if it detects a
redirect caused by the routing system when the request method is not
GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
"""
def __init__(self, request):
exc = request.routing_exception
buf = ['A request was sent to this URL (%s) but a redirect was '
'issued automatically by the routing system to "%s".'
% (request.url, exc.new_url)]
# In case just a slash was appended we can be extra helpful
if request.base_url + '/' == exc.new_url.split('?')[0]:
buf.append(' The URL was defined with a trailing slash so '
'Flask will automatically redirect to the URL '
'with the trailing slash if it was accessed '
'without one.')
buf.append(' Make sure to directly send your %s-request to this URL '
'since we can\'t make browsers or HTTP clients redirect '
'with form data reliably or without user interaction.' %
request.method)
buf.append('\n\nNote: this exception is only raised in debug mode')
AssertionError.__init__(self, ''.join(buf).encode('utf-8'))
def attach_enctype_error_multidict(request):
"""Since Flask 0.8 we're monkeypatching the files object in case a
request is detected that does not use multipart form data but the files
object is accessed.
"""
oldcls = request.files.__class__
class newcls(oldcls):
def __getitem__(self, key):
try:
return oldcls.__getitem__(self, key)
except KeyError as e:
if key not in request.form:
raise
raise DebugFilesKeyError(request, key)
newcls.__name__ = oldcls.__name__
newcls.__module__ = oldcls.__module__
request.files.__class__ = newcls
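# A minimal usage sketch (hypothetical caller, not part of this module):
# after attach_enctype_error_multidict(request) has run, a later
# request.files['upload'] raises the descriptive DebugFilesKeyError above
# whenever 'upload' actually arrived as a plain form field (i.e. the form
# lacked enctype="multipart/form-data"); other missing keys still raise a
# bare KeyError.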
|
apache-2.0
|
lichuan261/wuand
|
XX-Net/python27/1.0/lib/dis.py
|
270
|
6499
|
"""Disassembler of Python byte code into mnemonics."""
import sys
import types
from opcode import *
from opcode import __all__ as _opcodes_all
__all__ = ["dis", "disassemble", "distb", "disco",
"findlinestarts", "findlabels"] + _opcodes_all
del _opcodes_all
_have_code = (types.MethodType, types.FunctionType, types.CodeType,
types.ClassType, type)
def dis(x=None):
"""Disassemble classes, methods, functions, or code.
With no argument, disassemble the last traceback.
"""
if x is None:
distb()
return
if isinstance(x, types.InstanceType):
x = x.__class__
if hasattr(x, 'im_func'):
x = x.im_func
if hasattr(x, 'func_code'):
x = x.func_code
if hasattr(x, '__dict__'):
items = x.__dict__.items()
items.sort()
for name, x1 in items:
if isinstance(x1, _have_code):
print "Disassembly of %s:" % name
try:
dis(x1)
except TypeError, msg:
print "Sorry:", msg
print
elif hasattr(x, 'co_code'):
disassemble(x)
elif isinstance(x, str):
disassemble_string(x)
else:
raise TypeError, \
"don't know how to disassemble %s objects" % \
type(x).__name__
def distb(tb=None):
"""Disassemble a traceback (default: last traceback)."""
if tb is None:
try:
tb = sys.last_traceback
except AttributeError:
raise RuntimeError, "no last traceback to disassemble"
while tb.tb_next: tb = tb.tb_next
disassemble(tb.tb_frame.f_code, tb.tb_lasti)
def disassemble(co, lasti=-1):
"""Disassemble a code object."""
code = co.co_code
labels = findlabels(code)
linestarts = dict(findlinestarts(co))
n = len(code)
i = 0
extended_arg = 0
free = None
while i < n:
c = code[i]
op = ord(c)
if i in linestarts:
if i > 0:
print
print "%3d" % linestarts[i],
else:
print ' ',
if i == lasti: print '-->',
else: print ' ',
if i in labels: print '>>',
else: print ' ',
print repr(i).rjust(4),
print opname[op].ljust(20),
i = i+1
if op >= HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i+1])*256 + extended_arg
extended_arg = 0
i = i+2
if op == EXTENDED_ARG:
extended_arg = oparg*65536L
print repr(oparg).rjust(5),
if op in hasconst:
print '(' + repr(co.co_consts[oparg]) + ')',
elif op in hasname:
print '(' + co.co_names[oparg] + ')',
elif op in hasjrel:
print '(to ' + repr(i + oparg) + ')',
elif op in haslocal:
print '(' + co.co_varnames[oparg] + ')',
elif op in hascompare:
print '(' + cmp_op[oparg] + ')',
elif op in hasfree:
if free is None:
free = co.co_cellvars + co.co_freevars
print '(' + free[oparg] + ')',
print
def disassemble_string(code, lasti=-1, varnames=None, names=None,
constants=None):
labels = findlabels(code)
n = len(code)
i = 0
while i < n:
c = code[i]
op = ord(c)
if i == lasti: print '-->',
else: print ' ',
if i in labels: print '>>',
else: print ' ',
print repr(i).rjust(4),
print opname[op].ljust(15),
i = i+1
if op >= HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i+1])*256
i = i+2
print repr(oparg).rjust(5),
if op in hasconst:
if constants:
print '(' + repr(constants[oparg]) + ')',
else:
print '(%d)'%oparg,
elif op in hasname:
if names is not None:
print '(' + names[oparg] + ')',
else:
print '(%d)'%oparg,
elif op in hasjrel:
print '(to ' + repr(i + oparg) + ')',
elif op in haslocal:
if varnames:
print '(' + varnames[oparg] + ')',
else:
print '(%d)' % oparg,
elif op in hascompare:
print '(' + cmp_op[oparg] + ')',
print
disco = disassemble # XXX For backwards compatibility
def findlabels(code):
"""Detect all offsets in a byte code which are jump targets.
Return the list of offsets.
"""
labels = []
n = len(code)
i = 0
while i < n:
c = code[i]
op = ord(c)
i = i+1
if op >= HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i+1])*256
i = i+2
label = -1
if op in hasjrel:
label = i+oparg
elif op in hasjabs:
label = oparg
if label >= 0:
if label not in labels:
labels.append(label)
return labels
def findlinestarts(code):
"""Find the offsets in a byte code which are start of lines in the source.
Generate pairs (offset, lineno) as described in Python/compile.c.
"""
byte_increments = [ord(c) for c in code.co_lnotab[0::2]]
line_increments = [ord(c) for c in code.co_lnotab[1::2]]
lastlineno = None
lineno = code.co_firstlineno
addr = 0
for byte_incr, line_incr in zip(byte_increments, line_increments):
if byte_incr:
if lineno != lastlineno:
yield (addr, lineno)
lastlineno = lineno
addr += byte_incr
lineno += line_incr
if lineno != lastlineno:
yield (addr, lineno)
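# A minimal sketch (hypothetical helper, not part of the stdlib module):
# resolving the source line in effect at a given bytecode offset by walking
# the (offset, lineno) pairs produced by findlinestarts().
def _line_for_offset(co, offset):
    """Return the source line active at `offset` in code object `co`."""
    line = None
    for addr, lineno in findlinestarts(co):
        if addr > offset:
            break
        line = lineno
    return line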
def _test():
"""Simple test program to disassemble a file."""
if sys.argv[1:]:
if sys.argv[2:]:
sys.stderr.write("usage: python dis.py [-|file]\n")
sys.exit(2)
fn = sys.argv[1]
if not fn or fn == "-":
fn = None
else:
fn = None
if fn is None:
f = sys.stdin
else:
f = open(fn)
source = f.read()
if fn is not None:
f.close()
else:
fn = "<stdin>"
code = compile(source, fn, "exec")
dis(code)
if __name__ == "__main__":
_test()
|
gpl-2.0
|
michaelhkw/incubator-impala
|
tests/performance/query_executor.py
|
1
|
7409
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Module used for executing queries and gathering results.
# The QueryExecutor is meant to be generic and has no knowledge
# of how to actually execute a query. It takes a query and its config
# and executes it against an executor function.
# For example (in pseudo-code):
#
# def exec_func(query, config):
# ...
#
# config = ImpalaBeeswaxQueryExecConfig()
# executor = QueryExecutor('beeswax', query, config, exec_func)
# executor.run()
# result = executor.result
import logging
import os
# Setup logging for this module.
logging.basicConfig(level=logging.INFO, format='[%(name)s] %(threadName)s: %(message)s')
LOG = logging.getLogger('query_executor')
LOG.setLevel(level=logging.INFO)
# globals.
hive_result_regex = r'Time taken: (\d*)\.(\d*) seconds'
## TODO: Split executors into their own modules.
class QueryExecConfig(object):
"""Base Class for Execution Configs
Attributes:
plugin_runner (PluginRunner?)
"""
def __init__(self, plugin_runner=None):
self.plugin_runner = plugin_runner
class ImpalaQueryExecConfig(QueryExecConfig):
"""Base class for Impala query execution config
Attributes:
impalad (str): address of impalad <host>:<port>
"""
def __init__(self, plugin_runner=None, impalad='localhost:21000'):
super(ImpalaQueryExecConfig, self).__init__(plugin_runner=plugin_runner)
self._impalad = impalad
@property
def impalad(self):
return self._impalad
@impalad.setter
def impalad(self, value):
self._impalad = value
class JdbcQueryExecConfig(ImpalaQueryExecConfig):
"""Impala query execution config for jdbc
Attributes:
    transport (?): ?
"""
JDBC_CLIENT_PATH = os.path.join(os.environ['IMPALA_HOME'], 'bin/run-jdbc-client.sh')
def __init__(self, plugin_runner=None, impalad='localhost:21050', transport=None):
super(JdbcQueryExecConfig, self).__init__(plugin_runner=plugin_runner,
impalad=impalad)
self.transport = transport
@property
def jdbc_client_cmd(self):
"""The args to run the jdbc client.
Constructed on the fly, since the impalad it points to can change.
"""
assert self.transport is not None
return JdbcQueryExecConfig.JDBC_CLIENT_PATH + ' -i "%s" -t %s' % (self._impalad,
self.transport)
class ImpalaHS2QueryConfig(ImpalaQueryExecConfig):
def __init__(self, use_kerberos=False, impalad="localhost:21050", plugin_runner=None):
super(ImpalaHS2QueryConfig, self).__init__(plugin_runner=plugin_runner,
impalad=impalad)
# TODO Use a config dict for query execution options similar to HS2
self.use_kerberos = use_kerberos
class HiveHS2QueryConfig(QueryExecConfig):
def __init__(self,
plugin_runner=None,
exec_options = None,
use_kerberos=False,
user=None,
hiveserver='localhost'):
super(HiveHS2QueryConfig, self).__init__()
self.exec_options = dict()
self._build_options(exec_options)
self.use_kerberos = use_kerberos
self.user = user
self.hiveserver = hiveserver
def _build_options(self, exec_options):
"""Read the exec_options into self.exec_options
Args:
      exec_options (str): String formatted as "key1:val1;key2:val2"
"""
if exec_options:
      # exec_options are separated by ; on the command line
options = exec_options.split(';')
for option in options:
key, value = option.split(':')
# The keys in HiveService QueryOptions are lower case.
self.exec_options[key.lower()] = value
class BeeswaxQueryExecConfig(ImpalaQueryExecConfig):
"""Impala query execution config for beeswax
Args:
use_kerberos (boolean)
exec_options (str): String formatted as "opt1:val1;opt2:val2"
impalad (str): address of impalad <host>:<port>
plugin_runner (?): ?
Attributes:
use_kerberos (boolean)
exec_options (dict str -> str): execution options
"""
def __init__(self, use_kerberos=False, exec_options=None, impalad='localhost:21000',
plugin_runner=None):
super(BeeswaxQueryExecConfig, self).__init__(plugin_runner=plugin_runner,
impalad=impalad)
self.use_kerberos = use_kerberos
self.exec_options = dict()
self._build_options(exec_options)
def _build_options(self, exec_options):
"""Read the exec_options into self.exec_options
Args:
exec_options (str): String formatted as "opt1:val1;opt2:val2"
"""
if exec_options:
      # exec_options are separated by ; on the command line
options = exec_options.split(';')
for option in options:
key, value = option.split(':')
# The keys in ImpalaService QueryOptions are upper case.
self.exec_options[key.upper()] = value
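# Illustration of _build_options above (the option names are illustrative):
#   BeeswaxQueryExecConfig(exec_options='num_nodes:1;mem_limit:2g').exec_options
#   == {'NUM_NODES': '1', 'MEM_LIMIT': '2g'}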
class QueryExecutor(object):
"""Executes a query.
Args:
name (str): eg. "hive"
query (str): string containing SQL query to be executed
    func (function): Function that accepts a QueryExecOption parameter and returns an
      ImpalaQueryResult. Eg. execute_using_impala_beeswax
    config (QueryExecOption)
    exit_on_error (boolean): Exit right after an error is encountered.
Attributes:
    exec_func (function): Function that accepts a QueryExecOption parameter and returns an
      ImpalaQueryResult.
    exec_config (QueryExecOption)
    query (str): string containing SQL query to be executed
    exit_on_error (boolean): Exit right after an error is encountered.
executor_name (str): eg. "hive"
result (ImpalaQueryResult): Contains the result after execute method is called.
"""
def __init__(self, name, query, func, config, exit_on_error):
self.exec_func = func
self.exec_config = config
self.query = query
self.exit_on_error = exit_on_error
self.executor_name = name
self._result = None
def prepare(self, impalad):
"""Prepare the query to be run.
For now, this sets the impalad that the query connects to. If the executor is hive,
it's a no op.
"""
if 'hive' not in self.executor_name:
self.exec_config.impalad = impalad
def execute(self):
"""Execute the query using the given execution function"""
LOG.debug('Executing %s' % self.query)
self._result = self.exec_func(self.query, self.exec_config)
if not self._result.success:
if self.exit_on_error:
raise RuntimeError(self._result.query_error)
else:
LOG.info("Continuing execution")
@property
def result(self):
"""Getter for the result of the query execution.
A result is a ImpalaQueryResult object that contains the details of a single run of
the query.
"""
return self._result
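# End-to-end sketch tying the pieces together (the impalad address and
# exec_func are illustrative, mirroring the pseudo-code in the module header):
#   executor = QueryExecutor('beeswax', query, exec_func, config,
#                            exit_on_error=True)
#   executor.prepare('localhost:21000')   # no-op for hive executors
#   executor.execute()
#   result = executor.result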
|
apache-2.0
|
alangwansui/mtl_ordercenter
|
openerp/addons/report_intrastat/report_intrastat.py
|
52
|
5691
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.sql import drop_view_if_exists
from openerp.addons.decimal_precision import decimal_precision as dp
class res_country(osv.osv):
_name = 'res.country'
_inherit = 'res.country'
_columns = {
'intrastat': fields.boolean('Intrastat member'),
}
_defaults = {
'intrastat': lambda *a: False,
}
res_country()
class report_intrastat_code(osv.osv):
_name = "report.intrastat.code"
_description = "Intrastat code"
_columns = {
'name': fields.char('Intrastat Code', size=16),
'description': fields.char('Description', size=64),
}
report_intrastat_code()
class product_template(osv.osv):
_name = "product.template"
_inherit = "product.template"
_columns = {
'intrastat_id': fields.many2one('report.intrastat.code', 'Intrastat code'),
}
product_template()
class report_intrastat(osv.osv):
_name = "report.intrastat"
_description = "Intrastat report"
_auto = False
_columns = {
'name': fields.char('Year',size=64,required=False, readonly=True),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month',readonly=True),
'supply_units':fields.float('Supply Units', readonly=True),
'ref':fields.char('Source document',size=64, readonly=True),
'code': fields.char('Country code', size=2, readonly=True),
'intrastat_id': fields.many2one('report.intrastat.code', 'Intrastat code', readonly=True),
'weight': fields.float('Weight', readonly=True),
'value': fields.float('Value', readonly=True, digits_compute=dp.get_precision('Account')),
'type': fields.selection([('import', 'Import'), ('export', 'Export')], 'Type'),
'currency_id': fields.many2one('res.currency', "Currency", readonly=True),
}
def init(self, cr):
drop_view_if_exists(cr, 'report_intrastat')
cr.execute("""
create or replace view report_intrastat as (
select
to_char(inv.create_date, 'YYYY') as name,
to_char(inv.create_date, 'MM') as month,
min(inv_line.id) as id,
intrastat.id as intrastat_id,
upper(inv_country.code) as code,
sum(case when inv_line.price_unit is not null
then inv_line.price_unit * inv_line.quantity
else 0
end) as value,
sum(
case when uom.category_id != puom.category_id then (pt.weight_net * inv_line.quantity)
else (pt.weight_net * inv_line.quantity * uom.factor) end
) as weight,
sum(
case when uom.category_id != puom.category_id then inv_line.quantity
else (inv_line.quantity * uom.factor) end
) as supply_units,
inv.currency_id as currency_id,
inv.number as ref,
case when inv.type in ('out_invoice','in_refund')
then 'export'
else 'import'
end as type
from
account_invoice inv
left join account_invoice_line inv_line on inv_line.invoice_id=inv.id
left join (product_template pt
left join product_product pp on (pp.product_tmpl_id = pt.id))
on (inv_line.product_id = pp.id)
left join product_uom uom on uom.id=inv_line.uos_id
left join product_uom puom on puom.id = pt.uom_id
left join report_intrastat_code intrastat on pt.intrastat_id = intrastat.id
left join (res_partner inv_address
left join res_country inv_country on (inv_country.id = inv_address.country_id))
on (inv_address.id = inv.partner_id)
where
inv.state in ('open','paid')
and inv_line.product_id is not null
and inv_country.intrastat=true
group by to_char(inv.create_date, 'YYYY'), to_char(inv.create_date, 'MM'),intrastat.id,inv.type,pt.intrastat_id, inv_country.code,inv.number, inv.currency_id
)""")
report_intrastat()
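# Reading the view above: each row aggregates open/paid invoice lines per
# (year, month, intrastat code, country code, invoice number, currency);
# weight and supply units are scaled by uom.factor only when the line UoM
# shares the product UoM's category, and out_invoice/in_refund rows are
# flagged as exports, everything else as imports.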
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
vadimtk/chrome4sdp
|
styleguide/c++/chromium-cpp/main.py
|
90
|
1649
|
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from google.appengine.api import memcache
from google.appengine.api import urlfetch
import webapp2
import base64
"""A simple appengine app that hosts .html files in src/styleguide/c++ from
chromium's git repo."""
class MainHandler(webapp2.RequestHandler):
def get(self):
handler = GitilesMirrorHandler()
handler.initialize(self.request, self.response)
return handler.get("c++11.html")
BASE = 'https://chromium.googlesource.com/chromium/src.git/' \
'+/master/styleguide/c++/%s?format=TEXT'
class GitilesMirrorHandler(webapp2.RequestHandler):
def get(self, resource):
if '..' in resource: # No path traversal.
self.response.write(':-(')
return
url = BASE % resource
contents = memcache.get(url)
if not contents or self.request.get('bust'):
result = urlfetch.fetch(url)
if result.status_code != 200:
self.response.set_status(result.status_code)
self.response.write('http error %d' % result.status_code)
return
contents = base64.b64decode(result.content)
memcache.set(url, contents, time=5*60) # seconds
if resource.endswith('.css'):
self.response.headers['Content-Type'] = 'text/css'
self.response.write(contents)
app = webapp2.WSGIApplication([
('/', MainHandler),
('/(\S+\.(?:css|html))', GitilesMirrorHandler),
], debug=True)
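# Request flow sketch: GET / mirrors c++11.html via GitilesMirrorHandler,
# while GET /<name>.html or /<name>.css fetches BASE % name (gitiles returns
# base64 when ?format=TEXT), decodes it, and caches the bytes in memcache
# for five minutes; passing any truthy 'bust' query parameter forces a refetch.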
|
bsd-3-clause
|
uclouvain/osis
|
education_group/ddd/service/read/get_mini_training_service.py
|
1
|
1686
|
# ############################################################################
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
# ############################################################################
from education_group.ddd import command
from education_group.ddd.domain import mini_training
from education_group.ddd.repository import mini_training as mini_training_repository
def get_mini_training(cmd: command.GetMiniTrainingCommand) -> mini_training.MiniTraining:
mini_training_id = mini_training.MiniTrainingIdentity(acronym=cmd.acronym, year=cmd.year)
    return mini_training_repository.MiniTrainingRepository.get(mini_training_id)
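# Hypothetical usage sketch (the acronym and year values are illustrative;
# the command is assumed to carry the two attributes read above):
#   cmd = command.GetMiniTrainingCommand(acronym='MINADROI', year=2020)
#   mini_training = get_mini_training(cmd)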
|
agpl-3.0
|
fernandog/Sick-Beard
|
lib/unidecode/x052.py
|
253
|
4654
|
data = (
'Dao ', # 0x00
'Diao ', # 0x01
'Dao ', # 0x02
'Ren ', # 0x03
'Ren ', # 0x04
'Chuang ', # 0x05
'Fen ', # 0x06
'Qie ', # 0x07
'Yi ', # 0x08
'Ji ', # 0x09
'Kan ', # 0x0a
'Qian ', # 0x0b
'Cun ', # 0x0c
'Chu ', # 0x0d
'Wen ', # 0x0e
'Ji ', # 0x0f
'Dan ', # 0x10
'Xing ', # 0x11
'Hua ', # 0x12
'Wan ', # 0x13
'Jue ', # 0x14
'Li ', # 0x15
'Yue ', # 0x16
'Lie ', # 0x17
'Liu ', # 0x18
'Ze ', # 0x19
'Gang ', # 0x1a
'Chuang ', # 0x1b
'Fu ', # 0x1c
'Chu ', # 0x1d
'Qu ', # 0x1e
'Ju ', # 0x1f
'Shan ', # 0x20
'Min ', # 0x21
'Ling ', # 0x22
'Zhong ', # 0x23
'Pan ', # 0x24
'Bie ', # 0x25
'Jie ', # 0x26
'Jie ', # 0x27
'Bao ', # 0x28
'Li ', # 0x29
'Shan ', # 0x2a
'Bie ', # 0x2b
'Chan ', # 0x2c
'Jing ', # 0x2d
'Gua ', # 0x2e
'Gen ', # 0x2f
'Dao ', # 0x30
'Chuang ', # 0x31
'Kui ', # 0x32
'Ku ', # 0x33
'Duo ', # 0x34
'Er ', # 0x35
'Zhi ', # 0x36
'Shua ', # 0x37
'Quan ', # 0x38
'Cha ', # 0x39
'Ci ', # 0x3a
'Ke ', # 0x3b
'Jie ', # 0x3c
'Gui ', # 0x3d
'Ci ', # 0x3e
'Gui ', # 0x3f
'Kai ', # 0x40
'Duo ', # 0x41
'Ji ', # 0x42
'Ti ', # 0x43
'Jing ', # 0x44
'Lou ', # 0x45
'Gen ', # 0x46
'Ze ', # 0x47
'Yuan ', # 0x48
'Cuo ', # 0x49
'Xue ', # 0x4a
'Ke ', # 0x4b
'La ', # 0x4c
'Qian ', # 0x4d
'Cha ', # 0x4e
'Chuang ', # 0x4f
'Gua ', # 0x50
'Jian ', # 0x51
'Cuo ', # 0x52
'Li ', # 0x53
'Ti ', # 0x54
'Fei ', # 0x55
'Pou ', # 0x56
'Chan ', # 0x57
'Qi ', # 0x58
'Chuang ', # 0x59
'Zi ', # 0x5a
'Gang ', # 0x5b
'Wan ', # 0x5c
'Bo ', # 0x5d
'Ji ', # 0x5e
'Duo ', # 0x5f
'Qing ', # 0x60
'Yan ', # 0x61
'Zhuo ', # 0x62
'Jian ', # 0x63
'Ji ', # 0x64
'Bo ', # 0x65
'Yan ', # 0x66
'Ju ', # 0x67
'Huo ', # 0x68
'Sheng ', # 0x69
'Jian ', # 0x6a
'Duo ', # 0x6b
'Duan ', # 0x6c
'Wu ', # 0x6d
'Gua ', # 0x6e
'Fu ', # 0x6f
'Sheng ', # 0x70
'Jian ', # 0x71
'Ge ', # 0x72
'Zha ', # 0x73
'Kai ', # 0x74
'Chuang ', # 0x75
'Juan ', # 0x76
'Chan ', # 0x77
'Tuan ', # 0x78
'Lu ', # 0x79
'Li ', # 0x7a
'Fou ', # 0x7b
'Shan ', # 0x7c
'Piao ', # 0x7d
'Kou ', # 0x7e
'Jiao ', # 0x7f
'Gua ', # 0x80
'Qiao ', # 0x81
'Jue ', # 0x82
'Hua ', # 0x83
'Zha ', # 0x84
'Zhuo ', # 0x85
'Lian ', # 0x86
'Ju ', # 0x87
'Pi ', # 0x88
'Liu ', # 0x89
'Gui ', # 0x8a
'Jiao ', # 0x8b
'Gui ', # 0x8c
'Jian ', # 0x8d
'Jian ', # 0x8e
'Tang ', # 0x8f
'Huo ', # 0x90
'Ji ', # 0x91
'Jian ', # 0x92
'Yi ', # 0x93
'Jian ', # 0x94
'Zhi ', # 0x95
'Chan ', # 0x96
'Cuan ', # 0x97
'Mo ', # 0x98
'Li ', # 0x99
'Zhu ', # 0x9a
'Li ', # 0x9b
'Ya ', # 0x9c
'Quan ', # 0x9d
'Ban ', # 0x9e
'Gong ', # 0x9f
'Jia ', # 0xa0
'Wu ', # 0xa1
'Mai ', # 0xa2
'Lie ', # 0xa3
'Jin ', # 0xa4
'Keng ', # 0xa5
'Xie ', # 0xa6
'Zhi ', # 0xa7
'Dong ', # 0xa8
'Zhu ', # 0xa9
'Nu ', # 0xaa
'Jie ', # 0xab
'Qu ', # 0xac
'Shao ', # 0xad
'Yi ', # 0xae
'Zhu ', # 0xaf
'Miao ', # 0xb0
'Li ', # 0xb1
'Jing ', # 0xb2
'Lao ', # 0xb3
'Lao ', # 0xb4
'Juan ', # 0xb5
'Kou ', # 0xb6
'Yang ', # 0xb7
'Wa ', # 0xb8
'Xiao ', # 0xb9
'Mou ', # 0xba
'Kuang ', # 0xbb
'Jie ', # 0xbc
'Lie ', # 0xbd
'He ', # 0xbe
'Shi ', # 0xbf
'Ke ', # 0xc0
'Jing ', # 0xc1
'Hao ', # 0xc2
'Bo ', # 0xc3
'Min ', # 0xc4
'Chi ', # 0xc5
'Lang ', # 0xc6
'Yong ', # 0xc7
'Yong ', # 0xc8
'Mian ', # 0xc9
'Ke ', # 0xca
'Xun ', # 0xcb
'Juan ', # 0xcc
'Qing ', # 0xcd
'Lu ', # 0xce
'Pou ', # 0xcf
'Meng ', # 0xd0
'Lai ', # 0xd1
'Le ', # 0xd2
'Kai ', # 0xd3
'Mian ', # 0xd4
'Dong ', # 0xd5
'Xu ', # 0xd6
'Xu ', # 0xd7
'Kan ', # 0xd8
'Wu ', # 0xd9
'Yi ', # 0xda
'Xun ', # 0xdb
'Weng ', # 0xdc
'Sheng ', # 0xdd
'Lao ', # 0xde
'Mu ', # 0xdf
'Lu ', # 0xe0
'Piao ', # 0xe1
'Shi ', # 0xe2
'Ji ', # 0xe3
'Qin ', # 0xe4
'Qiang ', # 0xe5
'Jiao ', # 0xe6
'Quan ', # 0xe7
'Yang ', # 0xe8
'Yi ', # 0xe9
'Jue ', # 0xea
'Fan ', # 0xeb
'Juan ', # 0xec
'Tong ', # 0xed
'Ju ', # 0xee
'Dan ', # 0xef
'Xie ', # 0xf0
'Mai ', # 0xf1
'Xun ', # 0xf2
'Xun ', # 0xf3
'Lu ', # 0xf4
'Li ', # 0xf5
'Che ', # 0xf6
'Rang ', # 0xf7
'Quan ', # 0xf8
'Bao ', # 0xf9
'Shao ', # 0xfa
'Yun ', # 0xfb
'Jiu ', # 0xfc
'Bao ', # 0xfd
'Gou ', # 0xfe
'Wu ', # 0xff
)
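# Illustrative lookup (assuming the usual unidecode convention of indexing a
# page table by the low byte of the code point): this file covers
# U+5200..U+52FF, so U+5200 maps to data[0x00] == 'Dao ' and U+529B maps to
# data[0x9b] == 'Li '.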
|
gpl-3.0
|
foss-transportationmodeling/rettina-server
|
.env/lib/python2.7/site-packages/distribute-0.6.24-py2.7.egg/setuptools/tests/test_resources.py
|
62
|
24677
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
from unittest import TestCase, makeSuite; from pkg_resources import *
from setuptools.command.easy_install import get_script_header, is_sh
import os, pkg_resources, sys, StringIO, tempfile, shutil
try: frozenset
except NameError:
from sets import ImmutableSet as frozenset
_MAX_LENGTH = 80  # truncation cap used by safe_repr below (mirrors unittest.util in 2.7)
def safe_repr(obj, short=False):
    """ copied from Python2.7"""
try:
result = repr(obj)
except Exception:
result = object.__repr__(obj)
if not short or len(result) < _MAX_LENGTH:
return result
return result[:_MAX_LENGTH] + ' [truncated]...'
class Metadata(EmptyProvider):
"""Mock object to return metadata as if from an on-disk distribution"""
def __init__(self,*pairs):
self.metadata = dict(pairs)
def has_metadata(self,name):
return name in self.metadata
def get_metadata(self,name):
return self.metadata[name]
def get_metadata_lines(self,name):
return yield_lines(self.get_metadata(name))
class DistroTests(TestCase):
def testCollection(self):
# empty path should produce no distributions
ad = Environment([], platform=None, python=None)
self.assertEqual(list(ad), [])
self.assertEqual(ad['FooPkg'],[])
ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
# Name is in there now
self.assert_(ad['FooPkg'])
# But only 1 package
self.assertEqual(list(ad), ['foopkg'])
# Distributions sort by version
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
)
# Removing a distribution leaves sequence alone
ad.remove(ad['FooPkg'][1])
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.4','1.2']
)
# And inserting adds them in order
ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
)
ws = WorkingSet([])
foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
req, = parse_requirements("FooPkg>=1.3")
# Nominal case: no distros on path, should yield all applicable
self.assertEqual(ad.best_match(req,ws).version, '1.9')
# If a matching distro is already installed, should return only that
ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
# If the first matching distro is unsuitable, it's a version conflict
ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
self.assertRaises(VersionConflict, ad.best_match, req, ws)
# If more than one match on the path, the first one takes precedence
ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
self.assertEqual(ad.best_match(req,ws).version, '1.4')
def checkFooPkg(self,d):
self.assertEqual(d.project_name, "FooPkg")
self.assertEqual(d.key, "foopkg")
self.assertEqual(d.version, "1.3-1")
self.assertEqual(d.py_version, "2.4")
self.assertEqual(d.platform, "win32")
self.assertEqual(d.parsed_version, parse_version("1.3-1"))
def testDistroBasics(self):
d = Distribution(
"/some/path",
project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
)
self.checkFooPkg(d)
d = Distribution("/some/path")
self.assertEqual(d.py_version, sys.version[:3])
self.assertEqual(d.platform, None)
def testDistroParse(self):
d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
self.checkFooPkg(d)
d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
self.checkFooPkg(d)
def testDistroMetadata(self):
d = Distribution(
"/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
metadata = Metadata(
('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
)
)
self.checkFooPkg(d)
def distRequires(self, txt):
return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
def checkRequires(self, dist, txt, extras=()):
self.assertEqual(
list(dist.requires(extras)),
list(parse_requirements(txt))
)
def testDistroDependsSimple(self):
for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
self.checkRequires(self.distRequires(v), v)
def testResolve(self):
ad = Environment([]); ws = WorkingSet([])
# Resolving no requirements -> nothing to install
self.assertEqual( list(ws.resolve([],ad)), [] )
# Request something not in the collection -> DistributionNotFound
self.assertRaises(
DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
)
Foo = Distribution.from_filename(
"/foo_dir/Foo-1.2.egg",
metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
)
ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
# Request thing(s) that are available -> list to activate
for i in range(3):
targets = list(ws.resolve(parse_requirements("Foo"), ad))
self.assertEqual(targets, [Foo])
map(ws.add,targets)
self.assertRaises(VersionConflict, ws.resolve,
parse_requirements("Foo==0.9"), ad)
ws = WorkingSet([]) # reset
# Request an extra that causes an unresolved dependency for "Baz"
self.assertRaises(
DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
)
Baz = Distribution.from_filename(
"/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
)
ad.add(Baz)
# Activation list now includes resolved dependency
self.assertEqual(
list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
)
# Requests for conflicting versions produce VersionConflict
self.assertRaises( VersionConflict,
ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
)
def testDistroDependsOptions(self):
d = self.distRequires("""
Twisted>=1.5
[docgen]
ZConfig>=2.0
docutils>=0.3
[fastcgi]
fcgiapp>=0.1""")
self.checkRequires(d,"Twisted>=1.5")
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
["docgen","fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
["fastcgi", "docgen"]
)
self.assertRaises(UnknownExtra, d.requires, ["foo"])
def testSetuptoolsDistributeCombination(self):
# Ensure that installing a 0.7-series setuptools fails. PJE says that
# it will not co-exist.
ws = WorkingSet([])
d = Distribution(
"/some/path",
project_name="setuptools",
version="0.7a1")
self.assertRaises(ValueError, ws.add, d)
# A 0.6-series is no problem
d2 = Distribution(
"/some/path",
project_name="setuptools",
version="0.6c9")
ws.add(d2)
        # a distribution with no version specified also needs to work
ws = WorkingSet([])
d3 = Distribution(
"/some/path",
project_name="setuptools")
ws.add(d3)
class EntryPointTests(TestCase):
def assertfields(self, ep):
self.assertEqual(ep.name,"foo")
self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
self.assertEqual(ep.attrs, ("EntryPointTests",))
self.assertEqual(ep.extras, ("x",))
self.assert_(ep.load() is EntryPointTests)
self.assertEqual(
str(ep),
"foo = setuptools.tests.test_resources:EntryPointTests [x]"
)
def setUp(self):
self.dist = Distribution.from_filename(
"FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
def testBasics(self):
ep = EntryPoint(
"foo", "setuptools.tests.test_resources", ["EntryPointTests"],
["x"], self.dist
)
self.assertfields(ep)
def testParse(self):
s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
ep = EntryPoint.parse(s, self.dist)
self.assertfields(ep)
ep = EntryPoint.parse("bar baz= spammity[PING]")
self.assertEqual(ep.name,"bar baz")
self.assertEqual(ep.module_name,"spammity")
self.assertEqual(ep.attrs, ())
self.assertEqual(ep.extras, ("ping",))
ep = EntryPoint.parse(" fizzly = wocka:foo")
self.assertEqual(ep.name,"fizzly")
self.assertEqual(ep.module_name,"wocka")
self.assertEqual(ep.attrs, ("foo",))
self.assertEqual(ep.extras, ())
def testRejects(self):
for ep in [
"foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
]:
try: EntryPoint.parse(ep)
except ValueError: pass
else: raise AssertionError("Should've been bad", ep)
def checkSubMap(self, m):
self.assertEqual(len(m), len(self.submap_expect))
for key, ep in self.submap_expect.iteritems():
self.assertEqual(repr(m.get(key)), repr(ep))
submap_expect = dict(
feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
feature3=EntryPoint('feature3', 'this.module', extras=['something'])
)
submap_str = """
# define features for blah blah
feature1 = somemodule:somefunction
feature2 = another.module:SomeClass [extra1,extra2]
feature3 = this.module [something]
"""
def testParseList(self):
self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
self.assertRaises(ValueError, EntryPoint.parse_group, "x",
["foo=baz", "foo=bar"])
def testParseMap(self):
m = EntryPoint.parse_map({'xyz':self.submap_str})
self.checkSubMap(m['xyz'])
self.assertEqual(m.keys(),['xyz'])
m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
self.checkSubMap(m['xyz'])
self.assertEqual(m.keys(),['xyz'])
self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
class RequirementsTests(TestCase):
def testBasics(self):
r = Requirement.parse("Twisted>=1.2")
self.assertEqual(str(r),"Twisted>=1.2")
self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
def testOrdering(self):
r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
self.assertEqual(r1,r2)
self.assertEqual(str(r1),str(r2))
self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
def testBasicContains(self):
r = Requirement("Twisted", [('>=','1.2')], ())
foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
twist11 = Distribution.from_filename("Twisted-1.1.egg")
twist12 = Distribution.from_filename("Twisted-1.2.egg")
self.assert_(parse_version('1.2') in r)
self.assert_(parse_version('1.1') not in r)
self.assert_('1.2' in r)
self.assert_('1.1' not in r)
self.assert_(foo_dist not in r)
self.assert_(twist11 not in r)
self.assert_(twist12 in r)
def testAdvancedContains(self):
r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
self.assert_(v in r, (v,r))
for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
self.assert_(v not in r, (v,r))
def testOptionsAndHashing(self):
r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
self.assertEqual(r1,r2)
self.assertEqual(r1,r3)
self.assertEqual(r1.extras, ("foo","bar"))
self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized
self.assertEqual(hash(r1), hash(r2))
self.assertEqual(
hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
frozenset(["foo","bar"])))
)
def testVersionEquality(self):
r1 = Requirement.parse("foo==0.3a2")
r2 = Requirement.parse("foo!=0.3a4")
d = Distribution.from_filename
self.assert_(d("foo-0.3a4.egg") not in r1)
self.assert_(d("foo-0.3a1.egg") not in r1)
self.assert_(d("foo-0.3a4.egg") not in r2)
self.assert_(d("foo-0.3a2.egg") in r1)
self.assert_(d("foo-0.3a2.egg") in r2)
self.assert_(d("foo-0.3a3.egg") in r2)
self.assert_(d("foo-0.3a5.egg") in r2)
def testDistributeSetuptoolsOverride(self):
# Plain setuptools or distribute mean we return distribute.
self.assertEqual(
Requirement.parse('setuptools').project_name, 'distribute')
self.assertEqual(
Requirement.parse('distribute').project_name, 'distribute')
# setuptools lower than 0.7 means distribute
self.assertEqual(
Requirement.parse('setuptools==0.6c9').project_name, 'distribute')
self.assertEqual(
Requirement.parse('setuptools==0.6c10').project_name, 'distribute')
self.assertEqual(
Requirement.parse('setuptools>=0.6').project_name, 'distribute')
self.assertEqual(
Requirement.parse('setuptools < 0.7').project_name, 'distribute')
# setuptools 0.7 and higher means setuptools.
self.assertEqual(
Requirement.parse('setuptools == 0.7').project_name, 'setuptools')
self.assertEqual(
Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools')
self.assertEqual(
Requirement.parse('setuptools >= 0.7').project_name, 'setuptools')
class ParseTests(TestCase):
def testEmptyParse(self):
self.assertEqual(list(parse_requirements('')), [])
def testYielding(self):
for inp,out in [
([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
(['x\n\n','y'], ['x','y']),
]:
self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
def testSplitting(self):
self.assertEqual(
list(
pkg_resources.split_sections("""
x
[Y]
z
a
[b ]
# foo
c
[ d]
[q]
v
"""
)
),
[(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
)
self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
def testSafeName(self):
self.assertEqual(safe_name("adns-python"), "adns-python")
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
self.assertNotEqual(safe_name("peak.web"), "peak-web")
def testSafeVersion(self):
self.assertEqual(safe_version("1.2-1"), "1.2-1")
self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
self.assertEqual(safe_version("peak.web"), "peak.web")
def testSimpleRequirements(self):
self.assertEqual(
list(parse_requirements('Twis-Ted>=1.2-1')),
[Requirement('Twis-Ted',[('>=','1.2-1')], ())]
)
self.assertEqual(
list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
[Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
)
self.assertEqual(
Requirement.parse("FooBar==1.99a3"),
Requirement("FooBar", [('==','1.99a3')], ())
)
self.assertRaises(ValueError,Requirement.parse,">=2.3")
self.assertRaises(ValueError,Requirement.parse,"x\\")
self.assertRaises(ValueError,Requirement.parse,"x==2 q")
self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
self.assertRaises(ValueError,Requirement.parse,"#")
def testVersionEquality(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
self.assertEqual(p1,p2, (s1,s2,p1,p2))
c('0.4', '0.4.0')
c('0.4.0.0', '0.4.0')
c('0.4.0-0', '0.4-0')
c('0pl1', '0.0pl1')
c('0pre1', '0.0c1')
c('0.0.0preview1', '0c1')
c('0.0c1', '0rc1')
c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
def testVersionOrdering(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
self.assert_(p1<p2, (s1,s2,p1,p2))
c('2.1','2.1.1')
c('2.1.0','2.10')
c('2a1','2b0')
c('2b1','2c0')
c('2a1','2.1')
c('2.3a1', '2.3')
c('2.1-1', '2.1-2')
c('2.1-1', '2.1.1')
c('2.1', '2.1.1-1')
c('2.1', '2.1pl4')
c('2.1a0-20040501', '2.1')
c('1.1', '02.1')
c('A56','B27')
c('3.2', '3.2.pl0')
c('3.2-1', '3.2pl1')
c('3.2pl1', '3.2pl1-1')
c('0.4', '4.0')
c('0.0.4', '0.4.0')
c('0pl1', '0.4pl1')
c('2.1dev','2.1a0')
c('2.1.0rc1','2.1.0')
c('2.1.0','2.1.0-rc0')
c('2.1.0','2.1.0-a')
c('2.1.0','2.1.0-alpha')
c('2.1.0','2.1.0-foo')
c('1.0','1.0-1')
c('1.0-1','1.0.1')
c('1.0a','1.0b')
c('1.0dev','1.0rc1')
c('1.0pre','1.0')
c('1.0pre','1.0')
c('1.0a','1.0-a')
c('1.0rc1','1.0-rc1')
torture ="""
0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
0.77.2-1 0.77.1-1 0.77.0-1
""".split()
for p,v1 in enumerate(torture):
for v2 in torture[p+1:]:
c(v2,v1)
class ScriptHeaderTests(TestCase):
non_ascii_exe = '/Users/José/bin/python'
def test_get_script_header(self):
if not sys.platform.startswith('java') or not is_sh(sys.executable):
# This test is for non-Jython platforms
self.assertEqual(get_script_header('#!/usr/local/bin/python'),
'#!%s\n' % os.path.normpath(sys.executable))
self.assertEqual(get_script_header('#!/usr/bin/python -x'),
'#!%s -x\n' % os.path.normpath(sys.executable))
self.assertEqual(get_script_header('#!/usr/bin/python',
executable=self.non_ascii_exe),
'#!%s -x\n' % self.non_ascii_exe)
def test_get_script_header_jython_workaround(self):
# This test doesn't work with Python 3 in some locales
if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE")
in (None, "C", "POSIX")):
return
platform = sys.platform
sys.platform = 'java1.5.0_13'
stdout = sys.stdout
try:
# A mock sys.executable that uses a shebang line (this file)
exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
self.assertEqual(
get_script_header('#!/usr/local/bin/python', executable=exe),
'#!/usr/bin/env %s\n' % exe)
            # Ensure we generate what is basically a broken shebang line
            # when there are options, with a warning emitted
sys.stdout = sys.stderr = StringIO.StringIO()
self.assertEqual(get_script_header('#!/usr/bin/python -x',
executable=exe),
'#!%s -x\n' % exe)
self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
sys.stdout = sys.stderr = StringIO.StringIO()
self.assertEqual(get_script_header('#!/usr/bin/python',
executable=self.non_ascii_exe),
'#!%s -x\n' % self.non_ascii_exe)
self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
finally:
sys.platform = platform
sys.stdout = stdout
class NamespaceTests(TestCase):
def setUp(self):
self._ns_pkgs = pkg_resources._namespace_packages.copy()
self._tmpdir = tempfile.mkdtemp(prefix="tests-distribute-")
os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
self._prev_sys_path = sys.path[:]
sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))
def tearDown(self):
shutil.rmtree(self._tmpdir)
pkg_resources._namespace_packages = self._ns_pkgs.copy()
sys.path = self._prev_sys_path[:]
    def _assertIn(self, member, container):
        """ assertIn and assertTrue do not exist in Python2.3"""
        if member not in container:
            standardMsg = '%s not found in %s' % (safe_repr(member),
                                                  safe_repr(container))
            self.fail(standardMsg)
def test_two_levels_deep(self):
"""
Test nested namespace packages
Create namespace packages in the following tree :
site-packages-1/pkg1/pkg2
site-packages-2/pkg1/pkg2
Check both are in the _namespace_packages dict and that their __path__
is correct
"""
sys.path.append(os.path.join(self._tmpdir, "site-pkgs2"))
os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"))
os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"))
ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
for site in ["site-pkgs", "site-pkgs2"]:
pkg1_init = open(os.path.join(self._tmpdir, site,
"pkg1", "__init__.py"), "w")
pkg1_init.write(ns_str)
pkg1_init.close()
pkg2_init = open(os.path.join(self._tmpdir, site,
"pkg1", "pkg2", "__init__.py"), "w")
pkg2_init.write(ns_str)
pkg2_init.close()
import pkg1
self._assertIn("pkg1", pkg_resources._namespace_packages.keys())
try:
import pkg1.pkg2
except ImportError, e:
self.fail("Distribute tried to import the parent namespace package")
# check the _namespace_packages dict
self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys())
self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"])
# check the __path__ attribute contains both paths
self.assertEqual(pkg1.pkg2.__path__, [
os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2") ])
|
apache-2.0
|
kiyoto/statsmodels
|
statsmodels/tsa/vector_ar/tests/test_svar.py
|
28
|
2034
|
"""
Test SVAR estimation
"""
import statsmodels.api as sm
from statsmodels.tsa.vector_ar.svar_model import SVAR
from numpy.testing import assert_almost_equal, assert_equal, assert_allclose
from .results import results_svar
import numpy as np
import numpy.testing as npt
DECIMAL_6 = 6
DECIMAL_5 = 5
DECIMAL_4 = 4
class TestSVAR(object):
@classmethod
def setupClass(cls):
mdata = sm.datasets.macrodata.load().data
mdata = mdata[['realgdp','realcons','realinv']]
names = mdata.dtype.names
data = mdata.view((float,3))
data = np.diff(np.log(data), axis=0)
A = np.asarray([[1, 0, 0],['E', 1, 0],['E', 'E', 1]])
B = np.asarray([['E', 0, 0], [0, 'E', 0], [0, 0, 'E']])
results = SVAR(data, svar_type='AB', A=A, B=B).fit(maxlags=3)
cls.res1 = results
#cls.res2 = results_svar.SVARdataResults()
from .results import results_svar_st
cls.res2 = results_svar_st.results_svar1_small
def _reformat(self, x):
return x[[1, 4, 7, 2, 5, 8, 3, 6, 9, 0], :].ravel("F")
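    # _reformat appears to reorder the (10, 3) params array -- row 0 is the
    # constant, rows 1-9 the three lags of the three variables -- so that all
    # lags of each variable come first and the constant last, flattened
    # column-major ('F') to line up with the Stata reference vector b_var.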
def test_A(self):
assert_almost_equal(self.res1.A, self.res2.A, DECIMAL_4)
def test_B(self):
assert_almost_equal(self.res1.B, self.res2.B, DECIMAL_4)
def test_basic(self):
res1 = self.res1
res2 = self.res2
assert_allclose(self._reformat(res1.params), res2.b_var, atol=1e-12)
bse_st = np.sqrt(np.diag(res2.V_var))
assert_allclose(self._reformat(res1.bse), bse_st, atol=1e-12)
def test_llf_ic(self):
res1 = self.res1
res2 = self.res2
assert_allclose(res1.llf, res2.ll_var, atol=1e-12)
# different definition, missing constant term ?
corr_const = -8.51363119922803
assert_allclose(res1.fpe, res2.fpe_var, atol=1e-12)
assert_allclose(res1.aic - corr_const, res2.aic_var, atol=1e-12)
assert_allclose(res1.bic - corr_const, res2.sbic_var, atol=1e-12)
assert_allclose(res1.hqic - corr_const, res2.hqic_var, atol=1e-12)
|
bsd-3-clause
|
davipeterlini/routeflow_ha
|
pox/pox/lib/threadpool.py
|
26
|
2952
|
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Totally untested thread pool class.
Tries not to exceed "maximum" workers (though this is not a hard limit).
Kills off up to around half of its workers when more than half are idle.
"""
from __future__ import print_function
from __future__ import with_statement
from threading import Thread, RLock
from Queue import Queue
CYCLE_TIME = 3
class WorkerThread (Thread):
def __init__ (self, pool):
Thread.__init__(self)
self._pool = pool
self.daemon = True
self.start()
def run (self):
with self._pool._lock:
self._pool._total += 1
while self._pool.running:
with self._pool._lock:
self._pool._available += 1
try:
func, args, kw = self._pool._tasks.get(True, CYCLE_TIME)
if func is None: return
except:
continue
finally:
with self._pool._lock:
self._pool._available -= 1
assert self._pool._available >= 0
try:
func(*args, **kw)
except Exception as e:
print("Worker thread exception", e)
self._pool._tasks.task_done()
with self._pool._lock:
self._pool._total -= 1
assert self._pool._total >= 0
class ThreadPool (object):
#NOTE: Assumes only one thread manipulates the pool
# (Add some locks to fix)
def __init__ (self, initial = 0, maximum = None):
self._available = 0
self._total = 0
self._tasks = Queue()
self.maximum = maximum
self._lock = RLock()
for i in xrange(initial):
      self._new_worker()
def _new_worker (self):
with self._lock:
if self.maximum is not None:
if self._total >= self.maximum:
# Too many!
return False
WorkerThread(self)
return True
def add (_self, _func, *_args, **_kwargs):
    _self.add_task(_func, args=_args, kwargs=_kwargs)
def add_task (self, func, args=(), kwargs={}):
while True:
self._lock.acquire()
if self._available == 0:
self._lock.release()
self._new_worker()
else:
break
self._tasks.put((func, args, kwargs))
    if self._available > self._total / 2 and self._total > 8:
for i in xrange(self._total / 2 - 1):
self._tasks.put((None,None,None))
self._lock.release()
def join (self):
self._tasks.join()
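# Minimal usage sketch (some_func is a hypothetical task callable):
#   pool = ThreadPool(initial=2, maximum=8)
#   pool.add_task(some_func, args=('hello',))
#   pool.join()   # blocks until the task queue is drained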
|
apache-2.0
|
sosey/ginga
|
ginga/mockw/ImageViewCanvasTypesMock.py
|
1
|
2800
|
#
# ImageViewCanvasTypesMock.py -- drawing classes for ImageViewCanvas widget
#
# Eric Jeschke ([email protected])
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# TODO: this line is for backward compatibility with files importing
# this module--to be removed
from ginga.canvas.CanvasObject import *
class RenderContext(object):
def __init__(self, viewer):
self.viewer = viewer
# TODO: encapsulate this drawable
#self.cr = GraphicsContext(self.viewer.pixmap)
self.cr = None
def __get_color(self, color, alpha):
# return a color in the widget's native object
# color is either a string or a 3-tuple of floats in 0-1 range
clr = None
return clr
def set_line_from_shape(self, shape):
pass
def set_fill_from_shape(self, shape):
pass
def set_font_from_shape(self, shape):
pass
def initialize_from_shape(self, shape, line=True, fill=True, font=True):
if line:
self.set_line_from_shape(shape)
if fill:
self.set_fill_from_shape(shape)
if font:
self.set_font_from_shape(shape)
def set_line(self, color, alpha=1.0, linewidth=1, style='solid'):
pass
def set_fill(self, color, alpha=1.0):
pass
def set_font(self, fontname, fontsize):
pass
def text_extents(self, text):
# TODO: how to mock this?
width = 200
height = 15
return width, height
##### DRAWING OPERATIONS #####
def draw_text(self, cx, cy, text):
#self.cr.draw_text(cx, cy, text)
pass
def draw_polygon(self, cpoints):
#self.cr.draw_polygon(cpoints)
pass
def draw_circle(self, cx, cy, cradius):
cradius = float(cradius)
self.draw_ellipse(cx, cy, cradius, cradius, 0.0)
    def draw_ellipse(self, cx, cy, cxradius, cyradius, theta):
        #self.cr.draw_ellipse((cx, cy), (cxradius, cyradius), theta)
pass
def draw_line(self, cx1, cy1, cx2, cy2):
#self.cr.draw_line(cx1, cy1, cx2, cy2)
pass
def draw_path(self, cpoints):
for i in range(len(cpoints) - 1):
cx1, cy1 = cpoints[i]
cx2, cy2 = cpoints[i+1]
#self.cr.draw_line(cx1, cy1, cx2, cy2)
class CanvasRenderer(object):
def __init__(self, viewer):
self.viewer = viewer
def setup_cr(self, shape):
cr = RenderContext(self.viewer)
cr.initialize_from_shape(shape, font=False)
return cr
def get_dimensions(self, shape):
cr = self.setup_cr(shape)
cr.set_font_from_shape(shape)
return cr.text_extents(shape.text)
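# Hypothetical usage sketch (viewer and shape come from the caller): a
# drawing pass builds one context per shape; every draw call is a no-op here.
#   renderer = CanvasRenderer(viewer)
#   cr = renderer.setup_cr(shape)
#   cr.draw_text(10, 20, 'label')   # silently ignored by this mock backend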
#END
|
bsd-3-clause
|
glovebx/odoo
|
addons/website_sale/models/product.py
|
262
|
10108
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import osv, fields
class product_style(osv.Model):
_name = "product.style"
_columns = {
'name' : fields.char('Style Name', required=True),
'html_class': fields.char('HTML Classes'),
}
class product_pricelist(osv.Model):
_inherit = "product.pricelist"
_columns = {
'code': fields.char('Promotional Code'),
}
class product_public_category(osv.osv):
_name = "product.public.category"
_description = "Public Category"
_order = "sequence, name"
_constraints = [
(osv.osv._check_recursion, 'Error ! You cannot create recursive categories.', ['parent_id'])
]
def name_get(self, cr, uid, ids, context=None):
res = []
for cat in self.browse(cr, uid, ids, context=context):
names = [cat.name]
pcat = cat.parent_id
while pcat:
names.append(pcat.name)
pcat = pcat.parent_id
res.append((cat.id, ' / '.join(reversed(names))))
return res
def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
res = self.name_get(cr, uid, ids, context=context)
return dict(res)
def _get_image(self, cr, uid, ids, name, args, context=None):
result = dict.fromkeys(ids, False)
for obj in self.browse(cr, uid, ids, context=context):
result[obj.id] = tools.image_get_resized_images(obj.image)
return result
def _set_image(self, cr, uid, id, name, value, args, context=None):
return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context)
_columns = {
'name': fields.char('Name', required=True, translate=True),
'complete_name': fields.function(_name_get_fnc, type="char", string='Name'),
'parent_id': fields.many2one('product.public.category','Parent Category', select=True),
'child_id': fields.one2many('product.public.category', 'parent_id', string='Children Categories'),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of product categories."),
# NOTE: there is no 'default image', because by default we don't show thumbnails for categories. However if we have a thumbnail
# for at least one category, then we display a default image on the other, so that the buttons have consistent styling.
# In this case, the default image is set by the js code.
# NOTE2: image: all image fields are base64 encoded and PIL-supported
'image': fields.binary("Image",
help="This field holds the image used as image for the category, limited to 1024x1024px."),
'image_medium': fields.function(_get_image, fnct_inv=_set_image,
string="Medium-sized image", type="binary", multi="_get_image",
store={
'product.public.category': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
},
help="Medium-sized image of the category. It is automatically "\
"resized as a 128x128px image, with aspect ratio preserved. "\
"Use this field in form views or some kanban views."),
'image_small': fields.function(_get_image, fnct_inv=_set_image,
string="Smal-sized image", type="binary", multi="_get_image",
store={
'product.public.category': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
},
help="Small-sized image of the category. It is automatically "\
"resized as a 64x64px image, with aspect ratio preserved. "\
"Use this field anywhere a small image is required."),
}
class product_template(osv.Model):
_inherit = ["product.template", "website.seo.metadata"]
_order = 'website_published desc, website_sequence desc, name'
_name = 'product.template'
_mail_post_access = 'read'
def _website_url(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, '')
for product in self.browse(cr, uid, ids, context=context):
res[product.id] = "/shop/product/%s" % (product.id,)
return res
_columns = {
# TODO FIXME tde: when website_mail/mail_thread.py inheritance work -> this field won't be necessary
'website_message_ids': fields.one2many(
'mail.message', 'res_id',
domain=lambda self: [
'&', ('model', '=', self._name), ('type', '=', 'comment')
],
string='Website Comments',
),
'website_published': fields.boolean('Available in the website', copy=False),
'website_description': fields.html('Description for the website', translate=True),
'alternative_product_ids': fields.many2many('product.template','product_alternative_rel','src_id','dest_id', string='Alternative Products', help='Appear on the product page'),
'accessory_product_ids': fields.many2many('product.product','product_accessory_rel','src_id','dest_id', string='Accessory Products', help='Appear on the shopping cart'),
'website_size_x': fields.integer('Size X'),
'website_size_y': fields.integer('Size Y'),
'website_style_ids': fields.many2many('product.style', string='Styles'),
'website_sequence': fields.integer('Sequence', help="Determine the display order in the Website E-commerce"),
'website_url': fields.function(_website_url, string="Website url", type="char"),
'public_categ_ids': fields.many2many('product.public.category', string='Public Category', help="Those categories are used to group similar products for e-commerce."),
}
def _defaults_website_sequence(self, cr, uid, *l, **kwargs):
cr.execute('SELECT MAX(website_sequence)+1 FROM product_template')
next_sequence = cr.fetchone()[0] or 0
return next_sequence
_defaults = {
'website_size_x': 1,
'website_size_y': 1,
'website_sequence': _defaults_website_sequence,
'website_published': False,
}
def set_sequence_top(self, cr, uid, ids, context=None):
cr.execute('SELECT MAX(website_sequence) FROM product_template')
max_sequence = cr.fetchone()[0] or 0
return self.write(cr, uid, ids, {'website_sequence': max_sequence + 1}, context=context)
def set_sequence_bottom(self, cr, uid, ids, context=None):
cr.execute('SELECT MIN(website_sequence) FROM product_template')
min_sequence = cr.fetchone()[0] or 0
return self.write(cr, uid, ids, {'website_sequence': min_sequence -1}, context=context)
def set_sequence_up(self, cr, uid, ids, context=None):
product = self.browse(cr, uid, ids[0], context=context)
cr.execute(""" SELECT id, website_sequence FROM product_template
WHERE website_sequence > %s AND website_published = %s ORDER BY website_sequence ASC LIMIT 1""" % (product.website_sequence, product.website_published))
prev = cr.fetchone()
if prev:
self.write(cr, uid, [prev[0]], {'website_sequence': product.website_sequence}, context=context)
return self.write(cr, uid, [ids[0]], {'website_sequence': prev[1]}, context=context)
else:
return self.set_sequence_top(cr, uid, ids, context=context)
def set_sequence_down(self, cr, uid, ids, context=None):
product = self.browse(cr, uid, ids[0], context=context)
cr.execute(""" SELECT id, website_sequence FROM product_template
WHERE website_sequence < %s AND website_published = %s ORDER BY website_sequence DESC LIMIT 1""" % (product.website_sequence, product.website_published))
next = cr.fetchone()
if next:
self.write(cr, uid, [next[0]], {'website_sequence': product.website_sequence}, context=context)
return self.write(cr, uid, [ids[0]], {'website_sequence': next[1]}, context=context)
else:
return self.set_sequence_bottom(cr, uid, ids, context=context)
class product_product(osv.Model):
_inherit = "product.product"
def _website_url(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for product in self.browse(cr, uid, ids, context=context):
res[product.id] = "/shop/product/%s" % (product.product_tmpl_id.id,)
return res
_columns = {
'website_url': fields.function(_website_url, string="Website url", type="char"),
}
class product_attribute(osv.Model):
_inherit = "product.attribute"
_columns = {
'type': fields.selection([('radio', 'Radio'), ('select', 'Select'), ('color', 'Color'), ('hidden', 'Hidden')], string="Type"),
}
_defaults = {
'type': lambda *a: 'radio',
}
class product_attribute_value(osv.Model):
_inherit = "product.attribute.value"
_columns = {
        'color': fields.char("HTML Color Index", help="Here you can set a specific HTML color index (e.g. #ff0000) to display the color on the website if the attribute type is 'Color'."),
}
|
agpl-3.0
|
jkoelker/quark
|
quark/api/extensions/subnets_quark.py
|
2
|
1724
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.api import extensions
EXTENDED_ATTRIBUTES_2_0 = {
'subnets': {
"enable_dhcp": {'allow_post': False, 'allow_put': False,
'default': False,
'is_visible': True},
}
}
class Subnets_quark(extensions.ExtensionDescriptor):
"""Extends subnets for quark API purposes.
* Shunts enable_dhcp to false
"""
@classmethod
def get_name(cls):
return "Quark Subnets API Extension"
@classmethod
def get_alias(cls):
return "subnets_quark"
@classmethod
def get_description(cls):
return "Quark Subnets API Extension"
@classmethod
def get_namespace(cls):
return ("http://docs.openstack.org/api/openstack-network/2.0/content/"
"Subnets.html")
@classmethod
def get_updated(cls):
return "2013-04-22T10:00:00-00:00"
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
|
apache-2.0
|
billwanjohi/ansible
|
lib/ansible/runner/connection_plugins/paramiko_ssh.py
|
13
|
13550
|
# (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ---
# The paramiko transport is provided because many distributions, in particular EL6 and before,
# do not support ControlPersist in their SSH implementations. This is needed on the Ansible
# control machine to be reasonably efficient with connections. Thus paramiko is faster
# for most users on these platforms. Users with ControlPersist capability can consider
# using -c ssh or configuring the transport in ansible.cfg.
import warnings
import os
import pipes
import socket
import random
import logging
import traceback
import fcntl
import sys
from termios import tcflush, TCIFLUSH
from binascii import hexlify
from ansible.callbacks import vvv
from ansible import errors
from ansible import utils
from ansible import constants as C
AUTHENTICITY_MSG="""
paramiko: The authenticity of host '%s' can't be established.
The %s key fingerprint is %s.
Are you sure you want to continue connecting (yes/no)?
"""
# prevent paramiko warning noise -- see http://stackoverflow.com/questions/3920502/
HAVE_PARAMIKO=False
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
import paramiko
HAVE_PARAMIKO=True
logging.getLogger("paramiko").setLevel(logging.WARNING)
except ImportError:
pass
class MyAddPolicy(object):
"""
Based on AutoAddPolicy in paramiko so we can determine when keys are added
and also prompt for input.
Policy for automatically adding the hostname and new host key to the
local L{HostKeys} object, and saving it. This is used by L{SSHClient}.
"""
def __init__(self, runner):
self.runner = runner
def missing_host_key(self, client, hostname, key):
if C.HOST_KEY_CHECKING:
fcntl.lockf(self.runner.process_lockfile, fcntl.LOCK_EX)
fcntl.lockf(self.runner.output_lockfile, fcntl.LOCK_EX)
old_stdin = sys.stdin
sys.stdin = self.runner._new_stdin
fingerprint = hexlify(key.get_fingerprint())
ktype = key.get_name()
# clear out any premature input on sys.stdin
tcflush(sys.stdin, TCIFLUSH)
inp = raw_input(AUTHENTICITY_MSG % (hostname, ktype, fingerprint))
sys.stdin = old_stdin
if inp not in ['yes','y','']:
fcntl.flock(self.runner.output_lockfile, fcntl.LOCK_UN)
fcntl.flock(self.runner.process_lockfile, fcntl.LOCK_UN)
raise errors.AnsibleError("host connection rejected by user")
fcntl.lockf(self.runner.output_lockfile, fcntl.LOCK_UN)
fcntl.lockf(self.runner.process_lockfile, fcntl.LOCK_UN)
key._added_by_ansible_this_time = True
# existing implementation below:
client._host_keys.add(hostname, key.get_name(), key)
# host keys are actually saved in close() function below
# in order to control ordering.
# keep connection objects on a per host basis to avoid repeated attempts to reconnect
SSH_CONNECTION_CACHE = {}
SFTP_CONNECTION_CACHE = {}
class Connection(object):
''' SSH based connections with Paramiko '''
def __init__(self, runner, host, port, user, password, private_key_file, *args, **kwargs):
self.ssh = None
self.sftp = None
self.runner = runner
self.host = host
self.port = port or 22
self.user = user
self.password = password
self.private_key_file = private_key_file
self.has_pipelining = False
def _cache_key(self):
return "%s__%s__" % (self.host, self.user)
def connect(self):
cache_key = self._cache_key()
if cache_key in SSH_CONNECTION_CACHE:
self.ssh = SSH_CONNECTION_CACHE[cache_key]
else:
self.ssh = SSH_CONNECTION_CACHE[cache_key] = self._connect_uncached()
return self
def _connect_uncached(self):
''' activates the connection object '''
if not HAVE_PARAMIKO:
raise errors.AnsibleError("paramiko is not installed")
vvv("ESTABLISH CONNECTION FOR USER: %s on PORT %s TO %s" % (self.user, self.port, self.host), host=self.host)
ssh = paramiko.SSHClient()
self.keyfile = os.path.expanduser("~/.ssh/known_hosts")
if C.HOST_KEY_CHECKING:
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(MyAddPolicy(self.runner))
allow_agent = True
if self.password is not None:
allow_agent = False
try:
if self.private_key_file:
key_filename = os.path.expanduser(self.private_key_file)
elif self.runner.private_key_file:
key_filename = os.path.expanduser(self.runner.private_key_file)
else:
key_filename = None
ssh.connect(self.host, username=self.user, allow_agent=allow_agent, look_for_keys=True,
key_filename=key_filename, password=self.password,
timeout=self.runner.timeout, port=self.port)
except Exception, e:
msg = str(e)
if "PID check failed" in msg:
raise errors.AnsibleError("paramiko version issue, please upgrade paramiko on the machine running ansible")
elif "Private key file is encrypted" in msg:
msg = 'ssh %s@%s:%s : %s\nTo connect as a different user, use -u <username>.' % (
self.user, self.host, self.port, msg)
raise errors.AnsibleConnectionFailed(msg)
else:
raise errors.AnsibleConnectionFailed(msg)
return ssh
def exec_command(self, cmd, tmp_path, sudo_user=None, sudoable=False, executable='/bin/sh', in_data=None, su=None, su_user=None):
''' run a command on the remote host '''
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
bufsize = 4096
try:
chan = self.ssh.get_transport().open_session()
except Exception, e:
msg = "Failed to open session"
if len(str(e)) > 0:
msg += ": %s" % str(e)
raise errors.AnsibleConnectionFailed(msg)
if not (self.runner.sudo and sudoable) and not (self.runner.su and su):
if executable:
quoted_command = executable + ' -c ' + pipes.quote(cmd)
else:
quoted_command = cmd
vvv("EXEC %s" % quoted_command, host=self.host)
chan.exec_command(quoted_command)
else:
# sudo usually requires a PTY (cf. requiretty option), therefore
            # we give it one by default (pty=True in ansible.cfg), and we try
# to initialise from the calling environment
if C.PARAMIKO_PTY:
chan.get_pty(term=os.getenv('TERM', 'vt100'),
width=int(os.getenv('COLUMNS', 0)),
height=int(os.getenv('LINES', 0)))
if self.runner.sudo or sudoable:
shcmd, prompt, success_key = utils.make_sudo_cmd(sudo_user, executable, cmd)
elif self.runner.su or su:
shcmd, prompt, success_key = utils.make_su_cmd(su_user, executable, cmd)
vvv("EXEC %s" % shcmd, host=self.host)
sudo_output = ''
try:
chan.exec_command(shcmd)
if self.runner.sudo_pass or self.runner.su_pass:
while not sudo_output.endswith(prompt) and success_key not in sudo_output:
chunk = chan.recv(bufsize)
if not chunk:
if 'unknown user' in sudo_output:
raise errors.AnsibleError(
'user %s does not exist' % sudo_user)
else:
raise errors.AnsibleError('ssh connection ' +
'closed waiting for password prompt')
sudo_output += chunk
if success_key not in sudo_output:
if sudoable:
chan.sendall(self.runner.sudo_pass + '\n')
elif su:
chan.sendall(self.runner.su_pass + '\n')
except socket.timeout:
raise errors.AnsibleError('ssh timed out waiting for sudo.\n' + sudo_output)
stdout = ''.join(chan.makefile('rb', bufsize))
stderr = ''.join(chan.makefile_stderr('rb', bufsize))
return (chan.recv_exit_status(), '', stdout, stderr)
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
if not os.path.exists(in_path):
raise errors.AnsibleFileNotFound("file or module does not exist: %s" % in_path)
try:
self.sftp = self.ssh.open_sftp()
except Exception, e:
raise errors.AnsibleError("failed to open a SFTP connection (%s)" % e)
try:
self.sftp.put(in_path, out_path)
except IOError:
raise errors.AnsibleError("failed to transfer file to %s" % out_path)
def _connect_sftp(self):
cache_key = "%s__%s__" % (self.host, self.user)
if cache_key in SFTP_CONNECTION_CACHE:
return SFTP_CONNECTION_CACHE[cache_key]
else:
result = SFTP_CONNECTION_CACHE[cache_key] = self.connect().ssh.open_sftp()
return result
def fetch_file(self, in_path, out_path):
''' save a remote file to the specified path '''
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
try:
self.sftp = self._connect_sftp()
except Exception, e:
raise errors.AnsibleError("failed to open a SFTP connection (%s)", e)
try:
self.sftp.get(in_path, out_path)
except IOError:
raise errors.AnsibleError("failed to transfer file from %s" % in_path)
def _any_keys_added(self):
added_any = False
for hostname, keys in self.ssh._host_keys.iteritems():
for keytype, key in keys.iteritems():
added_this_time = getattr(key, '_added_by_ansible_this_time', False)
if added_this_time:
return True
return False
def _save_ssh_host_keys(self, filename):
'''
not using the paramiko save_ssh_host_keys function as we want to add new SSH keys at the bottom so folks
don't complain about it :)
'''
if not self._any_keys_added():
return False
path = os.path.expanduser("~/.ssh")
if not os.path.exists(path):
os.makedirs(path)
f = open(filename, 'w')
for hostname, keys in self.ssh._host_keys.iteritems():
for keytype, key in keys.iteritems():
# was f.write
added_this_time = getattr(key, '_added_by_ansible_this_time', False)
if not added_this_time:
f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))
for hostname, keys in self.ssh._host_keys.iteritems():
for keytype, key in keys.iteritems():
added_this_time = getattr(key, '_added_by_ansible_this_time', False)
if added_this_time:
f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))
f.close()
def close(self):
''' terminate the connection '''
cache_key = self._cache_key()
SSH_CONNECTION_CACHE.pop(cache_key, None)
SFTP_CONNECTION_CACHE.pop(cache_key, None)
if self.sftp is not None:
self.sftp.close()
if C.PARAMIKO_RECORD_HOST_KEYS and self._any_keys_added():
# add any new SSH host keys -- warning -- this could be slow
lockfile = self.keyfile.replace("known_hosts",".known_hosts.lock")
dirname = os.path.dirname(self.keyfile)
if not os.path.exists(dirname):
os.makedirs(dirname)
KEY_LOCK = open(lockfile, 'w')
fcntl.lockf(KEY_LOCK, fcntl.LOCK_EX)
try:
# just in case any were added recently
self.ssh.load_system_host_keys()
self.ssh._host_keys.update(self.ssh._system_host_keys)
self._save_ssh_host_keys(self.keyfile)
except:
# unable to save keys, including scenario when key was invalid
# and caught earlier
traceback.print_exc()
pass
fcntl.lockf(KEY_LOCK, fcntl.LOCK_UN)
self.ssh.close()
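# --- Hedged usage sketch (illustrative; not part of the original plugin) ---
# This plugin is normally driven by the Ansible runner, but the paramiko
# calls it makes boil down to the following standalone session (the host
# name, user and command below are hypothetical):
#
#     import paramiko
#     client = paramiko.SSHClient()
#     client.load_system_host_keys()
#     client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
#     client.connect('host.example.com', username='deploy', port=22, timeout=10)
#     chan = client.get_transport().open_session()
#     chan.exec_command('/bin/sh -c ' + pipes.quote('uname -a'))
#     print chan.makefile('rb', 4096).read()
#     print 'exit status:', chan.recv_exit_status()
#     client.close()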
|
gpl-3.0
|
kiyukuta/chainer
|
chainer/function.py
|
2
|
25375
|
import collections
import traceback
import weakref
import six
import chainer
from chainer import configuration
from chainer import cuda
from chainer.utils import type_check
from chainer import variable
def no_backprop_mode():
"""Make a context manager which disables back-propagation.
    In this context, Chainer does not build a computational graph.
    A :class:`~chainer.Variable` created in this context has no
    reference to the :class:`~chainer.Function` that created it,
    so you cannot compute gradients with :func:`~chainer.Variable.backward`.
    In return, memory consumption is reduced.
    In this example, ``y`` is created in this context, so you cannot call
    :func:`~chainer.Variable.backward`.
>>> x = chainer.Variable(numpy.array([1,], 'f'))
>>> with chainer.no_backprop_mode():
... y = x + 1
"""
return configuration.using_config('enable_backprop', False)
def force_backprop_mode():
"""Make a context manager which enables back-propagation.
    When you want to enable back-propagation inside a :func:`no_backprop_mode`
    context, call this method. A :class:`~chainer.Variable` created in this
    context always has a computational graph.
    If you call this method outside of a :func:`no_backprop_mode` context, it
    changes nothing.
In this example, ``y`` has a computational graph and ``y.backward``
computes gradients of variables in the graph.
    >>> x = chainer.Variable(numpy.array([1,], 'f'))
    >>> with chainer.no_backprop_mode():
... with chainer.force_backprop_mode():
... y = x + 1
.. seealso::
See :func:`no_backprop_mode` for details of back-prop mode.
"""
return configuration.using_config('enable_backprop', True)
class Function(object):
"""Function on variables with backpropagation ability.
All function implementations defined in :mod:`chainer.functions` inherit
this class.
The main feature of this class is keeping track of function applications as
a backward graph. When a function is applied to :class:`Variable` objects,
its :meth:`forward` method is called on :data:`~Variable.data` fields of
input variables, and at the same time it chains references from output
variable nodes to the function and from the function to its input nodes.
.. note::
As of v2.0, the input/output variables and their corresponding variable
nodes in the graph are distinguished. Function acts like a function on
:class:`Variable` objects that returns :class:`Variable` objects as
outputs, whereas these objects do not appear directly in the graph.
Instead, their corresponding :class:`VariableNode` objects are inserted
to the graph.
.. note::
As of v1.5, a function instance cannot be used twice in any
computational graphs. In order to reuse a function object multiple
times, use :func:`copy.copy` before the function applications to make a
copy of the instance.
This restriction also means that we cannot make a *stateful function*
anymore. For example, it is now not allowed to let a function hold
parameters. Define a function as a pure (stateless) procedure, and use
:class:`~chainer.Link` to combine it with parameter variables.
.. admonition:: Example
    Let ``x`` be an instance of :class:`Variable` and ``f`` an instance of
    :class:`Function` taking only one argument. Then the line
>>> import numpy, chainer, chainer.functions as F
>>> x = chainer.Variable(numpy.zeros(10))
>>> f = F.Identity()
>>> y = f(x)
computes a new variable ``y`` and creates backward references. Actually,
backward references are set as per the following diagram::
x.node <--- f <--- y.node
If an application of another function ``g`` occurs as
>>> g = F.Identity()
>>> z = g(x)
then the graph grows with a branch::
|--- f <--- y.node
x.node <-+
|--- g <--- z.node
Note that the branching is correctly managed on backward computation,
i.e. the gradients from ``f`` and ``g`` are accumulated to the gradient
of ``x``.
Every function implementation should provide :meth:`forward_cpu`,
:meth:`forward_gpu`, :meth:`backward_cpu` and :meth:`backward_gpu`.
Alternatively, one can provide :meth:`forward` and :meth:`backward` instead
of separate methods. Backward methods have default implementations that
    just return ``None``, which indicates that the function is
    non-differentiable.
For functions that do not need a part of inputs in backward computation,
there is a way to possibly reduce the memory consumption by quickly
releasing the input arrays after the forward propagation. This is done by
calling :meth:`retain_inputs` from inside of :meth:`forward` (including
:meth:`forward_cpu` and :meth:`forward_gpu`). See the documentation of
:meth:`retain_inputs` for details.
For functions that need a part of outputs in backward computation, it is
**strongly recommended** to call :meth:`retain_outputs` from inside of
:meth:`forward` (including :meth:`forward_cpu` and :meth:`forward_gpu`).
It marks the specified output variable nodes to retain the data. The
    retained data can be accessed by the :attr:`output_data` property.
Attributes:
inputs: A tuple or list of input variables.
outputs: A tuple or list of output variables.
output_data: A tuple of retained output arrays. It has the same length
as :attr:`outputs`. The data of variables that are not retained are
set to ``None``. See :meth:`retain_outputs` for details.
"""
rank = 0 # default value of the rank
def __call__(self, *inputs):
"""Applies forward propagation with chaining backward references.
Basic behavior is expressed in documentation of :class:`Function`
class.
.. note::
            If the :data:`~Variable.data` attributes of the input variables
            reside on a GPU device, the appropriate device is selected before
            the :meth:`forward` method is called, so in most cases
            implementers do not need to take care of device selection.
Args:
inputs: Tuple of input :class:`Variable`, :class:`numpy.ndarray` or
:class:`cupy.ndarray` objects.
If the input is an :class:`numpy.ndarray` or a
:class:`cupy.ndarray`, it is automatically wrapped with
:class:`Variable`.
Returns:
One :class:`Variable` object or a tuple of multiple
:class:`Variable` objects.
"""
inputs = [x if isinstance(x, variable.Variable)
else variable.Variable(x, requires_grad=False)
for x in inputs]
in_data = tuple([x.data for x in inputs])
requires_grad = any([x.requires_grad for x in inputs])
if chainer.is_debug():
self._stack = traceback.extract_stack()
if configuration.config.type_check:
self._check_data_type_forward(in_data)
hooks = chainer.get_function_hooks()
if self._n_local_function_hooks != 0:
hooks = collections.OrderedDict(hooks)
hooks.update(self.local_function_hooks)
for hook in six.itervalues(hooks):
hook.forward_preprocess(self, in_data)
# Forward prop
with cuda.get_device_from_array(*in_data):
self._input_indexes_to_retain = None
self._output_indexes_to_retain = None
outputs = self.forward(in_data)
assert type(outputs) == tuple
for hook in six.itervalues(hooks):
hook.forward_postprocess(self, in_data)
if chainer.is_debug():
if any(out.dtype.kind == 'f' and
cuda.get_array_module(out).isnan(out).any()
for out in outputs):
msg = 'NaN is detected on forward computation'
raise RuntimeError(msg)
ret = tuple([variable.Variable(y, requires_grad=requires_grad)
for y in outputs])
if configuration.config.enable_backprop:
# Topological ordering
self.rank = max([x.rank for x in inputs]) if inputs else 0
# Backward edges
for y in ret:
y.set_creator(self)
self.inputs = tuple([x.node for x in inputs])
# Forward edges (must be weak references)
self.outputs = tuple([weakref.ref(y.node) for y in ret])
input_indexes_to_retain = self._input_indexes_to_retain
if input_indexes_to_retain is None:
# input arrays are retained by default
input_indexes_to_retain = six.moves.range(len(inputs))
for index in input_indexes_to_retain:
inputs[index].retain_data()
del self._input_indexes_to_retain
output_indexes_to_retain = self._output_indexes_to_retain
if output_indexes_to_retain is not None:
for index in output_indexes_to_retain:
ret[index].retain_data()
del self._output_indexes_to_retain
if len(ret) == 1:
return ret[0]
else:
return ret
@property
def local_function_hooks(self):
"""Ordered Dictionary of registered function hooks.
        Contrary to ``chainer.thread_local.function_hooks``,
        which registers its elements to all functions,
        the function hooks in this property are specific to this function.
"""
if not hasattr(self, '_local_function_hooks'):
self._local_function_hooks = collections.OrderedDict()
return self._local_function_hooks
@property
def _n_local_function_hooks(self):
if hasattr(self, '_local_function_hooks'):
return len(self._local_function_hooks)
return 0
@property
def label(self):
"""Short text that represents the function.
The default implementation returns its type name.
Each function should override it to give more information.
"""
return self.__class__.__name__
@property
def stack(self):
if hasattr(self, '_stack'):
return self._stack
else:
return None
def _check_data_type_forward(self, in_data):
in_type = type_check.get_light_types(in_data)
try:
with type_check.light_mode:
self.check_type_forward(in_type)
return
except type_check.InvalidType:
# Ignore errors on first run
pass
in_type = type_check.get_types(in_data, 'in_types', False)
with type_check.get_function_check_context(self):
self.check_type_forward(in_type)
def check_type_forward(self, in_types):
"""Checks types of input data before forward propagation.
Before :meth:`forward` is called, this function is called.
You need to validate types of input data in this function
using :ref:`the type checking utilities <type-check-utils>`.
Args:
in_types (~chainer.utils.type_check.TypeInfoTuple): The type
information of input data for :meth:`forward`.
"""
pass
def forward(self, inputs):
"""Applies forward propagation to input arrays.
It delegates the procedure to :meth:`forward_cpu` or
:meth:`forward_gpu` by default. Which it selects is determined by the
type of input arrays.
Implementations of :class:`Function` must implement either CPU/GPU
methods or this method.
Args:
inputs: Tuple of input array(s).
Returns:
Tuple of output array(s).
.. warning::
Implementations of :class:`Function` must take care that the
return value must be a tuple even if it returns only one array.
"""
if any(isinstance(x, cuda.ndarray) for x in inputs):
return self.forward_gpu(inputs)
else:
return self.forward_cpu(inputs)
def forward_cpu(self, inputs):
"""Applies forward propagation to input arrays on CPU.
Args:
inputs: Tuple of :class:`numpy.ndarray` object(s).
Returns:
tuple: Tuple of :class:`numpy.ndarray` object(s).
.. warning::
Implementations of :class:`Function` must take care that the
return value must be a tuple even if it returns only one array.
"""
raise NotImplementedError()
def forward_gpu(self, inputs):
"""Applies forward propagation to input arrays on GPU.
Args:
inputs: Tuple of :class:`cupy.ndarray` object(s).
Returns:
tuple: Tuple of :class:`cupy.ndarray` object(s).
.. warning::
Implementations of :class:`Function` must take care that the
return value must be a tuple even if it returns only one array.
"""
raise NotImplementedError()
def backward(self, inputs, grad_outputs):
"""Applies backprop to output gradient arrays.
It delegates the procedure to :meth:`backward_cpu` or
:meth:`backward_gpu` by default. Which it selects is determined by the
type of input arrays and output gradient arrays. Implementations of
:class:`Function` must implement either CPU/GPU methods or this method,
if the function is intended to be backprop-ed.
Args:
inputs: Tuple of input arrays.
grad_outputs: Tuple of output gradient arrays.
Returns:
tuple: Tuple of input gradient arrays. Some or all of them can be
``None``, if the function is not differentiable on
inputs.
.. warning::
Implementations of :class:`Function` must take care that the
return value must be a tuple even if it returns only one array.
"""
if any(isinstance(x, cuda.ndarray) for x in inputs + grad_outputs):
return self.backward_gpu(inputs, grad_outputs)
else:
return self.backward_cpu(inputs, grad_outputs)
def backward_cpu(self, inputs, grad_outputs):
"""Applies backprop to output gradient arrays on CPU.
Args:
inputs: Tuple of input :class:`numpy.ndarray` object(s).
grad_outputs: Tuple of output gradient :class:`numpy.ndarray`
object(s).
Returns:
tuple: Tuple of input gradient :class:`numpy.ndarray` object(s).
Some or all of them can be ``None``, if the function is not
differentiable on corresponding inputs.
.. warning::
Implementations of :class:`Function` must take care that the
return value must be a tuple even if it returns only one array.
"""
return tuple(None for _ in inputs)
def backward_gpu(self, inputs, grad_outputs):
"""Applies backprop to output gradient arrays on GPU.
Args:
inputs: Tuple of input :class:`cupy.ndarray`
object(s).
grad_outputs: Tuple of output gradient
:class:`cupy.ndarray` object(s).
Returns:
tuple: Tuple of input gradient :class:`cupy.ndarray`
object(s). Some or all of them can be ``None``, if the function is
not differentiable on corresponding inputs.
.. warning::
Implementations of :class:`Function` must take care that the
return value must be a tuple even if it returns only one array.
"""
return tuple(None for _ in inputs)
def unchain(self):
"""Purges in/out nodes and this function itself from the graph.
This method is called from :meth:`Variable.unchain_backward` method.
"""
for y in self.outputs:
y_ref = y()
if y_ref is not None:
y_ref.unchain()
self.inputs = None
def add_hook(self, hook, name=None):
"""Registers the function hook.
Args:
hook(~chainer.function.FunctionHook):
Function hook to be registered.
name(str): Name of the function hook.
name must be unique among function hooks
registered to the function. If ``None``,
default name of the function hook is used.
"""
if not isinstance(hook, FunctionHook):
raise TypeError('Hook must be a FunctionHook')
if name is None:
name = hook.name
if name in self.local_function_hooks:
raise KeyError('Hook %s already exists' % name)
self.local_function_hooks[name] = hook
def delete_hook(self, name):
"""Unregisters the function hook.
Args:
name(str): the name of the function hook
to be unregistered.
"""
del self.local_function_hooks[name]
def retain_inputs(self, indexes):
"""Lets specified input variable nodes keep data arrays.
By calling this method from :meth:`forward`, the function can specify
which inputs are required for backprop.
If this method is not called, the function keeps all input arrays. If
you want to release all input arrays, call this method by passing an
empty sequence.
Note that **this method must not be called from the outside of
forward method.**
Args:
            indexes (iterable of int): Indexes of input variables that the
                function requires for backprop.
"""
self._input_indexes_to_retain = indexes
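    # Hedged sketch (illustrative): a concrete forward() implementation
    # might call
    #     self.retain_inputs(())     # release every input array
    # when backprop needs no inputs, or
    #     self.retain_inputs((0,))   # keep only the first input
    # when just one of them is needed in backward().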
def retain_outputs(self, indexes, retain_after_backward=False):
"""Lets specified output variable nodes keep data arrays.
By calling this method from :meth:`forward`, the function can specify
which outputs are required for backprop. If this method is not called,
        no output variables are marked to keep their data arrays at the point
of returning from :meth:`__call__`. The retained arrays are stored to
:attr:`output_data`.
.. note::
It is STRONGLY RECOMMENDED to use this method if the function
requires some or all output arrays in backprop. The function can
also use output arrays just by keeping references to them directly,
            but doing so might hurt the performance of later function
applications to the output variables.
Note that **this method must not be called from the outside of
forward method.**
Args:
            indexes (iterable of int): Indexes of output variables that the
                function requires for backprop.
retain_after_backward (bool): If ``True``, a reference to the
outputs will remain after the backprop of the function is over.
If ``False``, the reference will be deleted.
"""
self._output_indexes_to_retain = indexes
if retain_after_backward:
self._retain_after_backward = retain_after_backward
class FunctionHook(object):
"""Base class of hooks for Functions.
    :class:`~chainer.function.FunctionHook` is a callback object
    that is registered to a :class:`~chainer.Function`.
    Registered function hooks are invoked before and after
    forward and backward operations of each function.
    Function hooks that derive from :class:`FunctionHook` are required
to implement four methods:
:meth:`~chainer.function.FunctionHook.forward_preprocess`,
:meth:`~chainer.function.FunctionHook.forward_postprocess`,
:meth:`~chainer.function.FunctionHook.backward_preprocess`, and
:meth:`~chainer.function.FunctionHook.backward_postprocess`.
By default, these methods do nothing.
Specifically, when :meth:`~chainer.Function.__call__`
method of some function is invoked,
:meth:`~chainer.function.FunctionHook.forward_preprocess`
(resp. :meth:`~chainer.function.FunctionHook.forward_postprocess`)
of all function hooks registered to this function are called before
(resp. after) forward propagation.
Likewise, when :meth:`~chainer.Variable.backward` of some
:class:`~chainer.Variable` is invoked,
:meth:`~chainer.function.FunctionHook.backward_preprocess`
(resp. :meth:`~chainer.function.FunctionHook.backward_postprocess`)
of all function hooks registered to the function which holds this variable
as a gradient are called before (resp. after) backward propagation.
There are two ways to register :class:`~chainer.function.FunctionHook`
objects to :class:`~chainer.Function` objects.
    The first is to use a ``with`` statement. Function hooks registered
    this way apply to all functions used within the ``with`` statement
    and are unregistered at the end of the ``with`` statement.
.. admonition:: Example
The following code is a simple example in which
we measure the elapsed time of a part of forward propagation procedure
with :class:`~chainer.function_hooks.TimerHook`, which is a subclass of
:class:`~chainer.function.FunctionHook`.
>>> from chainer import function_hooks
>>> class Model(chainer.Chain):
... def __call__(self, x1):
... return F.exp(self.l(x1))
>>> model1 = Model(l=L.Linear(10, 10))
>>> model2 = Model(l=L.Linear(10, 10))
>>> x = chainer.Variable(np.zeros((1, 10), 'f'))
>>> with chainer.function_hooks.TimerHook() as m:
... _ = model1(x)
... y = model2(x)
... print("Total time : " + str(m.total_time()))
... model3 = Model(l=L.Linear(10, 10))
... z = model3(y) # doctest:+ELLIPSIS
Total time : ...
In this example, we measure the elapsed times for each forward
propagation of all functions in ``model1`` and ``model2``
(specifically, :class:`~chainer.functions.LinearFunction` and
:class:`~chainer.functions.Exp` of ``model1`` and ``model2``).
Note that ``model3`` is not a target of measurement
as :class:`~chainer.function_hooks.TimerHook` is unregistered
before forward propagation of ``model3``.
.. note::
Chainer stores the dictionary of registered function hooks
        as a thread-local object, so the registered function hooks
        can differ between threads.
    The other is to register a hook directly to a
    :class:`~chainer.Function` object with the
    :meth:`~chainer.Function.add_hook` method.
    Function hooks registered in this way can be removed by the
    :meth:`~chainer.Function.delete_hook` method.
    Contrary to the former registration method, such function hooks are
    registered only to the function on which :meth:`~chainer.Function.add_hook`
    is called.
Args:
name(str): Name of this function hook.
"""
name = 'FunctionHook'
def __enter__(self):
function_hooks = chainer.get_function_hooks()
if self.name in function_hooks:
raise KeyError('hook %s already exists' % self.name)
function_hooks[self.name] = self
return self
def __exit__(self, *_):
del chainer.get_function_hooks()[self.name]
# forward
def forward_preprocess(self, function, in_data):
"""Callback function invoked before forward propagation.
Args:
function(~chainer.Function): Function object to which
the function hook is registered.
in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
Input data of forward propagation.
"""
pass
def forward_postprocess(self, function, in_data):
"""Callback function invoked after forward propagation.
Args:
function(~chainer.Function): Function object to which
the function hook is registered.
in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
Input data of forward propagation.
"""
pass
# backward
def backward_preprocess(self, function, in_data, out_grad):
"""Callback function invoked before backward propagation.
Args:
function(~chainer.Function): Function object to which
the function hook is registered.
in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
Input data of forward propagation.
out_grad(tuple of numpy.ndarray or tuple of cupy.ndarray):
Gradient data of backward propagation.
"""
pass
def backward_postprocess(self, function, in_data, out_grad):
"""Callback function invoked after backward propagation.
Args:
function(~chainer.Function): Function object to which
the function hook is registered.
in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
Input of forward propagation.
out_grad(tuple of numpy.ndarray or tuple of cupy.ndarray):
Gradient data of backward propagation.
"""
pass
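# --- Hedged usage sketch (illustrative; not part of the original file) ---
# A minimal Function subclass following the protocol documented above:
# forward() receives a tuple of arrays and must return a tuple, and
# backward() returns one gradient (or None) per input.
class ExampleSquare(Function):

    """Computes y = x ** 2; written only to illustrate the interface."""

    def forward(self, inputs):
        x, = inputs
        self.retain_inputs((0,))  # x is needed again in backward()
        return x * x,

    def backward(self, inputs, grad_outputs):
        x, = inputs
        gy, = grad_outputs
        return 2 * x * gy,

# Hypothetical usage:
#     y = ExampleSquare()(chainer.Variable(numpy.arange(3, dtype='f')))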
|
mit
|
spaceone/pyjs
|
examples/libtest/ClassTest.py
|
6
|
50377
|
import sys
from UnitTest import UnitTest, IN_BROWSER
# syntax check
# import a, b, c
if True:
import imports.circ1
from imports import exec_order, imports as IMPORTS
from imports import exec_order as EXEC_ORDER
import I18N
from imports.classes import WithAttribute
import imports.decors # must be in this form
global names
names = {}
class SubAssignBase(object):
names['SubAssign'] = 'SubAssign'
def __init__(self):
pass
class SubAssign(SubAssignBase):
def __init__(self):
SubAssignBase.__init__(self)
names['SubAssignBase'] = 'SubAssignBase'
class GetAttribute():
# This class definition fails at startup
getIt = WithAttribute.ATTR
class Sink:
def __init__(self):
self.sink = "Sink"
class SinkInfo:
def __init__(self, object_type):
self.object_type=object_type
self.instance=None
def createInstance(self):
return self.object_type()
def getInstance(self):
        if self.instance is None:
self.instance=self.createInstance()
return self.instance
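# Hedged usage sketch (illustrative): SinkInfo defers construction of its
# object_type until first use, so repeated getInstance() calls return the
# same object:
#     info = SinkInfo(Trees)
#     assert info.getInstance() is info.getInstance()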
class Trees(Sink):
def __init__(self):
Sink.__init__(self)
self.test = "Trees"
class TestClass1Bug339(object):
def __init__(self):
self.test = TestClass2()
# The following method call causes the problem:
self.test.test_method(test_arg=0)
# The problem happens when a method is called with keyword
# arguments on an object that is referenced as an attribute of
# another object. In other words, this method could be called
# in either of the following ways with no problem:
test = TestClass2()
test.test_method(test_arg=0)
# or
self.test = TestClass2()
self.test.test_method(0)
class TestClass2(object):
def test_method(self, test_arg):
# Because of the way this method is called, self will be undefined
# and the following line will cause an exception
self.value = 0
class UserListPanel():
def setUsers(self, title, screennames=None):
pass
class TestClassBug342(object):
def __init__(self):
self.u = UserListPanel()
self.u.setUsers('title', screennames=33)
class ClassTest(UnitTest):
def testInstancePassing(self):
s = SinkInfo(Trees)
i = s.getInstance()
self.assertEquals(i.test, "Trees")
self.assertEquals(i.sink, "Sink")
def testBug342(self):
try:
t = TestClassBug342()
except:
self.fail("Bug #342 encountered")
finally:
self.assertTrue(True)
def testBug339(self):
try:
TestClass1Bug339()
except:
self.fail("Bug #339 encountered")
finally:
self.assertTrue(True)
def testSubAssign(self):
self.assertEquals(names['SubAssignBase'], 'SubAssignBase')
self.assertEquals(names['SubAssign'], 'SubAssign')
# test Class.x
def testClassVars(self):
expected_result1="test"
expected_result2=1
# check class var value without instance
self.assertEquals(ExampleClass.x, expected_result1)
self.assertEquals(ExampleClass.x.upper(), expected_result1.upper())
# verify class var value for instances
y = ExampleClass()
self.assertEquals(y.x, expected_result1)
# modify class var
ExampleClass.x = expected_result2
self.assertEquals(ExampleClass.x, expected_result2)
# verify that class var changed for NEW instances
z = ExampleClass()
self.assertEquals(z.x, expected_result2)
# verify that class var changed for EXISTING instances
self.assertEquals(y.x, expected_result2)
# verify that the initiation of ExampleClass.c is correct
self.assertEquals(ExampleClass.c, 1|2)
# verify that class properties can only be reached via instance
#
        # this test no longer fails as expected because the compiler now
        # correctly assumes that the 'a' used in fail_a is at module level.
        # This has the consequence that 'a' is undefined in javascript. This
        # could be solved by adding a lot of code.
        # The test is enabled to remind us of the differences with CPython.
try:
ExampleClass().fail_a()
self.fail("Failed to raise error on ExampleClass().fail_a() bug #217")
except (NameError, AttributeError), e:
self.assertTrue(True)
except ValueError:
self.fail("Failed to raise NameError or AttributeError on ExampleClass().fail_a() bug #217")
except:
self.fail("Failed to raise NameError or AttributeError on ExampleClass().fail_a()")
        # for now we just make sure the result is undefined and not the value
        # of ExampleClass.a
if IN_BROWSER:
from __pyjamas__ import JS
try:
x = ExampleClass().fail_a()
#self.assertTrue(JS('pyjslib.isUndefined(@{{x}})'))
self.assertTrue(JS("$pyjs['loaded_modules']['pyjslib']['isUndefined'](@{{x}})"))
except ValueError:
self.assertTrue(True)
def test_iops(self):
class X(object):
def __init__(self, x):
self.x = x
def __add__(self, y):
return X(self.x + y.x)
def __mul__(self, y):
return X(self.x * y.x)
def __sub__(self, y):
return X(self.x - y.x)
def __iadd__(self, y):
self.x += y.x
return self
def __imul__(self, y):
self.x *= y.x
return self
a = a0 = X(2)
b = b0 = X(4)
c = a + b
d = a * b
self.assertTrue(c is not a and c is not b)
self.assertTrue(d is not a and d is not b and d is not c)
self.assertEqual(c.x, 6)
self.assertEqual(d.x, 8)
a += b
self.assertTrue(a is a0, 'Bug #573 __iadd__ should modify object in-place')
self.assertEqual(a.x, 6)
self.assertEqual(a0.x, a.x, 'Bug #573 __iadd__ should modify all references to an object')
a -= b
self.assertTrue(a is not a0)
self.assertEqual(a.x, 2)
self.assertNotEqual(a0.x, a.x, 'Bug #573 reference should not have same value after __iadd__ & __neg__')
b *= c
self.assertTrue(b is b0, 'Bug #573 __imul__ should modify object in-place')
self.assertEqual(b.x, 24)
self.assertEqual(b0.x, b.x, 'Bug #573 __imul__ should modify all references to an object')
def test_getattr(self):
class X(object):
def __init__(self, x=0):
self.x = x
def __getattr__(self, name):
return X(self.x + 1)
x = X()
self.assertEqual(x.x, 0)
try:
self.assertEqual(x.next.x, 1)
self.assertEqual(x.next.bla.x, 2)
self.assertEqual(x.a.b.c.x, 3)
except:
self.fail("Bug #575 __getattr__ method not supported")
def test_deep_property_access(self):
class X(object):
def __init__(self, x=0):
self.x = x
@property
def bla(self):
return self.next
@property
def next(self):
return X(self.x + 1)
x = X()
self.assertEqual(x.x, 0)
try:
self.assertEqual(x.next.x, 1)
self.assertEqual(x.next.bla.x, 2)
self.assertEqual(x.next.bla.next.x, 3)
self.assertEqual(x.bla.next.bla.next.bla.x, 5)
except:
self.fail("Bug #576 Deep property access not supported")
def test_slice(self):
class X(object):
def __init__(self, data):
self.data = data
def __getitem__(self, index):
assert isinstance(index, slice)
return self.data[index]
def __setitem__(self, index, value):
assert isinstance(index, slice)
self.data[index] = value
data = [1, 2, 3]
x = X(data)
self.assertEqual(data[:2], x[:2], 'Bug #577 __getitem__ should be used for slicing')
self.assertEqual(x[:2], [1, 2], 'Bug #577 __getitem__ not supported')
try:
x[1:2] = [5]
self.assertEqual(data[:], x[:], 'Bug #577 __setitem__ should be used for slice assignment')
self.assertEqual(x[1:], [5, 3])
except:
self.fail('Bug #577 slice / __getitem__ / __setitem__ not supported')
# test Class().x
def testInheritedProperties(self):
expected_result1="test"
expected_result2=1
expected_result3="other"
# check parent property
obj1 = ExampleParentClass()
self.assertEquals(obj1.x, expected_result1)
# check default inherited property
obj1.x = expected_result2
obj2 = ExampleChildClass()
self.assertEquals(obj2.x, expected_result1)
# change inherited property
obj2.x = expected_result3
self.assertEquals(obj2.x, expected_result3)
# verify that parent class properties were NOT changed
self.assertEquals(obj1.x, expected_result2)
obj = ExampleChildClass(b = 222)
self.assertEquals(obj.prop_a, 1)
self.assertEquals(obj.prop_b, 222)
# test Class().anObject
def testInheritedPropertyObjects(self):
expected_result1 = "another"
expected_result2 = "other"
# check parent property
obj1 = ExampleParentObject()
self.assertEquals(len(obj1.x), 0)
# check default inherited property
obj1.x.append(expected_result2)
obj2 = ExampleChildObject()
self.assertEquals(len(obj2.x), 1)
# change inherited property
obj2.x.append(expected_result1)
self.assertEquals(obj2.x[1], expected_result1)
# verify that parent class properties were NOT changed
self.assertEquals(obj1.x[0], expected_result2)
# test Class().__init__
def testInheritedConstructors(self):
expected_result1 = "test"
expected_result2 = "parent"
expected_result3 = "grandparent"
expected_result4 = "older"
# verify that parent.__init__ is called if there is no child.__init__()
obj1 = ExampleChildNoConstructor()
self.assertEquals(obj1.x, expected_result1, "ExampleParentConstructor.__init__() was NOT called for ExampleChildNoConstructor")
# verify that parent.__init__ is NOT called (child.__init__() is defined)
obj2 = ExampleChildConstructor()
self.assertNotEqual(getattr(obj2, "x", None), expected_result1, "ExampleParentConstructor.__init__() was called for ExampleChildConstructor")
# verify that parent.__init__ is explicitly called
obj3 = ExampleChildExplicitConstructor()
self.assertEquals(obj3.x, expected_result1, "ExampleParentConstructor.__init__() was NOT called for ExampleChildExplicitConstructor")
# verify inherited values
self.assertEquals(obj1.y, expected_result2, "Did not inherit property from parent")
self.assertEquals(obj2.y, expected_result2, "Did not inherit property from parent")
self.assertEquals(obj1.z, expected_result3, "Did not inherit property from grandparent")
self.assertEquals(obj2.z, expected_result3, "Did not inherit property from grandparent")
res = getattr(obj1, "r", None)
self.assertNotEqual(res, expected_result4, "ExampleGrandParentConstructor.__init__() was called (%s)" % res)
self.assertNotEqual(getattr(obj2, "r", None), expected_result4, "ExampleGrandParentConstructor.__init__() was called")
# check inherited class vars (from parent)
self.assertEqual(ExampleChildConstructor.y, expected_result2, "Did not inherit class var from parent")
self.assertEqual(ExampleChildNoConstructor.y, expected_result2, "Did not inherit class var from parent")
self.assertEqual(ExampleChildExplicitConstructor.y, expected_result2, "Did not inherit class var from parent")
# check inherited class vars (from grandparent)
self.assertEqual(ExampleChildConstructor.z, expected_result3, "Did not inherit class var from grandparent")
self.assertEqual(ExampleChildNoConstructor.z, expected_result3, "Did not inherit class var from grandparent")
self.assertEqual(ExampleChildExplicitConstructor.z, expected_result3, "Did not inherit class var from grandparent")
def testInheritFromType(self):
i_types = [(int, 1), (float, 1.5), (str, "test"), (long, 1),
(tuple, (1,2)), (list, [1,2]), (dict, {'1':1}), (set, set([1,2]))]
for cls, val in i_types:
try:
class subclassed_type(cls):
def test_inh_method(self):
return 1
subclassed_type.__name__ = cls.__name__
inst = subclassed_type(val)
self.assertEqual(inst, val, "Subclasses of type '%s' are not instantiated properly, issue #623" % cls.__name__)
self.assertEqual(inst.test_inh_method(), 1, "Methods of subclasses of type '%s' fail, issue #623" % cls.__name__)
self.assertEqual(str(inst), str(val), "__str__ of subclasses of type '%s' fail, issue #623" % cls.__name__)
self.assertEqual(type(inst), subclassed_type, "Subclasses of type '%s' have wrong type, issue #623" % cls.__name__)
self.assertTrue(isinstance(inst, subclassed_type), "Subclasses of type '%s' have wrong type, issue #623" % cls.__name__)
except:
self.fail("Subclassing type '%s' does not work, issue #623" % cls.__name__)
class SubclassedString(str): pass
class SubclassedInt(int): pass
class SubclassedFloat(float): pass
try:
self.assertEqual(str(SubclassedString("string")), "string", "#484")
except:
self.fail("Could not instantiate subclassed string, bug #484")
try:
v = str(SubclassedInt(1))
self.assertEqual(v, "1", "bug #484 - %s != '1'" % v)
except:
self.fail("Could not instantiate subclassed int")
try:
self.assertEqual(str(SubclassedFloat(1.1)), "1.1", "#484")
except:
self.fail("Could not instantiate subclassed float")
self.assertTrue(isinstance(SubclassedString('abc'), object),
'Issue #670'
' derived from int/float/str not instance of object')
self.assertTrue(isinstance(SubclassedInt(1), object),
'Issue #670'
' derived from int/float/str not instance of object')
self.assertTrue(isinstance(SubclassedFloat(1.1), object),
'Issue #670'
' derived from int/float/str not instance of object')
def testClassMethods(self):
results = ExampleClass.sampleClassMethod("a")
self.assertEqual(results[0], ExampleClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1], "a")
results = ExampleParentClass.sampleClassMethod("a")
self.assertEqual(results[0], ExampleParentClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1], "a")
results = ExampleChildClass.sampleClassMethod("a")
self.assertEqual(results[0], ExampleChildClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1], "a")
results = ExampleClass.sampleClassMethodVarargs("a", "b", "c")
self.assertEqual(results[0], ExampleClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1][0], "a")
self.assertEqual(results[1][1], "b")
self.assertEqual(results[1][2], "c")
results = ExampleClass.sampleClassMethodKwargs(c=9, b=8, a=7)
self.assertEqual(results[0], ExampleClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1], 7)
self.assertEqual(results[2], 8)
self.assertEqual(results[3], 9)
#
# Repeat the test using class instances; the effect should be the same
#
results = ExampleClass().sampleClassMethod("a")
self.assertEqual(results[0], ExampleClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1], "a")
results = ExampleParentClass().sampleClassMethod("a")
self.assertEqual(results[0], ExampleParentClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1], "a")
results = ExampleChildClass().sampleClassMethod("a")
self.assertEqual(results[0], ExampleChildClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1], "a")
results = ExampleClass().sampleClassMethodVarargs("a", "b", "c")
self.assertEqual(results[0], ExampleClass, "Expected first parameter to be the class instance")
self.assertEqual(results[1][0], "a")
self.assertEqual(results[1][1], "b")
self.assertEqual(results[1][2], "c")
# Test argument passing
self.assertEqual(ExampleParentClass().inert('inert'), 'inert')
self.assertEqual(ExampleParentClass().global_x1(), 'global test')
self.assertEqual(ExampleParentClass().global_x2(), 'global test')
        # Test recursive class definition
instance = RecurseMe()
self.assertEqual(instance.chain[0], 0)
self.assertEqual(instance.chain[1], 1)
def testStaticMethod(self):
self.assertEqual(ExampleClass.sampleStaticMethod("a"), "a", "Expected static method to take the parameter I give as its first parameter")
try:
m = ExampleClass.oldIdiomStaticMethod("middle")
self.assertEqual(m,"beforemiddleafter")
except:
self.fail("Issue #415 - staticmethod(method) idiom does not work")
def test_method_alias(self):
class C(object):
def original(self):
return 5
alias = original
def method_using_alias(self):
return self.alias()
c = C()
self.assertEqual(c.original(), 5)
try:
self.assertEqual(c.alias(), 5)
self.assertEqual(c.method_using_alias(), 5)
except:
self.fail("Bug #578 : method alias fails")
def test_class_isinstance_type(self):
class C(object):
pass
self.assertTrue(isinstance(C, type), "Bug #579 type type not supported")
def test__new__Method(self):
c = OtherClass1()
self.assertEqual(c.__class__.__name__, 'ObjectClass')
self.assertEqual(c.prop, 1)
c = OtherSubclass1()
self.assertEqual(c.__class__.__name__, 'ObjectClass', "Issue #414: __new__ method on superclass not called")
c = OtherClass2()
self.assertEqual(c.__class__.__name__, 'OtherClass2')
try:
prop = c.prop
self.fail("failed to raise an error on c.prop (improperly follows explicit __new__ with implicit __init__)")
except:
self.assertTrue(True)
self.assertTrue(c.init, "OtherClass2.__init__() is not executed")
try:
c = OtherClass3(41, 42)
self.assertTrue(True)
except:
self.fail("Issue #417: __new__ method fails for lack of arguments")
self.assertEqual(c.y if hasattr(c,"y") else 0, 42, "Issue #417: __new__ method not passed constructor arguments.")
try:
c = OtherClass3()
self.fail("Issue #418: __new__ method doesn't fail for lack of arguments")
except:
self.assertTrue(True)
try:
c = OtherClass4()
except TypeError:
self.fail("TypeError on OtherClass4()")
c = OtherSubclass4(1, 2, c=3, d=4)
try:
self.assertEqual(c.args, (1,2))
except AttributeError:
self.fail("c.args is not defined")
try:
self.assertEqual(c.kwargs, dict(c=3, d=4))
except AttributeError:
self.fail("c.kwargs is not defined")
instance = MultiBase.__new__(MultiInherit1)
self.assertEqual(instance.name, 'MultiInherit1')
instance = MultiInherit1.__new__(MultiBase)
self.assertEqual(instance.name, 'MultiBase')
instance = object.__new__(MultiInherit1, **{})
self.assertEqual(instance.name, 'MultiInherit1')
#def testClassDefinitionOrder(self):
# x = ExampleSubclassDefinedBeforeSuperclass()
# self.assertEqual(x.someMethod(), "abc", "Expected someMethod to return 'abc'")
def testIsInstance(self):
c = ExampleChildClass()
self.failIf(isinstance(c, ExampleClass))
self.failUnless(isinstance(c, ExampleChildClass))
self.failUnless(isinstance(c, ExampleParentClass))
def testIsInstanceNested(self):
c = ExampleChildClass()
self.failUnless(isinstance(c, (ExampleClass, ExampleChildClass)))
self.failIf(isinstance(c, (ExampleClass, ExampleParentObject)))
self.failUnless(isinstance(c, (ExampleClass, (ExampleChildClass,))))
def testInstanceChecking(self):
try:
ExampleChildClass.get_x(ExampleChildClass())
self.assertTrue(True)
except TypeError, e:
self.fail(e)
try:
ExampleChildClass.get_x(ExampleClass())
self.fail('Failed to raise error for invalid instance')
except TypeError, e:
self.assertTrue(e.args[0].find('get_x() must be called') >= 0, e.args[0])
def testIsSubclass(self):
class A: pass
class B(A): pass
class C(B): pass
class D: pass
class E(D, C): pass
self.assertTrue(issubclass(A, A))
self.assertTrue(issubclass(C, A))
self.assertTrue(issubclass(E, A))
self.assertTrue(issubclass(E, (PassMeAClass, A)))
self.assertFalse(issubclass(A, PassMeAClass))
self.assertRaises(TypeError, issubclass, PassMeAClass(), PassMeAClass)
self.assertRaises(TypeError, issubclass, PassMeAClass, PassMeAClass())
self.assertRaises(TypeError, issubclass, None, PassMeAClass)
def testMetaClass(self):
Klass = type('MyClass', (object,), {'method': method, 'x': 5})
instance = Klass()
self.assertEqual(instance.method(), 1)
self.assertEqual(instance.x, 5)
def testMetaClassInheritFromType(self):
class Metaklass(type):
def metamethod(cls):
return 2
class Klass(object):
__metaclass__ = Metaklass
def method(cls):
return 1
x = 5
try:
self.assertEqual(Klass.metamethod(), 2)
instance = Klass()
self.assertEqual(instance.method(), 1)
self.assertEqual(instance.x, 5)
except:
self.fail('bug #298 - missing metaclass features')
def testMetaClassDct(self):
class MetaklassDctSaver(type):
def __init__(cls, name, bases, dct):
super(MetaklassDctSaver, cls).__init__(name, bases, dct)
cls.saved_dct = dct
class MyClass(object):
__metaclass__ = MetaklassDctSaver
a = 1
b = 2
try:
self.assertTrue(isinstance(MyClass.saved_dct, dict))
self.assertTrue("a" in MyClass.saved_dct)
self.assertTrue("b" in MyClass.saved_dct)
except:
self.fail('bug #298 - missing metaclass features')
def testMultiSuperclass(self):
new_value = 'New value'
c = ExampleMultiSuperclassNoConstructor(new_value)
# Verify that the __init__ of ExampleMultiSuperclassParent1 is used
self.assertEqual(c.x, new_value)
# Verify that the ExampleMultiSuperclassParent2.y is there
self.assertEqual(c.y, ExampleMultiSuperclassParent2.y)
# Verify that the get_value() of ExampleMultiSuperclassParent1 is used
self.assertEqual(c.get_value(), new_value)
c = ExampleMultiSuperclassExplicitConstructor(new_value)
# Verify that the ExampleMultiSuperclassParent1.x is there
self.assertEqual(c.x, ExampleMultiSuperclassParent1.x)
# Verify that the ExampleMultiSuperclassParent2.y is there
self.assertEqual(c.y, ExampleMultiSuperclassParent2.y)
# Verify that the __init__ of ExampleMultiSuperclassExplicitConstructor is used
self.assertEqual(c.z, new_value)
# Verify that the get_value() of ExampleMultiSuperclassExplicitConstructor is used
self.assertEqual(c.get_value(), new_value)
# Verify that the combination of the variables is correct
self.assertEqual(c.get_values(), ':'.join([ExampleMultiSuperclassParent1.x, ExampleMultiSuperclassParent2.y, new_value]))
def testMultiDoubleInherit(self):
i = DoubleInherit(1,2,3)
self.assertEqual(i.get_x(), 1)
self.assertEqual(i.get_y(), 2)
self.assertEqual(i.get_z(), 3)
MultiInherit2.set_x(i, 5)
self.assertEqual(MultiInherit1.get_x(i), 5)
self.assertEqual(i.getName(), 'MultiInherit2', 'Inheritance problem issue #560')
self.assertEqual(str(i), 'MultiInherit2', 'Inheritance problem issue #560')
i = DoubleInheritReversed(1,2,3)
self.assertEqual(i.getName(), 'MultiInherit2')
self.assertEqual(str(i), 'MultiInherit2')
def testClassArguments(self):
c = ClassArguments()
try:
# FIXME: This should raise:
# TypeError: no_args() takes no arguments (1 given)
c.no_args()
self.fail("Exception should be raised on 'c.no_args()'")
except TypeError, e:
self.assertEqual(e.args[0], "no_args() takes no arguments (1 given)")
self.assertEqual(c.self_arg(), True)
self.assertEqual(c.two_args(1), 1)
try:
# FIXME: This should raise:
# 'TypeError: two_args() takes exactly 2 arguments (1 given)
c.two_args()
self.fail("Exception should be raised on 'c.two_args()'")
except TypeError, e:
self.assertEqual(e.args[0], "two_args() takes exactly 2 arguments (1 given)")
def testSuperTest(self):
c = DoubleInherit(1,2,3)
self.assertEqual(super(DoubleInherit, c).get_y(), 2)
c.y = 4
self.assertEqual(super(DoubleInherit, c).get_y(), 4)
instance = super(MultiBase, MultiInherit1).__new__(MultiInherit1)
self.assertEqual(instance.name, 'MultiInherit1')
instance = super(MultiBase, MultiInherit1).__new__(MultiBase)
self.assertEqual(instance.name, 'MultiBase')
instance = super(MultiBase, MultiInherit1).__new__(MultiInherit1)
instance.__init__(1,2)
self.assertEqual(instance.x, 1)
self.assertEqual(instance.y, 2)
try:
z = instance.z
self.fail("failed to raise error for instance.z")
except AttributeError, e:
self.assertTrue(True)
except:
self.fail("failed to raise Attribute error for instance.z")
def testSuperArgTest(self):
a2 = SuperArg2(a=1,b=2,c=3)
a3 = SuperArg3(a=1,b=2,c=3)
self.assertEqual(["SuperArg2",a2.a1_args], ['SuperArg2', [('a', 1), ('b', 2), ('c', 3)]])
self.assertEqual(["SuperArg3",a3.a1_args], ['SuperArg3', [('a', 1), ('b', 2), ('c', 3)]])
def testImportTest(self):
import imports
self.assertEqual(imports.exec_order[0], 'circ1-1')
self.assertEqual(exec_order[1], 'circ2-1')
self.assertEqual(EXEC_ORDER[2], 'circ2-2')
self.assertEqual(imports.exec_order[3], 'circ1-2')
self.assertEqual(imports.exec_order[3], IMPORTS.exec_order[3])
import imports.child
teststring = 'import test'
try:
c = imports.child.Child()
self.assertEqual(c.value(teststring), teststring)
except AttributeError, e:
self.fail(e.message)
class C(imports.child.Child): pass
c = C()
self.assertEqual(c.value(teststring), teststring)
def testPassMeAClass(self):
res = PassMeAClassFunction(PassMeAClass)
self.assertEqual(res, "foo in PassMeAClass")
def testClassAttributeAccess(self):
self.assertEqual(GetAttribute.getIt, WithAttribute.ATTR)
def testNameMapping(self):
instance = MultiBase('a')
r = instance.prototype(1, 2, 3)
self.assertEqual(r[0], 'MultiBase')
self.assertEqual(r[1], 1)
self.assertEqual(r[2], 2)
self.assertEqual(r[3], 3)
instance = MultiInherit1('a', 'b')
r = instance.call(1, 2, 3)
self.assertEqual(r[0], 'MultiInherit1')
self.assertEqual(r[1], 1)
self.assertEqual(r[2], 2)
self.assertEqual(r[3], 3)
def testGlobalClassFactory(self):
gregister("passme", PassMeAClass)
gregister("exchild", ExampleChildClass)
gregister("mscp1", ExampleMultiSuperclassParent1)
pmc = ggetObject("passme")
self.assertEqual(pmc.foo(), "foo in PassMeAClass", "foo !in PassMeAClass")
try:
pmc = ggetObject("mscp1", 5)
except:
self.assertEqual(False, True, "Exception indicates bug in compiler: 'Error: uncaught exception: ExampleMultiSuperclassParent1() arguments after ** must be a dictionary 5'")
else:
self.assertEqual(pmc.x, 5, "pass me class x != 5")
try:
pmc = ggetObject("exchild", 5, 7) # 5 is ignored
except:
self.assertEqual(False, True, "Exception indicates bug in compiler: 'Error: uncaught exception: ExampleChildClass() arguments after ** must be a dictionary 7'")
else:
self.assertEqual(pmc.prop_a, 1, "pass me class prop_a != 1")
self.assertEqual(pmc.prop_b, 7, "pass me class prop_b != 7")
def testClassFactory(self):
f = Factory()
f.register("passme", PassMeAClass)
f.register("exchild", ExampleChildClass)
try:
pmc = f.getObjectCompilerBug("passme")
except:
self.assertEqual(False, True, "Compiler bug in class factory test")
else:
self.assertEqual(pmc.foo(), "foo in PassMeAClass")
pmc = f.getObject("passme")
self.assertEqual(pmc.foo(), "foo in PassMeAClass")
try:
pmc = f.getObject("exchild", 5, 7) # 5 is ignored
except:
self.assertEqual(False, True, "Exception indicates bug in compiler: 'Error: uncaught exception: ExampleChildClass() arguments after ** must be a dictionary 7'")
else:
self.assertEqual(pmc.prop_a, 1)
self.assertEqual(pmc.prop_b, 7)
def testImportKeywords(self):
import imports.enum.super
self.assertEqual(imports.enum.super.var, 1)
self.assertEqual(imports.enum.super.function(), 2)
from imports import enumerate
self.assertEqual(enumerate.list, 1)
from imports.enumerate import dict
self.assertEqual(dict(), (1,2))
def testDescriptors(self):
global revealAccessLog
decorated = Decorated()
revealAccessLog = None
self.assertEqual(decorated.x, 10)
self.assertEqual(revealAccessLog, "Retrieving var 'x'")
decorated.x = 5
self.assertEqual(revealAccessLog, "Updating var 'x': 5")
self.assertEqual(decorated.x, 5)
del decorated.x
self.assertEqual(revealAccessLog, "Deleting var 'x'")
try:
x = decorated.x
self.fail("Failed to raise error for 'del decorated.x'")
except AttributeError, e:
self.assertTrue(True)
#self.assertEqual(e[0], "'RevealAccess' object has no attribute 'val'")
except:
self.fail("Failed to raise Attribute error for 'del decorated.x'")
def testProperty(self):
p = OldStylePropertyDecorating()
p.x = 1
self.assertEqual(p._x, 1)
self.assertEqual(p.x, 1)
del p.x
try:
x = p._x
self.fail("Failed to raise error for 'x = p._x'")
except AttributeError, e:
self.assertTrue(True)
except:
self.fail("Failed to raise Attribute error for 'x = p._x'")
p = NewStylePropertyDecorating()
p.x = 1
self.assertEqual(p._x, 1)
self.assertEqual(p.x, 1)
del p.x
try:
x = p._x
self.fail("Failed to raise error for 'x = p._x'")
except AttributeError, e:
self.assertTrue(True)
except:
self.fail("Failed to raise Attribute error for 'x = p._x'")
def testDynamicLoading(self):
self.assertEqual(I18N.i18n.example(),
'This is an example')
self.assertEqual(I18N.domain.i18n.example(),
'This is a domain example')
self.assertEqual(I18N.domain.subdomain.i18n.example(),
'This is a subdomain example')
self.assertEqual(I18N.i18n.another_example(),
'This is another example')
self.assertEqual(I18N.domain.i18n.another_example(),
'This is another example')
I18N.set_locale('en_US')
self.assertEqual(I18N.i18n.example(),
'This is an en_US example')
self.assertEqual(I18N.domain.i18n.example(),
'This is a domain en_US example')
self.assertEqual(I18N.domain.subdomain.i18n.example(),
'This is a subdomain en_US example')
self.assertEqual(I18N.i18n.another_example(),
'This is en_US another example')
self.assertEqual(I18N.domain.i18n.another_example(),
'This is en_US another example')
def testClassesAnywhere(self):
class A(object):
def __init__(self, what):
if not what:
class B(object):
def __init__(self):
self.v = 0
else:
class B(object):
def __init__(self):
self.v = 1
self.b = B()
a = A(0)
self.assertEqual(a.b.v, 0)
a = A(1)
self.assertEqual(a.b.v, 1)
def testClassDefinitionCode(self):
class A(object):
def __init__(self, what):
class B(object):
if not what:
def __init__(self):
self.v = 0
else:
def __init__(self):
self.v = 1
self.b = B()
a = A(0)
self.assertEqual(a.b.v, 0)
a = A(1)
self.assertEqual(a.b.v, 1)
class A(object):
l = [1,2,3]
l[1] = 22
d = {}
d['a'] = 1
l1 = []
l2 = []
for i in range(4):
l1.append(i)
i = 0
while i < 4:
l2.append(i)
i += 1
a = A()
v = [1,22,3]
self.assertTrue(a.l == v, "%r == %r" % (a.l, v))
v = {'a': 1}
self.assertTrue(a.d == v, "%r == %r" % (a.d, v))
v = [0,1,2,3]
self.assertTrue(a.l1 == v, "%r == %r" % (a.l1, v))
self.assertTrue(a.l2 == v, "%r == %r" % (a.l2, v))
def testGenericMethodDecorators(self):
"""
issues #309, #318
"""
obj = DecoratedMethods()
self.assertEqual(obj.mtd1("b"), "1b2")
self.assertEqual(obj.mtd2("b"), "31b24")
self.assertEqual(obj.mtd3("b"), "abc")
self.assertEqual(obj.mtd4("b"), "a3b4c")
exc_raised = False
try:
res = obj.mtd5("b")
except TypeError, t:
exc_raised = True
self.assertTrue(exc_raised, "TypeError wrong arguments count not raised")
self.assertEqual(obj.mtd_static("b"), "5b6")
self.assertEqual(DecoratedMethods.mtd_static(*["b"], **{}), "5b6")
self.assertEqual(obj.mtd_static2("b"), "55b66")
self.assertEqual(DecoratedMethods.mtd_static("b"), "5b6")
self.assertEqual(DecoratedMethods.mtd_static2("b"), "55b66")
try:
self.assertEqual(obj.mtd_class("b"), "7b8")
self.assertEqual(obj.mtd_class2("b"), "77b88")
self.assertEqual(DecoratedMethods.mtd_class("b"), "7b8")
self.assertEqual(DecoratedMethods.mtd_class2("b"), "77b88")
except TypeError, e:
msg = str(e)
if "fnc() takes exactly 2 arguments (1 given)" in msg:
msg = "bug #318 - " + msg
self.fail("Bug #580 : %s " % msg)
def testExpressionInherit(self):
class X(object):
def m1(self):
return 1
class Y(object):
def m2(self):
return 2
cl = [list, X, Y]
class T(cl[0]):
pass
self.assertEqual(T([1]), [1])
class T(cl[1], cl[2]):
pass
t = T()
self.assertEqual(t.m1(), 1)
self.assertEqual(t.m2(), 2)
class T2(type(t)):
pass
t2 = T2()
self.assertEqual(t2.m1(), 1)
self.assertEqual(t2.m2(), 2)
class PassMeAClass(object):
def __init__(self):
pass
def foo(self):
return "foo in PassMeAClass"
def PassMeAClassFunction(klass):
c = klass()
return c.foo()
# testMetaClass
def method(self):
return 1
# testClassVars
class ExampleClass:
x = "test"
a = 1
b = 2
c = a|b
@classmethod
def sampleClassMethod(cls, arg):
return cls, arg
@classmethod
def sampleClassMethodVarargs(cls, *args):
return cls, args
@classmethod
def sampleClassMethodKwargs(cls, a=0, b=1, c=2):
return cls, a, b, c
@staticmethod
def sampleStaticMethod(arg):
return arg
def shouldntWork(arg):
return "before" + arg + "after"
oldIdiomStaticMethod = staticmethod(shouldntWork)
def fail_a(self):
return a
# Global variable to test variable selection order
x = 'global test'
# testInheritedProperties
class ExampleParentClass:
x = "test"
def __init__(self, a=1, b=2):
self.prop_a = a
self.prop_b = b
@classmethod
def sampleClassMethod(cls, arg):
return cls, arg
def get_x(self):
return self.x
def inert(self, x):
return x
def global_x1(self):
return x
def global_x2(self):
return x
class ExampleChildClass(ExampleParentClass):
def __init__(self, a = 11, b = 22):
ExampleParentClass.__init__(self, b = b)
# testInheritedPropertyObjects
class ExampleParentObject:
x = []
class ExampleChildObject(ExampleParentObject):
pass
# testInheritedConstructors
class ExampleGrandParentConstructor:
z = "grandparent"
def __init__(self):
self.r = "older"
def older(self):
self.w = 2
class ExampleParentConstructor(ExampleGrandParentConstructor):
y = "parent"
def __init__(self):
self.x = "test"
def dosomething(self):
self.m = 1
class ExampleChildConstructor(ExampleParentConstructor):
def __init__(self):
pass
class ExampleChildNoConstructor(ExampleParentConstructor):
pass
class ExampleChildExplicitConstructor(ExampleParentConstructor):
def __init__(self):
ExampleParentConstructor.__init__(self)
# XXX doing this should throw a "Name" exception
#
#class ExampleSubclassDefinedBeforeSuperclass(ExampleSuperclassDefinedAfterSubclass):
# pass
#class ExampleSuperclassDefinedAfterSubclass:
# def someMethod(self):
# return 'abc'
class ObjectClass(object):
def __init__(self):
self.prop = 1
class OtherClass1(object):
def __new__(cls):
return ObjectClass()
class OtherSubclass1(OtherClass1):
pass
class OtherClass2(object):
init = False
def __new__(cls):
return ObjectClass.__new__(cls)
def __init__(self):
self.init = True
class OtherClass3(object):
def __new__(cls, x, y):
val = object.__new__(cls)
val.x, val.y = x,y
return val
class OtherClass4Mixin:
pass
class OtherClass4(object, OtherClass4Mixin):
def __new__(cls, *args, **kwargs):
return super(OtherClass4, cls).__new__(cls, *args, **kwargs)
class OtherSubclass4(OtherClass4):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class ExampleMultiSuperclassParent1:
x = 'Initial X'
def __init__(self, x):
self.x = x
def get_value(self):
return self.x
class ExampleMultiSuperclassParent2:
y = 'Initial Y'
def __init__(self, y):
self.y = y
def get_value(self):
return self.y
class ExampleMultiSuperclassNoConstructor(ExampleMultiSuperclassParent1, ExampleMultiSuperclassParent2):
z = 'Initial Z'
class ExampleMultiSuperclassExplicitConstructor(ExampleMultiSuperclassParent1, ExampleMultiSuperclassParent2):
z = 'Initial Z'
def __init__(self, z):
self.z = z
def get_value(self):
return self.z
def get_values(self):
return ':'.join([self.x, self.y, self.z])
class ClassArguments:
def no_args( ):
return False
def self_arg(self):
return True
def two_args(self, arg1):
return arg1
class MultiBase(object):
name = 'MultiBase'
def __init__(self, x):
self.x = x
def get_x(self):
return self.x
def set_x(self, x):
self.x = x
def prototype(self, default, arguments, this):
return (self.name, default, arguments, this)
def getName(self):
return 'MultiBase'
class MultiInherit1(MultiBase):
name = 'MultiInherit1'
def __init__(self, x, y):
self.y = y
MultiBase.__init__(self, x) # yes it gets called twice
def get_y(self):
return self.y
def call(self, default, arguments, this):
return self.prototype(default, arguments, this)
class MultiInherit2(MultiBase):
name = 'MultiInherit2'
def __init__(self, x, z):
self.z = z
MultiBase.__init__(self, x) # yes it gets called twice
def get_z(self):
return self.z
def __str__(self):
return 'MultiInherit2'
def getName(self):
return 'MultiInherit2'
class DoubleInherit(MultiInherit1, MultiInherit2):
name = 'DoubleInherit'
def __init__(self, x, y, z):
MultiInherit1.__init__(self, x, y) # MultiBase __init__ called once
MultiInherit2.__init__(self, x, z) # MultiBase __init__ called twice
class DoubleInheritReversed(MultiInherit2, MultiInherit1):
name = 'DoubleInheritReversed'
def __init__(self, x, y, z):
MultiInherit1.__init__(self, x, y) # MultiBase __init__ called once
MultiInherit2.__init__(self, x, z) # MultiBase __init__ called twice
class RecurseMe(object):
chain = []
def __init__(self):
self.chain.append(0)
class RecurseMe(RecurseMe):
def __init__(self):
# Cannot do RecurseMe.__init__(self); that would really call myself
# And we can only do this once...
super(self.__class__, self).__init__()
self.chain.append(1)
class Factory:
_classes = {}
def __init__(self):
pass
def register(self, className, classe):
Factory._classes[className] = classe
def getObjectCompilerBug(self, className,*args, **kargs):
return Factory._classes[className](*args, **kargs)
def getObject(self, className,*args, **kargs):
f = Factory._classes[className]
return f(*args, **kargs)
global gclasses
gclasses = {}
def gregister(className, classe):
gclasses[className] = classe
def ggetObject(className, *args, **kargs):
classe = gclasses[className]
return classe(*args, **kargs)
revealAccessLog = None
class RevealAccess(object):
def __init__(self, initval=None, name='var'):
self.val = initval
self.name = name
def __get__(self, obj, objtype=None):
global revealAccessLog
revealAccessLog = 'Retrieving %s' % self.name
return self.val
def __set__(self, obj, val):
global revealAccessLog
revealAccessLog = 'Updating %s: %s' % (self.name, val)
self.val = val
def __delete__(self, obj):
global revealAccessLog
revealAccessLog = 'Deleting %s' % self.name
del self.val
class Decorated(object):
x = RevealAccess(10, "var 'x'")
class OldStylePropertyDecorating(object):
def __init__(self):
self._x = None
def getx(self):
return self._x
def setx(self, value):
self._x = value
def delx(self):
del self._x
x = property(getx, setx, delx, "I'm the 'x' property.")
# Property class that gives python 2.5 a setter and a deleter
class Property(object):
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
self.fget = fget
self.fset = fset
self.fdel = fdel
if doc is not None or not hasattr(fget, '__doc__'):
self.__doc__ = doc
else:
self.__doc__ = fget.__doc__
def __get__(self, obj, objtype=None):
if obj is None:
return self
if self.fget is None:
raise AttributeError, "unreadable attribute"
return self.fget(obj)
def __set__(self, obj, value):
if self.fset is None:
raise AttributeError, "can't set attribute"
self.fset(obj, value)
def __delete__(self, obj):
if self.fdel is None:
raise AttributeError, "can't delete attribute"
self.fdel(obj)
def setter(self, fset):
self.fset = fset
return self
def deleter(self, fdel):
self.fdel = fdel
return self
def property_setter(self, fset):
self.__setattr__('fset', fset)
return self
def property_deleter(self, fdel):
self.__setattr__('fdel', fdel)
return self
# Bug in pyjs that appears when the next lines are executed.
# The 'property = Property' assignment makes property a module variable,
# which is not set if the next line is not executed.
property = property
if not hasattr(property, 'setter'):
# Replace python 2.5 property class
property = Property
class NewStylePropertyDecorating(object):
def __init__(self):
self._x = None
@property
def x(self):
"""I'm the 'x' property."""
return self._x
@x.setter
def x(self, value):
self._x = value
@x.deleter
def x(self):
del self._x
class SuperArg1(object) :
def __init__(self,a=None,b=None,c=None) :
self.a1_args = [('a', a),('b',b),('c',c)]
class SuperArg2(SuperArg1) :
def __init__(self,a=None,b=None,c=None) :
self.a2_args = [('a', a),('b',b),('c',c)]
super(SuperArg2,self).__init__(a=a,b=b,c=c)
class SuperArg3(SuperArg1) :
def __init__(self,a=None,b=None,c=None) :
self.a3_args = [('a', a),('b',b),('c',c)]
super(SuperArg3,self).__init__(a,b,c)
############################################################################
# generic decorators for methods
############################################################################
def mdeco1(f):
def fn1(self, x):
if not isinstance(self, DecoratedMethods):
raise TypeError("fn1 - self is not instance of DecoratedMethods")
return "1" + f(self, x) + "2"
return fn1
def mdeco2(f):
def fn2(self, x):
if not isinstance(self, DecoratedMethods):
raise TypeError("fn2 - self is not instance of DecoratedMethods")
return "3" + f(self, x) + "4"
return fn2
def mdeco_with_wrong_args(f):
def fn_wwa(x): # correct definition should be fn(self, x), this must raise an exc
return "5" + f(x) + "6"
return fn_wwa
def mdeco_static(f):
def fns(x):
return "5" + f(x) + "6"
return fns
def mdeco_class(f):
def fnc(cls, x):
if cls is not DecoratedMethods:
raise TypeError("fnc - cls is not DecoratedMethods")
return "7" + f(cls, x) + "8"
return fnc
class DecoratedMethods(object):
@mdeco1
def mtd1(self, x):
return x
@mdeco2
@mdeco1
def mtd2(self, x):
return x
@imports.decors.othermoduledeco1
def mtd3(self, x):
return x
@imports.decors.othermoduledeco1
@mdeco2
def mtd4(self, x):
return x
@mdeco_with_wrong_args
def mtd5(self, x):
return x
@staticmethod
@mdeco_static
def mtd_static(x):
return x
@staticmethod
@mdeco_static
@mdeco_static
def mtd_static2(x):
return x
@classmethod
@mdeco_class
def mtd_class(cls, x):
return x
@classmethod
@mdeco_class
@mdeco_class
def mtd_class2(cls, x):
return x
|
apache-2.0
|
D4rk4/Neural-Network-Trading-Bot
|
pyTrader/trading_floor.py
|
1
|
4891
|
import sys, time
from file_check import *
check_data_files()
from print_trade_info import set_low_balance_msg, print_trade, reset_print_info
from db_access import db_connect, db_exists, output_records_exist, output_init_record, get_last_output_record
from balances import *
from analyst import *
from trader import *
from info_track import *
nonce_file = 'auth/nonce.sqlite'
output_file = parent_dir + 'data/output.sqlite'
bal = Balance(0, 0)
timer = Timer()
info = Trade_Counter()
initial_order = True
while not db_exists(output_file):
timer.track()
sys.stdout.write('\rWaiting for output.sqlite; start the neural network to create it...%i:%i:%i' % (timer.hour, timer.min, timer.sec))
sys.stdout.flush()
time.sleep(1)
timer.reset()
db_connect(nonce_file, output_file)
while not output_records_exist():
timer.track()
sys.stdout.write('\rWaiting for first prediction from neural network %i:%i:%i' % (timer.hour, timer.min, timer.sec))
sys.stdout.flush()
time.sleep(1)
timer.reset()
balance_response = make_request('getInfo', 0, 0, 0)
bal.set_initial_balances(balance_response['return']['funds']['btc'], balance_response['return']['funds']['usd'])
bal.set_balances(balance_response['return']['funds']['btc'], balance_response['return']['funds']['usd'])
last_order_entry = output_init_record()
while True:
output_data = get_last_output_record()
current_row = output_data[0]
current_price = output_data[1]
predicted_price = output_data[2]
order_type = output_data[3]
err_rate = output_data[4]
# A new output.sqlite record is present if the row id changed, or this is the first pass (initial_order)
if current_row != last_order_entry or initial_order:
last_order_entry = current_row
initial_order = False
balance_response = make_request('getInfo', 0, 0, 0)
bal.set_balances(balance_response['return']['funds']['btc'], balance_response['return']['funds']['usd'])
if order_type == 'buy':
if bal.balance_check(order_type):
usd_to_btc_qty = get_order_size(err_rate) * bal.usd_bal()
if profit_check(order_type, usd_to_btc_qty, current_price, predicted_price):
if exceeds_threshold(usd_to_btc_qty / current_price):
prediction_buy_response = make_request(order_type, usd_to_btc_qty / current_price, current_price, 0)
bal.set_balances(prediction_buy_response['return']['funds']['btc'], prediction_buy_response['return']['funds']['usd'])
info.prediction_trade_complete()
cs_args = post_trade_check(bal.btc_bal(), 'COUNTER_SELL', usd_to_btc_qty, predicted_price)
counter_sell_response = make_request(cs_args[0], cs_args[1], cs_args[2], cs_args[3])
info.counter_trade_complete()
timer.reset()
else:
set_low_balance_msg(str(order_type + ' order aborted - Inadequate USD balance ($' + str(bal.usd_bal()) + ')'))
elif order_type == 'sell':
if bal.balance_check(order_type):
btc_to_usd_qty = get_order_size(err_rate) * bal.btc_bal()
if profit_check(order_type, btc_to_usd_qty, current_price, predicted_price):
if exceeds_threshold(btc_to_usd_qty):
prediction_sell_response = make_request(order_type, btc_to_usd_qty, current_price, 0)
bal.set_balances(prediction_sell_response['return']['funds']['btc'], prediction_sell_response['return']['funds']['usd'])
info.prediction_trade_complete()
cb_args = post_trade_check(bal.usd_bal(), 'COUNTER_BUY', btc_to_usd_qty, predicted_price)
counter_buy_response = make_request(cb_args[0], cb_args[1], cb_args[2], cb_args[3])
info.counter_trade_complete()
timer.reset()
else:
set_low_balance_msg(str(order_type + ' order aborted - Inadequate BTC balance (B' + str(bal.btc_bal()) + ')'))
else:
print('UNKNOWN ORDER TYPE')
active_order_response = make_request('active', 0, 0, 0)
open_btc_order_value = active_order_value(active_order_response)
print('\r/---------------------------------------------------------------------------\\')
print_trade()
bal.print_bal(open_btc_order_value, current_price)
reset_print_info()
print('\r\---------------------------------------------------------------------------/')
else:
timer.track()
sys.stdout.write('\rPrimary Trades: %i // Counter Trades: %i // Last Trade %i:%i:%i ' % (info.prediction_trades, info.counter_trades, timer.hour, timer.min, timer.sec))
sys.stdout.flush()
time.sleep(1)
|
mit
|
centwave/jg82ksgvqkuan
|
django/contrib/gis/geos/prototypes/geom.py
|
311
|
4465
|
from ctypes import c_char_p, c_int, c_size_t, c_ubyte, c_uint, POINTER
from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR, PREPGEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.prototypes.errcheck import \
check_geom, check_minus_one, check_sized_string, check_string, check_zero
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
# This is the return type used by binary output (WKB, HEX) routines.
c_uchar_p = POINTER(c_ubyte)
# We create a simple subclass of c_char_p here because when the response
# type is set to c_char_p, you get a _Python_ string and there's no way
# to access the string's address inside the error checking function.
# In other words, you can't free the memory allocated inside GEOS. Previously,
# the return type would just be omitted and the integer address would be
# used -- but this allows us to be specific in the function definition and
# keeps the reference so it may be free'd.
class geos_char_p(c_char_p):
pass
### ctypes generation functions ###
def bin_constructor(func):
"Generates a prototype for binary construction (HEX, WKB) GEOS routines."
func.argtypes = [c_char_p, c_size_t]
func.restype = GEOM_PTR
func.errcheck = check_geom
return func
# HEX & WKB output
def bin_output(func):
"Generates a prototype for the routines that return a a sized string."
func.argtypes = [GEOM_PTR, POINTER(c_size_t)]
func.errcheck = check_sized_string
func.restype = c_uchar_p
return func
def geom_output(func, argtypes):
"For GEOS routines that return a geometry."
if argtypes: func.argtypes = argtypes
func.restype = GEOM_PTR
func.errcheck = check_geom
return func
def geom_index(func):
"For GEOS routines that return geometries from an index."
return geom_output(func, [GEOM_PTR, c_int])
def int_from_geom(func, zero=False):
"Argument is a geometry, return type is an integer."
func.argtypes = [GEOM_PTR]
func.restype = c_int
if zero:
func.errcheck = check_zero
else:
func.errcheck = check_minus_one
return func
def string_from_geom(func):
"Argument is a Geometry, return type is a string."
func.argtypes = [GEOM_PTR]
func.restype = geos_char_p
func.errcheck = check_string
return func
### ctypes prototypes ###
# Deprecated creation routines from WKB, HEX, WKT
from_hex = bin_constructor(GEOSFunc('GEOSGeomFromHEX_buf'))
from_wkb = bin_constructor(GEOSFunc('GEOSGeomFromWKB_buf'))
from_wkt = geom_output(GEOSFunc('GEOSGeomFromWKT'), [c_char_p])
# Deprecated output routines
to_hex = bin_output(GEOSFunc('GEOSGeomToHEX_buf'))
to_wkb = bin_output(GEOSFunc('GEOSGeomToWKB_buf'))
to_wkt = string_from_geom(GEOSFunc('GEOSGeomToWKT'))
# The GEOS geometry type, typeid, num_coordinates and number of geometries
geos_normalize = int_from_geom(GEOSFunc('GEOSNormalize'))
geos_type = string_from_geom(GEOSFunc('GEOSGeomType'))
geos_typeid = int_from_geom(GEOSFunc('GEOSGeomTypeId'))
get_dims = int_from_geom(GEOSFunc('GEOSGeom_getDimensions'), zero=True)
get_num_coords = int_from_geom(GEOSFunc('GEOSGetNumCoordinates'))
get_num_geoms = int_from_geom(GEOSFunc('GEOSGetNumGeometries'))
# Geometry creation factories
create_point = geom_output(GEOSFunc('GEOSGeom_createPoint'), [CS_PTR])
create_linestring = geom_output(GEOSFunc('GEOSGeom_createLineString'), [CS_PTR])
create_linearring = geom_output(GEOSFunc('GEOSGeom_createLinearRing'), [CS_PTR])
# Polygon and collection creation routines are special and will not
# have their argument types defined.
create_polygon = geom_output(GEOSFunc('GEOSGeom_createPolygon'), None)
create_collection = geom_output(GEOSFunc('GEOSGeom_createCollection'), None)
# Ring routines
get_extring = geom_output(GEOSFunc('GEOSGetExteriorRing'), [GEOM_PTR])
get_intring = geom_index(GEOSFunc('GEOSGetInteriorRingN'))
get_nrings = int_from_geom(GEOSFunc('GEOSGetNumInteriorRings'))
# Collection Routines
get_geomn = geom_index(GEOSFunc('GEOSGetGeometryN'))
# Cloning
geom_clone = GEOSFunc('GEOSGeom_clone')
geom_clone.argtypes = [GEOM_PTR]
geom_clone.restype = GEOM_PTR
# Destruction routine.
destroy_geom = GEOSFunc('GEOSGeom_destroy')
destroy_geom.argtypes = [GEOM_PTR]
destroy_geom.restype = None
# SRID routines
geos_get_srid = GEOSFunc('GEOSGetSRID')
geos_get_srid.argtypes = [GEOM_PTR]
geos_get_srid.restype = c_int
geos_set_srid = GEOSFunc('GEOSSetSRID')
geos_set_srid.argtypes = [GEOM_PTR, c_int]
geos_set_srid.restype = None
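# A minimal round-trip sketch using the prototypes above (assumes the GEOS
# C library was found and loaded; GEOS-allocated geometries must be released
# with destroy_geom or the memory leaks):
#   ptr = from_wkt('POINT (5 23)')
#   geos_type(ptr)       # 'Point'
#   destroy_geom(ptr)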
|
bsd-3-clause
|
kensho-technologies/graphql-compiler
|
graphql_compiler/tests/test_sqlalchemy_extensions.py
|
1
|
2928
|
# Copyright 2019-present Kensho Technologies, LLC.
import unittest
import sqlalchemy
import sqlalchemy.dialects.mssql as mssql
import sqlalchemy.dialects.postgresql as postgresql
from ..compiler.sqlalchemy_extensions import print_sqlalchemy_query_string
from .test_helpers import compare_sql, get_sqlalchemy_schema_info
class CommonIrLoweringTests(unittest.TestCase):
def setUp(self):
"""Disable max diff limits for all tests."""
self.maxDiff = None
self.sql_schema_info = get_sqlalchemy_schema_info()
def test_print_query_mssql_basic(self) -> None:
query = sqlalchemy.select([self.sql_schema_info.vertex_name_to_table["Animal"].c.name])
text = print_sqlalchemy_query_string(query, mssql.dialect())
expected_text = """
SELECT db_1.schema_1.[Animal].name
FROM db_1.schema_1.[Animal]
"""
compare_sql(self, expected_text, text)
text = print_sqlalchemy_query_string(query, postgresql.dialect())
expected_text = """
SELECT "db_1.schema_1"."Animal".name
FROM "db_1.schema_1"."Animal"
"""
compare_sql(self, expected_text, text)
def test_print_query_mssql_string_argument(self) -> None:
animal = self.sql_schema_info.vertex_name_to_table["Animal"].alias()
query = sqlalchemy.select([animal.c.name]).where(
animal.c.name == sqlalchemy.bindparam("name", expanding=False)
)
text = print_sqlalchemy_query_string(query, mssql.dialect())
expected_text = """
SELECT [Animal_1].name
FROM db_1.schema_1.[Animal] AS [Animal_1]
WHERE [Animal_1].name = :name
"""
compare_sql(self, expected_text, text)
text = print_sqlalchemy_query_string(query, postgresql.dialect())
expected_text = """
SELECT "Animal_1".name
FROM "db_1.schema_1"."Animal" AS "Animal_1"
WHERE "Animal_1".name = :name
"""
compare_sql(self, expected_text, text)
def test_print_query_mssql_list_argument(self) -> None:
animal = self.sql_schema_info.vertex_name_to_table["Animal"].alias()
query = sqlalchemy.select([animal.c.name]).where(
animal.c.name.in_(sqlalchemy.bindparam("names", expanding=True))
)
text = print_sqlalchemy_query_string(query, mssql.dialect())
expected_text = """
SELECT [Animal_1].name
FROM db_1.schema_1.[Animal] AS [Animal_1]
WHERE [Animal_1].name IN :names
"""
compare_sql(self, expected_text, text)
text = print_sqlalchemy_query_string(query, postgresql.dialect())
expected_text = """
SELECT "Animal_1".name
FROM "db_1.schema_1"."Animal" AS "Animal_1"
WHERE "Animal_1".name IN :names
"""
compare_sql(self, expected_text, text)
|
apache-2.0
|
cwvh/maidfs
|
maidsim/selectionalgorithm.py
|
1
|
1591
|
from __future__ import division
class SelectionAlgorithm:
'''
Implements a selective compression algorithm. This algorithm determines
whether a given file should be compressed or not.
This class is designed as a base class; actual selection algorithms should
be implemented as child classes.
'''
def should_compress(self, file_info):
# Returns a boolean indicating if the file should be compressed or not.
return False
class NoCompressionSelectionAlgorithm(SelectionAlgorithm):
'''
Most basic selection algorithm: don't compress anything. This is actually
the same as the base SelectionAlgorithm.
'''
class CompressEverythingSelectionAlgorithm(SelectionAlgorithm):
'''
The other most basic selection algorithm: compress everything.
'''
def should_compress(self, file_info):
return True
class ThresholdCompressionAlgorithm(SelectionAlgorithm):
'''
Makes compression decisions based on a threshold and information about the
compression algorithm used. If the compression ratio for a given file
(based on the file type) is expected to be below the threshold, then the
file is compressed.
'''
threshold = None
compression_alg = None
def __init__(self, threshold, compression_alg):
self.threshold = threshold
self.compression_alg = compression_alg
def should_compress(self, file_info):
compression_ratio = \
self.compression_alg.compression_ratio[file_info.file_type]
return compression_ratio <= self.threshold
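# A minimal usage sketch, runnable standalone. FakeCompressionAlg and
# FakeFileInfo below are hypothetical stand-ins assuming only the attributes
# this module actually reads: compression_alg.compression_ratio (a dict keyed
# by file type) and file_info.file_type.
if __name__ == '__main__':
    class FakeCompressionAlg(object):
        compression_ratio = {'text': 0.4, 'jpeg': 0.98}

    class FakeFileInfo(object):
        file_type = 'text'

    alg = ThresholdCompressionAlgorithm(0.9, FakeCompressionAlg())
    print(alg.should_compress(FakeFileInfo()))  # True: 0.4 <= 0.9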
|
bsd-2-clause
|
nekulin/arangodb
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32/Demos/security/security_enums.py
|
17
|
9461
|
import win32security, ntsecuritycon, winnt
class Enum:
def __init__(self, *const_names):
"""Accepts variable number of constant names that can be found in either
win32security, ntsecuritycon, or winnt."""
for const_name in const_names:
try:
const_val=getattr(win32security,const_name)
except AttributeError:
try:
const_val=getattr(ntsecuritycon, const_name)
except AttributeError:
try:
const_val=getattr(winnt, const_name)
except AttributeError:
raise AttributeError, 'Constant "%s" not found in win32security, ntsecuritycon, or winnt.' %const_name
setattr(self, const_name, const_val)
def lookup_name(self, const_val):
"""Looks up the name of a particular value."""
for k,v in self.__dict__.items():
if v==const_val:
return k
raise AttributeError, 'Value %s not found in enum' %const_val
def lookup_flags(self, flags):
"""Returns the names of all recognized flags in input, and any flags not found in the enum."""
flag_names=[]
unknown_flags=flags
for k,v in self.__dict__.items():
if flags & v == v:
flag_names.append(k)
unknown_flags = unknown_flags & ~v
return flag_names, unknown_flags
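# A quick sketch of how the Enum instances defined below are used (hedged
# example values; the numeric constants come from pywin32 and can differ
# between versions):
#   TOKEN_TYPE.lookup_name(1)            # e.g. 'TokenPrimary'
#   names, leftover = ACE_FLAGS.lookup_flags(flags)
# lookup_flags returns the matched constant names plus any unrecognized bits.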
TOKEN_INFORMATION_CLASS = Enum(
'TokenUser',
'TokenGroups',
'TokenPrivileges',
'TokenOwner',
'TokenPrimaryGroup',
'TokenDefaultDacl',
'TokenSource',
'TokenType',
'TokenImpersonationLevel',
'TokenStatistics',
'TokenRestrictedSids',
'TokenSessionId',
'TokenGroupsAndPrivileges',
'TokenSessionReference',
'TokenSandBoxInert',
'TokenAuditPolicy',
'TokenOrigin',
'TokenElevationType',
'TokenLinkedToken',
'TokenElevation',
'TokenHasRestrictions',
'TokenAccessInformation',
'TokenVirtualizationAllowed',
'TokenVirtualizationEnabled',
'TokenIntegrityLevel',
'TokenUIAccess',
'TokenMandatoryPolicy',
'TokenLogonSid')
TOKEN_TYPE = Enum(
'TokenPrimary',
'TokenImpersonation')
TOKEN_ELEVATION_TYPE = Enum(
'TokenElevationTypeDefault',
'TokenElevationTypeFull',
'TokenElevationTypeLimited')
POLICY_AUDIT_EVENT_TYPE = Enum(
'AuditCategorySystem',
'AuditCategoryLogon',
'AuditCategoryObjectAccess',
'AuditCategoryPrivilegeUse',
'AuditCategoryDetailedTracking',
'AuditCategoryPolicyChange',
'AuditCategoryAccountManagement',
'AuditCategoryDirectoryServiceAccess',
'AuditCategoryAccountLogon')
POLICY_INFORMATION_CLASS = Enum(
'PolicyAuditLogInformation',
'PolicyAuditEventsInformation',
'PolicyPrimaryDomainInformation',
'PolicyPdAccountInformation',
'PolicyAccountDomainInformation',
'PolicyLsaServerRoleInformation',
'PolicyReplicaSourceInformation',
'PolicyDefaultQuotaInformation',
'PolicyModificationInformation',
'PolicyAuditFullSetInformation',
'PolicyAuditFullQueryInformation',
'PolicyDnsDomainInformation')
POLICY_LSA_SERVER_ROLE = Enum(
'PolicyServerRoleBackup',
'PolicyServerRolePrimary')
## access modes for opening a policy handle - this is not a real enum
POLICY_ACCESS_MODES = Enum(
'POLICY_VIEW_LOCAL_INFORMATION',
'POLICY_VIEW_AUDIT_INFORMATION',
'POLICY_GET_PRIVATE_INFORMATION',
'POLICY_TRUST_ADMIN',
'POLICY_CREATE_ACCOUNT',
'POLICY_CREATE_SECRET',
'POLICY_CREATE_PRIVILEGE',
'POLICY_SET_DEFAULT_QUOTA_LIMITS',
'POLICY_SET_AUDIT_REQUIREMENTS',
'POLICY_AUDIT_LOG_ADMIN',
'POLICY_SERVER_ADMIN',
'POLICY_LOOKUP_NAMES',
'POLICY_NOTIFICATION',
'POLICY_ALL_ACCESS',
'POLICY_READ',
'POLICY_WRITE',
'POLICY_EXECUTE')
## EventAuditingOptions flags - not a real enum
POLICY_AUDIT_EVENT_OPTIONS_FLAGS = Enum(
'POLICY_AUDIT_EVENT_UNCHANGED',
'POLICY_AUDIT_EVENT_SUCCESS',
'POLICY_AUDIT_EVENT_FAILURE',
'POLICY_AUDIT_EVENT_NONE')
# AceType in ACE_HEADER - not a real enum
ACE_TYPE = Enum(
'ACCESS_MIN_MS_ACE_TYPE',
'ACCESS_ALLOWED_ACE_TYPE',
'ACCESS_DENIED_ACE_TYPE',
'SYSTEM_AUDIT_ACE_TYPE',
'SYSTEM_ALARM_ACE_TYPE',
'ACCESS_MAX_MS_V2_ACE_TYPE',
'ACCESS_ALLOWED_COMPOUND_ACE_TYPE',
'ACCESS_MAX_MS_V3_ACE_TYPE',
'ACCESS_MIN_MS_OBJECT_ACE_TYPE',
'ACCESS_ALLOWED_OBJECT_ACE_TYPE',
'ACCESS_DENIED_OBJECT_ACE_TYPE',
'SYSTEM_AUDIT_OBJECT_ACE_TYPE',
'SYSTEM_ALARM_OBJECT_ACE_TYPE',
'ACCESS_MAX_MS_OBJECT_ACE_TYPE',
'ACCESS_MAX_MS_V4_ACE_TYPE',
'ACCESS_MAX_MS_ACE_TYPE',
'ACCESS_ALLOWED_CALLBACK_ACE_TYPE',
'ACCESS_DENIED_CALLBACK_ACE_TYPE',
'ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE',
'ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE',
'SYSTEM_AUDIT_CALLBACK_ACE_TYPE',
'SYSTEM_ALARM_CALLBACK_ACE_TYPE',
'SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE',
'SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE',
'SYSTEM_MANDATORY_LABEL_ACE_TYPE',
'ACCESS_MAX_MS_V5_ACE_TYPE')
#bit flags for AceFlags - not a real enum
ACE_FLAGS = Enum(
'CONTAINER_INHERIT_ACE',
'FAILED_ACCESS_ACE_FLAG',
'INHERIT_ONLY_ACE',
'INHERITED_ACE',
'NO_PROPAGATE_INHERIT_ACE',
'OBJECT_INHERIT_ACE',
'SUCCESSFUL_ACCESS_ACE_FLAG',
'NO_INHERITANCE',
'SUB_CONTAINERS_AND_OBJECTS_INHERIT',
'SUB_CONTAINERS_ONLY_INHERIT',
'SUB_OBJECTS_ONLY_INHERIT')
# used in SetEntriesInAcl - very similar to ACE_TYPE
ACCESS_MODE = Enum(
'NOT_USED_ACCESS',
'GRANT_ACCESS',
'SET_ACCESS',
'DENY_ACCESS',
'REVOKE_ACCESS',
'SET_AUDIT_SUCCESS',
'SET_AUDIT_FAILURE')
# Bit flags in PSECURITY_DESCRIPTOR->Control - not a real enum
SECURITY_DESCRIPTOR_CONTROL_FLAGS = Enum(
'SE_DACL_AUTO_INHERITED', ## win2k and up
'SE_SACL_AUTO_INHERITED', ## win2k and up
'SE_DACL_PROTECTED', ## win2k and up
'SE_SACL_PROTECTED', ## win2k and up
'SE_DACL_DEFAULTED',
'SE_DACL_PRESENT',
'SE_GROUP_DEFAULTED',
'SE_OWNER_DEFAULTED',
'SE_SACL_PRESENT',
'SE_SELF_RELATIVE',
'SE_SACL_DEFAULTED')
# types of SID
SID_NAME_USE = Enum(
'SidTypeUser',
'SidTypeGroup',
'SidTypeDomain',
'SidTypeAlias',
'SidTypeWellKnownGroup',
'SidTypeDeletedAccount',
'SidTypeInvalid',
'SidTypeUnknown',
'SidTypeComputer',
'SidTypeLabel')
## bit flags, not a real enum
TOKEN_ACCESS_PRIVILEGES = Enum(
'TOKEN_ADJUST_DEFAULT',
'TOKEN_ADJUST_GROUPS',
'TOKEN_ADJUST_PRIVILEGES',
'TOKEN_ALL_ACCESS',
'TOKEN_ASSIGN_PRIMARY',
'TOKEN_DUPLICATE',
'TOKEN_EXECUTE',
'TOKEN_IMPERSONATE',
'TOKEN_QUERY',
'TOKEN_QUERY_SOURCE',
'TOKEN_READ',
'TOKEN_WRITE')
SECURITY_IMPERSONATION_LEVEL = Enum(
'SecurityAnonymous',
'SecurityIdentification',
'SecurityImpersonation',
'SecurityDelegation')
POLICY_SERVER_ENABLE_STATE = Enum(
'PolicyServerEnabled',
'PolicyServerDisabled')
POLICY_NOTIFICATION_INFORMATION_CLASS = Enum(
'PolicyNotifyAuditEventsInformation',
'PolicyNotifyAccountDomainInformation',
'PolicyNotifyServerRoleInformation',
'PolicyNotifyDnsDomainInformation',
'PolicyNotifyDomainEfsInformation',
'PolicyNotifyDomainKerberosTicketInformation',
'PolicyNotifyMachineAccountPasswordInformation')
TRUSTED_INFORMATION_CLASS = Enum(
'TrustedDomainNameInformation',
'TrustedControllersInformation',
'TrustedPosixOffsetInformation',
'TrustedPasswordInformation',
'TrustedDomainInformationBasic',
'TrustedDomainInformationEx',
'TrustedDomainAuthInformation',
'TrustedDomainFullInformation',
'TrustedDomainAuthInformationInternal',
'TrustedDomainFullInformationInternal',
'TrustedDomainInformationEx2Internal',
'TrustedDomainFullInformation2Internal')
TRUSTEE_FORM = Enum(
'TRUSTEE_IS_SID',
'TRUSTEE_IS_NAME',
'TRUSTEE_BAD_FORM',
'TRUSTEE_IS_OBJECTS_AND_SID',
'TRUSTEE_IS_OBJECTS_AND_NAME')
TRUSTEE_TYPE = Enum(
'TRUSTEE_IS_UNKNOWN',
'TRUSTEE_IS_USER',
'TRUSTEE_IS_GROUP',
'TRUSTEE_IS_DOMAIN',
'TRUSTEE_IS_ALIAS',
'TRUSTEE_IS_WELL_KNOWN_GROUP',
'TRUSTEE_IS_DELETED',
'TRUSTEE_IS_INVALID',
'TRUSTEE_IS_COMPUTER')
## SE_OBJECT_TYPE - securable objects
SE_OBJECT_TYPE = Enum(
'SE_UNKNOWN_OBJECT_TYPE',
'SE_FILE_OBJECT',
'SE_SERVICE',
'SE_PRINTER',
'SE_REGISTRY_KEY',
'SE_LMSHARE',
'SE_KERNEL_OBJECT',
'SE_WINDOW_OBJECT',
'SE_DS_OBJECT',
'SE_DS_OBJECT_ALL',
'SE_PROVIDER_DEFINED_OBJECT',
'SE_WMIGUID_OBJECT',
'SE_REGISTRY_WOW64_32KEY')
PRIVILEGE_FLAGS = Enum(
'SE_PRIVILEGE_ENABLED_BY_DEFAULT',
'SE_PRIVILEGE_ENABLED',
'SE_PRIVILEGE_USED_FOR_ACCESS')
# Group flags used with TokenGroups
TOKEN_GROUP_ATTRIBUTES = Enum(
'SE_GROUP_MANDATORY',
'SE_GROUP_ENABLED_BY_DEFAULT',
'SE_GROUP_ENABLED',
'SE_GROUP_OWNER',
'SE_GROUP_USE_FOR_DENY_ONLY',
'SE_GROUP_INTEGRITY',
'SE_GROUP_INTEGRITY_ENABLED',
'SE_GROUP_LOGON_ID',
'SE_GROUP_RESOURCE')
# Privilege flags returned by TokenPrivileges
TOKEN_PRIVILEGE_ATTRIBUTES = Enum(
'SE_PRIVILEGE_ENABLED_BY_DEFAULT',
'SE_PRIVILEGE_ENABLED',
'SE_PRIVILEGE_REMOVED',
'SE_PRIVILEGE_USED_FOR_ACCESS')
|
apache-2.0
|
CaptainThrowback/kernel_htc_m8whl_2.16.651.4
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py
|
12527
|
1935
|
# Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
return str
def add_stats(dict, key, value):
if not dict.has_key(key):
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
avg = (avg + value) / 2
dict[key] = (min, max, avg, count + 1)
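# Worked example for add_stats (note the running "avg" is the smoothed value
# (avg + value) / 2 after the first entry, not a true arithmetic mean):
#   stats = {}
#   add_stats(stats, "read", 10)   # stats["read"] == (10, 10, 10, 1)
#   add_stats(stats, "read", 30)   # stats["read"] == (10, 30, 20, 2)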
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
|
gpl-2.0
|
oso/pymcda
|
apps/learn-from-dataset.py
|
1
|
5147
|
import os, sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../")
import time
import datetime
from pymcda.electre_tri import MRSort
from pymcda.generate import generate_categories_profiles
from pymcda.pt_sorted import SortedPerformanceTable
from pymcda.types import CriterionValue, CriteriaValues
from pymcda.utils import compute_ca
from pymcda.learning.meta_mrsort3 import MetaMRSortPop3
from pymcda.learning.heur_mrsort_init_profiles import HeurMRSortInitProfiles
from pymcda.learning.lp_mrsort_weights import LpMRSortWeights
from pymcda.learning.heur_mrsort_profiles4 import MetaMRSortProfiles4
from pymcda.learning.lp_mrsort_mobius import LpMRSortMobius
from pymcda.learning.heur_mrsort_profiles_choquet import MetaMRSortProfilesChoquet
from pymcda.learning.mip_mrsort import MipMRSort
from pymcda.learning.lp_avfsort import LpAVFSort
from pymcda.learning.lp_avfsort_compat import LpAVFSortCompat
from pymcda.learning.lp_mrsort_post_weights import LpMRSortPostWeights
from pymcda.ui.graphic import display_electre_tri_models
from pymcda.ui.graphic_uta import display_utadis_model
from pymcda.uta import AVFSort
from pymcda.utils import compute_confusion_matrix
from pymcda.utils import print_confusion_matrix
from pymcda.utils import print_pt_and_assignments
from test_utils import load_mcda_input_data
from test_utils import save_to_xmcda
DATADIR = os.getenv('DATADIR', '%s/pymcda-data' % os.path.expanduser('~'))
def usage():
print("%s file.csv meta_mrsort|meta_mrsortc|mip_mrsort|lp_utadis|lp_utadis_compat" % sys.argv[0])
sys.exit(1)
if len(sys.argv) != 3:
usage()
algo = sys.argv[2]
nseg = 4
nmodels = 20
nloop = 7
nmeta = 40
data = load_mcda_input_data(sys.argv[1])
print(data.c)
worst = data.pt.get_worst(data.c)
best = data.pt.get_best(data.c)
t1 = time.time()
if algo == 'meta_mrsort':
heur_init_profiles = HeurMRSortInitProfiles
lp_weights = LpMRSortWeights
heur_profiles = MetaMRSortProfiles4
elif algo == 'meta_mrsortc':
heur_init_profiles = HeurMRSortInitProfiles
lp_weights = LpMRSortMobius
heur_profiles = MetaMRSortProfilesChoquet
if algo == 'meta_mrsort' or algo == 'meta_mrsortc':
model_type = 'mrsort'
cat_profiles = generate_categories_profiles(data.cats)
model = MRSort(data.c, None, None, None, cat_profiles)
pt_sorted = SortedPerformanceTable(data.pt)
meta = MetaMRSortPop3(nmodels, model.criteria,
model.categories_profiles.to_categories(),
pt_sorted, data.aa,
heur_init_profiles,
lp_weights,
heur_profiles)
for i in range(0, nloop):
model, ca_learning = meta.optimize(nmeta)
print(ca_learning)
if ca_learning == 1:
break
elif algo == 'mip_mrsort':
model_type = 'mrsort'
cat_profiles = generate_categories_profiles(data.cats)
model = MRSort(data.c, None, None, None, cat_profiles)
mip = MipMRSort(model, data.pt, data.aa)
mip.solve()
elif algo == 'lp_utadis':
model_type = 'utadis'
css = CriteriaValues(CriterionValue(c.id, nseg) for c in data.c)
lp = LpAVFSort(data.c, css, data.cats, worst, best)
obj, cvs, cfs, catv = lp.solve(data.aa, data.pt)
model = AVFSort(data.c, cvs, cfs, catv)
elif algo == 'lp_utadis_compat':
model_type = 'utadis'
css = CriteriaValues(CriterionValue(c.id, nseg) for c in data.c)
print("LpAVFSortCompat")
lp = LpAVFSortCompat(data.c, css, data.cats, worst, best)
obj, cvs, cfs, catv = lp.solve(data.aa, data.pt)
model = AVFSort(data.c, cvs, cfs, catv)
else:
print("Invalid algorithm!")
sys.exit(1)
t_total = time.time() - t1
model.id = 'learned'
data.pt.id = 'learning_set'
data.aa.id = 'learning_set'
dt = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
save_to_xmcda("%s/%s-all-%s-%s.bz2" % (DATADIR, algo, data.name, dt),
data.aa, data.pt, model)
aa2 = model.get_assignments(data.pt)
ca = compute_ca(data.aa, aa2)
auc = model.auc(data.aa, data.pt)
anok = []
for a in data.a:
if data.aa[a.id].category_id != aa2[a.id].category_id:
anok.append(a)
if len(anok) > 0:
print("Alternatives wrongly assigned:")
print_pt_and_assignments([a.id for a in anok], data.c.keys(), [data.aa, aa2], data.pt)
print("Model parameters:")
cids = model.criteria.keys()
if model_type == 'mrsort':
print(model.bpt)
print(model.cv)
print("lambda: %.7s" % model.lbda)
print("Weights and lambda optimization:")
if algo == 'meta_mrsort':
lp = LpMRSortPostWeights(model.cv, model.lbda)
obj, model.cv, model.lbda = lp.solve()
print(model.cv)
print(model.lbda)
# display_electre_tri_models([model], [worst], [best])
elif model_type == 'utadis':
model.cfs.display(criterion_ids = cids)
model.cat_values.display()
# display_utadis_model(model.cfs)
print("t: %g" % t_total)
print("CA: %g" % ca)
print("AUC: %g" % auc)
print("Confusion matrix:")
print_confusion_matrix(compute_confusion_matrix(data.aa, aa2,
data.cats.get_ordered_categories()))
|
gpl-3.0
|
akretion/stock-logistics-workflow
|
stock_route_sales_team/tests/test_sale_team_route.py
|
6
|
2813
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tests import common
class TestSaleTeamRoute(common.TransactionCase):
def setUp(self):
super(TestSaleTeamRoute, self).setUp()
self.sale_order_model = self.env['sale.order']
self.sale_line_model = self.env['sale.order.line']
self.product = self.env.ref('product.product_product_7')
self.partner = self.env.ref('base.res_partner_1')
self.sales_team = self.env.ref('sales_team.crm_case_section_1')
self.route = self.env['stock.location.route'].create({
'name': 'Test Route',
'section_selectable': True,
})
self.line_route = self.env['stock.location.route'].create({
'name': 'Line Test Route',
})
def test_sales_team_route(self):
self.sales_team.route_id = self.route
order = self.sale_order_model.create({
'partner_id': self.partner.id,
'section_id': self.sales_team.id,
})
line = self.sale_line_model.create({
'order_id': order.id,
'product_id': self.product.id,
'product_uom_qty': 1,
})
order.action_button_confirm()
procurement = line.procurement_ids
self.assertEquals(procurement.route_ids, self.route)
def test_sales_team_route_line_has_priority(self):
self.sales_team.route_id = self.route
order = self.sale_order_model.create({
'partner_id': self.partner.id,
'section_id': self.sales_team.id,
})
line = self.sale_line_model.create({
'order_id': order.id,
'product_id': self.product.id,
'product_uom_qty': 1,
'route_id': self.line_route.id,
})
order.action_button_confirm()
procurement = line.procurement_ids
self.assertEquals(procurement.route_ids, self.line_route)
|
agpl-3.0
|
leki75/ansible
|
lib/ansible/modules/cloud/amazon/cloudwatchevent_rule.py
|
71
|
14950
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudwatchevent_rule
short_description: Manage CloudWatch Event rules and targets
description:
- This module creates and manages CloudWatch event rules and targets.
version_added: "2.2"
extends_documentation_fragment:
- aws
author: "Jim Dalton (@jsdalton) <[email protected]>"
requirements:
- python >= 2.6
- boto3
notes:
- A rule must contain at least an I(event_pattern) or I(schedule_expression). A
rule can have both an I(event_pattern) and a I(schedule_expression), in which
case the rule will trigger on matching events as well as on a schedule.
- When specifying targets, I(input) and I(input_path) are mutually-exclusive
and optional parameters.
options:
name:
description:
- The name of the rule you are creating, updating or deleting. No spaces
or special characters allowed (i.e. must match C([\.\-_A-Za-z0-9]+))
required: true
schedule_expression:
description:
- A cron or rate expression that defines the schedule the rule will
trigger on. For example, C(cron(0 20 * * ? *)), C(rate(5 minutes))
required: false
event_pattern:
description:
- A string pattern (in valid JSON format) that is used to match against
incoming events to determine if the rule should be triggered
required: false
state:
description:
- Whether the rule is present (and enabled), disabled, or absent
choices: ["present", "disabled", "absent"]
default: present
required: false
description:
description:
- A description of the rule
required: false
role_arn:
description:
- The Amazon Resource Name (ARN) of the IAM role associated with the rule
required: false
targets:
description:
- "A dictionary array of targets to add to or update for the rule, in the
form C({ id: [string], arn: [string], input: [valid JSON string], input_path: [valid JSONPath string] }).
I(id) (required) is the unique target assignment ID. I(arn) (required)
is the Amazon Resource Name associated with the target. I(input)
(optional) is a JSON object that will override the event data when
passed to the target. I(input_path) (optional) is a JSONPath string
(e.g. C($.detail)) that specifies the part of the event data to be
passed to the target. If neither I(input) nor I(input_path) is
specified, then the entire event is passed to the target in JSON form."
required: false
'''
EXAMPLES = '''
- cloudwatchevent_rule:
name: MyCronTask
schedule_expression: "cron(0 20 * * ? *)"
description: Run my scheduled task
targets:
- id: MyTargetId
arn: arn:aws:lambda:us-east-1:123456789012:function:MyFunction
- cloudwatchevent_rule:
name: MyDisabledCronTask
schedule_expression: "cron(5 minutes)"
description: Run my disabled scheduled task
state: disabled
targets:
- id: MyOtherTargetId
arn: arn:aws:lambda:us-east-1:123456789012:function:MyFunction
input: '{"foo": "bar"}'
- cloudwatchevent_rule:
name: MyCronTask
state: absent
'''
RETURN = '''
rule:
description: CloudWatch Event rule data
returned: success
type: dict
sample: "{ 'arn': 'arn:aws:events:us-east-1:123456789012:rule/MyCronTask', 'description': 'Run my scheduled task', 'name': 'MyCronTask', 'schedule_expression': 'cron(0 20 * * ? *)', 'state': 'ENABLED' }"
targets:
description: CloudWatch Event target(s) assigned to the rule
returned: success
type: list
sample: "[{ 'arn': 'arn:aws:lambda:us-east-1:123456789012:function:MyFunction', 'id': 'MyTargetId' }]"
''' # NOQA
class CloudWatchEventRule(object):
def __init__(self, module, name, client, schedule_expression=None,
event_pattern=None, description=None, role_arn=None):
self.name = name
self.client = client
self.changed = False
self.schedule_expression = schedule_expression
self.event_pattern = event_pattern
self.description = description
self.role_arn = role_arn
def describe(self):
"""Returns the existing details of the rule in AWS"""
try:
rule_info = self.client.describe_rule(Name=self.name)
except botocore.exceptions.ClientError as e:
error_code = e.response.get('Error', {}).get('Code')
if error_code == 'ResourceNotFoundException':
return {}
raise
return self._snakify(rule_info)
def put(self, enabled=True):
"""Creates or updates the rule in AWS"""
request = {
'Name': self.name,
'State': "ENABLED" if enabled else "DISABLED",
}
if self.schedule_expression:
request['ScheduleExpression'] = self.schedule_expression
if self.event_pattern:
request['EventPattern'] = self.event_pattern
if self.description:
request['Description'] = self.description
if self.role_arn:
request['RoleArn'] = self.role_arn
response = self.client.put_rule(**request)
self.changed = True
return response
def delete(self):
"""Deletes the rule in AWS"""
self.remove_all_targets()
response = self.client.delete_rule(Name=self.name)
self.changed = True
return response
def enable(self):
"""Enables the rule in AWS"""
response = self.client.enable_rule(Name=self.name)
self.changed = True
return response
def disable(self):
"""Disables the rule in AWS"""
response = self.client.disable_rule(Name=self.name)
self.changed = True
return response
def list_targets(self):
"""Lists the existing targets for the rule in AWS"""
try:
targets = self.client.list_targets_by_rule(Rule=self.name)
except botocore.exceptions.ClientError as e:
error_code = e.response.get('Error', {}).get('Code')
if error_code == 'ResourceNotFoundException':
return []
raise
return self._snakify(targets)['targets']
def put_targets(self, targets):
"""Creates or updates the provided targets on the rule in AWS"""
if not targets:
return
request = {
'Rule': self.name,
'Targets': self._targets_request(targets),
}
response = self.client.put_targets(**request)
self.changed = True
return response
def remove_targets(self, target_ids):
"""Removes the provided targets from the rule in AWS"""
if not target_ids:
return
request = {
'Rule': self.name,
'Ids': target_ids
}
response = self.client.remove_targets(**request)
self.changed = True
return response
def remove_all_targets(self):
"""Removes all targets on rule"""
targets = self.list_targets()
return self.remove_targets([t['id'] for t in targets])
def _targets_request(self, targets):
"""Formats each target for the request"""
targets_request = []
for target in targets:
target_request = {
'Id': target['id'],
'Arn': target['arn']
}
if 'input' in target:
target_request['Input'] = target['input']
if 'input_path' in target:
target_request['InputPath'] = target['input_path']
targets_request.append(target_request)
return targets_request
def _snakify(self, dict):
"""Converts cammel case to snake case"""
return camel_dict_to_snake_dict(dict)
class CloudWatchEventRuleManager(object):
RULE_FIELDS = ['name', 'event_pattern', 'schedule_expression', 'description', 'role_arn']
def __init__(self, rule, targets):
self.rule = rule
self.targets = targets
def ensure_present(self, enabled=True):
"""Ensures the rule and targets are present and synced"""
rule_description = self.rule.describe()
if rule_description:
# Rule exists so update rule, targets and state
self._sync_rule(enabled)
self._sync_targets()
self._sync_state(enabled)
else:
# Rule does not exist, so create new rule and targets
self._create(enabled)
def ensure_disabled(self):
"""Ensures the rule and targets are present, but disabled, and synced"""
self.ensure_present(enabled=False)
def ensure_absent(self):
"""Ensures the rule and targets are absent"""
rule_description = self.rule.describe()
if not rule_description:
# Rule doesn't exist so don't need to delete
return
self.rule.delete()
def fetch_aws_state(self):
"""Retrieves rule and target state from AWS"""
aws_state = {
'rule': {},
'targets': [],
'changed': self.rule.changed
}
rule_description = self.rule.describe()
if not rule_description:
return aws_state
# Don't need to include response metadata noise in response
del rule_description['response_metadata']
aws_state['rule'] = rule_description
aws_state['targets'].extend(self.rule.list_targets())
return aws_state
def _sync_rule(self, enabled=True):
"""Syncs local rule state with AWS"""
if not self._rule_matches_aws():
self.rule.put(enabled)
def _sync_targets(self):
"""Syncs local targets with AWS"""
# Identify and remove extraneous targets on AWS
target_ids_to_remove = self._remote_target_ids_to_remove()
if target_ids_to_remove:
self.rule.remove_targets(target_ids_to_remove)
# Identify targets that need to be added or updated on AWS
targets_to_put = self._targets_to_put()
if targets_to_put:
self.rule.put_targets(targets_to_put)
def _sync_state(self, enabled=True):
"""Syncs local rule state with AWS"""
remote_state = self._remote_state()
if enabled and remote_state != 'ENABLED':
self.rule.enable()
elif not enabled and remote_state != 'DISABLED':
self.rule.disable()
def _create(self, enabled=True):
"""Creates rule and targets on AWS"""
self.rule.put(enabled)
self.rule.put_targets(self.targets)
def _rule_matches_aws(self):
"""Checks if the local rule data matches AWS"""
aws_rule_data = self.rule.describe()
# The rule matches AWS only if all rule data fields are equal
# to their corresponding local value defined in the task
return all([
getattr(self.rule, field) == aws_rule_data.get(field, None)
for field in self.RULE_FIELDS
])
def _targets_to_put(self):
"""Returns a list of targets that need to be updated or added remotely"""
remote_targets = self.rule.list_targets()
return [t for t in self.targets if t not in remote_targets]
def _remote_target_ids_to_remove(self):
"""Returns a list of targets that need to be removed remotely"""
target_ids = [t['id'] for t in self.targets]
remote_targets = self.rule.list_targets()
return [
rt['id'] for rt in remote_targets if rt['id'] not in target_ids
]
def _remote_state(self):
"""Returns the remote state from AWS"""
description = self.rule.describe()
if not description:
return
return description['state']
def get_cloudwatchevents_client(module):
"""Returns a boto3 client for accessing CloudWatch Events"""
try:
region, ec2_url, aws_conn_kwargs = get_aws_connection_info(module,
boto3=True)
if not region:
module.fail_json(msg="Region must be specified as a parameter, in \
EC2_REGION or AWS_REGION environment variables \
or in boto configuration file")
return boto3_conn(module, conn_type='client',
resource='events',
region=region, endpoint=ec2_url,
**aws_conn_kwargs)
except boto3.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
schedule_expression = dict(),
event_pattern = dict(),
state = dict(choices=['present', 'disabled', 'absent'],
default='present'),
description = dict(),
role_arn = dict(),
targets = dict(type='list', default=[]),
))
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
rule_data = dict(
[(rf, module.params.get(rf)) for rf in CloudWatchEventRuleManager.RULE_FIELDS]
)
targets = module.params.get('targets')
state = module.params.get('state')
cwe_rule = CloudWatchEventRule(module,
client=get_cloudwatchevents_client(module),
**rule_data)
cwe_rule_manager = CloudWatchEventRuleManager(cwe_rule, targets)
if state == 'present':
cwe_rule_manager.ensure_present()
elif state == 'disabled':
cwe_rule_manager.ensure_disabled()
elif state == 'absent':
cwe_rule_manager.ensure_absent()
else:
module.fail_json(msg="Invalid state '{0}' provided".format(state))
module.exit_json(**cwe_rule_manager.fetch_aws_state())
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
kvesteri/intervals
|
tests/interval/test_properties.py
|
1
|
3829
|
from datetime import date, datetime
from decimal import Decimal
from infinity import inf
from pytest import mark
from intervals import (
DateInterval,
DateTimeInterval,
DecimalInterval,
FloatInterval,
IntInterval
)
class TestIntervalProperties(object):
@mark.parametrize(
('interval', 'length'),
(
(IntInterval([1, 4]), 3),
(IntInterval([-1, 1]), 2),
(IntInterval([-inf, inf]), inf),
(IntInterval([1, inf]), inf),
(IntInterval.from_string('(0, 3)'), 1),
(IntInterval.from_string('[0, 3)'), 2),
(IntInterval.from_string('(0, 2)'), 0),
(IntInterval.from_string('(0, 1)'), 0)
)
)
def test_length(self, interval, length):
assert interval.length == length
@mark.parametrize(
('number_range', 'radius'),
(
([1, 4], 1.5),
([-1, 1], 1.0),
([-4, -1], 1.5),
((-inf, inf), inf),
((1, inf), inf),
)
)
def test_radius(self, number_range, radius):
assert IntInterval(number_range).radius == radius
@mark.parametrize(
('number_range', 'centre'),
(
([1, 4], 2.5),
([-1, 1], 0),
([-4, -1], -2.5),
((1, inf), inf),
)
)
def test_centre(self, number_range, centre):
assert IntInterval(number_range).centre == centre
@mark.parametrize(
('interval', 'is_open'),
(
(IntInterval((2, 3)), True),
(IntInterval.from_string('(2, 5)'), True),
(IntInterval.from_string('[3, 4)'), False),
(IntInterval.from_string('(4, 5]'), False),
(IntInterval.from_string('3 - 4'), False),
(IntInterval([4, 5]), False),
(IntInterval.from_string('[4, 5]'), False)
)
)
def test_is_open(self, interval, is_open):
assert interval.is_open == is_open
@mark.parametrize(
('interval', 'is_closed'),
(
(IntInterval((2, 3)), False),
(IntInterval.from_string('(2, 5)'), False),
(IntInterval.from_string('[3, 4)'), False),
(IntInterval.from_string('(4, 5]'), False),
(IntInterval.from_string('3 - 4'), True),
(IntInterval([4, 5]), True),
(IntInterval.from_string('[4, 5]'), True)
)
)
def test_closed(self, interval, is_closed):
assert interval.is_closed == is_closed
@mark.parametrize(
('interval', 'empty'),
(
(IntInterval((2, 3)), True),
(IntInterval([2, 3]), False),
(IntInterval([2, 2]), False),
(IntInterval.from_string('[2, 2)'), True),
(IntInterval.from_string('(2, 2]'), True),
(IntInterval.from_string('[2, 3)'), False),
(IntInterval((2, 10)), False),
)
)
def test_empty(self, interval, empty):
assert interval.empty == empty
@mark.parametrize(
('interval', 'degenerate'),
(
(IntInterval((2, 4)), False),
(IntInterval.from_string('[2, 2]'), True),
(IntInterval.from_string('[0, 0)'), True),
)
)
def test_degenerate(self, interval, degenerate):
assert interval.degenerate == degenerate
@mark.parametrize(
('interval', 'discrete'),
(
(IntInterval((2, 3)), True),
(IntInterval(5), True),
(FloatInterval(3.5), False),
(DecimalInterval(Decimal('2.4')), False),
(DateTimeInterval(datetime(2002, 1, 1)), False),
(DateInterval(date(2002, 1, 1)), True)
)
)
def test_discrete(self, interval, discrete):
assert interval.discrete == discrete
|
bsd-3-clause
|
danielballan/filestore
|
filestore/file_writers.py
|
2
|
4154
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .handlers_base import HandlerBase
import errno
import six
import logging
import numpy as np
import uuid
import os
import os.path as op
import datetime
import filestore.api as fsc
logger = logging.getLogger(__name__)
class NpyWriter(HandlerBase):
"""
Class to handle writing a numpy array out to disk and registering
that write with FileStore.
This class is only good for one call to add_data.
Parameters
----------
fpath : str
Path (including filename) of where to save the file
resource_kwargs : dict, optional
Saved in the resource_kwargs field of the fileBase document. Valid
keys are {mmap_mode, }
"""
SPEC_NAME = 'npy'
def __init__(self, fpath, resource_kwargs=None):
if op.exists(fpath):
raise IOError("the requested file {fpath} already exist")
self._fpath = fpath
if resource_kwargs is None:
resource_kwargs = dict()
for k in resource_kwargs.keys():
if k != 'mmap_mode':
raise ValueError("The only valid resource_kwargs key is 'mmap_mode' "
"you passed in {}".format(k))
self._f_custom = dict(resource_kwargs)
self._writable = True
def add_data(self, data, uid=None, resource_kwargs=None):
"""
Parameters
----------
data : ndarray
The data to save
uid : str, optional
The uid to be used for this entry,
if not given use uuid1 to generate one
resource_kwargs : None, optional
Currently raises if not 'falsy' and is ignored.
Returns
-------
uid : str
The uid used to register this data with filestore, can
be used to retrieve it
"""
if not self._writable:
raise RuntimeError("This writer can only write one data entry "
"and has already been used")
if resource_kwargs:
raise ValueError("This writer does not support resource_kwargs")
if op.exists(self._fpath):
raise IOError("the requested file {fpath} "
"already exist".format(fpath=self._fpath))
if uid is None:
uid = str(uuid.uuid1())
np.save(self._fpath, np.asanyarray(data))
self._writable = False
fb = fsc.insert_resource(self.SPEC_NAME, self._fpath, self._f_custom)
evl = fsc.insert_datum(fb, uid)
return evl.datum_id
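# Illustrative, hedged usage sketch (hypothetical helper, not part of the
# original module). Assumes a filestore backend is configured so that
# fsc.insert_resource/insert_datum can reach it, and that the target path
# does not exist yet.
def _example_npy_writer_usage():
    writer = NpyWriter('/tmp/example.npy')  # raises IOError if path exists
    # One-shot writer: a second add_data() call raises RuntimeError.
    return writer.add_data(np.arange(10))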
def save_ndarray(data, base_path=None, filename=None):
"""
Helper method to mindlessly save a numpy array to disk.
    Defaults to saving files in :path:`$XDG_DATA_HOME/fs_cache/YYYY-MM-DD`
Parameters
----------
data : ndarray
The data to be saved
    base_path : str, optional
        The base-path to use for saving files. If not given, defaults
        to ``$XDG_DATA_HOME/fs_cache`` (``~/.local/share/fs_cache`` when
        ``XDG_DATA_HOME`` is unset). Will add a sub-directory for
        each day in this path.
filename : str, optional
The name of the file to save to disk. Defaults to a uuid4 if none is
given
"""
if base_path is None:
xdg_data = os.getenv('XDG_DATA_HOME')
if not xdg_data:
xdg_data = op.join(op.expanduser('~'), '.local', 'share')
base_path = op.join(xdg_data, 'fs_cache',
str(datetime.date.today()))
if filename is None:
filename = str(uuid.uuid4())
_make_sure_path_exists(base_path)
fpath = op.join(base_path, filename + '.npy')
with NpyWriter(fpath) as fout:
eid = fout.add_data(data)
return eid
if six.PY2:
# http://stackoverflow.com/a/5032238/380231
def _make_sure_path_exists(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
else:
# technically, this won't work with py3.1, but no one uses that
def _make_sure_path_exists(path):
return os.makedirs(path, exist_ok=True)
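# Illustrative, hedged usage sketch (hypothetical helper, not part of the
# original module). Assumes a configured filestore backend; returns the
# datum uid that can later be used to retrieve the array.
def _example_save_ndarray():
    return save_ndarray(np.ones((3, 3)), filename='demo')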
|
bsd-3-clause
|
cgcgbcbc/django-xadmin
|
xadmin/__init__.py
|
1
|
2296
|
from xadmin.sites import AdminSite, site
VERSION = [0, 4, 4]
class Settings(object):
pass
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
setattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap3')
setattr(settings, 'CRISPY_CLASS_CONVERTERS', {
"textinput": "textinput textInput form-control",
"fileinput": "fileinput fileUpload form-control",
"passwordinput": "textinput textInput form-control",
})
from xadmin.views import register_builtin_views
register_builtin_views(site)
# load xadmin settings from XADMIN_CONF module
try:
        xadmin_conf = getattr(settings, 'XADMIN_CONF', 'xadmin_conf')
conf_mod = import_module(xadmin_conf)
except Exception:
conf_mod = None
if conf_mod:
for key in dir(conf_mod):
setting = getattr(conf_mod, key)
try:
if issubclass(setting, Settings):
site.register_settings(setting.__name__, setting)
except Exception:
pass
from xadmin.plugins import register_builtin_plugins
register_builtin_plugins(site)
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = site.copy_registry()
import_module('%s.adminx' % app)
        except Exception:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site.restore_registry(before_import_registry)
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'adminx'):
raise
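# Illustrative, hedged wiring sketch (not part of this module). Typical
# Django usage of this era would call autodiscover() from the project's
# urls.py, roughly:
#
#     import xadmin
#     xadmin.autodiscover()
#     urlpatterns = patterns('',
#         url(r'', include(xadmin.site.urls)),
#     )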
|
bsd-3-clause
|
jianghuaw/nova
|
nova/objects/floating_ip.py
|
3
|
10691
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import models
from nova import exception
from nova import objects
from nova.objects import base as obj_base
from nova.objects import fields
FLOATING_IP_OPTIONAL_ATTRS = ['fixed_ip']
# TODO(berrange): Remove NovaObjectDictCompat
@obj_base.NovaObjectRegistry.register
class FloatingIP(obj_base.NovaPersistentObject, obj_base.NovaObject,
obj_base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: Added _get_addresses_by_instance_uuid()
# Version 1.2: FixedIP <= version 1.2
# Version 1.3: FixedIP <= version 1.3
# Version 1.4: FixedIP <= version 1.4
# Version 1.5: FixedIP <= version 1.5
# Version 1.6: FixedIP <= version 1.6
# Version 1.7: FixedIP <= version 1.11
# Version 1.8: FixedIP <= version 1.12
# Version 1.9: FixedIP <= version 1.13
# Version 1.10: FixedIP <= version 1.14
VERSION = '1.10'
fields = {
'id': fields.IntegerField(),
'address': fields.IPAddressField(),
'fixed_ip_id': fields.IntegerField(nullable=True),
'project_id': fields.UUIDField(nullable=True),
'host': fields.StringField(nullable=True),
'auto_assigned': fields.BooleanField(),
'pool': fields.StringField(nullable=True),
'interface': fields.StringField(nullable=True),
'fixed_ip': fields.ObjectField('FixedIP', nullable=True),
}
@staticmethod
def _from_db_object(context, floatingip, db_floatingip,
expected_attrs=None):
if expected_attrs is None:
expected_attrs = []
for field in floatingip.fields:
if field not in FLOATING_IP_OPTIONAL_ATTRS:
floatingip[field] = db_floatingip[field]
if ('fixed_ip' in expected_attrs and
db_floatingip['fixed_ip'] is not None):
floatingip.fixed_ip = objects.FixedIP._from_db_object(
context, objects.FixedIP(context), db_floatingip['fixed_ip'])
floatingip._context = context
floatingip.obj_reset_changes()
return floatingip
def obj_load_attr(self, attrname):
if attrname not in FLOATING_IP_OPTIONAL_ATTRS:
raise exception.ObjectActionError(
action='obj_load_attr',
reason='attribute %s is not lazy-loadable' % attrname)
if not self._context:
raise exception.OrphanedObjectError(method='obj_load_attr',
objtype=self.obj_name())
if self.fixed_ip_id is not None:
self.fixed_ip = objects.FixedIP.get_by_id(
self._context, self.fixed_ip_id, expected_attrs=['network'])
else:
self.fixed_ip = None
@obj_base.remotable_classmethod
def get_by_id(cls, context, id):
db_floatingip = db.floating_ip_get(context, id)
# XXX joins fixed.instance
return cls._from_db_object(context, cls(context), db_floatingip,
expected_attrs=['fixed_ip'])
@obj_base.remotable_classmethod
def get_by_address(cls, context, address):
db_floatingip = db.floating_ip_get_by_address(context, str(address))
return cls._from_db_object(context, cls(context), db_floatingip)
@obj_base.remotable_classmethod
def get_pool_names(cls, context):
return [x['name'] for x in db.floating_ip_get_pools(context)]
@obj_base.remotable_classmethod
def allocate_address(cls, context, project_id, pool, auto_assigned=False):
return db.floating_ip_allocate_address(context, project_id, pool,
auto_assigned=auto_assigned)
@obj_base.remotable_classmethod
def associate(cls, context, floating_address, fixed_address, host):
db_fixed = db.floating_ip_fixed_ip_associate(context,
str(floating_address),
str(fixed_address),
host)
if db_fixed is None:
return None
floating = FloatingIP(
context=context, address=floating_address, host=host,
fixed_ip_id=db_fixed['id'],
fixed_ip=objects.FixedIP._from_db_object(
context, objects.FixedIP(context), db_fixed,
expected_attrs=['network']))
return floating
@obj_base.remotable_classmethod
def deallocate(cls, context, address):
return db.floating_ip_deallocate(context, str(address))
@obj_base.remotable_classmethod
def destroy(cls, context, address):
db.floating_ip_destroy(context, str(address))
@obj_base.remotable_classmethod
def disassociate(cls, context, address):
db_fixed = db.floating_ip_disassociate(context, str(address))
return cls(context=context, address=address,
fixed_ip_id=db_fixed['id'],
fixed_ip=objects.FixedIP._from_db_object(
context, objects.FixedIP(context), db_fixed,
expected_attrs=['network']))
@obj_base.remotable_classmethod
def _get_addresses_by_instance_uuid(cls, context, instance_uuid):
return db.instance_floating_address_get_all(context, instance_uuid)
@classmethod
def get_addresses_by_instance(cls, context, instance):
return cls._get_addresses_by_instance_uuid(context, instance['uuid'])
@obj_base.remotable
def save(self):
updates = self.obj_get_changes()
if 'address' in updates:
raise exception.ObjectActionError(action='save',
reason='address is not mutable')
if 'fixed_ip_id' in updates:
reason = 'fixed_ip_id is not mutable'
raise exception.ObjectActionError(action='save', reason=reason)
# NOTE(danms): Make sure we don't pass the calculated fixed_ip
# relationship to the DB update method
updates.pop('fixed_ip', None)
db_floatingip = db.floating_ip_update(self._context, str(self.address),
updates)
self._from_db_object(self._context, self, db_floatingip)
@obj_base.NovaObjectRegistry.register
class FloatingIPList(obj_base.ObjectListBase, obj_base.NovaObject):
# Version 1.3: FloatingIP 1.2
# Version 1.4: FloatingIP 1.3
# Version 1.5: FloatingIP 1.4
# Version 1.6: FloatingIP 1.5
# Version 1.7: FloatingIP 1.6
# Version 1.8: FloatingIP 1.7
# Version 1.9: FloatingIP 1.8
# Version 1.10: FloatingIP 1.9
# Version 1.11: FloatingIP 1.10
# Version 1.12: Added get_count_by_project() for quotas
fields = {
'objects': fields.ListOfObjectsField('FloatingIP'),
}
VERSION = '1.12'
@staticmethod
@db_api.pick_context_manager_reader
def _get_count_by_project_from_db(context, project_id):
return context.session.query(models.FloatingIp.id).\
filter_by(deleted=0).\
filter_by(project_id=project_id).\
filter_by(auto_assigned=False).\
count()
@obj_base.remotable_classmethod
def get_all(cls, context):
db_floatingips = db.floating_ip_get_all(context)
return obj_base.obj_make_list(context, cls(context),
objects.FloatingIP, db_floatingips)
@obj_base.remotable_classmethod
def get_by_host(cls, context, host):
db_floatingips = db.floating_ip_get_all_by_host(context, host)
return obj_base.obj_make_list(context, cls(context),
objects.FloatingIP, db_floatingips)
@obj_base.remotable_classmethod
def get_by_project(cls, context, project_id):
db_floatingips = db.floating_ip_get_all_by_project(context, project_id)
return obj_base.obj_make_list(context, cls(context),
objects.FloatingIP, db_floatingips)
@obj_base.remotable_classmethod
def get_by_fixed_address(cls, context, fixed_address):
db_floatingips = db.floating_ip_get_by_fixed_address(
context, str(fixed_address))
return obj_base.obj_make_list(context, cls(context),
objects.FloatingIP, db_floatingips)
@obj_base.remotable_classmethod
def get_by_fixed_ip_id(cls, context, fixed_ip_id):
db_floatingips = db.floating_ip_get_by_fixed_ip_id(context,
fixed_ip_id)
return obj_base.obj_make_list(context, cls(), FloatingIP,
db_floatingips)
@staticmethod
def make_ip_info(address, pool, interface):
return {'address': str(address),
'pool': pool,
'interface': interface}
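    # Hedged example of the payload built above:
    #     FloatingIPList.make_ip_info('10.0.0.5', 'public', 'eth0')
    #     -> {'address': '10.0.0.5', 'pool': 'public', 'interface': 'eth0'}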
@obj_base.remotable_classmethod
def create(cls, context, ip_info, want_result=False):
db_floatingips = db.floating_ip_bulk_create(context, ip_info,
want_result=want_result)
if want_result:
return obj_base.obj_make_list(context, cls(), FloatingIP,
db_floatingips)
@obj_base.remotable_classmethod
def destroy(cls, context, ips):
db.floating_ip_bulk_destroy(context, ips)
@obj_base.remotable_classmethod
def get_count_by_project(cls, context, project_id):
return cls._get_count_by_project_from_db(context, project_id)
# We don't want to register this object because it will not be passed
# around on RPC, it just makes our lives a lot easier in the API when
# dealing with floating IP operations
@obj_base.NovaObjectRegistry.register_if(False)
class NeutronFloatingIP(FloatingIP):
# Version 1.0: Initial Version
VERSION = '1.0'
fields = {
'id': fields.UUIDField(),
'fixed_ip_id': fields.UUIDField(nullable=True)
}
|
apache-2.0
|
lawrence34/python-social-auth
|
social/backends/open_id.py
|
66
|
14306
|
import datetime
from calendar import timegm
from jwt import InvalidTokenError, decode as jwt_decode
from openid.consumer.consumer import Consumer, SUCCESS, CANCEL, FAILURE
from openid.consumer.discover import DiscoveryFailure
from openid.extensions import sreg, ax, pape
from social.utils import url_add_parameters
from social.exceptions import AuthException, AuthFailed, AuthCanceled, \
AuthUnknownError, AuthMissingParameter, \
AuthTokenError
from social.backends.base import BaseAuth
from social.backends.oauth import BaseOAuth2
# OpenID configuration
OLD_AX_ATTRS = [
('http://schema.openid.net/contact/email', 'old_email'),
('http://schema.openid.net/namePerson', 'old_fullname'),
('http://schema.openid.net/namePerson/friendly', 'old_nickname')
]
AX_SCHEMA_ATTRS = [
# Request both the full name and first/last components since some
# providers offer one but not the other.
('http://axschema.org/contact/email', 'email'),
('http://axschema.org/namePerson', 'fullname'),
('http://axschema.org/namePerson/first', 'first_name'),
('http://axschema.org/namePerson/last', 'last_name'),
('http://axschema.org/namePerson/friendly', 'nickname'),
]
SREG_ATTR = [
('email', 'email'),
('fullname', 'fullname'),
('nickname', 'nickname')
]
OPENID_ID_FIELD = 'openid_identifier'
SESSION_NAME = 'openid'
class OpenIdAuth(BaseAuth):
"""Generic OpenID authentication backend"""
name = 'openid'
URL = None
USERNAME_KEY = 'username'
def get_user_id(self, details, response):
"""Return user unique id provided by service"""
return response.identity_url
def get_ax_attributes(self):
attrs = self.setting('AX_SCHEMA_ATTRS', [])
if attrs and self.setting('IGNORE_DEFAULT_AX_ATTRS', True):
return attrs
return attrs + AX_SCHEMA_ATTRS + OLD_AX_ATTRS
def get_sreg_attributes(self):
return self.setting('SREG_ATTR') or SREG_ATTR
def values_from_response(self, response, sreg_names=None, ax_names=None):
"""Return values from SimpleRegistration response or
AttributeExchange response if present.
        @sreg_names and @ax_names must be lists of (name, alias) pairs;
        the alias is used as the mapping key.
"""
values = {}
# Use Simple Registration attributes if provided
if sreg_names:
resp = sreg.SRegResponse.fromSuccessResponse(response)
if resp:
values.update((alias, resp.get(name) or '')
for name, alias in sreg_names)
# Use Attribute Exchange attributes if provided
if ax_names:
resp = ax.FetchResponse.fromSuccessResponse(response)
if resp:
for src, alias in ax_names:
name = alias.replace('old_', '')
values[name] = resp.getSingle(src, '') or values.get(name)
return values
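    # Hedged example: with SREG_ATTR and AX_SCHEMA_ATTRS above, a provider
    # returning both schemas yields e.g. {'email': ..., 'fullname': ...,
    # 'nickname': ...}; a non-empty AX value wins over the sreg value for
    # the same alias (minus any 'old_' prefix).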
def get_user_details(self, response):
"""Return user details from an OpenID request"""
values = {'username': '', 'email': '', 'fullname': '',
'first_name': '', 'last_name': ''}
# update values using SimpleRegistration or AttributeExchange
# values
values.update(self.values_from_response(
response, self.get_sreg_attributes(), self.get_ax_attributes()
))
fullname = values.get('fullname') or ''
first_name = values.get('first_name') or ''
last_name = values.get('last_name') or ''
email = values.get('email') or ''
if not fullname and first_name and last_name:
fullname = first_name + ' ' + last_name
elif fullname:
try:
first_name, last_name = fullname.rsplit(' ', 1)
except ValueError:
last_name = fullname
username_key = self.setting('USERNAME_KEY') or self.USERNAME_KEY
values.update({'fullname': fullname, 'first_name': first_name,
'last_name': last_name,
'username': values.get(username_key) or
(first_name.title() + last_name.title()),
'email': email})
return values
def extra_data(self, user, uid, response, details=None, *args, **kwargs):
"""Return defined extra data names to store in extra_data field.
        Settings will be inspected to get more value names that should be
        stored in the extra_data field. The setting name is built from the
        current backend name (all uppercase) plus _SREG_EXTRA_DATA or
        _AX_EXTRA_DATA, because values can be returned by the
        SimpleRegistration or AttributeExchange schemas.
        Both lists must be (value name, alias) mappings similar to
        SREG_ATTR, OLD_AX_ATTRS or AX_SCHEMA_ATTRS.
"""
sreg_names = self.setting('SREG_EXTRA_DATA')
ax_names = self.setting('AX_EXTRA_DATA')
values = self.values_from_response(response, sreg_names, ax_names)
from_details = super(OpenIdAuth, self).extra_data(
user, uid, {}, details, *args, **kwargs
)
values.update(from_details)
return values
def auth_url(self):
"""Return auth URL returned by service"""
openid_request = self.setup_request(self.auth_extra_arguments())
# Construct completion URL, including page we should redirect to
return_to = self.strategy.absolute_uri(self.redirect_uri)
return openid_request.redirectURL(self.trust_root(), return_to)
def auth_html(self):
"""Return auth HTML returned by service"""
openid_request = self.setup_request(self.auth_extra_arguments())
return_to = self.strategy.absolute_uri(self.redirect_uri)
form_tag = {'id': 'openid_message'}
return openid_request.htmlMarkup(self.trust_root(), return_to,
form_tag_attrs=form_tag)
def trust_root(self):
"""Return trust-root option"""
return self.setting('OPENID_TRUST_ROOT') or \
self.strategy.absolute_uri('/')
def continue_pipeline(self, *args, **kwargs):
"""Continue previous halted pipeline"""
response = self.consumer().complete(dict(self.data.items()),
self.strategy.absolute_uri(
self.redirect_uri
))
kwargs.update({'response': response, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
def auth_complete(self, *args, **kwargs):
"""Complete auth process"""
response = self.consumer().complete(dict(self.data.items()),
self.strategy.absolute_uri(
self.redirect_uri
))
self.process_error(response)
kwargs.update({'response': response, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
def process_error(self, data):
if not data:
raise AuthException(self, 'OpenID relying party endpoint')
elif data.status == FAILURE:
raise AuthFailed(self, data.message)
elif data.status == CANCEL:
raise AuthCanceled(self)
elif data.status != SUCCESS:
raise AuthUnknownError(self, data.status)
def setup_request(self, params=None):
"""Setup request"""
request = self.openid_request(params)
# Request some user details. Use attribute exchange if provider
# advertises support.
if request.endpoint.supportsType(ax.AXMessage.ns_uri):
fetch_request = ax.FetchRequest()
# Mark all attributes as required, Google ignores optional ones
for attr, alias in self.get_ax_attributes():
fetch_request.add(ax.AttrInfo(attr, alias=alias,
required=True))
else:
fetch_request = sreg.SRegRequest(
optional=list(dict(self.get_sreg_attributes()).keys())
)
request.addExtension(fetch_request)
        # Add PAPE extension if configured
preferred_policies = self.setting(
'OPENID_PAPE_PREFERRED_AUTH_POLICIES'
)
preferred_level_types = self.setting(
'OPENID_PAPE_PREFERRED_AUTH_LEVEL_TYPES'
)
max_age = self.setting('OPENID_PAPE_MAX_AUTH_AGE')
if max_age is not None:
try:
max_age = int(max_age)
except (ValueError, TypeError):
max_age = None
if max_age is not None or preferred_policies or preferred_level_types:
pape_request = pape.Request(
max_auth_age=max_age,
preferred_auth_policies=preferred_policies,
preferred_auth_level_types=preferred_level_types
)
request.addExtension(pape_request)
return request
def consumer(self):
"""Create an OpenID Consumer object for the given Django request."""
if not hasattr(self, '_consumer'):
self._consumer = self.create_consumer(self.strategy.openid_store())
return self._consumer
def create_consumer(self, store=None):
return Consumer(self.strategy.openid_session_dict(SESSION_NAME), store)
def uses_redirect(self):
"""Return true if openid request will be handled with redirect or
HTML content will be returned.
"""
return self.openid_request().shouldSendRedirect()
def openid_request(self, params=None):
"""Return openid request"""
try:
return self.consumer().begin(url_add_parameters(self.openid_url(),
params))
except DiscoveryFailure as err:
raise AuthException(self, 'OpenID discovery error: {0}'.format(
err
))
def openid_url(self):
"""Return service provider URL.
        This base class is generic, accepting a POST parameter that
        specifies the provider URL."""
if self.URL:
return self.URL
elif OPENID_ID_FIELD in self.data:
return self.data[OPENID_ID_FIELD]
else:
raise AuthMissingParameter(self, OPENID_ID_FIELD)
class OpenIdConnectAssociation(object):
""" Use Association model to save the nonce by force. """
def __init__(self, handle, secret='', issued=0, lifetime=0, assoc_type=''):
        self.handle = handle  # used as the nonce
        self.secret = secret.encode()  # not used
        self.issued = issued  # not used
        self.lifetime = lifetime  # not used
        self.assoc_type = assoc_type  # used as the state
class OpenIdConnectAuth(BaseOAuth2):
"""
Base class for Open ID Connect backends.
Currently only the code response type is supported.
"""
ID_TOKEN_ISSUER = None
DEFAULT_SCOPE = ['openid']
EXTRA_DATA = ['id_token', 'refresh_token', ('sub', 'id')]
# Set after access_token is retrieved
id_token = None
def auth_params(self, state=None):
"""Return extra arguments needed on auth process."""
params = super(OpenIdConnectAuth, self).auth_params(state)
params['nonce'] = self.get_and_store_nonce(
self.AUTHORIZATION_URL, state
)
return params
def auth_complete_params(self, state=None):
params = super(OpenIdConnectAuth, self).auth_complete_params(state)
        # Add a nonce to the request to help counter CSRF
params['nonce'] = self.get_and_store_nonce(
self.ACCESS_TOKEN_URL, state
)
return params
def get_and_store_nonce(self, url, state):
# Create a nonce
nonce = self.strategy.random_string(64)
# Store the nonce
association = OpenIdConnectAssociation(nonce, assoc_type=state)
self.strategy.storage.association.store(url, association)
return nonce
def get_nonce(self, nonce):
try:
return self.strategy.storage.association.get(
server_url=self.ACCESS_TOKEN_URL,
handle=nonce
)[0]
except IndexError:
pass
def remove_nonce(self, nonce_id):
self.strategy.storage.association.remove([nonce_id])
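    # Hedged flow summary: auth_params()/auth_complete_params() store a nonce
    # keyed by the request URL; validate_and_return_id_token() later fetches
    # it with get_nonce() and deletes it via remove_nonce(), so each nonce is
    # single-use.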
def validate_and_return_id_token(self, id_token):
"""
Validates the id_token according to the steps at
http://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation.
"""
client_id, _client_secret = self.get_key_and_secret()
decryption_key = self.setting('ID_TOKEN_DECRYPTION_KEY')
try:
# Decode the JWT and raise an error if the secret is invalid or
# the response has expired.
id_token = jwt_decode(id_token, decryption_key, audience=client_id,
issuer=self.ID_TOKEN_ISSUER,
algorithms=['HS256'])
except InvalidTokenError as err:
raise AuthTokenError(self, err)
# Verify the token was issued in the last 10 minutes
utc_timestamp = timegm(datetime.datetime.utcnow().utctimetuple())
if id_token['iat'] < (utc_timestamp - 600):
raise AuthTokenError(self, 'Incorrect id_token: iat')
# Validate the nonce to ensure the request was not modified
nonce = id_token.get('nonce')
if not nonce:
raise AuthTokenError(self, 'Incorrect id_token: nonce')
nonce_obj = self.get_nonce(nonce)
if nonce_obj:
self.remove_nonce(nonce_obj.id)
else:
raise AuthTokenError(self, 'Incorrect id_token: nonce')
return id_token
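    # Hedged example of the freshness check above: a token with
    # iat = utc_timestamp - 660 (issued 11 minutes ago) misses the
    # 600-second window and raises AuthTokenError.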
def request_access_token(self, *args, **kwargs):
"""
Retrieve the access token. Also, validate the id_token and
store it (temporarily).
"""
response = self.get_json(*args, **kwargs)
self.id_token = self.validate_and_return_id_token(response['id_token'])
return response
|
bsd-3-clause
|
fidomason/kbengine
|
kbe/res/scripts/common/Lib/site-packages/pip/req.py
|
328
|
83557
|
from email.parser import FeedParser
import os
import imp
import locale
import re
import sys
import shutil
import tempfile
import textwrap
import zipfile
from distutils.util import change_root
from pip.locations import (bin_py, running_under_virtualenv,
                           PIP_DELETE_MARKER_FILENAME,
                           write_delete_marker_file, bin_user)
from pip.exceptions import (InstallationError, UninstallationError, UnsupportedWheel,
BestVersionAlreadyInstalled, InvalidWheelFilename,
DistributionNotFound, PreviousBuildDirError)
from pip.vcs import vcs
from pip.log import logger
from pip.util import (display_path, rmtree, ask, ask_path_exists, backup_dir,
is_installable_dir, is_local, dist_is_local,
dist_in_usersite, dist_in_site_packages, renames,
normalize_path, egg_link_path, make_path_relative,
call_subprocess, is_prerelease, normalize_name)
from pip.backwardcompat import (urlparse, urllib, uses_pycache,
ConfigParser, string_types, HTTPError,
get_python_version, b)
from pip.index import Link
from pip.locations import build_prefix
from pip.download import (PipSession, get_file_content, is_url, url_to_path,
path_to_url, is_archive_file,
unpack_vcs_link, is_vcs_url, is_file_url,
unpack_file_url, unpack_http_url)
import pip.wheel
from pip.wheel import move_wheel_files, Wheel, wheel_ext
from pip._vendor import pkg_resources, six
def read_text_file(filename):
"""Return the contents of *filename*.
    Try to decode the file contents with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order. Decoding
a byte string with latin1 will never raise an error. In the worst case, the
returned string will contain some garbage characters.
"""
with open(filename, 'rb') as fp:
data = fp.read()
encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
for enc in encodings:
try:
data = data.decode(enc)
except UnicodeDecodeError:
continue
break
assert type(data) != bytes # Latin1 should have worked.
return data
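# Illustrative, hedged sketch of the fallback chain above (hypothetical
# helper, not part of the original module):
def _example_read_text_file():
    path = '/tmp/pip-demo.txt'
    with open(path, 'wb') as fp:
        fp.write(b'caf\xe9')  # invalid UTF-8; a fallback encoding decodes it
    return read_text_file(path)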
class InstallRequirement(object):
def __init__(self, req, comes_from, source_dir=None, editable=False,
url=None, as_egg=False, update=True, prereleases=None,
editable_options=None, from_bundle=False, pycompile=True):
self.extras = ()
if isinstance(req, string_types):
req = pkg_resources.Requirement.parse(req)
self.extras = req.extras
self.req = req
self.comes_from = comes_from
self.source_dir = source_dir
self.editable = editable
if editable_options is None:
editable_options = {}
self.editable_options = editable_options
self.url = url
self.as_egg = as_egg
self._egg_info_path = None
# This holds the pkg_resources.Distribution object if this requirement
# is already available:
self.satisfied_by = None
# This hold the pkg_resources.Distribution object if this requirement
# conflicts with another installed distribution:
self.conflicts_with = None
self._temp_build_dir = None
self._is_bundle = None
# True if the editable should be updated:
self.update = update
# Set to True after successful installation
self.install_succeeded = None
# UninstallPathSet of uninstalled distribution (for possible rollback)
self.uninstalled = None
self.use_user_site = False
self.target_dir = None
self.from_bundle = from_bundle
self.pycompile = pycompile
# True if pre-releases are acceptable
if prereleases:
self.prereleases = True
elif self.req is not None:
self.prereleases = any([is_prerelease(x[1]) and x[0] != "!=" for x in self.req.specs])
else:
self.prereleases = False
@classmethod
def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
name, url, extras_override = parse_editable(editable_req, default_vcs)
if url.startswith('file:'):
source_dir = url_to_path(url)
else:
source_dir = None
res = cls(name, comes_from, source_dir=source_dir,
editable=True,
url=url,
editable_options=extras_override,
prereleases=True)
if extras_override is not None:
res.extras = extras_override
return res
@classmethod
def from_line(cls, name, comes_from=None, prereleases=None):
"""Creates an InstallRequirement from a name, which might be a
requirement, directory containing 'setup.py', filename, or URL.
"""
url = None
name = name.strip()
req = None
path = os.path.normpath(os.path.abspath(name))
link = None
if is_url(name):
link = Link(name)
elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')):
if not is_installable_dir(path):
raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % name)
link = Link(path_to_url(name))
elif is_archive_file(path):
if not os.path.isfile(path):
logger.warn('Requirement %r looks like a filename, but the file does not exist', name)
link = Link(path_to_url(name))
# If the line has an egg= definition, but isn't editable, pull the requirement out.
# Otherwise, assume the name is the req for the non URL/path/archive case.
if link and req is None:
url = link.url_without_fragment
req = link.egg_fragment #when fragment is None, this will become an 'unnamed' requirement
# Handle relative file URLs
if link.scheme == 'file' and re.search(r'\.\./', url):
url = path_to_url(os.path.normpath(os.path.abspath(link.path)))
# fail early for invalid or unsupported wheels
if link.ext == wheel_ext:
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
if not wheel.supported():
raise UnsupportedWheel("%s is not a supported wheel on this platform." % wheel.filename)
else:
req = name
return cls(req, comes_from, url=url, prereleases=prereleases)
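    # Hedged examples of lines from_line() accepts:
    #     'requests>=2.0'               -> plain requirement specifier
    #     './local-dir'                 -> directory containing setup.py
    #     'pkg-1.0.tar.gz'              -> local archive file
    #     'http://host/pkg-1.0.tar.gz'  -> URL (an #egg= fragment names it)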
def __str__(self):
if self.req:
s = str(self.req)
if self.url:
s += ' from %s' % self.url
else:
s = self.url
if self.satisfied_by is not None:
s += ' in %s' % display_path(self.satisfied_by.location)
if self.comes_from:
if isinstance(self.comes_from, string_types):
comes_from = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += ' (from %s)' % comes_from
return s
def from_path(self):
if self.req is None:
return None
s = str(self.req)
if self.comes_from:
if isinstance(self.comes_from, string_types):
comes_from = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += '->' + comes_from
return s
def build_location(self, build_dir, unpack=True):
if self._temp_build_dir is not None:
return self._temp_build_dir
if self.req is None:
self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
self._ideal_build_dir = build_dir
return self._temp_build_dir
if self.editable:
name = self.name.lower()
else:
name = self.name
# FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
if not os.path.exists(build_dir):
_make_build_dir(build_dir)
return os.path.join(build_dir, name)
def correct_build_location(self):
"""If the build location was a temporary directory, this will move it
to a new more permanent location"""
if self.source_dir is not None:
return
assert self.req is not None
assert self._temp_build_dir
old_location = self._temp_build_dir
new_build_dir = self._ideal_build_dir
del self._ideal_build_dir
if self.editable:
name = self.name.lower()
else:
name = self.name
new_location = os.path.join(new_build_dir, name)
if not os.path.exists(new_build_dir):
logger.debug('Creating directory %s' % new_build_dir)
_make_build_dir(new_build_dir)
if os.path.exists(new_location):
raise InstallationError(
'A package already exists in %s; please remove it to continue'
% display_path(new_location))
logger.debug('Moving package %s from %s to new location %s'
% (self, display_path(old_location), display_path(new_location)))
shutil.move(old_location, new_location)
self._temp_build_dir = new_location
self.source_dir = new_location
self._egg_info_path = None
@property
def name(self):
if self.req is None:
return None
return self.req.project_name
@property
def url_name(self):
if self.req is None:
return None
return urllib.quote(self.req.unsafe_name)
@property
def setup_py(self):
try:
import setuptools
except ImportError:
# Setuptools is not available
raise InstallationError(
"setuptools must be installed to install from a source "
"distribution"
)
setup_file = 'setup.py'
if self.editable_options and 'subdirectory' in self.editable_options:
setup_py = os.path.join(self.source_dir,
self.editable_options['subdirectory'],
setup_file)
else:
setup_py = os.path.join(self.source_dir, setup_file)
# Python2 __file__ should not be unicode
if six.PY2 and isinstance(setup_py, six.text_type):
setup_py = setup_py.encode(sys.getfilesystemencoding())
return setup_py
def run_egg_info(self, force_root_egg_info=False):
assert self.source_dir
if self.name:
logger.notify('Running setup.py (path:%s) egg_info for package %s' % (self.setup_py, self.name))
else:
logger.notify('Running setup.py (path:%s) egg_info for package from %s' % (self.setup_py, self.url))
logger.indent += 2
try:
# if it's distribute>=0.7, it won't contain an importable
# setuptools, and having an egg-info dir blocks the ability of
            # setup.py to find setuptools plugins, so delete the egg-info dir
            # if setuptools is absent; it will be recreated when egg_info runs
# NOTE: this self.name check only works when installing from a specifier
# (not archive path/urls)
# TODO: take this out later
if self.name == 'distribute' and not os.path.isdir(os.path.join(self.source_dir, 'setuptools')):
rmtree(os.path.join(self.source_dir, 'distribute.egg-info'))
script = self._run_setup_py
script = script.replace('__SETUP_PY__', repr(self.setup_py))
script = script.replace('__PKG_NAME__', repr(self.name))
egg_info_cmd = [sys.executable, '-c', script, 'egg_info']
# We can't put the .egg-info files at the root, because then the source code will be mistaken
# for an installed egg, causing problems
if self.editable or force_root_egg_info:
egg_base_option = []
else:
egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
if not os.path.exists(egg_info_dir):
os.makedirs(egg_info_dir)
egg_base_option = ['--egg-base', 'pip-egg-info']
call_subprocess(
egg_info_cmd + egg_base_option,
cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
command_level=logger.VERBOSE_DEBUG,
command_desc='python setup.py egg_info')
finally:
logger.indent -= 2
if not self.req:
self.req = pkg_resources.Requirement.parse(
"%(Name)s==%(Version)s" % self.pkg_info())
self.correct_build_location()
## FIXME: This is a lame hack, entirely for PasteScript which has
## a self-provided entry point that causes this awkwardness
_run_setup_py = """
__file__ = __SETUP_PY__
from setuptools.command import egg_info
import pkg_resources
import os
import tokenize
def replacement_run(self):
self.mkpath(self.egg_info)
installer = self.distribution.fetch_build_egg
for ep in pkg_resources.iter_entry_points('egg_info.writers'):
# require=False is the change we're making:
writer = ep.load(require=False)
if writer:
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
self.find_sources()
egg_info.egg_info.run = replacement_run
exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
"""
def egg_info_data(self, filename):
if self.satisfied_by is not None:
if not self.satisfied_by.has_metadata(filename):
return None
return self.satisfied_by.get_metadata(filename)
assert self.source_dir
filename = self.egg_info_path(filename)
if not os.path.exists(filename):
return None
data = read_text_file(filename)
return data
def egg_info_path(self, filename):
if self._egg_info_path is None:
if self.editable:
base = self.source_dir
else:
base = os.path.join(self.source_dir, 'pip-egg-info')
filenames = os.listdir(base)
if self.editable:
filenames = []
for root, dirs, files in os.walk(base):
for dir in vcs.dirnames:
if dir in dirs:
dirs.remove(dir)
# Iterate over a copy of ``dirs``, since mutating
# a list while iterating over it can cause trouble.
# (See https://github.com/pypa/pip/pull/462.)
for dir in list(dirs):
# Don't search in anything that looks like a virtualenv environment
if (os.path.exists(os.path.join(root, dir, 'bin', 'python'))
or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))):
dirs.remove(dir)
# Also don't search through tests
if dir == 'test' or dir == 'tests':
dirs.remove(dir)
filenames.extend([os.path.join(root, dir)
for dir in dirs])
filenames = [f for f in filenames if f.endswith('.egg-info')]
if not filenames:
raise InstallationError('No files/directories in %s (from %s)' % (base, filename))
assert filenames, "No files/directories in %s (from %s)" % (base, filename)
# if we have more than one match, we pick the toplevel one. This can
# easily be the case if there is a dist folder which contains an
# extracted tarball for testing purposes.
if len(filenames) > 1:
filenames.sort(key=lambda x: x.count(os.path.sep) +
(os.path.altsep and
x.count(os.path.altsep) or 0))
self._egg_info_path = os.path.join(base, filenames[0])
return os.path.join(self._egg_info_path, filename)
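    # Hedged example of the tie-break above: ['dist/pkg.egg-info',
    # 'pkg.egg-info'] sorts the shallower 'pkg.egg-info' first, so the
    # top-level directory wins.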
def egg_info_lines(self, filename):
data = self.egg_info_data(filename)
if not data:
return []
result = []
for line in data.splitlines():
line = line.strip()
if not line or line.startswith('#'):
continue
result.append(line)
return result
def pkg_info(self):
p = FeedParser()
data = self.egg_info_data('PKG-INFO')
if not data:
logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
p.feed(data or '')
return p.close()
@property
def dependency_links(self):
return self.egg_info_lines('dependency_links.txt')
_requirements_section_re = re.compile(r'\[(.*?)\]')
def requirements(self, extras=()):
in_extra = None
for line in self.egg_info_lines('requires.txt'):
match = self._requirements_section_re.match(line.lower())
if match:
in_extra = match.group(1)
continue
if in_extra and in_extra not in extras:
logger.debug('skipping extra %s' % in_extra)
# Skip requirement for an extra we aren't requiring
continue
yield line
@property
def absolute_versions(self):
for qualifier, version in self.req.specs:
if qualifier == '==':
yield version
@property
def installed_version(self):
return self.pkg_info()['version']
def assert_source_matches_version(self):
assert self.source_dir
version = self.installed_version
if version not in self.req:
logger.warn('Requested %s, but installing version %s' % (self, self.installed_version))
else:
logger.debug('Source in %s has version %s, which satisfies requirement %s'
% (display_path(self.source_dir), version, self))
def update_editable(self, obtain=True):
if not self.url:
logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
return
assert self.editable
assert self.source_dir
if self.url.startswith('file:'):
# Static paths don't get updated
return
assert '+' in self.url, "bad url: %r" % self.url
if not self.update:
return
vc_type, url = self.url.split('+', 1)
backend = vcs.get_backend(vc_type)
if backend:
vcs_backend = backend(self.url)
if obtain:
vcs_backend.obtain(self.source_dir)
else:
vcs_backend.export(self.source_dir)
else:
assert 0, (
'Unexpected version control type (in %s): %s'
% (self.url, vc_type))
def uninstall(self, auto_confirm=False):
"""
Uninstall the distribution currently satisfying this requirement.
Prompts before removing or modifying files unless
``auto_confirm`` is True.
Refuses to delete or modify files outside of ``sys.prefix`` -
thus uninstallation within a virtual environment can only
modify that virtual environment, even if the virtualenv is
linked to global site-packages.
"""
if not self.check_if_exists():
raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
dist = self.satisfied_by or self.conflicts_with
paths_to_remove = UninstallPathSet(dist)
pip_egg_info_path = os.path.join(dist.location,
dist.egg_name()) + '.egg-info'
dist_info_path = os.path.join(dist.location,
'-'.join(dist.egg_name().split('-')[:2])
) + '.dist-info'
# workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
debian_egg_info_path = pip_egg_info_path.replace(
'-py%s' % pkg_resources.PY_MAJOR, '')
easy_install_egg = dist.egg_name() + '.egg'
develop_egg_link = egg_link_path(dist)
pip_egg_info_exists = os.path.exists(pip_egg_info_path)
debian_egg_info_exists = os.path.exists(debian_egg_info_path)
dist_info_exists = os.path.exists(dist_info_path)
if pip_egg_info_exists or debian_egg_info_exists:
# package installed by pip
if pip_egg_info_exists:
egg_info_path = pip_egg_info_path
else:
egg_info_path = debian_egg_info_path
paths_to_remove.add(egg_info_path)
if dist.has_metadata('installed-files.txt'):
for installed_file in dist.get_metadata('installed-files.txt').splitlines():
path = os.path.normpath(os.path.join(egg_info_path, installed_file))
paths_to_remove.add(path)
#FIXME: need a test for this elif block
#occurs with --single-version-externally-managed/--record outside of pip
elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt')
else:
namespaces = []
for top_level_pkg in [p for p
in dist.get_metadata('top_level.txt').splitlines()
if p and p not in namespaces]:
path = os.path.join(dist.location, top_level_pkg)
paths_to_remove.add(path)
paths_to_remove.add(path + '.py')
paths_to_remove.add(path + '.pyc')
elif dist.location.endswith(easy_install_egg):
# package installed by easy_install
paths_to_remove.add(dist.location)
easy_install_pth = os.path.join(os.path.dirname(dist.location),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
elif develop_egg_link:
# develop egg
fh = open(develop_egg_link, 'r')
link_pointer = os.path.normcase(fh.readline().strip())
fh.close()
assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, dist.location)
elif dist_info_exists:
for path in pip.wheel.uninstallation_paths(dist):
paths_to_remove.add(path)
# find distutils scripts= scripts
if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
for script in dist.metadata_listdir('scripts'):
if dist_in_usersite(dist):
bin_dir = bin_user
else:
bin_dir = bin_py
paths_to_remove.add(os.path.join(bin_dir, script))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
# find console_scripts
if dist.has_metadata('entry_points.txt'):
config = ConfigParser.SafeConfigParser()
config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
if config.has_section('console_scripts'):
for name, value in config.items('console_scripts'):
if dist_in_usersite(dist):
bin_dir = bin_user
else:
bin_dir = bin_py
paths_to_remove.add(os.path.join(bin_dir, name))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_dir, name) + '.exe')
paths_to_remove.add(os.path.join(bin_dir, name) + '.exe.manifest')
paths_to_remove.add(os.path.join(bin_dir, name) + '-script.py')
paths_to_remove.remove(auto_confirm)
self.uninstalled = paths_to_remove
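    # Hedged usage of the uninstall flow: uninstall() only stages paths in
    # self.uninstalled; commit_uninstall() finalizes the removal and
    # rollback_uninstall() restores the files.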
def rollback_uninstall(self):
if self.uninstalled:
self.uninstalled.rollback()
else:
logger.error("Can't rollback %s, nothing uninstalled."
% (self.project_name,))
def commit_uninstall(self):
if self.uninstalled:
self.uninstalled.commit()
else:
logger.error("Can't commit %s, nothing uninstalled."
% (self.project_name,))
def archive(self, build_dir):
assert self.source_dir
create_archive = True
archive_name = '%s-%s.zip' % (self.name, self.installed_version)
archive_path = os.path.join(build_dir, archive_name)
if os.path.exists(archive_path):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
display_path(archive_path), ('i', 'w', 'b'))
if response == 'i':
create_archive = False
elif response == 'w':
logger.warn('Deleting %s' % display_path(archive_path))
os.remove(archive_path)
elif response == 'b':
dest_file = backup_dir(archive_path)
logger.warn('Backing up %s to %s'
% (display_path(archive_path), display_path(dest_file)))
shutil.move(archive_path, dest_file)
if create_archive:
zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
dir = os.path.normcase(os.path.abspath(self.source_dir))
for dirpath, dirnames, filenames in os.walk(dir):
if 'pip-egg-info' in dirnames:
dirnames.remove('pip-egg-info')
for dirname in dirnames:
dirname = os.path.join(dirpath, dirname)
name = self._clean_zip_name(dirname, dir)
zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
zipdir.external_attr = 0x1ED << 16 # 0o755
zip.writestr(zipdir, '')
for filename in filenames:
if filename == PIP_DELETE_MARKER_FILENAME:
continue
filename = os.path.join(dirpath, filename)
name = self._clean_zip_name(filename, dir)
zip.write(filename, self.name + '/' + name)
zip.close()
logger.indent -= 2
logger.notify('Saved %s' % display_path(archive_path))
def _clean_zip_name(self, name, prefix):
assert name.startswith(prefix+os.path.sep), (
"name %r doesn't start with prefix %r" % (name, prefix))
name = name[len(prefix)+1:]
name = name.replace(os.path.sep, '/')
return name
def install(self, install_options, global_options=(), root=None):
if self.editable:
self.install_editable(install_options, global_options)
return
if self.is_wheel:
version = pip.wheel.wheel_version(self.source_dir)
pip.wheel.check_compatibility(version, self.name)
self.move_wheel_files(self.source_dir, root=root)
self.install_succeeded = True
return
temp_location = tempfile.mkdtemp('-record', 'pip-')
record_filename = os.path.join(temp_location, 'install-record.txt')
try:
install_args = [sys.executable]
install_args.append('-c')
install_args.append(
"import setuptools, tokenize;__file__=%r;"\
"exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py)
install_args += list(global_options) + ['install','--record', record_filename]
if not self.as_egg:
install_args += ['--single-version-externally-managed']
if root is not None:
install_args += ['--root', root]
if self.pycompile:
install_args += ["--compile"]
else:
install_args += ["--no-compile"]
if running_under_virtualenv():
## FIXME: I'm not sure if this is a reasonable location; probably not
## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
install_args += ['--install-headers',
os.path.join(sys.prefix, 'include', 'site',
'python' + get_python_version())]
logger.notify('Running setup.py install for %s' % self.name)
logger.indent += 2
try:
call_subprocess(install_args + install_options,
cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
finally:
logger.indent -= 2
if not os.path.exists(record_filename):
logger.notify('Record file %s not found' % record_filename)
return
self.install_succeeded = True
if self.as_egg:
                # there's no --always-unzip option we can pass to the install
                # command, so we are unable to save installed-files.txt
return
def prepend_root(path):
if root is None or not os.path.isabs(path):
return path
else:
return change_root(root, path)
f = open(record_filename)
for line in f:
line = line.strip()
if line.endswith('.egg-info'):
egg_info_dir = prepend_root(line)
break
else:
logger.warn('Could not find .egg-info directory in install record for %s' % self)
## FIXME: put the record somewhere
## FIXME: should this be an error?
return
f.close()
new_lines = []
f = open(record_filename)
for line in f:
filename = line.strip()
if os.path.isdir(filename):
filename += os.path.sep
new_lines.append(make_path_relative(prepend_root(filename), egg_info_dir))
f.close()
f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
f.write('\n'.join(new_lines)+'\n')
f.close()
finally:
if os.path.exists(record_filename):
os.remove(record_filename)
os.rmdir(temp_location)
def remove_temporary_source(self):
"""Remove the source files from this requirement, if they are marked
for deletion"""
if self.is_bundle or os.path.exists(self.delete_marker_filename):
logger.info('Removing source in %s' % self.source_dir)
if self.source_dir:
rmtree(self.source_dir)
self.source_dir = None
if self._temp_build_dir and os.path.exists(self._temp_build_dir):
rmtree(self._temp_build_dir)
self._temp_build_dir = None
def install_editable(self, install_options, global_options=()):
logger.notify('Running setup.py develop for %s' % self.name)
logger.indent += 2
try:
## FIXME: should we do --install-headers here too?
call_subprocess(
[sys.executable, '-c',
"import setuptools, tokenize; __file__=%r; exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py]
+ list(global_options) + ['develop', '--no-deps'] + list(install_options),
cwd=self.source_dir, filter_stdout=self._filter_install,
show_stdout=False)
finally:
logger.indent -= 2
self.install_succeeded = True
def _filter_install(self, line):
level = logger.NOTIFY
for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
r'^byte-compiling ',
# Not sure what this warning is, but it seems harmless:
r"^warning: manifest_maker: standard file '-c' not found$"]:
if re.search(regex, line.strip()):
level = logger.INFO
break
return (level, line)
def check_if_exists(self):
"""Find an installed distribution that satisfies or conflicts
with this requirement, and set self.satisfied_by or
self.conflicts_with appropriately."""
if self.req is None:
return False
try:
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
# if we've already set distribute as a conflict to setuptools
# then this check has already run before. we don't want it to
# run again, and return False, since it would block the uninstall
# TODO: remove this later
if (self.req.project_name == 'setuptools'
and self.conflicts_with
and self.conflicts_with.project_name == 'distribute'):
return True
else:
self.satisfied_by = pkg_resources.get_distribution(self.req)
except pkg_resources.DistributionNotFound:
return False
except pkg_resources.VersionConflict:
existing_dist = pkg_resources.get_distribution(self.req.project_name)
if self.use_user_site:
if dist_in_usersite(existing_dist):
self.conflicts_with = existing_dist
elif running_under_virtualenv() and dist_in_site_packages(existing_dist):
raise InstallationError("Will not install to the user site because it will lack sys.path precedence to %s in %s"
%(existing_dist.project_name, existing_dist.location))
else:
self.conflicts_with = existing_dist
return True
@property
def is_wheel(self):
return self.url and '.whl' in self.url
@property
def is_bundle(self):
if self._is_bundle is not None:
return self._is_bundle
base = self._temp_build_dir
if not base:
## FIXME: this doesn't seem right:
return False
self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
return self._is_bundle
def bundle_requirements(self):
for dest_dir in self._bundle_editable_dirs:
package = os.path.basename(dest_dir)
## FIXME: svnism:
for vcs_backend in vcs.backends:
url = rev = None
vcs_bundle_file = os.path.join(
dest_dir, vcs_backend.bundle_file)
if os.path.exists(vcs_bundle_file):
vc_type = vcs_backend.name
fp = open(vcs_bundle_file)
content = fp.read()
fp.close()
url, rev = vcs_backend().parse_vcs_bundle_file(content)
break
if url:
url = '%s+%s@%s' % (vc_type, url, rev)
else:
url = None
yield InstallRequirement(
package, self, editable=True, url=url,
update=False, source_dir=dest_dir, from_bundle=True)
for dest_dir in self._bundle_build_dirs:
package = os.path.basename(dest_dir)
            yield InstallRequirement(package, self, source_dir=dest_dir,
                                     from_bundle=True)
def move_bundle_files(self, dest_build_dir, dest_src_dir):
base = self._temp_build_dir
assert base
src_dir = os.path.join(base, 'src')
build_dir = os.path.join(base, 'build')
bundle_build_dirs = []
bundle_editable_dirs = []
for source_dir, dest_dir, dir_collection in [
(src_dir, dest_src_dir, bundle_editable_dirs),
(build_dir, dest_build_dir, bundle_build_dirs)]:
if os.path.exists(source_dir):
for dirname in os.listdir(source_dir):
dest = os.path.join(dest_dir, dirname)
dir_collection.append(dest)
if os.path.exists(dest):
logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
% (dest, dirname, self))
continue
if not os.path.exists(dest_dir):
logger.info('Creating directory %s' % dest_dir)
os.makedirs(dest_dir)
shutil.move(os.path.join(source_dir, dirname), dest)
if not os.listdir(source_dir):
os.rmdir(source_dir)
self._temp_build_dir = None
self._bundle_build_dirs = bundle_build_dirs
self._bundle_editable_dirs = bundle_editable_dirs
def move_wheel_files(self, wheeldir, root=None):
move_wheel_files(
self.name, self.req, wheeldir,
user=self.use_user_site,
home=self.target_dir,
root=root,
pycompile=self.pycompile,
)
@property
def delete_marker_filename(self):
assert self.source_dir
return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)
class Requirements(object):
def __init__(self):
self._keys = []
self._dict = {}
def keys(self):
return self._keys
def values(self):
return [self._dict[key] for key in self._keys]
def __contains__(self, item):
return item in self._keys
def __setitem__(self, key, value):
if key not in self._keys:
self._keys.append(key)
self._dict[key] = value
def __getitem__(self, key):
return self._dict[key]
def __repr__(self):
values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
return 'Requirements({%s})' % ', '.join(values)
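# Hedged example: Requirements preserves insertion order, which plain dicts
# did not guarantee on the Pythons pip supported at the time:
#     reqs = Requirements(); reqs['pip'] = 1; reqs['six'] = 2
#     reqs.keys()  -> ['pip', 'six']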
class RequirementSet(object):
def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
upgrade=False, ignore_installed=False, as_egg=False,
target_dir=None, ignore_dependencies=False,
force_reinstall=False, use_user_site=False, session=None,
pycompile=True, wheel_download_dir=None):
self.build_dir = build_dir
self.src_dir = src_dir
self.download_dir = download_dir
if download_cache:
download_cache = os.path.expanduser(download_cache)
self.download_cache = download_cache
self.upgrade = upgrade
self.ignore_installed = ignore_installed
self.force_reinstall = force_reinstall
self.requirements = Requirements()
# Mapping of alias: real_name
self.requirement_aliases = {}
self.unnamed_requirements = []
self.ignore_dependencies = ignore_dependencies
self.successfully_downloaded = []
self.successfully_installed = []
self.reqs_to_cleanup = []
self.as_egg = as_egg
self.use_user_site = use_user_site
self.target_dir = target_dir #set from --target option
self.session = session or PipSession()
self.pycompile = pycompile
self.wheel_download_dir = wheel_download_dir
def __str__(self):
reqs = [req for req in self.requirements.values()
if not req.comes_from]
reqs.sort(key=lambda req: req.name.lower())
return ' '.join([str(req.req) for req in reqs])
def add_requirement(self, install_req):
name = install_req.name
install_req.as_egg = self.as_egg
install_req.use_user_site = self.use_user_site
install_req.target_dir = self.target_dir
install_req.pycompile = self.pycompile
if not name:
#url or path requirement w/o an egg fragment
self.unnamed_requirements.append(install_req)
else:
if self.has_requirement(name):
raise InstallationError(
'Double requirement given: %s (already in %s, name=%r)'
% (install_req, self.get_requirement(name), name))
self.requirements[name] = install_req
## FIXME: what about other normalizations? E.g., _ vs. -?
if name.lower() != name:
self.requirement_aliases[name.lower()] = name
def has_requirement(self, project_name):
for name in project_name, project_name.lower():
if name in self.requirements or name in self.requirement_aliases:
return True
return False
@property
def has_requirements(self):
return list(self.requirements.values()) or self.unnamed_requirements
@property
def has_editables(self):
if any(req.editable for req in self.requirements.values()):
return True
if any(req.editable for req in self.unnamed_requirements):
return True
return False
@property
def is_download(self):
if self.download_dir:
self.download_dir = os.path.expanduser(self.download_dir)
if os.path.exists(self.download_dir):
return True
else:
logger.fatal('Could not find download directory')
raise InstallationError(
"Could not find or access download directory '%s'"
% display_path(self.download_dir))
return False
def get_requirement(self, project_name):
for name in project_name, project_name.lower():
if name in self.requirements:
return self.requirements[name]
if name in self.requirement_aliases:
return self.requirements[self.requirement_aliases[name]]
raise KeyError("No project with the name %r" % project_name)
def uninstall(self, auto_confirm=False):
for req in self.requirements.values():
req.uninstall(auto_confirm=auto_confirm)
req.commit_uninstall()
def locate_files(self):
## FIXME: duplicates code from prepare_files; relevant code should
## probably be factored out into a separate method
unnamed = list(self.unnamed_requirements)
reqs = list(self.requirements.values())
while reqs or unnamed:
if unnamed:
req_to_install = unnamed.pop(0)
else:
req_to_install = reqs.pop(0)
install_needed = True
if not self.ignore_installed and not req_to_install.editable:
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade:
#don't uninstall conflict if user install and conflict is not user install
if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
req_to_install.conflicts_with = req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
install_needed = False
if req_to_install.satisfied_by:
logger.notify('Requirement already satisfied '
'(use --upgrade to upgrade): %s'
% req_to_install)
if req_to_install.editable:
if req_to_install.source_dir is None:
req_to_install.source_dir = req_to_install.build_location(self.src_dir)
elif install_needed:
req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download)
if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir):
raise InstallationError('Could not install requirement %s '
'because source folder %s does not exist '
'(perhaps --no-download was used without first running '
'an equivalent install with --no-install?)'
% (req_to_install, req_to_install.source_dir))
def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
"""Prepare process. Create temp directories, download and/or unpack files."""
unnamed = list(self.unnamed_requirements)
reqs = list(self.requirements.values())
while reqs or unnamed:
if unnamed:
req_to_install = unnamed.pop(0)
else:
req_to_install = reqs.pop(0)
install = True
best_installed = False
not_found = None
if not self.ignore_installed and not req_to_install.editable:
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade:
if not self.force_reinstall and not req_to_install.url:
try:
url = finder.find_requirement(
req_to_install, self.upgrade)
except BestVersionAlreadyInstalled:
best_installed = True
install = False
except DistributionNotFound:
not_found = sys.exc_info()[1]
else:
# Avoid the need to call find_requirement again
req_to_install.url = url.url
if not best_installed:
#don't uninstall conflict if user install and conflict is not user install
if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
req_to_install.conflicts_with = req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
install = False
if req_to_install.satisfied_by:
if best_installed:
logger.notify('Requirement already up-to-date: %s'
% req_to_install)
else:
logger.notify('Requirement already satisfied '
'(use --upgrade to upgrade): %s'
% req_to_install)
if req_to_install.editable:
logger.notify('Obtaining %s' % req_to_install)
elif install:
if req_to_install.url and req_to_install.url.lower().startswith('file:'):
logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url)))
else:
logger.notify('Downloading/unpacking %s' % req_to_install)
logger.indent += 2
try:
is_bundle = False
is_wheel = False
if req_to_install.editable:
if req_to_install.source_dir is None:
location = req_to_install.build_location(self.src_dir)
req_to_install.source_dir = location
else:
location = req_to_install.source_dir
if not os.path.exists(self.build_dir):
_make_build_dir(self.build_dir)
req_to_install.update_editable(not self.is_download)
if self.is_download:
req_to_install.run_egg_info()
req_to_install.archive(self.download_dir)
else:
req_to_install.run_egg_info()
elif install:
##@@ if filesystem packages are not marked
##editable in a req, a non deterministic error
##occurs when the script attempts to unpack the
##build directory
# NB: This call can result in the creation of a temporary build directory
location = req_to_install.build_location(self.build_dir, not self.is_download)
unpack = True
url = None
# In the case where the req comes from a bundle, we should
# assume a build dir exists and move on
if req_to_install.from_bundle:
pass
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
elif os.path.exists(os.path.join(location, 'setup.py')):
raise PreviousBuildDirError(textwrap.dedent("""
pip can't proceed with requirement '%s' due to a pre-existing build directory.
location: %s
This is likely due to a previous installation that failed.
pip is being responsible and not assuming it can delete this.
Please delete it and try again.
""" % (req_to_install, location)))
else:
## FIXME: this won't upgrade when there's an existing package unpacked in `location`
if req_to_install.url is None:
if not_found:
raise not_found
url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
else:
## FIXME: should req_to_install.url already be a link?
url = Link(req_to_install.url)
assert url
if url:
try:
if (
url.filename.endswith(wheel_ext)
and self.wheel_download_dir
):
# when doing 'pip wheel`
download_dir = self.wheel_download_dir
do_download = True
else:
download_dir = self.download_dir
do_download = self.is_download
self.unpack_url(
url, location, download_dir,
do_download,
)
except HTTPError as exc:
logger.fatal(
'Could not install requirement %s because '
'of error %s' % (req_to_install, exc)
)
raise InstallationError(
'Could not install requirement %s because of HTTP error %s for URL %s'
% (req_to_install, exc, url))
else:
unpack = False
if unpack:
is_bundle = req_to_install.is_bundle
is_wheel = url and url.filename.endswith(wheel_ext)
if is_bundle:
req_to_install.move_bundle_files(self.build_dir, self.src_dir)
for subreq in req_to_install.bundle_requirements():
reqs.append(subreq)
self.add_requirement(subreq)
elif self.is_download:
req_to_install.source_dir = location
if not is_wheel:
# FIXME: see https://github.com/pypa/pip/issues/1112
req_to_install.run_egg_info()
if url and url.scheme in vcs.all_schemes:
req_to_install.archive(self.download_dir)
elif is_wheel:
req_to_install.source_dir = location
req_to_install.url = url.url
else:
req_to_install.source_dir = location
req_to_install.run_egg_info()
if force_root_egg_info:
# We need to run this to make sure that the .egg-info/
# directory is created for packing in the bundle
req_to_install.run_egg_info(force_root_egg_info=True)
req_to_install.assert_source_matches_version()
#@@ sketchy way of identifying packages not grabbed from an index
if bundle and req_to_install.url:
self.copy_to_build_dir(req_to_install)
install = False
# req_to_install.req is only avail after unpack for URL pkgs
# repeat check_if_exists to uninstall-on-upgrade (#14)
if not self.ignore_installed:
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade or self.ignore_installed:
#don't uninstall conflict if user install and conflict is not user install
if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
req_to_install.conflicts_with = req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
logger.notify(
'Requirement already satisfied (use '
'--upgrade to upgrade): %s' %
req_to_install
)
install = False
if is_wheel:
dist = list(
pkg_resources.find_distributions(location)
)[0]
if not req_to_install.req:
req_to_install.req = dist.as_requirement()
self.add_requirement(req_to_install)
if not self.ignore_dependencies:
for subreq in dist.requires(
req_to_install.extras):
if self.has_requirement(
subreq.project_name):
continue
subreq = InstallRequirement(str(subreq),
req_to_install)
reqs.append(subreq)
self.add_requirement(subreq)
# sdists
elif not is_bundle:
## FIXME: shouldn't be globally added:
finder.add_dependency_links(req_to_install.dependency_links)
if (req_to_install.extras):
logger.notify("Installing extra requirements: %r" % ','.join(req_to_install.extras))
if not self.ignore_dependencies:
for req in req_to_install.requirements(req_to_install.extras):
try:
name = pkg_resources.Requirement.parse(req).project_name
except ValueError:
e = sys.exc_info()[1]
## FIXME: proper warning
logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
continue
if self.has_requirement(name):
## FIXME: check for conflict
continue
subreq = InstallRequirement(req, req_to_install)
reqs.append(subreq)
self.add_requirement(subreq)
if not self.has_requirement(req_to_install.name):
#'unnamed' requirements will get added here
self.add_requirement(req_to_install)
# cleanup tmp src
if not is_bundle:
if (
self.is_download or
req_to_install._temp_build_dir is not None
):
self.reqs_to_cleanup.append(req_to_install)
if install:
self.successfully_downloaded.append(req_to_install)
if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')):
self.copy_to_build_dir(req_to_install)
finally:
logger.indent -= 2
def cleanup_files(self, bundle=False):
"""Clean up files, remove builds."""
logger.notify('Cleaning up...')
logger.indent += 2
for req in self.reqs_to_cleanup:
req.remove_temporary_source()
remove_dir = []
if self._pip_has_created_build_dir():
remove_dir.append(self.build_dir)
# The source dir of a bundle can always be removed.
# FIXME: not if it pre-existed the bundle!
if bundle:
remove_dir.append(self.src_dir)
for dir in remove_dir:
if os.path.exists(dir):
logger.info('Removing temporary dir %s...' % dir)
rmtree(dir)
logger.indent -= 2
def _pip_has_created_build_dir(self):
return (self.build_dir == build_prefix and
os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)))
def copy_to_build_dir(self, req_to_install):
target_dir = req_to_install.editable and self.src_dir or self.build_dir
logger.info("Copying %s to %s" % (req_to_install.name, target_dir))
dest = os.path.join(target_dir, req_to_install.name)
shutil.copytree(req_to_install.source_dir, dest)
call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
command_desc='python setup.py clean')
def unpack_url(self, link, location, download_dir=None,
only_download=False):
if download_dir is None:
download_dir = self.download_dir
# non-editable vcs urls
if is_vcs_url(link):
if only_download:
loc = download_dir
else:
loc = location
unpack_vcs_link(link, loc, only_download)
# file urls
elif is_file_url(link):
unpack_file_url(link, location, download_dir)
if only_download:
write_delete_marker_file(location)
# http urls
else:
unpack_http_url(
link,
location,
self.download_cache,
download_dir,
self.session,
)
if only_download:
write_delete_marker_file(location)
def install(self, install_options, global_options=(), *args, **kwargs):
"""Install everything in this set (after having downloaded and unpacked the packages)"""
to_install = [r for r in self.requirements.values()
if not r.satisfied_by]
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
# move the distribute-0.7.X wrapper to the end because it does not
# install a setuptools package. by moving it to the end, we ensure its
# setuptools dependency is handled first, which will provide the
# setuptools package
# TODO: take this out later
distribute_req = pkg_resources.Requirement.parse("distribute>=0.7")
for req in to_install:
if req.name == 'distribute' and req.installed_version in distribute_req:
to_install.remove(req)
to_install.append(req)
if to_install:
logger.notify('Installing collected packages: %s' % ', '.join([req.name for req in to_install]))
logger.indent += 2
try:
for requirement in to_install:
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
# when upgrading from distribute-0.6.X to the new merged
# setuptools in py2, we need to force setuptools to uninstall
# distribute. In py3, which is always using distribute, this
# conversion is already happening in distribute's pkg_resources.
# It's ok *not* to check if setuptools>=0.7 because if someone
# were actually trying to upgrade from distribute to setuptools
# 0.6.X, then all this could do is actually help, although that
# upgrade path was certainly never "supported"
# TODO: remove this later
if requirement.name == 'setuptools':
try:
# only uninstall distribute<0.7. For >=0.7, setuptools
# will also be present, and that's what we need to
# uninstall
distribute_requirement = pkg_resources.Requirement.parse("distribute<0.7")
existing_distribute = pkg_resources.get_distribution("distribute")
if existing_distribute in distribute_requirement:
requirement.conflicts_with = existing_distribute
except pkg_resources.DistributionNotFound:
# distribute wasn't installed, so nothing to do
pass
if requirement.conflicts_with:
logger.notify('Found existing installation: %s'
% requirement.conflicts_with)
logger.indent += 2
try:
requirement.uninstall(auto_confirm=True)
finally:
logger.indent -= 2
try:
requirement.install(install_options, global_options, *args, **kwargs)
except:
# if install did not succeed, rollback previous uninstall
if requirement.conflicts_with and not requirement.install_succeeded:
requirement.rollback_uninstall()
raise
else:
if requirement.conflicts_with and requirement.install_succeeded:
requirement.commit_uninstall()
requirement.remove_temporary_source()
finally:
logger.indent -= 2
self.successfully_installed = to_install
def create_bundle(self, bundle_filename):
## FIXME: can't decide which is better; zip is easier to read
## random files from, but tar.bz2 is smaller and not as lame a
## format.
## FIXME: this file should really include a manifest of the
## packages, maybe some other metadata files. It would make
## it easier to detect as well.
zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
vcs_dirs = []
for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
dir = os.path.normcase(os.path.abspath(dir))
for dirpath, dirnames, filenames in os.walk(dir):
for backend in vcs.backends:
vcs_backend = backend()
vcs_url = vcs_rev = None
if vcs_backend.dirname in dirnames:
for vcs_dir in vcs_dirs:
if dirpath.startswith(vcs_dir):
# vcs bundle file already in parent directory
break
else:
vcs_url, vcs_rev = vcs_backend.get_info(
os.path.join(dir, dirpath))
vcs_dirs.append(dirpath)
vcs_bundle_file = vcs_backend.bundle_file
vcs_guide = vcs_backend.guide % {'url': vcs_url,
'rev': vcs_rev}
dirnames.remove(vcs_backend.dirname)
break
if 'pip-egg-info' in dirnames:
dirnames.remove('pip-egg-info')
for dirname in dirnames:
dirname = os.path.join(dirpath, dirname)
name = self._clean_zip_name(dirname, dir)
zip.writestr(basename + '/' + name + '/', '')
for filename in filenames:
if filename == PIP_DELETE_MARKER_FILENAME:
continue
filename = os.path.join(dirpath, filename)
name = self._clean_zip_name(filename, dir)
zip.write(filename, basename + '/' + name)
if vcs_url:
name = os.path.join(dirpath, vcs_bundle_file)
name = self._clean_zip_name(name, dir)
zip.writestr(basename + '/' + name, vcs_guide)
zip.writestr('pip-manifest.txt', self.bundle_requirements())
zip.close()
BUNDLE_HEADER = '''\
# This is a pip bundle file, that contains many source packages
# that can be installed as a group. You can install this like:
# pip this_file.zip
# The rest of the file contains a list of all the packages included:
'''
def bundle_requirements(self):
parts = [self.BUNDLE_HEADER]
for req in [req for req in self.requirements.values()
if not req.comes_from]:
parts.append('%s==%s\n' % (req.name, req.installed_version))
parts.append('# These packages were installed to satisfy the above requirements:\n')
for req in [req for req in self.requirements.values()
if req.comes_from]:
parts.append('%s==%s\n' % (req.name, req.installed_version))
## FIXME: should we do something with self.unnamed_requirements?
return ''.join(parts)
def _clean_zip_name(self, name, prefix):
assert name.startswith(prefix+os.path.sep), (
"name %r doesn't start with prefix %r" % (name, prefix))
name = name[len(prefix)+1:]
name = name.replace(os.path.sep, '/')
return name
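# Example of _clean_zip_name (illustrative POSIX paths): the prefix plus
# one separator is stripped and separators are normalized to '/' so the
# result is usable as a zip archive entry name.
#
#   >>> rs = RequirementSet.__new__(RequirementSet)  # no __init__ needed
#   >>> rs._clean_zip_name('/tmp/build/Foo/setup.py', '/tmp/build')
#   'Foo/setup.py'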
def _make_build_dir(build_dir):
os.makedirs(build_dir)
write_delete_marker_file(build_dir)
_scheme_re = re.compile(r'^(http|https|file):', re.I)
def parse_requirements(filename, finder=None, comes_from=None, options=None,
session=None):
if session is None:
session = PipSession()
skip_match = None
skip_regex = options.skip_requirements_regex if options else None
if skip_regex:
skip_match = re.compile(skip_regex)
reqs_file_dir = os.path.dirname(os.path.abspath(filename))
filename, content = get_file_content(filename,
comes_from=comes_from,
session=session,
)
for line_number, line in enumerate(content.splitlines()):
line_number += 1
line = line.strip()
# Remove comments from file
line = re.sub(r"(^|\s)#.*$", "", line)
if not line or line.startswith('#'):
continue
if skip_match and skip_match.search(line):
continue
if line.startswith('-r') or line.startswith('--requirement'):
if line.startswith('-r'):
req_url = line[2:].strip()
else:
req_url = line[len('--requirement'):].strip().strip('=')
if _scheme_re.search(filename):
# Relative to a URL
req_url = urlparse.urljoin(filename, req_url)
elif not _scheme_re.search(req_url):
req_url = os.path.join(os.path.dirname(filename), req_url)
for item in parse_requirements(req_url, finder, comes_from=filename, options=options, session=session):
yield item
elif line.startswith('-Z') or line.startswith('--always-unzip'):
# No longer used, but previously these were used in
# requirement files, so we'll ignore.
pass
elif line.startswith('-f') or line.startswith('--find-links'):
if line.startswith('-f'):
line = line[2:].strip()
else:
line = line[len('--find-links'):].strip().lstrip('=')
## FIXME: it would be nice to keep track of the source of
## the find_links:
# support a find-links local path relative to a requirements file
relative_to_reqs_file = os.path.join(reqs_file_dir, line)
if os.path.exists(relative_to_reqs_file):
line = relative_to_reqs_file
if finder:
finder.find_links.append(line)
elif line.startswith('-i') or line.startswith('--index-url'):
if line.startswith('-i'):
line = line[2:].strip()
else:
line = line[len('--index-url'):].strip().lstrip('=')
if finder:
finder.index_urls = [line]
elif line.startswith('--extra-index-url'):
line = line[len('--extra-index-url'):].strip().lstrip('=')
if finder:
finder.index_urls.append(line)
elif line.startswith('--use-wheel'):
finder.use_wheel = True
elif line.startswith('--no-index'):
finder.index_urls = []
elif line.startswith("--allow-external"):
line = line[len("--allow-external"):].strip().lstrip("=")
finder.allow_external |= set([normalize_name(line).lower()])
elif line.startswith("--allow-all-external"):
finder.allow_all_external = True
# Remove in 1.7
elif line.startswith("--no-allow-external"):
pass
# Remove in 1.7
elif line.startswith("--no-allow-insecure"):
pass
# Remove after 1.7
elif line.startswith("--allow-insecure"):
line = line[len("--allow-insecure"):].strip().lstrip("=")
finder.allow_unverified |= set([normalize_name(line).lower()])
elif line.startswith("--allow-unverified"):
line = line[len("--allow-unverified"):].strip().lstrip("=")
finder.allow_unverified |= set([normalize_name(line).lower()])
else:
comes_from = '-r %s (line %s)' % (filename, line_number)
if line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
req = InstallRequirement.from_editable(
line, comes_from=comes_from, default_vcs=options.default_vcs if options else None)
else:
req = InstallRequirement.from_line(line, comes_from, prereleases=getattr(options, "pre", None))
yield req
def _strip_postfix(req):
"""
Strip req postfix ( -dev, -0.2, etc )
"""
## FIXME: use package_to_requirement?
match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
if match:
# Strip off -dev, -0.2, etc.
req = match.group(1)
return req
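# Examples (illustrative): only a single trailing "-dev" or "-<digit>..."
# postfix is stripped; names without one pass through unchanged.
#
#   >>> _strip_postfix('Django-dev')
#   'Django'
#   >>> _strip_postfix('Django-1.5')
#   'Django'
#   >>> _strip_postfix('Django')
#   'Django'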
def _build_req_from_url(url):
parts = [p for p in url.split('#', 1)[0].split('/') if p]
req = None
if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
req = parts[-3]
elif parts[-1] == 'trunk':
req = parts[-2]
return req
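# Examples (illustrative Subversion-style URLs): the project name is taken
# from the path component next to 'tags'/'branches', or before 'trunk'.
#
#   >>> _build_req_from_url('http://svn.example.com/MyProject/tags/1.0')
#   'MyProject'
#   >>> _build_req_from_url('http://svn.example.com/MyProject/trunk')
#   'MyProject'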
def _build_editable_options(req):
"""
This method generates a dictionary of the query string
parameters contained in a given editable URL.
"""
regexp = re.compile(r"[\?#&](?P<name>[^&=]+)=(?P<value>[^&=]+)")
matched = regexp.findall(req)
if matched:
ret = dict()
for option in matched:
(name, value) = option
if name in ret:
raise Exception("%s option already defined" % name)
ret[name] = value
return ret
return None
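# Example (illustrative editable URL): every name=value pair found after
# '?', '#' or '&' is collected; a repeated name raises.
#
#   >>> opts = _build_editable_options(
#   ...     'git+https://example.com/repo.git#egg=Foo&subdirectory=pkg')
#   >>> opts['egg'], opts['subdirectory']
#   ('Foo', 'pkg')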
def parse_editable(editable_req, default_vcs=None):
"""Parses svn+http://blahblah@rev#egg=Foobar into a requirement
(Foobar) and a URL"""
url = editable_req
extras = None
# If a file path is specified with extras, strip off the extras.
m = re.match(r'^(.+)(\[[^\]]+\])$', url)
if m:
url_no_extras = m.group(1)
extras = m.group(2)
else:
url_no_extras = url
if os.path.isdir(url_no_extras):
if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % url_no_extras)
# Treating it as code that has already been checked out
url_no_extras = path_to_url(url_no_extras)
if url_no_extras.lower().startswith('file:'):
if extras:
return None, url_no_extras, pkg_resources.Requirement.parse('__placeholder__' + extras).extras
else:
return None, url_no_extras, None
for version_control in vcs:
if url.lower().startswith('%s:' % version_control):
url = '%s+%s' % (version_control, url)
break
if '+' not in url:
if default_vcs:
url = default_vcs + '+' + url
else:
raise InstallationError(
'%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+' % editable_req)
vc_type = url.split('+', 1)[0].lower()
if not vcs.get_backend(vc_type):
error_message = 'For --editable=%s only ' % editable_req + \
', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
' is currently supported'
raise InstallationError(error_message)
try:
options = _build_editable_options(editable_req)
except Exception:
message = sys.exc_info()[1]
raise InstallationError(
'--editable=%s error in editable options:%s' % (editable_req, message))
if not options or 'egg' not in options:
req = _build_req_from_url(editable_req)
if not req:
raise InstallationError('--editable=%s is not the right format; it must have #egg=Package' % editable_req)
else:
req = options['egg']
package = _strip_postfix(req)
return package, url, options
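# Example (illustrative URL): a VCS URL with an #egg fragment yields the
# package name, the URL (prefixed with the VCS type when needed) and the
# parsed options.
#
#   >>> parse_editable('git+https://example.com/repo.git#egg=Foo')
#   ('Foo', 'git+https://example.com/repo.git#egg=Foo', {'egg': 'Foo'})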
class UninstallPathSet(object):
"""A set of file paths to be removed in the uninstallation of a
requirement."""
def __init__(self, dist):
self.paths = set()
self._refuse = set()
self.pth = {}
self.dist = dist
self.save_dir = None
self._moved_paths = []
def _permitted(self, path):
"""
Return True if the given path is one we are permitted to
remove/modify, False otherwise.
"""
return is_local(path)
def _can_uninstall(self):
if not dist_is_local(self.dist):
logger.notify("Not uninstalling %s at %s, outside environment %s"
% (self.dist.project_name, normalize_path(self.dist.location), sys.prefix))
return False
return True
def add(self, path):
path = normalize_path(path)
if not os.path.exists(path):
return
if self._permitted(path):
self.paths.add(path)
else:
self._refuse.add(path)
# __pycache__ files can show up after 'installed-files.txt' is created, due to imports
if os.path.splitext(path)[1] == '.py' and uses_pycache:
self.add(imp.cache_from_source(path))
def add_pth(self, pth_file, entry):
pth_file = normalize_path(pth_file)
if self._permitted(pth_file):
if pth_file not in self.pth:
self.pth[pth_file] = UninstallPthEntries(pth_file)
self.pth[pth_file].add(entry)
else:
self._refuse.add(pth_file)
def compact(self, paths):
"""Compact a path set to contain the minimal number of paths
necessary to contain all paths in the set. If /a/path/ and
/a/path/to/a/file.txt are both in the set, leave only the
shorter path."""
short_paths = set()
for path in sorted(paths, key=len):
if not any([(path.startswith(shortpath) and
path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
for shortpath in short_paths]):
short_paths.add(path)
return short_paths
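# Example (illustrative POSIX paths): a path nested under another entry is
# dropped, while a sibling that merely shares a string prefix is kept.
#
#   >>> ups = UninstallPathSet.__new__(UninstallPathSet)
#   >>> sorted(ups.compact({'/a/path', '/a/path/to/file.txt', '/a/path2'}))
#   ['/a/path', '/a/path2']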
def _stash(self, path):
return os.path.join(
self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
def remove(self, auto_confirm=False):
"""Remove paths in ``self.paths`` with confirmation (unless
``auto_confirm`` is True)."""
if not self._can_uninstall():
return
if not self.paths:
logger.notify("Can't uninstall '%s'. No files were found to uninstall." % self.dist.project_name)
return
logger.notify('Uninstalling %s:' % self.dist.project_name)
logger.indent += 2
paths = sorted(self.compact(self.paths))
try:
if auto_confirm:
response = 'y'
else:
for path in paths:
logger.notify(path)
response = ask('Proceed (y/n)? ', ('y', 'n'))
if self._refuse:
logger.notify('Not removing or modifying (outside of prefix):')
for path in self.compact(self._refuse):
logger.notify(path)
if response == 'y':
self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
prefix='pip-')
for path in paths:
new_path = self._stash(path)
logger.info('Removing file or directory %s' % path)
self._moved_paths.append(path)
renames(path, new_path)
for pth in self.pth.values():
pth.remove()
logger.notify('Successfully uninstalled %s' % self.dist.project_name)
finally:
logger.indent -= 2
def rollback(self):
"""Rollback the changes previously made by remove()."""
if self.save_dir is None:
logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
return False
logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
for path in self._moved_paths:
tmp_path = self._stash(path)
logger.info('Replacing %s' % path)
renames(tmp_path, path)
for pth in self.pth.values():
pth.rollback()
def commit(self):
"""Remove temporary save dir: rollback will no longer be possible."""
if self.save_dir is not None:
rmtree(self.save_dir)
self.save_dir = None
self._moved_paths = []
class UninstallPthEntries(object):
def __init__(self, pth_file):
if not os.path.isfile(pth_file):
raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
self.file = pth_file
self.entries = set()
self._saved_lines = None
def add(self, entry):
entry = os.path.normcase(entry)
# On Windows, os.path.normcase converts the entry to use
# backslashes. This is correct for entries that describe absolute
# paths outside of site-packages, but all the others use forward
# slashes.
if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]:
entry = entry.replace('\\', '/')
self.entries.add(entry)
def remove(self):
logger.info('Removing pth entries from %s:' % self.file)
fh = open(self.file, 'rb')
# Windows .pth files use '\r\n' under py3k but '\n' under py2.x; detect which
lines = fh.readlines()
self._saved_lines = lines
fh.close()
if any(b('\r\n') in line for line in lines):
endline = '\r\n'
else:
endline = '\n'
for entry in self.entries:
try:
logger.info('Removing entry: %s' % entry)
lines.remove(b(entry + endline))
except ValueError:
pass
fh = open(self.file, 'wb')
fh.writelines(lines)
fh.close()
def rollback(self):
if self._saved_lines is None:
logger.error('Cannot roll back changes to %s, none were made' % self.file)
return False
logger.info('Rolling %s back to previous state' % self.file)
fh = open(self.file, 'wb')
fh.writelines(self._saved_lines)
fh.close()
return True
class FakeFile(object):
"""Wrap a list of lines in an object with readline() to make
ConfigParser happy."""
def __init__(self, lines):
self._gen = (l for l in lines)
def readline(self):
try:
try:
return next(self._gen)
except NameError:
return self._gen.next()
except StopIteration:
return ''
def __iter__(self):
return self._gen
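# Usage sketch (illustrative): FakeFile lets ConfigParser.readfp() consume
# an in-memory list of lines.
#
#   >>> ff = FakeFile(['[metadata]\n', 'name = example\n'])
#   >>> ff.readline()
#   '[metadata]\n'
#   >>> ff.readline()
#   'name = example\n'
#   >>> ff.readline()
#   ''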
|
lgpl-3.0
|
hdinsight/hue
|
desktop/core/ext-py/Django-1.6.10/django/conf/locale/de/formats.py
|
118
|
1104
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. F Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
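# Rendering sketch (outputs assume value = datetime.date(2006, 10, 25) and
# Django's 'de' translations active; the thousand-separated number also
# assumes USE_L10N and USE_THOUSAND_SEPARATOR are enabled):
#
#   {{ value|date:"DATE_FORMAT" }}        -> '25. Oktober 2006'
#   {{ value|date:"SHORT_DATE_FORMAT" }}  -> '25.10.2006'
#   1234567.89 with ',' decimal and '.' grouping -> '1.234.567,89'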
|
apache-2.0
|
ndtran/l10n-switzerland
|
l10n_ch_account_statement_base_import/parser/g11_file_parser.py
|
1
|
8249
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Steve Ferry
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
import time
import logging
import uuid
from openerp import fields, _
from .base_parser import BaseSwissParser
_logger = logging.getLogger(__name__)
class G11Parser(BaseSwissParser):
"""
Parser for BVR DD type 2 Postfinance Statements
(can be wrapped in a g11 file)
"""
_ftype = 'g11'
def __init__(self, data_file):
"""Constructor
Splits data_file into lines.
"""
super(G11Parser, self).__init__(data_file)
self.lines = data_file.splitlines()
self.reject_reason = {
'01': _("Insufficient cover funds."),
'02': _("Debtor protestation."),
'03': _("Debtor’s account number and address do not match."),
'04': _("Postal account closed."),
'05': _("Postal account blocked/frozen."),
'06': _("Postal account holder deceased."),
'07': _("Postal account number non-existent.")
}
self.balance_end = 0.0
def ftype(self):
"""Gives the type of file we want to import
:return: imported file type
:rtype: string
"""
return super(G11Parser, self).ftype()
def get_currency(self):
"""Returns the ISO currency code of the parsed file
:return: The ISO currency code of the parsed file eg: CHF
:rtype: string
"""
return super(G11Parser, self).get_currency()
def get_account_number(self):
"""Return the account_number related to parsed file
:return: The account number of the parsed file
:rtype: string
"""
return super(G11Parser, self).get_account_number()
def get_statements(self):
"""Return the list of bank statement dict.
Bank statements data: list of dict containing
(optional items marked by o) :
- 'name': string (e.g: '000000123')
- 'date': date (e.g: 2013-06-26)
-o 'balance_start': float (e.g: 8368.56)
-o 'balance_end_real': float (e.g: 8888.88)
- 'transactions': list of dict containing :
- 'name': string
(e.g: 'KBC-INVESTERINGSKREDIET 787-5562831-01')
- 'date': date
- 'amount': float
- 'unique_import_id': string
-o 'account_number': string
Will be used to find/create the res.partner.bank in odoo
-o 'note': string
-o 'partner_name': string
-o 'ref': string
:return: a list of statement
:rtype: list
"""
return super(G11Parser, self).get_statements()
def file_is_known(self):
"""Predicate the tells if the parser can parse the data file
:return: True if file is supported
:rtype: bool
"""
return self.lines[-1][0:3] == '097'
def _parse_currency_code(self):
"""Parse file currency ISO code
:return: the currency ISO code of the file eg: CHF
:rtype: string
"""
return self.lines[-1][128:131]
def _parse_statement_balance_end(self, line=None):
"""Parse file start and end balance
:return: the file end balance
:rtype: float
"""
total_line = line or self.lines[-1]
return ((float(total_line[45:57]) / 100) -
(float(total_line[101:113]) / 100))
def _parse_transactions(self):
"""Parse bank statement lines from file
list of dict containing :
- 'name': string (e.g: 'KBC-INVESTERINGSKREDIET 787-5562831-01')
- 'date': date
- 'amount': float
- 'unique_import_id': string
-o 'account_number': string
Will be used to find/create the res.partner.bank in odoo
-o 'note': string
-o 'partner_name': string
-o 'ref': string
:return: a list of transactions
:rtype: list
"""
transactions = []
for line in self.lines[:-1]:
if line[0:3] != '097':
ref = line[15:42]
currency = line[42:45]
amount = float(line[45:57]) / 100
transaction_date = time.strftime(
'%Y-%m-%d', time.strptime(line[108:116], '%Y%m%d'))
# commission = float(line[141:147]) / 100
note = ''
if line[0:3] == '084':
# Fail / Debit record
reject_code = line[128:130]
if reject_code == '02':
# Debit record
amount *= -1
note = self.reject_reason[reject_code]
else:
# Failed transactions. Get the error reason and
# put it on the OBI field.
note = self.reject_reason[
reject_code] + '\n' + _(
"Amount to debit was %s %f") % (
currency, amount)
amount = 0.0
# Add information to OBI if the transaction is a test.
if line[5] == '3':
note = _("-- Test transaction --") + '\n' + note
transactions.append({
'name': '/',
'ref': ref,
'unique_import_id': str(uuid.uuid4()),
'amount': amount,
'date': transaction_date,
'note': note,
})
else:
self.balance_end += self._parse_statement_balance_end(line)
return transactions
def validate(self):
"""Validate the bank statement
:param total_line: Last line in the g11 file. Beginning with '097'
:return: Boolean
"""
total_line = self.lines[-1]
transactions = 0
transactions += int(
total_line[57:69]) + int(
total_line[89:101]) + int(
total_line[113:125])
return (len(self.statements[0]['transactions']) == transactions)
def _parse_statement_date(self):
"""Parse file statement date
:return: A date usable by Odoo in write or create dict
"""
date = datetime.date.today()
return fields.Date.to_string(date)
def _parse(self):
"""
Run the parser over the g11 file.
"""
self.currency_code = self._parse_currency_code()
statement = {}
self.balance_end = self._parse_statement_balance_end()
statement['balance_start'] = 0.0
statement['date'] = self._parse_statement_date()
statement['attachments'] = []
statement['transactions'] = self._parse_transactions()
statement['balance_end_real'] = self.balance_end
self.statements.append(statement)
return self.validate()
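# Worked example of the fixed-width arithmetic above (illustrative field
# value): amount fields are 12 digits expressed in hundredths, so
#
#   float('000000012345') / 100  ->  123.45
#
# and the end balance of a total record is credits minus debits:
#
#   (float(total_line[45:57]) - float(total_line[101:113])) / 100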
|
agpl-3.0
|
holly/gistcli
|
lib/gistcli/argparser.py
|
1
|
6704
|
#!/usr/bin/env python
# vim:fileencoding=utf-8
from argparse import ArgumentParser, FileType
import time
import warnings
import os, sys, io
import signal
class ArgParser(object):
def __init__(self, description, version):
self.__description = description
self.__version = version
self.__parser = None
self.__initialized()
@property
def description(self):
return self.__description
@property
def version(self):
return self.__version
@property
def parser(self):
return self.__parser
def print_help(self):
self.__parser.print_help()
def args(self):
return self.__parser.parse_args()
def __initialized(self):
parser = ArgumentParser(description=self.description)
parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + self.version)
subparsers = parser.add_subparsers(help='sub-command help', dest='subparser_name')
list_parser = subparsers.add_parser('list', help='list help')
list_parser.add_argument('--user', '-u', action='store', metavar='USER', help='github your account name')
list_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist')
list_parser.add_argument('--number', '-n', action='store_true', help='number of your gists')
list_parser.add_argument('--no-headers', action='store_true', help='print no header line at all')
list_parser.add_argument('--verbose', action='store_true', help='verbose output')
show_parser = subparsers.add_parser('show', help='show help')
show_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist')
show_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id')
show_parser.add_argument('--verbose', action='store_true', help='verbose output')
fetch_parser = subparsers.add_parser('fetch', help='fetch help')
fetch_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist')
fetch_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id')
fetch_parser.add_argument('--download-dir', '-d', action='store', metavar='DOWNLOAD_DIR', help='download directory')
fetch_parser.add_argument('--type', '-t', action='store', default="git", metavar='DOWNLOAD_TYPE', choices=['git', 'tarball', 'zip'], help='gist fetch download type (default: git; other types are tarball and zip)')
fetch_parser.add_argument('--verbose', action='store_true', help='verbose output')
post_parser = subparsers.add_parser('post', help='post help')
post_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token')
post_parser.add_argument('--name', '-n', action='store', metavar='FILE_NAME', help='gist file name')
post_parser.add_argument('--description', '-d', action='store', metavar='DESCRIPTION', help='gist file description')
post_parser.add_argument('--private', '-p', action='store_true', help='private gist')
post_parser.add_argument('--verbose', action='store_true', help='verbose output')
post_parser.add_argument('infile', type=FileType("r"), nargs="*", default=sys.stdin, metavar='INFILE', help='post target file or stdin data')
update_parser = subparsers.add_parser('update', help='update help')
update_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token')
update_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id')
update_parser.add_argument('--name', '-n', action='store', metavar='FILE_NAME', help='gist file name')
update_parser.add_argument('--description', '-d', action='store', metavar='DESCRIPTION', help='gist file description')
update_parser.add_argument('--private', '-p', action='store_true', help='private gist')
update_parser.add_argument('--verbose', action='store_true', help='verbose output')
update_parser.add_argument('infile', type=FileType("r"), nargs="*", default=sys.stdin, metavar='INFILE', help='update target file or stdin data')
delete_parser = subparsers.add_parser('delete', help='delete help')
delete_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token')
delete_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id')
delete_parser.add_argument('--verbose', action='store_true', help='verbose output')
#show_from_name_parser = subparsers.add_parser('show_from_name', help='show_from_name help')
#show_from_name_parser.add_argument('--user', '-u', action='store', metavar='USER', help='github your account name')
#show_from_name_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist')
#show_from_name_parser.add_argument('--name', '-n', action='store', required=True, metavar='FILE_NAME', help='gist file name')
#show_from_name_parser.add_argument('--verbose', action='store_true', help='verbose output')
#
#fetch_from_name_parser = subparsers.add_parser('fetch_from_name', help='fetch_from_name help')
#fetch_from_name_parser.add_argument('--user', '-u', action='store', metavar='USER', help='github your account name')
#fetch_from_name_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist')
#fetch_from_name_parser.add_argument('--name', '-n', action='store', required=True, metavar='FILE_NAME', help='gist file name')
#fetch_from_name_parser.add_argument('--output', '-o', type=FileType('w'), metavar='FILE_NAME', help='write to FILE instead of stdout')
#fetch_from_name_parser.add_argument('--remote-name', '-O', action='store_true', help='write output to a file named as the remote file')
#fetch_from_name_parser.add_argument('--add-executable', '-x', action='store_true', help='add executable mode. enable --output or --remote-name option')
#fetch_from_name_parser.add_argument('--verbose', action='store_true', help='verbose output')
#args = parser.parse_args()
self.__parser = parser
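# Usage sketch (illustrative):
#
#   parser = ArgParser('gist command line client', '1.0')
#   args = parser.args()               # parses sys.argv
#   if args.subparser_name == 'list':  # dest given to add_subparsers()
#       print(args.user, args.number)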
|
mit
|
SUNET/eduid-idproofing-letter
|
src/idproofing_letter/app.py
|
1
|
1737
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from flask import Flask
from eduid_common.api.logging import init_logging
from eduid_common.api.exceptions import init_exception_handlers
from eduid_userdb import UserDB
from eduid_userdb.proofing import LetterProofingStateDB
from idproofing_letter.ekopost import Ekopost
from idproofing_letter.msg import init_celery
__author__ = 'lundberg'
def init_idproofing_letter_app(name, config=None):
"""
:param name: The name of the instance; it affects which configuration is loaded.
:param config: any additional configuration settings. Especially useful
in test cases
:type name: str
:type config: dict
:return: the flask app
:rtype: flask.Flask
"""
app = Flask(name, static_folder=None)
# Load configuration
app.config.from_object('idproofing_letter.settings.common')
app.config.from_envvar('IDPROOFING_LETTER_SETTINGS', silent=True)
if config:
app.config.update(config)
# Setup logging
app = init_logging(app)
# Setup exception handling
app = init_exception_handlers(app)
# Register views
from idproofing_letter.views import idproofing_letter_views
app.register_blueprint(idproofing_letter_views)
# Init dbs
app.central_userdb = UserDB(app.config['MONGO_URI'], 'eduid_am')
app.proofing_statedb = LetterProofingStateDB(app.config['MONGO_URI'])
# Init celery
init_celery(app)
# Initiate external modules
app.ekopost = Ekopost(app)
# Check for secret key
if app.config['SECRET_KEY'] is None:
app.logger.error('Missing SECRET_KEY in the settings file')
app.logger.info('Application initialized')
return app
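# Usage sketch (assumes a reachable MongoDB and celery broker; the config
# keys shown are illustrative, not a complete settings file):
#
#   app = init_idproofing_letter_app('idproofing_letter', config={
#       'MONGO_URI': 'mongodb://localhost:27017/',
#       'SECRET_KEY': 'not-for-production',
#   })
#   app.run(debug=True)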
|
bsd-3-clause
|
xin3liang/platform_external_chromium_org
|
third_party/protobuf/python/google/protobuf/internal/containers.py
|
224
|
10004
|
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains container classes to represent different protocol buffer types.
This file defines container classes which represent categories of protocol
buffer field types which need extra maintenance. Currently these categories
are:
- Repeated scalar fields - These are all repeated fields which aren't
composite (e.g. they are of simple types like int32, string, etc).
- Repeated composite fields - Repeated fields which are composite. This
includes groups and nested messages.
"""
__author__ = '[email protected] (Petar Petrov)'
class BaseContainer(object):
"""Base container class."""
# Minimizes memory usage and disallows assignment to other attributes.
__slots__ = ['_message_listener', '_values']
def __init__(self, message_listener):
"""
Args:
message_listener: A MessageListener implementation.
The RepeatedScalarFieldContainer will call this object's
Modified() method when it is modified.
"""
self._message_listener = message_listener
self._values = []
def __getitem__(self, key):
"""Retrieves item by the specified key."""
return self._values[key]
def __len__(self):
"""Returns the number of elements in the container."""
return len(self._values)
def __ne__(self, other):
"""Checks if another instance isn't equal to this one."""
# The concrete classes should define __eq__.
return not self == other
def __hash__(self):
raise TypeError('unhashable object')
def __repr__(self):
return repr(self._values)
def sort(self, *args, **kwargs):
# Continue to support the old sort_function keyword argument.
# This is expected to be a rare occurrence, so use LBYL to avoid
# the overhead of actually catching KeyError.
if 'sort_function' in kwargs:
kwargs['cmp'] = kwargs.pop('sort_function')
self._values.sort(*args, **kwargs)
class RepeatedScalarFieldContainer(BaseContainer):
"""Simple, type-checked, list-like container for holding repeated scalars."""
# Disallows assignment to other attributes.
__slots__ = ['_type_checker']
def __init__(self, message_listener, type_checker):
"""
Args:
message_listener: A MessageListener implementation.
The RepeatedScalarFieldContainer will call this object's
Modified() method when it is modified.
type_checker: A type_checkers.ValueChecker instance to run on elements
inserted into this container.
"""
super(RepeatedScalarFieldContainer, self).__init__(message_listener)
self._type_checker = type_checker
def append(self, value):
"""Appends an item to the list. Similar to list.append()."""
self._type_checker.CheckValue(value)
self._values.append(value)
if not self._message_listener.dirty:
self._message_listener.Modified()
def insert(self, key, value):
"""Inserts the item at the specified position. Similar to list.insert()."""
self._type_checker.CheckValue(value)
self._values.insert(key, value)
if not self._message_listener.dirty:
self._message_listener.Modified()
def extend(self, elem_seq):
"""Extends by appending the given sequence. Similar to list.extend()."""
if not elem_seq:
return
new_values = []
for elem in elem_seq:
self._type_checker.CheckValue(elem)
new_values.append(elem)
self._values.extend(new_values)
self._message_listener.Modified()
def MergeFrom(self, other):
"""Appends the contents of another repeated field of the same type to this
one. We do not check the types of the individual fields.
"""
self._values.extend(other._values)
self._message_listener.Modified()
def remove(self, elem):
"""Removes an item from the list. Similar to list.remove()."""
self._values.remove(elem)
self._message_listener.Modified()
def __setitem__(self, key, value):
"""Sets the item on the specified position."""
self._type_checker.CheckValue(value)
self._values[key] = value
self._message_listener.Modified()
def __getslice__(self, start, stop):
"""Retrieves the subset of items from between the specified indices."""
return self._values[start:stop]
def __setslice__(self, start, stop, values):
"""Sets the subset of items from between the specified indices."""
new_values = []
for value in values:
self._type_checker.CheckValue(value)
new_values.append(value)
self._values[start:stop] = new_values
self._message_listener.Modified()
def __delitem__(self, key):
"""Deletes the item at the specified position."""
del self._values[key]
self._message_listener.Modified()
def __delslice__(self, start, stop):
"""Deletes the subset of items from between the specified indices."""
del self._values[start:stop]
self._message_listener.Modified()
def __eq__(self, other):
"""Compares the current instance with another one."""
if self is other:
return True
# Special case for the same type which should be common and fast.
if isinstance(other, self.__class__):
return other._values == self._values
# We are presumably comparing against some other sequence type.
return other == self._values
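# Minimal sketch of the collaborators this container expects (the listener
# and checker classes below are illustrative stand-ins, not protobuf
# internals):
#
#   class _Listener(object):
#       dirty = False
#       def Modified(self):
#           self.dirty = True
#
#   class _IntChecker(object):
#       def CheckValue(self, value):
#           if not isinstance(value, int):
#               raise TypeError('int expected, got %r' % (value,))
#
#   c = RepeatedScalarFieldContainer(_Listener(), _IntChecker())
#   c.append(1)
#   c.extend([2, 3])   # type-checked; listener is marked Modified()
#   c[:]               # -> [1, 2, 3]
#   c.append('x')      # -> TypeError from the checker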
class RepeatedCompositeFieldContainer(BaseContainer):
"""Simple, list-like container for holding repeated composite fields."""
# Disallows assignment to other attributes.
__slots__ = ['_message_descriptor']
def __init__(self, message_listener, message_descriptor):
"""
Note that we pass in a descriptor instead of the generated class directly,
since at the time we construct a _RepeatedCompositeFieldContainer we
haven't yet necessarily initialized the type that will be contained in the
container.
Args:
message_listener: A MessageListener implementation.
The RepeatedCompositeFieldContainer will call this object's
Modified() method when it is modified.
message_descriptor: A Descriptor instance describing the protocol type
that should be present in this container. We'll use the
_concrete_class field of this descriptor when the client calls add().
"""
super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
self._message_descriptor = message_descriptor
def add(self, **kwargs):
"""Adds a new element at the end of the list and returns it. Keyword
arguments may be used to initialize the element.
"""
new_element = self._message_descriptor._concrete_class(**kwargs)
new_element._SetListener(self._message_listener)
self._values.append(new_element)
if not self._message_listener.dirty:
self._message_listener.Modified()
return new_element
def extend(self, elem_seq):
"""Extends by appending the given sequence of elements of the same type
as this one, copying each individual message.
"""
message_class = self._message_descriptor._concrete_class
listener = self._message_listener
values = self._values
for message in elem_seq:
new_element = message_class()
new_element._SetListener(listener)
new_element.MergeFrom(message)
values.append(new_element)
listener.Modified()
def MergeFrom(self, other):
"""Appends the contents of another repeated field of the same type to this
one, copying each individual message.
"""
self.extend(other._values)
def remove(self, elem):
"""Removes an item from the list. Similar to list.remove()."""
self._values.remove(elem)
self._message_listener.Modified()
def __getslice__(self, start, stop):
"""Retrieves the subset of items from between the specified indices."""
return self._values[start:stop]
def __delitem__(self, key):
"""Deletes the item at the specified position."""
del self._values[key]
self._message_listener.Modified()
def __delslice__(self, start, stop):
"""Deletes the subset of items from between the specified indices."""
del self._values[start:stop]
self._message_listener.Modified()
def __eq__(self, other):
"""Compares the current instance with another one."""
if self is other:
return True
if not isinstance(other, self.__class__):
raise TypeError('Can only compare repeated composite fields against '
'other repeated composite fields.')
return self._values == other._values
|
bsd-3-clause
|
malcolmpl/fleetpanel
|
JumpBridge.py
|
1
|
1343
|
#!/usr/bin/python
# vim:sw=4:softtabstop=4:expandtab:set fileencoding=ISO8859-2
#
# JumpBridge.py, part of the FleetPanel
#
# Copyright (c) 2008-2009 Paweł 'Reef' Polewicz
# All rights reserved.
#
# This software is licensed as described in the file LICENSE, which
# you should have received as part of this distribution. The terms
# are also available at http://www.opensource.org/licenses/mit-license.php.
class JumpBridge:
""" class representing a Jump Bridge between two systems. The order of from/to is irrelevant """
def __init__(self, sys_from, planet_from, moon_from, sys_to, planet_to, moon_to, owner, password, comment=""):
self.sys_from = sys_from
self.planet_from = planet_from
self.moon_from = moon_from
self.sys_to = sys_to
self.planet_to = planet_to
self.moon_to = moon_to
self.owner = owner
self.password = password
self.comment = comment
def exact_to(self):
return self.planet_to + "-" + self.moon_to
def exact_from(self):
return self.planet_from + "-" + self.moon_from
def __contains__(self, item):
return self.sys_from==item or self.sys_to==item
def other_side_than(self, sys):
if self.sys_from==sys:
return self.sys_to
else:
return self.sys_from
#
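# Example (illustrative system names):
#
#   jb = JumpBridge('VFK-IV', 'VII', '5', '4-07MU', 'III', '1',
#                   'SomeAlliance', 'secret', 'staging route')
#   'VFK-IV' in jb                # True, via __contains__
#   jb.other_side_than('VFK-IV')  # '4-07MU'
#   jb.exact_from()               # 'VII-5'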
|
mit
|
lisa-lab/pylearn2
|
doc/conf.py
|
2
|
6651
|
# -*- coding: utf-8 -*-
#
# pylearn2 documentation build configuration file
# It is based on Theano documentation build
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('some/directory'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'numpydoc',
'sphinx.ext.autosummary'] #, 'ext']
#Needed otherwise, there is many autosummary error done by numpydo:
#https://github.com/phn/pytpm/issues/3#issuecomment-12133978
numpydoc_show_class_members = False
todo_include_todos = True
# We do it like this to support multiple sphinx version without having warning.
# Our buildbot consider warning as error.
try:
from sphinx.ext import imgmath
extensions.append('sphinx.ext.imgmath')
except ImportError:
try:
from sphinx.ext import pngmath
extensions.append('sphinx.ext.pngmath')
except ImportError:
pass
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'Pylearn2'
copyright = '2011-2015, LISA lab'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = 'dev'
# The full version, including alpha/beta/rc tags.
release = 'dev'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
exclude_dirs = ['images', 'scripts', 'sandbox']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
#html_style = 'default.css'
html_theme = 'solar'
html_theme_path = ["./themes"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (within the static path) to place at the top of
# the sidebar.
#html_logo = 'images/theano_logo-200x67.png'
#html_logo = 'images/theano_logo_allblue_200x46.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = [] # '.static', 'images']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'theanodoc'
# Options for LaTeX output
# ------------------------
latex_elements = {
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '11pt',
# Additional stuff for the LaTeX preamble.
#latex_preamble = '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'pylearn2.tex', 'Pylearn2 Documentation',
'LISA lab, University of Montreal', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = 'images/snake_theta2-trans.png'
latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
bsd-3-clause
|
zooba/PTVS
|
Python/Product/Miniconda/Miniconda3-x64/Lib/_osx_support.py
|
16
|
19138
|
"""Shared OS X support functions."""
import os
import re
import sys
__all__ = [
'compiler_fixup',
'customize_config_vars',
'customize_compiler',
'get_platform_osx',
]
# configuration variables that may contain universal build flags,
# like "-arch" or "-isdkroot", that may need customization for
# the user environment
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
# configuration variables that may contain compiler calls
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
# prefix added to original configuration variable names
_INITPRE = '_OSX_SUPPORT_INITIAL_'
def _find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
if path is None:
path = os.environ['PATH']
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f
return None
else:
return executable
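# Editor's illustrative sketch (hypothetical paths, not in the original
# module): on a POSIX host with a default PATH,
#
#     _find_executable('ls')                # e.g. '/bin/ls'
#     _find_executable('no-such-tool')      # -> None
#     _find_executable('cc', path='/usr/bin:/usr/local/bin')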
def _read_output(commandstring):
"""Output from successful command execution or None"""
# Similar to os.popen(commandstring, "r").read(),
# but without actually using os.popen because that
# function is not usable during python bootstrap.
# tempfile is also not available then.
import contextlib
try:
import tempfile
fp = tempfile.NamedTemporaryFile()
except ImportError:
fp = open("/tmp/_osx_support.%s"%(
os.getpid(),), "w+b")
with contextlib.closing(fp) as fp:
cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
def _find_build_tool(toolname):
"""Find a build tool on current path or using xcrun"""
return (_find_executable(toolname)
or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
or ''
)
_SYSTEM_VERSION = None
def _get_system_version():
"""Return the OS X system version as a string"""
# Reading this plist is a documented way to get the system
# version (see the documentation for the Gestalt Manager)
# We avoid using platform.mac_ver to avoid possible bootstrap issues during
# the build of Python itself (distutils is used to build standard library
# extensions).
global _SYSTEM_VERSION
if _SYSTEM_VERSION is None:
_SYSTEM_VERSION = ''
try:
f = open('/System/Library/CoreServices/SystemVersion.plist')
except OSError:
# We're on a plain darwin box, fall back to the default
# behaviour.
pass
else:
try:
m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
r'<string>(.*?)</string>', f.read())
finally:
f.close()
if m is not None:
_SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
# else: fall back to the default behaviour
return _SYSTEM_VERSION
def _remove_original_values(_config_vars):
"""Remove original unmodified values for testing"""
# This is needed for higher-level cross-platform tests of get_platform.
for k in list(_config_vars):
if k.startswith(_INITPRE):
del _config_vars[k]
def _save_modified_value(_config_vars, cv, newvalue):
"""Save modified and original unmodified value of configuration var"""
oldvalue = _config_vars.get(cv, '')
if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
_config_vars[_INITPRE + cv] = oldvalue
_config_vars[cv] = newvalue
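# Editor's illustrative sketch (not in the original module): the pristine
# value is kept once under the _INITPRE-prefixed key and never overwritten
# by later modifications:
#
#     cfg = {'CFLAGS': '-arch ppc -O2'}
#     _save_modified_value(cfg, 'CFLAGS', '-O2')
#     cfg['_OSX_SUPPORT_INITIAL_CFLAGS']   # -> '-arch ppc -O2'
#     cfg['CFLAGS']                        # -> '-O2'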
def _supports_universal_builds():
"""Returns True if universal builds are supported on this system"""
# As an approximation, we assume that if we are running on 10.4 or above,
# then we are running with an Xcode environment that supports universal
# builds, in particular -isysroot and -arch arguments to the compiler. This
# is in support of allowing 10.4 universal builds to run on 10.3.x systems.
osx_version = _get_system_version()
if osx_version:
try:
osx_version = tuple(int(i) for i in osx_version.split('.'))
except ValueError:
osx_version = ''
return bool(osx_version >= (10, 4)) if osx_version else False
def _find_appropriate_compiler(_config_vars):
"""Find appropriate C compiler for extension module builds"""
# Issue #13590:
# The OSX location for the compiler varies between OSX
# (or rather Xcode) releases. With older releases (up-to 10.5)
# the compiler is in /usr/bin, with newer releases the compiler
# can only be found inside Xcode.app if the "Command Line Tools"
# are not installed.
#
# Furthermore, the compiler that can be used varies between
# Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
# as the compiler, after that 'clang' should be used because
# gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
# miscompiles Python.
# skip checks if the compiler was overridden with a CC env variable
if 'CC' in os.environ:
return _config_vars
# The CC config var might contain additional arguments.
# Ignore them while searching.
cc = oldcc = _config_vars['CC'].split()[0]
if not _find_executable(cc):
# Compiler is not found on the shell search PATH.
        # Now search for clang, first on PATH (if the Command Line
# Tools have been installed in / or if the user has provided
# another location via CC). If not found, try using xcrun
# to find an uninstalled clang (within a selected Xcode).
# NOTE: Cannot use subprocess here because of bootstrap
# issues when building Python itself (and os.popen is
# implemented on top of subprocess and is therefore not
# usable as well)
cc = _find_build_tool('clang')
elif os.path.basename(cc).startswith('gcc'):
# Compiler is GCC, check if it is LLVM-GCC
data = _read_output("'%s' --version"
% (cc.replace("'", "'\"'\"'"),))
if data and 'llvm-gcc' in data:
# Found LLVM-GCC, fall back to clang
cc = _find_build_tool('clang')
if not cc:
raise SystemError(
"Cannot locate working compiler")
if cc != oldcc:
# Found a replacement compiler.
# Modify config vars using new compiler, if not already explicitly
# overridden by an env variable, preserving additional arguments.
for cv in _COMPILER_CONFIG_VARS:
if cv in _config_vars and cv not in os.environ:
cv_split = _config_vars[cv].split()
cv_split[0] = cc if cv != 'CXX' else cc + '++'
_save_modified_value(_config_vars, cv, ' '.join(cv_split))
return _config_vars
def _remove_universal_flags(_config_vars):
"""Remove all universal build arguments from config vars"""
for cv in _UNIVERSAL_CONFIG_VARS:
# Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
flags = re.sub('-isysroot [^ \t]*', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _remove_unsupported_archs(_config_vars):
"""Remove any unsupported archs from config vars"""
# Different Xcode releases support different sets for '-arch'
# flags. In particular, Xcode 4.x no longer supports the
# PPC architectures.
#
# This code automatically removes '-arch ppc' and '-arch ppc64'
# when these are not supported. That makes it possible to
# build extensions on OSX 10.7 and later with the prebuilt
# 32-bit installer on the python.org website.
# skip checks if the compiler was overridden with a CC env variable
if 'CC' in os.environ:
return _config_vars
if re.search(r'-arch\s+ppc', _config_vars['CFLAGS']) is not None:
# NOTE: Cannot use subprocess here because of bootstrap
# issues when building Python itself
status = os.system(
"""echo 'int main{};' | """
"""'%s' -c -arch ppc -x c -o /dev/null /dev/null 2>/dev/null"""
%(_config_vars['CC'].replace("'", "'\"'\"'"),))
if status:
# The compile failed for some reason. Because of differences
# across Xcode and compiler versions, there is no reliable way
# to be sure why it failed. Assume here it was due to lack of
# PPC support and remove the related '-arch' flags from each
# config variables not explicitly overridden by an environment
# variable. If the error was for some other reason, we hope the
# failure will show up again when trying to compile an extension
# module.
for cv in _UNIVERSAL_CONFIG_VARS:
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-arch\s+ppc\w*\s', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _override_all_archs(_config_vars):
"""Allow override of all archs with ARCHFLAGS env var"""
# NOTE: This name was introduced by Apple in OSX 10.5 and
# is used by several scripting languages distributed with
# that OS release.
if 'ARCHFLAGS' in os.environ:
arch = os.environ['ARCHFLAGS']
for cv in _UNIVERSAL_CONFIG_VARS:
if cv in _config_vars and '-arch' in _config_vars[cv]:
flags = _config_vars[cv]
flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = flags + ' ' + arch
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _check_for_unavailable_sdk(_config_vars):
"""Remove references to any SDKs not available"""
# If we're on OSX 10.5 or later and the user tries to
# compile an extension using an SDK that is not present
# on the current machine it is better to not use an SDK
# than to fail. This is particularly important with
# the standalone Command Line Tools alternative to a
# full-blown Xcode install since the CLT packages do not
# provide SDKs. If the SDK is not present, it is assumed
# that the header files and dev libs have been installed
# to /usr and /System/Library by either a standalone CLT
# package or the CLT component within Xcode.
cflags = _config_vars.get('CFLAGS', '')
m = re.search(r'-isysroot\s+(\S+)', cflags)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
for cv in _UNIVERSAL_CONFIG_VARS:
# Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def compiler_fixup(compiler_so, cc_args):
"""
This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one of them in extra_compile_flags.
This is needed because '-arch ARCH' adds another architecture to the
build, without a way to remove an architecture. Furthermore GCC will
barf if multiple '-isysroot' arguments are present.
"""
stripArch = stripSysroot = False
compiler_so = list(compiler_so)
if not _supports_universal_builds():
        # OS X before 10.4.0 doesn't support -arch and -isysroot at
        # all.
stripArch = stripSysroot = True
else:
stripArch = '-arch' in cc_args
stripSysroot = '-isysroot' in cc_args
if stripArch or 'ARCHFLAGS' in os.environ:
while True:
try:
index = compiler_so.index('-arch')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
break
if 'ARCHFLAGS' in os.environ and not stripArch:
# User specified different -arch flags in the environ,
# see also distutils.sysconfig
compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
if stripSysroot:
while True:
try:
index = compiler_so.index('-isysroot')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
break
# Check if the SDK that is used during compilation actually exists,
# the universal build requires the usage of a universal SDK and not all
# users have that installed by default.
sysroot = None
if '-isysroot' in cc_args:
idx = cc_args.index('-isysroot')
sysroot = cc_args[idx+1]
elif '-isysroot' in compiler_so:
idx = compiler_so.index('-isysroot')
sysroot = compiler_so[idx+1]
if sysroot and not os.path.isdir(sysroot):
from distutils import log
log.warn("Compiling with an SDK that doesn't seem to exist: %s",
sysroot)
log.warn("Please check your Xcode installation")
return compiler_so
def customize_config_vars(_config_vars):
"""Customize Python build configuration variables.
Called internally from sysconfig with a mutable mapping
containing name/value pairs parsed from the configured
makefile used to build this interpreter. Returns
the mapping updated as needed to reflect the environment
in which the interpreter is running; in the case of
a Python from a binary installer, the installed
environment may be very different from the build
environment, i.e. different OS levels, different
    build tools, different available CPU architectures.
This customization is performed whenever
distutils.sysconfig.get_config_vars() is first
called. It may be used in environments where no
compilers are present, i.e. when installing pure
Python dists. Customization of compiler paths
and detection of unavailable archs is deferred
until the first extension module build is
requested (in distutils.sysconfig.customize_compiler).
Currently called from distutils.sysconfig
"""
if not _supports_universal_builds():
# On Mac OS X before 10.4, check if -arch and -isysroot
# are in CFLAGS or LDFLAGS and remove them if they are.
# This is needed when building extensions on a 10.3 system
# using a universal build of python.
_remove_universal_flags(_config_vars)
# Allow user to override all archs with ARCHFLAGS env var
_override_all_archs(_config_vars)
# Remove references to sdks that are not found
_check_for_unavailable_sdk(_config_vars)
return _config_vars
def customize_compiler(_config_vars):
"""Customize compiler path and configuration variables.
This customization is performed when the first
extension module build is requested
in distutils.sysconfig.customize_compiler).
"""
# Find a compiler to use for extension module builds
_find_appropriate_compiler(_config_vars)
# Remove ppc arch flags if not supported here
_remove_unsupported_archs(_config_vars)
# Allow user to override all archs with ARCHFLAGS env var
_override_all_archs(_config_vars)
return _config_vars
def get_platform_osx(_config_vars, osname, release, machine):
"""Filter values for get_platform()"""
# called from get_platform() in sysconfig and distutils.util
#
# For our purposes, we'll assume that the system version from
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
# to. This makes the compatibility story a bit more sane because the
# machine is going to compile and link as if it were
# MACOSX_DEPLOYMENT_TARGET.
macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
macrelease = _get_system_version() or macver
macver = macver or macrelease
if macver:
release = macver
osname = "macosx"
# Use the original CFLAGS value, if available, so that we
# return the same machine type for the platform string.
# Otherwise, distutils may consider this a cross-compiling
# case and disallow installs.
cflags = _config_vars.get(_INITPRE+'CFLAGS',
_config_vars.get('CFLAGS', ''))
if macrelease:
try:
macrelease = tuple(int(i) for i in macrelease.split('.')[0:2])
except ValueError:
macrelease = (10, 0)
else:
# assume no universal support
macrelease = (10, 0)
if (macrelease >= (10, 4)) and '-arch' in cflags.strip():
# The universal build will build fat binaries, but not on
# systems before 10.4
machine = 'fat'
archs = re.findall(r'-arch\s+(\S+)', cflags)
archs = tuple(sorted(set(archs)))
if len(archs) == 1:
machine = archs[0]
elif archs == ('i386', 'ppc'):
machine = 'fat'
elif archs == ('i386', 'x86_64'):
machine = 'intel'
elif archs == ('i386', 'ppc', 'x86_64'):
machine = 'fat3'
elif archs == ('ppc64', 'x86_64'):
machine = 'fat64'
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
machine = 'universal'
else:
raise ValueError(
"Don't know machine value for archs=%r" % (archs,))
elif machine == 'i386':
# On OSX the machine type returned by uname is always the
# 32-bit variant, even if the executable architecture is
# the 64-bit variant
if sys.maxsize >= 2**32:
machine = 'x86_64'
elif machine in ('PowerPC', 'Power_Macintosh'):
# Pick a sane name for the PPC architecture.
# See 'i386' case
if sys.maxsize >= 2**32:
machine = 'ppc64'
else:
machine = 'ppc'
return (osname, release, machine)
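# --- Editor's illustrative sketch (appended; not part of the stdlib). ---
# Assuming _get_system_version() finds nothing (e.g. the SystemVersion
# plist is absent), the deployment target and the recorded -arch flags
# drive the result:
#
#     cfg = {'MACOSX_DEPLOYMENT_TARGET': '10.9',
#            'CFLAGS': '-arch i386 -arch x86_64'}
#     get_platform_osx(cfg, 'darwin', '13.0', 'i386')
#     # -> ('macosx', '10.9', 'intel')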
|
apache-2.0
|
Djabbz/wakatime
|
wakatime/packages/tzlocal3/unix.py
|
11
|
3929
|
import os
import re
import pytz3 as pytz
_cache_tz = None
def _tz_from_env(tzenv):
if tzenv[0] == ':':
tzenv = tzenv[1:]
# TZ specifies a file
if os.path.exists(tzenv):
with open(tzenv, 'rb') as tzfile:
return pytz.tzfile.build_tzinfo('local', tzfile)
# TZ specifies a zoneinfo zone.
try:
tz = pytz.timezone(tzenv)
# That worked, so we return this:
return tz
except pytz.UnknownTimeZoneError:
        raise pytz.UnknownTimeZoneError(
            "tzlocal() does not support non-zoneinfo timezones like %s.\n"
            "Please use a timezone in the form of Continent/City." % tzenv)
def _get_localzone(_root='/'):
"""Tries to find the local timezone configuration.
This method prefers finding the timezone name and passing that to pytz,
    over passing in the localtime file, as in the latter case the zoneinfo
name is unknown.
The parameter _root makes the function look for files like /etc/localtime
beneath the _root directory. This is primarily used by the tests.
In normal usage you call the function without parameters."""
tzenv = os.environ.get('TZ')
if tzenv:
return _tz_from_env(tzenv)
# Now look for distribution specific configuration files
# that contain the timezone name.
tzpath = os.path.join(_root, 'etc/timezone')
if os.path.exists(tzpath):
with open(tzpath, 'rb') as tzfile:
data = tzfile.read()
# Issue #3 was that /etc/timezone was a zoneinfo file.
# That's a misconfiguration, but we need to handle it gracefully:
        if data[:5] != b'TZif2':  # compare bytes; the file was opened in 'rb' mode
etctz = data.strip().decode()
# Get rid of host definitions and comments:
if ' ' in etctz:
etctz, dummy = etctz.split(' ', 1)
if '#' in etctz:
etctz, dummy = etctz.split('#', 1)
return pytz.timezone(etctz.replace(' ', '_'))
# CentOS has a ZONE setting in /etc/sysconfig/clock,
# OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and
# Gentoo has a TIMEZONE setting in /etc/conf.d/clock
# We look through these files for a timezone:
    zone_re = re.compile(r'\s*ZONE\s*=\s*"')
    timezone_re = re.compile(r'\s*TIMEZONE\s*=\s*"')
    end_re = re.compile('"')
for filename in ('etc/sysconfig/clock', 'etc/conf.d/clock'):
tzpath = os.path.join(_root, filename)
if not os.path.exists(tzpath):
continue
with open(tzpath, 'rt') as tzfile:
data = tzfile.readlines()
for line in data:
# Look for the ZONE= setting.
match = zone_re.match(line)
if match is None:
# No ZONE= setting. Look for the TIMEZONE= setting.
match = timezone_re.match(line)
if match is not None:
# Some setting existed
line = line[match.end():]
etctz = line[:end_re.search(line).start()]
# We found a timezone
return pytz.timezone(etctz.replace(' ', '_'))
# No explicit setting existed. Use localtime
for filename in ('etc/localtime', 'usr/local/etc/localtime'):
tzpath = os.path.join(_root, filename)
if not os.path.exists(tzpath):
continue
with open(tzpath, 'rb') as tzfile:
return pytz.tzfile.build_tzinfo('local', tzfile)
    raise pytz.UnknownTimeZoneError('Cannot find any timezone configuration')
def get_localzone():
"""Get the computers configured local timezone, if any."""
global _cache_tz
if _cache_tz is None:
_cache_tz = _get_localzone()
return _cache_tz
def reload_localzone():
"""Reload the cached localzone. You need to call this if the timezone has changed."""
global _cache_tz
_cache_tz = _get_localzone()
return _cache_tz
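# --- Editor's illustrative sketch (appended; not part of the original
# module). --- On a typical Linux host the zone is resolved once and then
# served from the module-level cache:
#
#     tz = get_localzone()    # e.g. <DstTzInfo 'Europe/Warsaw' ...>
#     get_localzone() is tz   # True -- returned from _cache_tz
#     reload_localzone()      # force re-detection after the zone changes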
|
bsd-3-clause
|
QuantConnect/Lean
|
Algorithm.Python/AddFutureOptionContractDataStreamingRegressionAlgorithm.py
|
3
|
4028
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from AlgorithmImports import *
### <summary>
### This regression algorithm tests that we receive the expected data when
### we add future option contracts individually using <see cref="AddFutureOptionContract"/>
### </summary>
class AddFutureOptionContractDataStreamingRegressionAlgorithm(QCAlgorithm):
def Initialize(self):
self.onDataReached = False
self.invested = False
self.symbolsReceived = []
self.expectedSymbolsReceived = []
self.dataReceived = {}
self.SetStartDate(2020, 1, 5)
self.SetEndDate(2020, 1, 6)
self.es20h20 = self.AddFutureContract(
Symbol.CreateFuture(Futures.Indices.SP500EMini, Market.CME, datetime(2020, 3, 20)),
Resolution.Minute).Symbol
self.es19m20 = self.AddFutureContract(
Symbol.CreateFuture(Futures.Indices.SP500EMini, Market.CME, datetime(2020, 6, 19)),
Resolution.Minute).Symbol
optionChains = self.OptionChainProvider.GetOptionContractList(self.es20h20, self.Time)
optionChains += self.OptionChainProvider.GetOptionContractList(self.es19m20, self.Time)
for optionContract in optionChains:
self.expectedSymbolsReceived.append(self.AddFutureOptionContract(optionContract, Resolution.Minute).Symbol)
def OnData(self, data: Slice):
if not data.HasData:
return
self.onDataReached = True
hasOptionQuoteBars = False
for qb in data.QuoteBars.Values:
if qb.Symbol.SecurityType != SecurityType.FutureOption:
continue
hasOptionQuoteBars = True
self.symbolsReceived.append(qb.Symbol)
if qb.Symbol not in self.dataReceived:
self.dataReceived[qb.Symbol] = []
self.dataReceived[qb.Symbol].append(qb)
if self.invested or not hasOptionQuoteBars:
return
if data.ContainsKey(self.es20h20) and data.ContainsKey(self.es19m20):
self.SetHoldings(self.es20h20, 0.2)
self.SetHoldings(self.es19m20, 0.2)
self.invested = True
def OnEndOfAlgorithm(self):
super().OnEndOfAlgorithm()
self.symbolsReceived = list(set(self.symbolsReceived))
self.expectedSymbolsReceived = list(set(self.expectedSymbolsReceived))
if not self.onDataReached:
raise AssertionError("OnData() was never called.")
if len(self.symbolsReceived) != len(self.expectedSymbolsReceived):
raise AssertionError(f"Expected {len(self.expectedSymbolsReceived)} option contracts Symbols, found {len(self.symbolsReceived)}")
missingSymbols = [expectedSymbol for expectedSymbol in self.expectedSymbolsReceived if expectedSymbol not in self.symbolsReceived]
if any(missingSymbols):
raise AssertionError(f'Symbols: "{", ".join(missingSymbols)}" were not found in OnData')
for expectedSymbol in self.expectedSymbolsReceived:
data = self.dataReceived[expectedSymbol]
for dataPoint in data:
dataPoint.EndTime = datetime(1970, 1, 1)
nonDupeDataCount = len(set(data))
if nonDupeDataCount < 1000:
raise AssertionError(f"Received too few data points. Expected >=1000, found {nonDupeDataCount} for {expectedSymbol}")
|
apache-2.0
|
Edraak/edraak-platform
|
lms/djangoapps/courseware/access_utils.py
|
13
|
3032
|
"""
Simple utility functions for computing access.
These allow us to share code between access.py and block transformers.
"""
from datetime import datetime, timedelta
from logging import getLogger
from django.conf import settings
from pytz import UTC
from courseware.access_response import AccessResponse, StartDateError
from courseware.masquerade import is_masquerading_as_student
from openedx.features.course_experience import COURSE_PRE_START_ACCESS_FLAG
from student.roles import CourseBetaTesterRole
from xmodule.util.django import get_current_request_hostname
DEBUG_ACCESS = False
log = getLogger(__name__)
ACCESS_GRANTED = AccessResponse(True)
ACCESS_DENIED = AccessResponse(False)
def debug(*args, **kwargs):
"""
Helper function for local debugging.
"""
# to avoid overly verbose output, this is off by default
if DEBUG_ACCESS:
log.debug(*args, **kwargs)
def adjust_start_date(user, days_early_for_beta, start, course_key):
"""
If user is in a beta test group, adjust the start date by the appropriate number of
days.
Returns:
A datetime. Either the same as start, or earlier for beta testers.
"""
if days_early_for_beta is None:
# bail early if no beta testing is set up
return start
if CourseBetaTesterRole(course_key).has_user(user):
debug("Adjust start time: user in beta role for %s", course_key)
delta = timedelta(days_early_for_beta)
effective = start - delta
return effective
return start
def check_start_date(user, days_early_for_beta, start, course_key):
"""
Verifies whether the given user is allowed access given the
start date and the Beta offset for the given course.
Returns:
AccessResponse: Either ACCESS_GRANTED or StartDateError.
"""
start_dates_disabled = settings.FEATURES['DISABLE_START_DATES']
if start_dates_disabled and not is_masquerading_as_student(user, course_key):
return ACCESS_GRANTED
else:
now = datetime.now(UTC)
if start is None or in_preview_mode():
return ACCESS_GRANTED
effective_start = adjust_start_date(user, days_early_for_beta, start, course_key)
if now > effective_start:
return ACCESS_GRANTED
return StartDateError(start)
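# Editor's worked example (hypothetical dates, not in the original module):
# with a published start of datetime(2020, 1, 10, tzinfo=UTC) and
# days_early_for_beta=5, a user in CourseBetaTesterRole gets an effective
# start of 2020-01-05, so on 2020-01-07 check_start_date() returns
# ACCESS_GRANTED for beta testers and StartDateError(start) for everyone
# else (unless start dates are disabled or preview mode is active).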
def in_preview_mode():
"""
Returns whether the user is in preview mode or not.
"""
hostname = get_current_request_hostname()
preview_lms_base = settings.FEATURES.get('PREVIEW_LMS_BASE', None)
return bool(preview_lms_base and hostname and hostname.split(':')[0] == preview_lms_base.split(':')[0])
def check_course_open_for_learner(user, course):
"""
Check if the course is open for learners based on the start date.
Returns:
AccessResponse: Either ACCESS_GRANTED or StartDateError.
"""
if COURSE_PRE_START_ACCESS_FLAG.is_enabled(course.id):
return ACCESS_GRANTED
return check_start_date(user, course.days_early_for_beta, course.start, course.id)
|
agpl-3.0
|
jbonofre/beam
|
sdks/python/apache_beam/internal/gcp/json_value_test.py
|
7
|
3701
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the json_value module."""
import unittest
from apache_beam.internal.gcp.json_value import from_json_value
from apache_beam.internal.gcp.json_value import to_json_value
from apache_beam.options.value_provider import RuntimeValueProvider
from apache_beam.options.value_provider import StaticValueProvider
# Protect against environments where apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
from apitools.base.py.extra_types import JsonValue
except ImportError:
JsonValue = None
# pylint: enable=wrong-import-order, wrong-import-position
@unittest.skipIf(JsonValue is None, 'GCP dependencies are not installed')
class JsonValueTest(unittest.TestCase):
def test_string_to(self):
self.assertEquals(JsonValue(string_value='abc'), to_json_value('abc'))
def test_true_to(self):
self.assertEquals(JsonValue(boolean_value=True), to_json_value(True))
def test_false_to(self):
self.assertEquals(JsonValue(boolean_value=False), to_json_value(False))
def test_int_to(self):
self.assertEquals(JsonValue(integer_value=14), to_json_value(14))
def test_float_to(self):
self.assertEquals(JsonValue(double_value=2.75), to_json_value(2.75))
def test_static_value_provider_to(self):
svp = StaticValueProvider(str, 'abc')
self.assertEquals(JsonValue(string_value=svp.value), to_json_value(svp))
def test_runtime_value_provider_to(self):
RuntimeValueProvider.runtime_options = None
rvp = RuntimeValueProvider('arg', 123, int)
self.assertEquals(JsonValue(is_null=True), to_json_value(rvp))
def test_none_to(self):
self.assertEquals(JsonValue(is_null=True), to_json_value(None))
def test_string_from(self):
self.assertEquals('WXYZ', from_json_value(to_json_value('WXYZ')))
def test_true_from(self):
self.assertEquals(True, from_json_value(to_json_value(True)))
def test_false_from(self):
self.assertEquals(False, from_json_value(to_json_value(False)))
def test_int_from(self):
self.assertEquals(-27, from_json_value(to_json_value(-27)))
def test_float_from(self):
self.assertEquals(4.5, from_json_value(to_json_value(4.5)))
def test_with_type(self):
rt = from_json_value(to_json_value('abcd', with_type=True))
self.assertEquals('http://schema.org/Text', rt['@type'])
self.assertEquals('abcd', rt['value'])
def test_none_from(self):
self.assertIsNone(from_json_value(to_json_value(None)))
def test_large_integer(self):
num = 1 << 35
self.assertEquals(num, from_json_value(to_json_value(num)))
self.assertEquals(long(num), from_json_value(to_json_value(long(num))))
def test_long_value(self):
self.assertEquals(long(27), from_json_value(to_json_value(long(27))))
def test_too_long_value(self):
with self.assertRaises(TypeError):
to_json_value(long(1 << 64))
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
nzavagli/UnrealPy
|
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/django/contrib/sites/models.py
|
82
|
3951
|
from __future__ import unicode_literals
import string
import warnings
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from .requests import RequestSite as RealRequestSite
from .shortcuts import get_current_site as real_get_current_site
SITE_CACHE = {}
def _simple_domain_name_validator(value):
"""
    Validates that the given value contains no whitespace, to prevent common
typos.
"""
if not value:
return
checks = ((s in value) for s in string.whitespace)
if any(checks):
raise ValidationError(
_("The domain name cannot contain any spaces or tabs."),
code='invalid',
)
class SiteManager(models.Manager):
use_in_migrations = True
def _get_site_by_id(self, site_id):
if site_id not in SITE_CACHE:
site = self.get(pk=site_id)
SITE_CACHE[site_id] = site
return SITE_CACHE[site_id]
def _get_site_by_request(self, request):
host = request.get_host()
if host not in SITE_CACHE:
site = self.get(domain__iexact=host)
SITE_CACHE[host] = site
return SITE_CACHE[host]
def get_current(self, request=None):
"""
Returns the current Site based on the SITE_ID in the project's settings.
If SITE_ID isn't defined, it returns the site with domain matching
request.get_host(). The ``Site`` object is cached the first time it's
retrieved from the database.
"""
from django.conf import settings
if getattr(settings, 'SITE_ID', ''):
site_id = settings.SITE_ID
return self._get_site_by_id(site_id)
elif request:
return self._get_site_by_request(request)
raise ImproperlyConfigured(
"You're using the Django \"sites framework\" without having "
"set the SITE_ID setting. Create a site in your database and "
"set the SITE_ID setting or pass a request to "
"Site.objects.get_current() to fix this error."
)
def clear_cache(self):
"""Clears the ``Site`` object cache."""
global SITE_CACHE
SITE_CACHE = {}
@python_2_unicode_compatible
class Site(models.Model):
domain = models.CharField(_('domain name'), max_length=100,
validators=[_simple_domain_name_validator])
name = models.CharField(_('display name'), max_length=50)
objects = SiteManager()
class Meta:
db_table = 'django_site'
verbose_name = _('site')
verbose_name_plural = _('sites')
ordering = ('domain',)
def __str__(self):
return self.domain
class RequestSite(RealRequestSite):
def __init__(self, *args, **kwargs):
warnings.warn(
"Please import RequestSite from django.contrib.sites.requests.",
RemovedInDjango19Warning, stacklevel=2)
super(RequestSite, self).__init__(*args, **kwargs)
def get_current_site(request):
warnings.warn(
"Please import get_current_site from django.contrib.sites.shortcuts.",
RemovedInDjango19Warning, stacklevel=2)
return real_get_current_site(request)
def clear_site_cache(sender, **kwargs):
"""
Clears the cache (if primed) each time a site is saved or deleted
"""
instance = kwargs['instance']
using = kwargs['using']
try:
del SITE_CACHE[instance.pk]
except KeyError:
pass
try:
del SITE_CACHE[Site.objects.using(using).get(pk=instance.pk).domain]
except (KeyError, Site.DoesNotExist):
pass
pre_save.connect(clear_site_cache, sender=Site)
pre_delete.connect(clear_site_cache, sender=Site)
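# --- Editor's illustrative sketch (appended; assumes a configured Django
# project with the sites app installed and SITE_ID set). ---
#
#     from django.contrib.sites.models import Site
#     site = Site.objects.get_current()   # hits the DB once, then SITE_CACHE
#     Site.objects.clear_cache()          # empty SITE_CACHE explicitly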
|
mit
|
yousafsyed/casperjs
|
bin/Lib/encodings/big5hkscs.py
|
816
|
1039
|
#
# big5hkscs.py: Python Unicode Codec for BIG5HKSCS
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_hk, codecs
import _multibytecodec as mbc
codec = _codecs_hk.getcodec('big5hkscs')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='big5hkscs',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
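# --- Editor's illustrative sketch (appended; not part of the original
# module). --- The codec is normally reached through the standard codecs
# machinery rather than by importing this module directly:
#
#     '\u4e2d\u6587'.encode('big5hkscs')       # -> b'\xa4\xa4\xa4\xe5'
#     b'\xa4\xa4\xa4\xe5'.decode('big5hkscs')  # -> '\u4e2d\u6587'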
|
mit
|
jsirois/pants
|
src/python/pants/core/goals/typecheck.py
|
1
|
6985
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict, Iterable, Optional, Tuple
from pants.core.goals.style_request import StyleRequest
from pants.core.util_rules.filter_empty_sources import (
FieldSetsWithSources,
FieldSetsWithSourcesRequest,
)
from pants.engine.console import Console
from pants.engine.engine_aware import EngineAwareReturnType
from pants.engine.goal import Goal, GoalSubsystem
from pants.engine.process import FallibleProcessResult
from pants.engine.rules import Get, MultiGet, QueryRule, _uncacheable_rule, collect_rules, goal_rule
from pants.engine.target import Targets
from pants.engine.unions import UnionMembership, union
from pants.util.logging import LogLevel
from pants.util.memo import memoized_property
from pants.util.meta import frozen_after_init
from pants.util.strutil import strip_v2_chroot_path
@dataclass(frozen=True)
class TypecheckResult(EngineAwareReturnType):
exit_code: int
stdout: str
stderr: str
partition_description: Optional[str] = None
@staticmethod
def from_fallible_process_result(
process_result: FallibleProcessResult,
*,
partition_description: Optional[str] = None,
strip_chroot_path: bool = False,
) -> TypecheckResult:
def prep_output(s: bytes) -> str:
return strip_v2_chroot_path(s) if strip_chroot_path else s.decode()
return TypecheckResult(
exit_code=process_result.exit_code,
stdout=prep_output(process_result.stdout),
stderr=prep_output(process_result.stderr),
partition_description=partition_description,
)
def metadata(self) -> Dict[str, Any]:
return {"partition": self.partition_description}
@frozen_after_init
@dataclass(unsafe_hash=True)
class TypecheckResults:
"""Zero or more TypecheckResult objects for a single type checker.
Typically, type checkers will return one result. If they no-oped, they will return zero results.
However, some type checkers may need to partition their input and thus may need to return
multiple results.
"""
results: Tuple[TypecheckResult, ...]
typechecker_name: str
def __init__(self, results: Iterable[TypecheckResult], *, typechecker_name: str) -> None:
self.results = tuple(results)
self.typechecker_name = typechecker_name
@property
def skipped(self) -> bool:
return bool(self.results) is False
@memoized_property
def exit_code(self) -> int:
return next((result.exit_code for result in self.results if result.exit_code != 0), 0)
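# Editor's worked example (not in the original module): exit_code surfaces
# the first nonzero partition result, so
#     TypecheckResults([TypecheckResult(0, "", ""), TypecheckResult(2, "", "")],
#                      typechecker_name="mypy").exit_code
# evaluates to 2, and .skipped is False because results is non-empty.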
class EnrichedTypecheckResults(TypecheckResults, EngineAwareReturnType):
"""`TypecheckResults` that are enriched for the sake of logging results as they come in.
    Plugin authors only need to return `TypecheckResults`, and a rule will upcast those into
    `EnrichedTypecheckResults`.
"""
def level(self) -> Optional[LogLevel]:
if self.skipped:
return LogLevel.DEBUG
return LogLevel.WARN if self.exit_code != 0 else LogLevel.INFO
def message(self) -> Optional[str]:
if self.skipped:
return f"{self.typechecker_name} skipped."
message = self.typechecker_name
message += (
" succeeded." if self.exit_code == 0 else f" failed (exit code {self.exit_code})."
)
def msg_for_result(result: TypecheckResult) -> str:
msg = ""
if result.stdout:
msg += f"\n{result.stdout}"
if result.stderr:
msg += f"\n{result.stderr}"
if msg:
msg = f"{msg.rstrip()}\n\n"
return msg
if len(self.results) == 1:
results_msg = msg_for_result(self.results[0])
else:
results_msg = "\n"
for i, result in enumerate(self.results):
msg = f"Partition #{i + 1}"
msg += (
f" - {result.partition_description}:" if result.partition_description else ":"
)
msg += msg_for_result(result) or "\n\n"
results_msg += msg
message += results_msg
return message
@union
class TypecheckRequest(StyleRequest):
"""A union for StyleRequests that should be type-checkable.
    Subclass and install a member of this type to provide a type checker.
"""
class TypecheckSubsystem(GoalSubsystem):
name = "typecheck"
help = "Run type checkers."
required_union_implementations = (TypecheckRequest,)
class Typecheck(Goal):
subsystem_cls = TypecheckSubsystem
@goal_rule
async def typecheck(
console: Console, targets: Targets, union_membership: UnionMembership
) -> Typecheck:
typecheck_request_types = union_membership[TypecheckRequest]
requests: Iterable[StyleRequest] = tuple(
lint_request_type(
lint_request_type.field_set_type.create(target)
for target in targets
if lint_request_type.field_set_type.is_applicable(target)
)
for lint_request_type in typecheck_request_types
)
field_sets_with_sources: Iterable[FieldSetsWithSources] = await MultiGet(
Get(FieldSetsWithSources, FieldSetsWithSourcesRequest(request.field_sets))
for request in requests
)
valid_requests: Iterable[StyleRequest] = tuple(
request_cls(request)
for request_cls, request in zip(typecheck_request_types, field_sets_with_sources)
if request
)
all_results = await MultiGet(
Get(EnrichedTypecheckResults, TypecheckRequest, request) for request in valid_requests
)
exit_code = 0
if all_results:
console.print_stderr("")
for results in sorted(all_results, key=lambda results: results.typechecker_name):
if results.skipped:
sigil = console.yellow("-")
status = "skipped"
elif results.exit_code == 0:
sigil = console.green("✓")
status = "succeeded"
else:
sigil = console.red("𐄂")
status = "failed"
exit_code = results.exit_code
console.print_stderr(f"{sigil} {results.typechecker_name} {status}.")
return Typecheck(exit_code)
# NB: We mark this uncachable to ensure that the results are always streamed, even if the
# underlying TypecheckResults is memoized. This rule is very cheap, so there's little performance
# hit.
@_uncacheable_rule(desc="typecheck")
def enrich_typecheck_results(results: TypecheckResults) -> EnrichedTypecheckResults:
return EnrichedTypecheckResults(
results=results.results, typechecker_name=results.typechecker_name
)
def rules():
return [
*collect_rules(),
# NB: Would be unused otherwise.
QueryRule(TypecheckSubsystem, []),
]
|
apache-2.0
|
DasIch/django
|
django/contrib/auth/hashers.py
|
66
|
17463
|
from __future__ import unicode_literals
import base64
import binascii
import hashlib
import importlib
from collections import OrderedDict
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils import lru_cache
from django.utils.crypto import (
constant_time_compare, get_random_string, pbkdf2,
)
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.module_loading import import_string
from django.utils.translation import ugettext_noop as _
UNUSABLE_PASSWORD_PREFIX = '!' # This will never be a valid encoded hash
UNUSABLE_PASSWORD_SUFFIX_LENGTH = 40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX
def is_password_usable(encoded):
if encoded is None or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
return False
try:
identify_hasher(encoded)
except ValueError:
return False
return True
def check_password(password, encoded, setter=None, preferred='default'):
"""
Returns a boolean of whether the raw password matches the three
part encoded digest.
If setter is specified, it'll be called when you need to
regenerate the password.
"""
if password is None or not is_password_usable(encoded):
return False
preferred = get_hasher(preferred)
hasher = identify_hasher(encoded)
must_update = hasher.algorithm != preferred.algorithm
if not must_update:
must_update = preferred.must_update(encoded)
is_correct = hasher.verify(password, encoded)
if setter and is_correct and must_update:
setter(password)
return is_correct
def make_password(password, salt=None, hasher='default'):
"""
    Turn a plain-text password into a hash for database storage.
    Same as encode() but generates a new random salt.
If password is None then a concatenation of
UNUSABLE_PASSWORD_PREFIX and a random string will be returned
which disallows logins. Additional random string reduces chances
of gaining access to staff or superuser accounts.
See ticket #20079 for more info.
"""
if password is None:
return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH)
hasher = get_hasher(hasher)
if not salt:
salt = hasher.salt()
return hasher.encode(password, salt)
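# Editor's illustrative sketch (not in the original module; assumes a
# configured settings.PASSWORD_HASHERS with the default PBKDF2 hasher):
#
#     encoded = make_password('s3cret')   # 'pbkdf2_sha256$30000$<salt>$<hash>'
#     check_password('s3cret', encoded)   # -> True
#     check_password('wrong', encoded)    # -> False
#     make_password(None)[0] == UNUSABLE_PASSWORD_PREFIX  # disallows login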
@lru_cache.lru_cache()
def get_hashers():
hashers = []
for hasher_path in settings.PASSWORD_HASHERS:
hasher_cls = import_string(hasher_path)
hasher = hasher_cls()
if not getattr(hasher, 'algorithm'):
raise ImproperlyConfigured("hasher doesn't specify an "
"algorithm name: %s" % hasher_path)
hashers.append(hasher)
return hashers
@lru_cache.lru_cache()
def get_hashers_by_algorithm():
return {hasher.algorithm: hasher for hasher in get_hashers()}
@receiver(setting_changed)
def reset_hashers(**kwargs):
if kwargs['setting'] == 'PASSWORD_HASHERS':
get_hashers.cache_clear()
get_hashers_by_algorithm.cache_clear()
def get_hasher(algorithm='default'):
"""
Returns an instance of a loaded password hasher.
If algorithm is 'default', the default hasher will be returned.
This function will also lazy import hashers specified in your
settings file if needed.
"""
if hasattr(algorithm, 'algorithm'):
return algorithm
elif algorithm == 'default':
return get_hashers()[0]
else:
hashers = get_hashers_by_algorithm()
try:
return hashers[algorithm]
except KeyError:
raise ValueError("Unknown password hashing algorithm '%s'. "
"Did you specify it in the PASSWORD_HASHERS "
"setting?" % algorithm)
def identify_hasher(encoded):
"""
Returns an instance of a loaded password hasher.
Identifies hasher algorithm by examining encoded hash, and calls
get_hasher() to return hasher. Raises ValueError if
algorithm cannot be identified, or if hasher is not loaded.
"""
# Ancient versions of Django created plain MD5 passwords and accepted
# MD5 passwords with an empty salt.
if ((len(encoded) == 32 and '$' not in encoded) or
(len(encoded) == 37 and encoded.startswith('md5$$'))):
algorithm = 'unsalted_md5'
# Ancient versions of Django accepted SHA1 passwords with an empty salt.
elif len(encoded) == 46 and encoded.startswith('sha1$$'):
algorithm = 'unsalted_sha1'
else:
algorithm = encoded.split('$', 1)[0]
return get_hasher(algorithm)
def mask_hash(hash, show=6, char="*"):
"""
    Returns the given hash, with only the first ``show`` characters shown. The
rest are masked with ``char`` for security reasons.
"""
masked = hash[:show]
masked += char * len(hash[show:])
return masked
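# Editor's worked example: mask_hash('abcdef123456', show=6) returns
# 'abcdef******' -- the first six characters kept, one '*' per remaining
# character.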
class BasePasswordHasher(object):
"""
Abstract base class for password hashers
When creating your own hasher, you need to override algorithm,
verify(), encode() and safe_summary().
PasswordHasher objects are immutable.
"""
algorithm = None
library = None
def _load_library(self):
if self.library is not None:
if isinstance(self.library, (tuple, list)):
name, mod_path = self.library
else:
mod_path = self.library
try:
module = importlib.import_module(mod_path)
except ImportError as e:
raise ValueError("Couldn't load %r algorithm library: %s" %
(self.__class__.__name__, e))
return module
raise ValueError("Hasher %r doesn't specify a library attribute" %
self.__class__.__name__)
def salt(self):
"""
Generates a cryptographically secure nonce salt in ASCII
"""
return get_random_string()
def verify(self, password, encoded):
"""
Checks if the given password is correct
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide a verify() method')
def encode(self, password, salt):
"""
Creates an encoded database value
The result is normally formatted as "algorithm$salt$hash" and
must be fewer than 128 characters.
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide an encode() method')
def safe_summary(self, encoded):
"""
Returns a summary of safe values
The result is a dictionary and will be used where the password field
must be displayed to construct a safe representation of the password.
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide a safe_summary() method')
def must_update(self, encoded):
return False
class PBKDF2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the PBKDF2 algorithm (recommended)
Configured to use PBKDF2 + HMAC + SHA256.
The result is a 64 byte binary string. Iterations may be changed
safely but you must rename the algorithm if you change SHA256.
"""
algorithm = "pbkdf2_sha256"
iterations = 30000
digest = hashlib.sha256
def encode(self, password, salt, iterations=None):
assert password is not None
assert salt and '$' not in salt
if not iterations:
iterations = self.iterations
hash = pbkdf2(password, salt, iterations, digest=self.digest)
hash = base64.b64encode(hash).decode('ascii').strip()
return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
def verify(self, password, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt, int(iterations))
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('iterations'), iterations),
(_('salt'), mask_hash(salt)),
(_('hash'), mask_hash(hash)),
])
def must_update(self, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
return int(iterations) != self.iterations
class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher):
"""
Alternate PBKDF2 hasher which uses SHA1, the default PRF
recommended by PKCS #5. This is compatible with other
implementations of PBKDF2, such as openssl's
PKCS5_PBKDF2_HMAC_SHA1().
"""
algorithm = "pbkdf2_sha1"
digest = hashlib.sha1
class BCryptSHA256PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the bcrypt algorithm (recommended)
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
"""
algorithm = "bcrypt_sha256"
digest = hashlib.sha256
library = ("bcrypt", "bcrypt")
rounds = 12
def salt(self):
bcrypt = self._load_library()
return bcrypt.gensalt(rounds=self.rounds)
def encode(self, password, salt):
bcrypt = self._load_library()
# Need to reevaluate the force_bytes call once bcrypt is supported on
# Python 3
# Hash the password prior to using bcrypt to prevent password truncation
# See: https://code.djangoproject.com/ticket/20138
if self.digest is not None:
# We use binascii.hexlify here because Python3 decided that a hex encoded
# bytestring is somehow a unicode.
password = binascii.hexlify(self.digest(force_bytes(password)).digest())
else:
password = force_bytes(password)
data = bcrypt.hashpw(password, salt)
return "%s$%s" % (self.algorithm, force_text(data))
def verify(self, password, encoded):
algorithm, data = encoded.split('$', 1)
assert algorithm == self.algorithm
bcrypt = self._load_library()
# Hash the password prior to using bcrypt to prevent password truncation
# See: https://code.djangoproject.com/ticket/20138
if self.digest is not None:
# We use binascii.hexlify here because Python3 decided that a hex encoded
# bytestring is somehow a unicode.
password = binascii.hexlify(self.digest(force_bytes(password)).digest())
else:
password = force_bytes(password)
# Ensure that our data is a bytestring
data = force_bytes(data)
# force_bytes() necessary for py-bcrypt compatibility
hashpw = force_bytes(bcrypt.hashpw(password, data))
return constant_time_compare(data, hashpw)
def safe_summary(self, encoded):
algorithm, empty, algostr, work_factor, data = encoded.split('$', 4)
assert algorithm == self.algorithm
salt, checksum = data[:22], data[22:]
return OrderedDict([
(_('algorithm'), algorithm),
(_('work factor'), work_factor),
(_('salt'), mask_hash(salt)),
(_('checksum'), mask_hash(checksum)),
])
def must_update(self, encoded):
algorithm, empty, algostr, rounds, data = encoded.split('$', 4)
return int(rounds) != self.rounds
class BCryptPasswordHasher(BCryptSHA256PasswordHasher):
"""
Secure password hashing using the bcrypt algorithm
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
    This hasher does not first hash the password, which means it is subject to
    the 72 character bcrypt password truncation. Most use cases should prefer
    the BCryptSHA256PasswordHasher.
See: https://code.djangoproject.com/ticket/20138
"""
algorithm = "bcrypt"
digest = None
class SHA1PasswordHasher(BasePasswordHasher):
"""
The SHA1 password hashing algorithm (not recommended)
"""
algorithm = "sha1"
def encode(self, password, salt):
assert password is not None
assert salt and '$' not in salt
hash = hashlib.sha1(force_bytes(salt + password)).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class MD5PasswordHasher(BasePasswordHasher):
"""
The Salted MD5 password hashing algorithm (not recommended)
"""
algorithm = "md5"
def encode(self, password, salt):
assert password is not None
assert salt and '$' not in salt
hash = hashlib.md5(force_bytes(salt + password)).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class UnsaltedSHA1PasswordHasher(BasePasswordHasher):
"""
Very insecure algorithm that you should *never* use; stores SHA1 hashes
with an empty salt.
This class is implemented because Django used to accept such password
hashes. Some older Django installs still have these values lingering
around so we need to handle and upgrade them properly.
"""
algorithm = "unsalted_sha1"
def salt(self):
return ''
def encode(self, password, salt):
assert salt == ''
hash = hashlib.sha1(force_bytes(password)).hexdigest()
return 'sha1$$%s' % hash
def verify(self, password, encoded):
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
assert encoded.startswith('sha1$$')
hash = encoded[6:]
return OrderedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(hash)),
])
class UnsaltedMD5PasswordHasher(BasePasswordHasher):
"""
Incredibly insecure algorithm that you should *never* use; stores unsalted
MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an
empty salt.
This class is implemented because Django used to store passwords this way
and to accept such password hashes. Some older Django installs still have
these values lingering around so we need to handle and upgrade them
properly.
"""
algorithm = "unsalted_md5"
def salt(self):
return ''
def encode(self, password, salt):
assert salt == ''
return hashlib.md5(force_bytes(password)).hexdigest()
def verify(self, password, encoded):
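        # 37 == len('md5$$') + 32 hex digits: strip the legacy empty-salt
        # prefix before comparing.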
if len(encoded) == 37 and encoded.startswith('md5$$'):
encoded = encoded[5:]
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
return OrderedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(encoded, show=3)),
])
class CryptPasswordHasher(BasePasswordHasher):
"""
Password hashing using UNIX crypt (not recommended)
The crypt module is not supported on all platforms.
"""
algorithm = "crypt"
library = "crypt"
def salt(self):
return get_random_string(2)
def encode(self, password, salt):
crypt = self._load_library()
assert len(salt) == 2
data = crypt.crypt(force_str(password), salt)
# we don't need to store the salt, but Django used to do this
return "%s$%s$%s" % (self.algorithm, '', data)
def verify(self, password, encoded):
crypt = self._load_library()
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
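        # crypt() reads only the leading salt characters of its second
        # argument, so passing the full stored hash as the salt reproduces
        # the original hash.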
return constant_time_compare(data, crypt.crypt(force_str(password), data))
def safe_summary(self, encoded):
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('salt'), salt),
(_('hash'), mask_hash(data, show=3)),
])
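
# A minimal standalone sketch (not Django code) of the "algorithm$salt$hash"
# scheme shared by the hashers above: encode() hashes salt + password and
# verify() re-encodes and compares. The helper names and salt value are
# illustrative only; real verification uses a constant-time comparison.
import hashlib

def demo_encode(password, salt, algorithm="sha1"):
    digest = hashlib.new(algorithm, (salt + password).encode()).hexdigest()
    return "%s$%s$%s" % (algorithm, salt, digest)

def demo_verify(password, encoded):
    algorithm, salt, _ = encoded.split('$', 2)
    return demo_encode(password, salt, algorithm) == encoded

# demo_verify("secret", demo_encode("secret", "ab12"))  # -> True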
|
bsd-3-clause
|
cm13-kinzie-port-from-clark/kernel_motorola_msm8992
|
tools/perf/scripts/python/sched-migration.py
|
11215
|
11670
|
#!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
	return "%s:%d" % (threads.get(pid, "unknown"), pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
def __repr__(self):
ret = self.tasks.__repr__()
		ret += self.event.__repr__()
return ret
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
def __init__(self, arg = []):
self.data = arg
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
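	# Illustrative: with recorded slices spanning [0, 10) and [10, 20),
	# find_time_slice(15) returns index 1; a timestamp outside every slice
	# returns -1.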
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
|
gpl-2.0
|
CanalTP/navitia
|
source/jormungandr/jormungandr/scenarios/helper_classes/tests/__init__.py
|
3
|
1257
|
# coding=utf-8
# Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
|
agpl-3.0
|
moylop260/odoo-dev
|
addons/account/wizard/account_reconcile_partner_process.py
|
385
|
5775
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_partner_reconcile_process(osv.osv_memory):
_name = 'account.partner.reconcile.process'
    _description = 'Reconciliation process, partner by partner'
def _get_to_reconcile(self, cr, uid, context=None):
cr.execute("""
SELECT p_id FROM (SELECT l.partner_id as p_id, SUM(l.debit) AS debit, SUM(l.credit) AS credit
FROM account_move_line AS l LEFT JOIN account_account a ON (l.account_id = a.id)
LEFT JOIN res_partner p ON (p.id = l.partner_id)
WHERE a.reconcile = 't'
AND l.reconcile_id IS NULL
AND (%s > to_char(p.last_reconciliation_date, 'YYYY-MM-DD') OR p.last_reconciliation_date IS NULL )
AND l.state <> 'draft'
GROUP BY l.partner_id) AS tmp
WHERE debit > 0
AND credit > 0
""",(time.strftime('%Y-%m-%d'),)
)
return len(map(lambda x: x[0], cr.fetchall())) - 1
def _get_today_reconciled(self, cr, uid, context=None):
cr.execute(
"SELECT l.partner_id " \
"FROM account_move_line AS l LEFT JOIN res_partner p ON (p.id = l.partner_id) " \
"WHERE l.reconcile_id IS NULL " \
"AND %s = to_char(p.last_reconciliation_date, 'YYYY-MM-DD') " \
"AND l.state <> 'draft' " \
"GROUP BY l.partner_id ",(time.strftime('%Y-%m-%d'),)
)
return len(map(lambda x: x[0], cr.fetchall())) + 1
def _get_partner(self, cr, uid, context=None):
move_line_obj = self.pool.get('account.move.line')
partner = move_line_obj.list_partners_to_reconcile(cr, uid, context=context)
if not partner:
return False
return partner[0][0]
def data_get(self, cr, uid, to_reconcile, today_reconciled, context=None):
return {'progress': (100 / (float(to_reconcile + today_reconciled) or 1.0)) * today_reconciled}
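    # Illustrative: to_reconcile=3, today_reconciled=1 gives
    # progress = (100 / 4.0) * 1 = 25.0 (percent handled today).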
def default_get(self, cr, uid, fields, context=None):
res = super(account_partner_reconcile_process, self).default_get(cr, uid, fields, context=context)
if 'to_reconcile' in res and 'today_reconciled' in res:
data = self.data_get(cr, uid, res['to_reconcile'], res['today_reconciled'], context)
res.update(data)
return res
def next_partner(self, cr, uid, ids, context=None):
if context is None:
context = {}
move_line_obj = self.pool.get('account.move.line')
res_partner_obj = self.pool.get('res.partner')
partner_id = move_line_obj.read(cr, uid, context['active_id'], ['partner_id'])['partner_id']
if partner_id:
res_partner_obj.write(cr, uid, partner_id[0], {'last_reconciliation_date': time.strftime('%Y-%m-%d')}, context)
#TODO: we have to find a way to update the context of the current tab (we could open a new tab with the context but it's not really handy)
#TODO: remove that comments when the client side dev is done
return {'type': 'ir.actions.act_window_close'}
_columns = {
        'to_reconcile': fields.float('Remaining Partners', readonly=True, help='These are the remaining partners for whom you should check whether there is something to reconcile. This figure already counts the current partner as reconciled.'),
        'today_reconciled': fields.float('Partners Reconciled Today', readonly=True, help='This figure depicts the total number of partners that have gone through the reconciliation process today. The current partner is counted as already processed.'),
        'progress': fields.float('Progress', readonly=True, help='Shows you the progress made today on the reconciliation process. Given by\nPartners Reconciled Today / (Remaining Partners + Partners Reconciled Today)'),
        'next_partner_id': fields.many2one('res.partner', 'Next Partner to Reconcile', readonly=True, help='This field shows you the next partner that will be automatically chosen by the system to go through the reconciliation process, based on the latest day it has been reconciled.'), # TODO: remove the readonly=True when the client side allows updating the context of an existing tab, so that the user can change this value if he does not want to follow OpenERP's proposal
}
_defaults = {
'to_reconcile': _get_to_reconcile,
'today_reconciled': _get_today_reconciled,
'next_partner_id': _get_partner,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
itpcc/FPGA-IA-Journy-game
|
image2verilog-colorgroup.py
|
1
|
4467
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import Tkinter
import tkFileDialog
from PIL import Image
import ImageDraw
import PIL.ImageOps
__DEFAULT_VAR_NAME__ = "var1"
__IS_MARK_CENTER__ = False
__INVERT_COLOR__ = False
def main():
try:
        Tkinter.Tk().withdraw() # Hide the root window
in_path = str(tkFileDialog.askopenfilename())
print("Opening: {0}".format(in_path))
outputpath = str(tkFileDialog.asksaveasfilename())
im = Image.open(in_path)
w, h = im.size
outputpath = outputpath.format(width=w, w=w, height=h, h=h)
with open(outputpath, 'w') as f:
print("Output: {0}".format(outputpath))
variableName = raw_input('Type variable name (enter to use "{0}" as variable name) :'.format(__DEFAULT_VAR_NAME__)).strip()
if variableName == "":
variableName = __DEFAULT_VAR_NAME__
if __INVERT_COLOR__:
r,g,b,a = im.split()
rgb_image = Image.merge('RGB', (r,g,b))
inverted_image = PIL.ImageOps.invert(rgb_image)
r2,g2,b2 = inverted_image.split()
im = Image.merge('RGBA', (r2,g2,b2,a))
rgb_im = im.convert('RGBA')
resultImage = Image.new('RGBA', (w, h))
draw = ImageDraw.Draw(resultImage)
# f.write("reg [{3}:0] {0}[{1}] = {2} ".format(variableName, h, '{', (w*2)-1 ))
f.write("module {0}(\n".format(variableName))
f.write(" input clk,\n")
f.write(" input wire [9:0] characterPositionX,\n")
f.write(" input wire [8:0] characterPositionY,\n")
f.write(" input wire [9:0] drawingPositionX,\n")
f.write(" input wire [8:0] drawingPositionY,\n")
f.write(" output reg [2:0] rgb\n")
f.write(");\n")
f.write(" reg [9:0] x;\n")
f.write(" reg [9:0] y;\n")
f.write(" initial begin\n")
f.write(" x = 'd0;\n")
f.write(" y = 'd0;\n")
f.write(" end\n")
f.write("\n")
f.write(" always @(posedge clk) begin\n")
f.write(" x <= (drawingPositionX - characterPositionX + {0});\n".format(int(w/2)+1))
f.write(" y <= (drawingPositionY - characterPositionY + {0});\n".format(int(h/2)+1))
if(__IS_MARK_CENTER__):
f.write(" if(");
f.write("x == {0:d} && y == {1:d} || ".format((w/2), (h/2)))
f.write("x == {0:d} && y == {1:d} || ".format((w/2), (h/2)-1))
f.write("x == {0:d} && y == {1:d} || ".format((w/2), (h/2)+1))
f.write("x == {0:d} && y == {1:d} || ".format((w/2)-1, (h/2)))
f.write("x == {0:d} && y == {1:d} || ".format((w/2)-1, (h/2)-1))
f.write("x == {0:d} && y == {1:d} || ".format((w/2)-1, (h/2)+1))
f.write("x == {0:d} && y == {1:d} || ".format((w/2)+1, (h/2)))
f.write("x == {0:d} && y == {1:d} || ".format((w/2)+1, (h/2)-1))
f.write("x == {0:d} && y == {1:d} )".format((w/2)+1, (h/2)+1))
f.write("\tbegin\trgb <= 3'b010;\tend\n")
colorGroup = {}
for y in xrange(1,h):
# f.write("{0}'b".format((w*2)))
for x in xrange(1,w):
r, g, b, alpha = rgb_im.getpixel((x, y))
# grayScale = r * 299.0/1000 + g * 587.0/1000 + b * 114.0/1000
# grayScale = int(grayScale/256*4)
# grayScale = int(grayScale/256*8)
                    grayScale = 0
if r > 125:
grayScale += 4
if g > 125:
grayScale += 2
if b > 125:
grayScale += 1
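                    # Illustrative mapping: (r, g, b) = (200, 30, 180)
                    # -> grayScale = 4 + 0 + 1 = 5, later emitted as 3'b101.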
print "Pixel: ({0},{1}) = ({2},{3},{4}, {6}) => {5:03b}".format(x, y, r, g, b, grayScale, alpha)
draw.point((x,y), (255 if r > 125 else 0, 255 if g > 125 else 0, 255 if b > 125 else 0, alpha))
# f.write("{0}{1}".format('1' if grayScale >= 2 else '0', '1' if grayScale%2 == 1 else '0'))
if(alpha > 128 and grayScale > 0) :
colorCode = "3'b{0}{1}{2}".format(1 if r > 125 else 0, 1 if g > 125 else 0, 1 if b > 125 else 0)
if colorCode not in colorGroup:
colorGroup[colorCode] = []
colorGroup[colorCode].append("x=={0} && y=={1}".format(x, y))
f.write("\t\t")
if __IS_MARK_CENTER__:
f.write("else ")
for colorCode in colorGroup:
f.write("if ( (")
f.write(") || (".join(colorGroup[colorCode]))
f.write(") ) begin rgb <= {0} end\n\t\telse ".format(colorCode))
f.write("begin rgb <= 3'b000; end// Width: {0}, Height: {1} From: {2}\n".format(w,h,in_path))
f.write(" end\nendmodule\n")
f.close()
resultImage.save(outputpath+".png", 'PNG')
os.system("start "+outputpath)
os.system("start "+outputpath+".png")
except Exception, e:
print e
if __name__ == "__main__":
main()
# im = Image.open('image.gif')
# rgb_im = im.convert('RGB')
|
mit
|
liyitest/rr
|
openstack_dashboard/dashboards/admin/hypervisors/views.py
|
48
|
2811
|
# Copyright 2013 B1 Systems GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tables
from horizon import tabs
from horizon.utils import functions as utils
from openstack_dashboard import api
from openstack_dashboard.dashboards.admin.hypervisors \
import tables as project_tables
from openstack_dashboard.dashboards.admin.hypervisors \
import tabs as project_tabs
class AdminIndexView(tabs.TabbedTableView):
tab_group_class = project_tabs.HypervisorHostTabs
template_name = 'admin/hypervisors/index.html'
page_title = _("All Hypervisors")
def get_data(self):
hypervisors = []
try:
hypervisors = api.nova.hypervisor_list(self.request)
hypervisors.sort(key=utils.natural_sort('hypervisor_hostname'))
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve hypervisor information.'))
return hypervisors
def get_context_data(self, **kwargs):
context = super(AdminIndexView, self).get_context_data(**kwargs)
try:
context["stats"] = api.nova.hypervisor_stats(self.request)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve hypervisor statistics.'))
return context
class AdminDetailView(tables.DataTableView):
table_class = project_tables.AdminHypervisorInstancesTable
template_name = 'admin/hypervisors/detail.html'
page_title = _("Hypervisor Servers")
def get_data(self):
instances = []
try:
id, name = self.kwargs['hypervisor'].split('_', 1)
result = api.nova.hypervisor_search(self.request,
name)
for hypervisor in result:
if str(hypervisor.id) == id:
try:
instances += hypervisor.servers
except AttributeError:
pass
except Exception:
exceptions.handle(
self.request,
_('Unable to retrieve hypervisor instances list.'))
return instances
|
apache-2.0
|
stars2014/quick-ng
|
tools/cocos2d-console/toexec/build_console.py
|
6
|
7612
|
#!/usr/bin/python
# ----------------------------------------------------------------------------
# build_console: Build cocos2d-console into executable binary file with PyInstaller
#
# Author: Bin Zhang
#
# License: MIT
# ----------------------------------------------------------------------------
'''
Build cocos2d-console into executable binary file with PyInstaller
'''
import os
import json
import subprocess
import excopy
import ConfigParser
import sys
import shutil
from argparse import ArgumentParser
def run_shell(cmd, cwd=None):
p = subprocess.Popen(cmd, shell=True, cwd=cwd)
p.wait()
if p.returncode:
raise subprocess.CalledProcessError(returncode=p.returncode, cmd=cmd)
return p.returncode
def os_is_win32():
return sys.platform == 'win32'
def os_is_linux():
return 'linux' in sys.platform
class Builder(object):
CONFIG_FILE = "config.json"
KEY_COPY_CONFIG = "copy_config"
KEY_MODIFY_CONFIG = "modify_config"
KEY_HIDDEN_IMPORT = "hidden_import"
ENTRANCE_FILE = "bin/cocos.py"
CMD_FORMAT = 'pyinstaller -F %s %s --distpath "%s" --specpath "%s" --workpath "%s" --clean -y "%s"'
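    # Illustrative expansion (hypothetical paths and flags):
    # pyinstaller -F -p bin:plugins --hidden-import foo --distpath "output/linux"
    #     --specpath "build/linux" --workpath "build/linux" --clean -y "bin/cocos.py"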
def __init__(self, args):
self.my_path = os.path.realpath(os.path.dirname(__file__))
# get the source path
if args.src_path is None:
src_path = os.path.abspath(os.path.join(self.my_path, os.path.pardir))
else:
src_path = os.path.expanduser(args.src_path)
if os.path.isabs(src_path):
self.src_path = src_path
else:
self.src_path = os.path.abspath(src_path)
if not os.path.isdir(self.src_path):
raise Exception("%s is not a available path." % self.src_path)
self.entrance_file = os.path.join(self.src_path, Builder.ENTRANCE_FILE)
# get the dst path
if args.dst_path is None:
self.dst_path = os.path.abspath("output")
else:
dst_path = os.path.expanduser(args.dst_path)
if os.path.isabs(dst_path):
self.dst_path = dst_path
else:
self.dst_path = os.path.abspath(dst_path)
if os_is_linux():
self.dst_path = os.path.join(self.dst_path, "linux")
else:
self.dst_path = os.path.join(self.dst_path, sys.platform)
# parse config file
cfg_file = os.path.join(self.my_path, Builder.CONFIG_FILE)
f = open(cfg_file)
self.cfg_info = json.load(f)
f.close()
def _get_dirs(self, path, dir_list=None):
if dir_list is None:
dir_list = []
if not os.path.isdir(path):
return dir_list
for name in os.listdir(path):
full_path = os.path.join(path, name)
if os.path.isdir(full_path):
dir_list.append(full_path)
self._get_dirs(full_path, dir_list)
return dir_list
def modify_files(self, modify_info):
import re
modify_file = modify_info["file_path"]
if not os.path.isabs(modify_file):
modify_file = os.path.abspath(os.path.join(self.dst_path, modify_file))
if not os.path.isfile(modify_file):
return
pattern = modify_info["pattern"]
replace_str = modify_info["replace_string"]
f = open(modify_file)
lines = f.readlines()
f.close()
new_lines = []
for line in lines:
new_line = re.sub(pattern, replace_str, line)
new_lines.append(new_line)
f = open(modify_file, "w")
f.writelines(new_lines)
f.close()
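    # Illustrative modify_info entry (hypothetical values):
    #   {"file_path": "bin/cocos2d.ini", "pattern": "^root_path=.*",
    #    "replace_string": "root_path=."}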
def do_build(self):
print("Source Path : %s" % self.src_path)
print("Output Path : %s" % self.dst_path)
print("Start building")
if os.path.exists(self.dst_path):
shutil.rmtree(self.dst_path)
# copy files
copy_config = self.cfg_info[Builder.KEY_COPY_CONFIG]
copy_cfgs = copy_config["common"]
if sys.platform in copy_config:
copy_cfgs += copy_config[sys.platform]
elif os_is_linux():
copy_cfgs += copy_config["linux"]
for element in copy_cfgs:
excopy.copy_files_with_config(element, self.src_path, self.dst_path)
# modify files
modify_config = self.cfg_info[Builder.KEY_MODIFY_CONFIG]
for element in modify_config:
self.modify_files(element)
# get the path parameter
plugins_path = os.path.join(self.src_path, "plugins")
bin_path = os.path.join(self.src_path, "bin")
dir_list = self._get_dirs(plugins_path)
dir_list.append(plugins_path)
dir_list.append(bin_path)
dir_list.append(self.src_path)
if os_is_win32():
sep = ";"
else:
sep = ":"
path_param = "-p %s" % sep.join(dir_list)
# get the runtime-hook parameter
_cp = ConfigParser.ConfigParser(allow_no_value=True)
_cp.optionxform = str
_cp.read(os.path.join(self.src_path, "bin/cocos2d.ini"))
runtime_hook_param = ""
hidden_import_param = ""
# add hidden import params for config.json
hidden_import_cfg = self.cfg_info[Builder.KEY_HIDDEN_IMPORT]
if len(hidden_import_cfg) > 0:
for key in hidden_import_cfg:
hidden_import_param += "--hidden-import %s " % key
runtime_hook_param += '--runtime-hook "%s" ' % os.path.join(self.src_path, hidden_import_cfg[key])
for s in _cp.sections():
if s == 'plugins':
for classname in _cp.options(s):
parts = classname.split(".")
module_name = parts[0]
hidden_import_param += "--hidden-import %s " % module_name
module_path = os.path.join(plugins_path, module_name)
if os.path.isdir(module_path):
runtime_hook_param += '--runtime-hook "%s" ' % ("%s/__init__.py" % module_path)
else:
module_file = "%s.py" % module_path
if os.path.isfile(module_file):
runtime_hook_param += '--runtime-hook "%s" ' % module_file
# additional hooks path
add_hook_dir_param = '--additional-hooks-dir "%s" ' % plugins_path
add_hook_dir_param += '--additional-hooks-dir "%s" ' % bin_path
add_hook_dir_param += '--additional-hooks-dir "%s"' % self.src_path
# build *.py
if os_is_linux():
spec_path = os.path.join(self.my_path, "build", "linux")
else:
spec_path = os.path.join(self.my_path, "build", sys.platform)
work_path = spec_path
if os.path.exists(spec_path):
shutil.rmtree(spec_path)
build_cmd = Builder.CMD_FORMAT % (path_param, '%s %s %s' % (hidden_import_param, add_hook_dir_param, runtime_hook_param), self.dst_path, spec_path, work_path, self.entrance_file)
run_shell(build_cmd)
print("Building succeed.")
if __name__ == "__main__":
parser = ArgumentParser(description="Generate executable file for cocos2d-console by PyInstaller.")
parser.add_argument('-s', '--src-path', dest='src_path', help='Specify the path of cocos2d-console.')
parser.add_argument('-d', '--dst-path', dest='dst_path', help='Specify the path of output.')
(args, unknown) = parser.parse_known_args()
if len(unknown) > 0:
print("unknown arguments: %s" % unknown)
builder = Builder(args)
builder.do_build()
|
mit
|
Xeralux/tensorflow
|
tensorflow/contrib/nn/python/ops/alpha_dropout.py
|
17
|
3427
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numbers
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
def alpha_dropout(x, keep_prob, noise_shape=None, seed=None, name=None): # pylint: disable=invalid-name
"""Computes alpha dropout.
Alpha Dropout is a dropout that maintains the self-normalizing property. For
an input with zero mean and unit standard deviation, the output of
Alpha Dropout maintains the original mean and standard deviation of the input.
See [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
Args:
x: A tensor.
keep_prob: A scalar `Tensor` with the same type as x. The probability
that each element is kept.
noise_shape: A 1-D `Tensor` of type `int32`, representing the
shape for randomly generated keep/drop flags.
seed: A Python integer. Used to create random seeds. See
@{tf.set_random_seed} for behavior.
name: A name for this operation (optional).
Returns:
A Tensor of the same shape of `x`.
Raises:
ValueError: If `keep_prob` is not in `(0, 1]`.
"""
with ops.name_scope(name, "alpha_dropout", [x]) as name:
x = ops.convert_to_tensor(x, name="x")
if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1.:
raise ValueError("keep_prob must be a scalar tensor or a float in the "
"range (0, 1], got %g" % keep_prob)
keep_prob = ops.convert_to_tensor(keep_prob,
dtype=x.dtype,
name="keep_prob")
keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
# Do nothing if we know keep_prob == 1
if tensor_util.constant_value(keep_prob) == 1:
return x
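    # -lambda * alpha from the SELU activation (Klambauer et al., 2017): the
    # value dropped units are set to, so the affine step below can restore
    # zero mean and unit variance.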
alpha = -1.7580993408473766
noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)
random_tensor = random_ops.random_uniform(noise_shape,
seed=seed,
dtype=x.dtype)
kept_idx = gen_math_ops.greater_equal(random_tensor, 1 - keep_prob)
kept_idx = math_ops.cast(kept_idx, x.dtype)
# Mask
x = x * kept_idx + alpha * (1 - kept_idx)
# Affine transformation parameters
a = (keep_prob + keep_prob * (1 - keep_prob) * alpha ** 2) ** -0.5
b = -a * alpha * (1 - keep_prob)
# Affine transformation
return a * x + b
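
# A minimal NumPy sketch (illustrative, not part of TensorFlow) checking
# empirically that the transformation above approximately preserves mean 0
# and std 1 on standard-normal input; sample size and keep_prob are
# arbitrary choices.
import numpy as np

def _alpha_dropout_np(x, keep_prob, alpha=-1.7580993408473766):
  kept = (np.random.uniform(size=x.shape) >= 1 - keep_prob).astype(x.dtype)
  y = x * kept + alpha * (1 - kept)
  a = (keep_prob + keep_prob * (1 - keep_prob) * alpha ** 2) ** -0.5
  b = -a * alpha * (1 - keep_prob)
  return a * y + b

# x = np.random.randn(1000000)
# y = _alpha_dropout_np(x, keep_prob=0.9)
# print(y.mean(), y.std())  # expected close to 0 and 1 respectively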
|
apache-2.0
|
droark/bitcoin
|
test/functional/feature_bip68_sequence.py
|
2
|
18287
|
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP68 implementation."""
import random
import time
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex
from test_framework.script import CScript
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, get_bip9_status, satoshi_round, sync_blocks
SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
SEQUENCE_LOCKTIME_MASK = 0x0000ffff
# RPC error for non-BIP68 final transactions
NOT_FINAL_ERROR = "non-BIP68-final (code 64)"
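# Worked example (illustrative): nSequence = SEQUENCE_LOCKTIME_TYPE_FLAG | 10
# encodes a time-based relative lock of 10 << SEQUENCE_LOCKTIME_GRANULARITY
# = 5120 seconds; without the type flag the same value means 10 blocks.
# Setting SEQUENCE_LOCKTIME_DISABLE_FLAG (bit 31) turns the lock off entirely.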
class BIP68Test(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [[], ["-acceptnonstdtxn=0"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
# Generate some coins
self.nodes[0].generate(110)
self.log.info("Running test disable flag")
self.test_disable_flag()
self.log.info("Running test sequence-lock-confirmed-inputs")
self.test_sequence_lock_confirmed_inputs()
self.log.info("Running test sequence-lock-unconfirmed-inputs")
self.test_sequence_lock_unconfirmed_inputs()
self.log.info("Running test BIP68 not consensus before versionbits activation")
self.test_bip68_not_consensus()
self.log.info("Activating BIP68 (and 112/113)")
self.activateCSV()
self.log.info("Verifying nVersion=2 transactions are standard.")
self.log.info("Note that nVersion=2 transactions are always standard (independent of BIP68 activation status).")
self.test_version2_relay()
self.log.info("Passed")
    # Test that BIP68 is not in effect if tx version is 1, or if
    # the sequence-lock disable flag (bit 31) is set.
def test_disable_flag(self):
# Create some unconfirmed inputs
new_addr = self.nodes[0].getnewaddress()
self.nodes[0].sendtoaddress(new_addr, 2) # send 2 BTC
utxos = self.nodes[0].listunspent(0, 0)
assert len(utxos) > 0
utxo = utxos[0]
tx1 = CTransaction()
value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN)
# Check that the disable flag disables relative locktime.
# If sequence locks were used, this would require 1 block for the
# input to mature.
sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1
tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)]
tx1.vout = [CTxOut(value, CScript([b'a']))]
tx1_signed = self.nodes[0].signrawtransactionwithwallet(ToHex(tx1))["hex"]
tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
tx1_id = int(tx1_id, 16)
# This transaction will enable sequence-locks, so this transaction should
# fail
tx2 = CTransaction()
tx2.nVersion = 2
sequence_value = sequence_value & 0x7fffffff
tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)]
tx2.vout = [CTxOut(int(value - self.relayfee * COIN), CScript([b'a' * 35]))]
tx2.rehash()
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx2))
# Setting the version back down to 1 should disable the sequence lock,
# so this should be accepted.
tx2.nVersion = 1
self.nodes[0].sendrawtransaction(ToHex(tx2))
# Calculate the median time past of a prior block ("confirmations" before
# the current tip).
def get_median_time_past(self, confirmations):
block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations)
return self.nodes[0].getblockheader(block_hash)["mediantime"]
# Test that sequence locks are respected for transactions spending confirmed inputs.
def test_sequence_lock_confirmed_inputs(self):
# Create lots of confirmed utxos, and use them to generate lots of random
# transactions.
max_outputs = 50
addresses = []
while len(addresses) < max_outputs:
addresses.append(self.nodes[0].getnewaddress())
while len(self.nodes[0].listunspent()) < 200:
random.shuffle(addresses)
num_outputs = random.randint(1, max_outputs)
outputs = {}
for i in range(num_outputs):
outputs[addresses[i]] = random.randint(1, 20)*0.01
self.nodes[0].sendmany("", outputs)
self.nodes[0].generate(1)
utxos = self.nodes[0].listunspent()
# Try creating a lot of random transactions.
# Each time, choose a random number of inputs, and randomly set
# some of those inputs to be sequence locked (and randomly choose
# between height/time locking). Small random chance of making the locks
# all pass.
for i in range(400):
# Randomly choose up to 10 inputs
num_inputs = random.randint(1, 10)
random.shuffle(utxos)
# Track whether any sequence locks used should fail
should_pass = True
# Track whether this transaction was built with sequence locks
using_sequence_locks = False
tx = CTransaction()
tx.nVersion = 2
value = 0
for j in range(num_inputs):
sequence_value = 0xfffffffe # this disables sequence locks
# 50% chance we enable sequence locks
if random.randint(0,1):
using_sequence_locks = True
# 10% of the time, make the input sequence value pass
input_will_pass = (random.randint(1,10) == 1)
sequence_value = utxos[j]["confirmations"]
if not input_will_pass:
sequence_value += 1
should_pass = False
# Figure out what the median-time-past was for the confirmed input
# Note that if an input has N confirmations, we're going back N blocks
# from the tip so that we're looking up MTP of the block
# PRIOR to the one the input appears in, as per the BIP68 spec.
orig_time = self.get_median_time_past(utxos[j]["confirmations"])
cur_time = self.get_median_time_past(0) # MTP of the tip
# can only timelock this input if it's not too old -- otherwise use height
can_time_lock = True
if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK:
can_time_lock = False
# if time-lockable, then 50% chance we make this a time lock
if random.randint(0,1) and can_time_lock:
# Find first time-lock value that fails, or latest one that succeeds
time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
if input_will_pass and time_delta > cur_time - orig_time:
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)
elif (not input_will_pass and time_delta <= cur_time - orig_time):
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1
sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
tx.vin.append(CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), nSequence=sequence_value))
value += utxos[j]["amount"]*COIN
# Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output
tx_size = len(ToHex(tx))//2 + 120*num_inputs + 50
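            # Illustrative: 2 inputs and a 300-char raw hex tx give
            # 150 + 2*120 + 50 = 440 bytes assumed when computing the fee.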
tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a'])))
rawtx = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))["hex"]
if (using_sequence_locks and not should_pass):
# This transaction should be rejected
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, rawtx)
else:
# This raw transaction should be accepted
self.nodes[0].sendrawtransaction(rawtx)
utxos = self.nodes[0].listunspent()
# Test that sequence locks on unconfirmed inputs must have nSequence
# height or time of 0 to be accepted.
# Then test that BIP68-invalid transactions are removed from the mempool
# after a reorg.
def test_sequence_lock_unconfirmed_inputs(self):
# Store height so we can easily reset the chain at the end of the test
cur_height = self.nodes[0].getblockcount()
# Create a mempool tx.
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
tx1.rehash()
# Anyone-can-spend mempool tx.
# Sequence lock of 0 should pass.
tx2 = CTransaction()
tx2.nVersion = 2
tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
self.nodes[0].sendrawtransaction(tx2_raw)
# Create a spend of the 0th output of orig_tx with a sequence lock
# of 1, and test what happens when submitting.
# orig_tx.vout[0] must be an anyone-can-spend output
def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock):
sequence_value = 1
if not use_height_lock:
sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
tx = CTransaction()
tx.nVersion = 2
tx.vin = [CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)]
tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee * COIN), CScript([b'a' * 35]))]
tx.rehash()
if (orig_tx.hash in node.getrawmempool()):
# sendrawtransaction should fail if the tx is in the mempool
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, node.sendrawtransaction, ToHex(tx))
else:
# sendrawtransaction should succeed if the tx is not in the mempool
node.sendrawtransaction(ToHex(tx))
return tx
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
# Now mine some blocks, but make sure tx2 doesn't get mined.
# Use prioritisetransaction to lower the effective feerate to 0
self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(-self.relayfee*COIN))
cur_time = int(time.time())
for i in range(10):
self.nodes[0].setmocktime(cur_time + 600)
self.nodes[0].generate(1)
cur_time += 600
assert tx2.hash in self.nodes[0].getrawmempool()
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
# Mine tx2, and then try again
self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(self.relayfee*COIN))
# Advance the time on the node so that we can test timelocks
self.nodes[0].setmocktime(cur_time+600)
self.nodes[0].generate(1)
assert tx2.hash not in self.nodes[0].getrawmempool()
# Now that tx2 is not in the mempool, a sequence locked spend should
# succeed
tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
assert tx3.hash in self.nodes[0].getrawmempool()
self.nodes[0].generate(1)
assert tx3.hash not in self.nodes[0].getrawmempool()
# One more test, this time using height locks
tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee, use_height_lock=True)
assert tx4.hash in self.nodes[0].getrawmempool()
# Now try combining confirmed and unconfirmed inputs
tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee, use_height_lock=True)
assert tx5.hash not in self.nodes[0].getrawmempool()
utxos = self.nodes[0].listunspent()
tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN)
raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"]
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5)
# Test mempool-BIP68 consistency after reorg
#
# State of the transactions in the last blocks:
# ... -> [ tx2 ] -> [ tx3 ]
# tip-1 tip
# And currently tx4 is in the mempool.
#
# If we invalidate the tip, tx3 should get added to the mempool, causing
# tx4 to be removed (fails sequence-lock).
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
assert tx4.hash not in self.nodes[0].getrawmempool()
assert tx3.hash in self.nodes[0].getrawmempool()
# Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
# diagram above).
# This would cause tx2 to be added back to the mempool, which in turn causes
# tx3 to be removed.
tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16)
height = self.nodes[0].getblockcount()
for i in range(2):
block = create_block(tip, create_coinbase(height), cur_time)
block.nVersion = 3
block.rehash()
block.solve()
tip = block.sha256
height += 1
self.nodes[0].submitblock(ToHex(block))
cur_time += 1
mempool = self.nodes[0].getrawmempool()
assert tx3.hash not in mempool
assert tx2.hash in mempool
# Reset the chain and get rid of the mocktimed-blocks
self.nodes[0].setmocktime(0)
self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1))
self.nodes[0].generate(10)
# Make sure that BIP68 isn't being used to validate blocks, prior to
# versionbits activation. If more blocks are mined prior to this test
# being run, then it's possible the test has activated the soft fork, and
# this test should be moved to run earlier, or deleted.
def test_bip68_not_consensus(self):
assert get_bip9_status(self.nodes[0], 'csv')['status'] != 'active'
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
tx1.rehash()
# Make an anyone-can-spend transaction
tx2 = CTransaction()
tx2.nVersion = 1
tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
# sign tx2
tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
self.nodes[0].sendrawtransaction(ToHex(tx2))
# Now make an invalid spend of tx2 according to BIP68
sequence_value = 100 # 100 block relative locktime
tx3 = CTransaction()
tx3.nVersion = 2
tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)]
tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee * COIN), CScript([b'a' * 35]))]
tx3.rehash()
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3))
# make a block that violates bip68; ensure that the tip updates
tip = int(self.nodes[0].getbestblockhash(), 16)
block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1))
block.nVersion = 3
block.vtx.extend([tx1, tx2, tx3])
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
add_witness_commitment(block)
block.solve()
self.nodes[0].submitblock(block.serialize(True).hex())
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
def activateCSV(self):
# activation should happen at block height 432 (3 periods)
# getblockchaininfo will show CSV as active at block 431 (144 * 3 -1) since it's returning whether CSV is active for the next block.
min_activation_height = 432
height = self.nodes[0].getblockcount()
assert_greater_than(min_activation_height - height, 2)
self.nodes[0].generate(min_activation_height - height - 2)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "locked_in")
self.nodes[0].generate(1)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "active")
sync_blocks(self.nodes)
# Use self.nodes[1] to test that version 2 transactions are standard.
def test_version2_relay(self):
inputs = [ ]
outputs = { self.nodes[1].getnewaddress() : 1.0 }
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
tx = FromHex(CTransaction(), rawtxfund)
tx.nVersion = 2
tx_signed = self.nodes[1].signrawtransactionwithwallet(ToHex(tx))["hex"]
self.nodes[1].sendrawtransaction(tx_signed)
if __name__ == '__main__':
BIP68Test().main()
|
mit
|
frankyrumple/ope
|
admin_app/gluon/tests/test_template.py
|
7
|
6382
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Unit tests for gluon.template
"""
import unittest
from fix_path import fix_sys_path
fix_sys_path(__file__)
import template
from template import render
class TestTemplate(unittest.TestCase):
def testRun(self):
self.assertEqual(render(content='{{for i in range(n):}}{{=i}}{{pass}}',
context=dict(n=3)), '012')
self.assertEqual(render(content='{{if n>2:}}ok{{pass}}',
context=dict(n=3)), 'ok')
self.assertEqual(
render(content='{{try:}}{{n/0}}{{except:}}fail{{pass}}',
context=dict(n=3)), 'fail')
self.assertEqual(render(content='{{="<&>"}}'), '<&>')
self.assertEqual(render(content='"abc"'), '"abc"')
self.assertEqual(render(content='"a\'bc"'), '"a\'bc"')
self.assertEqual(render(content='"a\"bc"'), '"a\"bc"')
self.assertEqual(render(content=r'''"a\"bc"'''), r'"a\"bc"')
self.assertEqual(render(content=r'''"""abc\""""'''), r'"""abc\""""')
def testEqualWrite(self):
"test generation of response.write from ="
self.assertEqual(render(content='{{="abc"}}'), 'abc')
# whitespace is stripped
self.assertEqual(render(content='{{ ="abc"}}'), 'abc')
self.assertEqual(render(content='{{ ="abc" }}'), 'abc')
self.assertEqual(render(content='{{pass\n="abc" }}'), 'abc')
# = recognized only at the beginning of a physical line
self.assertEqual(render(
content='{{xyz = "xyz"\n="abc"\n="def"\n=xyz }}'), 'abcdefxyz')
# = in python blocks
self.assertEqual(render(content='{{if True:\n="abc"\npass }}'), 'abc')
self.assertEqual(
render(content='{{if True:\n="abc"\npass\n="def" }}'), 'abcdef')
self.assertEqual(
render(content='{{if False:\n="abc"\npass\n="def" }}'), 'def')
self.assertEqual(render(
content='{{if True:\n="abc"\nelse:\n="def"\npass }}'), 'abc')
self.assertEqual(render(
content='{{if False:\n="abc"\nelse:\n="def"\npass }}'), 'def')
# codeblock-leading = handles internal newlines, escaped or not
self.assertEqual(render(content='{{=list((1,2,3))}}'), '[1, 2, 3]')
self.assertEqual(render(content='{{=list((1,2,\\\n3))}}'), '[1, 2, 3]')
self.assertEqual(render(content='{{=list((1,2,\n3))}}'), '[1, 2, 3]')
# ...but that means no more = operators in the codeblock
self.assertRaises(SyntaxError, render, content='{{="abc"\n="def" }}')
# = embedded in codeblock won't handle newlines in its argument
self.assertEqual(
render(content='{{pass\n=list((1,2,\\\n3))}}'), '[1, 2, 3]')
self.assertRaises(
SyntaxError, render, content='{{pass\n=list((1,2,\n3))}}')
def testWithDummyFileSystem(self):
from os.path import join as pjoin
import contextlib
from StringIO import StringIO
from gluon.restricted import RestrictedError
@contextlib.contextmanager
def monkey_patch(module, fn_name, patch):
try:
unpatch = getattr(module, fn_name)
except AttributeError:
unpatch = None
setattr(module, fn_name, patch)
try:
yield
finally:
if unpatch is None:
delattr(module, fn_name)
else:
setattr(module, fn_name, unpatch)
def dummy_open(path, mode):
if path == pjoin('views', 'layout.html'):
return StringIO("{{block left_sidebar}}left{{end}}"
"{{include}}"
"{{block right_sidebar}}right{{end}}")
elif path == pjoin('views', 'layoutbrackets.html'):
return StringIO("[[block left_sidebar]]left[[end]]"
"[[include]]"
"[[block right_sidebar]]right[[end]]")
elif path == pjoin('views', 'default', 'index.html'):
return StringIO("{{extend 'layout.html'}}"
"{{block left_sidebar}}{{super}} {{end}}"
"to"
"{{block right_sidebar}} {{super}}{{end}}")
elif path == pjoin('views', 'default', 'indexbrackets.html'):
return StringIO("[[extend 'layoutbrackets.html']]"
"[[block left_sidebar]][[super]] [[end]]"
"to"
"[[block right_sidebar]] [[super]][[end]]")
elif path == pjoin('views', 'default', 'missing.html'):
return StringIO("{{extend 'wut'}}"
"{{block left_sidebar}}{{super}} {{end}}"
"to"
"{{block right_sidebar}} {{super}}{{end}}")
elif path == pjoin('views', 'default', 'noescape.html'):
return StringIO("""{{=NOESCAPE('<script></script>')}}""")
raise IOError
with monkey_patch(template, 'open', dummy_open):
self.assertEqual(
render(filename=pjoin('views', 'default', 'index.html'),
path='views'),
'left to right')
self.assertEqual(
render(filename=pjoin('views', 'default', 'indexbrackets.html'),
path='views', delimiters=('[[', ']]')),
'left to right')
self.assertRaises(
RestrictedError,
render,
filename=pjoin('views', 'default', 'missing.html'),
path='views')
response = template.DummyResponse()
response.delimiters = ('[[', ']]')
self.assertEqual(
render(filename=pjoin('views', 'default', 'indexbrackets.html'),
path='views', context={'response': response}),
'left to right')
self.assertEqual(
render(filename=pjoin('views', 'default', 'noescape.html'),
context={'NOESCAPE': template.NOESCAPE}),
'<script></script>')
if __name__ == '__main__':
unittest.main()
|
mit
|