# shanot/imp :: modules/rmf/examples/link.py (license: gpl-3.0)
## \example rmf/link.py
# This example is like module/rmf/pdb.py except that, instead of creating a
# new hierarchy from the RMF file, it simply links the existing hierarchy
# to the file. This mechanism can be used for loading multiple
# conformations for scoring or other analysis without having to set up
# restraints and other machinery each time.
from __future__ import print_function
import IMP.atom
import IMP.rmf
import RMF
import sys
IMP.setup_from_argv(sys.argv, "link")
m = IMP.Model()
# Create a new IMP.atom.Hierarchy from the contents of the pdb file
h = IMP.atom.read_pdb(IMP.rmf.get_example_path("simple.pdb"), m)
tfn = "link.rmf"
print("File name is", tfn)
# open the file, clearing any existing contents
rh = RMF.create_rmf_file(tfn)
# add the hierarchy to the file
IMP.rmf.add_hierarchies(rh, [h])
# add the current configuration to the file as frame 0
IMP.rmf.save_frame(rh)
# close the file
del rh
# reopen it, don't clear the file when opening it
rh = RMF.open_rmf_file_read_only(tfn)
# link to the existing pdb hierarchy
IMP.rmf.link_hierarchies(rh, [h])
# load the same coordinates in, ok, that is not very exciting
IMP.rmf.load_frame(rh, RMF.FrameID(0))
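# A hedged sketch: with an RMF file holding several frames, each frame could
# be loaded into the same linked hierarchy in turn (this example file has
# only frame 0; get_number_of_frames() is assumed from the RMF API):
#
#     for i in range(rh.get_number_of_frames()):
#         IMP.rmf.load_frame(rh, RMF.FrameID(i))
#         # ...score or otherwise analyze h here...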
print("Try running rmf_display or rmf_show on", tfn)
# karesansui/karesansui :: bin/restart_network.py (license: mit)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of Karesansui.
#
# Copyright (C) 2012 HDE, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
import sys
import logging
from optparse import OptionParser
from ksscommand import KssCommand, KssCommandException, KssCommandOptException
import __cmd__
try:
import karesansui
from karesansui import __version__
from karesansui.lib.virt.virt import KaresansuiVirtConnection, KaresansuiVirtException
from karesansui.lib.const import NETWORK_IFCONFIG_COMMAND, NETWORK_BRCTL_COMMAND
from karesansui.lib.utils import load_locale
from karesansui.lib.utils import execute_command
except ImportError, e:
print >>sys.stderr, "[Error] some packages not found. - %s" % e
sys.exit(1)
_ = load_locale()
usage = '%prog [options]'
def getopts():
optp = OptionParser(usage=usage, version=__version__)
optp.add_option('-n', '--name', dest='name', help=_('Network name'))
optp.add_option('-f', '--force', dest='force', action="store_true", help=_('Do everything to bring up network'))
return optp.parse_args()
def chkopts(opts):
if not opts.name:
raise KssCommandOptException('ERROR: %s option is required.' % '-n or --name')
class RestartNetwork(KssCommand):
def process(self):
(opts, args) = getopts()
chkopts(opts)
self.up_progress(10)
conn = KaresansuiVirtConnection(readonly=False)
try:
active_networks = conn.list_active_network()
inactive_networks = conn.list_inactive_network()
if not (opts.name in active_networks or opts.name in inactive_networks):
raise KssCommandException('Could not find the specified network. - net=%s' % (opts.name))
self.up_progress(10)
try:
conn.stop_network(opts.name)
except KaresansuiVirtException, e:
if opts.force is not True:
raise KssCommandException('Could not stop the specified network. - net=%s' % (opts.name))
self.up_progress(20)
try:
conn.start_network(opts.name)
except KaresansuiVirtException, e:
if opts.force is not True:
raise KssCommandException('Could not start the specified network. - net=%s' % (opts.name))
# force path: try to bring down the existing bridge, then retry
kvn = conn.search_kvn_networks(opts.name)[0]
try:
    bridge_name = kvn.get_info()['bridge']['name']
except KeyError:
    bridge_name = None
if bridge_name is not None:
    ret, res = execute_command([NETWORK_IFCONFIG_COMMAND, bridge_name, 'down'])
    ret, res = execute_command([NETWORK_BRCTL_COMMAND, 'delbr', bridge_name])
# try again
conn.start_network(opts.name)
self.up_progress(10)
if not (opts.name in conn.list_active_network()):
raise KssCommandException('Failed to start network. - net=%s' % (opts.name))
self.logger.info('Restarted network. - net=%s' % (opts.name))
print >>sys.stdout, _('Restarted network. - net=%s') % (opts.name)
return True
finally:
conn.close()
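# A usage sketch (network name and invocation are illustrative; Karesansui
# normally drives this command through its own tooling):
#
#     python restart_network.py --name default
#     python restart_network.py -n mynet --force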
if __name__ == "__main__":
target = RestartNetwork()
sys.exit(target.run())
# PwnArt1st/searx :: tests/unit/engines/test_youtube_api.py (license: agpl-3.0)
from collections import defaultdict
import mock
from searx.engines import youtube_api
from searx.testing import SearxTestCase
class TestYoutubeAPIEngine(SearxTestCase):
def test_request(self):
query = 'test_query'
dicto = defaultdict(dict)
dicto['pageno'] = 0
dicto['language'] = 'fr_FR'
params = youtube_api.request(query, dicto)
self.assertTrue('url' in params)
self.assertTrue(query in params['url'])
self.assertIn('googleapis.com', params['url'])
self.assertIn('youtube', params['url'])
self.assertIn('fr', params['url'])
dicto['language'] = 'all'
params = youtube_api.request(query, dicto)
self.assertFalse('fr' in params['url'])
def test_response(self):
self.assertRaises(AttributeError, youtube_api.response, None)
self.assertRaises(AttributeError, youtube_api.response, [])
self.assertRaises(AttributeError, youtube_api.response, '')
self.assertRaises(AttributeError, youtube_api.response, '[]')
response = mock.Mock(text='{}')
self.assertEqual(youtube_api.response(response), [])
response = mock.Mock(text='{"data": []}')
self.assertEqual(youtube_api.response(response), [])
json = """
{
"kind": "youtube#searchListResponse",
"etag": "xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME",
"nextPageToken": "CAUQAA",
"pageInfo": {
"totalResults": 1000000,
"resultsPerPage": 20
},
"items": [
{
"kind": "youtube#searchResult",
"etag": "xmg9xJZuZD438sF4hb-VcBBREXc/IbLO64BMhbHIgWLwLw7MDYe7Hs4",
"id": {
"kind": "youtube#video",
"videoId": "DIVZCPfAOeM"
},
"snippet": {
"publishedAt": "2015-05-29T22:41:04.000Z",
"channelId": "UCNodmx1ERIjKqvcJLtdzH5Q",
"title": "Title",
"description": "Description",
"thumbnails": {
"default": {
"url": "https://i.ytimg.com/vi/DIVZCPfAOeM/default.jpg"
},
"medium": {
"url": "https://i.ytimg.com/vi/DIVZCPfAOeM/mqdefault.jpg"
},
"high": {
"url": "https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg"
}
},
"channelTitle": "MinecraftUniverse",
"liveBroadcastContent": "none"
}
}
]
}
"""
response = mock.Mock(text=json)
results = youtube_api.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 1)
self.assertEqual(results[0]['title'], 'Title')
self.assertEqual(results[0]['url'], 'https://www.youtube.com/watch?v=DIVZCPfAOeM')
self.assertEqual(results[0]['content'], 'Description')
self.assertEqual(results[0]['thumbnail'], 'https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg')
self.assertTrue('DIVZCPfAOeM' in results[0]['embedded'])
json = """
{
"kind": "youtube#searchListResponse",
"etag": "xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME",
"nextPageToken": "CAUQAA",
"pageInfo": {
"totalResults": 1000000,
"resultsPerPage": 20
}
}
"""
response = mock.Mock(text=json)
results = youtube_api.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 0)
json = """
{"toto":{"entry":[]
}
}
"""
response = mock.Mock(text=json)
results = youtube_api.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 0)
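# One way to run just this module (a sketch; searx may provide its own test
# runner, and this assumes the tests/ package layout shown in the path above):
#
#     python -m unittest tests.unit.engines.test_youtube_api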
# aquarimeter/aquarimeter :: lib/python2.7/site-packages/pkg_resources.py
"""
Package resource API
--------------------
A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is. Do not use os.path operations to manipulate resource
names being passed into the API.
The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""
import sys
import os
import time
import re
import imp
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import token
import symbol
import operator
import platform
from pkgutil import get_importer
try:
from urlparse import urlparse, urlunparse
except ImportError:
from urllib.parse import urlparse, urlunparse
try:
frozenset
except NameError:
from sets import ImmutableSet as frozenset
try:
basestring
next = lambda o: o.next()
from cStringIO import StringIO as BytesIO
except NameError:
basestring = str
from io import BytesIO
def execfile(fn, globs=None, locs=None):
if globs is None:
globs = globals()
if locs is None:
locs = globs
exec(compile(open(fn).read(), fn, 'exec'), globs, locs)
# capture these to bypass sandboxing
from os import utime
try:
from os import mkdir, rename, unlink
WRITE_SUPPORT = True
except ImportError:
# no write support, probably under GAE
WRITE_SUPPORT = False
from os import open as os_open
from os.path import isdir, split
# Avoid try/except due to potential problems with delayed import mechanisms.
if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
import importlib._bootstrap as importlib_bootstrap
else:
importlib_bootstrap = None
try:
import parser
except ImportError:
pass
def _bypass_ensure_directory(name, mode=0x1FF): # 0777
# Sandbox-bypassing version of ensure_directory()
if not WRITE_SUPPORT:
raise IOError('"os.mkdir" not supported on this platform.')
dirname, filename = split(name)
if dirname and filename and not isdir(dirname):
_bypass_ensure_directory(dirname)
mkdir(dirname, mode)
_state_vars = {}
def _declare_state(vartype, **kw):
g = globals()
for name, val in kw.items():
g[name] = val
_state_vars[name] = vartype
def __getstate__():
state = {}
g = globals()
for k, v in _state_vars.items():
state[k] = g['_sget_'+v](g[k])
return state
def __setstate__(state):
g = globals()
for k, v in state.items():
g['_sset_'+_state_vars[k]](k, g[k], v)
return state
def _sget_dict(val):
return val.copy()
def _sset_dict(key, ob, state):
ob.clear()
ob.update(state)
def _sget_object(val):
return val.__getstate__()
def _sset_object(key, ob, state):
ob.__setstate__(state)
_sget_none = _sset_none = lambda *args: None
def get_supported_platform():
"""Return this platform's maximum compatible version.
distutils.util.get_platform() normally reports the minimum version
of Mac OS X that would be required to *use* extensions produced by
distutils. But what we want when checking compatibility is to know the
version of Mac OS X that we are *running*. To allow usage of packages that
explicitly require a newer version of Mac OS X, we must also know the
current version of the OS.
If this condition occurs for any other platform with a version in its
platform strings, this function should be extended accordingly.
"""
plat = get_build_platform()
m = macosVersionString.match(plat)
if m is not None and sys.platform == "darwin":
try:
plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
except ValueError:
pass # not Mac OS X
return plat
__all__ = [
# Basic resource access and distribution/entry point discovery
'require', 'run_script', 'get_provider', 'get_distribution',
'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
'resource_string', 'resource_stream', 'resource_filename',
'resource_listdir', 'resource_exists', 'resource_isdir',
# Environmental control
'declare_namespace', 'working_set', 'add_activation_listener',
'find_distributions', 'set_extraction_path', 'cleanup_resources',
'get_default_cache',
# Primary implementation classes
'Environment', 'WorkingSet', 'ResourceManager',
'Distribution', 'Requirement', 'EntryPoint',
# Exceptions
'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
'ExtractionError',
# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
# filesystem utilities
'ensure_directory', 'normalize_path',
# Distribution "precedence" constants
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
# "Provider" interfaces, implementations, and registration/lookup APIs
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',
# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
]
class ResolutionError(Exception):
"""Abstract base for dependency resolution errors"""
def __repr__(self):
return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
"""An already-installed version conflicts with the requested version"""
class DistributionNotFound(ResolutionError):
"""A requested distribution was not found"""
class UnknownExtra(ResolutionError):
"""Distribution doesn't have an "extra feature" of the given name"""
_provider_factories = {}
PY_MAJOR = sys.version[:3]
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
def register_loader_type(loader_type, provider_factory):
"""Register `provider_factory` to make providers for `loader_type`
`loader_type` is the type or class of a PEP 302 ``module.__loader__``,
and `provider_factory` is a function that, passed a *module* object,
returns an ``IResourceProvider`` for that module.
"""
_provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
"""Return an IResourceProvider for the named module or requirement"""
if isinstance(moduleOrReq,Requirement):
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
try:
module = sys.modules[moduleOrReq]
except KeyError:
__import__(moduleOrReq)
module = sys.modules[moduleOrReq]
loader = getattr(module, '__loader__', None)
return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
if not _cache:
import platform
version = platform.mac_ver()[0]
# fallback for MacPorts
if version == '':
import plistlib
plist = '/System/Library/CoreServices/SystemVersion.plist'
if os.path.exists(plist):
if hasattr(plistlib, 'readPlist'):
plist_content = plistlib.readPlist(plist)
if 'ProductVersion' in plist_content:
version = plist_content['ProductVersion']
_cache.append(version.split('.'))
return _cache[0]
def _macosx_arch(machine):
return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
def get_build_platform():
"""Return this platform's string for platform-specific distributions
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
needs some hacks for Linux and Mac OS X.
"""
try:
# Python 2.7 or >=3.2
from sysconfig import get_platform
except ImportError:
from distutils.util import get_platform
plat = get_platform()
if sys.platform == "darwin" and not plat.startswith('macosx-'):
try:
version = _macosx_vers()
machine = os.uname()[4].replace(" ", "_")
return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
_macosx_arch(machine))
except ValueError:
# if someone is running a non-Mac darwin system, this will fall
# through to the default implementation
pass
return plat
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform # XXX backward compat
def compatible_platforms(provided,required):
"""Can code for the `provided` platform run on the `required` platform?
Returns true if either platform is ``None``, or the platforms are equal.
XXX Needs compatibility checks for Linux and other unixy OSes.
"""
if provided is None or required is None or provided==required:
return True # easy case
# Mac OS X special cases
reqMac = macosVersionString.match(required)
if reqMac:
provMac = macosVersionString.match(provided)
# is this a Mac package?
if not provMac:
# this is backwards compatibility for packages built before
# setuptools 0.6. All packages built after this point will
# use the new macosx designation.
provDarwin = darwinVersionString.match(provided)
if provDarwin:
dversion = int(provDarwin.group(1))
macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
if dversion == 7 and macosversion >= "10.3" or \
dversion == 8 and macosversion >= "10.4":
#import warnings
#warnings.warn("Mac eggs should be rebuilt to "
# "use the macosx designation instead of darwin.",
# category=DeprecationWarning)
return True
return False # egg isn't macosx or legacy darwin
# are they the same major version and machine type?
if provMac.group(1) != reqMac.group(1) or \
provMac.group(3) != reqMac.group(3):
return False
# is the required OS major update >= the provided one?
if int(provMac.group(2)) > int(reqMac.group(2)):
return False
return True
# XXX Linux and other platforms' special cases should go here
return False
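# For example (a sketch of the Mac OS X branch above): an egg built for an
# older OS minor version is usable on a newer one, so
# compatible_platforms('macosx-10.3-ppc', 'macosx-10.4-ppc') returns True,
# while the reversed arguments return False.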
def run_script(dist_spec, script_name):
"""Locate distribution `dist_spec` and run its `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
ns.clear()
ns['__name__'] = name
require(dist_spec)[0].run_script(script_name, ns)
run_main = run_script # backward compatibility
def get_distribution(dist):
"""Return a current distribution object for a Requirement or string"""
if isinstance(dist,basestring): dist = Requirement.parse(dist)
if isinstance(dist,Requirement): dist = get_provider(dist)
if not isinstance(dist,Distribution):
raise TypeError("Expected string, Requirement, or Distribution", dist)
return dist
def load_entry_point(dist, group, name):
"""Return `name` entry point of `group` for `dist` or raise ImportError"""
return get_distribution(dist).load_entry_point(group, name)
def get_entry_map(dist, group=None):
"""Return the entry point map for `group`, or the full entry map"""
return get_distribution(dist).get_entry_map(group)
def get_entry_info(dist, group, name):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider:
def has_metadata(name):
"""Does the package's distribution contain the named metadata?"""
def get_metadata(name):
"""The named metadata resource as a string"""
def get_metadata_lines(name):
"""Yield named metadata resource as list of non-blank non-comment lines
Leading and trailing whitespace is stripped from each line, and lines
with ``#`` as the first non-blank character are omitted."""
def metadata_isdir(name):
"""Is the named metadata a directory? (like ``os.path.isdir()``)"""
def metadata_listdir(name):
"""List of metadata names in the directory (like ``os.listdir()``)"""
def run_script(script_name, namespace):
"""Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
"""An object that provides access to package resources"""
def get_resource_filename(manager, resource_name):
"""Return a true filesystem path for `resource_name`
`manager` must be an ``IResourceManager``"""
def get_resource_stream(manager, resource_name):
"""Return a readable file-like object for `resource_name`
`manager` must be an ``IResourceManager``"""
def get_resource_string(manager, resource_name):
"""Return a string containing the contents of `resource_name`
`manager` must be an ``IResourceManager``"""
def has_resource(resource_name):
"""Does the package contain the named resource?"""
def resource_isdir(resource_name):
"""Is the named resource a directory? (like ``os.path.isdir()``)"""
def resource_listdir(resource_name):
"""List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
"""A collection of active distributions on sys.path (or a similar list)"""
def __init__(self, entries=None):
"""Create working set from list of path entries (default=sys.path)"""
self.entries = []
self.entry_keys = {}
self.by_key = {}
self.callbacks = []
if entries is None:
entries = sys.path
for entry in entries:
self.add_entry(entry)
def add_entry(self, entry):
"""Add a path item to ``.entries``, finding any distributions on it
``find_distributions(entry, True)`` is used to find distributions
corresponding to the path entry, and they are added. `entry` is
always appended to ``.entries``, even if it is already present.
(This is because ``sys.path`` can contain the same value more than
once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
equal ``sys.path``.)
"""
self.entry_keys.setdefault(entry, [])
self.entries.append(entry)
for dist in find_distributions(entry, True):
self.add(dist, entry, False)
def __contains__(self,dist):
"""True if `dist` is the active distribution for its project"""
return self.by_key.get(dist.key) == dist
def find(self, req):
"""Find a distribution matching requirement `req`
If there is an active distribution for the requested project, this
returns it as long as it meets the version requirement specified by
`req`. But, if there is an active distribution for the project and it
does *not* meet the `req` requirement, ``VersionConflict`` is raised.
If there is no active distribution for the requested project, ``None``
is returned.
"""
dist = self.by_key.get(req.key)
if dist is not None and dist not in req:
raise VersionConflict(dist,req) # XXX add more info
else:
return dist
def iter_entry_points(self, group, name=None):
"""Yield entry point objects from `group` matching `name`
If `name` is None, yields all entry points in `group` from all
distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order).
"""
for dist in self:
entries = dist.get_entry_map(group)
if name is None:
for ep in entries.values():
yield ep
elif name in entries:
yield entries[name]
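    # A usage sketch for iter_entry_points (the module-level ``working_set``
    # is created near the end of this file; 'console_scripts' is just the
    # conventional group name):
    #
    #     for ep in working_set.iter_entry_points('console_scripts'):
    #         func = ep.load()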
def run_script(self, requires, script_name):
"""Locate distribution for `requires` and run `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
ns.clear()
ns['__name__'] = name
self.require(requires)[0].run_script(script_name, ns)
def __iter__(self):
"""Yield distributions for non-duplicate projects in the working set
The yield order is the order in which the items' path entries were
added to the working set.
"""
seen = {}
for item in self.entries:
if item not in self.entry_keys:
# workaround a cache issue
continue
for key in self.entry_keys[item]:
if key not in seen:
seen[key]=1
yield self.by_key[key]
def add(self, dist, entry=None, insert=True, replace=False):
"""Add `dist` to working set, associated with `entry`
If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
On exit from this routine, `entry` is added to the end of the working
set's ``.entries`` (if it wasn't already present).
`dist` is only added to the working set if it's for a project that
doesn't already have a distribution in the set, unless `replace=True`.
If it's added, any callbacks registered with the ``subscribe()`` method
will be called.
"""
if insert:
dist.insert_on(self.entries, entry)
if entry is None:
entry = dist.location
keys = self.entry_keys.setdefault(entry,[])
keys2 = self.entry_keys.setdefault(dist.location,[])
if not replace and dist.key in self.by_key:
return # ignore hidden distros
self.by_key[dist.key] = dist
if dist.key not in keys:
keys.append(dist.key)
if dist.key not in keys2:
keys2.append(dist.key)
self._added_new(dist)
def resolve(self, requirements, env=None, installer=None,
replace_conflicting=False):
"""List all distributions needed to (recursively) meet `requirements`
`requirements` must be a sequence of ``Requirement`` objects. `env`,
if supplied, should be an ``Environment`` instance. If
not supplied, it defaults to all distributions available within any
entry or distribution in the working set. `installer`, if supplied,
will be invoked with each requirement that cannot be met by an
already-installed distribution; it should return a ``Distribution`` or
``None``.
Unless `replace_conflicting=True`, raises a VersionConflict exception if
any requirements are found on the path that have the correct name but
the wrong version. Otherwise, if an `installer` is supplied it will be
invoked to obtain the correct version of the requirement and activate
it.
"""
requirements = list(requirements)[::-1] # set up the stack
processed = {} # set of processed requirements
best = {} # key -> dist
to_activate = []
while requirements:
req = requirements.pop(0) # process dependencies breadth-first
if req in processed:
# Ignore cyclic or redundant dependencies
continue
dist = best.get(req.key)
if dist is None:
# Find the best distribution and add it to the map
dist = self.by_key.get(req.key)
if dist is None or (dist not in req and replace_conflicting):
ws = self
if env is None:
if dist is None:
env = Environment(self.entries)
else:
# Use an empty environment and workingset to avoid
# any further conflicts with the conflicting
# distribution
env = Environment([])
ws = WorkingSet([])
dist = best[req.key] = env.best_match(req, ws, installer)
if dist is None:
#msg = ("The '%s' distribution was not found on this "
# "system, and is required by this application.")
#raise DistributionNotFound(msg % req)
# unfortunately, zc.buildout uses a str(err)
# to get the name of the distribution here..
raise DistributionNotFound(req)
to_activate.append(dist)
if dist not in req:
# Oops, the "best" so far conflicts with a dependency
raise VersionConflict(dist,req) # XXX put more info here
requirements.extend(dist.requires(req.extras)[::-1])
processed[req] = True
return to_activate # return list of distros to activate
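    # A usage sketch for resolve() (the requirement string is illustrative):
    #
    #     ws = WorkingSet()
    #     for dist in ws.resolve(parse_requirements(['example-dist>=1.0'])):
    #         ws.add(dist)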
def find_plugins(self, plugin_env, full_env=None, installer=None,
fallback=True):
"""Find all activatable distributions in `plugin_env`
Example usage::
distributions, errors = working_set.find_plugins(
Environment(plugin_dirlist)
)
map(working_set.add, distributions) # add plugins+libs to sys.path
print 'Could not load', errors # display errors
The `plugin_env` should be an ``Environment`` instance that contains
only distributions that are in the project's "plugin directory" or
directories. The `full_env`, if supplied, should be an ``Environment``
that contains all currently-available distributions. If `full_env` is not
supplied, one is created automatically from the ``WorkingSet`` this
method is called on, which will typically mean that every directory on
``sys.path`` will be scanned for distributions.
`installer` is a standard installer callback as used by the
``resolve()`` method. The `fallback` flag indicates whether we should
attempt to resolve older versions of a plugin if the newest version
cannot be resolved.
This method returns a 2-tuple: (`distributions`, `error_info`), where
`distributions` is a list of the distributions found in `plugin_env`
that were loadable, along with any other distributions that are needed
to resolve their dependencies. `error_info` is a dictionary mapping
unloadable plugin distributions to an exception instance describing the
error that occurred. Usually this will be a ``DistributionNotFound`` or
``VersionConflict`` instance.
"""
plugin_projects = list(plugin_env)
plugin_projects.sort() # scan project names in alphabetic order
error_info = {}
distributions = {}
if full_env is None:
env = Environment(self.entries)
env += plugin_env
else:
env = full_env + plugin_env
shadow_set = self.__class__([])
list(map(shadow_set.add, self)) # put all our entries in shadow_set
for project_name in plugin_projects:
for dist in plugin_env[project_name]:
req = [dist.as_requirement()]
try:
resolvees = shadow_set.resolve(req, env, installer)
except ResolutionError:
v = sys.exc_info()[1]
error_info[dist] = v # save error info
if fallback:
continue # try the next older version of project
else:
break # give up on this project, keep going
else:
list(map(shadow_set.add, resolvees))
distributions.update(dict.fromkeys(resolvees))
# success, no need to try any more versions of this project
break
distributions = list(distributions)
distributions.sort()
return distributions, error_info
def require(self, *requirements):
"""Ensure that distributions matching `requirements` are activated
`requirements` must be a string or a (possibly-nested) sequence
thereof, specifying the distributions and versions required. The
return value is a sequence of the distributions that needed to be
activated to fulfill the requirements; all relevant distributions are
included, even if they were already activated in this working set.
"""
needed = self.resolve(parse_requirements(requirements))
for dist in needed:
self.add(dist)
return needed
def subscribe(self, callback):
"""Invoke `callback` for all distributions (including existing ones)"""
if callback in self.callbacks:
return
self.callbacks.append(callback)
for dist in self:
callback(dist)
def _added_new(self, dist):
for callback in self.callbacks:
callback(dist)
def __getstate__(self):
return (
self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
self.callbacks[:]
)
def __setstate__(self, e_k_b_c):
entries, keys, by_key, callbacks = e_k_b_c
self.entries = entries[:]
self.entry_keys = keys.copy()
self.by_key = by_key.copy()
self.callbacks = callbacks[:]
class Environment(object):
"""Searchable snapshot of distributions on a search path"""
def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
"""Snapshot distributions available on a search path
Any distributions found on `search_path` are added to the environment.
`search_path` should be a sequence of ``sys.path`` items. If not
supplied, ``sys.path`` is used.
`platform` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
unspecified, it defaults to the current platform. `python` is an
optional string naming the desired version of Python (e.g. ``'3.3'``);
it defaults to the current version.
You may explicitly set `platform` (and/or `python`) to ``None`` if you
wish to map *all* distributions, not just those compatible with the
running platform or Python version.
"""
self._distmap = {}
self._cache = {}
self.platform = platform
self.python = python
self.scan(search_path)
def can_add(self, dist):
"""Is distribution `dist` acceptable for this environment?
The distribution must match the platform and python version
requirements specified when this environment was created, or False
is returned.
"""
return (self.python is None or dist.py_version is None
or dist.py_version==self.python) \
and compatible_platforms(dist.platform,self.platform)
def remove(self, dist):
"""Remove `dist` from the environment"""
self._distmap[dist.key].remove(dist)
def scan(self, search_path=None):
"""Scan `search_path` for distributions usable in this environment
Any distributions found are added to the environment.
`search_path` should be a sequence of ``sys.path`` items. If not
supplied, ``sys.path`` is used. Only distributions conforming to
the platform/python version defined at initialization are added.
"""
if search_path is None:
search_path = sys.path
for item in search_path:
for dist in find_distributions(item):
self.add(dist)
def __getitem__(self,project_name):
"""Return a newest-to-oldest list of distributions for `project_name`
"""
try:
return self._cache[project_name]
except KeyError:
project_name = project_name.lower()
if project_name not in self._distmap:
return []
if project_name not in self._cache:
dists = self._cache[project_name] = self._distmap[project_name]
_sort_dists(dists)
return self._cache[project_name]
def add(self,dist):
"""Add `dist` if we ``can_add()`` it and it isn't already added"""
if self.can_add(dist) and dist.has_version():
dists = self._distmap.setdefault(dist.key,[])
if dist not in dists:
dists.append(dist)
if dist.key in self._cache:
_sort_dists(self._cache[dist.key])
def best_match(self, req, working_set, installer=None):
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
active in the specified `working_set`.) If a suitable distribution
isn't active, this method returns the newest distribution in the
environment that meets the ``Requirement`` in `req`. If no suitable
distribution is found, and `installer` is supplied, then the result of
calling the environment's ``obtain(req, installer)`` method will be
returned.
"""
dist = working_set.find(req)
if dist is not None:
return dist
for dist in self[req.key]:
if dist in req:
return dist
return self.obtain(req, installer) # try and download/install
def obtain(self, requirement, installer=None):
"""Obtain a distribution matching `requirement` (e.g. via download)
Obtain a distro that matches requirement (e.g. via download). In the
base ``Environment`` class, this routine just returns
``installer(requirement)``, unless `installer` is None, in which case
None is returned instead. This method is a hook that allows subclasses
to attempt other ways of obtaining a distribution before falling back
to the `installer` argument."""
if installer is not None:
return installer(requirement)
def __iter__(self):
"""Yield the unique project names of the available distributions"""
for key in self._distmap.keys():
if self[key]: yield key
def __iadd__(self, other):
"""In-place addition of a distribution or environment"""
if isinstance(other,Distribution):
self.add(other)
elif isinstance(other,Environment):
for project in other:
for dist in other[project]:
self.add(dist)
else:
raise TypeError("Can't add %r to environment" % (other,))
return self
def __add__(self, other):
"""Add an environment or distribution to an environment"""
new = self.__class__([], platform=None, python=None)
for env in self, other:
new += env
return new
AvailableDistributions = Environment # XXX backward compatibility
class ExtractionError(RuntimeError):
"""An error occurred extracting a resource
The following attributes are available from instances of this exception:
manager
The resource manager that raised this exception
cache_path
The base directory for resource extraction
original_error
The exception instance that caused extraction to fail
"""
class ResourceManager:
"""Manage resource extraction and packages"""
extraction_path = None
def __init__(self):
self.cached_files = {}
def resource_exists(self, package_or_requirement, resource_name):
"""Does the named resource exist?"""
return get_provider(package_or_requirement).has_resource(resource_name)
def resource_isdir(self, package_or_requirement, resource_name):
"""Is the named resource an existing directory?"""
return get_provider(package_or_requirement).resource_isdir(
resource_name
)
def resource_filename(self, package_or_requirement, resource_name):
"""Return a true filesystem path for specified resource"""
return get_provider(package_or_requirement).get_resource_filename(
self, resource_name
)
def resource_stream(self, package_or_requirement, resource_name):
"""Return a readable file-like object for specified resource"""
return get_provider(package_or_requirement).get_resource_stream(
self, resource_name
)
def resource_string(self, package_or_requirement, resource_name):
"""Return specified resource as a string"""
return get_provider(package_or_requirement).get_resource_string(
self, resource_name
)
def resource_listdir(self, package_or_requirement, resource_name):
"""List the contents of the named resource directory"""
return get_provider(package_or_requirement).resource_listdir(
resource_name
)
def extraction_error(self):
"""Give an error message for problems extracting file(s)"""
old_exc = sys.exc_info()[1]
cache_path = self.extraction_path or get_default_cache()
err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
cache:
%s
The Python egg cache directory is currently set to:
%s
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
)
err.manager = self
err.cache_path = cache_path
err.original_error = old_exc
raise err
def get_cache_path(self, archive_name, names=()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
not already exist. `archive_name` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
including its ".egg" extension. `names`, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
obtain an extraction location, and only for names they intend to
extract, as it tracks the generated names for possible cleanup later.
"""
extract_path = self.extraction_path or get_default_cache()
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
try:
_bypass_ensure_directory(target_path)
except:
self.extraction_error()
self._warn_unsafe_extraction_path(extract_path)
self.cached_files[target_path] = 1
return target_path
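    # For example (a sketch): get_cache_path('Foo-1.0-py2.7.egg',
    # ('images', 'logo.png')) resolves to
    # <cache>/Foo-1.0-py2.7.egg-tmp/images/logo.png, creating parent
    # directories as needed.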
@staticmethod
def _warn_unsafe_extraction_path(path):
"""
If the default extraction path is overridden and set to an insecure
location, such as /tmp, it opens up an opportunity for an attacker to
replace an extracted file with an unauthorized payload. Warn the user
if a known insecure location is used.
See Distribute #375 for more details.
"""
if os.name == 'nt' and not path.startswith(os.environ['windir']):
# On Windows, permissions are generally restrictive by default
# and temp directories are not writable by other users, so
# bypass the warning.
return
mode = os.stat(path).st_mode
if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
msg = ("%s is writable by group/others and vulnerable to attack "
"when "
"used with get_resource_filename. Consider a more secure "
"location (set with .set_extraction_path or the "
"PYTHON_EGG_CACHE environment variable)." % path)
warnings.warn(msg, UserWarning)
def postprocess(self, tempname, filename):
"""Perform any platform-specific postprocessing of `tempname`
This is where Mac header rewrites should be done; other platforms don't
have anything special they should do.
Resource providers should call this method ONLY after successfully
extracting a compressed resource. They must NOT call it on resources
that are already in the filesystem.
`tempname` is the current (temporary) name of the file, and `filename`
is the name it will be renamed to by the caller after this routine
returns.
"""
if os.name == 'posix':
# Make the resource executable
mode = ((os.stat(tempname).st_mode) | 0x16D) & 0xFFF # 0555, 07777
os.chmod(tempname, mode)
def set_extraction_path(self, path):
"""Set the base path where resources will be extracted to, if needed.
If you do not call this routine before any extractions take place, the
path defaults to the return value of ``get_default_cache()``. (Which
is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
platform-specific fallbacks. See that routine's documentation for more
details.)
Resources are extracted to subdirectories of this path based upon
information given by the ``IResourceProvider``. You may set this to a
temporary directory, but then you must call ``cleanup_resources()`` to
delete the extracted files when done. There is no guarantee that
``cleanup_resources()`` will be able to remove all extracted files.
(Note: you may not change the extraction path for a given resource
manager once resources have been extracted, unless you first call
``cleanup_resources()``.)
"""
if self.cached_files:
raise ValueError(
"Can't change extraction path, files already extracted"
)
self.extraction_path = path
def cleanup_resources(self, force=False):
"""
Delete all extracted resource files and directories, returning a list
of the file and directory names that could not be successfully removed.
This function does not have any concurrency protection, so it should
generally only be called when the extraction path is a temporary
directory exclusive to a single process. This method is not
automatically called; you must call it explicitly or register it as an
``atexit`` function if you wish to ensure cleanup of a temporary
directory used for extractions.
"""
# XXX
def get_default_cache():
"""Determine the default cache location
This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
"Application Data" directory. On all other systems, it's "~/.python-eggs".
"""
try:
return os.environ['PYTHON_EGG_CACHE']
except KeyError:
pass
if os.name!='nt':
return os.path.expanduser('~/.python-eggs')
app_data = 'Application Data' # XXX this may be locale-specific!
app_homes = [
(('APPDATA',), None), # best option, should be locale-safe
(('USERPROFILE',), app_data),
(('HOMEDRIVE','HOMEPATH'), app_data),
(('HOMEPATH',), app_data),
(('HOME',), None),
(('WINDIR',), app_data), # 95/98/ME
]
for keys, subdir in app_homes:
dirname = ''
for key in keys:
if key in os.environ:
dirname = os.path.join(dirname, os.environ[key])
else:
break
else:
if subdir:
dirname = os.path.join(dirname,subdir)
return os.path.join(dirname, 'Python-Eggs')
else:
raise RuntimeError(
"Please set the PYTHON_EGG_CACHE enviroment variable"
)
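# For example (a sketch): with PYTHON_EGG_CACHE unset on a POSIX system,
# this returns os.path.expanduser('~/.python-eggs').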
def safe_name(name):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
"""
return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
"""Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
"""
version = version.replace(' ','.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
"""Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
and the result is always lowercased.
"""
return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
def to_filename(name):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
"""
return name.replace('-','_')
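# Examples for the helpers above (computed from the regexes as written):
#
#     safe_name('hello world!')   == 'hello-world-'
#     safe_version('1.0 beta 2')  == '1.0.beta.2'
#     safe_extra('C++ bindings')  == 'c_bindings'
#     to_filename('my-dist')      == 'my_dist'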
class MarkerEvaluation(object):
values = {
'os_name': lambda: os.name,
'sys_platform': lambda: sys.platform,
'python_full_version': lambda: sys.version.split()[0],
'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]),
'platform_version': platform.version,
'platform_machine': platform.machine,
'python_implementation': platform.python_implementation,
}
@classmethod
def is_invalid_marker(cls, text):
"""
Validate text as a PEP 426 environment marker; return an exception
if invalid or False otherwise.
"""
try:
cls.evaluate_marker(text)
except SyntaxError:
return cls.normalize_exception(sys.exc_info()[1])
return False
@staticmethod
def normalize_exception(exc):
"""
Given a SyntaxError from a marker evaluation, normalize the error message:
- Remove indications of filename and line number.
- Replace platform-specific error messages with standard error messages.
"""
subs = {
'unexpected EOF while parsing': 'invalid syntax',
'parenthesis is never closed': 'invalid syntax',
}
exc.filename = None
exc.lineno = None
exc.msg = subs.get(exc.msg, exc.msg)
return exc
@classmethod
def and_test(cls, nodelist):
# MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
return functools.reduce(operator.and_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)])
@classmethod
def test(cls, nodelist):
# MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
return functools.reduce(operator.or_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)])
@classmethod
def atom(cls, nodelist):
t = nodelist[1][0]
if t == token.LPAR:
if nodelist[2][0] == token.RPAR:
raise SyntaxError("Empty parentheses")
return cls.interpret(nodelist[2])
raise SyntaxError("Language feature not supported in environment markers")
@classmethod
def comparison(cls, nodelist):
if len(nodelist)>4:
raise SyntaxError("Chained comparison not allowed in environment markers")
comp = nodelist[2][1]
cop = comp[1]
if comp[0] == token.NAME:
if len(nodelist[2]) == 3:
if cop == 'not':
cop = 'not in'
else:
cop = 'is not'
try:
cop = cls.get_op(cop)
except KeyError:
raise SyntaxError(repr(cop)+" operator not allowed in environment markers")
return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))
@classmethod
def get_op(cls, op):
ops = {
symbol.test: cls.test,
symbol.and_test: cls.and_test,
symbol.atom: cls.atom,
symbol.comparison: cls.comparison,
'not in': lambda x, y: x not in y,
'in': lambda x, y: x in y,
'==': operator.eq,
'!=': operator.ne,
}
if hasattr(symbol, 'or_test'):
ops[symbol.or_test] = cls.test
return ops[op]
@classmethod
def evaluate_marker(cls, text, extra=None):
"""
Evaluate a PEP 426 environment marker on CPython 2.4+.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
This implementation uses the 'parser' module, which is not implemented on
Jython and has been superseded by the 'ast' module in Python 2.6 and
later.
"""
return cls.interpret(parser.expr(text).totuple(1)[1])
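    # For example (a sketch): on a POSIX system,
    # MarkerEvaluation.evaluate_marker('os_name == "posix"') returns True.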
@classmethod
def _markerlib_evaluate(cls, text):
"""
Evaluate a PEP 426 environment marker using markerlib.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
"""
import _markerlib
# markerlib implements Metadata 1.2 (PEP 345) environment markers.
# Translate the variables to Metadata 2.0 (PEP 426).
env = _markerlib.default_environment()
for key in env.keys():
new_key = key.replace('.', '_')
env[new_key] = env.pop(key)
try:
result = _markerlib.interpret(text, env)
except NameError:
e = sys.exc_info()[1]
raise SyntaxError(e.args[0])
return result
if 'parser' not in globals():
# Fall back to less-complete _markerlib implementation if 'parser' module
# is not available.
evaluate_marker = _markerlib_evaluate
@classmethod
def interpret(cls, nodelist):
while len(nodelist)==2: nodelist = nodelist[1]
try:
op = cls.get_op(nodelist[0])
except KeyError:
raise SyntaxError("Comparison or logical expression expected")
return op(nodelist)
@classmethod
def evaluate(cls, nodelist):
while len(nodelist)==2: nodelist = nodelist[1]
kind = nodelist[0]
name = nodelist[1]
if kind==token.NAME:
try:
op = cls.values[name]
except KeyError:
raise SyntaxError("Unknown name %r" % name)
return op()
if kind==token.STRING:
s = nodelist[1]
if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \
or '\\' in s:
raise SyntaxError(
"Only plain strings allowed in environment markers")
return s[1:-1]
raise SyntaxError("Language feature not supported in environment markers")
invalid_marker = MarkerEvaluation.is_invalid_marker
evaluate_marker = MarkerEvaluation.evaluate_marker
class NullProvider:
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
egg_name = None
egg_info = None
loader = None
def __init__(self, module):
self.loader = getattr(module, '__loader__', None)
self.module_path = os.path.dirname(getattr(module, '__file__', ''))
def get_resource_filename(self, manager, resource_name):
return self._fn(self.module_path, resource_name)
def get_resource_stream(self, manager, resource_name):
return BytesIO(self.get_resource_string(manager, resource_name))
def get_resource_string(self, manager, resource_name):
return self._get(self._fn(self.module_path, resource_name))
def has_resource(self, resource_name):
return self._has(self._fn(self.module_path, resource_name))
def has_metadata(self, name):
return self.egg_info and self._has(self._fn(self.egg_info,name))
if sys.version_info <= (3,):
def get_metadata(self, name):
if not self.egg_info:
return ""
return self._get(self._fn(self.egg_info,name))
else:
def get_metadata(self, name):
if not self.egg_info:
return ""
return self._get(self._fn(self.egg_info,name)).decode("utf-8")
def get_metadata_lines(self, name):
return yield_lines(self.get_metadata(name))
def resource_isdir(self,resource_name):
return self._isdir(self._fn(self.module_path, resource_name))
def metadata_isdir(self,name):
return self.egg_info and self._isdir(self._fn(self.egg_info,name))
def resource_listdir(self,resource_name):
return self._listdir(self._fn(self.module_path,resource_name))
def metadata_listdir(self,name):
if self.egg_info:
return self._listdir(self._fn(self.egg_info,name))
return []
def run_script(self,script_name,namespace):
script = 'scripts/'+script_name
if not self.has_metadata(script):
raise ResolutionError("No script named %r" % script_name)
script_text = self.get_metadata(script).replace('\r\n','\n')
script_text = script_text.replace('\r','\n')
script_filename = self._fn(self.egg_info,script)
namespace['__file__'] = script_filename
if os.path.exists(script_filename):
execfile(script_filename, namespace, namespace)
else:
from linecache import cache
cache[script_filename] = (
len(script_text), 0, script_text.split('\n'), script_filename
)
script_code = compile(script_text,script_filename,'exec')
exec(script_code, namespace, namespace)
def _has(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _isdir(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _listdir(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _fn(self, base, resource_name):
if resource_name:
return os.path.join(base, *resource_name.split('/'))
return base
def _get(self, path):
if hasattr(self.loader, 'get_data'):
return self.loader.get_data(path)
raise NotImplementedError(
"Can't perform this operation for loaders without 'get_data()'"
)
register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
"""Provider based on a virtual filesystem"""
def __init__(self,module):
NullProvider.__init__(self,module)
self._setup_prefix()
def _setup_prefix(self):
# we assume here that our metadata may be nested inside a "basket"
# of multiple eggs; that's why we use module_path instead of .archive
path = self.module_path
old = None
while path!=old:
if path.lower().endswith('.egg'):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
break
old = path
path, base = os.path.split(path)
class DefaultProvider(EggProvider):
"""Provides access to package resources in the filesystem"""
def _has(self, path):
return os.path.exists(path)
def _isdir(self,path):
return os.path.isdir(path)
def _listdir(self,path):
return os.listdir(path)
def get_resource_stream(self, manager, resource_name):
return open(self._fn(self.module_path, resource_name), 'rb')
def _get(self, path):
stream = open(path, 'rb')
try:
return stream.read()
finally:
stream.close()
register_loader_type(type(None), DefaultProvider)
if importlib_bootstrap is not None:
register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider)
class EmptyProvider(NullProvider):
"""Provider that returns nothing for all requests"""
_isdir = _has = lambda self,path: False
_get = lambda self,path: ''
_listdir = lambda self,path: []
module_path = None
def __init__(self):
pass
empty_provider = EmptyProvider()
def build_zipmanifest(path):
"""
This builds a similar dictionary to the zipimport directory
caches. However instead of tuples, ZipInfo objects are stored.
The translation of the tuple is as follows:
* [0] - zipinfo.filename on stock pythons this needs "/" --> os.sep
on pypy it is the same (one reason why distribute did work
in some cases on pypy and win32).
* [1] - zipinfo.compress_type
* [2] - zipinfo.compress_size
* [3] - zipinfo.file_size
* [4] - len(utf-8 encoding of filename) if zipinfo & 0x800
len(ascii encoding of filename) otherwise
* [5] - (zipinfo.date_time[0] - 1980) << 9 |
zipinfo.date_time[1] << 5 | zipinfo.date_time[2]
* [6] - (zipinfo.date_time[3] - 1980) << 11 |
zipinfo.date_time[4] << 5 | (zipinfo.date_time[5] // 2)
* [7] - zipinfo.CRC
"""
zipinfo = dict()
zfile = zipfile.ZipFile(path)
# ZipFile has no __exit__ on Python 3.1, so close it via try/finally
try:
for zitem in zfile.namelist():
zpath = zitem.replace('/', os.sep)
zipinfo[zpath] = zfile.getinfo(zitem)
assert zipinfo[zpath] is not None
finally:
zfile.close()
return zipinfo
class ZipProvider(EggProvider):
"""Resource support for zips and eggs"""
eagers = None
def __init__(self, module):
EggProvider.__init__(self,module)
self.zipinfo = build_zipmanifest(self.loader.archive)
self.zip_pre = self.loader.archive+os.sep
def _zipinfo_name(self, fspath):
# Convert a virtual filename (full path to file) into a zipfile subpath
# usable with the zipimport directory cache for our target archive
if fspath.startswith(self.zip_pre):
return fspath[len(self.zip_pre):]
raise AssertionError(
"%s is not a subpath of %s" % (fspath,self.zip_pre)
)
def _parts(self,zip_path):
# Convert a zipfile subpath into an egg-relative path part list
fspath = self.zip_pre+zip_path # pseudo-fs path
if fspath.startswith(self.egg_root+os.sep):
return fspath[len(self.egg_root)+1:].split(os.sep)
raise AssertionError(
"%s is not a subpath of %s" % (fspath,self.egg_root)
)
def get_resource_filename(self, manager, resource_name):
if not self.egg_name:
raise NotImplementedError(
"resource_filename() only supported for .egg, not .zip"
)
# no need to lock for extraction, since we use temp names
zip_path = self._resource_to_zip(resource_name)
eagers = self._get_eager_resources()
if '/'.join(self._parts(zip_path)) in eagers:
for name in eagers:
self._extract_resource(manager, self._eager_to_zip(name))
return self._extract_resource(manager, zip_path)
@staticmethod
def _get_date_and_size(zip_stat):
size = zip_stat.file_size
date_time = zip_stat.date_time + (0, 0, -1) # ymdhms+wday, yday, dst
#1980 offset already done
timestamp = time.mktime(date_time)
return timestamp, size
def _extract_resource(self, manager, zip_path):
if zip_path in self._index():
for name in self._index()[zip_path]:
last = self._extract_resource(
manager, os.path.join(zip_path, name)
)
return os.path.dirname(last) # return the extracted directory name
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
if not WRITE_SUPPORT:
raise IOError('"os.rename" and "os.unlink" are not supported '
'on this platform')
try:
real_path = manager.get_cache_path(
self.egg_name, self._parts(zip_path)
)
if self._is_current(real_path, zip_path):
return real_path
outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
os.write(outf, self.loader.get_data(zip_path))
os.close(outf)
utime(tmpnam, (timestamp,timestamp))
manager.postprocess(tmpnam, real_path)
try:
rename(tmpnam, real_path)
except os.error:
if os.path.isfile(real_path):
if self._is_current(real_path, zip_path):
# the file became current since it was checked above,
# so proceed.
return real_path
elif os.name=='nt': # Windows, del old file and retry
unlink(real_path)
rename(tmpnam, real_path)
return real_path
raise
except os.error:
manager.extraction_error() # report a user-friendly error
return real_path
def _is_current(self, file_path, zip_path):
"""
Return True if the file_path is current for this zip_path
"""
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
if not os.path.isfile(file_path):
return False
stat = os.stat(file_path)
if stat.st_size!=size or stat.st_mtime!=timestamp:
return False
# check that the contents match
zip_contents = self.loader.get_data(zip_path)
f = open(file_path, 'rb')
file_contents = f.read()
f.close()
return zip_contents == file_contents
def _get_eager_resources(self):
if self.eagers is None:
eagers = []
for name in ('native_libs.txt', 'eager_resources.txt'):
if self.has_metadata(name):
eagers.extend(self.get_metadata_lines(name))
self.eagers = eagers
return self.eagers
def _index(self):
try:
return self._dirindex
except AttributeError:
ind = {}
for path in self.zipinfo:
parts = path.split(os.sep)
while parts:
parent = os.sep.join(parts[:-1])
if parent in ind:
ind[parent].append(parts[-1])
break
else:
ind[parent] = [parts.pop()]
self._dirindex = ind
return ind
def _has(self, fspath):
zip_path = self._zipinfo_name(fspath)
return zip_path in self.zipinfo or zip_path in self._index()
def _isdir(self,fspath):
return self._zipinfo_name(fspath) in self._index()
def _listdir(self,fspath):
return list(self._index().get(self._zipinfo_name(fspath), ()))
def _eager_to_zip(self,resource_name):
return self._zipinfo_name(self._fn(self.egg_root,resource_name))
def _resource_to_zip(self,resource_name):
return self._zipinfo_name(self._fn(self.module_path,resource_name))
register_loader_type(zipimport.zipimporter, ZipProvider)
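# Illustrative sketch (added for exposition; not part of the original
# module). ZipProvider._extract_resource above relies on the classic
# atomic-replace idiom: write to a temp file in the destination
# directory, then rename() over the target so readers never observe a
# partially written file. A minimal standalone version, stdlib only:
def _atomic_write_example(target_path, data):
    import tempfile
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(target_path))
    os.write(fd, data)
    os.close(fd)
    os.rename(tmp, target_path)  # atomic on POSIX; may need unlink first on Windows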
class FileMetadata(EmptyProvider):
"""Metadata handler for standalone PKG-INFO files
Usage::
metadata = FileMetadata("/path/to/PKG-INFO")
This provider rejects all data and metadata requests except for PKG-INFO,
which is treated as existing, and will be the contents of the file at
the provided location.
"""
def __init__(self,path):
self.path = path
def has_metadata(self,name):
return name=='PKG-INFO'
def get_metadata(self,name):
if name=='PKG-INFO':
f = open(self.path,'rU')
metadata = f.read()
f.close()
return metadata
raise KeyError("No metadata except PKG-INFO is available")
def get_metadata_lines(self,name):
return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
"""Metadata provider for egg directories
Usage::
# Development eggs:
egg_info = "/path/to/PackageName.egg-info"
base_dir = os.path.dirname(egg_info)
metadata = PathMetadata(base_dir, egg_info)
dist_name = os.path.splitext(os.path.basename(egg_info))[0]
dist = Distribution(basedir,project_name=dist_name,metadata=metadata)
# Unpacked egg directories:
egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
dist = Distribution.from_filename(egg_path, metadata=metadata)
"""
def __init__(self, path, egg_info):
self.module_path = path
self.egg_info = egg_info
class EggMetadata(ZipProvider):
"""Metadata provider for .egg files"""
def __init__(self, importer):
"""Create a metadata provider from a zipimporter"""
self.zipinfo = build_zipmanifest(importer.archive)
self.zip_pre = importer.archive+os.sep
self.loader = importer
if importer.prefix:
self.module_path = os.path.join(importer.archive, importer.prefix)
else:
self.module_path = importer.archive
self._setup_prefix()
_declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
"""Register `distribution_finder` to find distributions in sys.path items
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
handler), and `distribution_finder` is a callable that, passed a path
item and the importer instance, yields ``Distribution`` instances found on
that path item. See ``pkg_resources.find_on_path`` for an example."""
_distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
"""Yield distributions accessible via `path_item`"""
importer = get_importer(path_item)
finder = _find_adapter(_distribution_finders, importer)
return finder(importer, path_item, only)
def find_eggs_in_zip(importer, path_item, only=False):
"""
Find eggs in zip files; possibly multiple nested eggs.
"""
if importer.archive.endswith('.whl'):
# wheels are not supported with this finder
# they don't have PKG-INFO metadata, and won't ever contain eggs
return
metadata = EggMetadata(importer)
if metadata.has_metadata('PKG-INFO'):
yield Distribution.from_filename(path_item, metadata=metadata)
if only:
return # don't yield nested distros
for subitem in metadata.resource_listdir('/'):
if subitem.endswith('.egg'):
subpath = os.path.join(path_item, subitem)
for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
yield dist
register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False):
return ()
register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
"""Yield distributions accessible on a sys.path directory"""
path_item = _normalize_cached(path_item)
if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
if path_item.lower().endswith('.egg'):
# unpacked egg
yield Distribution.from_filename(
path_item, metadata=PathMetadata(
path_item, os.path.join(path_item,'EGG-INFO')
)
)
else:
# scan for .egg and .egg-info in directory
for entry in os.listdir(path_item):
lower = entry.lower()
if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
fullpath = os.path.join(path_item, entry)
if os.path.isdir(fullpath):
# egg-info directory, allow getting metadata
metadata = PathMetadata(path_item, fullpath)
else:
metadata = FileMetadata(fullpath)
yield Distribution.from_location(
path_item,entry,metadata,precedence=DEVELOP_DIST
)
elif not only and lower.endswith('.egg'):
for dist in find_distributions(os.path.join(path_item, entry)):
yield dist
elif not only and lower.endswith('.egg-link'):
entry_file = open(os.path.join(path_item, entry))
try:
entry_lines = entry_file.readlines()
finally:
entry_file.close()
for line in entry_lines:
if not line.strip(): continue
for item in find_distributions(os.path.join(path_item,line.rstrip())):
yield item
break
register_finder(pkgutil.ImpImporter,find_on_path)
if importlib_bootstrap is not None:
register_finder(importlib_bootstrap.FileFinder, find_on_path)
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
def register_namespace_handler(importer_type, namespace_handler):
"""Register `namespace_handler` to declare namespace packages
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
handler), and `namespace_handler` is a callable like this::
def namespace_handler(importer,path_entry,moduleName,module):
# return a path_entry to use for child packages
Namespace handlers are only called if the importer object has already
agreed that it can handle the relevant path item, and they should only
return a subpath if the module __path__ does not already contain an
equivalent subpath. For an example namespace handler, see
``pkg_resources.file_ns_handler``.
"""
_namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
"""Ensure that named package includes a subpath of path_item (if needed)"""
importer = get_importer(path_item)
if importer is None:
return None
loader = importer.find_module(packageName)
if loader is None:
return None
module = sys.modules.get(packageName)
if module is None:
module = sys.modules[packageName] = imp.new_module(packageName)
module.__path__ = []
_set_parent_ns(packageName)
elif not hasattr(module,'__path__'):
raise TypeError("Not a package:", packageName)
handler = _find_adapter(_namespace_handlers, importer)
subpath = handler(importer, path_item, packageName, module)
if subpath is not None:
path = module.__path__
path.append(subpath)
loader.load_module(packageName)
for path_item in path:
if path_item not in module.__path__:
module.__path__.append(path_item)
return subpath
def declare_namespace(packageName):
"""Declare that package 'packageName' is a namespace package"""
imp.acquire_lock()
try:
if packageName in _namespace_packages:
return
path, parent = sys.path, None
if '.' in packageName:
parent = '.'.join(packageName.split('.')[:-1])
declare_namespace(parent)
if parent not in _namespace_packages:
__import__(parent)
try:
path = sys.modules[parent].__path__
except AttributeError:
raise TypeError("Not a package:", parent)
# Track what packages are namespaces, so when new path items are added,
# they can be updated
_namespace_packages.setdefault(parent,[]).append(packageName)
_namespace_packages.setdefault(packageName,[])
for path_item in path:
# Ensure all the parent's path items are reflected in the child,
# if they apply
_handle_ns(packageName, path_item)
finally:
imp.release_lock()
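# Illustrative sketch (added for exposition; not part of the original
# module). Historically a namespace package's __init__.py called
# declare_namespace so the package could span several sys.path entries;
# 'zc' below is a hypothetical package name:
#
#     # zc/__init__.py
#     __import__('pkg_resources').declare_namespace(__name__)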
def fixup_namespace_packages(path_item, parent=None):
"""Ensure that previously-declared namespace packages include path_item"""
imp.acquire_lock()
try:
for package in _namespace_packages.get(parent,()):
subpath = _handle_ns(package, path_item)
if subpath: fixup_namespace_packages(subpath,package)
finally:
imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
subpath = os.path.join(path_item, packageName.split('.')[-1])
normalized = _normalize_cached(subpath)
for item in module.__path__:
if _normalize_cached(item)==normalized:
break
else:
# Only return the path if it's not already there
return subpath
register_namespace_handler(pkgutil.ImpImporter,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
if importlib_bootstrap is not None:
register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
return None
register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(filename))
def _normalize_cached(filename,_cache={}):
try:
return _cache[filename]
except KeyError:
_cache[filename] = result = normalize_path(filename)
return result
def _set_parent_ns(packageName):
parts = packageName.split('.')
name = parts.pop()
if parts:
parent = '.'.join(parts)
setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
"""Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
if isinstance(strs,basestring):
for s in strs.splitlines():
s = s.strip()
if s and not s.startswith('#'): # skip blank lines/comments
yield s
else:
for ss in strs:
for s in yield_lines(ss):
yield s
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
r"(?P<name>[^-]+)"
r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
re.VERBOSE | re.IGNORECASE
).match
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
for part in component_re.split(s):
part = replace(part,part)
if not part or part=='.':
continue
if part[:1] in '0123456789':
yield part.zfill(8) # pad for numeric comparison
else:
yield '*'+part
yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
"""Convert a version string to a chronologically-sortable key
This is a rough cross between distutils' StrictVersion and LooseVersion;
if you give it versions that would work with StrictVersion, then it behaves
the same; otherwise it acts like a slightly-smarter LooseVersion. It is
*possible* to create pathological version coding schemes that will fool
this parser, but they should be very rare in practice.
The returned value will be a tuple of strings. Numeric portions of the
version are padded to 8 digits so they will compare numerically, but
without relying on how numbers compare relative to strings. Dots are
dropped, but dashes are retained. Trailing zeros between alpha segments
or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
"2.4". Alphanumeric parts are lower-cased.
The algorithm assumes that strings like "-" and any alpha string that
alphabetically follows "final" represents a "patch level". So, "2.4-1"
is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
considered newer than "2.4-1", which in turn is newer than "2.4".
Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
come before "final" alphabetically) are assumed to be pre-release versions,
so that the version "2.4" is considered newer than "2.4a1".
Finally, to handle miscellaneous cases, the strings "pre", "preview", and
"rc" are treated as if they were "c", i.e. as though they were release
candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower
    than any other pre-release tag.
"""
parts = []
for part in _parse_version_parts(s.lower()):
if part.startswith('*'):
if part<'*final': # remove '-' before a prerelease tag
while parts and parts[-1]=='*final-': parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1]=='00000000':
parts.pop()
parts.append(part)
return tuple(parts)
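# Illustrative sketch (added for exposition; not part of the original
# module). The ordering rules described in the docstring, checked with
# the function defined above:
def _parse_version_example():
    assert parse_version('2.4.0') == parse_version('2.4')  # trailing zeros dropped
    assert parse_version('2.4a1') < parse_version('2.4')   # pre-release sorts lower
    assert parse_version('2.4-1') > parse_version('2.4')   # patch level sorts higher
    assert parse_version('2.4.1') > parse_version('2.4-1')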
class EntryPoint(object):
"""Object representing an advertised importable object"""
def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
if not MODULE(module_name):
raise ValueError("Invalid module name", module_name)
self.name = name
self.module_name = module_name
self.attrs = tuple(attrs)
self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
self.dist = dist
def __str__(self):
s = "%s = %s" % (self.name, self.module_name)
if self.attrs:
s += ':' + '.'.join(self.attrs)
if self.extras:
s += ' [%s]' % ','.join(self.extras)
return s
def __repr__(self):
return "EntryPoint.parse(%r)" % str(self)
def load(self, require=True, env=None, installer=None):
if require: self.require(env, installer)
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
for attr in self.attrs:
try:
entry = getattr(entry,attr)
except AttributeError:
raise ImportError("%r has no %r attribute" % (entry,attr))
return entry
def require(self, env=None, installer=None):
if self.extras and not self.dist:
raise UnknownExtra("Can't require() without a distribution", self)
list(map(working_set.add,
working_set.resolve(self.dist.requires(self.extras),env,installer)))
@classmethod
def parse(cls, src, dist=None):
"""Parse a single entry point from string `src`
Entry point syntax follows the form::
name = some.module:some.attr [extra1,extra2]
The entry name and module name are required, but the ``:attrs`` and
``[extras]`` parts are optional
"""
try:
attrs = extras = ()
name,value = src.split('=',1)
if '[' in value:
value,extras = value.split('[',1)
req = Requirement.parse("x["+extras)
if req.specs: raise ValueError
extras = req.extras
if ':' in value:
value,attrs = value.split(':',1)
if not MODULE(attrs.rstrip()):
raise ValueError
attrs = attrs.rstrip().split('.')
except ValueError:
raise ValueError(
"EntryPoint must be in 'name=module:attrs [extras]' format",
src
)
else:
return cls(name.strip(), value.strip(), attrs, extras, dist)
@classmethod
def parse_group(cls, group, lines, dist=None):
"""Parse an entry point group"""
if not MODULE(group):
raise ValueError("Invalid group name", group)
this = {}
for line in yield_lines(lines):
ep = cls.parse(line, dist)
if ep.name in this:
raise ValueError("Duplicate entry point", group, ep.name)
this[ep.name]=ep
return this
@classmethod
def parse_map(cls, data, dist=None):
"""Parse a map of entry point groups"""
if isinstance(data,dict):
data = data.items()
else:
data = split_sections(data)
maps = {}
for group, lines in data:
if group is None:
if not lines:
continue
raise ValueError("Entry points must be listed in groups")
group = group.strip()
if group in maps:
raise ValueError("Duplicate group name", group)
maps[group] = cls.parse_group(group, lines, dist)
return maps
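# Illustrative sketch (added for exposition; not part of the original
# module). EntryPoint.parse consumes the 'name = module:attrs [extras]'
# syntax documented above; 'mytool' and 'mypkg' are hypothetical names:
def _entry_point_example():
    ep = EntryPoint.parse('mytool = mypkg.cli:main [extra1]')
    assert ep.name == 'mytool'
    assert ep.module_name == 'mypkg.cli'
    assert ep.attrs == ('main',)
    assert ep.extras == ('extra1',)
    # ep.load() would import mypkg.cli and return its 'main' attribute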
def _remove_md5_fragment(location):
if not location:
return ''
parsed = urlparse(location)
if parsed[-1].startswith('md5='):
return urlunparse(parsed[:-1] + ('',))
return location
class Distribution(object):
"""Wrap an actual or potential sys.path entry w/metadata"""
PKG_INFO = 'PKG-INFO'
def __init__(self, location=None, metadata=None, project_name=None,
version=None, py_version=PY_MAJOR, platform=None,
precedence=EGG_DIST):
self.project_name = safe_name(project_name or 'Unknown')
if version is not None:
self._version = safe_version(version)
self.py_version = py_version
self.platform = platform
self.location = location
self.precedence = precedence
self._provider = metadata or empty_provider
@classmethod
def from_location(cls,location,basename,metadata=None,**kw):
project_name, version, py_version, platform = [None]*4
basename, ext = os.path.splitext(basename)
if ext.lower() in _distributionImpl:
# .dist-info gets much metadata differently
match = EGG_NAME(basename)
if match:
project_name, version, py_version, platform = match.group(
'name','ver','pyver','plat'
)
cls = _distributionImpl[ext.lower()]
return cls(
location, metadata, project_name=project_name, version=version,
py_version=py_version, platform=platform, **kw
)
hashcmp = property(
lambda self: (
getattr(self,'parsed_version',()),
self.precedence,
self.key,
_remove_md5_fragment(self.location),
self.py_version,
self.platform
)
)
def __hash__(self): return hash(self.hashcmp)
def __lt__(self, other):
return self.hashcmp < other.hashcmp
def __le__(self, other):
return self.hashcmp <= other.hashcmp
def __gt__(self, other):
return self.hashcmp > other.hashcmp
def __ge__(self, other):
return self.hashcmp >= other.hashcmp
def __eq__(self, other):
if not isinstance(other, self.__class__):
# It's not a Distribution, so they are not equal
return False
return self.hashcmp == other.hashcmp
def __ne__(self, other):
return not self == other
# These properties have to be lazy so that we don't have to load any
# metadata until/unless it's actually needed. (i.e., some distributions
# may not know their name or version without loading PKG-INFO)
@property
def key(self):
try:
return self._key
except AttributeError:
self._key = key = self.project_name.lower()
return key
@property
def parsed_version(self):
try:
return self._parsed_version
except AttributeError:
self._parsed_version = pv = parse_version(self.version)
return pv
@property
def version(self):
try:
return self._version
except AttributeError:
for line in self._get_metadata(self.PKG_INFO):
if line.lower().startswith('version:'):
self._version = safe_version(line.split(':',1)[1].strip())
return self._version
else:
raise ValueError(
"Missing 'Version:' header and/or %s file" % self.PKG_INFO, self
)
@property
def _dep_map(self):
try:
return self.__dep_map
except AttributeError:
dm = self.__dep_map = {None: []}
for name in 'requires.txt', 'depends.txt':
for extra,reqs in split_sections(self._get_metadata(name)):
if extra:
if ':' in extra:
extra, marker = extra.split(':',1)
if invalid_marker(marker):
reqs=[] # XXX warn
elif not evaluate_marker(marker):
reqs=[]
extra = safe_extra(extra) or None
dm.setdefault(extra,[]).extend(parse_requirements(reqs))
return dm
def requires(self,extras=()):
"""List of Requirements needed for this distro if `extras` are used"""
dm = self._dep_map
deps = []
deps.extend(dm.get(None,()))
for ext in extras:
try:
deps.extend(dm[safe_extra(ext)])
except KeyError:
raise UnknownExtra(
"%s has no such extra feature %r" % (self, ext)
)
return deps
def _get_metadata(self,name):
if self.has_metadata(name):
for line in self.get_metadata_lines(name):
yield line
def activate(self,path=None):
"""Ensure distribution is importable on `path` (default=sys.path)"""
if path is None: path = sys.path
self.insert_on(path)
if path is sys.path:
fixup_namespace_packages(self.location)
list(map(declare_namespace, self._get_metadata('namespace_packages.txt')))
def egg_name(self):
"""Return what this distribution's standard .egg filename should be"""
filename = "%s-%s-py%s" % (
to_filename(self.project_name), to_filename(self.version),
self.py_version or PY_MAJOR
)
if self.platform:
filename += '-'+self.platform
return filename
def __repr__(self):
if self.location:
return "%s (%s)" % (self,self.location)
else:
return str(self)
def __str__(self):
try: version = getattr(self,'version',None)
except ValueError: version = None
version = version or "[unknown version]"
return "%s %s" % (self.project_name,version)
def __getattr__(self,attr):
"""Delegate all unrecognized public attributes to .metadata provider"""
if attr.startswith('_'):
raise AttributeError(attr)
return getattr(self._provider, attr)
@classmethod
def from_filename(cls,filename,metadata=None, **kw):
return cls.from_location(
_normalize_cached(filename), os.path.basename(filename), metadata,
**kw
)
def as_requirement(self):
"""Return a ``Requirement`` that matches this distribution exactly"""
return Requirement.parse('%s==%s' % (self.project_name, self.version))
def load_entry_point(self, group, name):
"""Return the `name` entry point of `group` or raise ImportError"""
ep = self.get_entry_info(group,name)
if ep is None:
raise ImportError("Entry point %r not found" % ((group,name),))
return ep.load()
def get_entry_map(self, group=None):
"""Return the entry point map for `group`, or the full entry map"""
try:
ep_map = self._ep_map
except AttributeError:
ep_map = self._ep_map = EntryPoint.parse_map(
self._get_metadata('entry_points.txt'), self
)
if group is not None:
return ep_map.get(group,{})
return ep_map
def get_entry_info(self, group, name):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return self.get_entry_map(group).get(name)
def insert_on(self, path, loc = None):
"""Insert self.location in path before its nearest parent directory"""
loc = loc or self.location
if not loc:
return
nloc = _normalize_cached(loc)
bdir = os.path.dirname(nloc)
npath= [(p and _normalize_cached(p) or p) for p in path]
for p, item in enumerate(npath):
if item==nloc:
break
elif item==bdir and self.precedence==EGG_DIST:
# if it's an .egg, give it precedence over its directory
if path is sys.path:
self.check_version_conflict()
path.insert(p, loc)
npath.insert(p, nloc)
break
else:
if path is sys.path:
self.check_version_conflict()
path.append(loc)
return
# p is the spot where we found or inserted loc; now remove duplicates
while 1:
try:
np = npath.index(nloc, p+1)
except ValueError:
break
else:
del npath[np], path[np]
p = np # ha!
return
def check_version_conflict(self):
if self.key=='setuptools':
return # ignore the inevitable setuptools self-conflicts :(
nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
loc = normalize_path(self.location)
for modname in self._get_metadata('top_level.txt'):
if (modname not in sys.modules or modname in nsp
or modname in _namespace_packages):
continue
if modname in ('pkg_resources', 'setuptools', 'site'):
continue
fn = getattr(sys.modules[modname], '__file__', None)
if fn and (normalize_path(fn).startswith(loc) or
fn.startswith(self.location)):
continue
issue_warning(
"Module %s was already imported from %s, but %s is being added"
" to sys.path" % (modname, fn, self.location),
)
def has_version(self):
try:
self.version
except ValueError:
issue_warning("Unbuilt egg for "+repr(self))
return False
return True
def clone(self,**kw):
"""Copy this distribution, substituting in any changed keyword args"""
for attr in (
'project_name', 'version', 'py_version', 'platform', 'location',
'precedence'
):
kw.setdefault(attr, getattr(self,attr,None))
kw.setdefault('metadata', self._provider)
return self.__class__(**kw)
@property
def extras(self):
return [dep for dep in self._dep_map if dep]
class DistInfoDistribution(Distribution):
"""Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
PKG_INFO = 'METADATA'
EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
@property
def _parsed_pkg_info(self):
"""Parse and cache metadata"""
try:
return self._pkg_info
except AttributeError:
from email.parser import Parser
self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO))
return self._pkg_info
@property
def _dep_map(self):
try:
return self.__dep_map
except AttributeError:
self.__dep_map = self._compute_dependencies()
return self.__dep_map
def _preparse_requirement(self, requires_dist):
"""Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
Split environment marker, add == prefix to version specifiers as
        necessary, and remove parentheses.
"""
parts = requires_dist.split(';', 1) + ['']
distvers = parts[0].strip()
mark = parts[1].strip()
distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
distvers = distvers.replace('(', '').replace(')', '')
return (distvers, mark)
def _compute_dependencies(self):
"""Recompute this distribution's dependencies."""
from _markerlib import compile as compile_marker
dm = self.__dep_map = {None: []}
reqs = []
# Including any condition expressions
for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
distvers, mark = self._preparse_requirement(req)
parsed = next(parse_requirements(distvers))
parsed.marker_fn = compile_marker(mark)
reqs.append(parsed)
def reqs_for_extra(extra):
for req in reqs:
if req.marker_fn(override={'extra':extra}):
yield req
common = frozenset(reqs_for_extra(None))
dm[None].extend(common)
for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
extra = safe_extra(extra.strip())
dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
return dm
_distributionImpl = {
'.egg': Distribution,
'.egg-info': Distribution,
'.dist-info': DistInfoDistribution,
}
def issue_warning(*args,**kw):
level = 1
g = globals()
try:
# find the first stack frame that is *not* code in
# the pkg_resources module, to use for the warning
while sys._getframe(level).f_globals is g:
level += 1
except ValueError:
pass
from warnings import warn
warn(stacklevel = level+1, *args, **kw)
def parse_requirements(strs):
"""Yield ``Requirement`` objects for each specification in `strs`
`strs` must be an instance of ``basestring``, or a (possibly-nested)
iterable thereof.
"""
# create a steppable iterator, so we can handle \-continuations
lines = iter(yield_lines(strs))
def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
items = []
while not TERMINATOR(line,p):
if CONTINUE(line,p):
try:
line = next(lines)
p = 0
except StopIteration:
raise ValueError(
"\\ must not appear on the last nonblank line"
)
match = ITEM(line,p)
if not match:
raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
items.append(match.group(*groups))
p = match.end()
match = COMMA(line,p)
if match:
p = match.end() # skip the comma
elif not TERMINATOR(line,p):
raise ValueError(
"Expected ',' or end-of-list in",line,"at",line[p:]
)
match = TERMINATOR(line,p)
if match: p = match.end() # skip the terminator, if any
return line, p, items
for line in lines:
match = DISTRO(line)
if not match:
raise ValueError("Missing distribution spec", line)
project_name = match.group(1)
p = match.end()
extras = []
match = OBRACKET(line,p)
if match:
p = match.end()
line, p, extras = scan_list(
DISTRO, CBRACKET, line, p, (1,), "'extra' name"
)
line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
specs = [(op,safe_version(val)) for op,val in specs]
yield Requirement(project_name, specs, extras)
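# Illustrative sketch (added for exposition; not part of the original
# module). parse_requirements yields Requirement objects from specifier
# strings; 'FooBar' is a hypothetical project name:
def _parse_requirements_example():
    req, = parse_requirements('FooBar[extra1]>=1.2,<2.0')
    assert req.project_name == 'FooBar' and req.key == 'foobar'
    assert req.extras == ('extra1',)
    assert ('>=', '1.2') in req.specs and ('<', '2.0') in req.specs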
def _sort_dists(dists):
tmp = [(dist.hashcmp,dist) for dist in dists]
tmp.sort()
dists[::-1] = [d for hc,d in tmp]
class Requirement:
def __init__(self, project_name, specs, extras):
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
self.unsafe_name, project_name = project_name, safe_name(project_name)
self.project_name, self.key = project_name, project_name.lower()
index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
index.sort()
self.specs = [(op,ver) for parsed,trans,op,ver in index]
self.index, self.extras = index, tuple(map(safe_extra,extras))
self.hashCmp = (
self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
frozenset(self.extras)
)
self.__hash = hash(self.hashCmp)
def __str__(self):
specs = ','.join([''.join(s) for s in self.specs])
extras = ','.join(self.extras)
if extras: extras = '[%s]' % extras
return '%s%s%s' % (self.project_name, extras, specs)
def __eq__(self,other):
return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
def __contains__(self,item):
if isinstance(item,Distribution):
if item.key != self.key: return False
if self.index: item = item.parsed_version # only get if we need it
elif isinstance(item,basestring):
item = parse_version(item)
last = None
compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
for parsed,trans,op,ver in self.index:
action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
if action=='F':
return False
elif action=='T':
return True
elif action=='+':
last = True
elif action=='-' or last is None: last = False
if last is None: last = True # no rules encountered
return last
def __hash__(self):
return self.__hash
def __repr__(self): return "Requirement.parse(%r)" % str(self)
@staticmethod
def parse(s):
reqs = list(parse_requirements(s))
if reqs:
if len(reqs)==1:
return reqs[0]
raise ValueError("Expected only one requirement", s)
raise ValueError("No requirements found", s)
state_machine = {
# =><
'<': '--T',
'<=': 'T-T',
'>': 'F+F',
'>=': 'T+F',
'==': 'T..',
'!=': 'F++',
}
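# Explanatory sketch (added for exposition; not part of the original
# module). Each state_machine value is a three-character string indexed
# by the result of comparing a candidate version against a spec version
# (index 0: equal, 1: greater, -1: less). 'T' accepts, 'F' rejects,
# '+'/'-' tentatively accept/reject, and '.' keeps the previous
# tentative answer; Requirement.__contains__ walks the sorted specs
# with this table. 'FooBar' below is a hypothetical project name:
def _requirement_contains_example():
    req = Requirement.parse('FooBar>=1.0,!=1.5,<2.0')
    assert '1.4' in req
    assert '1.5' not in req
    assert '2.0' not in req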
def _get_mro(cls):
"""Get an mro for a type or classic class"""
if not isinstance(cls,type):
class cls(cls,object): pass
return cls.__mro__[1:]
return cls.__mro__
def _find_adapter(registry, ob):
"""Return an adapter factory for `ob` from `registry`"""
for t in _get_mro(getattr(ob, '__class__', type(ob))):
if t in registry:
return registry[t]
def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
def split_sections(s):
"""Split a string or iterable thereof into (section,content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
and each ``content`` is a list of stripped lines excluding blank lines and
comment-only lines. If there are any such lines before the first section
header, they're returned in a first ``section`` of ``None``.
"""
section = None
content = []
for line in yield_lines(s):
if line.startswith("["):
if line.endswith("]"):
if section or content:
yield section, content
section = line[1:-1].strip()
content = []
else:
raise ValueError("Invalid section heading", line)
else:
content.append(line)
# wrap up last segment
yield section, content
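# Illustrative sketch (added for exposition; not part of the original
# module). split_sections yields (section, lines) pairs, with any
# pre-section lines grouped under a section of None:
def _split_sections_example():
    text = "top\n[sec1]\na\nb\n[sec2]\nc\n"
    assert list(split_sections(text)) == [
        (None, ['top']), ('sec1', ['a', 'b']), ('sec2', ['c'])]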
def _mkstemp(*args,**kw):
from tempfile import mkstemp
old_open = os.open
try:
os.open = os_open # temporarily bypass sandboxing
return mkstemp(*args,**kw)
finally:
os.open = old_open # and then put it back
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
def _initialize(g):
for name in dir(_manager):
if not name.startswith('_'):
g[name] = getattr(_manager, name)
_initialize(globals())
# Prepare the master working set and make the ``require()`` API available
_declare_state('object', working_set = WorkingSet())
try:
# Does the main program list any requirements?
from __main__ import __requires__
except ImportError:
pass # No: just use the default working set based on sys.path
else:
# Yes: ensure the requirements are met, by prefixing sys.path if necessary
try:
working_set.require(__requires__)
except VersionConflict: # try it without defaults already on sys.path
working_set = WorkingSet([]) # by starting with an empty path
for dist in working_set.resolve(
parse_requirements(__requires__), Environment()
):
working_set.add(dist)
for entry in sys.path: # add any missing entries from sys.path
if entry not in working_set.entries:
working_set.add_entry(entry)
sys.path[:] = working_set.entries # then copy back to sys.path
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]
list(map(working_set.add_entry,sys.path)) # match order
| apache-2.0 | 1,798,283,831,742,520,600 | 7,295,549,544,394,551,000 | 35.378744 | 110 | 0.600492 | false |
aleonliao/depot_tools | download_from_google_storage.py | 15 | 20248 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Download files from Google Storage based on SHA1 sums."""
import hashlib
import optparse
import os
import Queue
import re
import shutil
import stat
import sys
import tarfile
import threading
import time
import subprocess2
GSUTIL_DEFAULT_PATH = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
# Maps sys.platform to what we actually want to call them.
PLATFORM_MAPPING = {
'cygwin': 'win',
'darwin': 'mac',
'linux2': 'linux',
'win32': 'win',
}
class FileNotFoundError(IOError):
pass
class InvalidFileError(IOError):
pass
class InvalidPlatformError(Exception):
pass
def GetNormalizedPlatform():
"""Returns the result of sys.platform accounting for cygwin.
Under cygwin, this will always return "win32" like the native Python."""
if sys.platform == 'cygwin':
return 'win32'
return sys.platform
# Common utilities
class Gsutil(object):
"""Call gsutil with some predefined settings. This is a convenience object,
and is also immutable."""
def __init__(self, path, boto_path=None, timeout=None, version='4.15'):
if not os.path.exists(path):
raise FileNotFoundError('GSUtil not found in %s' % path)
self.path = path
self.timeout = timeout
self.boto_path = boto_path
self.version = version
def get_sub_env(self):
env = os.environ.copy()
if self.boto_path == os.devnull:
env['AWS_CREDENTIAL_FILE'] = ''
env['BOTO_CONFIG'] = ''
elif self.boto_path:
env['AWS_CREDENTIAL_FILE'] = self.boto_path
env['BOTO_CONFIG'] = self.boto_path
return env
def call(self, *args):
cmd = [sys.executable, self.path, '--force-version', self.version]
cmd.extend(args)
return subprocess2.call(cmd, env=self.get_sub_env(), timeout=self.timeout)
def check_call(self, *args):
cmd = [sys.executable, self.path, '--force-version', self.version]
cmd.extend(args)
((out, err), code) = subprocess2.communicate(
cmd,
stdout=subprocess2.PIPE,
stderr=subprocess2.PIPE,
env=self.get_sub_env(),
timeout=self.timeout)
# Parse output.
status_code_match = re.search('status=([0-9]+)', err)
if status_code_match:
return (int(status_code_match.group(1)), out, err)
if ('You are attempting to access protected data with '
'no configured credentials.' in err):
return (403, out, err)
if 'matched no objects' in err:
return (404, out, err)
return (code, out, err)
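# Illustrative sketch (added for exposition; not part of the original
# script). Typical use of the Gsutil wrapper above; the bucket URL is
# hypothetical:
def _gsutil_example():
  gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=None)
  code, out, err = gsutil.check_call('ls', 'gs://some-bucket/some-sha1')
  if code == 404:
    print 'object not found'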
def check_platform(target):
"""Checks if any parent directory of target matches (win|mac|linux)."""
assert os.path.isabs(target)
root, target_name = os.path.split(target)
if not target_name:
return None
if target_name in ('linux', 'mac', 'win'):
return target_name
return check_platform(root)
def get_sha1(filename):
sha1 = hashlib.sha1()
with open(filename, 'rb') as f:
while True:
# Read in 1mb chunks, so it doesn't all have to be loaded into memory.
chunk = f.read(1024*1024)
if not chunk:
break
sha1.update(chunk)
return sha1.hexdigest()
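# Illustrative sketch (added for exposition; not part of the original
# script). get_sha1 streams the file in 1MB chunks, so even multi-GB
# downloads are hashed without loading them into memory; the filename
# below is hypothetical:
#
#   if get_sha1('downloaded.tar.gz') == expected_sha1:
#       pass  # contents verified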
# Download-specific code starts here
def enumerate_work_queue(input_filename, work_queue, directory,
recursive, ignore_errors, output, sha1_file,
auto_platform):
if sha1_file:
if not os.path.exists(input_filename):
if not ignore_errors:
raise FileNotFoundError('%s not found.' % input_filename)
print >> sys.stderr, '%s not found.' % input_filename
with open(input_filename, 'rb') as f:
sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
if sha1_match:
work_queue.put((sha1_match.groups(1)[0], output))
return 1
if not ignore_errors:
raise InvalidFileError('No sha1 sum found in %s.' % input_filename)
print >> sys.stderr, 'No sha1 sum found in %s.' % input_filename
return 0
if not directory:
work_queue.put((input_filename, output))
return 1
work_queue_size = 0
for root, dirs, files in os.walk(input_filename):
if not recursive:
for item in dirs[:]:
dirs.remove(item)
else:
for exclude in ['.svn', '.git']:
if exclude in dirs:
dirs.remove(exclude)
for filename in files:
full_path = os.path.join(root, filename)
if full_path.endswith('.sha1'):
if auto_platform:
# Skip if the platform does not match.
target_platform = check_platform(os.path.abspath(full_path))
if not target_platform:
err = ('--auto_platform passed in but no platform name found in '
'the path of %s' % full_path)
if not ignore_errors:
raise InvalidFileError(err)
print >> sys.stderr, err
continue
current_platform = PLATFORM_MAPPING[sys.platform]
if current_platform != target_platform:
continue
with open(full_path, 'rb') as f:
sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
if sha1_match:
work_queue.put(
(sha1_match.groups(1)[0], full_path.replace('.sha1', '')))
work_queue_size += 1
else:
if not ignore_errors:
raise InvalidFileError('No sha1 sum found in %s.' % filename)
print >> sys.stderr, 'No sha1 sum found in %s.' % filename
return work_queue_size
def _validate_tar_file(tar, prefix):
def _validate(tarinfo):
"""Returns false if the tarinfo is something we explicitly forbid."""
if tarinfo.issym() or tarinfo.islnk():
return False
if '..' in tarinfo.name or not tarinfo.name.startswith(prefix):
return False
return True
return all(map(_validate, tar.getmembers()))
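# Illustrative sketch (added for exposition; not part of the original
# script). _validate_tar_file guards against archive path traversal:
# symlinks, hardlinks, '..' components, and entries outside the
# expected '<name>/' prefix all make the archive invalid:
def _validate_tar_example(path):
  with tarfile.open(path, 'r:gz') as tar:
    prefix = os.path.basename(path)[:-len('.tar.gz')]
    return _validate_tar_file(tar, prefix)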
def _downloader_worker_thread(thread_num, q, force, base_url,
gsutil, out_q, ret_codes, verbose, extract,
delete=True):
while True:
input_sha1_sum, output_filename = q.get()
if input_sha1_sum is None:
return
if os.path.exists(output_filename) and not force:
if get_sha1(output_filename) == input_sha1_sum:
if verbose:
out_q.put(
'%d> File %s exists and SHA1 matches. Skipping.' % (
thread_num, output_filename))
continue
# Check if file exists.
file_url = '%s/%s' % (base_url, input_sha1_sum)
(code, _, err) = gsutil.check_call('ls', file_url)
if code != 0:
if code == 404:
out_q.put('%d> File %s for %s does not exist, skipping.' % (
thread_num, file_url, output_filename))
ret_codes.put((1, 'File %s for %s does not exist.' % (
file_url, output_filename)))
else:
# Other error, probably auth related (bad ~/.boto, etc).
out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' % (
thread_num, file_url, output_filename, err))
ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' % (
file_url, output_filename, err)))
continue
# Fetch the file.
out_q.put('%d> Downloading %s...' % (thread_num, output_filename))
try:
if delete:
os.remove(output_filename) # Delete the file if it exists already.
except OSError:
if os.path.exists(output_filename):
out_q.put('%d> Warning: deleting %s failed.' % (
thread_num, output_filename))
code, _, err = gsutil.check_call('cp', file_url, output_filename)
if code != 0:
out_q.put('%d> %s' % (thread_num, err))
ret_codes.put((code, err))
continue
remote_sha1 = get_sha1(output_filename)
if remote_sha1 != input_sha1_sum:
msg = ('%d> ERROR remote sha1 (%s) does not match expected sha1 (%s).' %
(thread_num, remote_sha1, input_sha1_sum))
out_q.put(msg)
ret_codes.put((20, msg))
continue
if extract:
if (not tarfile.is_tarfile(output_filename)
or not output_filename.endswith('.tar.gz')):
out_q.put('%d> Error: %s is not a tar.gz archive.' % (
thread_num, output_filename))
ret_codes.put((1, '%s is not a tar.gz archive.' % (output_filename)))
continue
with tarfile.open(output_filename, 'r:gz') as tar:
dirname = os.path.dirname(os.path.abspath(output_filename))
extract_dir = output_filename[0:len(output_filename)-7]
if not _validate_tar_file(tar, os.path.basename(extract_dir)):
out_q.put('%d> Error: %s contains files outside %s.' % (
thread_num, output_filename, extract_dir))
ret_codes.put((1, '%s contains invalid entries.' % (output_filename)))
continue
if os.path.exists(extract_dir):
try:
shutil.rmtree(extract_dir)
out_q.put('%d> Removed %s...' % (thread_num, extract_dir))
except OSError:
out_q.put('%d> Warning: Can\'t delete: %s' % (
thread_num, extract_dir))
ret_codes.put((1, 'Can\'t delete %s.' % (extract_dir)))
continue
out_q.put('%d> Extracting %d entries from %s to %s' %
(thread_num, len(tar.getmembers()),output_filename,
extract_dir))
tar.extractall(path=dirname)
# Set executable bit.
if sys.platform == 'cygwin':
# Under cygwin, mark all files as executable. The executable flag in
# Google Storage will not be set when uploading from Windows, so if
# this script is running under cygwin and we're downloading an
# executable, it will be unrunnable from inside cygwin without this.
st = os.stat(output_filename)
os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
elif sys.platform != 'win32':
# On non-Windows platforms, key off of the custom header
# "x-goog-meta-executable".
      code, out, err = gsutil.check_call('stat', file_url)
if code != 0:
out_q.put('%d> %s' % (thread_num, err))
ret_codes.put((code, err))
elif re.search(r'executable:\s*1', out):
st = os.stat(output_filename)
os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
def printer_worker(output_queue):
while True:
line = output_queue.get()
    # It's plausible we want to print empty lines.
if line is None:
break
print line
def download_from_google_storage(
input_filename, base_url, gsutil, num_threads, directory, recursive,
force, output, ignore_errors, sha1_file, verbose, auto_platform, extract):
# Start up all the worker threads.
all_threads = []
download_start = time.time()
stdout_queue = Queue.Queue()
work_queue = Queue.Queue()
ret_codes = Queue.Queue()
ret_codes.put((0, None))
for thread_num in range(num_threads):
t = threading.Thread(
target=_downloader_worker_thread,
args=[thread_num, work_queue, force, base_url,
gsutil, stdout_queue, ret_codes, verbose, extract])
t.daemon = True
t.start()
all_threads.append(t)
printer_thread = threading.Thread(target=printer_worker, args=[stdout_queue])
printer_thread.daemon = True
printer_thread.start()
# Enumerate our work queue.
work_queue_size = enumerate_work_queue(
input_filename, work_queue, directory, recursive,
ignore_errors, output, sha1_file, auto_platform)
for _ in all_threads:
work_queue.put((None, None)) # Used to tell worker threads to stop.
# Wait for all downloads to finish.
for t in all_threads:
t.join()
stdout_queue.put(None)
printer_thread.join()
# See if we ran into any errors.
max_ret_code = 0
for ret_code, message in ret_codes.queue:
max_ret_code = max(ret_code, max_ret_code)
if message:
print >> sys.stderr, message
if verbose and not max_ret_code:
print 'Success!'
if verbose:
print 'Downloading %d files took %1f second(s)' % (
work_queue_size, time.time() - download_start)
return max_ret_code
def main(args):
usage = ('usage: %prog [options] target\n'
'Target must be:\n'
' (default) a sha1 sum ([A-Za-z0-9]{40}).\n'
' (-s or --sha1_file) a .sha1 file, containing a sha1 sum on '
'the first line.\n'
' (-d or --directory) A directory to scan for .sha1 files.')
parser = optparse.OptionParser(usage)
parser.add_option('-o', '--output',
help='Specify the output file name. Defaults to: '
'(a) Given a SHA1 hash, the name is the SHA1 hash. '
'(b) Given a .sha1 file or directory, the name will '
'match (.*).sha1.')
parser.add_option('-b', '--bucket',
help='Google Storage bucket to fetch from.')
parser.add_option('-e', '--boto',
help='Specify a custom boto file.')
  parser.add_option('-c', '--no_resume', action='store_true',
                    help='Do not resume partial downloads; re-fetch and '
                         'overwrite the output file even if it exists.')
parser.add_option('-f', '--force', action='store_true',
help='Force download even if local file exists.')
parser.add_option('-i', '--ignore_errors', action='store_true',
help='Don\'t throw error if we find an invalid .sha1 file.')
parser.add_option('-r', '--recursive', action='store_true',
help='Scan folders recursively for .sha1 files. '
'Must be used with -d/--directory')
parser.add_option('-t', '--num_threads', default=1, type='int',
help='Number of downloader threads to run.')
parser.add_option('-d', '--directory', action='store_true',
help='The target is a directory. '
'Cannot be used with -s/--sha1_file.')
parser.add_option('-s', '--sha1_file', action='store_true',
help='The target is a file containing a sha1 sum. '
'Cannot be used with -d/--directory.')
parser.add_option('-g', '--config', action='store_true',
help='Alias for "gsutil config". Run this if you want '
'to initialize your saved Google Storage '
'credentials. This will create a read-only '
'credentials file in ~/.boto.depot_tools.')
parser.add_option('-n', '--no_auth', action='store_true',
help='Skip auth checking. Use if it\'s known that the '
'target bucket is a public bucket.')
parser.add_option('-p', '--platform',
help='A regular expression that is compared against '
'Python\'s sys.platform. If this option is specified, '
'the download will happen only if there is a match.')
parser.add_option('-a', '--auto_platform',
action='store_true',
help='Detects if any parent folder of the target matches '
'(linux|mac|win). If so, the script will only '
                         'process files that are in paths matching '
                         'the current platform.')
parser.add_option('-u', '--extract',
action='store_true',
help='Extract a downloaded tar.gz file. '
                         'Leaves the tar.gz file around for sha1 verification. '
                         'If a directory with the same name as the tar.gz '
                         'file already exists, it is deleted (to get a '
                         'clean state in case of update).')
parser.add_option('-v', '--verbose', action='store_true', default=True,
                    help='DEPRECATED: Defaults to True. Use --quiet '
                         'to suppress.')
parser.add_option('-q', '--quiet', action='store_false', dest='verbose',
help='Suppresses diagnostic and progress information.')
(options, args) = parser.parse_args()
# Make sure we should run at all based on platform matching.
if options.platform:
if options.auto_platform:
parser.error('--platform can not be specified with --auto_platform')
if not re.match(options.platform, GetNormalizedPlatform()):
if options.verbose:
print('The current platform doesn\'t match "%s", skipping.' %
options.platform)
return 0
# Set the boto file to /dev/null if we don't need auth.
if options.no_auth:
if (set(('http_proxy', 'https_proxy')).intersection(
env.lower() for env in os.environ) and
'NO_AUTH_BOTO_CONFIG' not in os.environ):
print >> sys.stderr, ('NOTICE: You have PROXY values set in your '
'environment, but gsutil in depot_tools does not '
'(yet) obey them.')
print >> sys.stderr, ('Also, --no_auth prevents the normal BOTO_CONFIG '
'environment variable from being used.')
print >> sys.stderr, ('To use a proxy in this situation, please supply '
'those settings in a .boto file pointed to by '
'the NO_AUTH_BOTO_CONFIG environment var.')
options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
# Make sure gsutil exists where we expect it to.
if os.path.exists(GSUTIL_DEFAULT_PATH):
gsutil = Gsutil(GSUTIL_DEFAULT_PATH,
boto_path=options.boto)
else:
parser.error('gsutil not found in %s, bad depot_tools checkout?' %
GSUTIL_DEFAULT_PATH)
# Passing in -g/--config will run our copy of GSUtil, then quit.
if options.config:
print '===Note from depot_tools==='
print 'If you do not have a project ID, enter "0" when asked for one.'
print '===End note from depot_tools==='
print
return gsutil.call('config')
if not args:
parser.error('Missing target.')
if len(args) > 1:
parser.error('Too many targets.')
if not options.bucket:
parser.error('Missing bucket. Specify bucket with --bucket.')
if options.sha1_file and options.directory:
parser.error('Both --directory and --sha1_file are specified, '
'can only specify one.')
if options.recursive and not options.directory:
parser.error('--recursive specified but --directory not specified.')
if options.output and options.directory:
parser.error('--directory is specified, so --output has no effect.')
if (not (options.sha1_file or options.directory)
and options.auto_platform):
parser.error('--auto_platform must be specified with either '
'--sha1_file or --directory')
input_filename = args[0]
# Set output filename if not specified.
if not options.output and not options.directory:
if not options.sha1_file:
# Target is a sha1 sum, so output filename would also be the sha1 sum.
options.output = input_filename
elif options.sha1_file:
# Target is a .sha1 file.
if not input_filename.endswith('.sha1'):
parser.error('--sha1_file is specified, but the input filename '
'does not end with .sha1, and no --output is specified. '
'Either make sure the input filename has a .sha1 '
'extension, or specify --output.')
options.output = input_filename[:-5]
else:
parser.error('Unreachable state.')
# Check if output file already exists.
if not options.directory and not options.force and not options.no_resume:
if os.path.exists(options.output):
      parser.error('Output file %s exists; pass --no_resume or --force '
                   'to overwrite it.' % options.output)
base_url = 'gs://%s' % options.bucket
return download_from_google_storage(
input_filename, base_url, gsutil, options.num_threads, options.directory,
options.recursive, options.force, options.output, options.ignore_errors,
options.sha1_file, options.verbose, options.auto_platform,
options.extract)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | -8,785,016,001,264,581,000 | -5,584,541,794,577,994,000 | 38.013487 | 80 | 0.600751 | false |
qnzhou/ThingiverseCrawler | thingiverse_crawler.py | 1 | 9320 | #!/usr/bin/env python
import argparse
import datetime
import os
import os.path
import requests
import re
import time
import urllib
import urlparse
from subprocess import check_call
def utc_mktime(utc_tuple):
"""Returns number of seconds elapsed since epoch
    Note that no timezone is taken into consideration.
utc tuple must be: (year, month, day, hour, minute, second)
"""
if len(utc_tuple) == 6:
utc_tuple += (0, 0, 0)
return time.mktime(utc_tuple) - time.mktime((1970, 1, 1, 0, 0, 0, 0, 0, 0))
def datetime_to_timestamp(dt):
"""Converts a datetime object to UTC timestamp"""
return int(utc_mktime(dt.timetuple()))
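# Illustrative sketch (added for exposition; not part of the original
# script). Converting a naive datetime to an epoch timestamp, with no
# timezone handling as noted in utc_mktime's docstring:
def _timestamp_example():
    dt = datetime.datetime(2015, 1, 1)
    return datetime_to_timestamp(dt)  # 1420070400 on most systems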
def parse_thing_ids(text):
pattern = "thing:(\d{5,7})"
matched = re.findall(pattern, text)
return [int(val) for val in matched]
def parse_file_ids(text):
pattern = "download:(\d{5,7})"
matched = re.findall(pattern, text)
return [int(val) for val in matched]
known_licenses = [
("Creative Commons - Attribution",
re.compile("http://creativecommons.org/licenses/by/\d(.\d)?/")),
("Creative Commons - Attribution - Share Alike",
re.compile("http://creativecommons.org/licenses/by-sa/\d(.\d)?/")),
("Creative Commons - Attribution - No Derivatives",
re.compile("http://creativecommons.org/licenses/by-nd/\d(.\d)?/")),
("Creative Commons - Attribution - Non-Commercial",
re.compile("http://creativecommons.org/licenses/by-nc/\d(.\d)?/")),
("Attribution - Non-Commercial - Share Alike",
re.compile("http://creativecommons.org/licenses/by-nc-sa/\d(.\d)?/")),
("Attribution - Non-Commercial - No Derivatives",
re.compile("http://creativecommons.org/licenses/by-nc-nd/\d(.\d)?/")),
("Creative Commons - Public Domain Dedication",
re.compile("http://creativecommons.org/publicdomain/zero/\d(.\d)?/")),
("GNU - GPL",
re.compile("http://creativecommons.org/licenses/GPL/\d(.\d)?/")),
("GNU - LGPL",
re.compile("http://creativecommons.org/licenses/LGPL/\d(.\d)?/")),
("BSD License",
re.compile("http://creativecommons.org/licenses/BSD/")),
("Nokia",
re.compile("http://www.developer.nokia.com/Terms_and_conditions/3d-printing.xhtml")),
("Public Domain",
re.compile("http://creativecommons.org/licenses/publicdomain/")),
]
def parse_license(text):
for name, pattern in known_licenses:
if pattern.search(text):
return name
return "unknown_license"
def crawl_thing_ids(N, end_date=None):
""" This method extract N things that were uploaded to thingiverse.com
before end_date. If end_date is None, use today's date.
"""
baseurl = "http://www.thingiverse.com/search/recent/things/page:{}?q=&start_date=&stop_date={}&search_mode=advanced&description=&username=&tags=&license="
end_date = datetime_to_timestamp(end_date)
thing_ids = set()
for i in range(N/12 + 1):
url = baseurl.format(i, end_date)
r = requests.get(url)
assert(r.status_code==200)
thing_ids.update(parse_thing_ids(r.text))
if len(thing_ids) > N:
break
# Sleep a bit to avoid being mistaken as DoS.
time.sleep(0.5)
return thing_ids
def crawl_things(N, output_dir, term=None, category=None, source=None, organize=False):
#baseurl = "http://www.thingiverse.com/newest/page:{}"
#baseurl = "http://www.thingiverse.com/explore/popular/page:{}"
key = None
if term is None:
        assert source is not None
        url_prefix = "http://www.thingiverse.com/explore/{}/".format(source)
if category is None:
baseurl = url_prefix + "page:{}"
else:
baseurl = url_prefix + urllib.quote_plus(category) + "/page:{}"
key = category
else:
baseurl = "http://www.thingiverse.com/search/page:{}?type=things&q=" + urllib.quote_plus(term)
key = term
thing_ids = set()
file_ids = set()
records = []
num_files = 0
page = 0
previous_path = ''
while True:
url = baseurl.format(page+1)
contents = get_url(url)
page += 1
        # If this page resolves to the same path as the previous page, there are no more pages; stop.
current_path = urlparse.urlparse(contents.url).path
if previous_path == current_path:
return records
else:
previous_path = current_path
for thing_id in parse_thing_ids(contents.text):
if thing_id in thing_ids:
continue
print("thing id: {}".format(thing_id))
thing_ids.add(thing_id)
license, thing_files = get_thing(thing_id)
for file_id in thing_files:
if file_id in file_ids:
continue
file_ids.add(file_id)
print(" file id: {}".format(file_id))
result = download_file(file_id, thing_id, output_dir, organize)
if result is None: continue
filename, link = result
if filename is not None:
records.append((thing_id, file_id, filename, license, link))
if N is not None and len(records) >= N:
return records
# Sleep a bit to avoid being mistaken as DoS.
time.sleep(0.5)
save_records(records, key)
def get_thing(thing_id):
base_url = "http://www.thingiverse.com/{}:{}"
file_ids = []
url = base_url.format("thing", thing_id)
contents = get_url(url).text
license = parse_license(contents)
return license, parse_file_ids(contents)
def get_url(url, time_out=600):
r = requests.get(url)
sleep_time = 1.0
while r.status_code != 200:
print("sleep {}s".format(sleep_time))
print(url)
time.sleep(sleep_time)
r = requests.get(url)
sleep_time += 2
if (sleep_time > time_out):
# We have sleeped for over 10 minutes, the page probably does
# not exist.
break
if r.status_code != 200:
print("failed to retrieve {}".format(url))
else:
return r
# return r.text
def get_download_link(file_id):
base_url = "https://www.thingiverse.com/{}:{}"
url = base_url.format("download", file_id)
r = requests.head(url)
link = r.headers.get("Location", None)
if link is not None:
__, ext = os.path.splitext(link)
if ext.lower() not in [".stl", ".obj", ".ply", ".off"]:
return None
return link
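# Illustrative sketch (added for exposition; not part of the original
# script). get_download_link issues a HEAD request and reads the
# redirect Location header, keeping only mesh formats; the file id is
# hypothetical:
#
#     link = get_download_link(12345)
#     if link:  # e.g. an .stl/.obj/.ply/.off URL on the CDN
#         print(link)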
def download_file(file_id, thing_id, output_dir, organize):
link = get_download_link(file_id)
if link is None:
return None
__, ext = os.path.splitext(link)
output_file = "{}{}".format(file_id, ext.lower())
if organize:
output_file = os.path.join(str(thing_id), output_file)
output_file = os.path.join(output_dir, output_file)
command = "wget -q --tries=20 --waitretry 20 -O {} {}".format(output_file, link)
    check_call(command.split())
return output_file, link
def save_records(records, key=None):
# Enforce kebab case file name
output_name = re.sub('(\w) (\w)', r'\1-\2', key).lower()+"-" if key else ""
output_name += "summary"
with open(output_name+".csv", 'w') as fout:
fout.write("thing_id, file_id, file, license, link\n")
for entry in records:
fout.write(",".join([str(val) for val in entry]) + "\n")
def parse_args():
parser = argparse.ArgumentParser(
description="Crawl data from thingiverse",
epilog="Written by Qingnan Zhou <qnzhou at gmail dot com> Modified by Mike Gleason")
parser.add_argument("--output-dir", "-o", help="output directories",
default=".")
parser.add_argument("--number", "-n", type=int,
help="how many files to crawl", default=None)
group = parser.add_mutually_exclusive_group()
group.add_argument("--search-term", "-s", type=str, default=None,
help="term to search for")
group.add_argument("--category", "-c", type=str, default=None,
help="catergory to search for")
parser.add_argument('--organize', dest='organized', default=False, action='store_true',
help="organize files by their main category")
parser.add_argument("--source", choices=("newest", "featured", "popular",
"verified", "made-things", "derivatives", "customizable",
"random-things", "firehose"), default="featured");
return parser
def main():
parser = parse_args()
args = parser.parse_args()
if args.number is None and (args.search_term is None and args.category is None):
parser.error('Number or Search/Category Term required')
    output_dir = args.output_dir
records = crawl_things(
args.number,
output_dir,
args.search_term,
args.category,
args.source,
args.organized)
if args.search_term:
save_records(records, args.search_term)
elif args.category:
save_records(records, args.category)
else:
save_records(records)
if __name__ == "__main__":
main()
| mit | -2,393,227,691,155,196,000 | 3,486,811,799,563,783,000 | 33.64684 | 158 | 0.593777 | false |
PyBossa/pybossa | pybossa/model/counter.py | 2 | 1787 | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2017 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import Integer
from sqlalchemy.schema import Column, ForeignKey
from sqlalchemy.dialects.postgresql import TIMESTAMP
from pybossa.core import db
from pybossa.model import DomainObject, make_timestamp
class Counter(db.Model, DomainObject):
'''A Counter lists the number of task runs for a given Task.'''
__tablename__ = 'counter'
#: Counter.ID
id = Column(Integer, primary_key=True)
#: UTC timestamp when the counter was created.
created = Column(TIMESTAMP, default=make_timestamp)
#: Project.ID that this counter is associated with.
project_id = Column(Integer, ForeignKey('project.id',
ondelete='CASCADE'),
nullable=False)
#: Task.ID that this counter is associated with.
task_id = Column(Integer, ForeignKey('task.id',
ondelete='CASCADE'),
nullable=False)
#: Number of task_runs for this task.
n_task_runs = Column(Integer, default=0, nullable=False)
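    # Minimal usage sketch (assumes a Flask app context and a configured
    # db.session; values are hypothetical):
    #     counter = Counter(project_id=1, task_id=42, n_task_runs=0)
    #     db.session.add(counter)
    #     db.session.commit()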
| agpl-3.0 | 9,071,343,691,471,213,000 | -911,303,757,406,662,500 | 39.613636 | 77 | 0.684947 | false |
pplatek/odoo | addons/base_report_designer/plugin/openerp_report_designer/bin/script/lib/logreport.py | 386 | 1736 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import tempfile
LOG_DEBUG='debug'
LOG_INFO='info'
LOG_WARNING='warn'
LOG_ERROR='error'
LOG_CRITICAL='critical'
_logger = logging.getLogger(__name__)
def log_detail(self):
import os
logfile_name = os.path.join(tempfile.gettempdir(), "openerp_report_designer.log")
hdlr = logging.FileHandler(logfile_name)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
_logger.addHandler(hdlr)
_logger.setLevel(logging.INFO)
class Logger(object):
    def log_write(self, name, level, msg):
        getattr(_logger, level)(msg)
def shutdown(self):
logging.shutdown()
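# Usage sketch (assumed wiring): Logger().log_write('report', LOG_INFO, 'started')
# dispatches to the matching level method on this module's logger.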
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -8,998,707,619,581,972,000 | 1,263,757,464,720,990,700 | 35.166667 | 85 | 0.644585 | false |
arpitn30/open-event-orga-server | tests/unittests/api/test_custom_fields.py | 9 | 3404 | import unittest
from app import current_app as app
from app.api.helpers.custom_fields import Color, Email, Uri, \
ImageUri, DateTime, Integer, Float, ChoiceString, Upload
from tests.unittests.utils import OpenEventTestCase
class TestCustomFieldsValidation(OpenEventTestCase):
"""
Test the validation methods of custom fields
"""
def _test_common(self, field):
field.required = False
self.assertTrue(field.validate(None))
field.required = True
self.assertFalse(field.validate(None))
if field.__schema_type__ != 'string':
self.assertFalse(field.validate(''))
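        # Note: string-typed fields deliberately skip the empty-string check
        # above, since '' is a representable (empty) string value for them.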
def test_color_field(self):
field = Color()
self._test_common(field)
self.assertFalse(field.validate('randomnothing'))
self.assertTrue(field.validate('black'))
self.assertTrue(field.validate('#44ff3b'))
def test_email_field(self):
field = Email()
self._test_common(field)
self.assertFalse(field.validate('website.com'))
self.assertTrue(field.validate('[email protected]'))
def test_uri_field(self):
field = Uri()
self._test_common(field)
self.assertFalse(field.validate('somestring'))
self.assertFalse(field.validate('website.com'))
self.assertFalse(field.validate('www.website.com'))
self.assertFalse(field.validate('http://bazooka'))
self.assertTrue(field.validate('http://localhost/file'))
self.assertTrue(field.validate('http://website.com'))
self.assertTrue(field.validate('ftp://domain.com/blah'))
def test_image_uri_field(self):
field = ImageUri()
self._test_common(field)
# same as uri field, not many tests needed
self.assertFalse(field.validate('imgur.com/image.png'))
self.assertTrue(field.validate('http://imgur.com/image.png'))
def test_datetime_field(self):
field = DateTime()
self._test_common(field)
self.assertTrue(field.validate('2014-12-31 23:11:44'))
self.assertTrue(field.validate('2014-12-31T23:11:44'))
self.assertFalse(field.validate('2014-31-12T23:11:44'))
self.assertFalse(field.validate('2014-12-32'))
self.assertFalse(field.validate('2014-06-30 12:00'))
def test_integer_field(self):
field = Integer()
self._test_common(field)
self.assertTrue(field.validate(0))
self.assertFalse(field.validate(-2323.23))
self.assertFalse(field.validate(2323.23))
def test_float_field(self):
field = Float()
self._test_common(field)
self.assertTrue(field.validate(92))
def test_choice_string_field(self):
field = ChoiceString(choice_list=['a', 'b', 'c'])
self._test_common(field)
self.assertTrue(field.validate('a'))
self.assertFalse(field.validate('d'))
self.assertFalse(field.validate('ab'))
def test_upload_field(self):
with app.test_request_context():
field = Upload()
self._test_common(field)
link = '/static/1'
self.assertTrue(field.validate(link))
z = field.format(link)
self.assertNotEqual(link, z)
self.assertTrue(field.validate(z), msg=z)
self.assertEqual('http://site.co/1', field.format('http://site.co/1'))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -761,694,532,504,157,400 | 7,089,550,508,480,456,000 | 35.212766 | 82 | 0.629553 | false |
marbu/pylatest | tests/xdocutils/test_utils.py | 1 | 6663 | # -*- coding: utf8 -*-
"""
Tests of helper functions from pylatest.xdocutils.utils module.
"""
# Copyright (C) 2018 Martin Bukatovič <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import textwrap
from docutils.core import publish_doctree
import pytest
from pylatest.xdocutils.core import pylatest_publish_parts
from pylatest.xdocutils.readers import NoDocInfoReader
from pylatest.xdocutils.utils import get_field_list
from pylatest.xdocutils.utils import get_testcase_id
from pylatest.xdocutils.utils import get_testcase_requirements
def _publish(source):
"""
Parse rst source string into doctree.
"""
doctree = publish_doctree(
source=source,
reader=NoDocInfoReader(),
parser_name='restructuredtext',)
return doctree
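# Example (hypothetical): _publish("Test Foo\n********\n") yields a docutils
# document node that the get_* helpers under test can inspect.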
def test_get_field_list_null(empty_doctree):
    assert get_field_list(empty_doctree) is None
def test_get_field_list_missing():
doctree = _publish(textwrap.dedent('''\
Test Foo
********
There is no field list.
Description
===========
Nothing here as well.
'''))
    assert get_field_list(doctree) is None
def test_get_field_list_present():
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:id: FOO-122
:author: [email protected]
:component: foo
'''))
assert get_field_list(doctree) is not None
def test_get_testcase_id_null(empty_doctree):
    assert get_testcase_id(empty_doctree) is None
def test_get_testcase_id():
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:id: FOO-122
:author: [email protected]
:component: foo
'''))
assert get_testcase_id(doctree) == "FOO-122"
#
# requirements
#
def test_get_testcase_requirements_null(empty_doctree):
assert get_testcase_requirements(empty_doctree) == []
REQUIREMENT_FIELD_NAMES = ["requirement", "requirements"]
@pytest.mark.parametrize("field_name", REQUIREMENT_FIELD_NAMES)
def test_get_testcase_requirements_single(field_name):
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:{}: FOO-212
'''.format(field_name)))
assert get_testcase_requirements(doctree) == ["FOO-212"]
@pytest.mark.parametrize("field_name", REQUIREMENT_FIELD_NAMES)
def test_get_testcase_requirements_single_empty(field_name):
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:{}:
'''.format(field_name)))
assert get_testcase_requirements(doctree) == []
def test_get_testcase_requirements_many():
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:requirement: FOO-212
:requirement: FOO-232
:component: foo
'''))
assert get_testcase_requirements(doctree) == ["FOO-212", "FOO-232"]
@pytest.mark.parametrize("field_name", REQUIREMENT_FIELD_NAMES)
def test_get_testcase_requirements_list_single(field_name):
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:{}:
- FOO-212
'''.format(field_name)))
assert get_testcase_requirements(doctree) == ["FOO-212"]
@pytest.mark.parametrize("field_name", REQUIREMENT_FIELD_NAMES)
def test_get_testcase_requirements_list_many(field_name):
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:{}:
- FOO-212
- FOO-232
'''.format(field_name)))
assert get_testcase_requirements(doctree) == ["FOO-212", "FOO-232"]
@pytest.mark.parametrize("field_name", REQUIREMENT_FIELD_NAMES)
def test_get_testcase_requirements_list_many_someemptyitems(field_name):
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:{}:
-
- FOO-132
-
:requirement: FOO-130
'''.format(field_name)))
assert get_testcase_requirements(doctree) == ["FOO-132", "FOO-130"]
@pytest.mark.parametrize("field_name", REQUIREMENT_FIELD_NAMES)
def test_get_testcase_requirements_list_many_onlyemptyitems(field_name):
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:{}:
-
-
'''.format(field_name)))
assert get_testcase_requirements(doctree) == []
def test_get_testcase_requirements_many_list_many():
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:requirement: FOO-012
:requirement: FOO-032
:requirements:
- FOO-212
- FOO-232
'''))
assert get_testcase_requirements(doctree) == [
"FOO-012", "FOO-032", "FOO-212", "FOO-232"]
@pytest.mark.parametrize("field_name", REQUIREMENT_FIELD_NAMES)
def test_get_testcase_requirements_single_url_link(field_name):
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:requirement: https://example.com
'''.format(field_name)))
results = get_testcase_requirements(doctree)
assert len(results) == 1
# check that we get actual rst node for a link (reference node)
assert results[0].tagname == "reference"
assert results[0].astext() == "https://example.com"
def test_get_testcase_requirements_many_list_url_link():
doctree = _publish(textwrap.dedent('''\
Test Foo
********
:author: [email protected]
:component: foo
:requirements:
- https://example.com/foo
- https://example.com/bar
'''))
results = get_testcase_requirements(doctree)
assert len(results) == 2
# check that we get actual rst node for a link (reference node)
assert results[0].tagname == "reference"
assert results[1].tagname == "reference"
# and expected content
assert results[0].astext() == "https://example.com/foo"
assert results[1].astext() == "https://example.com/bar"
| gpl-3.0 | -1,521,595,036,487,576,300 | -1,206,488,043,871,609,300 | 25.12549 | 72 | 0.655509 | false |
openshift/openshift-tools | openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/openshift_health_checker/test/etcd_volume_test.py | 55 | 3964 | import pytest
from openshift_checks.etcd_volume import EtcdVolume
from openshift_checks import OpenShiftCheckException
@pytest.mark.parametrize('ansible_mounts,extra_words', [
([], ['none']), # empty ansible_mounts
([{'mount': '/mnt'}], ['/mnt']), # missing relevant mount paths
])
def test_cannot_determine_available_disk(ansible_mounts, extra_words):
task_vars = dict(
ansible_mounts=ansible_mounts,
)
with pytest.raises(OpenShiftCheckException) as excinfo:
EtcdVolume(fake_execute_module, task_vars).run()
for word in ['Unable to determine mount point'] + extra_words:
assert word in str(excinfo.value)
@pytest.mark.parametrize('size_limit,ansible_mounts', [
(
# if no size limit is specified, expect max usage
# limit to default to 90% of size_total
None,
[{
'mount': '/',
'size_available': 40 * 10**9,
'size_total': 80 * 10**9
}],
),
(
1,
[{
'mount': '/',
'size_available': 30 * 10**9,
'size_total': 30 * 10**9,
}],
),
(
20000000000,
[{
'mount': '/',
'size_available': 20 * 10**9,
'size_total': 40 * 10**9,
}],
),
(
5000000000,
[{
# not enough space on / ...
'mount': '/',
'size_available': 0,
'size_total': 0,
}, {
# not enough space on /var/lib ...
'mount': '/var/lib',
'size_available': 2 * 10**9,
'size_total': 21 * 10**9,
}, {
# ... but enough on /var/lib/etcd
'mount': '/var/lib/etcd',
'size_available': 36 * 10**9,
'size_total': 40 * 10**9
}],
)
])
def test_succeeds_with_recommended_disk_space(size_limit, ansible_mounts):
task_vars = dict(
etcd_device_usage_threshold_percent=size_limit,
ansible_mounts=ansible_mounts,
)
if task_vars["etcd_device_usage_threshold_percent"] is None:
task_vars.pop("etcd_device_usage_threshold_percent")
result = EtcdVolume(fake_execute_module, task_vars).run()
assert not result.get('failed', False)
@pytest.mark.parametrize('size_limit_percent,ansible_mounts,extra_words', [
(
# if no size limit is specified, expect max usage
# limit to default to 90% of size_total
None,
[{
'mount': '/',
'size_available': 1 * 10**9,
'size_total': 100 * 10**9,
}],
['99.0%'],
),
(
70.0,
[{
'mount': '/',
'size_available': 1 * 10**6,
'size_total': 5 * 10**9,
}],
['100.0%'],
),
(
40.0,
[{
'mount': '/',
'size_available': 2 * 10**9,
'size_total': 6 * 10**9,
}],
['66.7%'],
),
(
None,
[{
# enough space on /var ...
'mount': '/var',
'size_available': 20 * 10**9,
'size_total': 20 * 10**9,
}, {
# .. but not enough on /var/lib
'mount': '/var/lib',
'size_available': 1 * 10**9,
'size_total': 20 * 10**9,
}],
['95.0%'],
),
])
def test_fails_with_insufficient_disk_space(size_limit_percent, ansible_mounts, extra_words):
task_vars = dict(
etcd_device_usage_threshold_percent=size_limit_percent,
ansible_mounts=ansible_mounts,
)
if task_vars["etcd_device_usage_threshold_percent"] is None:
task_vars.pop("etcd_device_usage_threshold_percent")
result = EtcdVolume(fake_execute_module, task_vars).run()
assert result['failed']
for word in extra_words:
assert word in result['msg']
def fake_execute_module(*args):
raise AssertionError('this function should not be called')
| apache-2.0 | 5,769,695,187,069,855,000 | 8,968,958,636,883,365,000 | 25.965986 | 93 | 0.502018 | false |
opavader/fabric | tests/test_state.py | 44 | 1109 | from nose.tools import eq_
from fabric.state import _AliasDict
def test_dict_aliasing():
"""
Assigning values to aliases updates aliased keys
"""
ad = _AliasDict(
{'bar': False, 'biz': True, 'baz': False},
aliases={'foo': ['bar', 'biz', 'baz']}
)
# Before
eq_(ad['bar'], False)
eq_(ad['biz'], True)
eq_(ad['baz'], False)
# Change
ad['foo'] = True
# After
eq_(ad['bar'], True)
eq_(ad['biz'], True)
eq_(ad['baz'], True)
def test_nested_dict_aliasing():
"""
Aliases can be nested
"""
ad = _AliasDict(
{'bar': False, 'biz': True},
aliases={'foo': ['bar', 'nested'], 'nested': ['biz']}
)
# Before
eq_(ad['bar'], False)
eq_(ad['biz'], True)
# Change
ad['foo'] = True
# After
eq_(ad['bar'], True)
eq_(ad['biz'], True)
def test_dict_alias_expansion():
"""
Alias expansion
"""
ad = _AliasDict(
{'bar': False, 'biz': True},
aliases={'foo': ['bar', 'nested'], 'nested': ['biz']}
)
eq_(ad.expand_aliases(['foo']), ['bar', 'biz'])
| bsd-2-clause | -864,878,882,616,057,200 | 5,858,160,753,534,349,000 | 20.326923 | 61 | 0.492335 | false |
cubicova17/annet | venv/lib/python2.7/site-packages/django/contrib/gis/gdal/feature.py | 219 | 4255 | # The GDAL C library, OGR exception, and the Field object
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import OGRException, OGRIndexError
from django.contrib.gis.gdal.field import Field
from django.contrib.gis.gdal.geometries import OGRGeometry, OGRGeomType
# ctypes function prototypes
from django.contrib.gis.gdal.prototypes import ds as capi, geom as geom_api
from django.utils.encoding import force_bytes, force_text
from django.utils import six
from django.utils.six.moves import xrange
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_F_* routines are relevant here.
class Feature(GDALBase):
"""
    This class wraps an OGR Feature; it needs to be instantiated
    from a Layer object.
"""
#### Python 'magic' routines ####
def __init__(self, feat, layer):
"""
Initializes Feature from a pointer and its Layer object.
"""
if not feat:
raise OGRException('Cannot create OGR Feature, invalid pointer given.')
self.ptr = feat
self._layer = layer
def __del__(self):
"Releases a reference to this object."
if self._ptr: capi.destroy_feature(self._ptr)
def __getitem__(self, index):
"""
Gets the Field object at the specified index, which may be either
an integer or the Field's string label. Note that the Field object
is not the field's _value_ -- use the `get` method instead to
retrieve the value (e.g. an integer) instead of a Field instance.
"""
if isinstance(index, six.string_types):
i = self.index(index)
else:
            if index < 0 or index >= self.num_fields:
raise OGRIndexError('index out of range')
i = index
return Field(self, i)
def __iter__(self):
"Iterates over each field in the Feature."
for i in xrange(self.num_fields):
yield self[i]
def __len__(self):
"Returns the count of fields in this feature."
return self.num_fields
def __str__(self):
"The string name of the feature."
return 'Feature FID %d in Layer<%s>' % (self.fid, self.layer_name)
def __eq__(self, other):
"Does equivalence testing on the features."
return bool(capi.feature_equal(self.ptr, other._ptr))
#### Feature Properties ####
@property
def encoding(self):
return self._layer._ds.encoding
@property
def fid(self):
"Returns the feature identifier."
return capi.get_fid(self.ptr)
@property
def layer_name(self):
"Returns the name of the layer for the feature."
name = capi.get_feat_name(self._layer._ldefn)
return force_text(name, self.encoding, strings_only=True)
@property
def num_fields(self):
"Returns the number of fields in the Feature."
return capi.get_feat_field_count(self.ptr)
@property
def fields(self):
"Returns a list of fields in the Feature."
return [capi.get_field_name(capi.get_field_defn(self._layer._ldefn, i))
for i in xrange(self.num_fields)]
@property
def geom(self):
"Returns the OGR Geometry for this Feature."
# Retrieving the geometry pointer for the feature.
geom_ptr = capi.get_feat_geom_ref(self.ptr)
return OGRGeometry(geom_api.clone_geom(geom_ptr))
@property
def geom_type(self):
"Returns the OGR Geometry Type for this Feture."
return OGRGeomType(capi.get_fd_geom_type(self._layer._ldefn))
#### Feature Methods ####
def get(self, field):
"""
Returns the value of the field, instead of an instance of the Field
object. May take a string of the field name or a Field object as
parameters.
"""
field_name = getattr(field, 'name', field)
return self[field_name].value
def index(self, field_name):
"Returns the index of the given field name."
i = capi.get_field_index(self.ptr, force_bytes(field_name))
if i < 0:
raise OGRIndexError('invalid OFT field name given: "%s"' % field_name)
return i
| mit | 423,253,242,156,994,800 | -7,711,737,298,051,118,000 | 33.314516 | 83 | 0.629142 | false |
Ali-aqrabawi/ezclinic | lib/django/contrib/staticfiles/management/commands/findstatic.py | 106 | 1745 | from __future__ import unicode_literals
import os
from django.contrib.staticfiles import finders
from django.core.management.base import LabelCommand
from django.utils.encoding import force_text
class Command(LabelCommand):
help = "Finds the absolute paths for the given static file(s)."
label = 'staticfile'
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--first', action='store_false', dest='all',
default=True,
help="Only return the first match for each static file.",
)
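    # Example invocation (hypothetical): ``manage.py findstatic css/base.css
    # --first`` prints only the first matching absolute path.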
def handle_label(self, path, **options):
verbosity = options['verbosity']
result = finders.find(path, all=options['all'])
path = force_text(path)
if verbosity >= 2:
searched_locations = (
"\nLooking in the following locations:\n %s" %
"\n ".join(force_text(location) for location in finders.searched_locations)
)
else:
searched_locations = ''
if result:
if not isinstance(result, (list, tuple)):
result = [result]
result = (force_text(os.path.realpath(path)) for path in result)
if verbosity >= 1:
file_list = '\n '.join(result)
return ("Found '%s' here:\n %s%s" %
(path, file_list, searched_locations))
else:
return '\n'.join(result)
else:
message = ["No matching file found for '%s'." % path]
if verbosity >= 2:
message.append(searched_locations)
if verbosity >= 1:
self.stderr.write('\n'.join(message))
| mit | -6,979,971,019,580,933,000 | 6,955,746,807,108,318,000 | 35.354167 | 92 | 0.557593 | false |
mostafa8026/MyObjectListView | docs/conf.py | 2 | 5224 | # -*- coding: utf-8 -*-
#
# ObjectListView documentation build configuration file, created by
# sphinx-quickstart on Sun May 18 14:41:14 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys, os
# sys.path.append(os.path.abspath(".."))
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath("sphinxext"))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'ObjectListView'
copyright = '2006-2015, Phillip Piper'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = '2.9'
# The full version, including alpha/beta/rc tags.
release = '2.9.0'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
#exclude_dirs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'c#'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'master.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# The name of an image file (within the static path) to place at the top of
# the sidebar.
#html_logo = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'ObjectListViewDoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = "a4"
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '11pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'ObjectListView.tex', 'ObjectListView Documentation', 'Phillip Piper', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| gpl-3.0 | 3,203,555,484,922,014,700 | -6,839,765,581,274,563,000 | 30.660606 | 93 | 0.719564 | false |
havard024/prego | venv/lib/python2.7/site-packages/django/middleware/csrf.py | 20 | 8862 | """
Cross Site Request Forgery Middleware.
This module provides a middleware that implements protection
against request forgeries from other sites.
"""
from __future__ import unicode_literals
import hashlib
import logging
import re
import random
from django.conf import settings
from django.core.urlresolvers import get_callable
from django.utils.cache import patch_vary_headers
from django.utils.encoding import force_text
from django.utils.http import same_origin
from django.utils.crypto import constant_time_compare, get_random_string
logger = logging.getLogger('django.request')
REASON_NO_REFERER = "Referer checking failed - no Referer."
REASON_BAD_REFERER = "Referer checking failed - %s does not match %s."
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_BAD_TOKEN = "CSRF token missing or incorrect."
CSRF_KEY_LENGTH = 32
def _get_failure_view():
"""
Returns the view to be used for CSRF rejections
"""
return get_callable(settings.CSRF_FAILURE_VIEW)
def _get_new_csrf_key():
return get_random_string(CSRF_KEY_LENGTH)
def get_token(request):
"""
Returns the CSRF token required for a POST form. The token is an
alphanumeric value.
A side effect of calling this function is to make the csrf_protect
decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie'
header to the outgoing response. For this reason, you may need to use this
function lazily, as is done by the csrf context processor.
"""
request.META["CSRF_COOKIE_USED"] = True
return request.META.get("CSRF_COOKIE", None)
def _sanitize_token(token):
# Allow only alphanum
if len(token) > CSRF_KEY_LENGTH:
return _get_new_csrf_key()
token = re.sub('[^a-zA-Z0-9]+', '', force_text(token))
if token == "":
# In case the cookie has been truncated to nothing at some point.
return _get_new_csrf_key()
return token
class CsrfViewMiddleware(object):
"""
Middleware that requires a present and correct csrfmiddlewaretoken
for POST requests that have a CSRF cookie, and sets an outgoing
CSRF cookie.
This middleware should be used in conjunction with the csrf_token template
tag.
"""
# The _accept and _reject methods currently only exist for the sake of the
# requires_csrf_token decorator.
def _accept(self, request):
# Avoid checking the request twice by adding a custom attribute to
# request. This will be relevant when both decorator and middleware
# are used.
request.csrf_processing_done = True
return None
def _reject(self, request, reason):
return _get_failure_view()(request, reason=reason)
def process_view(self, request, callback, callback_args, callback_kwargs):
if getattr(request, 'csrf_processing_done', False):
return None
try:
csrf_token = _sanitize_token(
request.COOKIES[settings.CSRF_COOKIE_NAME])
# Use same token next time
request.META['CSRF_COOKIE'] = csrf_token
except KeyError:
csrf_token = None
# Generate token and store it in the request, so it's
# available to the view.
request.META["CSRF_COOKIE"] = _get_new_csrf_key()
# Wait until request.META["CSRF_COOKIE"] has been manipulated before
# bailing out, so that get_token still works
if getattr(callback, 'csrf_exempt', False):
return None
# Assume that anything not defined as 'safe' by RFC2616 needs protection
if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
if getattr(request, '_dont_enforce_csrf_checks', False):
# Mechanism to turn off CSRF checks for test suite.
# It comes after the creation of CSRF cookies, so that
# everything else continues to work exactly the same
# (e.g. cookies are sent, etc.), but before any
# branches that call reject().
return self._accept(request)
if request.is_secure():
# Suppose user visits http://example.com/
# An active network attacker (man-in-the-middle, MITM) sends a
# POST form that targets https://example.com/detonate-bomb/ and
# submits it via JavaScript.
#
# The attacker will need to provide a CSRF cookie and token, but
# that's no problem for a MITM and the session-independent
# nonce we're using. So the MITM can circumvent the CSRF
# protection. This is true for any HTTP connection, but anyone
# using HTTPS expects better! For this reason, for
# https://example.com/ we need additional protection that treats
# http://example.com/ as completely untrusted. Under HTTPS,
# Barth et al. found that the Referer header is missing for
# same-domain requests in only about 0.2% of cases or less, so
# we can use strict Referer checking.
referer = request.META.get('HTTP_REFERER')
if referer is None:
logger.warning('Forbidden (%s): %s',
REASON_NO_REFERER, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, REASON_NO_REFERER)
# Note that request.get_host() includes the port.
good_referer = 'https://%s/' % request.get_host()
if not same_origin(referer, good_referer):
reason = REASON_BAD_REFERER % (referer, good_referer)
logger.warning('Forbidden (%s): %s', reason, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, reason)
if csrf_token is None:
# No CSRF cookie. For POST requests, we insist on a CSRF cookie,
# and in this way we can avoid all CSRF attacks, including login
# CSRF.
logger.warning('Forbidden (%s): %s',
REASON_NO_CSRF_COOKIE, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, REASON_NO_CSRF_COOKIE)
# Check non-cookie token for match.
request_csrf_token = ""
if request.method == "POST":
request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
if request_csrf_token == "":
# Fall back to X-CSRFToken, to make things easier for AJAX,
# and possible for PUT/DELETE.
request_csrf_token = request.META.get('HTTP_X_CSRFTOKEN', '')
if not constant_time_compare(request_csrf_token, csrf_token):
logger.warning('Forbidden (%s): %s',
REASON_BAD_TOKEN, request.path,
extra={
'status_code': 403,
'request': request,
}
)
return self._reject(request, REASON_BAD_TOKEN)
return self._accept(request)
def process_response(self, request, response):
if getattr(response, 'csrf_processing_done', False):
return response
# If CSRF_COOKIE is unset, then CsrfViewMiddleware.process_view was
        # never called, probably because a request middleware returned a response
# (for example, contrib.auth redirecting to a login page).
if request.META.get("CSRF_COOKIE") is None:
return response
if not request.META.get("CSRF_COOKIE_USED", False):
return response
# Set the CSRF cookie even if it's already set, so we renew
# the expiry timer.
response.set_cookie(settings.CSRF_COOKIE_NAME,
request.META["CSRF_COOKIE"],
                            max_age=60 * 60 * 24 * 7 * 52,
domain=settings.CSRF_COOKIE_DOMAIN,
path=settings.CSRF_COOKIE_PATH,
secure=settings.CSRF_COOKIE_SECURE
)
# Content varies with the CSRF cookie, so set the Vary header.
patch_vary_headers(response, ('Cookie',))
response.csrf_processing_done = True
return response
| mit | -7,528,660,802,233,049,000 | 8,898,426,878,070,267,000 | 39.651376 | 80 | 0.577071 | false |
gogogo/gogogo-hk | gogogo/models/property.py | 1 | 3233 | from google.appengine.ext import db
from django import forms
from django.utils.translation import ugettext_lazy as _
class TransitTypeProperty(db.IntegerProperty):
"""
Transit Type Property - Storage of transit type
"""
    def __init__(self, *args, **kwargs):
kwargs["choices"] = range(0,8)
db.IntegerProperty.__init__(self,*args,**kwargs)
def validate(self, value):
if isinstance(value,basestring):
value = int(value)
return super(TransitTypeProperty, self).validate(value)
def get_form_field(self, **kwargs):
attrs = {
'form_class': forms.ChoiceField,
'choices' : TransitTypeProperty.get_choices()
}
attrs.update(kwargs)
return super(TransitTypeProperty, self).get_form_field(**attrs)
def get_choices():
ret = [ (i,TransitTypeProperty.get_type_name(i)) for i in range(0,8)]
return ret
get_choices = staticmethod(get_choices)
def get_basic_type_name_list():
"""
        Return a list of basic type names
"""
ret = [TransitTypeProperty.get_type_name(i) for i in range(0,8)]
return ret
get_basic_type_name_list = staticmethod(get_basic_type_name_list)
def get_type_name(type):
if type == 0:
return _("Tram, Streetcar, Light rail")
elif type == 1:
return _("Subway, Metro") #Any underground rail system within a metropolitan area
elif type == 2:
return _("Rail") #Used for intercity or long-distance travel.
elif type == 3:
return _("Bus")
elif type == 4:
return _("Ferry")
elif type == 5:
return _("Cable car")
elif type == 6:
return _("Gondola, Suspended cable car")
elif type == 7:
return _("Funicular")
else:
return ""
get_type_name = staticmethod(get_type_name)
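    # Note: the integer codes above mirror the GTFS ``route_type`` values
    # (0 = tram ... 7 = funicular), which is why the range is fixed at 0-7.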
class PaymentMethodProperty(db.IntegerProperty):
"""
Payment Method
"""
    def __init__(self, *args, **kwargs):
kwargs["choices"] = range(0,2)
if "default" not in kwargs:
kwargs["default"] = 0
db.IntegerProperty.__init__(self,*args,**kwargs)
def validate(self, value):
if isinstance(value,basestring):
value = int(value)
return super(PaymentMethodProperty, self).validate(value)
def get_form_field(self, **kwargs):
attrs = {
'form_class': forms.ChoiceField,
'choices' : PaymentMethodProperty.get_choices()
}
attrs.update(kwargs)
return super(PaymentMethodProperty, self).get_form_field(**attrs)
def get_choices():
ret = [ (i,PaymentMethodProperty.get_type_name(i)) for i in range(0,2)]
return ret
get_choices = staticmethod(get_choices)
def get_type_name(type):
if type == 0:
return _("Fare is paid on board")
elif type == 1:
return _("Fare must be paid before boarding")
get_type_name = staticmethod(get_type_name)
| agpl-3.0 | -8,843,272,087,870,216,000 | 7,848,323,638,988,359,000 | 30.086538 | 93 | 0.554903 | false |
ryfeus/lambda-packs | Tensorflow_LightGBM_Scipy_nightly/source/scipy/__config__.py | 2 | 1289 | # This file is generated by /tmp/pip-6gjs2vkw-build/-c
# It contains system_info results at the time of building this package.
__all__ = ["get_info","show"]
openblas_lapack_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
lapack_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
blas_mkl_info={}
openblas_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
blas_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
def get_info(name):
g = globals()
return g.get(name, g.get(name + "_info", {}))
def show():
for name,info_dict in globals().items():
if name[0] == "_" or type(info_dict) is not type({}): continue
print(name + ":")
if not info_dict:
print(" NOT AVAILABLE")
for k,v in info_dict.items():
v = str(v)
if k == "sources" and len(v) > 200:
v = v[:60] + " ...\n... " + v[-60:]
print(" %s = %s" % (k,v))
| mit | -4,158,901,246,120,468,000 | -4,306,812,392,972,486,000 | 48.615385 | 154 | 0.561676 | false |
franky88/emperioanimesta | env/Lib/site-packages/django/db/migrations/migration.py | 123 | 8324 | from __future__ import unicode_literals
from django.db.transaction import atomic
from django.utils.encoding import python_2_unicode_compatible
from .exceptions import IrreversibleError
@python_2_unicode_compatible
class Migration(object):
"""
The base class for all migrations.
Migration files will import this from django.db.migrations.Migration
and subclass it as a class called Migration. It will have one or more
of the following attributes:
- operations: A list of Operation instances, probably from django.db.migrations.operations
- dependencies: A list of tuples of (app_path, migration_name)
- run_before: A list of tuples of (app_path, migration_name)
- replaces: A list of migration_names
Note that all migrations come out of migrations and into the Loader or
Graph as instances, having been initialized with their app label and name.
"""
# Operations to apply during this migration, in order.
operations = []
# Other migrations that should be run before this migration.
# Should be a list of (app, migration_name).
dependencies = []
# Other migrations that should be run after this one (i.e. have
# this migration added to their dependencies). Useful to make third-party
# apps' migrations run after your AUTH_USER replacement, for example.
run_before = []
# Migration names in this app that this migration replaces. If this is
# non-empty, this migration will only be applied if all these migrations
# are not applied.
replaces = []
# Is this an initial migration? Initial migrations are skipped on
# --fake-initial if the table or fields already exist. If None, check if
# the migration has any dependencies to determine if there are dependencies
# to tell if db introspection needs to be done. If True, always perform
# introspection. If False, never perform introspection.
initial = None
# Whether to wrap the whole migration in a transaction. Only has an effect
# on database backends which support transactional DDL.
atomic = True
def __init__(self, name, app_label):
self.name = name
self.app_label = app_label
# Copy dependencies & other attrs as we might mutate them at runtime
self.operations = list(self.__class__.operations)
self.dependencies = list(self.__class__.dependencies)
self.run_before = list(self.__class__.run_before)
self.replaces = list(self.__class__.replaces)
def __eq__(self, other):
if not isinstance(other, Migration):
return False
return (self.name == other.name) and (self.app_label == other.app_label)
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return "<Migration %s.%s>" % (self.app_label, self.name)
def __str__(self):
return "%s.%s" % (self.app_label, self.name)
def __hash__(self):
return hash("%s.%s" % (self.app_label, self.name))
def mutate_state(self, project_state, preserve=True):
"""
Takes a ProjectState and returns a new one with the migration's
operations applied to it. Preserves the original object state by
default and will return a mutated state from a copy.
"""
new_state = project_state
if preserve:
new_state = project_state.clone()
for operation in self.operations:
operation.state_forwards(self.app_label, new_state)
return new_state
def apply(self, project_state, schema_editor, collect_sql=False):
"""
Takes a project_state representing all migrations prior to this one
and a schema_editor for a live database and applies the migration
in a forwards order.
Returns the resulting project state for efficient re-use by following
Migrations.
"""
for operation in self.operations:
# If this operation cannot be represented as SQL, place a comment
# there instead
if collect_sql:
schema_editor.collected_sql.append("--")
if not operation.reduces_to_sql:
schema_editor.collected_sql.append(
"-- MIGRATION NOW PERFORMS OPERATION THAT CANNOT BE WRITTEN AS SQL:"
)
schema_editor.collected_sql.append("-- %s" % operation.describe())
schema_editor.collected_sql.append("--")
if not operation.reduces_to_sql:
continue
# Save the state before the operation has run
old_state = project_state.clone()
operation.state_forwards(self.app_label, project_state)
# Run the operation
atomic_operation = operation.atomic or (self.atomic and operation.atomic is not False)
if not schema_editor.atomic_migration and atomic_operation:
# Force a transaction on a non-transactional-DDL backend or an
# atomic operation inside a non-atomic migration.
with atomic(schema_editor.connection.alias):
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
else:
# Normal behaviour
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
return project_state
def unapply(self, project_state, schema_editor, collect_sql=False):
"""
Takes a project_state representing all migrations prior to this one
and a schema_editor for a live database and applies the migration
in a reverse order.
The backwards migration process consists of two phases:
1. The intermediate states from right before the first until right
after the last operation inside this migration are preserved.
2. The operations are applied in reverse order using the states
recorded in step 1.
"""
# Construct all the intermediate states we need for a reverse migration
to_run = []
new_state = project_state
# Phase 1
for operation in self.operations:
# If it's irreversible, error out
if not operation.reversible:
raise IrreversibleError("Operation %s in %s is not reversible" % (operation, self))
            # Clone so we preserve the state from the previous iteration and
            # don't mutate the same state object across operations.
new_state = new_state.clone()
old_state = new_state.clone()
operation.state_forwards(self.app_label, new_state)
to_run.insert(0, (operation, old_state, new_state))
# Phase 2
for operation, to_state, from_state in to_run:
if collect_sql:
schema_editor.collected_sql.append("--")
if not operation.reduces_to_sql:
schema_editor.collected_sql.append(
"-- MIGRATION NOW PERFORMS OPERATION THAT CANNOT BE WRITTEN AS SQL:"
)
schema_editor.collected_sql.append("-- %s" % operation.describe())
schema_editor.collected_sql.append("--")
if not operation.reduces_to_sql:
continue
if not schema_editor.connection.features.can_rollback_ddl and operation.atomic:
# We're forcing a transaction on a non-transactional-DDL backend
with atomic(schema_editor.connection.alias):
operation.database_backwards(self.app_label, schema_editor, from_state, to_state)
else:
# Normal behaviour
operation.database_backwards(self.app_label, schema_editor, from_state, to_state)
return project_state
class SwappableTuple(tuple):
"""
Subclass of tuple so Django can tell this was originally a swappable
dependency when it reads the migration file.
"""
def __new__(cls, value, setting):
self = tuple.__new__(cls, value)
self.setting = setting
return self
def swappable_dependency(value):
"""
Turns a setting value into a dependency.
"""
return SwappableTuple((value.split(".", 1)[0], "__first__"), value)
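# Example: swappable_dependency("auth.User") yields the dependency
# ("auth", "__first__") carrying setting="auth.User".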
| gpl-3.0 | -6,496,069,212,385,474,000 | -5,621,498,572,907,360,000 | 41.040404 | 104 | 0.632268 | false |
bmazin/ARCONS-pipeline | examples/Pal2014-J0337/hTestLimit.py | 1 | 8356 | #Filename: hTestLimit.py
#Author: Matt Strader
#
#This script opens a list of observed photon phases,
import numpy as np
import tables
import numexpr
import matplotlib.pyplot as plt
import multiprocessing
import functools
import time
from kuiper.kuiper import kuiper,kuiper_FPP
from kuiper.htest import h_test,h_fpp,h_test2
from pulsarUtils import nSigma,plotPulseProfile
from histMetrics import kuiperFpp,hTestFpp
from inverseTransformSampling import inverseTransformSampler
def hTestTrial(iTrial,nPhotons,photonPulseFraction,pulseModel,pulseModelQueryPoints):
    np.random.seed(int((time.time() + iTrial) * 1e6) % 2**32)  # numpy seeds must fit in 32 bits
modelSampler = inverseTransformSampler(pdf=pulseModel,queryPoints=pulseModelQueryPoints)
nPulsePhotons = int(np.floor(photonPulseFraction*nPhotons))
nBackgroundPhotons = int(np.ceil((1.-photonPulseFraction) * nPhotons))
simPulsePhotons = modelSampler(nPulsePhotons)
#background photons come from a uniform distribution
simBackgroundPhotons = np.random.random(nBackgroundPhotons)
simPhases = np.append(simPulsePhotons,simBackgroundPhotons)
simHDict = h_test2(simPhases)
simH,simM,simPval,simFourierCoeffs = simHDict['H'],simHDict['M'],simHDict['fpp'],simHDict['cs']
print '{} - H,M,fpp,sig:'.format(iTrial),simH,simM,simPval
return {'H':simH,'M':simM,'fpp':simPval}
if __name__=='__main__':
path = '/Scratch/dataProcessing/J0337/masterPhotons3.h5'
wvlStart = 4000.
wvlEnd = 5500.
bLoadFromPl = True
nPhaseBins = 20
hTestPath = '/Scratch/dataProcessing/J0337/hTestResults_withProfiles_{}-{}.npz'.format(wvlStart,wvlEnd)
phaseBinEdges = np.linspace(0.,1.,nPhaseBins+1)
if bLoadFromPl:
photFile = tables.openFile(path,'r')
photTable = photFile.root.photons.photTable
phases = photTable.readWhere('(wvlStart < wavelength) & (wavelength < wvlEnd)')['phase']
photFile.close()
print 'cut wavelengths to range ({},{})'.format(wvlStart,wvlEnd)
nPhotons = len(phases)
print nPhotons,'real photons read'
observedProfile,_ = np.histogram(phases,bins=phaseBinEdges)
observedProfile = 1.0*observedProfile
observedProfileErrors = np.sqrt(observedProfile)
#Do H-test
hDict = h_test2(phases)
H,M,pval,fourierCoeffs = hDict['H'],hDict['M'],hDict['fpp'],hDict['cs']
print 'h-test on real data'
print 'H,M,fpp:',H,M,pval
print nSigma(1-pval),'sigmas'
#h_test2 calculates all fourierCoeffs out to 20, but for the fourier model, we only want the ones out to order M, which optimizes the Zm^2 metric
truncatedFourierCoeffs = fourierCoeffs[0:M]
print 'fourier coeffs:',truncatedFourierCoeffs
#for the model, we want the negative modes as well as positve, so add them
modelFourierCoeffs = np.concatenate([truncatedFourierCoeffs[::-1],[1.],np.conj(truncatedFourierCoeffs)])
#make array of mode numbers
modes = np.arange(-len(truncatedFourierCoeffs),len(truncatedFourierCoeffs)+1)
#save so next time we can set bLoadFromPl=False
np.savez(hTestPath,H=H,M=M,pval=pval,fourierCoeffs=fourierCoeffs,nPhotons=nPhotons,wvlRange=(wvlStart,wvlEnd),modelFourierCoeffs=modelFourierCoeffs,modes=modes,observedProfile=observedProfile,observedProfileErrors=observedProfileErrors,phaseBinEdges=phaseBinEdges)
else:
#Load values from previous run, when we had bLoadFromPl=True
hTestDict = np.load(hTestPath)
H,M,pval,fourierCoeffs,nPhotons,modelFourierCoeffs,modes = hTestDict['H'],hTestDict['M'],hTestDict['pval'],hTestDict['fourierCoeffs'],hTestDict['nPhotons'],hTestDict['modelFourierCoeffs'],hTestDict['modes']
observedProfile,observedProfileErrors,phaseBinEdges = hTestDict['observedProfile'],hTestDict['observedProfileErrors'],hTestDict['phaseBinEdges']
print 'h-test on real data'
print 'H,M,fpp:',H,M,pval
print nSigma(1-pval),'sigmas'
#Plot the observed profile
fig,ax = plt.subplots(1,1)
plotPulseProfile(phaseBinEdges,observedProfile,profileErrors=observedProfileErrors,color='k',plotDoublePulse=False,label='observed',ax=ax)
ax.set_ylabel('counts')
ax.set_xlabel('phase')
ax.set_title('Observed Folded Light Curve {}-{} nm'.format(wvlStart/10.,wvlEnd/10.))
#make as set of x points for the pulse model we'll make
#Do NOT include x=0, or the inverted function will have a jump that causes an excess of samples
#at phase=0
nSmoothPlotPoints=1000
pulseModelQueryPoints = np.linspace(1./nSmoothPlotPoints,1,nSmoothPlotPoints)
def modelProfile(thetas):
return np.sum( modelFourierCoeffs * np.exp(2.j*np.pi*modes*thetas[:,np.newaxis]),axis=1)
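    # modelProfile evaluates the truncated Fourier series sum_m c_m * exp(2*pi*i*m*theta)
    # at each phase, so its modulus (taken below) is the smooth folded light-curve model.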
lightCurveModel = np.abs(modelProfile(pulseModelQueryPoints))
#for this test we only want the model to be the pulsed component. We will add a DC offset later
pulseModel = lightCurveModel - np.min(lightCurveModel)
#initialPhotonPulseFraction = 1.*np.sum(pulseModel) / np.sum(lightCurveModel)
photonPulseFraction=15400./nPhotons #skip to previously determined answer
print 'photon fraction',photonPulseFraction
#get samples with distribution of the modelProfile
#modelSampler = inverseTransformSampler(pdf=lightCurveModel,queryPoints=pulseModelQueryPoints)
modelSampler = inverseTransformSampler(pdf=pulseModel,queryPoints=pulseModelQueryPoints)
nTrials = 1
#for each trial run the h test on a set of photon phases with our model profile, and with the pulse fraction specified
#we want to make a distribution of H values for this pulse fraction, model, and number of photons
#make a function that only takes the trial number (as an identifier)
mappableHTestTrial = functools.partial(hTestTrial,pulseModel=pulseModel,
pulseModelQueryPoints=pulseModelQueryPoints,nPhotons=nPhotons,
photonPulseFraction=photonPulseFraction)
pool = multiprocessing.Pool(processes=multiprocessing.cpu_count()-3)#leave a few processors for other people
outDicts = pool.map(mappableHTestTrial,np.arange(nTrials))
simHs = np.array([out['H'] for out in outDicts])
simPvals = np.array([out['fpp'] for out in outDicts])
#save the resulting list of H vals
np.savez('sim3-h-{}.npz'.format(nTrials),simHs=simHs,simPvals=simPvals,pval=pval,H=H,photonPulseFraction=photonPulseFraction,nPhotons=nPhotons)
#make a model profile once more for a plot
modelSampler = inverseTransformSampler(pdf=pulseModel,queryPoints=pulseModelQueryPoints)
nPulsePhotons = int(np.floor(photonPulseFraction*nPhotons))
nBackgroundPhotons = int(np.ceil((1.-photonPulseFraction) * nPhotons))
simPulsePhotons = modelSampler(nPulsePhotons)
#background photons come from a uniform distribution
simBackgroundPhotons = np.random.random(nBackgroundPhotons)
#put them together for the full profile
simPhases = np.append(simPulsePhotons,simBackgroundPhotons)
#make a binned phase profile to plot
simProfile,_ = np.histogram(simPhases,bins=phaseBinEdges)
    simProfileErrors = np.sqrt(simProfile)  # assume Poisson errors
meanLevel = np.mean(simProfile)
fig,ax = plt.subplots(1,1)
ax.plot(pulseModelQueryPoints,meanLevel*lightCurveModel,color='r')
plotPulseProfile(phaseBinEdges,simProfile,profileErrors=simProfileErrors,color='b',plotDoublePulse=False,label='sim',ax=ax)
ax.set_title('Simulated profile')
#
#plt.show()
print '{} trials'.format(len(simHs))
print 'observed fpp:',pval
frac = 1.*np.sum(simPvals<pval)/len(simPvals)
print 'fraction of trials with H below observed fpp:',frac
#hHist,hBinEdges = np.histogram(simHs,bins=100,density=True)
fppHist,fppBinEdges = np.histogram(simPvals,bins=100,density=True)
if nTrials > 1:
fig,ax = plt.subplots(1,1)
ax.plot(fppBinEdges[0:-1],fppHist,drawstyle='steps-post',color='k')
ax.axvline(pval,color='r')
ax.set_xlabel('fpp')
ax.set_ylabel('frequency')
ax.set_title('Distribution of H for model profile')
magG = 17.93
sineMagDiff = -2.5*np.log10(photonPulseFraction)
print 'SDSS magnitude g: {:.2f}'.format(magG)
print 'magnitude difference: {:.2f}'.format(sineMagDiff)
print 'limiting g mag: {:.2f}'.format(magG+sineMagDiff)
plt.show()
| gpl-2.0 | -1,899,022,923,335,936,300 | 3,178,380,307,748,096,500 | 44.413043 | 272 | 0.728698 | false |
alanquillin/ryu | ryu/contrib/tinyrpc/transports/__init__.py | 43 | 1789 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
class ServerTransport(object):
"""Base class for all server transports."""
def receive_message(self):
"""Receive a message from the transport.
        Blocks until another message has been received. May return a context
        opaque to clients that should be passed to
        :py:func:`~tinyrpc.transport.Transport.send_reply` to identify the
        client later on.
:return: A tuple consisting of ``(context, message)``.
"""
raise NotImplementedError()
def send_reply(self, context, reply):
"""Sends a reply to a client.
The client is usually identified by passing ``context`` as returned
from the original
:py:func:`~tinyrpc.transport.Transport.receive_message` call.
        Messages must be strings; it is up to the sender to convert them
        beforehand. A non-string value raises a :py:exc:`TypeError`.
:param context: A context returned by
:py:func:`~tinyrpc.transport.Transport.receive_message`.
:param reply: A string to send back as the reply.
"""
        raise NotImplementedError()
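# Minimal in-memory sketch of the server contract (hypothetical, illustration only):
#     class QueueServerTransport(ServerTransport):
#         def __init__(self, inbox, outbox):
#             self.inbox, self.outbox = inbox, outbox
#         def receive_message(self):
#             return None, self.inbox.get()   # (context, message)
#         def send_reply(self, context, reply):
#             self.outbox.put(reply)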
class ClientTransport(object):
"""Base class for all client transports."""
def send_message(self, message, expect_reply=True):
"""Send a message to the server and possibly receive a reply.
Sends a message to the connected server.
        Messages must be strings; it is up to the sender to convert them
        beforehand. A non-string value raises a :py:exc:`TypeError`.
This function will block until one reply has been received.
:param message: A string to send.
:return: A string containing the server reply.
"""
        raise NotImplementedError()
| apache-2.0 | 7,832,669,055,779,481,000 | 3,043,946,072,369,843,000 | 33.403846 | 80 | 0.648407 | false |
ImmobilienScout24/moto | tests/test_kms/test_kms.py | 9 | 11046 | from __future__ import unicode_literals
import re
import boto.kms
from boto.exception import JSONResponseError
from boto.kms.exceptions import AlreadyExistsException, NotFoundException
import sure # noqa
from moto import mock_kms
from nose.tools import assert_raises
@mock_kms
def test_create_key():
conn = boto.kms.connect_to_region("us-west-2")
key = conn.create_key(policy="my policy", description="my key", key_usage='ENCRYPT_DECRYPT')
key['KeyMetadata']['Description'].should.equal("my key")
key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT")
key['KeyMetadata']['Enabled'].should.equal(True)
@mock_kms
def test_describe_key():
conn = boto.kms.connect_to_region("us-west-2")
key = conn.create_key(policy="my policy", description="my key", key_usage='ENCRYPT_DECRYPT')
key_id = key['KeyMetadata']['KeyId']
key = conn.describe_key(key_id)
key['KeyMetadata']['Description'].should.equal("my key")
key['KeyMetadata']['KeyUsage'].should.equal("ENCRYPT_DECRYPT")
@mock_kms
def test_describe_missing_key():
conn = boto.kms.connect_to_region("us-west-2")
conn.describe_key.when.called_with("not-a-key").should.throw(JSONResponseError)
@mock_kms
def test_list_keys():
conn = boto.kms.connect_to_region("us-west-2")
conn.create_key(policy="my policy", description="my key1", key_usage='ENCRYPT_DECRYPT')
conn.create_key(policy="my policy", description="my key2", key_usage='ENCRYPT_DECRYPT')
keys = conn.list_keys()
keys['Keys'].should.have.length_of(2)
@mock_kms
def test__create_alias__returns_none_if_correct():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
resp = kms.create_alias('alias/my-alias', key_id)
resp.should.be.none
@mock_kms
def test__create_alias__raises_if_reserved_alias():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
reserved_aliases = [
'alias/aws/ebs',
'alias/aws/s3',
'alias/aws/redshift',
'alias/aws/rds',
]
for alias_name in reserved_aliases:
with assert_raises(JSONResponseError) as err:
kms.create_alias(alias_name, key_id)
ex = err.exception
ex.error_message.should.be.none
ex.error_code.should.equal('NotAuthorizedException')
ex.body.should.equal({'__type': 'NotAuthorizedException'})
ex.reason.should.equal('Bad Request')
ex.status.should.equal(400)
@mock_kms
def test__create_alias__can_create_multiple_aliases_for_same_key_id():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
kms.create_alias('alias/my-alias3', key_id).should.be.none
kms.create_alias('alias/my-alias4', key_id).should.be.none
kms.create_alias('alias/my-alias5', key_id).should.be.none
@mock_kms
def test__create_alias__raises_if_wrong_prefix():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
with assert_raises(JSONResponseError) as err:
kms.create_alias('wrongprefix/my-alias', key_id)
ex = err.exception
ex.error_message.should.equal('Invalid identifier')
ex.error_code.should.equal('ValidationException')
ex.body.should.equal({'message': 'Invalid identifier', '__type': 'ValidationException'})
ex.reason.should.equal('Bad Request')
ex.status.should.equal(400)
@mock_kms
def test__create_alias__raises_if_duplicate():
region = 'us-west-2'
kms = boto.kms.connect_to_region(region)
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
alias = 'alias/my-alias'
kms.create_alias(alias, key_id)
with assert_raises(AlreadyExistsException) as err:
kms.create_alias(alias, key_id)
ex = err.exception
ex.error_message.should.match(r'An alias with the name arn:aws:kms:{region}:\d{{12}}:{alias} already exists'
.format(**locals()))
ex.error_code.should.be.none
ex.box_usage.should.be.none
ex.request_id.should.be.none
ex.body['message'].should.match(r'An alias with the name arn:aws:kms:{region}:\d{{12}}:{alias} already exists'
.format(**locals()))
ex.body['__type'].should.equal('AlreadyExistsException')
ex.reason.should.equal('Bad Request')
ex.status.should.equal(400)
@mock_kms
def test__create_alias__raises_if_alias_has_restricted_characters():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
alias_names_with_restricted_characters = [
'alias/my-alias!',
'alias/my-alias$',
'alias/my-alias@',
]
for alias_name in alias_names_with_restricted_characters:
with assert_raises(JSONResponseError) as err:
kms.create_alias(alias_name, key_id)
ex = err.exception
ex.body['__type'].should.equal('ValidationException')
ex.body['message'].should.equal("1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$".format(**locals()))
ex.error_code.should.equal('ValidationException')
ex.message.should.equal("1 validation error detected: Value '{alias_name}' at 'aliasName' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[a-zA-Z0-9:/_-]+$".format(**locals()))
ex.reason.should.equal('Bad Request')
ex.status.should.equal(400)
@mock_kms
def test__create_alias__raises_if_alias_has_colon_character():
# For some reason, colons are not accepted for an alias, even though they are accepted by regex ^[a-zA-Z0-9:/_-]+$
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
alias_names_with_restricted_characters = [
'alias/my:alias',
]
for alias_name in alias_names_with_restricted_characters:
with assert_raises(JSONResponseError) as err:
kms.create_alias(alias_name, key_id)
ex = err.exception
ex.body['__type'].should.equal('ValidationException')
ex.body['message'].should.equal("{alias_name} contains invalid characters for an alias".format(**locals()))
ex.error_code.should.equal('ValidationException')
ex.message.should.equal("{alias_name} contains invalid characters for an alias".format(**locals()))
ex.reason.should.equal('Bad Request')
ex.status.should.equal(400)
@mock_kms
def test__create_alias__accepted_characters():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
alias_names_with_accepted_characters = [
'alias/my-alias_/',
'alias/my_alias-/',
]
for alias_name in alias_names_with_accepted_characters:
kms.create_alias(alias_name, key_id)
@mock_kms
def test__create_alias__raises_if_target_key_id_is_existing_alias():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
alias = 'alias/my-alias'
kms.create_alias(alias, key_id)
with assert_raises(JSONResponseError) as err:
kms.create_alias(alias, alias)
ex = err.exception
ex.body['__type'].should.equal('ValidationException')
ex.body['message'].should.equal('Aliases must refer to keys. Not aliases')
ex.error_code.should.equal('ValidationException')
ex.message.should.equal('Aliases must refer to keys. Not aliases')
ex.reason.should.equal('Bad Request')
ex.status.should.equal(400)
@mock_kms
def test__delete_alias():
kms = boto.connect_kms()
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
alias = 'alias/my-alias'
kms.create_alias(alias, key_id)
resp = kms.delete_alias(alias)
resp.should.be.none
# we can create the alias again, since it has been deleted
kms.create_alias(alias, key_id)
@mock_kms
def test__delete_alias__raises_if_wrong_prefix():
kms = boto.connect_kms()
with assert_raises(JSONResponseError) as err:
kms.delete_alias('wrongprefix/my-alias')
ex = err.exception
ex.body['__type'].should.equal('ValidationException')
ex.body['message'].should.equal('Invalid identifier')
ex.error_code.should.equal('ValidationException')
ex.message.should.equal('Invalid identifier')
ex.reason.should.equal('Bad Request')
ex.status.should.equal(400)
@mock_kms
def test__delete_alias__raises_if_alias_is_not_found():
region = 'us-west-2'
kms = boto.kms.connect_to_region(region)
alias_name = 'alias/unexisting-alias'
with assert_raises(NotFoundException) as err:
kms.delete_alias(alias_name)
ex = err.exception
ex.body['__type'].should.equal('NotFoundException')
ex.body['message'].should.match(r'Alias arn:aws:kms:{region}:\d{{12}}:{alias_name} is not found.'.format(**locals()))
ex.box_usage.should.be.none
ex.error_code.should.be.none
ex.message.should.match(r'Alias arn:aws:kms:{region}:\d{{12}}:{alias_name} is not found.'.format(**locals()))
ex.reason.should.equal('Bad Request')
ex.request_id.should.be.none
ex.status.should.equal(400)
@mock_kms
def test__list_aliases():
region = "eu-west-1"
kms = boto.kms.connect_to_region(region)
create_resp = kms.create_key()
key_id = create_resp['KeyMetadata']['KeyId']
kms.create_alias('alias/my-alias1', key_id)
kms.create_alias('alias/my-alias2', key_id)
kms.create_alias('alias/my-alias3', key_id)
resp = kms.list_aliases()
resp['Truncated'].should.be.false
aliases = resp['Aliases']
def has_correct_arn(alias_obj):
alias_name = alias_obj['AliasName']
alias_arn = alias_obj['AliasArn']
return re.match(r'arn:aws:kms:{region}:\d{{12}}:{alias_name}'.format(region=region, alias_name=alias_name),
alias_arn)
len([alias for alias in aliases if
has_correct_arn(alias) and 'alias/aws/ebs' == alias['AliasName']]).should.equal(1)
len([alias for alias in aliases if
has_correct_arn(alias) and 'alias/aws/rds' == alias['AliasName']]).should.equal(1)
len([alias for alias in aliases if
has_correct_arn(alias) and 'alias/aws/redshift' == alias['AliasName']]).should.equal(1)
len([alias for alias in aliases if
has_correct_arn(alias) and 'alias/aws/s3' == alias['AliasName']]).should.equal(1)
len([alias for alias in aliases if
has_correct_arn(alias) and 'alias/my-alias1' == alias['AliasName']]).should.equal(1)
len([alias for alias in aliases if
has_correct_arn(alias) and 'alias/my-alias2' == alias['AliasName']]).should.equal(1)
len([alias for alias in aliases if 'TargetKeyId' in alias and key_id == alias['TargetKeyId']]).should.equal(3)
len(aliases).should.equal(7)
| apache-2.0 | -3,609,275,142,634,579,000 | -5,506,534,805,154,471,000 | 34.066667 | 223 | 0.66214 | false |
DESHRAJ/fjord | vendor/packages/translate-toolkit/translate/misc/multistring.py | 29 | 3583 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Supports a hybrid Unicode string that can also have a list of alternate
strings in the strings attribute"""
from translate.misc import autoencode
class multistring(autoencode.autoencode):
def __new__(newtype, string=u"", encoding=None, errors=None):
if isinstance(string, list):
if not string:
raise ValueError("multistring must contain at least one string")
mainstring = string[0]
newstring = multistring.__new__(newtype, string[0],
encoding, errors)
newstring.strings = [newstring] + [autoencode.autoencode.__new__(autoencode.autoencode, altstring, encoding, errors) for altstring in string[1:]]
else:
newstring = autoencode.autoencode.__new__(newtype, string,
encoding, errors)
newstring.strings = [newstring]
return newstring
def __init__(self, *args, **kwargs):
super(multistring, self).__init__()
if not hasattr(self, "strings"):
self.strings = []
def __cmp__(self, otherstring):
if isinstance(otherstring, multistring):
parentcompare = cmp(autoencode.autoencode(self), otherstring)
if parentcompare:
return parentcompare
else:
return cmp(self.strings[1:], otherstring.strings[1:])
elif isinstance(otherstring, autoencode.autoencode):
return cmp(autoencode.autoencode(self), otherstring)
elif isinstance(otherstring, unicode):
return cmp(unicode(self), otherstring)
elif isinstance(otherstring, str):
return cmp(str(self), otherstring)
elif isinstance(otherstring, list) and otherstring:
return cmp(self, multistring(otherstring))
else:
return cmp(type(self), type(otherstring))
def __ne__(self, otherstring):
return self.__cmp__(otherstring) != 0
def __eq__(self, otherstring):
return self.__cmp__(otherstring) == 0
def __repr__(self):
parts = [autoencode.autoencode.__repr__(self)] + \
[repr(a) for a in self.strings[1:]]
return "multistring([" + ",".join(parts) + "])"
def replace(self, old, new, count=None):
if count is None:
newstr = multistring(super(multistring, self) \
.replace(old, new), self.encoding)
else:
newstr = multistring(super(multistring, self) \
.replace(old, new, count), self.encoding)
for s in self.strings[1:]:
if count is None:
newstr.strings.append(s.replace(old, new))
else:
newstr.strings.append(s.replace(old, new, count))
return newstr
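# --- Illustrative example (not part of the original module) -----------------
# A short sketch of how multistring behaves: it compares equal to its main
# (first) string, keeps the alternates in ``strings``, and ``replace`` is
# applied to every alternate as well.
if __name__ == '__main__':
    ms = multistring([u"apple", u"apples"])
    assert ms == u"apple"           # compares like its first string
    assert len(ms.strings) == 2     # but keeps all alternates
    assert ms.replace(u"apple", u"pear").strings[1] == u"pears"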
| bsd-3-clause | 6,756,929,010,532,581,000 | -6,003,434,742,209,642,000 | 39.258427 | 157 | 0.612336 | false |
Saevon/webdnd | shared/utils/debug_toolbars.py | 1 | 1502 | import django
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import DebugPanel
import sys
class VersionDebugPanel(DebugPanel):
'''
Panel that displays the Django version.
'''
name = 'Version'
has_content = True
def nav_title(self):
return _('Versions')
def nav_subtitle(self):
return 'Django %s' % django.get_version()
def url(self):
return ''
def title(self):
return _('Versions')
def content(self):
versions = {}
versions['Web D&D'] = settings.VERSION
versions['Syncrae'] = settings.SYNCRAE_VERSION
context = self.context.copy()
context.update({
'versions': versions,
'paths': sys.path,
})
return render_to_string('debug_toolbar/panels/versions.html', context)
class SyncraeSpyDebugPanel(DebugPanel):
'''
Panel that shows Syncrae Messages
'''
name = 'Syncrae'
has_content = True
def nav_title(self):
return _('Syncrae')
def nav_subtitle(self):
return ''
def url(self):
return ''
def title(self):
return _('Syncrae')
def content(self):
return render_to_string('debug_syncrae.html', self.context)
class DividerDebugPanel(DebugPanel):
name = 'Divider'
has_content = False
def nav_title(self):
return ' '
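# --- Illustrative example (not part of the original module) -----------------
# These panels are activated through django-debug-toolbar's settings; a
# hypothetical settings.py entry might look like the list below (the dotted
# paths assume this module lives at ``shared/utils/debug_toolbars.py``).
#
# DEBUG_TOOLBAR_PANELS = (
#     'shared.utils.debug_toolbars.VersionDebugPanel',
#     'shared.utils.debug_toolbars.SyncraeSpyDebugPanel',
#     'shared.utils.debug_toolbars.DividerDebugPanel',
# )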
| mit | -4,343,598,543,396,808,700 | 7,031,216,237,284,824,000 | 18.25641 | 78 | 0.608522 | false |
stefco/geco_data | geco_irig_plot.py | 1 | 5662 | #!/usr/bin/env python
# (c) Stefan Countryman, 2016-2017
DESC="""Plot an IRIG-B signal read from stdin. Assumes that the timeseries
is a sequence of newline-delimited float literals."""
FAST_CHANNEL_BITRATE = 16384 # for IRIG-B, DuoTone, etc.
# THE REST OF THE IMPORTS ARE AFTER THIS IF STATEMENT.
# Quits immediately on --help or -h flags to skip slow imports when you just
# want to read the help documentation.
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description=DESC)
# TODO: make this -i and --ifo instead of detector.
parser.add_argument("--detector",
help=("the detector; used in the title of the output "
"plot"))
parser.add_argument("-O", "--outfile",
help="the filename of the generated plot")
parser.add_argument("-T", "--timeseries",
help="copy from stdin to stdout while reading",
action="store_true")
parser.add_argument("-A", "--actualtime",
help=("actual time signal was recorded "
"(appears in title)"))
args = parser.parse_args()
# Force matplotlib to not use any Xwindows backend. NECESSARY ON CLUSTER.
import matplotlib
matplotlib.use('Agg')
import sys
import time
import numpy as np
import matplotlib.pyplot as plt
import geco_irig_decode
def read_timeseries_stdin(num_lines, cat_to_stdout=False):
"""Read in newline-delimited numerical data from stdin; don't read more
than a second worth of data. If cat_to_stdout is True, print data that
has been read in back to stdout (useful for piped commands)."""
timeseries = np.zeros(num_lines)
line = ""
i = 0
while i < num_lines:
line = float(sys.stdin.readline())
timeseries[i] = line
if cat_to_stdout:
print(line)
i += 1
return timeseries
def irigb_decoded_title(timeseries, IFO=None, actual_time=None):
"""Get a title for an IRIG-B timeseries plot that includes the decoded
time in the timeseries itself."""
# get the detector name
if IFO is None:
detector_suffix = ""
else:
detector_suffix = " at " + IFO
# get the actual time of recording, if provided
if actual_time is None:
actual_time_str = ""
else:
actual_time_str = "\nActual Time: {}".format(actual_time)
# add title and so on
try:
decoded_time = geco_irig_decode.get_date_from_timeseries(timeseries)
decoded_time_str = decoded_time.strftime('%a %b %d %X %Y')
except ValueError as e:
decoded_time_str = "COULD NOT DECODE TIME"
fmt = "One Second of IRIG-B Signal{}\nDecoded Time: {}{}"
return fmt.format(detector_suffix, decoded_time_str, actual_time_str)
def irigb_output_filename(outfile=None):
"""Get the output filename for an IRIG-B plot."""
if outfile is None:
output_filename = "irigb-plot-made-at-" + str(time.time()) + ".png"
else:
output_filename = outfile
# append .png if not already there
if output_filename.split(".")[-1] != "png":
output_filename += ".png"
return output_filename
def plot_with_zoomed_views(timeseries, title, num_subdivs=5, dt=1.,
output_filename=None, overlay=False, linewidth=1):
"""Plot a timeseries and produce num_subdivs subplots that show equal-sized
subdivisions of the full timeseries data to show details (good for
high-bitrate timeseries). If you want to keep plotting data to the same
figure, set 'overlay=True', and the current figure will be plotted to."""
bitrate = int(len(timeseries) / float(dt))
times = np.linspace(0, 1, num=bitrate, endpoint=False)
# find max and min values in timeseries; use these to set plot boundaries
yrange = timeseries.max() - timeseries.min()
ymax = timeseries.max() + 0.1*yrange
ymin = timeseries.min() - 0.1*yrange
if not overlay:
plt.figure()
# print("making plot")
plt.gcf().set_figwidth(7)
plt.gcf().set_figheight(4+1.2*num_subdivs) # ~1.2in height per zoomed plot
# plot the full second on the first row; lines should be black ('k' option).
plt.subplot(num_subdivs + 1, 1, 1)
plt.ylim(ymin, ymax)
plt.plot(times, timeseries, 'k', linewidth=linewidth)
plt.tick_params(axis='y', labelsize='small')
# make num_subdivs subplots to better show the full second
for i in range(num_subdivs):
# print("making plot " + str(i))
plt.subplot(num_subdivs+1, 1, i+2)
plt.ylim(ymin, ymax)
plt.xlim(float(i)/num_subdivs, (float(i)+1)/num_subdivs)
start = bitrate*i // num_subdivs
end = bitrate*(i+1) // num_subdivs
plt.plot(times[start:end], timeseries[start:end], 'k',
linewidth=linewidth)
plt.tick_params(axis='y', labelsize='small')
plt.suptitle(title)
plt.xlabel("Time since start of second [$s$]")
# print("saving plot")
plt.subplots_adjust(left=0.125, right=0.9, bottom=0.1, top=0.9, wspace=0.2,
hspace=0.5)
if not (output_filename is None):
plt.savefig(output_filename)
return plt
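# --- Illustrative example (not part of the original script) -----------------
# plot_with_zoomed_views also works on synthetic data; the sine wave below is
# a stand-in for a real second of IRIG-B signal and the helper name is
# hypothetical, meant only to show the call.
def _demo_plot(outfile='demo.png'):
    t = np.linspace(0, 1, FAST_CHANNEL_BITRATE, endpoint=False)
    fake = np.sin(2 * np.pi * 100 * t)   # 100 Hz test tone, one second long
    plot_with_zoomed_views(fake, 'Synthetic 100 Hz test signal',
                           num_subdivs=5, dt=1., output_filename=outfile)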
if __name__ == '__main__':
timeseries = read_timeseries_stdin(FAST_CHANNEL_BITRATE,
cat_to_stdout=args.timeseries)
title = irigb_decoded_title(timeseries, args.detector, args.actualtime)
output_filename = irigb_output_filename(args.outfile)
plot_with_zoomed_views(timeseries, title, num_subdivs=5, dt=1.,
output_filename=output_filename)
| mit | -2,458,444,458,719,165,000 | 3,849,121,076,902,836,000 | 40.028986 | 80 | 0.628753 | false |
TNick/pyl2extra | pyl2extra/datasets/images.py | 1 | 13590 | """
Dataset for images and related functionality.
This module does not have dependencies inside pyl2extra package, so you
can just copy-paste it inside your source tree.
To use this dataset prepare a .csv file with targets (integers or real numbers)
on first column and file paths on the second column:
.. code::
0,file1.png
1,file2.png
Image file paths are relative to current directory (``os.getcwd()``). The
images need not be square and can be in any format recognized by the
``Image`` module. Internally, the images are converted to RGB and are made
square for you.
Use it in a .yaml file like so:
.. code::
dataset: &trndataset !obj:pyl2extra.datasets.images.Images {
source: 'train.csv',
image_size: 128
}
The ``image_size`` can be skipped, in which case the size of the images is
derived from first image that is provided.
By default the class assumes a classification problem (targets are integers).
If you need to uset it in a regression problem create it like so:
.. code::
dataset: &trndataset !obj:pyl2extra.datasets.images.Images {
source: 'train.csv',
image_size: 128,
regression: True
}
As the dataset simply wraps the ``DenseDesignMatrix``, parameters like
``rng`` (random number generator), ``preprocessor`` and ``fit_preprocessor``
can be used and will be passed to ``DenseDesignMatrix`` superclass.
"""
__authors__ = "Nicu Tofan"
__copyright__ = "Copyright 2015, Nicu Tofan"
__credits__ = ["Nicu Tofan"]
__license__ = "3-clause BSD"
__maintainer__ = "Nicu Tofan"
__email__ = "[email protected]"
import csv
import numpy
import os
from PIL import Image
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
import theano
class Images(DenseDesignMatrix):
"""
A pylearn2 dataset that loads the images from a list or csv file.
Please note that - if you use this dataset and your model has a
final Softmax layer you should construct it like so (YAML syntax):
.. code::
!obj:pylearn2.models.mlp.Softmax {
layer_name: 'y',
irange: .0,
n_classes: %(classes)d,
binary_target_dim: 1
}
where ``classes`` is the same number of classes passed to ``Images``
constructor. ``binary_target_dim`` is important and failing to set it
constructs the wrong architecture, causing errors like:
ValueError: VectorSpace(dim=1, dtype=float32) with total dimension 1
can't format a batch into VectorSpace(dim=X, dtype=float32) because
its total dimension is X.
Parameters
----------
source: OrderedDict, dict, str, tuple, list
This argument provides the input images and (optionally)
associated categories. The meaning of the argument depends
on the data type:
- if ``source`` is a string, it is interpreted to be the
path towards a csv file; the file must NOT have a header,
first column must contain the targets (classes or values) and
second column must contain the paths for the image files;
- if ``source`` is a dictionary, the keys must be the
paths for image files, ``Image`` instances or numpy arrays and
the values must be the classes or values (None or empty
string if this instance does not provide the labels);
- a tuple or list must have exactly one or two
members: first one must be a list or tuple of image paths or
Images or numpy arrays, while second one (optional)
has the targets (classes as integers or real values).
image_size: int, optional
The size of the images in the final dataset. All images
will be resized to be ``image_size`` x ``image_size``
pixels.
classes: int, optional
If this is a classification problem the parameter should be
used to indicate the total number of classes and targets are
expected to be integers in the range ``[0; classes-1]``.
If this is a regression problem the parameter should be ``None`` and
targets are expected to be real numbers.
rng: object, optional
A random number generator used for picking random \
indices into the design matrix when choosing minibatches.
preprocessor: Preprocessor, optional
Preprocessor to apply to images.
fit_preprocessor: bool, optional
Whether preprocessor can fit parameters when applied to training
data.
"""
def __init__(self, source, image_size=None, classes=None,
rng=None, preprocessor=None, fit_preprocessor=False):
#: preserve original argument for future reference
self.source = source
#: Number of classes (None for regression)
self.classes = classes
# all images are loaded in ``ind`` variable
ind = _init_input(source)
# DenseDesignMatrix expects us to provide a numpy array
# we choose to have number of examples on first axis ('b'),
# then rows and columns of the image, then the channels
# always 3 in our case
self.axes = ('b', 0, 1, 'c')
if image_size is None:
dense_x = None
else:
dense_x = numpy.zeros(shape=(len(ind), image_size, image_size, 3),
dtype='uint8')
categories = []
has_targets = False
for i, (img, ctg) in enumerate(ind):
if isinstance(img, Image.Image):
img = numpy.array(img)
width = img.shape[1]
height = img.shape[0]
largest = max(width, height)
if image_size is None:
# if the user did not specify an image size we determine
# the size using the first image that we encounter; this is
                # useful if all images are already of required size,
# for example
image_size = largest
dense_x = numpy.zeros(shape=(len(ind), image_size,
image_size, 3),
dtype='uint8')
imgin = img
# do we need to enlarge / shrink the image?
elif largest != image_size:
wpercent = image_size / float(largest)
width = int(width * wpercent)
height = int(height * wpercent)
largest = max(width, height)
# inefficient? could use scipy.ndimage.zoom.
img_tmp = Image.fromarray(img)
img_tmp = img_tmp.resize((width, height), Image.ANTIALIAS)
imgin = numpy.array(img_tmp)
else:
imgin = img
delta_x = (largest - width) / 2
delta_y = (largest - height) / 2
delta_x2 = delta_x + width
delta_y2 = delta_y + height
#print delta_x, delta_y, delta_x2, delta_y2, width, height
dense_x[i, delta_y:delta_y2, delta_x:delta_x2, :] = imgin
categories.append(ctg)
if ctg != '':
has_targets = True
dense_x = numpy.cast[theano.config.floatX](dense_x)
# if we have categories / values convert them to proper format
if has_targets:
if classes is None:
# in regression we expect real values
dense_y = numpy.empty(shape=(len(ind), 1),
dtype=theano.config.floatX)
for i, ctg in enumerate(categories):
dense_y[i, 0] = float(ctg)
else:
# in classification we expect integers
dense_y = numpy.empty(shape=(len(ind), 1), dtype=int)
for i, ctg in enumerate(categories):
dense_y[i, 0] = int(ctg)
else:
dense_y = None
if rng is None:
rng = DenseDesignMatrix._default_seed
# everything else is handled by the DenseDesignMatrix superclass
super(Images, self).__init__(topo_view=dense_x,
y=dense_y,
axes=self.axes,
view_converter=None,
preprocessor=preprocessor,
fit_preprocessor=fit_preprocessor,
X_labels=None,
y_labels=classes if has_targets else None)
def _init_input(source):
"""
Homogenize sources.
"""
if isinstance(source, basestring):
# this is a csv file that we're going to read
result = _load_list(_load_csv(source))
elif isinstance(source, dict):
# keys are file names, values are classes
result = _load_list(source.items())
elif isinstance(source, (list, tuple)):
# one item lists the files, the other lists the classes
if len(source) == 1:
result = _load_list([(src, None) for src in source[0]])
elif len(source) == 2:
if len(source[0]) == len(source[1]):
result = _load_list(zip(source[0], source[1]))
else:
raise ValueError("Lists/tuples provded to Images class "
"constructor are expected to have "
"same length (%d != %d)" %
(len(source[0]), len(source[1])))
else:
raise ValueError("Lists/tuples provided to Images class "
"constructor are expected to have one "
"(images only) or two members (images"
" and classes); the input has %d members." %
len(source))
else:
raise ValueError("Images class expects for its `source` argument "
"a file path (string), a dictionary of "
"file:class pairs, or a pair of lists (tuples); "
"%s is not supported" % str(source.__class__))
return result
def _load_csv(csv_path):
"""
Internal function for loading the content from a .csv file.
Parameters
----------
csv_path: str
The path towards the .csv file to read.
Returns
-------
result: list of tuples
The method creates a list of tuples that should be passed to
`_load_list()`.
"""
# we're going to accumulate files and categories here
result = []
# compute absolute path of the source csv file
csv_path = os.path.abspath(csv_path)
with open(csv_path, 'rt') as fhand:
# the reader is flexible, allowing delimiters
# other than comma; quotation can also be customized
csvr = csv.reader(fhand,
delimiter=',',
quotechar='"')
# the reader will give us a list for each row of
# the source file
for row in csvr:
# we're going to skip empty rows without warning
if len(row) == 0:
continue
# we could skip the header here, if present; we
# could even detect the column index from its
# name; but we try to keep the things simple
# class/value is always first, file path second
result.append((row[1], row[0]))
return result
def _load_list(srclist):
"""
Internal function for loading the content from a list.
Image files are converted to `numpy.ndarray`;
    empty classes are normalized to a string of length 0.
Parameters
----------
srclist: list of tuples
A list of tuples, with first entry in tuple being
a string, an Image or `numpy.ndarray` instances and
second being classes (None for no class).
Returns
-------
result: list of tuples
The method creates a list of tuples, with first entry in tuple being
`numpy.ndarray` instances and second being targets (None for no
target) - integer classes (classification) or real values
(regression).
"""
# we're going to accumulate Images and categories here
result = []
for img, cls in srclist:
if isinstance(img, basestring):
imgin = Image.open(img)
elif isinstance(img, numpy.ndarray):
imgin = Image.fromarray(img)
elif isinstance(img, Image.Image):
imgin = img
elif Image.isImageType(img):
imgin = img
else:
raise ValueError("Valid input for images are strings (a "
"path towards a file), pil images "
"and numpy arrays; %s is not supported" %
str(img.__class__))
if cls is None:
cls = ''
imgin = imgin.convert('RGB')
result.append((numpy.array(imgin), cls))
return result
def one_image(image, image_size=None, classes=None,
rng=None, preprocessor=None, fit_preprocessor=False):
"""
Convenience function that creates an Images dataset from a single image.
Parameters
----------
image: string, image or numpy.ndarray
The image to use as source.
See :class:`Images` for a description of other parameters.
"""
return Images(source=((image,),),
image_size=image_size, classes=classes,
rng=rng, preprocessor=preprocessor,
fit_preprocessor=fit_preprocessor)
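# --- Illustrative example (not part of the original module) -----------------
# A minimal sketch of the two entry points; 'train.csv' and 'cat.png' are
# hypothetical paths, and the class count is an assumption for the example.
def _usage_example():  # pragma: no cover
    # classification: targets in train.csv are integers in [0; 9]
    trainset = Images(source='train.csv', image_size=128, classes=10)
    # a one-image dataset, e.g. for quick prediction experiments
    single = one_image('cat.png', image_size=128)
    return trainset, single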
| bsd-3-clause | 8,599,360,565,119,837,000 | -13,187,445,058,430,780 | 36.960894 | 79 | 0.576306 | false |
shanglt/youtube-dl | youtube_dl/extractor/planetaplay.py | 113 | 1921 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import ExtractorError
class PlanetaPlayIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?planetaplay\.com/\?sng=(?P<id>[0-9]+)'
_API_URL = 'http://planetaplay.com/action/playlist/?sng={0:}'
_THUMBNAIL_URL = 'http://planetaplay.com/img/thumb/{thumb:}'
_TEST = {
'url': 'http://planetaplay.com/?sng=3586',
'md5': '9d569dceb7251a4e01355d5aea60f9db',
'info_dict': {
'id': '3586',
'ext': 'flv',
'title': 'md5:e829428ee28b1deed00de90de49d1da1',
},
'skip': 'Not accessible from Travis CI server',
}
_SONG_FORMATS = {
'lq': (0, 'http://www.planetaplay.com/videoplayback/{med_hash:}'),
'hq': (1, 'http://www.planetaplay.com/videoplayback/hi/{med_hash:}'),
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
response = self._download_json(
self._API_URL.format(video_id), video_id)['response']
try:
data = response.get('data')[0]
except IndexError:
raise ExtractorError(
'%s: failed to get the playlist' % self.IE_NAME, expected=True)
title = '{song_artists:} - {sng_name:}'.format(**data)
thumbnail = self._THUMBNAIL_URL.format(**data)
formats = []
for format_id, (quality, url_template) in self._SONG_FORMATS.items():
formats.append({
'format_id': format_id,
'url': url_template.format(**data),
'quality': quality,
'ext': 'flv',
})
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnail': thumbnail,
}
| unlicense | -5,989,220,383,539,782,000 | -626,642,929,717,195,900 | 30.491803 | 79 | 0.537741 | false |
ampax/edx-platform-backup | common/djangoapps/external_auth/migrations/0001_initial.py | 114 | 6388 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ExternalAuthMap'
db.create_table('external_auth_externalauthmap', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('external_id', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('external_domain', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('external_credentials', self.gf('django.db.models.fields.TextField')(blank=True)),
('external_email', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('external_name', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, blank=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True, null=True)),
('internal_password', self.gf('django.db.models.fields.CharField')(max_length=31, blank=True)),
('dtcreated', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('dtsignup', self.gf('django.db.models.fields.DateTimeField')(null=True)),
))
db.send_create_signal('external_auth', ['ExternalAuthMap'])
# Adding unique constraint on 'ExternalAuthMap', fields ['external_id', 'external_domain']
db.create_unique('external_auth_externalauthmap', ['external_id', 'external_domain'])
def backwards(self, orm):
# Removing unique constraint on 'ExternalAuthMap', fields ['external_id', 'external_domain']
db.delete_unique('external_auth_externalauthmap', ['external_id', 'external_domain'])
# Deleting model 'ExternalAuthMap'
db.delete_table('external_auth_externalauthmap')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'external_auth.externalauthmap': {
'Meta': {'unique_together': "(('external_id', 'external_domain'),)", 'object_name': 'ExternalAuthMap'},
'dtcreated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'dtsignup': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'external_credentials': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'external_domain': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'external_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'external_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_password': ('django.db.models.fields.CharField', [], {'max_length': '31', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True'})
}
}
complete_apps = ['external_auth']
| agpl-3.0 | -4,022,480,689,438,917,600 | -4,759,585,057,472,861,000 | 69.977778 | 182 | 0.583594 | false |
prasanna08/oppia | scripts/linters/test_files/invalid_urlencode.py | 4 | 1489 | # coding: utf-8
#
# Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python file with invalid syntax, used by scripts/linters/
python_linter_test. This file is using urlencode which is not allowed.
"""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import urllib
import python_utils
class FakeClass(python_utils.OBJECT):
"""This is a fake docstring for invalid syntax purposes."""
def __init__(self, fake_arg):
self.fake_arg = fake_arg
def fake_method(self, source_url, doseq):
"""This doesn't do anything.
Args:
source_url: str. The URL.
doseq: bool. Boolean value.
Returns:
urlencode(object): Returns urlencode object.
"""
# Use of urlencode is not allowed.
return urllib.urlencode(source_url, doseq=doseq)
| apache-2.0 | 6,436,128,460,174,635,000 | -7,144,160,106,778,943,000 | 31.369565 | 78 | 0.698455 | false |
mahak/cloudify-cli | cloudify_cli/commands/users.py | 1 | 9023 | ########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from .. import env
from ..cli import cfy
from ..table import print_data, print_single
from ..utils import handle_client_error
USER_COLUMNS = ['username', 'groups', 'role', 'group_system_roles', 'active',
'last_login_at', 'is_locked']
GET_DATA_COLUMNS = ['user_tenants', 'group_tenants']
NO_GET_DATA_COLUMNS = ['tenants']
USER_LABELS = {'role': 'system wide role',
'group_system_roles': 'system wide roles via groups'}
def _format_user(user):
user_tenants = dict(
(str(tenant), str(user.user_tenants[tenant]))
for tenant in user.user_tenants
)
group_tenants = dict(
(str(tenant),
dict(
(str(role),
[str(group) for group in user.group_tenants[tenant][role]])
for role in user.group_tenants[tenant]
))
for tenant in user.group_tenants
)
user['user_tenants'] = str(user_tenants)[1:-1]
user['group_tenants'] = str(group_tenants)[1:-1]
return user
def _format_group_system_roles(user):
group_system_roles = dict(
(str(role),
[str(user_group) for user_group in user['group_system_roles'][role]])
for role in user['group_system_roles']
)
user['group_system_roles'] = str(group_system_roles).strip('{}')
return user
@cfy.group(name='users')
@cfy.options.common_options
def users():
"""Handle Cloudify users
"""
if not env.is_initialized():
env.raise_uninitialized()
@users.command(name='list', short_help='List users [manager only]')
@cfy.options.sort_by('username')
@cfy.options.descending
@cfy.options.common_options
@cfy.options.get_data
@cfy.options.search
@cfy.options.pagination_offset
@cfy.options.pagination_size
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def list(sort_by,
descending,
get_data,
search,
pagination_offset,
pagination_size,
logger,
client):
"""List all users
"""
logger.info('Listing all users...')
users_list = client.users.list(
sort=sort_by,
is_descending=descending,
_get_data=get_data,
_search=search,
_offset=pagination_offset,
_size=pagination_size
)
total = users_list.metadata.pagination.total
# copy list
columns = [] + USER_COLUMNS
users_list = [_format_group_system_roles(user) for user in users_list]
if get_data:
users_list = [_format_user(user) for user in users_list]
columns += GET_DATA_COLUMNS
else:
columns += NO_GET_DATA_COLUMNS
print_data(columns, users_list, 'Users:', labels=USER_LABELS)
logger.info('Showing {0} of {1} users'.format(len(users_list), total))
@users.command(name='create', short_help='Create a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.common_options
@cfy.options.security_role
@cfy.options.password
@cfy.options.tenant_name(required=False)
@cfy.options.user_tenant_role(required=False,
options_flags=['-l', '--user-tenant-role'])
@cfy.assert_manager_active()
@cfy.pass_client(use_tenant_in_header=False)
@cfy.pass_logger
def create(username,
security_role,
password,
tenant_name,
user_tenant_role,
logger,
client):
"""Create a new user on the manager
`USERNAME` is the username of the user
"""
client.users.create(username, password, security_role)
logger.info('User `{0}` created with `{1}` security role'.format(
username, security_role))
if tenant_name and user_tenant_role:
client.tenants.add_user(username, tenant_name, user_tenant_role)
logger.info(
'User `{0}` added successfully to tenant `{1}` with `{2}` role'
.format(username, tenant_name, user_tenant_role))
@users.command(name='set-password',
short_help='Set a new password for a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.password
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def set_password(username, password, logger, client):
"""Set a new password for a user
`USERNAME` is the username of the user
"""
logger.info('Setting new password for user {0}...'.format(username))
client.users.set_password(username, password)
logger.info('New password set')
@users.command(name='set-role',
short_help='Set a new role for a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.security_role
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def set_role(username, security_role, logger, client):
"""Set a new role for a user
`USERNAME` is the username of the user
"""
logger.info('Setting new role for user {0}...'.format(username))
client.users.set_role(username, security_role)
logger.info('New role `{0}` set'.format(security_role))
@users.command(name='get',
short_help='Get details for a single user [manager only]')
@cfy.argument(
'username', callback=cfy.validate_name, default=env.get_username())
@cfy.options.common_options
@cfy.options.get_data
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def get(username, get_data, logger, client):
"""Get details for a single user
`USERNAME` is the username of the user. (default: current user)
"""
logger.info('Getting info for user `{0}`...'.format(username))
if username == env.get_username():
user_details = client.users.get_self(_get_data=get_data)
else:
user_details = client.users.get(username, _get_data=get_data)
# copy list
columns = [] + USER_COLUMNS
if get_data:
_format_user(user_details)
columns += GET_DATA_COLUMNS
else:
columns += NO_GET_DATA_COLUMNS
print_single(columns,
user_details,
'Requested user info:',
labels=USER_LABELS)
@users.command(name='delete',
short_help='Delete a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def delete(username, logger, client):
"""Delete a user
`USERNAME` is the username of the user
"""
logger.info('Deleting user `{0}`...'.format(username))
client.users.delete(username)
logger.info('User removed')
@users.command(name='activate',
short_help='Make an inactive user active [manager only]')
@cfy.argument('username')
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def activate(username, logger, client):
"""Activate a user
`USERNAME` is the username of the user
"""
graceful_msg = 'User `{0}` is already active'.format(username)
logger.info('Activating user `{0}`...'.format(username))
with handle_client_error(409, graceful_msg, logger):
client.users.activate(username)
logger.info('User activated')
@users.command(name='deactivate',
short_help='Make an active user inactive [manager only]')
@cfy.argument('username')
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def deactivate(username, logger, client):
"""Deactivate a user
`USERNAME` is the username of the user
"""
graceful_msg = 'User `{0}` is already inactive'.format(username)
logger.info('Deactivating user `{0}`...'.format(username))
with handle_client_error(409, graceful_msg, logger):
client.users.deactivate(username)
logger.info('User deactivated')
@users.command(name='unlock',
short_help='Unlock a locked user [manager only]')
@cfy.argument('username')
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def unlock(username, logger, client):
"""Unlock a locked user
`USERNAME` is the username of the user
"""
graceful_msg = 'User `{0}` is already unlocked'.format(username)
logger.info('Unlocking user `{0}`...'.format(username))
with handle_client_error(409, graceful_msg, logger):
client.users.unlock(username)
logger.info('User unlocked')
| apache-2.0 | 8,845,007,746,241,322,000 | -1,016,786,993,699,110,700 | 30.883392 | 78 | 0.654217 | false |
Krossom/python-for-android | python3-alpha/python3-src/Lib/encodings/aliases.py | 58 | 15133 | """ Encoding Aliases Support
This module is used by the encodings package search function to
map encodings names to module names.
Note that the search function normalizes the encoding names before
doing the lookup, so the mapping will have to map normalized
encoding names to module names.
Contents:
The following aliases dictionary contains mappings of all IANA
character set names for which the Python core library provides
codecs. In addition to these, a few Python specific codec
aliases have also been added.
"""
aliases = {
# Please keep this list sorted alphabetically by value !
# ascii codec
'646' : 'ascii',
'ansi_x3.4_1968' : 'ascii',
'ansi_x3_4_1968' : 'ascii', # some email headers use this non-standard name
'ansi_x3.4_1986' : 'ascii',
'cp367' : 'ascii',
'csascii' : 'ascii',
'ibm367' : 'ascii',
'iso646_us' : 'ascii',
'iso_646.irv_1991' : 'ascii',
'iso_ir_6' : 'ascii',
'us' : 'ascii',
'us_ascii' : 'ascii',
## base64_codec codec
#'base64' : 'base64_codec',
#'base_64' : 'base64_codec',
# big5 codec
'big5_tw' : 'big5',
'csbig5' : 'big5',
# big5hkscs codec
'big5_hkscs' : 'big5hkscs',
'hkscs' : 'big5hkscs',
## bz2_codec codec
#'bz2' : 'bz2_codec',
# cp037 codec
'037' : 'cp037',
'csibm037' : 'cp037',
'ebcdic_cp_ca' : 'cp037',
'ebcdic_cp_nl' : 'cp037',
'ebcdic_cp_us' : 'cp037',
'ebcdic_cp_wt' : 'cp037',
'ibm037' : 'cp037',
'ibm039' : 'cp037',
# cp1026 codec
'1026' : 'cp1026',
'csibm1026' : 'cp1026',
'ibm1026' : 'cp1026',
# cp1140 codec
'1140' : 'cp1140',
'ibm1140' : 'cp1140',
# cp1250 codec
'1250' : 'cp1250',
'windows_1250' : 'cp1250',
# cp1251 codec
'1251' : 'cp1251',
'windows_1251' : 'cp1251',
# cp1252 codec
'1252' : 'cp1252',
'windows_1252' : 'cp1252',
# cp1253 codec
'1253' : 'cp1253',
'windows_1253' : 'cp1253',
# cp1254 codec
'1254' : 'cp1254',
'windows_1254' : 'cp1254',
# cp1255 codec
'1255' : 'cp1255',
'windows_1255' : 'cp1255',
# cp1256 codec
'1256' : 'cp1256',
'windows_1256' : 'cp1256',
# cp1257 codec
'1257' : 'cp1257',
'windows_1257' : 'cp1257',
# cp1258 codec
'1258' : 'cp1258',
'windows_1258' : 'cp1258',
# cp424 codec
'424' : 'cp424',
'csibm424' : 'cp424',
'ebcdic_cp_he' : 'cp424',
'ibm424' : 'cp424',
# cp437 codec
'437' : 'cp437',
'cspc8codepage437' : 'cp437',
'ibm437' : 'cp437',
# cp500 codec
'500' : 'cp500',
'csibm500' : 'cp500',
'ebcdic_cp_be' : 'cp500',
'ebcdic_cp_ch' : 'cp500',
'ibm500' : 'cp500',
# cp775 codec
'775' : 'cp775',
'cspc775baltic' : 'cp775',
'ibm775' : 'cp775',
# cp850 codec
'850' : 'cp850',
'cspc850multilingual' : 'cp850',
'ibm850' : 'cp850',
# cp852 codec
'852' : 'cp852',
'cspcp852' : 'cp852',
'ibm852' : 'cp852',
# cp855 codec
'855' : 'cp855',
'csibm855' : 'cp855',
'ibm855' : 'cp855',
# cp857 codec
'857' : 'cp857',
'csibm857' : 'cp857',
'ibm857' : 'cp857',
# cp858 codec
'858' : 'cp858',
'csibm858' : 'cp858',
'ibm858' : 'cp858',
# cp860 codec
'860' : 'cp860',
'csibm860' : 'cp860',
'ibm860' : 'cp860',
# cp861 codec
'861' : 'cp861',
'cp_is' : 'cp861',
'csibm861' : 'cp861',
'ibm861' : 'cp861',
# cp862 codec
'862' : 'cp862',
'cspc862latinhebrew' : 'cp862',
'ibm862' : 'cp862',
# cp863 codec
'863' : 'cp863',
'csibm863' : 'cp863',
'ibm863' : 'cp863',
# cp864 codec
'864' : 'cp864',
'csibm864' : 'cp864',
'ibm864' : 'cp864',
# cp865 codec
'865' : 'cp865',
'csibm865' : 'cp865',
'ibm865' : 'cp865',
# cp866 codec
'866' : 'cp866',
'csibm866' : 'cp866',
'ibm866' : 'cp866',
# cp869 codec
'869' : 'cp869',
'cp_gr' : 'cp869',
'csibm869' : 'cp869',
'ibm869' : 'cp869',
# cp932 codec
'932' : 'cp932',
'ms932' : 'cp932',
'mskanji' : 'cp932',
'ms_kanji' : 'cp932',
# cp949 codec
'949' : 'cp949',
'ms949' : 'cp949',
'uhc' : 'cp949',
# cp950 codec
'950' : 'cp950',
'ms950' : 'cp950',
# euc_jis_2004 codec
'jisx0213' : 'euc_jis_2004',
'eucjis2004' : 'euc_jis_2004',
'euc_jis2004' : 'euc_jis_2004',
# euc_jisx0213 codec
'eucjisx0213' : 'euc_jisx0213',
# euc_jp codec
'eucjp' : 'euc_jp',
'ujis' : 'euc_jp',
'u_jis' : 'euc_jp',
# euc_kr codec
'euckr' : 'euc_kr',
'korean' : 'euc_kr',
'ksc5601' : 'euc_kr',
'ks_c_5601' : 'euc_kr',
'ks_c_5601_1987' : 'euc_kr',
'ksx1001' : 'euc_kr',
'ks_x_1001' : 'euc_kr',
# gb18030 codec
'gb18030_2000' : 'gb18030',
# gb2312 codec
'chinese' : 'gb2312',
'csiso58gb231280' : 'gb2312',
'euc_cn' : 'gb2312',
'euccn' : 'gb2312',
'eucgb2312_cn' : 'gb2312',
'gb2312_1980' : 'gb2312',
'gb2312_80' : 'gb2312',
'iso_ir_58' : 'gb2312',
# gbk codec
'936' : 'gbk',
'cp936' : 'gbk',
'ms936' : 'gbk',
## hex_codec codec
#'hex' : 'hex_codec',
# hp_roman8 codec
'roman8' : 'hp_roman8',
'r8' : 'hp_roman8',
'csHPRoman8' : 'hp_roman8',
# hz codec
'hzgb' : 'hz',
'hz_gb' : 'hz',
'hz_gb_2312' : 'hz',
# iso2022_jp codec
'csiso2022jp' : 'iso2022_jp',
'iso2022jp' : 'iso2022_jp',
'iso_2022_jp' : 'iso2022_jp',
# iso2022_jp_1 codec
'iso2022jp_1' : 'iso2022_jp_1',
'iso_2022_jp_1' : 'iso2022_jp_1',
# iso2022_jp_2 codec
'iso2022jp_2' : 'iso2022_jp_2',
'iso_2022_jp_2' : 'iso2022_jp_2',
# iso2022_jp_2004 codec
'iso_2022_jp_2004' : 'iso2022_jp_2004',
'iso2022jp_2004' : 'iso2022_jp_2004',
# iso2022_jp_3 codec
'iso2022jp_3' : 'iso2022_jp_3',
'iso_2022_jp_3' : 'iso2022_jp_3',
# iso2022_jp_ext codec
'iso2022jp_ext' : 'iso2022_jp_ext',
'iso_2022_jp_ext' : 'iso2022_jp_ext',
# iso2022_kr codec
'csiso2022kr' : 'iso2022_kr',
'iso2022kr' : 'iso2022_kr',
'iso_2022_kr' : 'iso2022_kr',
# iso8859_10 codec
'csisolatin6' : 'iso8859_10',
'iso_8859_10' : 'iso8859_10',
'iso_8859_10_1992' : 'iso8859_10',
'iso_ir_157' : 'iso8859_10',
'l6' : 'iso8859_10',
'latin6' : 'iso8859_10',
# iso8859_11 codec
'thai' : 'iso8859_11',
'iso_8859_11' : 'iso8859_11',
'iso_8859_11_2001' : 'iso8859_11',
# iso8859_13 codec
'iso_8859_13' : 'iso8859_13',
'l7' : 'iso8859_13',
'latin7' : 'iso8859_13',
# iso8859_14 codec
'iso_8859_14' : 'iso8859_14',
'iso_8859_14_1998' : 'iso8859_14',
'iso_celtic' : 'iso8859_14',
'iso_ir_199' : 'iso8859_14',
'l8' : 'iso8859_14',
'latin8' : 'iso8859_14',
# iso8859_15 codec
'iso_8859_15' : 'iso8859_15',
'l9' : 'iso8859_15',
'latin9' : 'iso8859_15',
# iso8859_16 codec
'iso_8859_16' : 'iso8859_16',
'iso_8859_16_2001' : 'iso8859_16',
'iso_ir_226' : 'iso8859_16',
'l10' : 'iso8859_16',
'latin10' : 'iso8859_16',
# iso8859_2 codec
'csisolatin2' : 'iso8859_2',
'iso_8859_2' : 'iso8859_2',
'iso_8859_2_1987' : 'iso8859_2',
'iso_ir_101' : 'iso8859_2',
'l2' : 'iso8859_2',
'latin2' : 'iso8859_2',
# iso8859_3 codec
'csisolatin3' : 'iso8859_3',
'iso_8859_3' : 'iso8859_3',
'iso_8859_3_1988' : 'iso8859_3',
'iso_ir_109' : 'iso8859_3',
'l3' : 'iso8859_3',
'latin3' : 'iso8859_3',
# iso8859_4 codec
'csisolatin4' : 'iso8859_4',
'iso_8859_4' : 'iso8859_4',
'iso_8859_4_1988' : 'iso8859_4',
'iso_ir_110' : 'iso8859_4',
'l4' : 'iso8859_4',
'latin4' : 'iso8859_4',
# iso8859_5 codec
'csisolatincyrillic' : 'iso8859_5',
'cyrillic' : 'iso8859_5',
'iso_8859_5' : 'iso8859_5',
'iso_8859_5_1988' : 'iso8859_5',
'iso_ir_144' : 'iso8859_5',
# iso8859_6 codec
'arabic' : 'iso8859_6',
'asmo_708' : 'iso8859_6',
'csisolatinarabic' : 'iso8859_6',
'ecma_114' : 'iso8859_6',
'iso_8859_6' : 'iso8859_6',
'iso_8859_6_1987' : 'iso8859_6',
'iso_ir_127' : 'iso8859_6',
# iso8859_7 codec
'csisolatingreek' : 'iso8859_7',
'ecma_118' : 'iso8859_7',
'elot_928' : 'iso8859_7',
'greek' : 'iso8859_7',
'greek8' : 'iso8859_7',
'iso_8859_7' : 'iso8859_7',
'iso_8859_7_1987' : 'iso8859_7',
'iso_ir_126' : 'iso8859_7',
# iso8859_8 codec
'csisolatinhebrew' : 'iso8859_8',
'hebrew' : 'iso8859_8',
'iso_8859_8' : 'iso8859_8',
'iso_8859_8_1988' : 'iso8859_8',
'iso_ir_138' : 'iso8859_8',
# iso8859_9 codec
'csisolatin5' : 'iso8859_9',
'iso_8859_9' : 'iso8859_9',
'iso_8859_9_1989' : 'iso8859_9',
'iso_ir_148' : 'iso8859_9',
'l5' : 'iso8859_9',
'latin5' : 'iso8859_9',
# johab codec
'cp1361' : 'johab',
'ms1361' : 'johab',
# koi8_r codec
'cskoi8r' : 'koi8_r',
# latin_1 codec
#
# Note that the latin_1 codec is implemented internally in C and a
# lot faster than the charmap codec iso8859_1 which uses the same
# encoding. This is why we discourage the use of the iso8859_1
# codec and alias it to latin_1 instead.
#
'8859' : 'latin_1',
'cp819' : 'latin_1',
'csisolatin1' : 'latin_1',
'ibm819' : 'latin_1',
'iso8859' : 'latin_1',
'iso8859_1' : 'latin_1',
'iso_8859_1' : 'latin_1',
'iso_8859_1_1987' : 'latin_1',
'iso_ir_100' : 'latin_1',
'l1' : 'latin_1',
'latin' : 'latin_1',
'latin1' : 'latin_1',
# mac_cyrillic codec
'maccyrillic' : 'mac_cyrillic',
# mac_greek codec
'macgreek' : 'mac_greek',
# mac_iceland codec
'maciceland' : 'mac_iceland',
# mac_latin2 codec
'maccentraleurope' : 'mac_latin2',
'maclatin2' : 'mac_latin2',
# mac_roman codec
'macintosh' : 'mac_roman',
'macroman' : 'mac_roman',
# mac_turkish codec
'macturkish' : 'mac_turkish',
# mbcs codec
'dbcs' : 'mbcs',
# ptcp154 codec
'csptcp154' : 'ptcp154',
'pt154' : 'ptcp154',
'cp154' : 'ptcp154',
'cyrillic_asian' : 'ptcp154',
## quopri_codec codec
#'quopri' : 'quopri_codec',
#'quoted_printable' : 'quopri_codec',
#'quotedprintable' : 'quopri_codec',
## rot_13 codec
#'rot13' : 'rot_13',
# shift_jis codec
'csshiftjis' : 'shift_jis',
'shiftjis' : 'shift_jis',
'sjis' : 'shift_jis',
's_jis' : 'shift_jis',
# shift_jis_2004 codec
'shiftjis2004' : 'shift_jis_2004',
'sjis_2004' : 'shift_jis_2004',
's_jis_2004' : 'shift_jis_2004',
# shift_jisx0213 codec
'shiftjisx0213' : 'shift_jisx0213',
'sjisx0213' : 'shift_jisx0213',
's_jisx0213' : 'shift_jisx0213',
# tactis codec
'tis260' : 'tactis',
# tis_620 codec
'tis620' : 'tis_620',
'tis_620_0' : 'tis_620',
'tis_620_2529_0' : 'tis_620',
'tis_620_2529_1' : 'tis_620',
'iso_ir_166' : 'tis_620',
# utf_16 codec
'u16' : 'utf_16',
'utf16' : 'utf_16',
# utf_16_be codec
'unicodebigunmarked' : 'utf_16_be',
'utf_16be' : 'utf_16_be',
# utf_16_le codec
'unicodelittleunmarked' : 'utf_16_le',
'utf_16le' : 'utf_16_le',
# utf_32 codec
'u32' : 'utf_32',
'utf32' : 'utf_32',
# utf_32_be codec
'utf_32be' : 'utf_32_be',
# utf_32_le codec
'utf_32le' : 'utf_32_le',
# utf_7 codec
'u7' : 'utf_7',
'utf7' : 'utf_7',
'unicode_1_1_utf_7' : 'utf_7',
# utf_8 codec
'u8' : 'utf_8',
'utf' : 'utf_8',
'utf8' : 'utf_8',
'utf8_ucs2' : 'utf_8',
'utf8_ucs4' : 'utf_8',
## uu_codec codec
#'uu' : 'uu_codec',
## zlib_codec codec
#'zip' : 'zlib_codec',
#'zlib' : 'zlib_codec',
# temporary mac CJK aliases, will be replaced by proper codecs in 3.1
'x_mac_japanese' : 'shift_jis',
'x_mac_korean' : 'euc_kr',
'x_mac_simp_chinese' : 'gb2312',
'x_mac_trad_chinese' : 'big5',
}
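# Lookup sketch (behavior inferred from this table, not an added API): the
# encodings package normalizes a requested codec name and consults these
# aliases, so e.g. "csshiftjis", "sjis" and "s_jis" all resolve to the
# shift_jis codec module.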
| apache-2.0 | -2,926,718,273,522,495,500 | -8,536,228,660,457,063,000 | 27.39212 | 83 | 0.421397 | false |
bclau/nova | tools/patch_tox_venv.py | 11 | 1659 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
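# Summary (inferred from the code below): this tox helper locates the
# project's requirement files and runs install_venv's post-processing step
# (file patching) against the currently active virtualenv.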
import os
import sys
import install_venv_common as install_venv # noqa
def first_file(file_list):
for candidate in file_list:
if os.path.exists(candidate):
return candidate
def main(argv):
root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
venv = os.environ['VIRTUAL_ENV']
pip_requires = first_file([
os.path.join(root, 'requirements.txt'),
os.path.join(root, 'tools', 'pip-requires'),
])
test_requires = first_file([
os.path.join(root, 'test-requirements.txt'),
os.path.join(root, 'tools', 'test-requires'),
])
py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
project = 'nova'
install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,
py_version, project)
#NOTE(dprince): For Tox we only run post_process (which patches files, etc)
install.post_process()
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 | 4,822,886,912,756,472,000 | -7,328,828,488,537,568,000 | 32.18 | 79 | 0.654008 | false |
rednach/krill | modules/dummy_poller/module.py | 18 | 4992 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# This class is an example of a poller worker module,
# used both for the configuration phase and at runtime.
import sys
import signal
import time
from Queue import Empty
from shinken.basemodule import BaseModule
from shinken.log import logger
properties = {
'daemons': ['poller'],
'type': 'dummy_poller',
'external': False,
# To be a real worker module, you must set this
'worker_capable': True,
}
# called by the plugin manager to get an instance of this poller module
def get_instance(mod_conf):
logger.info("[Dummy Poller] Get a Dummy poller module for plugin %s", mod_conf.get_name())
instance = Dummy_poller(mod_conf)
return instance
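# A minimal sketch of how such a module could be declared in the Shinken
# configuration (illustrative names, not taken from this file):
#
#   define module {
#       module_name     DummyPoller
#       module_type     dummy_poller
#   }
#
# The daemon's module manager then calls get_instance() above.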
# A dummy poller implementation: fakes check execution and returns canned results
class Dummy_poller(BaseModule):
def __init__(self, mod_conf):
BaseModule.__init__(self, mod_conf)
# Called by poller to say 'let's prepare yourself guy'
def init(self):
logger.info("[Dummy Poller] Initialization of the dummy poller module")
self.i_am_dying = False
    # Get new checks if fewer than nb_checks_max are queued.
    # If no new check is received and none is queued,
    # sleep for 1 second.
    # REF: doc/shinken-action-queues.png (3)
def get_new_checks(self):
try:
            while True:
logger.debug("[Dummy Poller] I %d wait for a message", self.id)
msg = self.s.get(block=False)
if msg is not None:
self.checks.append(msg.get_data())
logger.debug("[Dummy Poller] I, %d, got a message!", self.id)
except Empty, exp:
if len(self.checks) == 0:
time.sleep(1)
    # Launch checks that are in 'queue' status
# REF: doc/shinken-action-queues.png (4)
def launch_new_checks(self):
for chk in self.checks:
if chk.status == 'queue':
logger.warning("[Dummy Poller] Dummy (bad) check for %s", str(chk.command))
chk.exit_status = 2
chk.get_outputs('All is NOT SO well', 8012)
chk.status = 'done'
chk.execution_time = 0.1
    # Check the status of the checks;
    # if done, send the finished results back :)
# REF: doc/shinken-action-queues.png (5)
def manage_finished_checks(self):
to_del = []
for action in self.checks:
to_del.append(action)
try:
self.returns_queue.put(action)
except IOError, exp:
logger.info("[Dummy Poller] %d exiting: %s", self.id, exp)
sys.exit(2)
for chk in to_del:
self.checks.remove(chk)
    # s = global queue, master -> slaves (new checks to run)
    # returns_queue = queue managed by the manager (results back to the master)
    # c = control queue for this worker
def work(self, s, returns_queue, c):
logger.info("[Dummy Poller] Module Dummy started!")
## restore default signal handler for the workers:
signal.signal(signal.SIGTERM, signal.SIG_DFL)
timeout = 1.0
self.checks = []
self.returns_queue = returns_queue
self.s = s
self.t_each_loop = time.time()
while True:
begin = time.time()
msg = None
cmsg = None
            # If we are dying (big problem!) we do not
            # take new jobs, we just finish the current ones
if not self.i_am_dying:
# REF: doc/shinken-action-queues.png (3)
self.get_new_checks()
# REF: doc/shinken-action-queues.png (4)
self.launch_new_checks()
# REF: doc/shinken-action-queues.png (5)
self.manage_finished_checks()
# Now get order from master
try:
cmsg = c.get(block=False)
if cmsg.get_type() == 'Die':
logger.info("[Dummy Poller] %d : Dad say we are dying...", self.id)
break
except Exception:
pass
timeout -= time.time() - begin
if timeout < 0:
timeout = 1.0
| agpl-3.0 | 2,414,028,798,665,835,000 | -8,321,615,974,752,010,000 | 32.959184 | 94 | 0.591747 | false |
nicoboss/Floatmotion | OpenGL/GL/ARB/texture_env_add.py | 9 | 1124 | '''OpenGL extension ARB.texture_env_add
This module customises the behaviour of the
OpenGL.raw.GL.ARB.texture_env_add to provide a more
Python-friendly API
Overview (from the spec)
New texture environment function ADD is supported with the following
equation:
Cv = min(1, Cf + Ct)
New function may be specified by calling TexEnv with ADD token.
One possible application is to add a specular highlight texture to
a Gouraud-shaded primitive to emulate Phong shading, in a single
pass.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/texture_env_add.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.ARB.texture_env_add import *
from OpenGL.raw.GL.ARB.texture_env_add import _EXTENSION_NAME
def glInitTextureEnvAddARB():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
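# Usage sketch (assumes a current GL context; GL_ADD is the enum this
# extension adds to the texture environment modes):
#   from OpenGL.GL import glTexEnvf, GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_ADD
#   if glInitTextureEnvAddARB():
#       glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_ADD)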
### END AUTOGENERATED SECTION | agpl-3.0 | -6,365,613,139,350,713,000 | -6,564,750,240,999,051,000 | 30.25 | 71 | 0.764235 | false |
openstack/heat | heat/engine/update.py | 1 | 12695 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from heat.common import exception
from heat.engine import dependencies
from heat.engine import resource
from heat.engine import scheduler
from heat.engine import stk_defn
from heat.objects import resource as resource_objects
LOG = logging.getLogger(__name__)
class StackUpdate(object):
"""A Task to perform the update of an existing stack to a new template."""
def __init__(self, existing_stack, new_stack, previous_stack,
rollback=False):
"""Initialise with the existing stack and the new stack."""
self.existing_stack = existing_stack
self.new_stack = new_stack
self.previous_stack = previous_stack
self.rollback = rollback
self.existing_snippets = dict((n, r.frozen_definition())
for n, r in self.existing_stack.items()
if n in self.new_stack)
def __repr__(self):
if self.rollback:
return '%s Rollback' % str(self.existing_stack)
else:
return '%s Update' % str(self.existing_stack)
def __call__(self):
"""Return a co-routine that updates the stack."""
cleanup_prev = scheduler.DependencyTaskGroup(
self.previous_stack.dependencies,
self._remove_backup_resource,
reverse=True)
def get_error_wait_time(resource):
return resource.cancel_grace_period()
updater = scheduler.DependencyTaskGroup(
self.dependencies(),
self._resource_update,
error_wait_time=get_error_wait_time)
if not self.rollback:
yield from cleanup_prev()
try:
yield from updater()
finally:
self.previous_stack.reset_dependencies()
def _resource_update(self, res):
if res.name in self.new_stack and self.new_stack[res.name] is res:
return self._process_new_resource_update(res)
else:
return self._process_existing_resource_update(res)
def _remove_backup_resource(self, prev_res):
if prev_res.state not in ((prev_res.INIT, prev_res.COMPLETE),
(prev_res.DELETE, prev_res.COMPLETE)):
LOG.debug("Deleting backup resource %s", prev_res.name)
yield from prev_res.destroy()
@staticmethod
def _exchange_stacks(existing_res, prev_res):
resource_objects.Resource.exchange_stacks(existing_res.stack.context,
existing_res.id, prev_res.id)
prev_stack, existing_stack = prev_res.stack, existing_res.stack
prev_stack.add_resource(existing_res)
existing_stack.add_resource(prev_res)
def _create_resource(self, new_res):
res_name = new_res.name
# Clean up previous resource
if res_name in self.previous_stack:
prev_res = self.previous_stack[res_name]
if prev_res.state not in ((prev_res.INIT, prev_res.COMPLETE),
(prev_res.DELETE, prev_res.COMPLETE)):
# Swap in the backup resource if it is in a valid state,
# instead of creating a new resource
if prev_res.status == prev_res.COMPLETE:
LOG.debug("Swapping in backup Resource %s", res_name)
self._exchange_stacks(self.existing_stack[res_name],
prev_res)
return
LOG.debug("Deleting backup Resource %s", res_name)
yield from prev_res.destroy()
# Back up existing resource
if res_name in self.existing_stack:
LOG.debug("Backing up existing Resource %s", res_name)
existing_res = self.existing_stack[res_name]
self.previous_stack.add_resource(existing_res)
existing_res.state_set(existing_res.UPDATE, existing_res.COMPLETE)
self.existing_stack.add_resource(new_res)
        # Save the new resource definition to the backup stack if it is
        # not already present in the backup stack template; this allows
        # all dependencies of an existing resource copied to the backup
        # stack to be resolved there.
if (res_name not in
self.previous_stack.t[self.previous_stack.t.RESOURCES]):
LOG.debug("Storing definition of new Resource %s", res_name)
self.previous_stack.t.add_resource(new_res.t)
self.previous_stack.t.store(self.previous_stack.context)
yield from new_res.create()
self._update_resource_data(new_res)
def _check_replace_restricted(self, res):
registry = res.stack.env.registry
restricted_actions = registry.get_rsrc_restricted_actions(res.name)
existing_res = self.existing_stack[res.name]
if 'replace' in restricted_actions:
ex = exception.ResourceActionRestricted(action='replace')
failure = exception.ResourceFailure(ex, existing_res,
existing_res.UPDATE)
existing_res._add_event(existing_res.UPDATE, existing_res.FAILED,
str(ex))
raise failure
def _update_resource_data(self, resource):
# Use the *new* template to determine the attrs to cache
node_data = resource.node_data(self.new_stack.defn)
stk_defn.update_resource_data(self.existing_stack.defn,
resource.name, node_data)
# Also update the new stack's definition with the data, so that
# following resources can calculate dep_attr values correctly (e.g. if
# the actual attribute name in a get_attr function also comes from a
# get_attr function.)
stk_defn.update_resource_data(self.new_stack.defn,
resource.name, node_data)
def _process_new_resource_update(self, new_res):
res_name = new_res.name
if res_name in self.existing_stack:
existing_res = self.existing_stack[res_name]
is_substituted = existing_res.check_is_substituted(type(new_res))
if type(existing_res) is type(new_res) or is_substituted:
try:
yield from self._update_in_place(existing_res,
new_res,
is_substituted)
except resource.UpdateReplace:
pass
else:
                    # Save the updated resource definition to the backup
                    # stack, because it keeps the backup stack resources
                    # synchronized.
LOG.debug("Storing definition of updated Resource %s",
res_name)
self.previous_stack.t.add_resource(new_res.t)
self.previous_stack.t.store(self.previous_stack.context)
self.existing_stack.t.add_resource(new_res.t)
self.existing_stack.t.store(self.existing_stack.context)
LOG.info("Resource %(res_name)s for stack "
"%(stack_name)s updated",
{'res_name': res_name,
'stack_name': self.existing_stack.name})
self._update_resource_data(existing_res)
return
else:
self._check_replace_restricted(new_res)
yield from self._create_resource(new_res)
def _update_in_place(self, existing_res, new_res, is_substituted=False):
existing_snippet = self.existing_snippets[existing_res.name]
prev_res = self.previous_stack.get(new_res.name)
# Note the new resource snippet is resolved in the context
# of the existing stack (which is the stack being updated)
# but with the template of the new stack (in case the update
# is switching template implementations)
new_snippet = new_res.t.reparse(self.existing_stack.defn,
self.new_stack.t)
if is_substituted:
substitute = type(new_res)(existing_res.name,
existing_res.t,
existing_res.stack)
existing_res.stack.resources[existing_res.name] = substitute
existing_res = substitute
existing_res.converge = self.new_stack.converge
yield from existing_res.update(new_snippet, existing_snippet,
prev_resource=prev_res)
def _process_existing_resource_update(self, existing_res):
res_name = existing_res.name
if res_name in self.previous_stack:
backup_res = self.previous_stack[res_name]
yield from self._remove_backup_resource(backup_res)
if res_name in self.new_stack:
new_res = self.new_stack[res_name]
if new_res.state == (new_res.INIT, new_res.COMPLETE):
# Already updated in-place
return
if existing_res.stack is not self.previous_stack:
yield from existing_res.destroy()
if res_name not in self.new_stack:
self.existing_stack.remove_resource(res_name)
def dependencies(self):
"""Return the Dependencies graph for the update.
Returns a Dependencies object representing the dependencies between
update operations to move from an existing stack definition to a new
one.
"""
existing_deps = self.existing_stack.dependencies
new_deps = self.new_stack.dependencies
def edges():
# Create/update the new stack's resources in create order
for e in new_deps.graph().edges():
yield e
# Destroy/cleanup the old stack's resources in delete order
for e in existing_deps.graph(reverse=True).edges():
yield e
# Don't cleanup old resources until after they have been replaced
for name, res in self.existing_stack.items():
if name in self.new_stack:
yield (res, self.new_stack[name])
return dependencies.Dependencies(edges())
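    # Illustrative reading (an assumed simple template, not real Heat data):
    # the merged graph interleaves three orderings -- create/update new
    # resources in creation order, clean up removed resources in deletion
    # order, and defer each old resource's cleanup until its replacement
    # has been handled.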
def preview(self):
upd_keys = set(self.new_stack.resources.keys())
cur_keys = set(self.existing_stack.resources.keys())
common_keys = cur_keys.intersection(upd_keys)
deleted_keys = cur_keys.difference(upd_keys)
added_keys = upd_keys.difference(cur_keys)
updated_keys = []
replaced_keys = []
for key in common_keys:
current_res = self.existing_stack.resources[key]
updated_res = self.new_stack.resources[key]
current_props = current_res.frozen_definition().properties(
current_res.properties_schema, current_res.context)
updated_props = updated_res.frozen_definition().properties(
updated_res.properties_schema, updated_res.context)
# type comparison must match that in _process_new_resource_update
if type(current_res) is not type(updated_res):
replaced_keys.append(key)
continue
try:
if current_res.preview_update(updated_res.frozen_definition(),
current_res.frozen_definition(),
updated_props, current_props,
None):
updated_keys.append(key)
except resource.UpdateReplace:
replaced_keys.append(key)
return {
'unchanged': list(set(common_keys).difference(
set(updated_keys + replaced_keys))),
'updated': updated_keys,
'replaced': replaced_keys,
'added': list(added_keys),
'deleted': list(deleted_keys),
}
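    # Sketch of consuming preview() (illustrative variable names):
    #   changes = StackUpdate(existing, new, backup).preview()
    #   for name in changes['replaced']:
    #       LOG.info("resource %s would be replaced", name)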
| apache-2.0 | 4,415,865,945,288,940,500 | 1,782,740,860,180,770,000 | 41.316667 | 79 | 0.579992 | false |
ray-project/ray | rllib/env/wrappers/tests/test_exception_wrapper.py | 2 | 1736 | import random
import unittest
import gym
from ray.rllib.env.wrappers.exception_wrapper import ResetOnExceptionWrapper, \
TooManyResetAttemptsException
class TestResetOnExceptionWrapper(unittest.TestCase):
def test_unstable_env(self):
class UnstableEnv(gym.Env):
observation_space = gym.spaces.Discrete(2)
action_space = gym.spaces.Discrete(2)
def step(self, action):
if random.choice([True, False]):
raise ValueError("An error from a unstable environment.")
return self.observation_space.sample(), 0.0, False, {}
def reset(self):
return self.observation_space.sample()
env = UnstableEnv()
env = ResetOnExceptionWrapper(env)
try:
self._run_for_100_steps(env)
except Exception:
self.fail()
def test_very_unstable_env(self):
class VeryUnstableEnv(gym.Env):
observation_space = gym.spaces.Discrete(2)
action_space = gym.spaces.Discrete(2)
def step(self, action):
return self.observation_space.sample(), 0.0, False, {}
def reset(self):
raise ValueError("An error from a very unstable environment.")
env = VeryUnstableEnv()
env = ResetOnExceptionWrapper(env)
self.assertRaises(TooManyResetAttemptsException,
lambda: self._run_for_100_steps(env))
@staticmethod
def _run_for_100_steps(env):
env.reset()
for _ in range(100):
env.step(env.action_space.sample())
if __name__ == "__main__":
import sys
import pytest
sys.exit(pytest.main(["-v", __file__]))
| apache-2.0 | -4,539,802,147,740,572,700 | -4,443,060,814,466,401,300 | 29.45614 | 79 | 0.593894 | false |
diefenbach/django-lfs | lfs/marketing/models.py | 1 | 1821 | # django imports
from django.db import models
from django.utils.translation import ugettext_lazy as _, ugettext
# lfs imports
from lfs.catalog.models import Product
from lfs.order.models import Order
class Topseller(models.Model):
"""Selected products are in any case among topsellers.
"""
product = models.ForeignKey(Product, models.CASCADE, verbose_name=_(u"Product"))
position = models.PositiveSmallIntegerField(_(u"Position"), default=1)
class Meta:
ordering = ["position"]
app_label = 'marketing'
def __str__(self):
return u"%s (%s)" % (self.product.name, self.position)
class ProductSales(models.Model):
"""Stores totals sales per product.
"""
product = models.ForeignKey(Product, models.CASCADE, verbose_name=_(u"Product"))
sales = models.IntegerField(_(u"sales"), default=0)
class Meta:
app_label = 'marketing'
class FeaturedProduct(models.Model):
"""Featured products are manually selected by the shop owner
"""
product = models.ForeignKey(Product, models.CASCADE, verbose_name=_(u"Product"))
position = models.PositiveSmallIntegerField(_(u"Position"), default=1)
active = models.BooleanField(_(u"Active"), default=True)
class Meta:
ordering = ["position"]
app_label = 'marketing'
def __str__(self):
return u"%s (%s)" % (self.product.name, self.position)
class OrderRatingMail(models.Model):
"""Saves whether and when a rating mail has been send for an order.
"""
order = models.ForeignKey(Order, models.CASCADE, verbose_name=_(u"Order"))
send_date = models.DateTimeField(auto_now=True)
def __str__(self):
return u"%s (%s)" % (self.order.id, self.send_date.strftime(ugettext('DATE_FORMAT')))
class Meta:
app_label = 'marketing'
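# Illustrative ORM usage (a sketch based on the models above):
#   Topseller.objects.all() yields entries in their configured position
#   order, and FeaturedProduct.objects.filter(active=True) yields the
#   currently active featured products.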
| bsd-3-clause | 8,168,233,904,965,520,000 | -7,681,988,183,293,036,000 | 29.864407 | 93 | 0.667216 | false |
JCROM-Android/jcrom_external_chromium_org | third_party/tlslite/tlslite/X509CertChain.py | 76 | 9052 | """Class representing an X.509 certificate chain."""
from utils import cryptomath
from X509 import X509
class X509CertChain:
"""This class represents a chain of X.509 certificates.
@type x509List: list
@ivar x509List: A list of L{tlslite.X509.X509} instances,
starting with the end-entity certificate and with every
subsequent certificate certifying the previous.
"""
def __init__(self, x509List=None):
"""Create a new X509CertChain.
@type x509List: list
@param x509List: A list of L{tlslite.X509.X509} instances,
starting with the end-entity certificate and with every
subsequent certificate certifying the previous.
"""
if x509List:
self.x509List = x509List
else:
self.x509List = []
def parseChain(self, s):
"""Parse a PEM-encoded X.509 certificate file chain file.
@type s: str
        @param s: A PEM-encoded (e.g. Base64) X.509 certificate file, with every
certificate wrapped within "-----BEGIN CERTIFICATE-----" and
"-----END CERTIFICATE-----" tags). Extraneous data outside such tags,
such as human readable representations, will be ignored.
"""
class PEMIterator(object):
"""Simple iterator over PEM-encoded certificates within a string.
@type data: string
@ivar data: A string containing PEM-encoded (Base64) certificates,
with every certificate wrapped within "-----BEGIN CERTIFICATE-----"
and "-----END CERTIFICATE-----" tags). Extraneous data outside such
tags, such as human readable representations, will be ignored.
@type index: integer
@ivar index: The current offset within data to begin iterating from.
"""
_CERTIFICATE_HEADER = "-----BEGIN CERTIFICATE-----"
"""The PEM encoding block header for X.509 certificates."""
_CERTIFICATE_FOOTER = "-----END CERTIFICATE-----"
"""The PEM encoding block footer for X.509 certificates."""
def __init__(self, s):
self.data = s
self.index = 0
def __iter__(self):
return self
def next(self):
"""Iterates and returns the next L{tlslite.X509.X509}
certificate in data.
@rtype tlslite.X509.X509
"""
self.index = self.data.find(self._CERTIFICATE_HEADER,
self.index)
if self.index == -1:
raise StopIteration
end = self.data.find(self._CERTIFICATE_FOOTER, self.index)
if end == -1:
raise StopIteration
certStr = self.data[self.index+len(self._CERTIFICATE_HEADER) :
end]
self.index = end + len(self._CERTIFICATE_FOOTER)
bytes = cryptomath.base64ToBytes(certStr)
return X509().parseBinary(bytes)
self.x509List = list(PEMIterator(s))
return self
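    # Usage sketch (the file name below is an assumption):
    #   chain = X509CertChain().parseChain(open("server-chain.pem").read())
    #   print chain.getNumCerts(), chain.getFingerprint()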
def getNumCerts(self):
"""Get the number of certificates in this chain.
@rtype: int
"""
return len(self.x509List)
def getEndEntityPublicKey(self):
"""Get the public key from the end-entity certificate.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].publicKey
def getFingerprint(self):
"""Get the hex-encoded fingerprint of the end-entity certificate.
@rtype: str
@return: A hex-encoded fingerprint.
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].getFingerprint()
def getCommonName(self):
"""Get the Subject's Common Name from the end-entity certificate.
The cryptlib_py module must be installed in order to use this
function.
@rtype: str or None
@return: The CN component of the certificate's subject DN, if
present.
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].getCommonName()
def validate(self, x509TrustList):
"""Check the validity of the certificate chain.
This checks that every certificate in the chain validates with
the subsequent one, until some certificate validates with (or
is identical to) one of the passed-in root certificates.
The cryptlib_py module must be installed in order to use this
function.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
certificate chain must extend to one of these certificates to
be considered valid.
"""
import cryptlib_py
c1 = None
c2 = None
lastC = None
rootC = None
try:
rootFingerprints = [c.getFingerprint() for c in x509TrustList]
#Check that every certificate in the chain validates with the
#next one
for cert1, cert2 in zip(self.x509List, self.x509List[1:]):
#If we come upon a root certificate, we're done.
if cert1.getFingerprint() in rootFingerprints:
return True
c1 = cryptlib_py.cryptImportCert(cert1.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
c2 = cryptlib_py.cryptImportCert(cert2.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
try:
cryptlib_py.cryptCheckCert(c1, c2)
except:
return False
cryptlib_py.cryptDestroyCert(c1)
c1 = None
cryptlib_py.cryptDestroyCert(c2)
c2 = None
#If the last certificate is one of the root certificates, we're
#done.
if self.x509List[-1].getFingerprint() in rootFingerprints:
return True
#Otherwise, find a root certificate that the last certificate
#chains to, and validate them.
lastC = cryptlib_py.cryptImportCert(self.x509List[-1].writeBytes(),
cryptlib_py.CRYPT_UNUSED)
for rootCert in x509TrustList:
rootC = cryptlib_py.cryptImportCert(rootCert.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
if self._checkChaining(lastC, rootC):
try:
cryptlib_py.cryptCheckCert(lastC, rootC)
return True
except:
return False
return False
finally:
if not (c1 is None):
cryptlib_py.cryptDestroyCert(c1)
if not (c2 is None):
cryptlib_py.cryptDestroyCert(c2)
if not (lastC is None):
cryptlib_py.cryptDestroyCert(lastC)
if not (rootC is None):
cryptlib_py.cryptDestroyCert(rootC)
def _checkChaining(self, lastC, rootC):
import cryptlib_py
import array
def compareNames(name):
try:
length = cryptlib_py.cryptGetAttributeString(lastC, name, None)
lastName = array.array('B', [0] * length)
cryptlib_py.cryptGetAttributeString(lastC, name, lastName)
lastName = lastName.tostring()
except cryptlib_py.CryptException, e:
if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
lastName = None
try:
length = cryptlib_py.cryptGetAttributeString(rootC, name, None)
rootName = array.array('B', [0] * length)
cryptlib_py.cryptGetAttributeString(rootC, name, rootName)
rootName = rootName.tostring()
except cryptlib_py.CryptException, e:
if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
rootName = None
return lastName == rootName
cryptlib_py.cryptSetAttribute(lastC,
cryptlib_py.CRYPT_CERTINFO_ISSUERNAME,
cryptlib_py.CRYPT_UNUSED)
if not compareNames(cryptlib_py.CRYPT_CERTINFO_COUNTRYNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_LOCALITYNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONALUNITNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_COMMONNAME):
return False
return True | bsd-3-clause | 6,361,858,978,673,033,000 | -4,801,222,585,154,768,000 | 36.409091 | 80 | 0.558551 | false |
ehashman/oh-mainline | vendor/packages/Pygments/pygments/lexers/dalvik.py | 364 | 3442 | # -*- coding: utf-8 -*-
"""
pygments.lexers.dalvik
~~~~~~~~~~~~~~~~~~~~~~
Pygments lexers for Dalvik VM-related languages.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, bygroups
from pygments.token import Keyword, Text, Comment, Name, String, Number, \
Punctuation
__all__ = ['SmaliLexer']
class SmaliLexer(RegexLexer):
"""
For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
code.
*New in Pygments 1.6.*
"""
name = 'Smali'
aliases = ['smali']
filenames = ['*.smali']
mimetypes = ['text/smali']
tokens = {
'root': [
include('comment'),
include('label'),
include('field'),
include('method'),
include('class'),
include('directive'),
include('access-modifier'),
include('instruction'),
include('literal'),
include('punctuation'),
include('type'),
include('whitespace')
],
'directive': [
(r'^[ \t]*\.(class|super|implements|field|subannotation|annotation|'
r'enum|method|registers|locals|array-data|packed-switch|'
r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
r'epilogue|source)', Keyword),
(r'^[ \t]*\.end (field|subannotation|annotation|method|array-data|'
'packed-switch|sparse-switch|parameter|local)', Keyword),
(r'^[ \t]*\.restart local', Keyword),
],
'access-modifier': [
(r'(public|private|protected|static|final|synchronized|bridge|'
r'varargs|native|abstract|strictfp|synthetic|constructor|'
r'declared-synchronized|interface|enum|annotation|volatile|'
r'transient)', Keyword),
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
],
'instruction': [
(r'\b[vp]\d+\b', Name.Builtin), # registers
(r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
],
'literal': [
(r'".*"', String),
(r'0x[0-9A-Fa-f]+t?', Number.Hex),
(r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+L?', Number.Integer),
],
'field': [
(r'(\$?\b)([A-Za-z0-9_$]*)(:)',
bygroups(Punctuation, Name.Variable, Punctuation)),
],
'method': [
(r'<(?:cl)?init>', Name.Function), # constructor
(r'(\$?\b)([A-Za-z0-9_$]*)(\()',
bygroups(Punctuation, Name.Function, Punctuation)),
],
'label': [
(r':[A-Za-z0-9_]+', Name.Label),
],
'class': [
# class names in the form Lcom/namespace/ClassName;
# I only want to color the ClassName part, so the namespace part is
# treated as 'Text'
(r'(L)((?:[A-Za-z0-9_$]+/)*)([A-Za-z0-9_$]+)(;)',
bygroups(Keyword.Type, Text, Name.Class, Text)),
],
'punctuation': [
(r'->', Punctuation),
(r'[{},\(\):=\.-]', Punctuation),
],
'type': [
(r'[ZBSCIJFDV\[]+', Keyword.Type),
],
'comment': [
(r'#.*?\n', Comment),
],
}
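# Usage sketch (smali_source is assumed to hold Smali assembly text):
#   from pygments import highlight
#   from pygments.formatters import TerminalFormatter
#   print(highlight(smali_source, SmaliLexer(), TerminalFormatter()))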
| agpl-3.0 | -3,502,131,583,944,684,500 | 7,123,839,807,865,886,000 | 32.096154 | 80 | 0.481116 | false |
rain2o/collective.pfg.skiplogic | setup.py | 1 | 1050 | from setuptools import setup, find_packages
import os
version = '0.1'
setup(name='collective.pfg.skiplogic',
version=version,
description="Adds skip logic capabilities to ploneformgen forms",
long_description=open("README.txt").read() + "\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
# Get more strings from
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Framework :: Plone",
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='http://svn.plone.org/svn/collective/',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['collective', 'collective.pfg'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
[z3c.autoinclude.plugin]
target = plone
""",
)
| gpl-2.0 | -3,489,383,680,810,735,000 | -7,949,174,715,848,442,000 | 28.166667 | 72 | 0.569524 | false |
adviti/melange | thirdparty/google_appengine/lib/django_1_2/tests/regressiontests/middleware_exceptions/tests.py | 51 | 1441 | import sys
from django.test import TestCase
from django.core.signals import got_request_exception
class TestException(Exception):
pass
class TestMiddleware(object):
def process_request(self, request):
raise TestException('Test Exception')
class MiddlewareExceptionTest(TestCase):
def setUp(self):
self.exceptions = []
got_request_exception.connect(self._on_request_exception)
self.client.handler.load_middleware()
def tearDown(self):
got_request_exception.disconnect(self._on_request_exception)
self.exceptions = []
def _on_request_exception(self, sender, request, **kwargs):
self.exceptions.append(sys.exc_info())
def test_process_request(self):
self.client.handler._request_middleware.insert(0, TestMiddleware().process_request)
try:
response = self.client.get('/')
except TestException, e:
# Test client indefinitely re-raises any exceptions being raised
# during request handling. Hence actual testing that exception was
# properly handled is done by relying on got_request_exception
# signal being sent.
pass
except Exception, e:
self.fail("Unexpected exception: %s" % e)
self.assertEquals(len(self.exceptions), 1)
exception, value, tb = self.exceptions[0]
self.assertEquals(value.args, ('Test Exception', ))
| apache-2.0 | -1,145,441,438,518,481,000 | 7,078,224,458,239,577,000 | 35.025 | 91 | 0.664816 | false |
yasserglez/tagfs | packages/tagfs/contrib/django/db/backends/postgresql/introspection.py | 9 | 3694 | from django.db.backends import BaseDatabaseIntrospection
class DatabaseIntrospection(BaseDatabaseIntrospection):
# Maps type codes to Django Field types.
data_types_reverse = {
16: 'BooleanField',
21: 'SmallIntegerField',
23: 'IntegerField',
25: 'TextField',
700: 'FloatField',
701: 'FloatField',
869: 'IPAddressField',
1043: 'CharField',
1082: 'DateField',
1083: 'TimeField',
1114: 'DateTimeField',
1184: 'DateTimeField',
1266: 'TimeField',
1700: 'DecimalField',
}
def get_table_list(self, cursor):
"Returns a list of table names in the current database."
cursor.execute("""
SELECT c.relname
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r', 'v', '')
AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
AND pg_catalog.pg_table_is_visible(c.oid)""")
return [row[0] for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"Returns a description of the table, with the DB-API cursor.description interface."
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
return cursor.description
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
cursor.execute("""
SELECT con.conkey, con.confkey, c2.relname
FROM pg_constraint con, pg_class c1, pg_class c2
WHERE c1.oid = con.conrelid
AND c2.oid = con.confrelid
AND c1.relname = %s
AND con.contype = 'f'""", [table_name])
relations = {}
for row in cursor.fetchall():
try:
# row[0] and row[1] are like "{2}", so strip the curly braces.
relations[int(row[0][1:-1]) - 1] = (int(row[1][1:-1]) - 1, row[2])
except ValueError:
continue
return relations
def get_indexes(self, cursor, table_name):
"""
Returns a dictionary of fieldname -> infodict for the given table,
where each infodict is in the format:
{'primary_key': boolean representing whether it's the primary key,
'unique': boolean representing whether it's a unique index}
"""
# This query retrieves each index on the given table, including the
# first associated field name
cursor.execute("""
SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
WHERE c.oid = idx.indrelid
AND idx.indexrelid = c2.oid
AND attr.attrelid = c.oid
AND attr.attnum = idx.indkey[0]
AND c.relname = %s""", [table_name])
indexes = {}
for row in cursor.fetchall():
# row[1] (idx.indkey) is stored in the DB as an array. It comes out as
# a string of space-separated integers. This designates the field
# indexes (1-based) of the fields that have indexes on the table.
# Here, we skip any indexes across multiple fields.
if ' ' in row[1]:
continue
indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]}
return indexes
| mit | 6,557,527,022,374,402,000 | -5,515,559,886,687,417,000 | 41.45977 | 95 | 0.572821 | false |
benregn/itu-courses | itu/pipelines.py | 1 | 1027 | import pymongo
from scrapy.exceptions import DropItem
from scrapy.conf import settings
from scrapy import log
class MongoDBPipeline(object):
def __init__(self):
connection = pymongo.Connection(
settings['MONGODB_SERVER'], settings['MONGODB_PORT'])
db = connection[settings['MONGODB_DB']]
self.collection = db[settings['MONGODB_COLLECTION']]
def process_item(self, item, spider):
valid = True
        for data in item:
            # here we only check that each field value is not null/empty,
            # but we could do any crazy validation we want
            if not item[data]:
                valid = False
                raise DropItem(
                    "Missing %s course from %s" % (data, item['url']))
if valid:
self.collection.insert(dict(item))
log.msg("Item written to MongoDB database %s/%s" %
(settings['MONGODB_DB'], settings['MONGODB_COLLECTION']),
level=log.DEBUG, spider=spider)
return item
| mit | 8,977,330,260,983,433,000 | 953,603,380,877,427,200 | 33.233333 | 77 | 0.581305 | false |
mnip91/proactive-component-monitoring | dev/scripts/perf/perf_graph.py | 12 | 2516 | #!/usr/bin/env python
import sys
import os
import string
import numpy as np
import matplotlib.pyplot as plt
import re
def main():
dir = sys.argv[1]
    if len(sys.argv) == 2:
        # only the results directory was given: graph everything
        dict = create_dict(dir)
        draw_graph(dict, "all")
else:
for i in range(2, len(sys.argv)):
dict = create_dict(dir, sys.argv[i])
draw_graph(dict, sys.argv[i])
def create_dict(rootdir, match='.*'):
pattern = re.compile(match)
dict = {}
for branch in os.listdir(rootdir):
branch_dict = {}
for test in os.listdir(os.path.join(rootdir, branch)):
if pattern.match(test):
file = open(os.path.join(rootdir, branch, test))
str = file.readline()
str = str.strip()
start = str.find("=")
if start != -1:
branch_dict[test] = round(string.atof(str[start+1:]),2)
else:
branch_dict[test] = -1.
dict[branch] = branch_dict
return dict
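# Expected on-disk layout (inferred from create_dict above):
#   rootdir/<branch>/<test-name>, where each test file's first line
#   contains something like "result=12.34"; the float after '=' is parsed.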
def get_all_test_name(dict):
for branch in dict:
return dict[branch].keys()
def get_branches(dict):
return dict.keys()
def compare_by_branch(dict):
def local_print(test, d):
print test
for t in d:
print "\t" + t + "\t" + str(d[t])
print
for test in get_all_test_name(dict):
local_dict = {}
for branch in dict:
local_dict[branch] = dict[branch][test]
local_print(test, local_dict)
# NOTE: compare_by_branch above is currently unused
def short_test_name(long_name):
return long_name[long_name.rfind('.Test')+5:]
def draw_graph(dict, title):
def autolabel(rects):
for rect in rects:
height = rect.get_height()
ax.text(rect.get_x()+rect.get_width()/2., 1.05*height, '%d'%int(height),
ha='center', va='bottom')
def set_legend(bars, branches):
bs = ()
for bar in bars:
bs = bs + (bar[0],)
ax.legend( bs, branches)
colors = ['b', 'g', 'r', 'c', 'm', 'y', 'b']
branches = get_branches(dict)
all_tests = get_all_test_name(dict)
N = len(all_tests)
ind = np.arange(N)
width = 0.35
fig = plt.figure()
ax = fig.add_subplot(111)
data_sets = []
for branch in branches:
data =()
for test in all_tests:
data = data + (dict[branch].get(test, 0),)
data_sets.append(data)
bars = []
counter = 0
for data in data_sets:
bar = ax.bar(ind + (counter*width), data, width, color=colors[counter])
bars.append(bar)
counter += 1
    # add the axis labels, title and x tick labels
ax.set_ylabel('Perf')
ax.set_title('Branch perf comparison for ' + title)
ax.set_xticks(ind+width)
ax.set_xticklabels(map(short_test_name, all_tests))
set_legend(bars, branches)
for bar in bars:
autolabel(bar)
plt.savefig(title + ".png")
if __name__ == "__main__":
main()
| agpl-3.0 | 6,716,833,668,477,986,000 | 6,616,573,802,953,859,000 | 18.811024 | 75 | 0.627186 | false |
robertnishihara/ray | streaming/python/tests/test_word_count.py | 1 | 1689 | import os
import ray
from ray.streaming import StreamingContext
def test_word_count():
ray.init(_load_code_from_local=True)
ctx = StreamingContext.Builder() \
.build()
ctx.read_text_file(__file__) \
.set_parallelism(1) \
.flat_map(lambda x: x.split()) \
.map(lambda x: (x, 1)) \
.key_by(lambda x: x[0]) \
.reduce(lambda old_value, new_value:
(old_value[0], old_value[1] + new_value[1])) \
.filter(lambda x: "ray" not in x) \
.sink(lambda x: print("result", x))
ctx.submit("word_count")
import time
time.sleep(3)
ray.shutdown()
def test_simple_word_count():
ray.init(_load_code_from_local=True)
ctx = StreamingContext.Builder() \
.build()
sink_file = "/tmp/ray_streaming_test_simple_word_count.txt"
if os.path.exists(sink_file):
os.remove(sink_file)
def sink_func(x):
with open(sink_file, "a") as f:
line = "{}:{},".format(x[0], x[1])
print("sink_func", line)
f.write(line)
ctx.from_values("a", "b", "c") \
.set_parallelism(1) \
.flat_map(lambda x: [x, x]) \
.map(lambda x: (x, 1)) \
.key_by(lambda x: x[0]) \
.reduce(lambda old_value, new_value:
(old_value[0], old_value[1] + new_value[1])) \
.sink(sink_func)
ctx.submit("word_count")
import time
time.sleep(3)
ray.shutdown()
with open(sink_file, "r") as f:
result = f.read()
assert "a:2" in result
assert "b:2" in result
assert "c:2" in result
if __name__ == "__main__":
test_word_count()
test_simple_word_count()
| apache-2.0 | 6,123,783,684,095,021,000 | 5,631,039,510,754,855,000 | 27.15 | 63 | 0.536412 | false |
letolab/airy | airy/utils/cache.py | 1 | 9676 | """
This module contains helper functions for controlling caching. It does so by
managing the "Vary" header of responses. It includes functions to patch the
header of response objects directly and decorators that change functions to do
that header-patching themselves.
For information on the Vary header, see:
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.44
Essentially, the "Vary" HTTP header defines which headers a cache should take
into account when building its cache key. Requests with the same path but
different header content for headers named in "Vary" need to get different
cache keys to prevent delivery of wrong content.
An example: i18n middleware would need to distinguish caches by the
"Accept-language" header.
"""
import re
import time
from airy.core.conf import settings
from airy.core.cache import get_cache
from airy.utils.encoding import smart_str, iri_to_uri
from airy.utils.http import http_date
from airy.utils.hashcompat import md5_constructor
from airy.utils.translation import get_language
from airy.http import HttpRequest
cc_delim_re = re.compile(r'\s*,\s*')
def patch_cache_control(response, **kwargs):
"""
This function patches the Cache-Control header by adding all
keyword arguments to it. The transformation is as follows:
* All keyword parameter names are turned to lowercase, and underscores
are converted to hyphens.
* If the value of a parameter is True (exactly True, not just a
true value), only the parameter name is added to the header.
* All other parameters are added with their value, after applying
str() to it.
"""
def dictitem(s):
t = s.split('=', 1)
if len(t) > 1:
return (t[0].lower(), t[1])
else:
return (t[0].lower(), True)
def dictvalue(t):
if t[1] is True:
return t[0]
else:
return t[0] + '=' + smart_str(t[1])
if response.has_header('Cache-Control'):
cc = cc_delim_re.split(response['Cache-Control'])
cc = dict([dictitem(el) for el in cc])
else:
cc = {}
# If there's already a max-age header but we're being asked to set a new
# max-age, use the minimum of the two ages. In practice this happens when
# a decorator and a piece of middleware both operate on a given view.
if 'max-age' in cc and 'max_age' in kwargs:
kwargs['max_age'] = min(cc['max-age'], kwargs['max_age'])
# Allow overriding private caching and vice versa
if 'private' in cc and 'public' in kwargs:
del cc['private']
elif 'public' in cc and 'private' in kwargs:
del cc['public']
for (k, v) in kwargs.items():
cc[k.replace('_', '-')] = v
cc = ', '.join([dictvalue(el) for el in cc.items()])
response['Cache-Control'] = cc
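# Usage sketch (illustrative values):
#   patch_cache_control(response, max_age=3600, public=True)
#   # -> Cache-Control: max-age=3600, public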
def get_max_age(response):
"""
Returns the max-age from the response Cache-Control header as an integer
    (or ``None`` if it wasn't found or wasn't an integer).
"""
if not response.has_header('Cache-Control'):
return
cc = dict([_to_tuple(el) for el in
cc_delim_re.split(response['Cache-Control'])])
if 'max-age' in cc:
try:
return int(cc['max-age'])
except (ValueError, TypeError):
pass
def patch_response_headers(response, cache_timeout=None):
"""
Adds some useful headers to the given HttpResponse object:
ETag, Last-Modified, Expires and Cache-Control
Each header is only added if it isn't already set.
cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used
by default.
"""
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
if cache_timeout < 0:
cache_timeout = 0 # Can't have max-age negative
if settings.USE_ETAGS and not response.has_header('ETag'):
response['ETag'] = '"%s"' % md5_constructor(response.content).hexdigest()
if not response.has_header('Last-Modified'):
response['Last-Modified'] = http_date()
if not response.has_header('Expires'):
response['Expires'] = http_date(time.time() + cache_timeout)
patch_cache_control(response, max_age=cache_timeout)
def add_never_cache_headers(response):
"""
Adds headers to a response to indicate that a page should never be cached.
"""
patch_response_headers(response, cache_timeout=-1)
def patch_vary_headers(response, newheaders):
"""
Adds (or updates) the "Vary" header in the given HttpResponse object.
newheaders is a list of header names that should be in "Vary". Existing
headers in "Vary" aren't removed.
"""
# Note that we need to keep the original order intact, because cache
# implementations may rely on the order of the Vary contents in, say,
# computing an MD5 hash.
if response.has_header('Vary'):
vary_headers = cc_delim_re.split(response['Vary'])
else:
vary_headers = []
# Use .lower() here so we treat headers as case-insensitive.
existing_headers = set([header.lower() for header in vary_headers])
additional_headers = [newheader for newheader in newheaders
if newheader.lower() not in existing_headers]
response['Vary'] = ', '.join(vary_headers + additional_headers)
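# e.g. (illustrative): with an existing "Vary: Accept-Language" header,
# patch_vary_headers(response, ['Cookie']) yields
# "Vary: Accept-Language, Cookie".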
def has_vary_header(response, header_query):
"""
Checks to see if the response has a given header name in its Vary header.
"""
if not response.has_header('Vary'):
return False
vary_headers = cc_delim_re.split(response['Vary'])
existing_headers = set([header.lower() for header in vary_headers])
return header_query.lower() in existing_headers
def _i18n_cache_key_suffix(request, cache_key):
"""If enabled, returns the cache key ending with a locale."""
if settings.USE_I18N:
# first check if LocaleMiddleware or another middleware added
# LANGUAGE_CODE to request, then fall back to the active language
# which in turn can also fall back to settings.LANGUAGE_CODE
cache_key += '.%s' % getattr(request, 'LANGUAGE_CODE', get_language())
return cache_key
def _generate_cache_key(request, method, headerlist, key_prefix):
"""Returns a cache key from the headers given in the header list."""
ctx = md5_constructor()
for header in headerlist:
value = request.META.get(header, None)
if value is not None:
ctx.update(value)
path = md5_constructor(iri_to_uri(request.get_full_path()))
cache_key = 'views.decorators.cache.cache_page.%s.%s.%s.%s' % (
key_prefix, request.method, path.hexdigest(), ctx.hexdigest())
return _i18n_cache_key_suffix(request, cache_key)
def _generate_cache_header_key(key_prefix, request):
"""Returns a cache key for the header cache."""
path = md5_constructor(iri_to_uri(request.get_full_path()))
cache_key = 'views.decorators.cache.cache_header.%s.%s' % (
key_prefix, path.hexdigest())
return _i18n_cache_key_suffix(request, cache_key)
def get_cache_key(request, key_prefix=None, method='GET', cache=None):
"""
Returns a cache key based on the request path and query. It can be used
in the request phase because it pulls the list of headers to take into
account from the global path registry and uses those to build a cache key
to check against.
If there is no headerlist stored, the page needs to be rebuilt, so this
function returns None.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = get_cache(settings.CACHE_MIDDLEWARE_ALIAS)
headerlist = cache.get(cache_key, None)
if headerlist is not None:
return _generate_cache_key(request, method, headerlist, key_prefix)
else:
return None
def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None):
"""
Learns what headers to take into account for some request path from the
response object. It stores those headers in a global path registry so that
later access to that path will know what headers to take into account
without building the response object itself. The headers are named in the
Vary header of the response, but we want to prevent response generation.
The list of headers to use for cache key generation is stored in the same
cache as the pages themselves. If the cache ages some data out of the
cache, this just means that we have to build the response once to get at
the Vary header and so at the list of headers to use for the cache key.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = get_cache(settings.CACHE_MIDDLEWARE_ALIAS)
if response.has_header('Vary'):
headerlist = ['HTTP_'+header.upper().replace('-', '_')
for header in cc_delim_re.split(response['Vary'])]
cache.set(cache_key, headerlist, cache_timeout)
return _generate_cache_key(request, request.method, headerlist, key_prefix)
else:
# if there is no Vary header, we still need a cache key
# for the request.get_full_path()
cache.set(cache_key, [], cache_timeout)
return _generate_cache_key(request, request.method, [], key_prefix)
def _to_tuple(s):
t = s.split('=',1)
if len(t) == 2:
return t[0].lower(), t[1]
return t[0].lower(), True
| bsd-2-clause | -4,321,154,748,764,523,000 | 9,220,089,832,829,722,000 | 39.655462 | 88 | 0.671042 | false |
tboyce021/home-assistant | homeassistant/components/timer/reproduce_state.py | 16 | 2247 | """Reproduce a Timer state."""
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_DURATION,
DOMAIN,
SERVICE_CANCEL,
SERVICE_PAUSE,
SERVICE_START,
STATUS_ACTIVE,
STATUS_IDLE,
STATUS_PAUSED,
)
_LOGGER = logging.getLogger(__name__)
VALID_STATES = {STATUS_IDLE, STATUS_ACTIVE, STATUS_PAUSED}
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if cur_state.state == state.state and cur_state.attributes.get(
ATTR_DURATION
) == state.attributes.get(ATTR_DURATION):
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
if state.state == STATUS_ACTIVE:
service = SERVICE_START
if ATTR_DURATION in state.attributes:
service_data[ATTR_DURATION] = state.attributes[ATTR_DURATION]
elif state.state == STATUS_PAUSED:
service = SERVICE_PAUSE
elif state.state == STATUS_IDLE:
service = SERVICE_CANCEL
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Timer states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
| apache-2.0 | -5,047,526,695,867,832,000 | 7,463,852,276,267,858,000 | 26.072289 | 81 | 0.64664 | false |
837468220/python-for-android | python3-alpha/python3-src/Lib/test/test_sys.py | 47 | 32005 | import unittest, test.support
import sys, io, os
import struct
import subprocess
import textwrap
import warnings
import operator
import codecs
# count the number of test runs, used to create unique
# strings to intern in test_intern()
numruns = 0
try:
import threading
except ImportError:
threading = None
class SysModuleTest(unittest.TestCase):
def setUp(self):
self.orig_stdout = sys.stdout
self.orig_stderr = sys.stderr
self.orig_displayhook = sys.displayhook
def tearDown(self):
sys.stdout = self.orig_stdout
sys.stderr = self.orig_stderr
sys.displayhook = self.orig_displayhook
test.support.reap_children()
def test_original_displayhook(self):
import builtins
out = io.StringIO()
sys.stdout = out
dh = sys.__displayhook__
self.assertRaises(TypeError, dh)
if hasattr(builtins, "_"):
del builtins._
dh(None)
self.assertEqual(out.getvalue(), "")
self.assertTrue(not hasattr(builtins, "_"))
dh(42)
self.assertEqual(out.getvalue(), "42\n")
self.assertEqual(builtins._, 42)
del sys.stdout
self.assertRaises(RuntimeError, dh, 42)
def test_lost_displayhook(self):
del sys.displayhook
code = compile("42", "<string>", "single")
self.assertRaises(RuntimeError, eval, code)
def test_custom_displayhook(self):
def baddisplayhook(obj):
raise ValueError
sys.displayhook = baddisplayhook
code = compile("42", "<string>", "single")
self.assertRaises(ValueError, eval, code)
def test_original_excepthook(self):
err = io.StringIO()
sys.stderr = err
eh = sys.__excepthook__
self.assertRaises(TypeError, eh)
try:
raise ValueError(42)
except ValueError as exc:
eh(*sys.exc_info())
self.assertTrue(err.getvalue().endswith("ValueError: 42\n"))
def test_excepthook(self):
with test.support.captured_output("stderr") as stderr:
sys.excepthook(1, '1', 1)
self.assertTrue("TypeError: print_exception(): Exception expected for " \
"value, str found" in stderr.getvalue())
# FIXME: testing the code for a lost or replaced excepthook in
# Python/pythonrun.c::PyErr_PrintEx() is tricky.
def test_exit(self):
self.assertRaises(TypeError, sys.exit, 42, 42)
# call without argument
try:
sys.exit(0)
except SystemExit as exc:
self.assertEqual(exc.code, 0)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with tuple argument with one entry
# entry will be unpacked
try:
sys.exit(42)
except SystemExit as exc:
self.assertEqual(exc.code, 42)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with integer argument
try:
sys.exit((42,))
except SystemExit as exc:
self.assertEqual(exc.code, 42)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with string argument
try:
sys.exit("exit")
except SystemExit as exc:
self.assertEqual(exc.code, "exit")
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with tuple argument with two entries
try:
sys.exit((17, 23))
except SystemExit as exc:
self.assertEqual(exc.code, (17, 23))
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# test that the exit machinery handles SystemExits properly
rc = subprocess.call([sys.executable, "-c",
"raise SystemExit(47)"])
self.assertEqual(rc, 47)
def check_exit_message(code, expected, env=None):
process = subprocess.Popen([sys.executable, "-c", code],
stderr=subprocess.PIPE, env=env)
stdout, stderr = process.communicate()
self.assertEqual(process.returncode, 1)
self.assertTrue(stderr.startswith(expected),
"%s doesn't start with %s" % (ascii(stderr), ascii(expected)))
# test that stderr buffer if flushed before the exit message is written
# into stderr
check_exit_message(
r'import sys; sys.stderr.write("unflushed,"); sys.exit("message")',
b"unflushed,message")
# test that the exit message is written with backslashreplace error
# handler to stderr
check_exit_message(
r'import sys; sys.exit("surrogates:\uDCFF")',
b"surrogates:\\udcff")
# test that the unicode message is encoded to the stderr encoding
# instead of the default encoding (utf8)
env = os.environ.copy()
env['PYTHONIOENCODING'] = 'latin-1'
check_exit_message(
r'import sys; sys.exit("h\xe9")',
b"h\xe9", env=env)
def test_getdefaultencoding(self):
self.assertRaises(TypeError, sys.getdefaultencoding, 42)
# can't check more than the type, as the user might have changed it
self.assertIsInstance(sys.getdefaultencoding(), str)
# testing sys.settrace() is done in test_sys_settrace.py
# testing sys.setprofile() is done in test_sys_setprofile.py
def test_setcheckinterval(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.assertRaises(TypeError, sys.setcheckinterval)
orig = sys.getcheckinterval()
for n in 0, 100, 120, orig: # orig last to restore starting state
sys.setcheckinterval(n)
self.assertEqual(sys.getcheckinterval(), n)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_switchinterval(self):
self.assertRaises(TypeError, sys.setswitchinterval)
self.assertRaises(TypeError, sys.setswitchinterval, "a")
self.assertRaises(ValueError, sys.setswitchinterval, -1.0)
self.assertRaises(ValueError, sys.setswitchinterval, 0.0)
orig = sys.getswitchinterval()
# sanity check
self.assertTrue(orig < 0.5, orig)
try:
for n in 0.00001, 0.05, 3.0, orig:
sys.setswitchinterval(n)
self.assertAlmostEqual(sys.getswitchinterval(), n)
finally:
sys.setswitchinterval(orig)
def test_recursionlimit(self):
self.assertRaises(TypeError, sys.getrecursionlimit, 42)
oldlimit = sys.getrecursionlimit()
self.assertRaises(TypeError, sys.setrecursionlimit)
self.assertRaises(ValueError, sys.setrecursionlimit, -42)
sys.setrecursionlimit(10000)
self.assertEqual(sys.getrecursionlimit(), 10000)
sys.setrecursionlimit(oldlimit)
@unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
'fatal error if run with a trace function')
def test_recursionlimit_recovery(self):
# NOTE: this test is slightly fragile in that it depends on the current
# recursion count when executing the test being low enough so as to
# trigger the recursion recovery detection in the _Py_MakeEndRecCheck
# macro (see ceval.h).
oldlimit = sys.getrecursionlimit()
def f():
f()
try:
for i in (50, 1000):
# Issue #5392: stack overflow after hitting recursion limit twice
sys.setrecursionlimit(i)
self.assertRaises(RuntimeError, f)
self.assertRaises(RuntimeError, f)
finally:
sys.setrecursionlimit(oldlimit)
def test_recursionlimit_fatalerror(self):
# A fatal error occurs if a second recursion limit is hit when recovering
# from a first one.
if os.name == "nt":
raise unittest.SkipTest(
"under Windows, test would generate a spurious crash dialog")
code = textwrap.dedent("""
import sys
def f():
try:
f()
except RuntimeError:
f()
sys.setrecursionlimit(%d)
f()""")
for i in (50, 1000):
sub = subprocess.Popen([sys.executable, '-c', code % i],
stderr=subprocess.PIPE)
err = sub.communicate()[1]
self.assertTrue(sub.returncode, sub.returncode)
self.assertTrue(
b"Fatal Python error: Cannot recover from stack overflow" in err,
err)
def test_getwindowsversion(self):
# Raise SkipTest if sys doesn't have getwindowsversion attribute
test.support.get_attribute(sys, "getwindowsversion")
v = sys.getwindowsversion()
self.assertEqual(len(v), 5)
self.assertIsInstance(v[0], int)
self.assertIsInstance(v[1], int)
self.assertIsInstance(v[2], int)
self.assertIsInstance(v[3], int)
self.assertIsInstance(v[4], str)
self.assertRaises(IndexError, operator.getitem, v, 5)
self.assertIsInstance(v.major, int)
self.assertIsInstance(v.minor, int)
self.assertIsInstance(v.build, int)
self.assertIsInstance(v.platform, int)
self.assertIsInstance(v.service_pack, str)
self.assertIsInstance(v.service_pack_minor, int)
self.assertIsInstance(v.service_pack_major, int)
self.assertIsInstance(v.suite_mask, int)
self.assertIsInstance(v.product_type, int)
self.assertEqual(v[0], v.major)
self.assertEqual(v[1], v.minor)
self.assertEqual(v[2], v.build)
self.assertEqual(v[3], v.platform)
self.assertEqual(v[4], v.service_pack)
# This is how platform.py calls it. Make sure tuple
# still has 5 elements
maj, min, buildno, plat, csd = sys.getwindowsversion()
def test_call_tracing(self):
self.assertRaises(TypeError, sys.call_tracing, type, 2)
def test_dlopenflags(self):
if hasattr(sys, "setdlopenflags"):
self.assertTrue(hasattr(sys, "getdlopenflags"))
self.assertRaises(TypeError, sys.getdlopenflags, 42)
oldflags = sys.getdlopenflags()
self.assertRaises(TypeError, sys.setdlopenflags)
sys.setdlopenflags(oldflags+1)
self.assertEqual(sys.getdlopenflags(), oldflags+1)
sys.setdlopenflags(oldflags)
def test_refcount(self):
# n here must be a global in order for this test to pass while
# tracing with a python function. Tracing calls PyFrame_FastToLocals
# which will add a copy of any locals to the frame object, causing
# the reference count to increase by 2 instead of 1.
global n
self.assertRaises(TypeError, sys.getrefcount)
c = sys.getrefcount(None)
n = None
self.assertEqual(sys.getrefcount(None), c+1)
del n
self.assertEqual(sys.getrefcount(None), c)
if hasattr(sys, "gettotalrefcount"):
self.assertIsInstance(sys.gettotalrefcount(), int)
def test_getframe(self):
self.assertRaises(TypeError, sys._getframe, 42, 42)
self.assertRaises(ValueError, sys._getframe, 2000000000)
self.assertTrue(
SysModuleTest.test_getframe.__code__ \
is sys._getframe().f_code
)
# sys._current_frames() is a CPython-only gimmick.
def test_current_frames(self):
have_threads = True
try:
import _thread
except ImportError:
have_threads = False
if have_threads:
self.current_frames_with_threads()
else:
self.current_frames_without_threads()
# Test sys._current_frames() in a WITH_THREADS build.
@test.support.reap_threads
def current_frames_with_threads(self):
import threading, _thread
import traceback
# Spawn a thread that blocks at a known place. Then the main
# thread does sys._current_frames(), and verifies that the frames
# returned make sense.
entered_g = threading.Event()
leave_g = threading.Event()
thread_info = [] # the thread's id
def f123():
g456()
def g456():
thread_info.append(_thread.get_ident())
entered_g.set()
leave_g.wait()
t = threading.Thread(target=f123)
t.start()
entered_g.wait()
# At this point, t has finished its entered_g.set(), although it's
# impossible to guess whether it's still on that line or has moved on
# to its leave_g.wait().
self.assertEqual(len(thread_info), 1)
thread_id = thread_info[0]
d = sys._current_frames()
main_id = _thread.get_ident()
self.assertIn(main_id, d)
self.assertIn(thread_id, d)
# Verify that the captured main-thread frame is _this_ frame.
frame = d.pop(main_id)
self.assertTrue(frame is sys._getframe())
# Verify that the captured thread frame is blocked in g456, called
        # from f123. This is a little tricky, since various bits of
# threading.py are also in the thread's call stack.
frame = d.pop(thread_id)
stack = traceback.extract_stack(frame)
for i, (filename, lineno, funcname, sourceline) in enumerate(stack):
if funcname == "f123":
break
else:
self.fail("didn't find f123() on thread's call stack")
self.assertEqual(sourceline, "g456()")
# And the next record must be for g456().
filename, lineno, funcname, sourceline = stack[i+1]
self.assertEqual(funcname, "g456")
self.assertIn(sourceline, ["leave_g.wait()", "entered_g.set()"])
# Reap the spawned thread.
leave_g.set()
t.join()
# Test sys._current_frames() when thread support doesn't exist.
def current_frames_without_threads(self):
# Not much happens here: there is only one thread, with artificial
# "thread id" 0.
d = sys._current_frames()
self.assertEqual(len(d), 1)
self.assertIn(0, d)
self.assertTrue(d[0] is sys._getframe())
def test_attributes(self):
self.assertIsInstance(sys.api_version, int)
self.assertIsInstance(sys.argv, list)
self.assertIn(sys.byteorder, ("little", "big"))
self.assertIsInstance(sys.builtin_module_names, tuple)
self.assertIsInstance(sys.copyright, str)
self.assertIsInstance(sys.exec_prefix, str)
self.assertIsInstance(sys.executable, str)
self.assertEqual(len(sys.float_info), 11)
self.assertEqual(sys.float_info.radix, 2)
self.assertEqual(len(sys.int_info), 2)
self.assertTrue(sys.int_info.bits_per_digit % 5 == 0)
self.assertTrue(sys.int_info.sizeof_digit >= 1)
self.assertEqual(type(sys.int_info.bits_per_digit), int)
self.assertEqual(type(sys.int_info.sizeof_digit), int)
self.assertIsInstance(sys.hexversion, int)
self.assertEqual(len(sys.hash_info), 5)
self.assertLess(sys.hash_info.modulus, 2**sys.hash_info.width)
# sys.hash_info.modulus should be a prime; we do a quick
# probable primality test (doesn't exclude the possibility of
# a Carmichael number)
for x in range(1, 100):
self.assertEqual(
pow(x, sys.hash_info.modulus-1, sys.hash_info.modulus),
1,
"sys.hash_info.modulus {} is a non-prime".format(
sys.hash_info.modulus)
)
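        # (Illustrative aside: a plain Fermat check like this can be fooled
        # by Carmichael numbers, e.g. 561 = 3*11*17 satisfies
        # pow(x, 560, 561) == 1 for every x coprime to 561, hence the
        # "probable" hedge above; bases sharing a factor still expose it.)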
self.assertIsInstance(sys.hash_info.inf, int)
self.assertIsInstance(sys.hash_info.nan, int)
self.assertIsInstance(sys.hash_info.imag, int)
self.assertIsInstance(sys.maxsize, int)
self.assertIsInstance(sys.maxunicode, int)
self.assertIsInstance(sys.platform, str)
self.assertIsInstance(sys.prefix, str)
self.assertIsInstance(sys.version, str)
vi = sys.version_info
self.assertIsInstance(vi[:], tuple)
self.assertEqual(len(vi), 5)
self.assertIsInstance(vi[0], int)
self.assertIsInstance(vi[1], int)
self.assertIsInstance(vi[2], int)
self.assertIn(vi[3], ("alpha", "beta", "candidate", "final"))
self.assertIsInstance(vi[4], int)
self.assertIsInstance(vi.major, int)
self.assertIsInstance(vi.minor, int)
self.assertIsInstance(vi.micro, int)
self.assertIn(vi.releaselevel, ("alpha", "beta", "candidate", "final"))
self.assertIsInstance(vi.serial, int)
self.assertEqual(vi[0], vi.major)
self.assertEqual(vi[1], vi.minor)
self.assertEqual(vi[2], vi.micro)
self.assertEqual(vi[3], vi.releaselevel)
self.assertEqual(vi[4], vi.serial)
self.assertTrue(vi > (1,0,0))
self.assertIsInstance(sys.float_repr_style, str)
self.assertIn(sys.float_repr_style, ('short', 'legacy'))
if not sys.platform.startswith('win'):
self.assertIsInstance(sys.abiflags, str)
def test_43581(self):
# Can't use sys.stdout, as this is a StringIO object when
# the test runs under regrtest.
self.assertEqual(sys.__stdout__.encoding, sys.__stderr__.encoding)
def test_intern(self):
global numruns
numruns += 1
self.assertRaises(TypeError, sys.intern)
s = "never interned before" + str(numruns)
self.assertTrue(sys.intern(s) is s)
s2 = s.swapcase().swapcase()
self.assertTrue(sys.intern(s2) is s)
# Subclasses of string can't be interned, because they
# provide too much opportunity for insane things to happen.
# We don't want them in the interned dict and if they aren't
# actually interned, we don't want to create the appearance
# that they are by allowing intern() to succeed.
class S(str):
def __hash__(self):
return 123
self.assertRaises(TypeError, sys.intern, S("abc"))
def test_sys_flags(self):
self.assertTrue(sys.flags)
attrs = ("debug", "division_warning",
"inspect", "interactive", "optimize", "dont_write_bytecode",
"no_user_site", "no_site", "ignore_environment", "verbose",
"bytes_warning", "quiet")
for attr in attrs:
self.assertTrue(hasattr(sys.flags, attr), attr)
self.assertEqual(type(getattr(sys.flags, attr)), int, attr)
self.assertTrue(repr(sys.flags))
self.assertEqual(len(sys.flags), len(attrs))
def test_clear_type_cache(self):
sys._clear_type_cache()
def test_ioencoding(self):
env = dict(os.environ)
# Test character: cent sign, encoded as 0x4A (ASCII J) in CP424,
# not representable in ASCII.
env["PYTHONIOENCODING"] = "cp424"
p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
stdout = subprocess.PIPE, env=env)
out = p.communicate()[0].strip()
self.assertEqual(out, "\xa2\n".encode("cp424"))
env["PYTHONIOENCODING"] = "ascii:replace"
p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
stdout = subprocess.PIPE, env=env)
out = p.communicate()[0].strip()
self.assertEqual(out, b'?')
def test_executable(self):
# Issue #7774: Ensure that sys.executable is an empty string if argv[0]
        # has been set to a nonexistent program name and Python is unable to
# retrieve the real program name
# For a normal installation, it should work without 'cwd'
# argument. For test runs in the build directory, see #7774.
python_dir = os.path.dirname(os.path.realpath(sys.executable))
p = subprocess.Popen(
["nonexistent", "-c",
'import sys; print(sys.executable.encode("ascii", "backslashreplace"))'],
executable=sys.executable, stdout=subprocess.PIPE, cwd=python_dir)
stdout = p.communicate()[0]
executable = stdout.strip().decode("ASCII")
p.wait()
self.assertIn(executable, ["b''", repr(sys.executable.encode("ascii", "backslashreplace"))])
def check_fsencoding(self, fs_encoding, expected=None):
self.assertIsNotNone(fs_encoding)
codecs.lookup(fs_encoding)
if expected:
self.assertEqual(fs_encoding, expected)
def test_getfilesystemencoding(self):
fs_encoding = sys.getfilesystemencoding()
if sys.platform == 'darwin':
expected = 'utf-8'
elif sys.platform == 'win32':
expected = 'mbcs'
else:
expected = None
self.check_fsencoding(fs_encoding, expected)
class SizeofTest(unittest.TestCase):
TPFLAGS_HAVE_GC = 1<<14
TPFLAGS_HEAPTYPE = 1<<9
def setUp(self):
self.c = len(struct.pack('c', b' '))
self.H = len(struct.pack('H', 0))
self.i = len(struct.pack('i', 0))
self.l = len(struct.pack('l', 0))
self.P = len(struct.pack('P', 0))
# due to missing size_t information from struct, it is assumed that
# sizeof(Py_ssize_t) = sizeof(void*)
self.header = 'PP'
self.vheader = self.header + 'P'
if hasattr(sys, "gettotalrefcount"):
self.header += '2P'
self.vheader += '2P'
self.longdigit = sys.int_info.sizeof_digit
import _testcapi
self.gc_headsize = _testcapi.SIZEOF_PYGC_HEAD
self.file = open(test.support.TESTFN, 'wb')
def tearDown(self):
self.file.close()
test.support.unlink(test.support.TESTFN)
def check_sizeof(self, o, size):
result = sys.getsizeof(o)
# add GC header size
if ((type(o) == type) and (o.__flags__ & self.TPFLAGS_HEAPTYPE) or\
((type(o) != type) and (type(o).__flags__ & self.TPFLAGS_HAVE_GC))):
size += self.gc_headsize
msg = 'wrong size for %s: got %d, expected %d' \
% (type(o), result, size)
self.assertEqual(result, size, msg)
def calcsize(self, fmt):
"""Wrapper around struct.calcsize which enforces the alignment of the
end of a structure to the alignment requirement of pointer.
Note: This wrapper should only be used if a pointer member is included
and no member with a size larger than a pointer exists.
"""
return struct.calcsize(fmt + '0P')
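    # Illustrative, platform-dependent example (assuming 64-bit pointers):
    # struct.calcsize('i') == 4, but self.calcsize('i') computes
    # struct.calcsize('i0P') == 8, because the '0P' repeat count of zero
    # adds no member yet forces padding up to pointer alignment.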
def test_gc_head_size(self):
# Check that the gc header size is added to objects tracked by the gc.
h = self.header
vh = self.vheader
size = self.calcsize
gc_header_size = self.gc_headsize
# bool objects are not gc tracked
self.assertEqual(sys.getsizeof(True), size(vh) + self.longdigit)
# but lists are
self.assertEqual(sys.getsizeof([]), size(vh + 'PP') + gc_header_size)
def test_default(self):
h = self.header
vh = self.vheader
size = self.calcsize
self.assertEqual(sys.getsizeof(True), size(vh) + self.longdigit)
self.assertEqual(sys.getsizeof(True, -1), size(vh) + self.longdigit)
def test_objecttypes(self):
# check all types defined in Objects/
h = self.header
vh = self.vheader
size = self.calcsize
check = self.check_sizeof
# bool
check(True, size(vh) + self.longdigit)
# buffer
# XXX
# builtin_function_or_method
check(len, size(h + '3P'))
# bytearray
samples = [b'', b'u'*100000]
for sample in samples:
x = bytearray(sample)
check(x, size(vh + 'iPP') + x.__alloc__() * self.c)
# bytearray_iterator
check(iter(bytearray()), size(h + 'PP'))
# cell
def get_cell():
x = 42
def inner():
return x
return inner
check(get_cell().__closure__[0], size(h + 'P'))
# code
check(get_cell().__code__, size(h + '5i8Pi3P'))
# complex
check(complex(0,1), size(h + '2d'))
# method_descriptor (descriptor object)
check(str.lower, size(h + '2PP'))
# classmethod_descriptor (descriptor object)
# XXX
# member_descriptor (descriptor object)
import datetime
check(datetime.timedelta.days, size(h + '2PP'))
# getset_descriptor (descriptor object)
import collections
check(collections.defaultdict.default_factory, size(h + '2PP'))
# wrapper_descriptor (descriptor object)
check(int.__add__, size(h + '2P2P'))
# method-wrapper (descriptor object)
check({}.__iter__, size(h + '2P'))
# dict
check({}, size(h + '3P2P' + 8*'P2P'))
longdict = {1:1, 2:2, 3:3, 4:4, 5:5, 6:6, 7:7, 8:8}
check(longdict, size(h + '3P2P' + 8*'P2P') + 16*size('P2P'))
# dictionary-keyiterator
check({}.keys(), size(h + 'P'))
# dictionary-valueiterator
check({}.values(), size(h + 'P'))
# dictionary-itemiterator
check({}.items(), size(h + 'P'))
# dictproxy
class C(object): pass
check(C.__dict__, size(h + 'P'))
# BaseException
check(BaseException(), size(h + '5P'))
# UnicodeEncodeError
check(UnicodeEncodeError("", "", 0, 0, ""), size(h + '5P 2P2PP'))
# UnicodeDecodeError
# XXX
# check(UnicodeDecodeError("", "", 0, 0, ""), size(h + '5P2PP'))
# UnicodeTranslateError
check(UnicodeTranslateError("", 0, 1, ""), size(h + '5P 2P2PP'))
        # ellipsis
check(Ellipsis, size(h + ''))
# EncodingMap
import codecs, encodings.iso8859_3
x = codecs.charmap_build(encodings.iso8859_3.decoding_table)
check(x, size(h + '32B2iB'))
# enumerate
check(enumerate([]), size(h + 'l3P'))
# reverse
check(reversed(''), size(h + 'PP'))
# float
check(float(0), size(h + 'd'))
# sys.floatinfo
check(sys.float_info, size(vh) + self.P * len(sys.float_info))
# frame
import inspect
CO_MAXBLOCKS = 20
x = inspect.currentframe()
ncells = len(x.f_code.co_cellvars)
nfrees = len(x.f_code.co_freevars)
extras = x.f_code.co_stacksize + x.f_code.co_nlocals +\
ncells + nfrees - 1
check(x, size(vh + '12P3i' + CO_MAXBLOCKS*'3i' + 'P' + extras*'P'))
# function
def func(): pass
check(func, size(h + '11P'))
class c():
@staticmethod
def foo():
pass
@classmethod
def bar(cls):
pass
# staticmethod
check(foo, size(h + 'P'))
# classmethod
check(bar, size(h + 'P'))
# generator
def get_gen(): yield 1
check(get_gen(), size(h + 'Pi2P'))
# iterator
check(iter('abc'), size(h + 'lP'))
# callable-iterator
import re
check(re.finditer('',''), size(h + '2P'))
# list
samples = [[], [1,2,3], ['1', '2', '3']]
for sample in samples:
check(sample, size(vh + 'PP') + len(sample)*self.P)
# sortwrapper (list)
# XXX
# cmpwrapper (list)
# XXX
# listiterator (list)
check(iter([]), size(h + 'lP'))
# listreverseiterator (list)
check(reversed([]), size(h + 'lP'))
# long
check(0, size(vh))
check(1, size(vh) + self.longdigit)
check(-1, size(vh) + self.longdigit)
PyLong_BASE = 2**sys.int_info.bits_per_digit
check(int(PyLong_BASE), size(vh) + 2*self.longdigit)
check(int(PyLong_BASE**2-1), size(vh) + 2*self.longdigit)
check(int(PyLong_BASE**2), size(vh) + 3*self.longdigit)
# memory
check(memoryview(b''), size(h + 'PP2P2i7P'))
# module
check(unittest, size(h + '3P'))
# None
check(None, size(h + ''))
# NotImplementedType
check(NotImplemented, size(h))
# object
check(object(), size(h + ''))
# property (descriptor object)
class C(object):
def getx(self): return self.__x
def setx(self, value): self.__x = value
def delx(self): del self.__x
x = property(getx, setx, delx, "")
check(x, size(h + '4Pi'))
# PyCapsule
# XXX
# rangeiterator
check(iter(range(1)), size(h + '4l'))
# reverse
check(reversed(''), size(h + 'PP'))
# range
check(range(1), size(h + '4P'))
check(range(66000), size(h + '4P'))
# set
# frozenset
PySet_MINSIZE = 8
samples = [[], range(10), range(50)]
s = size(h + '3P2P' + PySet_MINSIZE*'lP' + 'lP')
for sample in samples:
minused = len(sample)
if minused == 0: tmp = 1
# the computation of minused is actually a bit more complicated
# but this suffices for the sizeof test
minused = minused*2
newsize = PySet_MINSIZE
while newsize <= minused:
newsize = newsize << 1
if newsize <= 8:
check(set(sample), s)
check(frozenset(sample), s)
else:
check(set(sample), s + newsize*struct.calcsize('lP'))
check(frozenset(sample), s + newsize*struct.calcsize('lP'))
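        # Worked trace (illustrative): for range(10), minused becomes 20 and
        # newsize doubles 8 -> 16 -> 32, so the expected size grows by
        # 32*struct.calcsize('lP'); the empty sample leaves newsize at the
        # static PySet_MINSIZE table and matches the base size s.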
# setiterator
check(iter(set()), size(h + 'P3P'))
# slice
check(slice(0), size(h + '3P'))
# super
check(super(int), size(h + '3P'))
# tuple
check((), size(vh))
check((1,2,3), size(vh) + 3*self.P)
# type
# (PyTypeObject + PyNumberMethods + PyMappingMethods +
# PySequenceMethods + PyBufferProcs)
s = size(vh + 'P2P15Pl4PP9PP11PI') + size('16Pi17P 3P 10P 2P 2P')
check(int, s)
# class
class newstyleclass(object): pass
check(newstyleclass, s)
# unicode
usize = len('\0'.encode('unicode-internal'))
samples = ['', '1'*100]
# we need to test for both sizes, because we don't know if the string
# has been cached
for s in samples:
basicsize = size(h + 'PPPiP') + usize * (len(s) + 1)
check(s, basicsize)
# weakref
import weakref
check(weakref.ref(int), size(h + '2Pl2P'))
# weakproxy
# XXX
# weakcallableproxy
check(weakref.proxy(int), size(h + '2Pl2P'))
def test_pythontypes(self):
# check all types defined in Python/
h = self.header
vh = self.vheader
size = self.calcsize
check = self.check_sizeof
# _ast.AST
import _ast
check(_ast.AST(), size(h + ''))
# imp.NullImporter
import imp
check(imp.NullImporter(self.file.name), size(h + ''))
try:
raise TypeError
except TypeError:
tb = sys.exc_info()[2]
# traceback
        if tb is not None:
check(tb, size(h + '2P2i'))
# symtable entry
# XXX
# sys.flags
check(sys.flags, size(vh) + self.P * len(sys.flags))
def test_main():
test.support.run_unittest(SysModuleTest, SizeofTest)
if __name__ == "__main__":
test_main()
| apache-2.0 | -233,544,588,962,724,770 | -7,026,336,259,417,108,000 | 35.745121 | 100 | 0.574473 | false |
znick/anytask | anytask/users/models.py | 1 | 9320 | # -*- coding: utf-8 -*-
import logging
import os
from courses.models import Course
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from groups.models import Group
from mail.models import Message
from users.model_user_status import UserStatus
from years.common import get_current_year
from anytask.storage import OverwriteStorage
logger = logging.getLogger('django.request')
def get_upload_path(instance, filename):
return os.path.join('images', 'user_%d' % instance.user.id, filename)
class UserProfile(models.Model):
user = models.OneToOneField(User, db_index=True, null=False, blank=False, unique=True, related_name='profile')
middle_name = models.CharField(max_length=128, db_index=True, null=True, blank=True)
user_status = models.ManyToManyField(UserStatus, db_index=True, blank=True, related_name='users_by_status')
avatar = models.ImageField('profile picture', upload_to=get_upload_path, blank=True, null=True,
storage=OverwriteStorage())
birth_date = models.DateField(blank=True, null=True)
info = models.TextField(default="", blank=True, null=True)
phone = models.CharField(max_length=128, null=True, blank=True)
city_of_residence = models.CharField(max_length=191, null=True, blank=True)
university = models.CharField(max_length=191, null=True, blank=True)
university_in_process = models.BooleanField(null=False, blank=False, default=False)
university_class = models.CharField(max_length=191, null=True, blank=True)
university_department = models.CharField(max_length=191, null=True, blank=True)
university_year_end = models.CharField(max_length=191, null=True, blank=True)
additional_info = models.TextField(null=True, blank=True)
unit = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
position = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
academic_degree = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
academic_title = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
show_email = models.BooleanField(db_index=False, null=False, blank=False, default=True)
send_my_own_events = models.BooleanField(db_index=False, null=False, blank=False, default=False)
unread_messages = models.ManyToManyField(Message, blank=True, related_name='unread_messages')
deleted_messages = models.ManyToManyField(Message, blank=True, related_name='deleted_messages')
send_notify_messages = models.ManyToManyField(Message, blank=True, related_name='send_notify_messages')
added_time = models.DateTimeField(auto_now_add=True) # remove default=timezone.now
update_time = models.DateTimeField(auto_now=True) # remove default=timezone.now
updated_by = models.ForeignKey(User, db_index=False, null=True, blank=True)
login_via_yandex = models.BooleanField(db_index=False, null=False, blank=False, default=False)
ya_uid = models.IntegerField(null=True, blank=True)
ya_login = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_contest_uid = models.CharField(max_length=191, null=True, blank=True)
ya_contest_oauth = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_contest_login = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_passport_uid = models.CharField(max_length=191, null=True, blank=True)
ya_passport_oauth = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_passport_login = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_passport_email = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
telegram_uid = models.IntegerField(default=None, null=True, blank=True)
notify_in_telegram = models.BooleanField(default=False, null=False, blank=False)
language = models.CharField(default="ru", max_length=128, unique=False, null=True, blank=True)
time_zone = models.TextField(null=False, blank=False, default='Europe/Moscow')
location = models.TextField(null=True, blank=True, default="")
def is_current_year_student(self):
return Group.objects.filter(year=get_current_year()).filter(students=self.user).count() > 0
def __unicode__(self):
return unicode(self.user)
def is_active(self):
for status in self.user_status.all():
if status.tag == 'not_active' or status.tag == 'academic':
return False
return True
def set_status(self, new_status):
if not isinstance(new_status, UserStatus):
new_status = UserStatus.objects.get(id=new_status)
if new_status.type:
self.user_status.remove(*self.user_status.filter(type=new_status.type))
self.user_status.add(new_status)
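    # Illustrative: given two UserStatus rows of the same type, say an
    # 'active' and a 'not_active' status, set_status swaps one for the
    # other instead of accumulating both:
    #
    #   profile.set_status(not_active_status)
    #   profile.is_active()  # -> False, per the tag check above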
def get_unread_count(self):
return self.unread_messages.exclude(id__in=self.deleted_messages.all()).count()
def can_sync_contest(self):
for course in Course.objects.filter(is_active=True):
if course.get_user_group(self.user) and course.send_to_contest_from_users:
return True
return False
class UserProfileLog(models.Model):
user = models.ForeignKey(User, db_index=True, null=False, blank=False, related_name='profiles_logs_by_user')
middle_name = models.CharField(max_length=128, db_index=True, null=True, blank=True)
user_status = models.ManyToManyField(UserStatus, db_index=True, blank=True)
avatar = models.ImageField('profile picture', upload_to=get_upload_path, blank=True, null=True,
storage=OverwriteStorage())
birth_date = models.DateField(blank=True, null=True)
info = models.TextField(default="", blank=True, null=True)
phone = models.CharField(max_length=128, null=True, blank=True)
city_of_residence = models.CharField(max_length=191, null=True, blank=True)
university = models.CharField(max_length=191, null=True, blank=True)
university_in_process = models.BooleanField(null=False, blank=False, default=False)
university_class = models.CharField(max_length=50, null=True, blank=True)
university_department = models.CharField(max_length=191, null=True, blank=True)
university_year_end = models.CharField(max_length=20, null=True, blank=True)
additional_info = models.TextField(null=True, blank=True)
unit = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
position = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
academic_degree = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
academic_title = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
show_email = models.BooleanField(db_index=False, null=False, blank=False, default=True)
send_my_own_events = models.BooleanField(db_index=False, null=False, blank=False, default=False)
unread_messages = models.ManyToManyField(Message, blank=True, related_name='log_unread_messages')
deleted_messages = models.ManyToManyField(Message, blank=True, related_name='log_deleted_messages')
send_notify_messages = models.ManyToManyField(Message, blank=True, related_name='log_send_notify_messages')
added_time = models.DateTimeField(auto_now_add=True) # remove default=timezone.now
update_time = models.DateTimeField(auto_now=True) # remove default=timezone.now
login_via_yandex = models.BooleanField(db_index=False, null=False, blank=False, default=True)
ya_uid = models.IntegerField(null=True, blank=True)
ya_login = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_contest_uid = models.IntegerField(null=True, blank=True)
ya_contest_oauth = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_contest_login = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_passport_uid = models.IntegerField(null=True, blank=True)
ya_passport_oauth = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_passport_login = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
ya_passport_email = models.CharField(default="", max_length=128, unique=False, null=True, blank=True)
telegram_uid = models.IntegerField(default=None, null=True, blank=True)
notify_in_telegram = models.BooleanField(default=False, null=False, blank=False)
language = models.CharField(default="ru", max_length=128, unique=False, null=True, blank=True)
updated_by = models.ForeignKey(User, db_index=False, null=True, blank=True)
def is_current_year_student(self):
return Group.objects.filter(year=get_current_year()).filter(students=self.user).count() > 0
def __unicode__(self):
return unicode(self.user)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
| mit | -6,462,623,542,257,381,000 | 2,851,694,621,757,131,300 | 49.378378 | 114 | 0.720172 | false |
QQuick/Transcrypt | transcrypt/modules/org/transcrypt/autotester/__init__.py | 1 | 12645 | # First run a test from the command prompt, generating an HTML file.
# The output of the test is stored in a DIV.
# Also the script is automatically included in the HTML file.
# Loading the HTML file will run the script.
# This will compare the output of the script running in the browswer to the output in the DIV.
# If those two match, the test reports OK, else it reports failure.
from org.transcrypt.stubs.browser import __main__, __envir__, __pragma__
from org.transcrypt.autotester.html import HTMLGenerator, DataConverter, JSTesterUI, itemsAreEqual
# Don't import __envir__ from __base__ since it will overwrite __buildin__.__envir__ in the browser
# Import from stubs will be skipped in the browser
# ... The ice is a bit thin here
__pragma__ ('nokwargs')
import itertools
def getFileLocation(ancestor):
""" This function needs to crawl up the stack
and find out where the ancestor caller of
this function was in the source code of either the
python or javascript, depending on environment.
@param ancestor the ancestor of this function that
we want to capture file information about.
@return string indicating the file position and line number
"""
if __envir__.executor_name == __envir__.transpiler_name: # js
s = None
__pragma__('js', '{}',
'''
var e = new Error();
if ( ! e.stack ) {
console.log("MAJOR ISSUE: Browser Error lacks Stack");
} else {
s = e.stack;
}
''')
# Now we will process the stack to find the grandparent
# calling function
# @note - I'm explicitly not including a 're' module
# dependency here
frames = None
__pragma__('js', '{}',
'''
var linereg = new RegExp("\\n\\r|\\n", "g");
frames = s.toString().split(linereg);
''')
if ( frames is None or (len(frames) < 2)):
__pragma__('js', '{}', 'console.log("Failed to Split Stack");')
return("UNKNOWN:???")
# @note - if the call stack in transcrypts javascript
# translation changes then this index may need to change
# @todo - need more work here to determine this because
# this is fragile
gpFrame = frames[(ancestor*2 + 1)]
# This regex splits the string coming from the javascript
# stacktrace so that we can connect the file and line number
# runTests (http://localhost:8080/run/autotest.js:3159:8)
# func URL filename lineno:colno
# Group 1 = function
# Group 2 & 3 = protocol and hostname
# Group 4 = Path on this host (filename is at the end)
# Group 5 = lineno
# Group 6 = column number in file
frameReg = r"([^(]*)\(?([^:]*:)\/{2,3}([^:/]*:?)([^:]*):(\d+):(\d+)"
m = None
__pragma__('js', '{}',
'''
var r = new RegExp(frameReg);
m = r.exec(gpFrame);
''')
if m:
filepath = m[4]
# Split the filepath and take the last element
# to the get filename
pathParts = filepath.split("/")
filename = pathParts[len(pathParts)-1]
lineno = m[5]
return( "{}:{}".format(filename, lineno) )
else:
__pragma__('js', '{}', 'console.log("Failed to Match Frame", gpFrame);')
return("UNKNOWN:???")
#ELSE
# Needed because Transcrypt imports are compile time
__pragma__("skip")
from inspect import getframeinfo, stack
s = stack()
caller = getframeinfo(s[ancestor][0])
# Trim the file name path so that we don't get
# a lot of unnecessary content
filepath = caller.filename
# @todo - this is a hack - we should use os.path
pathParts = filepath.split('/')
filename = "/".join(pathParts[-2:])
return( "%s:%d" % (filename, caller.lineno))
__pragma__ ('noskip')
class AutoTester:
""" Main testing class for comparing CPython to Transcrypt. This
class is primarily used by calling the "check" method to confirm that
the result is the same in both environments and "done" when all checks
for a particular module have been completed.
"""
def __init__ (self, symbols = []):
self.symbols = symbols
# refDict/testDict contains the test results
# of each testlet identified by name as the key
self._currTestlet = "UNKNOWN"
self.testDict = {}
self.refDict = {}
if __envir__.executor_name == __envir__.transpiler_name:
self.ui = JSTesterUI()
else:
self.ui = None
def sortedRepr (self, any):
        # When using sets or dicts, use elements or keys
        # of one type, in sort order
def tryGetNumKey (key):
if type (key) == str: # Try to interpret key as numerical, see comment with repr function in __builtins__
try:
return int (key)
except:
try:
return float (key)
except:
return key
else:
return key
if type (any) == dict:
return '{' + ', '.join ([
'{}: {}'.format (repr (key), repr (any [key]))
for index, key in enumerate (sorted ([tryGetNumKey (key) for key in any.keys ()], key = lambda aKey: str (aKey)))
]) + '}'
elif type (any) == set:
if len (any):
return '{' + ', '.join (sorted ([str (item) for item in list (any)])) + '}'
else:
return repr (any)
elif type (any) == range:
return repr (list (any))
else:
return repr (any)
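    # For example (illustrative): sortedRepr({2: 'b', 1: 'a'}) returns
    # "{1: 'a', 2: 'b'}" and sortedRepr({3, 1, 2}) returns "{1, 2, 3}",
    # giving both environments one canonical ordering to compare.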
__pragma__('kwargs')
def check (self, *args, ancestor = 2):
""" Given a set of values from either the python or transcrypt
environments, we log the position of the check call in the test
and representative values of the passed arguments for later
comparison.
"""
position=getFileLocation(ancestor)
# N.B. stubs.browser provides a special sorting repr
item = ' '.join ([self.sortedRepr (arg) for arg in args])
if __envir__.executor_name == __envir__.transpiler_name:
self.testDict[self._currTestlet].append((position,item))
else:
self.refDict[self._currTestlet].append((position,item))
__pragma__('nokwargs')
def expectException(self, func):
""" This method attempts to call the passed method and
checks to see whether an exception was generated.
@return string indicating "no exception" or "exception"
"""
try:
func()
return("no exception")
except Exception as exc:
return("exception")
def throwToError(self, func):
""" This function invokes the passed function and then
converts an exception to an error response so that
the unit test can continue even in the case where an
exception may or may not occur.
"""
try:
return(func())
except Exception as exc:
return (None, "!!!{}".format(str(exc)))
def checkEval(self, func):
""" Check the result of the passed function which is
invoked without arguments. If this function throws an
exception, that exception is caught and converted to an error
with can be compared against the result. This allows the
user to control for exception that may or may not be generated
in the unit tests
"""
ret = self.throwToError(func)
self.check(ret, ancestor = 3)
def checkPad(self, val, count):
""" This method is to help manage flow control in unit tests and
keep all unit tests aligned
"""
for i in range(0, count):
self.check(val)
def _getTotalErrorCnt(self, testData, refData):
""" This method determines the total number of non-matching
values in the test and reference data for a particular module.
"""
errCount = 0
for i,(refPos, refItem) in enumerate(refData):
try:
testPos,testItem = testData[i]
if not itemsAreEqual (testItem, refItem):
errCount+=1
except:
errCount+=1
return(errCount)
def compare (self):
# Load the python reference data from the hidden HTML div
dc = DataConverter()
self.refDict = dc.getPythonResults()
totalErrors = 0
sKeys = sorted(self.refDict.keys())
for key in sKeys:
refData = self.refDict[key]
try:
testData = self.testDict[key]
if ( testData is None ):
raise KeyError("No Test Data Module: {}".format(key))
except KeyError:
# No Test Data found for this key - we will populate with
# errors for all ref data
self.ui.appendSeqRowName(key, len(refData))
for i,(refPos, refItem) in enumerate(refData):
self.ui.appendTableResult(key, None, None, refPos, refItem, False)
continue
            # now we know we have testData, so let's determine the total
# errors for this test module. This will allow us to both set
# the num of errors in the test module header row and set the
# rows to the appropriate initial collapsed/expanded state.
errCount= self._getTotalErrorCnt(testData, refData)
collapse = (errCount == 0)
self.ui.appendSeqRowName(key, errCount)
            # Now we will populate the table with all the rows
            # of data for the comparison
for i,(refPos, refItem) in enumerate(refData):
try:
# This will throw if testData's length is
# shorter than refData's
testPos,testItem = testData[i]
except:
testPos = None
testItem = None
self.ui.appendTableResult(
key, testPos, testItem, refPos, refItem, collapse
)
totalErrors += errCount
self.ui.setOutputStatus( totalErrors == 0 )
def _cleanName(self, name):
""" Clean the passed name of characters that won't be allowed
in CSS class or HTML id strings.
"""
# Convert testletName to replace any of the characters that
# are not acceptable in a CSS class or HTML id - this is to
# make our lives easier
# @note - I'm SPECIFICALLY not using a regex here because the
# regex engine module is still under dev and could possibly
# have issues
ret = name
invalidChars = [
'~', '!', '@', '$', '%',
'^', '&', '*', '(', ')',
'+', '=', ',', '.', '/',
"'", ';', ':', '"', '?',
'>', '<', '[', ']', '\\',
'{', '}', '|', '`', '#',
" ",
]
for ch in invalidChars:
ret = ret.replace(ch, "_")
return(ret)
def run (self, testlet, testletName):
testletName = self._cleanName(testletName)
self._currTestlet = testletName
if __envir__.executor_name == __envir__.transpiler_name:
self.testDict[self._currTestlet] = []
else:
self.refDict[self._currTestlet] = []
try:
testlet.run (self)
except Exception as exc:
if ( self.ui is not None ):
self.ui.setOutputStatus(False)
self.ui.showException(testletName, exc)
else:
# Error - No UI yet, reraise specific exception to enable finding out why
raise
def done (self):
if __envir__.executor_name == __envir__.transpiler_name:
self.compare ()
else:
fnameBase = __main__.__file__.replace ('\\', '/')
hg = HTMLGenerator(fnameBase)
hg.generate_html(self.refDict)
| apache-2.0 | 7,412,375,801,753,020,000 | -6,263,568,231,083,645,000 | 37.515625 | 129 | 0.535152 | false |
walterbender/Pippy | pippy_app.py | 2 | 59457 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007,2008,2009 Chris Ball, based on Collabora's
# "hellomesh" demo.
#
# Copyright (C) 2013,14 Walter Bender
# Copyright (C) 2013,14 Ignacio Rodriguez
# Copyright (C) 2013 Jorge Gomez
# Copyright (C) 2013,14 Sai Vineet
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Pippy Activity: A simple Python programming activity ."""
import re
import os
import subprocess
from random import uniform
import locale
import json
import sys
from shutil import copy2
from signal import SIGTERM
from gettext import gettext as _
import uuid
import dbus
from dbus.mainloop.glib import DBusGMainLoop
from gi import require_version
require_version('Gdk', '3.0')
require_version('Gtk', '3.0')
from gi.repository import Gdk
from gi.repository import Gtk
from gi.repository import GLib
from gi.repository import Pango
try:
require_version('Vte', '2.91')
except:
require_version('Vte', '2.90')
from gi.repository import Vte
from gi.repository import GObject
DBusGMainLoop(set_as_default=True)
bus = dbus.SessionBus()
from sugar3.datastore import datastore
from sugar3.activity import activity as activity
from sugar3.activity.widgets import EditToolbar
from sugar3.activity.widgets import StopButton
from sugar3.activity.activity import get_bundle_path
from sugar3.graphics.alert import Alert
from sugar3.graphics.alert import ConfirmationAlert
from sugar3.graphics.alert import NotifyAlert
from sugar3.graphics.icon import Icon
from sugar3.graphics.objectchooser import ObjectChooser
from sugar3.graphics.toggletoolbutton import ToggleToolButton
from sugar3.graphics.toolbarbox import ToolbarButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.graphics.toolbarbox import ToolbarBox
from sugar3.activity.widgets import ActivityToolbarButton
from jarabe.view.customizebundle import generate_unique_id
from activity import ViewSourceActivity
from activity import TARGET_TYPE_TEXT
from collabwrapper import CollabWrapper
from filedialog import FileDialog
from icondialog import IconDialog
from notebook import SourceNotebook, tab_object
from toolbars import DevelopViewToolbar
import sound_check
import logging
text_buffer = None
# magic prefix to use utf-8 source encoding
PYTHON_PREFIX = '''#!/usr/bin/python3
# -*- coding: utf-8 -*-
'''
# Force category names into Pootle
DEFAULT_CATEGORIES = [_('graphics'), _('math'), _('python'), _('sound'),
_('string'), _('tutorials')]
_logger = logging.getLogger('pippy-activity')
DISTUTILS_SETUP_SCRIPT = """#!/usr/bin/python3
# -*- coding: utf-8 -*-
from distutils.core import setup
setup(name='{modulename}',
version='1.0',
py_modules=[
{filenames}
],
)
""" # This is .format()'ed with the list of the file names.
DISTUTILS_SETUP_SCRIPT = """#!/usr/bin/python3
# -*- coding: utf-8 -*-
from distutils.core import setup
setup(name='{modulename}',
version='1.0',
py_modules=[
{filenames}
],
)
""" # This is .format()'ed with the list of the file names.
def _has_new_vte_api():
try:
return (Vte.MAJOR_VERSION >= 0 and
Vte.MINOR_VERSION >= 38)
except:
# Really old versions of Vte don't have VERSION
return False
def _find_object_id(activity_id, mimetype='text/x-python'):
''' Round-about way of accessing self._jobject.object_id '''
dsobjects, nobjects = datastore.find({'mime_type': [mimetype]})
for dsobject in dsobjects:
if 'activity_id' in dsobject.metadata and \
dsobject.metadata['activity_id'] == activity_id:
return dsobject.object_id
return None
class PippyActivity(ViewSourceActivity):
'''Pippy Activity as specified in activity.info'''
def __init__(self, handle):
self._pippy_instance = self
self.session_data = [] # Used to manage saving
self._loaded_session = [] # Used to manage tabs
self._py_file_loaded_from_journal = False
self._py_object_id = None
self._dialog = None
sys.path.append(os.path.join(self.get_activity_root(), 'Library'))
ViewSourceActivity.__init__(self, handle)
self._collab = CollabWrapper(self)
self._collab.message.connect(self.__message_cb)
self.set_canvas(self.initialize_display())
self.after_init()
self.connect("notify::active", self.__active_cb)
self._collab.setup()
def focus():
""" Enforce focus for the text view once. """
widget = self.get_toplevel().get_focus()
textview = self._source_tabs.get_text_view()
if widget is None and textview is not None:
textview.grab_focus()
return True
return False
GLib.timeout_add(100, focus)
def initialize_display(self):
'''Build activity toolbar with title input, share button and export
buttons
'''
toolbar_box = ToolbarBox()
activity_button = ActivityToolbarButton(self)
toolbar_box.toolbar.insert(activity_button, 0)
self.set_toolbar_box(toolbar_box)
activity_button.show()
toolbar_box.show()
activity_toolbar = activity_button.page
separator = Gtk.SeparatorToolItem()
activity_toolbar.insert(separator, -1)
separator.show()
button = ToolButton('pippy-import-doc')
button.set_tooltip(_('Import Python file to new tab'))
button.connect('clicked', self._import_py_cb)
activity_toolbar.insert(button, -1)
button.show()
button = ToolButton('pippy-export-doc')
button.set_tooltip(_('Export as Pippy document'))
button.connect('clicked', self._export_document_cb)
activity_toolbar.insert(button, -1)
button.show()
button = ToolButton('pippy-export-library')
button.set_tooltip(_('Save this file to the Pippy library'))
button.connect('clicked', self._save_as_library)
activity_toolbar.insert(button, -1)
if not self._library_writable():
button.set_sensitive(False)
button.show()
button = ToolButton('pippy-export-example')
button.set_tooltip(_('Export as new Pippy example'))
button.connect('clicked', self._export_example_cb)
activity_toolbar.insert(button, -1)
button.show()
button = ToolButton('pippy-create-bundle')
button.set_tooltip(_('Create a Sugar activity bundle'))
button.connect('clicked', self._create_bundle_cb)
activity_toolbar.insert(button, -1)
button.show()
button = ToolButton('pippy-create-distutils')
# TRANS: A distutils package is used to distribute Python modules
button.set_tooltip(_('Export as a distutils package'))
button.connect('clicked', self._export_distutils_cb)
activity_toolbar.insert(button, -1)
button.show()
self._edit_toolbar = EditToolbar()
button = ToolbarButton()
button.set_page(self._edit_toolbar)
button.props.icon_name = 'toolbar-edit'
button.props.label = _('Edit')
self.get_toolbar_box().toolbar.insert(button, -1)
button.show()
self._edit_toolbar.show()
self._edit_toolbar.undo.connect('clicked', self.__undobutton_cb)
self._edit_toolbar.redo.connect('clicked', self.__redobutton_cb)
self._edit_toolbar.copy.connect('clicked', self.__copybutton_cb)
self._edit_toolbar.paste.connect('clicked', self.__pastebutton_cb)
view_btn = ToolbarButton()
view_toolbar = DevelopViewToolbar(self)
view_btn.props.page = view_toolbar
view_btn.props.icon_name = 'toolbar-view'
view_btn.props.label = _('View')
view_toolbar.connect('font-size-changed',
self._font_size_changed_cb)
self.get_toolbar_box().toolbar.insert(view_btn, -1)
self.view_toolbar = view_toolbar
view_toolbar.show()
actions_toolbar = self.get_toolbar_box().toolbar
self._toggle_output = ToggleToolButton('tray-show')
self._toggle_output.set_tooltip(_('Show output panel'))
self._toggle_output.connect('toggled', self._toggle_output_cb)
actions_toolbar.insert(self._toggle_output, -1)
self._toggle_output.show()
self._inverted_colors = ToggleToolButton(icon_name='dark-theme')
self._inverted_colors.set_tooltip(_('Inverted Colors'))
self._inverted_colors.set_accelerator('<Ctrl><Shift>I')
self._inverted_colors.connect(
'toggled', self.__inverted_colors_toggled_cb)
actions_toolbar.insert(self._inverted_colors, -1)
self._inverted_colors.show()
icons_path = os.path.join(get_bundle_path(), 'icons')
icon_bw = Gtk.Image()
icon_bw.set_from_file(os.path.join(icons_path, 'run_bw.svg'))
icon_bw.show()
icon_color = Gtk.Image()
icon_color.set_from_file(os.path.join(icons_path, 'run_color.svg'))
icon_color.show()
button = ToolButton(label=_('Run!'))
button.props.accelerator = _('<alt>r')
button.set_icon_widget(icon_bw)
button.set_tooltip(_('Run!'))
button.connect('clicked', self._flash_cb,
dict({'bw': icon_bw, 'color': icon_color}))
button.connect('clicked', self._go_button_cb)
actions_toolbar.insert(button, -1)
button.show()
icon_bw = Gtk.Image()
icon_bw.set_from_file(os.path.join(icons_path, 'stopit_bw.svg'))
icon_bw.show()
icon_color = Gtk.Image()
icon_color.set_from_file(os.path.join(icons_path, 'stopit_color.svg'))
icon_color.show()
button = ToolButton(label=_('Stop'))
button.props.accelerator = _('<alt>s')
button.set_icon_widget(icon_bw)
button.connect('clicked', self._flash_cb,
dict({'bw': icon_bw, 'color': icon_color}))
button.connect('clicked', self._stop_button_cb)
button.set_tooltip(_('Stop'))
actions_toolbar.insert(button, -1)
button.show()
icon_bw = Gtk.Image()
icon_bw.set_from_file(os.path.join(icons_path, 'eraser_bw.svg'))
icon_bw.show()
icon_color = Gtk.Image()
icon_color.set_from_file(os.path.join(icons_path, 'eraser_color.svg'))
icon_color.show()
button = ToolButton(label=_('Clear output panel'))
button.props.accelerator = _('<alt>c')
button.set_icon_widget(icon_bw)
button.connect('clicked', self._clear_button_cb)
button.connect('clicked', self._flash_cb,
dict({'bw': icon_bw, 'color': icon_color}))
button.set_tooltip(_('Clear output panel'))
actions_toolbar.insert(button, -1)
button.show()
activity_toolbar.show()
separator = Gtk.SeparatorToolItem()
self.get_toolbar_box().toolbar.insert(separator, -1)
separator.show()
button = ToolButton('pippy-openoff')
button.set_tooltip(_('Open an example'))
button.connect('clicked', self._load_example_cb)
self.get_toolbar_box().toolbar.insert(button, -1)
button.show()
separator = Gtk.SeparatorToolItem()
separator.props.draw = False
separator.set_expand(True)
self.get_toolbar_box().toolbar.insert(separator, -1)
separator.show()
stop = StopButton(self)
self.get_toolbar_box().toolbar.insert(stop, -1)
stop.show()
vpane = Gtk.Paned.new(orientation=Gtk.Orientation.VERTICAL)
vpane.set_position(400) # setting initial position
self.paths = []
try:
if sound_check.finddir():
TAMTAM_AVAILABLE = True
else:
TAMTAM_AVAILABLE = False
except sound_check.SoundLibraryNotFoundError:
TAMTAM_AVAILABLE = False
data_path = os.path.join(get_bundle_path(), 'data')
# get default language from locale
locale_lang = locale.getdefaultlocale()[0]
if locale_lang is None:
lang = 'en'
else:
lang = locale_lang.split('_')[0]
_logger.debug(locale.getdefaultlocale())
_logger.debug(lang)
# construct the path for both
lang_path = os.path.join(data_path, lang)
en_lang_path = os.path.join(data_path, 'en')
# get all folders in lang examples
all_folders = []
if os.path.exists(lang_path):
for d in sorted(os.listdir(lang_path)):
all_folders.append(d)
# get all folders in English examples
for d in sorted(os.listdir(en_lang_path)):
# check if folder isn't already in list
if d not in all_folders:
all_folders.append(d)
for folder in all_folders:
# Skip sound folders if TAMTAM is not installed
if folder == 'sound' and not TAMTAM_AVAILABLE:
continue
direntry = {}
            # check if the dir exists in the preferred language; if so, add it
if os.path.exists(os.path.join(lang_path, folder)):
direntry = {
'name': _(folder.capitalize()),
'path': os.path.join(lang_path, folder) + '/'}
            # if not, try to see if it's in the default English path
elif os.path.exists(os.path.join(en_lang_path, folder)):
direntry = {
'name': _(folder.capitalize()),
'path': os.path.join(en_lang_path, folder) + '/'}
self.paths.append([direntry['name'], direntry['path']])
# Adding local examples
data_path = os.path.join(get_bundle_path(), 'data')
self.paths.append([_('My examples'), data_path])
self._source_tabs = SourceNotebook(self, self._collab)
self._source_tabs.connect('tab-added', self._add_source_cb)
self._source_tabs.connect('tab-renamed', self._rename_source_cb)
self._source_tabs.connect('tab-closed', self._close_source_cb)
if self._loaded_session:
for name, content, path in self._loaded_session:
self._source_tabs.add_tab(name, content, path)
else:
self.session_data.append(None)
self._source_tabs.add_tab() # New instance, ergo empty tab
vpane.add1(self._source_tabs)
self._source_tabs.show()
self._outbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
self._vte = Vte.Terminal()
self._vte.set_encoding('utf-8')
self._vte.set_size(30, 5)
self._vte.set_scrollback_lines(-1)
self._vte_set_colors('#000000', '#E7E7E7')
self._child_exited_handler = None
self._vte.connect('child_exited', self._child_exited_cb)
self._vte.connect('drag_data_received', self._vte_drop_cb)
self._outbox.pack_start(self._vte, True, True, 0)
outsb = Gtk.Scrollbar(orientation=Gtk.Orientation.VERTICAL)
outsb.set_adjustment(self._vte.get_vadjustment())
outsb.show()
self._outbox.pack_start(outsb, False, False, 0)
self._load_config()
vpane.add2(self._outbox)
self._outbox.show()
vpane.show()
return vpane
    def _vte_set_colors(self, fg, bg):
        # XXX support both Vte APIs
        if _has_new_vte_api():
            foreground = Gdk.RGBA()
            foreground.parse(fg)
            background = Gdk.RGBA()
            background.parse(bg)
        else:
            foreground = Gdk.color_parse(fg)
            background = Gdk.color_parse(bg)
        self._vte.set_colors(foreground, background, [])
def after_init(self):
self._outbox.hide()
def _font_size_changed_cb(self, widget, size):
self._source_tabs.set_font_size(size)
self._vte.set_font(
Pango.FontDescription('Monospace {}'.format(size)))
def _store_config(self):
font_size = self._source_tabs.get_font_size()
_config_file_path = os.path.join(
activity.get_activity_root(), 'data',
'config.json')
with open(_config_file_path, "w") as f:
f.write(json.dumps(font_size))
def _load_config(self):
_config_file_path = os.path.join(
activity.get_activity_root(), 'data',
'config.json')
if not os.path.isfile(_config_file_path):
return
with open(_config_file_path, "r") as f:
font_size = json.loads(f.read())
self.view_toolbar.set_font_size(font_size)
self._vte.set_font(
Pango.FontDescription('Monospace {}'.format(font_size)))
def __active_cb(self, widget, event):
_logger.debug('__active_cb %r', self.props.active)
if self.props.active:
self.resume()
else:
self.pause()
def do_visibility_notify_event(self, event):
_logger.debug('do_visibility_notify_event %r', event.get_state())
if event.get_state() == Gdk.VisibilityState.FULLY_OBSCURED:
self.pause()
else:
self.resume()
def pause(self):
# FIXME: We had resume, but no pause?
pass
def resume(self):
if self._dialog is not None:
self._dialog.set_keep_above(True)
def _toggle_output_cb(self, button):
shown = button.get_active()
if shown:
self._outbox.show_all()
self._toggle_output.set_tooltip(_('Hide output panel'))
self._toggle_output.set_icon_name('tray-hide')
else:
self._outbox.hide()
self._toggle_output.set_tooltip(_('Show output panel'))
self._toggle_output.set_icon_name('tray-show')
def __inverted_colors_toggled_cb(self, button):
if button.props.active:
self._vte_set_colors('#E7E7E7', '#000000')
self._source_tabs.set_dark()
button.set_icon_name('light-theme')
button.set_tooltip(_('Normal Colors'))
else:
self._vte_set_colors('#000000', '#E7E7E7')
self._source_tabs.set_light()
button.set_icon_name('dark-theme')
button.set_tooltip(_('Inverted Colors'))
def _load_example_cb(self, widget):
widget.set_icon_name('pippy-openon')
self._dialog = FileDialog(self.paths, self, widget)
self._dialog.show()
self._dialog.run()
path = self._dialog.get_path()
if path:
self._select_func_cb(path)
def _add_source_cb(self, button, force=False, editor_id=None):
if self._collab._leader or force:
if editor_id is None:
editor_id = str(uuid.uuid1())
self._source_tabs.add_tab(editor_id=editor_id)
self.session_data.append(None)
self._source_tabs.get_nth_page(-1).show_all()
self._source_tabs.get_text_view().grab_focus()
if self._collab._leader:
self._collab.post(dict(
action='add-source',
editor_id=editor_id))
else:
# The leader must do it first so that they can set
# up the text buffer
self._collab.post(dict(action='add-source-request'))
# Check if dark mode enabled, apply it
if self._inverted_colors.props.active:
self._source_tabs.set_dark()
def _rename_source_cb(self, notebook, page, name):
_logger.debug('_rename_source_cb %r %r' % (page, name))
self._collab.post(dict(action='rename-source', page=page, name=name))
def _close_source_cb(self, notebook, page):
_logger.debug('_close_source_cb %r' % (page))
self._collab.post(dict(action='close-source', page=page))
def __message_cb(self, collab, buddy, msg):
action = msg.get('action')
if action == 'add-source-request' and self._collab._leader:
self._add_source_cb(None, force=True)
elif action == 'add-source':
self._add_source_cb(
None, force=True, editor_id=msg.get('editor_id'))
elif action == 'rename-source':
page = msg.get('page')
name = msg.get('name')
_logger.debug('__message_cb rename-source %r %r' % (page, name))
self._source_tabs.rename_tab(page, name)
elif action == 'close-source':
page = msg.get('page')
_logger.debug('__message_cb close-source %r' % (page))
self._source_tabs.close_tab(page)
def _vte_drop_cb(self, widget, context, x, y, selection, targetType, time):
if targetType == TARGET_TYPE_TEXT:
self._vte.feed_child(selection.data)
def get_data(self):
return self._source_tabs.get_all_data()
def set_data(self, data):
# Remove initial new/blank thing
self.session_data = []
self._loaded_session = []
try:
self._source_tabs.remove_page(0)
tab_object.pop(0)
self._source_tabs.last_tab = 0
except IndexError:
pass
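        # Transpose the per-field lists from get_all_data() (names, codes,
        # paths, modified flags, editor ids) into one tuple per tab.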
list_ = list(zip(*data))
for name, code, path, modified, editor_id in list_:
self._source_tabs.add_tab(
label=name, editor_id=editor_id)
self.session_data.append(None) # maybe?
def _selection_cb(self, value):
self.save()
_logger.debug('clicked! %s' % value['path'])
_file = open(value['path'], 'r')
lines = _file.readlines()
self._add_source_cb(None)
text_buffer = self._source_tabs.get_text_buffer()
text_buffer.set_text(''.join(lines))
text_buffer.set_modified(False)
self._pippy_instance.metadata['title'] = value['name']
self._stop_button_cb(None)
self._reset_vte()
self._source_tabs.set_current_label(value['name'])
self._source_tabs.set_current_path(value['path'])
self._source_tabs.get_text_view().grab_focus()
def _select_func_cb(self, path):
values = {}
values['name'] = os.path.basename(path)
values['path'] = path
self._selection_cb(values)
def _timer_cb(self, button, icons):
button.set_icon_widget(icons['bw'])
button.show_all()
return False
def _flash_cb(self, button, icons):
button.set_icon_widget(icons['color'])
button.show_all()
GObject.timeout_add(400, self._timer_cb, button, icons)
def _clear_button_cb(self, button):
self.save()
self._stop_button_cb(None)
self._reset_vte()
self._source_tabs.get_text_view().grab_focus()
def _write_all_buffers(self, tmp_dir):
data = self._source_tabs.get_all_data()
zipdata = list(zip(data[0], data[1]))
for name, content in zipdata:
name = self._source_tabs.purify_name(name)
with open(os.path.join(tmp_dir, name), 'w') as f:
# Write utf-8 coding prefix if there's not one already
if re.match(r'coding[:=]\s*([-\w.]+)',
'\n'.join(content.splitlines()[:2])) is None:
f.write(PYTHON_PREFIX)
f.write(content)
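    # Prefix-check sketch (illustrative, not part of the original activity):
    # the regex above implements the PEP 263 two-line rule, so a buffer
    # beginning with, e.g.,
    #   #!/usr/bin/python3
    #   # -*- coding: utf-8 -*-
    # is left alone, while one starting with plain code gets PYTHON_PREFIX
    # prepended first.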
def _reset_vte(self):
self._vte.grab_focus()
self._vte.feed(b'\x1B[H\x1B[J\x1B[0;39m')
    def __undobutton_cb(self, button):
text_buffer = self._source_tabs.get_text_buffer()
if text_buffer.can_undo():
text_buffer.undo()
def __redobutton_cb(self, button):
text_buffer = self._source_tabs.get_text_buffer()
if text_buffer.can_redo():
text_buffer.redo()
def __copybutton_cb(self, button):
text_buffer = self._source_tabs.get_text_buffer()
if self._vte.get_has_selection():
self._vte.copy_clipboard()
elif text_buffer.get_has_selection():
clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
text_buffer.copy_clipboard(clipboard)
def __pastebutton_cb(self, button):
text_buffer = self._source_tabs.get_text_buffer()
clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
text_buffer.paste_clipboard(clipboard, None, True)
def _go_button_cb(self, button):
self._stop_button_cb(button) # Try stopping old code first.
self._reset_vte()
# FIXME: We're losing an odd race here
# Gtk.main_iteration(block=False)
if self._toggle_output.get_active() is False:
self._outbox.show_all()
self._toggle_output.set_active(True)
pippy_tmp_dir = '%s/tmp/' % self.get_activity_root()
self._write_all_buffers(pippy_tmp_dir)
current_file = os.path.join(
pippy_tmp_dir,
self._source_tabs.get_current_file_name())
# Write activity.py here too, to support pippy-based activities.
copy2('%s/activity.py' % get_bundle_path(),
'%s/tmp/activity.py' % self.get_activity_root())
# XXX Support both Vte APIs
if _has_new_vte_api():
vte_run = self._vte.spawn_sync
else:
vte_run = self._vte.fork_command_full
self._pid = vte_run(
Vte.PtyFlags.DEFAULT,
get_bundle_path(),
['/bin/sh', '-c', 'python3 %s; sleep 1' % current_file,
'PYTHONPATH=%s/library:%s' % (get_bundle_path(),
os.getenv('PYTHONPATH', ''))],
['PYTHONPATH=%s/library:%s' % (get_bundle_path(),
os.getenv('PYTHONPATH', ''))],
GLib.SpawnFlags.DO_NOT_REAP_CHILD,
None,
None,)
def _stop_button_cb(self, button):
try:
if self._pid is not None:
os.kill(self._pid[1], SIGTERM)
except:
pass # Process must already be dead.
def _library_writable(self):
return os.access(os.path.join(get_bundle_path(), 'library'), os.W_OK)
def _save_as_library(self, button):
library_dir = os.path.join(get_bundle_path(), 'library')
file_name = self._source_tabs.get_current_file_name()
text_buffer = self._source_tabs.get_text_buffer()
content = text_buffer.get_text(
*text_buffer.get_bounds(),
include_hidden_chars=True)
if not os.path.isdir(library_dir):
os.mkdir(library_dir)
with open(os.path.join(library_dir, file_name), 'w') as f:
f.write(content)
success = True
if success:
alert = NotifyAlert(5)
alert.props.title = _('Python File added to Library')
IMPORT_MESSAGE = _('The file you selected has been added'
' to the library. Use "import {importname}"'
                               ' to import the library.')
alert.props.msg = IMPORT_MESSAGE.format(importname=file_name[:-3])
alert.connect('response', self._remove_alert_cb)
self.add_alert(alert)
def _export_document_cb(self, __):
self.copy()
alert = NotifyAlert()
alert.props.title = _('Saved')
alert.props.msg = _('The document has been saved to journal.')
alert.connect('response', lambda x, i: self.remove_alert(x))
self.add_alert(alert)
def _remove_alert_cb(self, alert, response_id):
self.remove_alert(alert)
def _import_py_cb(self, button):
chooser = ObjectChooser()
result = chooser.run()
        if result == Gtk.ResponseType.ACCEPT:
dsitem = chooser.get_selected_object()
if dsitem.metadata['mime_type'] != 'text/x-python':
alert = NotifyAlert(5)
alert.props.title = _('Error importing Python file')
alert.props.msg = _('The file you selected is not a '
'Python file.')
alert.connect('response', self._remove_alert_cb)
self.add_alert(alert)
elif dsitem.object_id in self.session_data:
alert = NotifyAlert(5)
alert.props.title = _('Error importing Python file')
alert.props.msg = _('The file you selected is already '
'open')
alert.connect('response', self._remove_alert_cb)
self.add_alert(alert)
else:
name = dsitem.metadata['title']
file_path = dsitem.get_file_path()
content = open(file_path, 'r').read()
self._source_tabs.add_tab(name, content, None)
self._source_tabs.set_current_label(name)
self.session_data.append(dsitem.object_id)
_logger.debug('after import py: %r' % self.session_data)
chooser.destroy()
def _create_bundle_cb(self, button):
from shutil import rmtree
from tempfile import mkdtemp
# Get the name of this pippy program.
title = self._pippy_instance.metadata['title'].replace('.py', '')
title = title.replace('-', '')
if title == 'Pippy Activity':
alert = Alert()
alert.props.title = _('Save as Activity Error')
alert.props.msg = _('Please give your activity a meaningful name '
'before attempting to save it as an activity.')
ok_icon = Icon(icon_name='dialog-ok')
alert.add_button(Gtk.ResponseType.OK, _('Ok'), ok_icon)
alert.connect('response', self._dismiss_alert_cb)
self.add_alert(alert)
return
alert_icon = Alert()
ok_icon = Icon(icon_name='dialog-ok')
alert_icon.add_button(Gtk.ResponseType.OK, _('Ok'), ok_icon)
alert_icon.props.title = _('Activity icon')
alert_icon.props.msg = _('Please select an activity icon.')
self._stop_button_cb(None) # try stopping old code first.
self._reset_vte()
self._outbox.show_all()
self._vte.feed(_("Creating activity bundle...").encode())
self._vte.feed(b'\r\n')
TMPDIR = 'instance'
app_temp = mkdtemp('.activity', 'Pippy',
os.path.join(self.get_activity_root(), TMPDIR))
sourcefile = os.path.join(app_temp, 'xyzzy.py')
# invoke ourself to build the activity bundle.
_logger.debug('writing out source file: %s' % sourcefile)
def internal_callback(window=None, event=None):
icon = '%s/activity/activity-default.svg' % (get_bundle_path())
if window:
icon = window.get_icon()
self._stop_button_cb(None) # Try stopping old code first.
self._reset_vte()
self._vte.feed(_('Creating activity bundle...').encode())
self._vte.feed(b'\r\n')
TMPDIR = 'instance'
app_temp = mkdtemp('.activity', 'Pippy',
os.path.join(self.get_activity_root(), TMPDIR))
sourcefile = os.path.join(app_temp, 'xyzzy.py')
# Invoke ourself to build the activity bundle.
_logger.debug('writing out source file: %s' % sourcefile)
# Write out application code
self._write_text_buffer(sourcefile)
try:
# FIXME: vte invocation was raising errors.
                # Switched to subprocess.
output = subprocess.check_output(
['/usr/bin/python3',
'%s/pippy_app.py' % get_bundle_path(),
'-p', '%s/library' % get_bundle_path(),
'-d', app_temp, title, sourcefile, icon])
self._vte.feed(output)
self._vte.feed(b'\r\n')
self._bundle_cb(title, app_temp)
except subprocess.CalledProcessError:
rmtree(app_temp, ignore_errors=True) # clean up!
self._vte.feed(_('Save as Activity Error').encode())
self._vte.feed(b'\r\n')
raise
def _alert_response(alert, response_id):
self.remove_alert(alert)
def _dialog():
dialog = IconDialog()
dialog.connect('destroy', internal_callback)
GObject.idle_add(_dialog)
alert_icon.connect('response', _alert_response)
self.add_alert(alert_icon)
def _write_text_buffer(self, filename):
text_buffer = self._source_tabs.get_text_buffer()
start, end = text_buffer.get_bounds()
text = text_buffer.get_text(start, end, True)
with open(filename, 'w') as f:
# Write utf-8 coding prefix if there's not one already
if re.match(r'coding[:=]\s*([-\w.]+)',
'\n'.join(text.splitlines()[:2])) is None:
f.write(PYTHON_PREFIX)
for line in text:
f.write(line)
def _export_distutils_cb(self, button):
app_temp = os.path.join(self.get_activity_root(), 'instance')
data = self._source_tabs.get_all_data()
for filename, content in zip(data[0], data[1]):
fileobj = open(os.path.join(app_temp, filename), 'w')
fileobj.write(content)
fileobj.close()
filenames = ','.join([("'" + name[:-3] + "'") for name in data[0]])
title = self._pippy_instance.metadata['title']
        if title == _('Pippy Activity'):
alert = Alert()
alert.props.title = _('Save as distutils package error')
alert.props.msg = _('Please give your activity a meaningful '
'name before attempting to save it '
                                'as a distutils package.')
ok_icon = Icon(icon_name='dialog-ok')
alert.add_button(Gtk.ResponseType.OK, _('Ok'), ok_icon)
alert.connect('response', self._dismiss_alert_cb)
self.add_alert(alert)
return
found = next((
name for name in data[0]
if name != self._source_tabs.purify_name(name)),
None)
if found is not None:
example = self._source_tabs.purify_name(found)
alert = Alert()
alert.props.title = _('Save as distutils package error')
alert.props.msg = _('Please give your source files a proper '
'name, for example "%s", before attempting to '
                                'save it as a distutils package.') % example
ok_icon = Icon(icon_name='dialog-ok')
alert.add_button(Gtk.ResponseType.OK, _('Ok'), ok_icon)
alert.connect('response', self._dismiss_alert_cb)
self.add_alert(alert)
return
setup_script = DISTUTILS_SETUP_SCRIPT.format(modulename=title,
filenames=filenames)
setupfile = open(os.path.join(app_temp, 'setup.py'), 'w')
setupfile.write(setup_script)
setupfile.close()
os.chdir(app_temp)
subprocess.check_output(
['/usr/bin/python3', os.path.join(app_temp, 'setup.py'), 'sdist',
'-v'])
# Hand off to journal
os.chmod(app_temp, 0o777)
jobject = datastore.create()
metadata = {
'title': '%s distutils bundle' % title,
'title_set_by_user': '1',
'mime_type': 'application/x-gzip',
}
for k, v in list(metadata.items()):
# The dict.update method is missing =(
jobject.metadata[k] = v
tarname = 'dist/{modulename}-1.0.tar.gz'.format(modulename=title)
jobject.file_path = os.path.join(app_temp, tarname)
datastore.write(jobject)
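    # Template sketch (assumption: DISTUTILS_SETUP_SCRIPT is defined elsewhere
    # in this file). A minimal compatible template could look like:
    #   DISTUTILS_SETUP_SCRIPT = '''from distutils.core import setup
    #   setup(name='{modulename}', version='1.0', py_modules=[{filenames}])'''
    # matching the dist/{modulename}-1.0.tar.gz name handed to the journal
    # above.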
def _export_example_cb(self, button):
# Get the name of this pippy program.
title = self._pippy_instance.metadata['title']
if title == _('Pippy Activity'):
alert = Alert()
alert.props.title = _('Save as Example Error')
alert.props.msg = \
_('Please give your activity a meaningful '
'name before attempting to save it as an example.')
ok_icon = Icon(icon_name='dialog-ok')
alert.add_button(Gtk.ResponseType.OK, _('Ok'), ok_icon)
alert.connect('response', self._dismiss_alert_cb)
self.add_alert(alert)
return
self._stop_button_cb(None) # Try stopping old code first.
self._reset_vte()
self._vte.feed(_('Creating example...').encode())
self._vte.feed(b'\r\n')
local_data = os.path.join(os.environ['SUGAR_ACTIVITY_ROOT'], 'data')
local_file = os.path.join(local_data, title)
if os.path.exists(local_file):
alert = ConfirmationAlert()
alert.props.title = _('Save as Example Warning')
alert.props.msg = _('This example already exists. '
'Do you want to overwrite it?')
alert.connect('response', self._confirmation_alert_cb, local_file)
self.add_alert(alert)
else:
self.write_file(local_file)
self._reset_vte()
self._vte.feed(_('Saved as example.').encode())
self._vte.feed(b'\r\n')
self._add_to_example_list(local_file)
def _child_exited_cb(self, *args):
'''Called whenever a child exits. If there's a handler, run it.'''
h, self._child_exited_handler = self._child_exited_handler, None
if h is not None:
h()
def _bundle_cb(self, title, app_temp):
'''Called when we're done building a bundle for a source file.'''
from sugar3 import profile
from shutil import rmtree
try:
# Find the .xo file: were we successful?
bundle_file = [f for f in os.listdir(app_temp)
if f.endswith('.xo')]
if len(bundle_file) != 1:
_logger.debug("Couldn't find bundle: %s" %
str(bundle_file))
self._vte.feed(b'\r\n')
self._vte.feed(_('Error saving activity to journal.').encode())
self._vte.feed(b'\r\n')
return # Something went wrong.
# Hand off to journal
os.chmod(app_temp, 0o755)
jobject = datastore.create()
metadata = {
'title': '%s Bundle' % title,
'title_set_by_user': '1',
'buddies': '',
'preview': '',
'icon-color': profile.get_color().to_string(),
'mime_type': 'application/vnd.olpc-sugar',
}
for k, v in list(metadata.items()):
# The dict.update method is missing =(
jobject.metadata[k] = v
jobject.file_path = os.path.join(app_temp, bundle_file[0])
datastore.write(jobject)
self._vte.feed(b'\r\n')
self._vte.feed(_('Activity saved to journal.').encode())
self._vte.feed(b'\r\n')
self.journal_show_object(jobject.object_id)
jobject.destroy()
finally:
rmtree(app_temp, ignore_errors=True) # clean up!
def _dismiss_alert_cb(self, alert, response_id):
self.remove_alert(alert)
def _confirmation_alert_cb(self, alert, response_id, local_file):
# Callback for conf alert
self.remove_alert(alert)
        if response_id == Gtk.ResponseType.OK:
self.write_file(local_file)
self._reset_vte()
self._vte.feed(_('Saved as example.').encode())
self._vte.feed(b'\r\n')
else:
self._reset_vte()
def _add_to_example_list(self, local_file):
entry = {'name': _(os.path.basename(local_file)),
'path': local_file}
_iter = self.model.insert_before(self.example_iter, None)
self.model.set_value(_iter, 0, entry)
self.model.set_value(_iter, 1, entry['name'])
def is_example(self, path):
if path is None:
return False
for name in self.paths:
if path.startswith(name[1]):
return True
return False
def _get_pippy_object_id(self):
''' We need the object_id of this pippy instance to save in the .py
file metadata'''
if self._pippy_instance == self:
return _find_object_id(self.metadata['activity_id'],
mimetype='application/json')
else:
return self._pippy_instance.get_object_id()
def write_file(self, file_path):
pippy_id = self._get_pippy_object_id()
data = self._source_tabs.get_all_data()
zipped_data = list(zip(*data))
session_list = []
app_temp = os.path.join(self.get_activity_root(), 'instance')
tmpfile = os.path.join(app_temp, 'pippy-tempfile-storing.py')
if not self.session_data:
self.session_data.append(None)
for zipdata, content in zip(zipped_data, self.session_data):
_logger.debug('Session data %r', content)
name, python_code, path, modified, editor_id = zipdata
if content is not None and content == self._py_object_id:
_logger.debug('saving to self')
self.metadata['title'] = name
self.metadata['mime_type'] = 'text/x-python'
if pippy_id is not None:
self.metadata['pippy_instance'] = pippy_id
__file = open(file_path, 'w')
__file.write(python_code)
__file.close()
session_list.append([name, content])
elif content is not None and content[0] != '/':
_logger.debug('Saving an existing dsobject')
dsobject = datastore.get(content)
dsobject.metadata['title'] = name
dsobject.metadata['mime_type'] = 'text/x-python'
if pippy_id is not None:
dsobject.metadata['pippy_instance'] = pippy_id
__file = open(tmpfile, 'w')
__file.write(python_code)
__file.close()
dsobject.set_file_path(tmpfile)
datastore.write(dsobject)
session_list.append([name, dsobject.object_id])
elif modified:
_logger.debug('Creating new dsobj for modified code')
if len(python_code) > 0:
dsobject = datastore.create()
dsobject.metadata['title'] = name
dsobject.metadata['mime_type'] = 'text/x-python'
if pippy_id is not None:
dsobject.metadata['pippy_instance'] = pippy_id
__file = open(tmpfile, 'w')
__file.write(python_code)
__file.close()
dsobject.set_file_path(tmpfile)
datastore.write(dsobject)
session_list.append([name, dsobject.object_id])
# If there are multiple Nones, we need to find
# the correct one.
if content is None and \
self.session_data.count(None) > 1:
i = zipped_data.index(zipdata)
else:
i = self.session_data.index(content)
self.session_data[i] = dsobject.object_id
elif content is not None or path is not None:
_logger.debug('Saving reference to sample file')
if path is None: # Should not happen, but just in case...
_logger.error('path is None.')
session_list.append([name, content])
else:
session_list.append([name, path])
else: # Should not happen, but just in case...
_logger.debug('Nothing to save in tab? %s %s %s %s' %
(str(name), str(python_code), str(path),
str(content)))
self._pippy_instance.metadata['mime_type'] = 'application/json'
pippy_data = json.dumps(session_list)
# Override file path if we created a new Pippy instance
if self._py_file_loaded_from_journal:
file_path = os.path.join(app_temp, 'pippy-temp-instance-data')
_file = open(file_path, 'w')
_file.write(pippy_data)
_file.close()
if self._py_file_loaded_from_journal:
_logger.debug('setting pippy instance file_path to %s' %
file_path)
self._pippy_instance.set_file_path(file_path)
datastore.write(self._pippy_instance)
self._store_config()
def read_file(self, file_path):
# Either we are opening Python code or a list of objects
# stored (json-encoded) in a Pippy instance, or a shared
# session.
# Remove initial new/blank thing
self.session_data = []
self._loaded_session = []
try:
self._source_tabs.remove_page(0)
tab_object.pop(0)
self._source_tabs.last_tab = 0
except IndexError:
pass
if self.metadata['mime_type'] == 'text/x-python':
_logger.debug('Loading Python code')
# Opening some Python code directly
try:
text = open(file_path).read()
except:
alert = NotifyAlert(10)
alert.props.title = _('Error')
alert.props.msg = _('Error reading data.')
def _remove_alert(alert, response_id):
self.remove_alert(alert)
alert.connect("response", _remove_alert)
self.add_alert(alert)
return
self._py_file_loaded_from_journal = True
# Discard the '#!/usr/bin/python3' and 'coding: utf-8' lines,
# if present
python_code = re.sub(r'^' + re.escape(PYTHON_PREFIX), '', text)
name = self.metadata['title']
self._loaded_session.append([name, python_code, None])
# Since we loaded Python code, we need to create (or
# restore) a Pippy instance
if 'pippy_instance' in self.metadata:
_logger.debug('found a pippy instance: %s' %
self.metadata['pippy_instance'])
try:
self._pippy_instance = datastore.get(
self.metadata['pippy_instance'])
except:
                    _logger.debug('Cannot find old Pippy instance: %s' %
                                  self.metadata['pippy_instance'])
self._pippy_instance = None
if self._pippy_instance in [self, None]:
self._pippy_instance = datastore.create()
self._pippy_instance.metadata['title'] = self.metadata['title']
self._pippy_instance.metadata['mime_type'] = 'application/json'
self._pippy_instance.metadata['activity'] = 'org.laptop.Pippy'
datastore.write(self._pippy_instance)
self.metadata['pippy_instance'] = \
self._pippy_instance.get_object_id()
_logger.debug('get_object_id %s' %
self.metadata['pippy_instance'])
# We need the Pippy file path so we can read the session data
file_path = self._pippy_instance.get_file_path()
# Finally, add this Python object to the session data
self._py_object_id = _find_object_id(self.metadata['activity_id'])
self.session_data.append(self._py_object_id)
_logger.debug('session_data: %s' % self.session_data)
if self.metadata['mime_type'] == 'application/json' or \
self._pippy_instance != self:
# Reading file list from Pippy instance
_logger.debug('Loading Pippy instance')
if len(file_path) == 0:
return
data = json.loads(open(file_path).read())
for name, content in data:
# content is either a datastore id or the path to some
# sample code
if content is not None and content[0] == '/': # a path
try:
python_code = open(content).read()
except:
_logger.error('Could not open %s; skipping' % content)
path = content
elif content != self._py_object_id:
try:
dsobject = datastore.get(content)
if 'mime_type' not in dsobject.metadata:
_logger.error(
'Warning: %s missing mime_type' % content)
elif dsobject.metadata['mime_type'] != 'text/x-python':
_logger.error(
'Warning: %s has unexpected mime_type %s' %
(content, dsobject.metadata['mime_type']))
except:
# Could be that the item has subsequently been
# deleted from the datastore, so we skip it.
_logger.error('Could not open %s; skipping' % content)
continue
try:
python_code = open(dsobject.get_file_path()).read()
except:
# Malformed bundle?
_logger.error('Could not open %s; skipping' %
dsobject.get_file_path())
continue
path = None
# Queue up the creation of the tabs...
# And add this content to the session data
if content not in self.session_data:
self.session_data.append(content)
self._loaded_session.append([name, python_code, path])
# Create tabs from the datastore, else add a blank tab
if self._loaded_session:
for name, content, path in self._loaded_session:
self._source_tabs.add_tab(name, content, path)
else:
self._source_tabs.add_tab()
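# --- illustrative sketch (not part of the original activity) ---
# write_file() above serializes the open tabs as a JSON list of
# [name, content] pairs, where content is a datastore object id, an
# absolute path to a sample file, or None; read_file() walks the same
# shape. The literal values below are made up for illustration.
def _example_session_payload():
    import json
    session_list = [
        ['my_program.py', 'f2f841db-90be-4f05-a1ef-000000000000'],
        ['sort_demo.py', '/usr/share/pippy/examples/sort_demo.py'],
    ]
    return json.dumps(session_list)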
# TEMPLATES AND INLINE FILES
ACTIVITY_INFO_TEMPLATE = '''
[Activity]
name = %(title)s
bundle_id = %(bundle_id)s
exec = sugar-activity3 %(class)s
icon = activity-icon
activity_version = %(version)d
mime_types = %(mime_types)s
show_launcher = yes
%(extra_info)s
'''
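# Illustrative rendering of the template above; every value here is a
# hypothetical placeholder mirroring the %(...)s markers.
def _example_activity_info():
    return ACTIVITY_INFO_TEMPLATE % {
        'title': 'My Pippy Program',
        'bundle_id': 'org.sugarlabs.pippy.example1234',
        'class': 'pippy_app.PippyActivity',
        'version': 1,
        'mime_types': 'text/x-python',
        'extra_info': '',
    }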
PIPPY_ICON = """<?xml version="1.0" ?><!DOCTYPE svg PUBLIC '-//W3C//DTD SVG
1.1//EN' 'http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd' [
<!ENTITY stroke_color "#010101">
<!ENTITY fill_color "#FFFFFF">
]>
<svg enable-background="new 0 0 55 55" height="55px" version="1.1"
viewBox="0 0 55 55" width="55px" x="0px" xml:space="preserve"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" y="0px"><g display="block"
id="activity-pippy">
<path d="M28.497,48.507
c5.988,0,14.88-2.838,14.88-11.185
c0-9.285-7.743-10.143-10.954-11.083
c-3.549-0.799-5.913-1.914-6.055-3.455
c-0.243-2.642,1.158-3.671,3.946-3.671
c0,0,6.632,3.664,12.266,0.74
c1.588-0.823,4.432-4.668,4.432-7.32
c0-2.653-9.181-5.719-11.967-5.719
c-2.788,0-5.159,3.847-5.159,3.847
c-5.574,0-11.149,5.306-11.149,10.612
c0,5.305,5.333,9.455,11.707,10.612
c2.963,0.469,5.441,2.22,4.878,5.438
c-0.457,2.613-2.995,5.306-8.361,5.306
c-4.252,0-13.3-0.219-14.745-4.079
c-0.929-2.486,0.168-5.205,1.562-5.205l-0.027-0.16
c-1.42-0.158-5.548,0.16-5.548,5.465
C8.202,45.452,17.347,48.507,28.497,48.507z"
fill="&fill_color;" stroke="&stroke_color;"
stroke-linecap="round" stroke-linejoin="round" stroke-width="3.5"/>
<path d="M42.579,19.854c-2.623-0.287-6.611-2-7.467-5.022" fill="none"
stroke="&stroke_color;" stroke-linecap="round" stroke-width="3"/>
<circle cx="35.805" cy="10.96" fill="&stroke_color;" r="1.676"/>
</g></svg><!-- " -->
"""
# ACTIVITY META-INFORMATION
# this is used by Pippy to generate a bundle for itself.
def pippy_activity_version():
'''Returns the version number of the generated activity bundle.'''
return 39
def pippy_activity_extra_files():
'''Returns a map of 'extra' files which should be included in the
generated activity bundle.'''
# Cheat here and generate the map from the fs contents.
extra = {}
bp = get_bundle_path()
for d in ['po', 'data', 'post']: # everybody gets library
for root, dirs, files in os.walk(os.path.join(bp, d)):
for name in files:
fn = os.path.join(root, name).replace(bp + '/', '')
extra[fn] = open(os.path.join(root, name), 'r').read()
return extra
def pippy_activity_news():
'''Return the NEWS file for this activity.'''
# Cheat again.
return open(os.path.join(get_bundle_path(), 'NEWS')).read()
def pippy_activity_icon():
'''Return an SVG document specifying the icon for this activity.'''
return PIPPY_ICON
def pippy_activity_class():
'''Return the class which should be started to run this activity.'''
return 'pippy_app.PippyActivity'
def pippy_activity_bundle_id():
'''Return the bundle_id for the generated activity.'''
return 'org.laptop.Pippy'
def pippy_activity_mime_types():
'''Return the mime types handled by the generated activity, as a list.'''
return ['text/x-python']
def pippy_activity_extra_info():
return '''
license = GPLv2+
update_url = http://activities.sugarlabs.org '''
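# Discovery sketch (illustrative): the bundler in main() below probes the
# source module for optional pippy_activity_<field> hooks like the ones
# above and lets each one it finds override the default bundle metadata.
def _example_collect_bundle_info(modobj, defaults):
    info = dict(defaults)
    for field in info:
        hook = getattr(modobj, 'pippy_activity_%s' % field, None)
        if hook is not None:
            info[field] = hook()
    return info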
# ACTIVITY BUNDLER
def main():
'''Create a bundle from a pippy-style source file'''
from optparse import OptionParser
from pyclbr import readmodule_ex
from tempfile import mkdtemp
from shutil import copytree, copy2, rmtree
from sugar3.activity import bundlebuilder
parser = OptionParser(usage='%prog [options] [title] [sourcefile] [icon]')
parser.add_option('-d', '--dir', dest='dir', default='.', metavar='DIR',
help='Put generated bundle in the specified directory.')
parser.add_option('-p', '--pythonpath', dest='path', action='append',
default=[], metavar='DIR',
help='Append directory to python search path.')
(options, args) = parser.parse_args()
if len(args) < 3:
parser.error('The title, sourcefile and icon arguments are required.')
title = args[0]
sourcefile = args[1]
icon_path = args[2]
pytitle = re.sub(r'[^A-Za-z0-9_]', '', title)
if re.match(r'[0-9]', pytitle) is not None:
pytitle = '_' + pytitle # first character cannot be numeric
# First take a gander at the source file and see if it's got extra info
# for us.
sourcedir, basename = os.path.split(sourcefile)
if not sourcedir:
sourcedir = '.'
module, ext = os.path.splitext(basename)
f = open(icon_path, 'r')
icon = f.read()
f.close()
# Things we look for:
bundle_info = {
'version': 1,
'extra_files': {},
'news': 'No news.',
'icon': icon,
'class': 'activity.VteActivity',
'bundle_id': ('org.sugarlabs.pippy.%s%d' %
(generate_unique_id(),
int(round(uniform(1000, 9999), 0)))),
'mime_types': '',
'extra_info': '',
}
# Are any of these things in the module?
try_import = False
info = readmodule_ex(module, [sourcedir] + options.path)
for func in list(bundle_info.keys()):
p_a_func = 'pippy_activity_%s' % func
if p_a_func in info:
try_import = True
if try_import:
# Yes, let's try to execute them to get better info about our bundle
oldpath = list(sys.path)
sys.path[0:0] = [sourcedir] + options.path
modobj = __import__(module)
for func in list(bundle_info.keys()):
p_a_func = 'pippy_activity_%s' % func
if p_a_func in modobj.__dict__:
bundle_info[func] = modobj.__dict__[p_a_func]()
sys.path = oldpath
# Okay! We've done the hard part. Now let's build a bundle.
# Create a new temp dir in which to create the bundle.
app_temp = mkdtemp('.activity', 'Pippy') # Hope TMPDIR is set correctly!
bundle = get_bundle_path()
try:
copytree('%s/library' % bundle, '%s/library' % app_temp)
copy2('%s/activity.py' % bundle, '%s/activity.py' % app_temp)
# create activity.info file.
bundle_info['title'] = title
bundle_info['pytitle'] = pytitle
# put 'extra' files in place.
extra_files = {
'activity/activity.info': ACTIVITY_INFO_TEMPLATE % bundle_info,
'activity/activity-icon.svg': bundle_info['icon'],
'NEWS': bundle_info['news'],
}
extra_files.update(bundle_info['extra_files'])
for path, contents in list(extra_files.items()):
# safety first!
assert '..' not in path
dirname, filename = os.path.split(path)
dirname = os.path.join(app_temp, dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(os.path.join(dirname, filename), 'w') as f:
f.write(contents)
# Put script into $app_temp/pippy_app.py
copy2(sourcefile, '%s/pippy_app.py' % app_temp)
# Invoke bundle builder
olddir = os.getcwd()
oldargv = sys.argv
os.chdir(app_temp)
sys.argv = ['setup.py', 'dist_xo']
print('\r\nStarting bundlebuilder\r\n')
bundlebuilder.start()
sys.argv = oldargv
os.chdir(olddir)
# Move to destination directory.
src = '%s/dist/%s-%d.xo' % (app_temp, pytitle, bundle_info['version'])
dst = '%s/%s-%d.xo' % (options.dir, pytitle, bundle_info['version'])
if not os.path.exists(src):
print('Cannot find %s\r\n' % (src))
else:
copy2(src, dst)
finally:
rmtree(app_temp, ignore_errors=True)
print('Finally\r\n')
if __name__ == '__main__':
from gettext import gettext as _
if False: # Change this to True to test within Pippy
sys.argv = sys.argv + ['-d', '/tmp', 'Pippy',
'/home/olpc/pippy_app.py']
print(_('Working...'))
sys.stdout.flush()
main()
print(_('done!'))
sys.exit(0)
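# Command-line sketch (paths hypothetical): bundling a source file directly,
#   python3 pippy_app.py -d /tmp -p extra/libdir 'My Title' my_source.py icon.svg
# which, on success, copies /tmp/MyTitle-1.xo out of the temporary build dir
# (version 1 unless the source module overrides pippy_activity_version).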
| gpl-3.0 | 3,572,272,523,267,175,400 | -585,729,146,103,818,200 | 37.860784 | 79 | 0.564677 | false |
meteorcloudy/tensorflow | tensorflow/python/kernel_tests/io_ops_test.py | 23 | 4898 | # -*- coding: utf-8 -*-
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.ops.io_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import tempfile
from tensorflow.python.ops import io_ops
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class IoOpsTest(test.TestCase):
def testReadFile(self):
cases = ['', 'Some contents', 'Неки садржаји на српском']
for contents in cases:
contents = compat.as_bytes(contents)
with tempfile.NamedTemporaryFile(
prefix='ReadFileTest', dir=self.get_temp_dir(), delete=False) as temp:
temp.write(contents)
with self.test_session():
read = io_ops.read_file(temp.name)
self.assertEqual([], read.get_shape())
self.assertEqual(read.eval(), contents)
os.remove(temp.name)
def testWriteFile(self):
cases = ['', 'Some contents']
for contents in cases:
contents = compat.as_bytes(contents)
with tempfile.NamedTemporaryFile(
prefix='WriteFileTest', dir=self.get_temp_dir(),
delete=False) as temp:
pass
with self.test_session() as sess:
w = io_ops.write_file(temp.name, contents)
sess.run(w)
with open(temp.name, 'rb') as f:
file_contents = f.read()
self.assertEqual(file_contents, contents)
os.remove(temp.name)
def testWriteFileCreateDir(self):
cases = ['', 'Some contents']
for contents in cases:
contents = compat.as_bytes(contents)
subdir = os.path.join(self.get_temp_dir(), 'subdir1')
filepath = os.path.join(subdir, 'subdir2', 'filename')
with self.test_session() as sess:
w = io_ops.write_file(filepath, contents)
sess.run(w)
with open(filepath, 'rb') as f:
file_contents = f.read()
self.assertEqual(file_contents, contents)
shutil.rmtree(subdir)
def _subset(self, files, indices):
return set(
compat.as_bytes(files[i].name) for i in range(len(files))
if i in indices)
def testMatchingFiles(self):
cases = [
'ABcDEF.GH', 'ABzDEF.GH', 'ABasdfjklDEF.GH', 'AB3DEF.GH', 'AB4DEF.GH',
'ABDEF.GH', 'XYZ'
]
files = [
tempfile.NamedTemporaryFile(
prefix=c, dir=self.get_temp_dir(), delete=True) for c in cases
]
with self.test_session():
# Test exact match without wildcards.
for f in files:
self.assertEqual(
io_ops.matching_files(f.name).eval(), compat.as_bytes(f.name))
# We will look for files matching "ABxDEF.GH*" where "x" is some wildcard.
directory_path = files[0].name[:files[0].name.find(cases[0])]
pattern = directory_path + 'AB%sDEF.GH*'
self.assertEqual(
set(io_ops.matching_files(pattern % 'z').eval()),
self._subset(files, [1]))
self.assertEqual(
set(io_ops.matching_files(pattern % '?').eval()),
self._subset(files, [0, 1, 3, 4]))
self.assertEqual(
set(io_ops.matching_files(pattern % '*').eval()),
self._subset(files, [0, 1, 2, 3, 4, 5]))
# NOTE(mrry): Windows uses PathMatchSpec to match file patterns, which
# does not support the following expressions.
if os.name != 'nt':
self.assertEqual(
set(io_ops.matching_files(pattern % '[cxz]').eval()),
self._subset(files, [0, 1]))
self.assertEqual(
set(io_ops.matching_files(pattern % '[0-9]').eval()),
self._subset(files, [3, 4]))
# Test an empty list input.
self.assertItemsEqual(io_ops.matching_files([]).eval(), [])
# Test multiple exact filenames.
self.assertItemsEqual(
io_ops.matching_files([
files[0].name, files[1].name, files[2].name]).eval(),
self._subset(files, [0, 1, 2]))
# Test multiple globs.
self.assertItemsEqual(
io_ops.matching_files([
pattern % '?', directory_path + 'X?Z*']).eval(),
self._subset(files, [0, 1, 3, 4, 6]))
for f in files:
f.close()
if __name__ == '__main__':
test.main()
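# Usage sketch (illustrative, not part of the test suite); paths are made up
# and a TF1-style session is assumed, as in the cases above:
#   with tf.Session():
#       contents = io_ops.read_file('/tmp/in.txt').eval()
#       io_ops.write_file('/tmp/out/nested/copy.txt', contents).eval()
#       matches = io_ops.matching_files('/tmp/*.txt').eval()
# write_file creates missing parent directories, as testWriteFileCreateDir
# shows above.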
| apache-2.0 | -5,030,257,302,920,582,000 | 5,505,030,604,017,624,000 | 33.835714 | 80 | 0.608981 | false |
richardcs/ansible | lib/ansible/modules/network/f5/bigip_software_update.py | 11 | 9756 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_software_update
short_description: Manage the software update settings of a BIG-IP
description:
- Manage the software update settings of a BIG-IP.
version_added: 2.5
options:
auto_check:
description:
- Specifies whether to automatically check for updates on the F5
Networks downloads server.
type: bool
auto_phone_home:
description:
- Specifies whether to automatically send phone home data to the
F5 Networks PhoneHome server.
type: bool
frequency:
description:
- Specifies the schedule for the automatic update check.
choices:
- daily
- monthly
- weekly
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Enable automatic update checking
bigip_software_update:
auto_check: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Disable automatic update checking and phoning home
bigip_software_update:
auto_check: no
auto_phone_home: no
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
auto_check:
description: Whether the system checks for updates automatically.
returned: changed
type: bool
sample: True
auto_phone_home:
description: Whether the system automatically sends phone home data.
returned: changed
type: bool
sample: True
frequency:
description: Frequency of auto update checks
returned: changed
type: string
sample: weekly
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
class Parameters(AnsibleF5Parameters):
api_map = {
'autoCheck': 'auto_check',
'autoPhonehome': 'auto_phone_home'
}
api_attributes = [
'autoCheck', 'autoPhonehome', 'frequency',
]
updatables = [
'auto_check', 'auto_phone_home', 'frequency',
]
returnables = [
'auto_check', 'auto_phone_home', 'frequency',
]
class ApiParameters(Parameters):
@property
def auto_check(self):
if self._values['auto_check'] is None:
return None
return self._values['auto_check']
class ModuleParameters(Parameters):
@property
def auto_check(self):
if self._values['auto_check'] is None:
return None
elif self._values['auto_check'] is True:
return 'enabled'
else:
return 'disabled'
@property
def auto_phone_home(self):
if self._values['auto_phone_home'] is None:
return None
elif self._values['auto_phone_home'] is True:
return 'enabled'
else:
return 'disabled'
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def auto_check(self):
if self._values['auto_check'] == 'enabled':
return True
elif self._values['auto_check'] == 'disabled':
return False
@property
def auto_phone_home(self):
if self._values['auto_phone_home'] == 'enabled':
return True
elif self._values['auto_phone_home'] == 'disabled':
return False
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
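# Behavior sketch (illustrative): compare() returns the desired value only
# when it differs from what the device reports, and None when they match.
# With hypothetical values:
#   want = ModuleParameters(params={'auto_check': True})   # -> 'enabled'
#   have = ApiParameters(params={'autoCheck': 'disabled'})
#   Difference(want, have).compare('auto_check')           # -> 'enabled'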
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = ModuleParameters(params=self.module.params)
self.changes = UsableChanges()
def exec_module(self): # lgtm [py/similar-function]
result = dict()
changed = self.update()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/software/update/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/sys/software/update/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
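# Payload sketch (illustrative): with the api_attributes above, the PATCH in
# update_on_device() sends tmsh-style values such as
#   {"autoCheck": "enabled", "autoPhonehome": "disabled", "frequency": "weekly"}
# to /mgmt/tm/sys/software/update/ on the target device.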
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
auto_check=dict(
type='bool'
),
auto_phone_home=dict(
type='bool'
),
frequency=dict(
choices=['daily', 'monthly', 'weekly']
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,553,814,608,809,975,000 | -8,670,577,242,818,090,000 | 27.360465 | 91 | 0.606396 | false |
joshuahoman/vivisect | vstruct/qt/__init__.py | 6 | 5522 | '''
Some utils for QT code which uses vstruct...
'''
import vqt.tree as vq_tree
from PyQt4 import QtCore, QtGui
from vqt.main import idlethread, idlethreadsync
class VQStructNamespacesView(vq_tree.VQTreeView):
def __init__(self, parent=None):
vq_tree.VQTreeView.__init__(self, parent=parent)
#model = vq_tree.VTreeView(parent=self, columns=('Namespace', 'Structure'))
class VQStructSelectView(vq_tree.VQTreeView):
def __init__(self, vsbuilder, parent=None):
vq_tree.VQTreeView.__init__(self, parent=parent)
self.vsbuilder = vsbuilder
model = vq_tree.VQTreeModel(parent=self, columns=('Namespace', 'Structure'))
for nsname in vsbuilder.getVStructNamespaceNames():
pnode = model.append((nsname, ''))
pnode.structname = None
for sname in vsbuilder.getVStructNames(namespace=nsname):
spnode = model.append(('', sname), parent=pnode)
spnode.structname = '%s.%s' % (nsname, sname)
for sname in vsbuilder.getVStructNames():
node = model.append( ('', sname ) )
node.structname = sname
self.setModel(model)
class VQStructSelectDialog(QtGui.QDialog):
def __init__(self, vsbuilder, parent=None):
QtGui.QDialog.__init__(self, parent=parent)
self.structname = None
self.setWindowTitle('Select a structure...')
vlyt = QtGui.QVBoxLayout()
hlyt = QtGui.QHBoxLayout()
self.structtree = VQStructSelectView(vsbuilder, parent=self)
hbox = QtGui.QWidget(parent=self)
ok = QtGui.QPushButton("Ok", parent=hbox)
cancel = QtGui.QPushButton("Cancel", parent=hbox)
self.structtree.doubleClicked.connect( self.dialog_activated )
ok.clicked.connect(self.dialog_ok)
cancel.clicked.connect(self.dialog_cancel)
hlyt.addStretch(1)
hlyt.addWidget(cancel)
hlyt.addWidget(ok)
hbox.setLayout(hlyt)
vlyt.addWidget(self.structtree)
vlyt.addWidget(hbox)
self.setLayout(vlyt)
self.resize(500, 500)
def dialog_activated(self, idx):
if idx.isValid():
pnode = idx.internalPointer()
self.structname = pnode.structname
self.accept()
def dialog_ok(self):
for idx in self.structtree.selectedIndexes():
pnode = idx.internalPointer()
self.structname = pnode.structname
self.accept()
def dialog_cancel(self):
self.reject()
@idlethreadsync
def selectStructure(vsbuilder, parent=None):
d = VQStructSelectDialog(vsbuilder, parent=parent)
r = d.exec_()
return d.structname
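# Usage sketch (illustrative): vsbuilder is any object exposing
# getVStructNamespaceNames()/getVStructNames(), as assumed above.
#   sname = selectStructure(vsbuilder, parent=mainwin)
#   if sname is not None:
#       vs = vsbuilder.buildVStruct(sname)  # hypothetical consumer call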
class VQStructNamespacesView(vq_tree.VQTreeView):
def __init__(self, parent=None):
vq_tree.VQTreeView.__init__(self, parent=parent)
model = vq_tree.VQTreeModel(parent=self, columns=('Subsystem', 'Module Name'))
win = model.append(('windows', ''))
xp_i386_user = model.append(('Windows XP i386 Userland', ''), parent=win)
xp_i386_ntdll = model.append(('','ntdll'), parent=xp_i386_user)
xp_i386_ntdll.modinfo = ('ntdll','vstruct.defs.windows.win_5_1_i386.ntdll')
xp_i386_kern = model.append(('Windows XP i386 Kernel', ''), parent=win)
xp_i386_nt = model.append(('','nt'), parent=xp_i386_kern)
xp_i386_nt.modinfo = ('nt','vstruct.defs.windows.win_5_1_i386.ntoskrnl')
xp_i386_win32k = model.append(('','win32k'), parent=xp_i386_kern)
xp_i386_win32k.modinfo = ('win32k','vstruct.defs.windows.win_5_1_i386.win32k')
win7_amd64_user = model.append(('Windows 7 amd64 Userland', ''), parent=win)
win7_amd64_ntdll = model.append(('','ntdll'), parent=win7_amd64_user)
win7_amd64_ntdll.modinfo = ('ntdll','vstruct.defs.windows.win_6_1_amd64.ntdll')
pos = model.append(('posix',''))
pos_elf = model.append(('', 'Elf'), parent=pos)
pos_elf.modinfo = ('elf', 'vstruct.defs.elf')
self.setModel(model)
class VQStructNamespaceDialog(QtGui.QDialog):
def __init__(self, parent=None):
QtGui.QDialog.__init__(self, parent=parent)
self.modinfo = None
self.setWindowTitle('Select a module...')
vlyt = QtGui.QVBoxLayout()
hlyt = QtGui.QHBoxLayout()
self.structtree = VQStructNamespacesView(parent=self)
hbox = QtGui.QWidget(parent=self)
ok = QtGui.QPushButton("Ok", parent=hbox)
cancel = QtGui.QPushButton("Cancel", parent=hbox)
self.structtree.doubleClicked.connect( self.dialog_activated )
ok.clicked.connect(self.dialog_ok)
cancel.clicked.connect(self.dialog_cancel)
hlyt.addStretch(1)
hlyt.addWidget(cancel)
hlyt.addWidget(ok)
hbox.setLayout(hlyt)
vlyt.addWidget(self.structtree)
vlyt.addWidget(hbox)
self.setLayout(vlyt)
self.resize(500, 500)
def dialog_activated(self, idx):
if idx.isValid():
pnode = idx.internalPointer()
self.modinfo = getattr(pnode, 'modinfo', None)
self.accept()
def dialog_ok(self):
for idx in self.structtree.selectedIndexes():
pnode = idx.internalPointer()
self.modinfo = getattr(pnode, 'modinfo', None)
self.accept()
def dialog_cancel(self):
self.reject()
@idlethreadsync
def selectStructNamespace(parent=None):
d = VQStructNamespaceDialog(parent=parent)
r = d.exec_()
return d.modinfo
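# Return-shape note (illustrative): selectStructNamespace() hands back the
# (namespace, modulepath) tuple attached to the chosen row, e.g.
# ('ntdll', 'vstruct.defs.windows.win_5_1_i386.ntdll'), or None on cancel.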
| apache-2.0 | -4,406,548,675,122,204,700 | 2,858,455,573,652,856,000 | 31.104651 | 87 | 0.626947 | false |
infrascloudy/flask-base | app/models/user.py | 1 | 6385 | from flask import current_app
from flask_login import AnonymousUserMixin, UserMixin
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from itsdangerous import BadSignature, SignatureExpired
from werkzeug.security import check_password_hash, generate_password_hash
from app import db, login_manager
class Permission:
GENERAL = 0x01
ADMINISTER = 0xff
class Role(db.Model):
__tablename__ = 'roles'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
index = db.Column(db.String(64))
default = db.Column(db.Boolean, default=False, index=True)
permissions = db.Column(db.Integer)
users = db.relationship('User', backref='role', lazy='dynamic')
@staticmethod
def insert_roles():
roles = {
'User': (Permission.GENERAL, 'main', True),
'Administrator': (
Permission.ADMINISTER,
'admin',
False # grants all permissions
)
}
for r in roles:
role = Role.query.filter_by(name=r).first()
if role is None:
role = Role(name=r)
role.permissions = roles[r][0]
role.index = roles[r][1]
role.default = roles[r][2]
db.session.add(role)
db.session.commit()
def __repr__(self):
return '<Role \'%s\'>' % self.name
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
confirmed = db.Column(db.Boolean, default=False)
first_name = db.Column(db.String(64), index=True)
last_name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), unique=True, index=True)
password_hash = db.Column(db.String(128))
role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
def __init__(self, **kwargs):
super(User, self).__init__(**kwargs)
if self.role is None:
if self.email == current_app.config['ADMIN_EMAIL']:
self.role = Role.query.filter_by(
permissions=Permission.ADMINISTER).first()
if self.role is None:
self.role = Role.query.filter_by(default=True).first()
def full_name(self):
return '%s %s' % (self.first_name, self.last_name)
def can(self, permissions):
return self.role is not None and \
(self.role.permissions & permissions) == permissions
def is_admin(self):
return self.can(Permission.ADMINISTER)
@property
def password(self):
raise AttributeError('`password` is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def generate_confirmation_token(self, expiration=604800):
"""Generate a confirmation token to email a new user."""
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'confirm': self.id})
def generate_email_change_token(self, new_email, expiration=3600):
"""Generate an email change token to email an existing user."""
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'change_email': self.id, 'new_email': new_email})
def generate_password_reset_token(self, expiration=3600):
"""
Generate a password reset change token to email to an existing user.
"""
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'reset': self.id})
def confirm_account(self, token):
"""Verify that the provided token is for this user's id."""
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except (BadSignature, SignatureExpired):
return False
if data.get('confirm') != self.id:
return False
self.confirmed = True
db.session.add(self)
db.session.commit()
return True
def change_email(self, token):
"""Verify the new email for this user."""
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except (BadSignature, SignatureExpired):
return False
if data.get('change_email') != self.id:
return False
new_email = data.get('new_email')
if new_email is None:
return False
if self.query.filter_by(email=new_email).first() is not None:
return False
self.email = new_email
db.session.add(self)
db.session.commit()
return True
def reset_password(self, token, new_password):
"""Verify the new password for this user."""
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except (BadSignature, SignatureExpired):
return False
if data.get('reset') != self.id:
return False
self.password = new_password
db.session.add(self)
db.session.commit()
return True
@staticmethod
def generate_fake(count=100, **kwargs):
"""Generate a number of fake users for testing."""
from sqlalchemy.exc import IntegrityError
from random import seed, choice
from faker import Faker
fake = Faker()
roles = Role.query.all()
seed()
for i in range(count):
u = User(
first_name=fake.first_name(),
last_name=fake.last_name(),
email=fake.email(),
password=fake.password(),
confirmed=True,
role=choice(roles),
**kwargs)
db.session.add(u)
try:
db.session.commit()
except IntegrityError:
db.session.rollback()
def __repr__(self):
return '<User \'%s\'>' % self.full_name()
class AnonymousUser(AnonymousUserMixin):
def can(self, _):
return False
def is_admin(self):
return False
login_manager.anonymous_user = AnonymousUser
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
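# Round-trip sketch (illustrative; assumes an application context and a
# committed user):
#   user = User(email='a@example.com', password='hunter2',
#               first_name='A', last_name='B')
#   db.session.add(user); db.session.commit()
#   token = user.generate_confirmation_token()
#   user.confirm_account(token)  # -> True, and marks the account confirmed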
| mit | 3,601,748,114,796,673,500 | -6,411,095,047,075,658,000 | 31.576531 | 76 | 0.592482 | false |
joernhees/git-hg-remote-bug_gae-init | main/lib/werkzeug/contrib/atom.py | 7 | 15329 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.atom
~~~~~~~~~~~~~~~~~~~~~
This module provides a class called :class:`AtomFeed` which can be
used to generate feeds in the Atom syndication format (see :rfc:`4287`).
Example::
def atom_feed(request):
feed = AtomFeed("My Blog", feed_url=request.url,
url=request.host_url,
subtitle="My example blog for a feed test.")
for post in Post.query.limit(10).all():
feed.add(post.title, post.body, content_type='html',
author=post.author, url=post.url, id=post.uid,
updated=post.last_update, published=post.pub_date)
return feed.get_response()
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from werkzeug.utils import escape
from werkzeug.wrappers import BaseResponse
from werkzeug._compat import implements_to_string, string_types
XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml'
def _make_text_block(name, content, content_type=None):
"""Helper function for the builder that creates an XML text block."""
if content_type == 'xhtml':
return u'<%s type="xhtml"><div xmlns="%s">%s</div></%s>\n' % \
(name, XHTML_NAMESPACE, content, name)
if not content_type:
return u'<%s>%s</%s>\n' % (name, escape(content), name)
return u'<%s type="%s">%s</%s>\n' % (name, content_type,
escape(content), name)
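# Behavior sketch (illustrative):
#   _make_text_block('title', 'a & b')
#       -> u'<title>a &amp; b</title>\n'
#   _make_text_block('title', '<b>x</b>', 'html')
#       -> u'<title type="html">&lt;b&gt;x&lt;/b&gt;</title>\n'
# xhtml content is wrapped in a namespaced <div> and deliberately not escaped.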
def format_iso8601(obj):
"""Format a datetime object for iso8601"""
return obj.strftime('%Y-%m-%dT%H:%M:%SZ')
@implements_to_string
class AtomFeed(object):
"""A helper class that creates Atom feeds.
:param title: the title of the feed. Required.
:param title_type: the type attribute for the title element. One of
``'html'``, ``'text'`` or ``'xhtml'``.
:param url: the url for the feed (not the url *of* the feed)
:param id: a globally unique id for the feed. Must be an URI. If
not present the `feed_url` is used, but one of both is
required.
:param updated: the time the feed was modified the last time. Must
be a :class:`datetime.datetime` object. If not
present the latest entry's `updated` is used.
:param feed_url: the URL to the feed. Should be the URL that was
requested.
:param author: the author of the feed. Must be either a string (the
name) or a dict with name (required) and uri or
email (both optional). Can be a list of (may be
mixed, too) strings and dicts, too, if there are
multiple authors. Required if not every entry has an
author element.
:param icon: an icon for the feed.
:param logo: a logo for the feed.
:param rights: copyright information for the feed.
:param rights_type: the type attribute for the rights element. One of
``'html'``, ``'text'`` or ``'xhtml'``. Default is
``'text'``.
:param subtitle: a short description of the feed.
:param subtitle_type: the type attribute for the subtitle element.
                          One of ``'text'``, ``'html'`` or ``'xhtml'``.
                          Default is ``'text'``.
:param links: additional links. Must be a list of dictionaries with
href (required) and rel, type, hreflang, title, length
(all optional)
:param generator: the software that generated this feed. This must be
a tuple in the form ``(name, url, version)``. If
you don't want to specify one of them, set the item
to `None`.
:param entries: a list with the entries for the feed. Entries can also
be added later with :meth:`add`.
For more information on the elements see
http://www.atomenabled.org/developers/syndication/
Everywhere where a list is demanded, any iterable can be used.
"""
default_generator = ('Werkzeug', None, None)
def __init__(self, title=None, entries=None, **kwargs):
self.title = title
self.title_type = kwargs.get('title_type', 'text')
self.url = kwargs.get('url')
self.feed_url = kwargs.get('feed_url', self.url)
self.id = kwargs.get('id', self.feed_url)
self.updated = kwargs.get('updated')
self.author = kwargs.get('author', ())
self.icon = kwargs.get('icon')
self.logo = kwargs.get('logo')
self.rights = kwargs.get('rights')
self.rights_type = kwargs.get('rights_type')
self.subtitle = kwargs.get('subtitle')
self.subtitle_type = kwargs.get('subtitle_type', 'text')
self.generator = kwargs.get('generator')
if self.generator is None:
self.generator = self.default_generator
self.links = kwargs.get('links', [])
self.entries = entries and list(entries) or []
if not hasattr(self.author, '__iter__') \
or isinstance(self.author, string_types + (dict,)):
self.author = [self.author]
for i, author in enumerate(self.author):
if not isinstance(author, dict):
self.author[i] = {'name': author}
if not self.title:
raise ValueError('title is required')
if not self.id:
raise ValueError('id is required')
for author in self.author:
if 'name' not in author:
raise TypeError('author must contain at least a name')
def add(self, *args, **kwargs):
"""Add a new entry to the feed. This function can either be called
with a :class:`FeedEntry` or some keyword and positional arguments
that are forwarded to the :class:`FeedEntry` constructor.
"""
if len(args) == 1 and not kwargs and isinstance(args[0], FeedEntry):
self.entries.append(args[0])
else:
kwargs['feed_url'] = self.feed_url
self.entries.append(FeedEntry(*args, **kwargs))
def __repr__(self):
return '<%s %r (%d entries)>' % (
self.__class__.__name__,
self.title,
len(self.entries)
)
def generate(self):
"""Return a generator that yields pieces of XML."""
# atom demands either an author element in every entry or a global one
if not self.author:
if False in map(lambda e: bool(e.author), self.entries):
self.author = ({'name': 'Unknown author'},)
if not self.updated:
dates = sorted([entry.updated for entry in self.entries])
self.updated = dates and dates[-1] or datetime.utcnow()
yield u'<?xml version="1.0" encoding="utf-8"?>\n'
yield u'<feed xmlns="http://www.w3.org/2005/Atom">\n'
yield ' ' + _make_text_block('title', self.title, self.title_type)
yield u' <id>%s</id>\n' % escape(self.id)
yield u' <updated>%s</updated>\n' % format_iso8601(self.updated)
if self.url:
yield u' <link href="%s" />\n' % escape(self.url, True)
if self.feed_url:
yield u' <link href="%s" rel="self" />\n' % \
escape(self.feed_url, True)
for link in self.links:
yield u' <link %s/>\n' % ''.join('%s="%s" ' % \
(k, escape(link[k], True)) for k in link)
for author in self.author:
yield u' <author>\n'
yield u' <name>%s</name>\n' % escape(author['name'])
if 'uri' in author:
yield u' <uri>%s</uri>\n' % escape(author['uri'])
if 'email' in author:
yield ' <email>%s</email>\n' % escape(author['email'])
yield ' </author>\n'
if self.subtitle:
yield ' ' + _make_text_block('subtitle', self.subtitle,
self.subtitle_type)
if self.icon:
yield u' <icon>%s</icon>\n' % escape(self.icon)
if self.logo:
yield u' <logo>%s</logo>\n' % escape(self.logo)
if self.rights:
yield ' ' + _make_text_block('rights', self.rights,
self.rights_type)
generator_name, generator_url, generator_version = self.generator
if generator_name or generator_url or generator_version:
tmp = [u' <generator']
if generator_url:
tmp.append(u' uri="%s"' % escape(generator_url, True))
if generator_version:
tmp.append(u' version="%s"' % escape(generator_version, True))
tmp.append(u'>%s</generator>\n' % escape(generator_name))
yield u''.join(tmp)
for entry in self.entries:
for line in entry.generate():
yield u' ' + line
yield u'</feed>\n'
def to_string(self):
"""Convert the feed into a string."""
return u''.join(self.generate())
def get_response(self):
"""Return a response object for the feed."""
return BaseResponse(self.to_string(), mimetype='application/atom+xml')
def __call__(self, environ, start_response):
"""Use the class as WSGI response object."""
return self.get_response()(environ, start_response)
def __str__(self):
return self.to_string()
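# Editorial usage sketch, not part of the original module. It assumes this
# class is importable (it historically shipped as werkzeug.contrib.atom) and
# that ``datetime`` is imported at the top of the file:
#
#     feed = AtomFeed('My Blog', feed_url='http://example.com/feed.atom',
#                     url='http://example.com/', author='Jane Doe')
#     feed.add('First post', 'Hello world', content_type='html',
#              url='http://example.com/posts/1', updated=datetime.utcnow())
#     xml = feed.to_string()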
@implements_to_string
class FeedEntry(object):
"""Represents a single entry in a feed.
:param title: the title of the entry. Required.
:param title_type: the type attribute for the title element. One of
``'html'``, ``'text'`` or ``'xhtml'``.
:param content: the content of the entry.
:param content_type: the type attribute for the content element. One
of ``'html'``, ``'text'`` or ``'xhtml'``.
:param summary: a summary of the entry's content.
:param summary_type: the type attribute for the summary element. One
of ``'html'``, ``'text'`` or ``'xhtml'``.
:param url: the url for the entry.
:param id: a globally unique id for the entry. Must be an URI. If
not present the URL is used, but one of both is required.
    :param updated: the time the entry was last modified.  Must
be a :class:`datetime.datetime` object. Required.
:param author: the author of the entry. Must be either a string (the
name) or a dict with name (required) and uri or
                   email (both optional).  Can also be a list of such
                   strings and dicts (mixed freely) if there are
                   multiple authors.  Required if the feed does not have an
                   author element.
:param published: the time the entry was initially published. Must
be a :class:`datetime.datetime` object.
:param rights: copyright information for the entry.
:param rights_type: the type attribute for the rights element. One of
``'html'``, ``'text'`` or ``'xhtml'``. Default is
``'text'``.
:param links: additional links. Must be a list of dictionaries with
href (required) and rel, type, hreflang, title, length
(all optional)
:param categories: categories for the entry. Must be a list of dictionaries
with term (required), scheme and label (all optional)
:param xml_base: The xml base (url) for this feed item. If not provided
it will default to the item url.
For more information on the elements see
http://www.atomenabled.org/developers/syndication/
Everywhere where a list is demanded, any iterable can be used.
"""
def __init__(self, title=None, content=None, feed_url=None, **kwargs):
self.title = title
self.title_type = kwargs.get('title_type', 'text')
self.content = content
self.content_type = kwargs.get('content_type', 'html')
self.url = kwargs.get('url')
self.id = kwargs.get('id', self.url)
self.updated = kwargs.get('updated')
self.summary = kwargs.get('summary')
self.summary_type = kwargs.get('summary_type', 'html')
self.author = kwargs.get('author', ())
self.published = kwargs.get('published')
self.rights = kwargs.get('rights')
self.links = kwargs.get('links', [])
self.categories = kwargs.get('categories', [])
self.xml_base = kwargs.get('xml_base', feed_url)
if not hasattr(self.author, '__iter__') \
or isinstance(self.author, string_types + (dict,)):
self.author = [self.author]
for i, author in enumerate(self.author):
if not isinstance(author, dict):
self.author[i] = {'name': author}
if not self.title:
raise ValueError('title is required')
if not self.id:
raise ValueError('id is required')
if not self.updated:
raise ValueError('updated is required')
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__,
self.title
)
def generate(self):
"""Yields pieces of ATOM XML."""
base = ''
if self.xml_base:
base = ' xml:base="%s"' % escape(self.xml_base, True)
yield u'<entry%s>\n' % base
yield u' ' + _make_text_block('title', self.title, self.title_type)
yield u' <id>%s</id>\n' % escape(self.id)
yield u' <updated>%s</updated>\n' % format_iso8601(self.updated)
if self.published:
yield u' <published>%s</published>\n' % \
format_iso8601(self.published)
if self.url:
yield u' <link href="%s" />\n' % escape(self.url)
for author in self.author:
yield u' <author>\n'
yield u' <name>%s</name>\n' % escape(author['name'])
if 'uri' in author:
yield u' <uri>%s</uri>\n' % escape(author['uri'])
if 'email' in author:
yield u' <email>%s</email>\n' % escape(author['email'])
yield u' </author>\n'
for link in self.links:
yield u' <link %s/>\n' % ''.join('%s="%s" ' % \
(k, escape(link[k], True)) for k in link)
for category in self.categories:
yield u' <category %s/>\n' % ''.join('%s="%s" ' % \
(k, escape(category[k], True)) for k in category)
if self.summary:
yield u' ' + _make_text_block('summary', self.summary,
self.summary_type)
if self.content:
yield u' ' + _make_text_block('content', self.content,
self.content_type)
yield u'</entry>\n'
def to_string(self):
"""Convert the feed item into a unicode object."""
return u''.join(self.generate())
def __str__(self):
return self.to_string()
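# Editorial sketch of building an entry directly, not in the original module;
# ``updated`` is mandatory and ``url`` doubles as the id when no explicit id
# is passed:
#
#     entry = FeedEntry('First post', 'Hello world',
#                       url='http://example.com/posts/1',
#                       updated=datetime.utcnow())
#     xml = entry.to_string()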
| mit | 3,068,150,775,655,239,000 | -5,640,358,815,720,902,000 | 43.175793 | 79 | 0.553396 | false |
75651/kbengine_cloud | kbe/src/lib/python/Lib/hmac.py | 142 | 5063 | """HMAC (Keyed-Hashing for Message Authentication) Python module.
Implements the HMAC algorithm as described by RFC 2104.
"""
import warnings as _warnings
from _operator import _compare_digest as compare_digest
import hashlib as _hashlib
trans_5C = bytes((x ^ 0x5C) for x in range(256))
trans_36 = bytes((x ^ 0x36) for x in range(256))
# The size of the digests returned by HMAC depends on the underlying
# hashing module used. Use digest_size from the instance of HMAC instead.
digest_size = None
class HMAC:
"""RFC 2104 HMAC class. Also complies with RFC 4231.
This supports the API for Cryptographic Hash Functions (PEP 247).
"""
blocksize = 64 # 512-bit HMAC; can be changed in subclasses.
def __init__(self, key, msg = None, digestmod = None):
"""Create a new HMAC object.
key: key for the keyed hash object.
msg: Initial input for the hash, if provided.
digestmod: A module supporting PEP 247. *OR*
A hashlib constructor returning a new hash object. *OR*
A hash name suitable for hashlib.new().
Defaults to hashlib.md5.
Implicit default to hashlib.md5 is deprecated and will be
removed in Python 3.6.
        Note: key and msg must be bytes or bytearray objects.
"""
if not isinstance(key, (bytes, bytearray)):
raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__)
if digestmod is None:
_warnings.warn("HMAC() without an explicit digestmod argument "
"is deprecated.", PendingDeprecationWarning, 2)
digestmod = _hashlib.md5
if callable(digestmod):
self.digest_cons = digestmod
elif isinstance(digestmod, str):
self.digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
else:
self.digest_cons = lambda d=b'': digestmod.new(d)
self.outer = self.digest_cons()
self.inner = self.digest_cons()
self.digest_size = self.inner.digest_size
if hasattr(self.inner, 'block_size'):
blocksize = self.inner.block_size
if blocksize < 16:
_warnings.warn('block_size of %d seems too small; using our '
'default of %d.' % (blocksize, self.blocksize),
RuntimeWarning, 2)
blocksize = self.blocksize
else:
_warnings.warn('No block_size attribute on given digest object; '
'Assuming %d.' % (self.blocksize),
RuntimeWarning, 2)
blocksize = self.blocksize
# self.blocksize is the default blocksize. self.block_size is
# effective block size as well as the public API attribute.
self.block_size = blocksize
if len(key) > blocksize:
key = self.digest_cons(key).digest()
key = key + bytes(blocksize - len(key))
self.outer.update(key.translate(trans_5C))
self.inner.update(key.translate(trans_36))
if msg is not None:
self.update(msg)
@property
def name(self):
return "hmac-" + self.inner.name
def update(self, msg):
"""Update this hashing object with the string msg.
"""
self.inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
An update to this copy won't affect the original object.
"""
# Call __new__ directly to avoid the expensive __init__.
other = self.__class__.__new__(self.__class__)
other.digest_cons = self.digest_cons
other.digest_size = self.digest_size
other.inner = self.inner.copy()
other.outer = self.outer.copy()
return other
def _current(self):
"""Return a hash object for the current state.
To be used only internally with digest() and hexdigest().
"""
h = self.outer.copy()
h.update(self.inner.digest())
return h
def digest(self):
"""Return the hash value of this hashing object.
This returns a string containing 8-bit data. The object is
not altered in any way by this function; you can continue
updating the object after calling this function.
"""
h = self._current()
return h.digest()
def hexdigest(self):
"""Like digest(), but returns a string of hexadecimal digits instead.
"""
h = self._current()
return h.hexdigest()
def new(key, msg = None, digestmod = None):
"""Create a new hashing object and return it.
key: The starting key for the hash.
msg: if available, will immediately be hashed into the object's starting
state.
You can now feed arbitrary strings into the object using its update()
method, and can ask for the hash value at any time by calling its digest()
method.
"""
return HMAC(key, msg, digestmod)
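# Editorial usage sketch, not part of the original module. The calls below use
# only the public API defined above; ``received_tag`` is a hypothetical value
# obtained elsewhere:
#
#     import hmac
#     mac = hmac.new(b'secret-key', b'message', digestmod='sha256')
#     tag = mac.hexdigest()
#     hmac.compare_digest(tag, received_tag)  # constant-time comparison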
| lgpl-3.0 | 7,004,686,315,606,627,000 | 4,041,429,720,272,501,000 | 34.159722 | 96 | 0.597472 | false |
edwardzhou1980/bite-project | deps/mrtaskman/server/util/model_to_dict.py | 16 | 1536 | """Converts AppEngine db.Model instances to JSON-serializable dicts."""
from google.appengine.ext import db
from google.appengine.ext.blobstore import blobstore
import datetime
import json
import logging
import time
from util import db_properties
SIMPLE_TYPES = (int, long, float, bool, dict, basestring, list)
def ModelToDict(model):
"""Returns dictionary from given db.Model."""
if not isinstance(model, db.Model):
logging.error('%s is not an instance of db.Model. It is %s',
model, model.__class__)
assert isinstance(model, db.Model)
output = {}
output['id'] = model.key().id_or_name()
for key, prop in model.properties().iteritems():
value = getattr(model, key)
if value is None:
output[key] = value
elif isinstance(prop, db_properties.JsonProperty):
output[key] = json.loads(value)
elif isinstance(value, SIMPLE_TYPES):
output[key] = value
elif isinstance(value, datetime.date):
# Convert date/datetime to ms-since-epoch ("new Date()").
ms = time.mktime(value.utctimetuple()) * 1000
      # datetime exposes 'microsecond' (singular); 'microseconds' belongs
      # to timedelta and would always miss here.
      ms += getattr(value, 'microsecond', 0) / 1000
output[key] = int(ms)
elif isinstance(value, db.GeoPt):
output[key] = {'lat': value.lat, 'lon': value.lon}
elif isinstance(prop, blobstore.BlobReferenceProperty):
# TODO: Implement this if it's needed.
output[key] = 'UnimplementedBlobRef'
elif isinstance(value, db.Model):
output[key] = ModelToDict(value)
else:
raise ValueError('cannot encode ' + repr(prop))
return output
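# Editorial usage sketch, not part of the original module. ``Task`` is a
# hypothetical db.Model subclass; any datastore entity works:
#
#     task = Task.get_by_id(42)
#     payload = json.dumps(ModelToDict(task))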
| apache-2.0 | -5,169,140,067,216,505,000 | 8,256,587,520,022,902,000 | 29.72 | 64 | 0.666667 | false |
cnbeining/you-get | src/you_get/extractors/huaban.py | 7 | 2281 | #!/usr/bin/env python
import json
import os
import re
import math
import traceback
import urllib.parse as urlparse
from ..common import *
__all__ = ['huaban_download']
site_info = '花瓣 (Huaban)'
LIMIT = 100
class Board:
def __init__(self, title, pins):
self.title = title
self.pins = pins
self.pin_count = len(pins)
class Pin:
host = 'http://img.hb.aicdn.com/'
def __init__(self, pin_json):
img_file = pin_json['file']
self.id = str(pin_json['pin_id'])
self.url = urlparse.urljoin(self.host, img_file['key'])
self.ext = img_file['type'].split('/')[-1]
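# Editorial sketch (not in the original script) of the pin JSON shape the
# class above expects; the field values are invented:
#
#     pin = Pin({'pin_id': 1, 'file': {'key': 'abc', 'type': 'image/jpeg'}})
#     pin.url  # 'http://img.hb.aicdn.com/abc'
#     pin.ext  # 'jpeg'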
def construct_url(url, **params):
param_str = urlparse.urlencode(params)
return url + '?' + param_str
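# For example (editorial note), a single query parameter round-trips as:
#
#     construct_url('http://huaban.com/boards/123/', limit=100)
#     # -> 'http://huaban.com/boards/123/?limit=100'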
def extract_json_data(url, **params):
url = construct_url(url, **params)
html = get_content(url, headers=fake_headers)
json_string = match1(html, r'app.page\["board"\] = (.*?});')
json_data = json.loads(json_string)
return json_data
def extract_board_data(url):
json_data = extract_json_data(url, limit=LIMIT)
pin_list = json_data['pins']
title = json_data['title']
pin_count = json_data['pin_count']
pin_count -= len(pin_list)
while pin_count > 0:
json_data = extract_json_data(url, max=pin_list[-1]['pin_id'],
limit=LIMIT)
pins = json_data['pins']
pin_list += pins
pin_count -= len(pins)
return Board(title, list(map(Pin, pin_list)))
def huaban_download_board(url, output_dir, **kwargs):
kwargs['merge'] = False
board = extract_board_data(url)
output_dir = os.path.join(output_dir, board.title)
print_info(site_info, board.title, 'jpg', float('Inf'))
for pin in board.pins:
download_urls([pin.url], pin.id, pin.ext, float('Inf'),
output_dir=output_dir, faker=True, **kwargs)
def huaban_download(url, output_dir='.', **kwargs):
if re.match(r'http://huaban\.com/boards/\d+/', url):
huaban_download_board(url, output_dir, **kwargs)
else:
print('Only board (画板) pages are supported currently')
print('ex: http://huaban.com/boards/12345678/')
download = huaban_download
download_playlist = playlist_not_supported("huaban")
| mit | -1,906,475,446,698,646,000 | -8,304,206,263,023,324,000 | 25.741176 | 70 | 0.605807 | false |
ME-ICA/me-ica | meica.libs/mdp/graph/graph.py | 1 | 13012 | # inspired by some code by Nathan Denny (1999)
# see http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html
try:
# use reduce against BDFL's will even on python > 2.6
from functools import reduce
except ImportError:
pass
class GraphException(Exception):
"""Base class for exception in the graph package."""
pass
class GraphTopologicalException(GraphException):
"""Exception thrown during a topological sort if the graph is cyclical."""
pass
def is_sequence(x):
return isinstance(x, (list, tuple))
def recursive_map(func, seq):
"""Apply a function recursively on a sequence and all subsequences."""
def _func(x):
if is_sequence(x):
return recursive_map(func, x)
else:
return func(x)
return map(_func, seq)
def recursive_reduce(func, seq, *argv):
"""Apply reduce(func, seq) recursively to a sequence and all its
subsequences."""
def _func(x, y):
if is_sequence(y):
return func(x, recursive_reduce(func, y))
else:
return func(x, y)
return reduce(_func, seq, *argv)
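# Editorial example, not part of the original module. Under Python 2 (where
# map() returns a list) the helpers above behave as:
#
#     recursive_map(lambda x: x * 2, (1, (2, 3)))           # [2, [4, 6]]
#     recursive_reduce(lambda x, y: x + y, (1, (2, 3)), 0)  # 6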
class GraphNode(object):
"""Represent a graph node and all information attached to it."""
def __init__(self, data=None):
self.data = data
# edges in
self.ein = []
# edges out
self.eout = []
def add_edge_in(self, edge):
self.ein.append(edge)
def add_edge_out(self, edge):
self.eout.append(edge)
def remove_edge_in(self, edge):
self.ein.remove(edge)
def remove_edge_out(self, edge):
self.eout.remove(edge)
def get_edges_in(self, from_ = None):
"""Return a copy of the list of the entering edges. If from_
        is specified, return only the edges coming from that node."""
inedges = self.ein[:]
if from_:
inedges = [edge for edge in inedges if edge.head == from_]
return inedges
def get_edges_out(self, to_ = None):
"""Return a copy of the list of the outgoing edges. If to_
        is specified, return only the edges going to that node."""
outedges = self.eout[:]
if to_:
outedges = [edge for edge in outedges if edge.tail == to_]
return outedges
def get_edges(self, neighbor = None):
"""Return a copy of all edges. If neighbor is specified, return
only the edges connected to that node."""
return ( self.get_edges_in(from_=neighbor) +
self.get_edges_out(to_=neighbor) )
def in_degree(self):
"""Return the number of entering edges."""
return len(self.ein)
def out_degree(self):
"""Return the number of outgoing edges."""
return len(self.eout)
def degree(self):
"""Return the number of edges."""
return self.in_degree()+self.out_degree()
def in_neighbors(self):
"""Return the neighbors down in-edges (i.e. the parents nodes)."""
return map(lambda x: x.get_head(), self.ein)
def out_neighbors(self):
"""Return the neighbors down in-edges (i.e. the parents nodes)."""
return map(lambda x: x.get_tail(), self.eout)
def neighbors(self):
return self.in_neighbors() + self.out_neighbors()
class GraphEdge(object):
"""Represent a graph edge and all information attached to it."""
def __init__(self, head, tail, data=None):
# head node
self.head = head
# neighbors out
self.tail = tail
# arbitrary data slot
self.data = data
def get_ends(self):
"""Return the tuple (head_id, tail_id)."""
return (self.head, self.tail)
def get_tail(self):
return self.tail
def get_head(self):
return self.head
class Graph(object):
"""Represent a directed graph."""
def __init__(self):
# list of nodes
self.nodes = []
# list of edges
self.edges = []
# node functions
def add_node(self, data=None):
node = GraphNode(data=data)
self.nodes.append(node)
return node
def remove_node(self, node):
# the node is not in this graph
if node not in self.nodes:
errstr = 'This node is not part of the graph (%s)' % node
raise GraphException(errstr)
# remove all edges containing this node
for edge in node.get_edges():
self.remove_edge(edge)
# remove the node
self.nodes.remove(node)
# edge functions
def add_edge(self, head, tail, data=None):
"""Add an edge going from head to tail.
head : head node
tail : tail node
"""
# create edge
edge = GraphEdge(head, tail, data=data)
# add edge to head and tail node
head.add_edge_out(edge)
tail.add_edge_in(edge)
# add to the edges dictionary
self.edges.append(edge)
return edge
def remove_edge(self, edge):
head, tail = edge.get_ends()
# remove from head
head.remove_edge_out(edge)
# remove from tail
tail.remove_edge_in(edge)
# remove the edge
self.edges.remove(edge)
### populate functions
def add_nodes(self, data):
"""Add many nodes at once.
data -- number of nodes to add or sequence of data values, one for
each new node"""
if not is_sequence(data):
data = [None]*data
return map(self.add_node, data)
def add_tree(self, tree):
"""Add a tree to the graph.
The tree is specified with a nested list of tuple, in a LISP-like
notation. The values specified in the list become the values of
the single nodes.
Return an equivalent nested list with the nodes instead of the values.
Example:
>>> a=b=c=d=e=None
        >>> g.add_tree( (a, b, (c, d, e)) )
corresponds to this tree structure, with all node values set to None:
a
/ \
b c
/ \
d e
"""
def _add_edge(root, son):
self.add_edge(root, son)
return root
nodes = recursive_map(self.add_node, tree)
recursive_reduce(_add_edge, nodes)
return nodes
def add_full_connectivity(self, from_nodes, to_nodes):
"""Add full connectivity from a group of nodes to another one.
Return a list of lists of edges, one for each node in 'from_nodes'.
Example: create a two-layer graph with full connectivity.
>>> g = Graph()
>>> layer1 = g.add_nodes(10)
>>> layer2 = g.add_nodes(5)
>>> g.add_full_connectivity(layer1, layer2)
"""
edges = []
for from_ in from_nodes:
edges.append(map(lambda x: self.add_edge(from_, x), to_nodes))
return edges
###### graph algorithms
def topological_sort(self):
"""Perform a topological sort of the nodes. If the graph has a cycle,
throw a GraphTopologicalException with the list of successfully
ordered nodes."""
# topologically sorted list of the nodes (result)
topological_list = []
# queue (fifo list) of the nodes with in_degree 0
topological_queue = []
# {node: in_degree} for the remaining nodes (those with in_degree>0)
remaining_indegree = {}
# init queues and lists
for node in self.nodes:
indegree = node.in_degree()
if indegree == 0:
topological_queue.append(node)
else:
remaining_indegree[node] = indegree
# remove nodes with in_degree 0 and decrease the in_degree of their sons
while len(topological_queue):
# remove the first node with degree 0
node = topological_queue.pop(0)
topological_list.append(node)
# decrease the in_degree of the sons
for son in node.out_neighbors():
remaining_indegree[son] -= 1
if remaining_indegree[son] == 0:
topological_queue.append(son)
# if not all nodes were covered, the graph must have a cycle
        # raise a GraphTopologicalException
if len(topological_list)!=len(self.nodes):
raise GraphTopologicalException(topological_list)
return topological_list
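    # Worked example (editorial sketch, not in the original library): with
    # edges a -> b and a -> c, only 'a' starts with in-degree 0, so it is
    # popped first and its sons then reach in-degree 0:
    #
    #     g = Graph()
    #     a, b, c = g.add_nodes(3)
    #     g.add_edge(a, b); g.add_edge(a, c)
    #     order = g.topological_sort()   # a first, then b and c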
### Depth-First sort
def _dfs(self, neighbors_fct, root, visit_fct=None):
# core depth-first sort function
# changing the neighbors function to return the sons of a node,
# its parents, or both one gets normal dfs, reverse dfs, or
# dfs on the equivalent undirected graph, respectively
# result list containing the nodes in Depth-First order
dfs_list = []
# keep track of all already visited nodes
visited_nodes = { root: None }
# stack (lifo) list
dfs_stack = []
dfs_stack.append(root)
while len(dfs_stack):
# consider the next node on the stack
node = dfs_stack.pop()
dfs_list.append(node)
# visit the node
if visit_fct != None:
visit_fct(node)
# add all sons to the stack (if not already visited)
for son in neighbors_fct(node):
if son not in visited_nodes:
visited_nodes[son] = None
dfs_stack.append(son)
return dfs_list
def dfs(self, root, visit_fct=None):
"""Return a list of nodes in some Depth First order starting from
a root node. If defined, visit_fct is applied on each visited node.
The returned list does not have to contain all nodes in the
graph, but only the ones reachable from the root.
"""
neighbors_fct = lambda node: node.out_neighbors()
return self._dfs(neighbors_fct, root, visit_fct=visit_fct)
def undirected_dfs(self, root, visit_fct=None):
"""Perform Depth First sort.
This function is identical to dfs, but the sort is performed on
the equivalent undirected version of the graph."""
neighbors_fct = lambda node: node.neighbors()
return self._dfs(neighbors_fct, root, visit_fct=visit_fct)
### Connected components
def connected_components(self):
"""Return a list of lists containing the nodes of all connected
components of the graph."""
visited = {}
def visit_fct(node, visited=visited):
visited[node] = None
components = []
nodes = self.nodes
for node in nodes:
if node in visited:
continue
components.append(self.undirected_dfs(node, visit_fct))
return components
def is_weakly_connected(self):
"""Return True if the graph is weakly connected."""
return len(self.undirected_dfs(self.nodes[0]))==len(self.nodes)
### Breadth-First Sort
# BFS and DFS could be generalized to one function. I leave them
# distinct for clarity.
def _bfs(self, neighbors_fct, root, visit_fct=None):
# core breadth-first sort function
# changing the neighbors function to return the sons of a node,
# its parents, or both one gets normal bfs, reverse bfs, or
# bfs on the equivalent undirected graph, respectively
# result list containing the nodes in Breadth-First order
bfs_list = []
# keep track of all already visited nodes
visited_nodes = { root: None }
# queue (fifo) list
bfs_queue = []
bfs_queue.append(root)
while len(bfs_queue):
# consider the next node in the queue
node = bfs_queue.pop(0)
bfs_list.append(node)
# visit the node
if visit_fct != None:
visit_fct(node)
# add all sons to the queue (if not already visited)
for son in neighbors_fct(node):
if son not in visited_nodes:
visited_nodes[son] = None
bfs_queue.append(son)
return bfs_list
def bfs(self, root, visit_fct=None):
"""Return a list of nodes in some Breadth First order starting from
a root node. If defined, visit_fct is applied on each visited node.
Note the returned list does not have to contain all nodes in the
graph, but only the ones reachable from the root."""
neighbors_fct = lambda node: node.out_neighbors()
return self._bfs(neighbors_fct, root, visit_fct=visit_fct)
def undirected_bfs(self, root, visit_fct=None):
"""Perform Breadth First sort.
This function is identical to bfs, but the sort is performed on
the equivalent undirected version of the graph."""
neighbors_fct = lambda node: node.neighbors()
return self._bfs(neighbors_fct, root, visit_fct=visit_fct)
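# Editorial usage sketch, not part of the original module; it exercises only
# the API defined above:
#
#     g = Graph()
#     root, left, right = g.add_nodes(3)
#     g.add_edge(root, left)
#     g.add_edge(root, right)
#     g.is_weakly_connected()         # True
#     [n.degree() for n in g.nodes]   # [2, 1, 1]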
| lgpl-2.1 | -6,419,884,292,298,437,000 | 2,811,257,960,580,778,500 | 31.448878 | 80 | 0.587919 | false |
joequery/django | tests/utils_tests/test_html.py | 160 | 10711 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from datetime import datetime
from django.test import SimpleTestCase, ignore_warnings
from django.utils import html, safestring, six
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
class TestUtilsHtml(SimpleTestCase):
def check_output(self, function, value, output=None):
"""
Check that function(value) equals output. If output is None,
check that function(value) equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_escape(self):
f = html.escape
items = (
('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''),
)
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
for pattern in patterns:
self.check_output(f, pattern % value, pattern % output)
# Check repeated values.
self.check_output(f, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(f, '<&', '<&')
def test_format_html(self):
self.assertEqual(
html.format_html("{} {} {third} {fourth}",
"< Dangerous >",
html.mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=html.mark_safe("<i>safe again</i>")
),
"< Dangerous > <b>safe</b> < dangerous again <i>safe again</i>"
)
def test_linebreaks(self):
f = html.linebreaks
items = (
("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
("para1\nsub1\rsub2\n\npara2", "<p>para1<br />sub1<br />sub2</p>\n\n<p>para2</p>"),
("para1\r\n\r\npara2\rsub1\r\rpara4", "<p>para1</p>\n\n<p>para2<br />sub1</p>\n\n<p>para4</p>"),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
self.check_output(f, value, output)
def test_strip_tags(self):
f = html.strip_tags
items = (
('<p>See: 'é is an apostrophe followed by e acute</p>',
'See: 'é is an apostrophe followed by e acute'),
('<adf>a', 'a'),
('</adf>a', 'a'),
('<asdf><asdf>e', 'e'),
('hi, <f x', 'hi, <f x'),
('234<235, right?', '234<235, right?'),
('a4<a5 right?', 'a4<a5 right?'),
('b7>b2!', 'b7>b2!'),
('</fe', '</fe'),
('<x>b<y>', 'b'),
('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'),
('a<p a >b</p>c', 'abc'),
('d<a:b c:d>e</p>f', 'def'),
('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'),
# caused infinite loop on Pythons not patched with
# http://bugs.python.org/issue20288
('&gotcha&#;<>', '&gotcha&#;<>'),
)
for value, output in items:
self.check_output(f, value, output)
# Some convoluted syntax for which parsing may differ between python versions
output = html.strip_tags('<sc<!-- -->ript>test<<!-- -->/script>')
self.assertNotIn('<script>', output)
self.assertIn('test', output)
output = html.strip_tags('<script>alert()</script>&h')
self.assertNotIn('<script>', output)
self.assertIn('alert()', output)
# Test with more lengthy content (also catching performance regressions)
for filename in ('strip_tags1.html', 'strip_tags2.txt'):
path = os.path.join(os.path.dirname(upath(__file__)), 'files', filename)
with open(path, 'r') as fp:
content = force_text(fp.read())
start = datetime.now()
stripped = html.strip_tags(content)
elapsed = datetime.now() - start
self.assertEqual(elapsed.seconds, 0)
self.assertIn("Please try again.", stripped)
self.assertNotIn('<', stripped)
def test_strip_spaces_between_tags(self):
f = html.strip_spaces_between_tags
# Strings that should come out untouched.
items = (' <adf>', '<adf> ', ' </adf> ', ' <f> x</f>')
for value in items:
self.check_output(f, value)
# Strings that have spaces to strip.
items = (
('<d> </d>', '<d></d>'),
('<p>hello </p>\n<p> world</p>', '<p>hello </p><p> world</p>'),
('\n<p>\t</p>\n<p> </p>\n', '\n<p></p><p></p>\n'),
)
for value, output in items:
self.check_output(f, value, output)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_strip_entities(self):
f = html.strip_entities
# Strings that should come out untouched.
values = ("&", "&a", "&a", "a&#a")
for value in values:
self.check_output(f, value)
# Valid entities that should be stripped from the patterns.
entities = ("", "", "&a;", "&fdasdfasdfasdf;")
patterns = (
("asdf %(entity)s ", "asdf "),
("%(entity)s%(entity)s", ""),
("&%(entity)s%(entity)s", "&"),
("%(entity)s3", "3"),
)
for entity in entities:
for in_pattern, output in patterns:
self.check_output(f, in_pattern % {'entity': entity}, output)
def test_escapejs(self):
f = html.escapejs
items = (
('"double quotes" and \'single quotes\'', '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'),
(r'\ : backslashes, too', '\\u005C : backslashes, too'),
('and lots of whitespace: \r\n\t\v\f\b', 'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'),
(r'<script>and this</script>', '\\u003Cscript\\u003Eand this\\u003C/script\\u003E'),
('paragraph separator:\u2029and line separator:\u2028', 'paragraph separator:\\u2029and line separator:\\u2028'),
)
for value, output in items:
self.check_output(f, value, output)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_remove_tags(self):
f = html.remove_tags
items = (
("<b><i>Yes</i></b>", "b i", "Yes"),
("<a>x</a> <p><b>y</b></p>", "a b", "x <p>y</p>"),
)
for value, tags, output in items:
self.assertEqual(f(value, tags), output)
def test_smart_urlquote(self):
quote = html.smart_urlquote
# Ensure that IDNs are properly quoted
self.assertEqual(quote('http://öäü.com/'), 'http://xn--4ca9at.com/')
self.assertEqual(quote('http://öäü.com/öäü/'), 'http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/')
# Ensure that everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered safe as per RFC
self.assertEqual(quote('http://example.com/path/öäü/'), 'http://example.com/path/%C3%B6%C3%A4%C3%BC/')
self.assertEqual(quote('http://example.com/%C3%B6/ä/'), 'http://example.com/%C3%B6/%C3%A4/')
self.assertEqual(quote('http://example.com/?x=1&y=2+3&z='), 'http://example.com/?x=1&y=2+3&z=')
self.assertEqual(quote('http://example.com/?x=<>"\''), 'http://example.com/?x=%3C%3E%22%27')
self.assertEqual(quote('http://example.com/?q=http://example.com/?x=1%26q=django'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
self.assertEqual(quote('http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
def test_conditional_escape(self):
s = '<h1>interop</h1>'
self.assertEqual(html.conditional_escape(s),
'<h1>interop</h1>')
self.assertEqual(html.conditional_escape(safestring.mark_safe(s)), s)
def test_html_safe(self):
@html.html_safe
class HtmlClass(object):
if six.PY2:
def __unicode__(self):
return "<h1>I'm a html class!</h1>"
else:
def __str__(self):
return "<h1>I'm a html class!</h1>"
html_obj = HtmlClass()
self.assertTrue(hasattr(HtmlClass, '__html__'))
self.assertTrue(hasattr(html_obj, '__html__'))
self.assertEqual(force_text(html_obj), html_obj.__html__())
def test_html_safe_subclass(self):
if six.PY2:
class BaseClass(object):
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __unicode__(self):
return 'some non html content'
@html.html_safe
class Subclass(BaseClass):
def __unicode__(self):
# overrides __unicode__ and is marked as html_safe
return 'some html safe content'
else:
class BaseClass(object):
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __str__(self):
return 'some non html content'
@html.html_safe
class Subclass(BaseClass):
def __str__(self):
# overrides __str__ and is marked as html_safe
return 'some html safe content'
subclass_obj = Subclass()
self.assertEqual(force_text(subclass_obj), subclass_obj.__html__())
def test_html_safe_defines_html_error(self):
msg = "can't apply @html_safe to HtmlClass because it defines __html__()."
with self.assertRaisesMessage(ValueError, msg):
@html.html_safe
class HtmlClass(object):
def __html__(self):
return "<h1>I'm a html class!</h1>"
def test_html_safe_doesnt_define_str(self):
method_name = '__unicode__()' if six.PY2 else '__str__()'
msg = "can't apply @html_safe to HtmlClass because it doesn't define %s." % method_name
with self.assertRaisesMessage(ValueError, msg):
@html.html_safe
class HtmlClass(object):
pass
| bsd-3-clause | 5,874,473,784,212,847,000 | 7,328,648,325,675,783,000 | 41.792 | 125 | 0.520471 | false |
jjhuff/fcc-comments | lib/nltk/sem/evaluate.py | 5 | 22893 | # Natural Language Toolkit: Models for first-order languages with lambda
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Ewan Klein <[email protected]>,
# URL: <http://nltk.sourceforge.net>
# For license information, see LICENSE.TXT
#TODO:
#- fix tracing
#- fix iterator-based approach to existentials
"""
This module provides data structures for representing first-order
models.
"""
from __future__ import print_function
from pprint import pformat
import inspect
import textwrap
from nltk.decorators import decorator
from nltk.sem.logic import (AbstractVariableExpression, AllExpression,
AndExpression, ApplicationExpression, EqualityExpression,
ExistsExpression, IffExpression, ImpExpression,
IndividualVariableExpression, LambdaExpression,
LogicParser, NegatedExpression, OrExpression,
Variable, is_indvar)
class Error(Exception): pass
class Undefined(Error): pass
def trace(f, *args, **kw):
argspec = inspect.getargspec(f)
d = dict(zip(argspec[0], args))
if d.pop('trace', None):
print()
for item in d.items():
print("%s => %s" % item)
return f(*args, **kw)
def is_rel(s):
"""
Check whether a set represents a relation (of any arity).
:param s: a set containing tuples of str elements
:type s: set
:rtype: bool
"""
# we have the empty relation, i.e. set()
if len(s) == 0:
return True
# all the elements are tuples of the same length
elif s == set([elem for elem in s if isinstance(elem, tuple)]) and\
len(max(s))==len(min(s)):
return True
else:
raise ValueError("Set %r contains sequences of different lengths" % s)
def set2rel(s):
"""
Convert a set containing individuals (strings or numbers) into a set of
unary tuples. Any tuples of strings already in the set are passed through
unchanged.
For example:
- set(['a', 'b']) => set([('a',), ('b',)])
- set([3, 27]) => set([('3',), ('27',)])
:type s: set
:rtype: set of tuple of str
"""
new = set()
for elem in s:
if isinstance(elem, str):
new.add((elem,))
elif isinstance(elem, int):
            new.add((str(elem),))
else:
new.add(elem)
return new
def arity(rel):
"""
Check the arity of a relation.
:type rel: set of tuples
    :rtype: int
"""
if len(rel) == 0:
return 0
return len(list(rel)[0])
class Valuation(dict):
"""
A dictionary which represents a model-theoretic Valuation of non-logical constants.
Keys are strings representing the constants to be interpreted, and values correspond
to individuals (represented as strings) and n-ary relations (represented as sets of tuples
of strings).
An instance of ``Valuation`` will raise a KeyError exception (i.e.,
just behave like a standard dictionary) if indexed with an expression that
is not in its list of symbols.
"""
def __init__(self, iter):
"""
:param iter: a list of (symbol, value) pairs.
"""
dict.__init__(self)
for (sym, val) in iter:
if isinstance(val, str) or isinstance(val, bool):
self[sym] = val
elif isinstance(val, set):
self[sym] = set2rel(val)
else:
msg = textwrap.fill("Error in initializing Valuation. "
"Unrecognized value for symbol '%s':\n%s" % (sym, val), width=66)
raise ValueError(msg)
def __getitem__(self, key):
if key in self:
return dict.__getitem__(self, key)
else:
raise Undefined("Unknown expression: '%s'" % key)
def __str__(self):
return pformat(self)
@property
def domain(self):
"""Set-theoretic domain of the value-space of a Valuation."""
dom = []
for val in self.values():
if isinstance(val, str):
dom.append(val)
elif not isinstance(val, bool):
dom.extend([elem for tuple in val for elem in tuple if elem is not None])
return set(dom)
@property
def symbols(self):
"""The non-logical constants which the Valuation recognizes."""
return sorted(self.keys())
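# Editorial usage sketch, not part of the original module; it mirrors the
# folmodel() demo further down. Individuals stay strings, and sets are
# converted to relations (sets of unary tuples) by set2rel():
#
#     val = Valuation([('adam', 'b1'), ('girl', set(['g1', 'g2']))])
#     val['girl']   # set([('g1',), ('g2',)])
#     val.domain    # set(['b1', 'g1', 'g2'])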
class Assignment(dict):
"""
A dictionary which represents an assignment of values to variables.
    An assignment can only assign values from its domain.
If an unknown expression *a* is passed to a model *M*\ 's
interpretation function *i*, *i* will first check whether *M*\ 's
valuation assigns an interpretation to *a* as a constant, and if
this fails, *i* will delegate the interpretation of *a* to
*g*. *g* only assigns values to individual variables (i.e.,
members of the class ``IndividualVariableExpression`` in the ``logic``
module. If a variable is not assigned a value by *g*, it will raise
an ``Undefined`` exception.
A variable *Assignment* is a mapping from individual variables to
entities in the domain. Individual variables are usually indicated
with the letters ``'x'``, ``'y'``, ``'w'`` and ``'z'``, optionally
followed by an integer (e.g., ``'x0'``, ``'y332'``). Assignments are
created using the ``Assignment`` constructor, which also takes the
domain as a parameter.
>>> from nltk.sem.evaluate import Assignment
>>> dom = set(['u1', 'u2', 'u3', 'u4'])
>>> g3 = Assignment(dom, [('x', 'u1'), ('y', 'u2')])
>>> g3
{'y': 'u2', 'x': 'u1'}
There is also a ``print`` format for assignments which uses a notation
closer to that in logic textbooks:
    >>> print(g3)
g[u2/y][u1/x]
It is also possible to update an assignment using the ``add`` method:
>>> dom = set(['u1', 'u2', 'u3', 'u4'])
>>> g4 = Assignment(dom)
>>> g4.add('x', 'u1')
{'x': 'u1'}
With no arguments, ``purge()`` is equivalent to ``clear()`` on a dictionary:
>>> g4.purge()
>>> g4
{}
:param domain: the domain of discourse
:type domain: set
:param assign: a list of (varname, value) associations
:type assign: list
"""
def __init__(self, domain, assign=None):
dict.__init__(self)
self.domain = domain
if assign:
for (var, val) in assign:
assert val in self.domain,\
"'%s' is not in the domain: %s" % (val, self.domain)
assert is_indvar(var),\
"Wrong format for an Individual Variable: '%s'" % var
self[var] = val
self._addvariant()
def __getitem__(self, key):
if key in self:
return dict.__getitem__(self, key)
else:
raise Undefined("Not recognized as a variable: '%s'" % key)
def copy(self):
new = Assignment(self.domain)
new.update(self)
return new
def purge(self, var=None):
"""
Remove one or all keys (i.e. logic variables) from an
assignment, and update ``self.variant``.
:param var: a Variable acting as a key for the assignment.
"""
if var:
val = self[var]
del self[var]
else:
self.clear()
self._addvariant()
return None
def __str__(self):
"""
Pretty printing for assignments. {'x', 'u'} appears as 'g[u/x]'
"""
gstring = "g"
for (val, var) in self.variant:
gstring += "[%s/%s]" % (val, var)
return gstring
def _addvariant(self):
"""
Create a more pretty-printable version of the assignment.
"""
list = []
for item in self.items():
pair = (item[1], item[0])
list.append(pair)
self.variant = list
return None
def add(self, var, val):
"""
Add a new variable-value pair to the assignment, and update
``self.variant``.
"""
assert val in self.domain,\
"%s is not in the domain %s" % (val, self.domain)
assert is_indvar(var),\
"Wrong format for an Individual Variable: '%s'" % var
self[var] = val
self._addvariant()
return self
class Model(object):
"""
A first order model is a domain *D* of discourse and a valuation *V*.
A domain *D* is a set, and a valuation *V* is a map that associates
expressions with values in the model.
The domain of *V* should be a subset of *D*.
Construct a new ``Model``.
:type domain: set
:param domain: A set of entities representing the domain of discourse of the model.
:type valuation: Valuation
:param valuation: the valuation of the model.
"""
def __init__(self, domain, valuation):
assert isinstance(domain, set)
self.domain = domain
self.valuation = valuation
if not domain.issuperset(valuation.domain):
raise Error("The valuation domain, %s, must be a subset of the model's domain, %s"\
% (valuation.domain, domain))
def __repr__(self):
return "(%r, %r)" % (self.domain, self.valuation)
def __str__(self):
return "Domain = %s,\nValuation = \n%s" % (self.domain, self.valuation)
def evaluate(self, expr, g, trace=None):
"""
Call the ``LogicParser`` to parse input expressions, and
provide a handler for ``satisfy``
that blocks further propagation of the ``Undefined`` error.
        :param expr: a string to be parsed into an ``Expression`` of ``logic``.
:type g: Assignment
:param g: an assignment to individual variables.
:rtype: bool or 'Undefined'
"""
try:
lp = LogicParser()
parsed = lp.parse(expr)
value = self.satisfy(parsed, g, trace=trace)
if trace:
print()
print("'%s' evaluates to %s under M, %s" % (expr, value, g))
return value
except Undefined:
if trace:
print()
print("'%s' is undefined under M, %s" % (expr, g))
return 'Undefined'
def satisfy(self, parsed, g, trace=None):
"""
Recursive interpretation function for a formula of first-order logic.
Raises an ``Undefined`` error when ``parsed`` is an atomic string
but is not a symbol or an individual variable.
:return: Returns a truth value or ``Undefined`` if ``parsed`` is\
complex, and calls the interpretation function ``i`` if ``parsed``\
is atomic.
:param parsed: An expression of ``logic``.
:type g: Assignment
:param g: an assignment to individual variables.
"""
if isinstance(parsed, ApplicationExpression):
function, arguments = parsed.uncurry()
if isinstance(function, AbstractVariableExpression):
#It's a predicate expression ("P(x,y)"), so used uncurried arguments
funval = self.satisfy(function, g)
argvals = tuple([self.satisfy(arg, g) for arg in arguments])
return argvals in funval
else:
#It must be a lambda expression, so use curried form
funval = self.satisfy(parsed.function, g)
argval = self.satisfy(parsed.argument, g)
return funval[argval]
elif isinstance(parsed, NegatedExpression):
return not self.satisfy(parsed.term, g)
elif isinstance(parsed, AndExpression):
return self.satisfy(parsed.first, g) and \
self.satisfy(parsed.second, g)
elif isinstance(parsed, OrExpression):
return self.satisfy(parsed.first, g) or \
self.satisfy(parsed.second, g)
elif isinstance(parsed, ImpExpression):
return (not self.satisfy(parsed.first, g)) or \
self.satisfy(parsed.second, g)
elif isinstance(parsed, IffExpression):
return self.satisfy(parsed.first, g) == \
self.satisfy(parsed.second, g)
elif isinstance(parsed, EqualityExpression):
return self.satisfy(parsed.first, g) == \
self.satisfy(parsed.second, g)
elif isinstance(parsed, AllExpression):
new_g = g.copy()
for u in self.domain:
new_g.add(parsed.variable.name, u)
if not self.satisfy(parsed.term, new_g):
return False
return True
elif isinstance(parsed, ExistsExpression):
new_g = g.copy()
for u in self.domain:
new_g.add(parsed.variable.name, u)
if self.satisfy(parsed.term, new_g):
return True
return False
elif isinstance(parsed, LambdaExpression):
cf = {}
var = parsed.variable.name
for u in self.domain:
val = self.satisfy(parsed.term, g.add(var, u))
# NB the dict would be a lot smaller if we do this:
# if val: cf[u] = val
# But then need to deal with cases where f(a) should yield
# a function rather than just False.
cf[u] = val
return cf
else:
return self.i(parsed, g, trace)
#@decorator(trace_eval)
def i(self, parsed, g, trace=False):
"""
An interpretation function.
Assuming that ``parsed`` is atomic:
- if ``parsed`` is a non-logical constant, calls the valuation *V*
- else if ``parsed`` is an individual variable, calls assignment *g*
- else returns ``Undefined``.
:param parsed: an ``Expression`` of ``logic``.
:type g: Assignment
:param g: an assignment to individual variables.
:return: a semantic value
"""
# If parsed is a propositional letter 'p', 'q', etc, it could be in valuation.symbols
# and also be an IndividualVariableExpression. We want to catch this first case.
# So there is a procedural consequence to the ordering of clauses here:
if parsed.variable.name in self.valuation.symbols:
return self.valuation[parsed.variable.name]
elif isinstance(parsed, IndividualVariableExpression):
return g[parsed.variable.name]
else:
raise Undefined("Can't find a value for %s" % parsed)
def satisfiers(self, parsed, varex, g, trace=None, nesting=0):
"""
Generate the entities from the model's domain that satisfy an open formula.
:param parsed: an open formula
:type parsed: Expression
:param varex: the relevant free individual variable in ``parsed``.
:type varex: VariableExpression or str
:param g: a variable assignment
:type g: Assignment
:return: a set of the entities that satisfy ``parsed``.
"""
spacer = ' '
indent = spacer + (spacer * nesting)
candidates = []
if isinstance(varex, str):
var = Variable(varex)
else:
var = varex
if var in parsed.free():
if trace:
print()
print((spacer * nesting) + "Open formula is '%s' with assignment %s" % (parsed, g))
for u in self.domain:
new_g = g.copy()
new_g.add(var.name, u)
if trace > 1:
lowtrace = trace-1
else:
lowtrace = 0
value = self.satisfy(parsed, new_g, lowtrace)
if trace:
print(indent + "(trying assignment %s)" % new_g)
# parsed == False under g[u/var]?
if value == False:
if trace:
print(indent + "value of '%s' under %s is False" % (parsed, new_g))
# so g[u/var] is a satisfying assignment
else:
candidates.append(u)
if trace:
print(indent + "value of '%s' under %s is %s" % (parsed, new_g, value))
result = set(c for c in candidates)
# var isn't free in parsed
else:
raise Undefined("%s is not free in %s" % (var.name, parsed))
return result
#//////////////////////////////////////////////////////////////////////
# Demo..
#//////////////////////////////////////////////////////////////////////
# number of spacer chars
mult = 30
# Demo 1: Propositional Logic
#################
def propdemo(trace=None):
"""Example of a propositional model."""
global val1, dom1, m1, g1
val1 = Valuation([('P', True), ('Q', True), ('R', False)])
dom1 = set([])
m1 = Model(dom1, val1)
g1 = Assignment(dom1)
print()
print('*' * mult)
print("Propositional Formulas Demo")
print('*' * mult)
print('(Propositional constants treated as nullary predicates)')
print()
print("Model m1:\n", m1)
print('*' * mult)
sentences = [
'(P & Q)',
'(P & R)',
'- P',
'- R',
'- - P',
'- (P & R)',
'(P | R)',
'(R | P)',
'(R | R)',
'(- P | R)',
'(P | - P)',
'(P -> Q)',
'(P -> R)',
'(R -> P)',
'(P <-> P)',
'(R <-> R)',
'(P <-> R)',
]
for sent in sentences:
if trace:
print()
m1.evaluate(sent, g1, trace)
else:
print("The value of '%s' is: %s" % (sent, m1.evaluate(sent, g1)))
# Demo 2: FOL Model
#############
def folmodel(quiet=False, trace=None):
"""Example of a first-order model."""
global val2, v2, dom2, m2, g2
v2 = [('adam', 'b1'), ('betty', 'g1'), ('fido', 'd1'),\
('girl', set(['g1', 'g2'])), ('boy', set(['b1', 'b2'])), ('dog', set(['d1'])),
('love', set([('b1', 'g1'), ('b2', 'g2'), ('g1', 'b1'), ('g2', 'b1')]))]
val2 = Valuation(v2)
dom2 = val2.domain
m2 = Model(dom2, val2)
g2 = Assignment(dom2, [('x', 'b1'), ('y', 'g2')])
if not quiet:
print()
print('*' * mult)
print("Models Demo")
print("*" * mult)
print("Model m2:\n", "-" * 14,"\n", m2)
print("Variable assignment = ", g2)
exprs = ['adam', 'boy', 'love', 'walks', 'x', 'y', 'z']
lp = LogicParser()
parsed_exprs = [lp.parse(e) for e in exprs]
print()
for parsed in parsed_exprs:
try:
print("The interpretation of '%s' in m2 is %s" % (parsed, m2.i(parsed, g2)))
except Undefined:
print("The interpretation of '%s' in m2 is Undefined" % parsed)
applications = [('boy', ('adam')), ('walks', ('adam',)), ('love', ('adam', 'y')), ('love', ('y', 'adam'))]
for (fun, args) in applications:
try:
funval = m2.i(lp.parse(fun), g2)
argsval = tuple(m2.i(lp.parse(arg), g2) for arg in args)
print("%s(%s) evaluates to %s" % (fun, args, argsval in funval))
except Undefined:
print("%s(%s) evaluates to Undefined" % (fun, args))
# Demo 3: FOL
#########
def foldemo(trace=None):
"""
Interpretation of closed expressions in a first-order model.
"""
folmodel(quiet=True)
print()
print('*' * mult)
print("FOL Formulas Demo")
print('*' * mult)
formulas = [
'love (adam, betty)',
'(adam = mia)',
'\\x. (boy(x) | girl(x))',
'\\x. boy(x)(adam)',
'\\x y. love(x, y)',
'\\x y. love(x, y)(adam)(betty)',
'\\x y. love(x, y)(adam, betty)',
'\\x y. (boy(x) & love(x, y))',
'\\x. exists y. (boy(x) & love(x, y))',
'exists z1. boy(z1)',
'exists x. (boy(x) & -(x = adam))',
'exists x. (boy(x) & all y. love(y, x))',
'all x. (boy(x) | girl(x))',
        'all x. (girl(x) -> exists y. boy(y) & love(x, y))', # Every girl loves some boy.
        'exists x. (boy(x) & all y. (girl(y) -> love(y, x)))', # There is some boy that every girl loves.
        'exists x. (boy(x) & all y. (girl(y) -> love(x, y)))', # Some boy loves every girl.
'all x. (dog(x) -> - girl(x))',
'exists x. exists y. (love(x, y) & love(x, y))'
]
for fmla in formulas:
g2.purge()
if trace:
m2.evaluate(fmla, g2, trace)
else:
print("The value of '%s' is: %s" % (fmla, m2.evaluate(fmla, g2)))
# Demo 4: Satisfaction
#############
def satdemo(trace=None):
"""Satisfiers of an open formula in a first order model."""
print()
print('*' * mult)
print("Satisfiers Demo")
print('*' * mult)
folmodel(quiet=True)
formulas = [
'boy(x)',
'(x = x)',
'(boy(x) | girl(x))',
'(boy(x) & girl(x))',
'love(adam, x)',
'love(x, adam)',
'-(x = adam)',
'exists z22. love(x, z22)',
'exists y. love(y, x)',
'all y. (girl(y) -> love(x, y))',
'all y. (girl(y) -> love(y, x))',
'all y. (girl(y) -> (boy(x) & love(y, x)))',
'(boy(x) & all y. (girl(y) -> love(x, y)))',
'(boy(x) & all y. (girl(y) -> love(y, x)))',
'(boy(x) & exists y. (girl(y) & love(y, x)))',
'(girl(x) -> dog(x))',
'all y. (dog(y) -> (x = y))',
'exists y. love(y, x)',
'exists y. (love(adam, y) & love(y, x))'
]
if trace:
print(m2)
lp = LogicParser()
for fmla in formulas:
print(fmla)
lp.parse(fmla)
parsed = [lp.parse(fmla) for fmla in formulas]
for p in parsed:
g2.purge()
print("The satisfiers of '%s' are: %s" % (p, m2.satisfiers(p, 'x', g2, trace)))
def demo(num=0, trace=None):
"""
    Run some demos.
- num = 1: propositional logic demo
- num = 2: first order model demo (only if trace is set)
- num = 3: first order sentences demo
- num = 4: satisfaction of open formulas demo
- any other value: run all the demos
:param trace: trace = 1, or trace = 2 for more verbose tracing
"""
demos = {
1: propdemo,
2: folmodel,
3: foldemo,
4: satdemo}
try:
demos[num](trace=trace)
except KeyError:
for num in demos:
demos[num](trace=trace)
if __name__ == "__main__":
demo(2, trace=0)
| apache-2.0 | -5,430,853,122,839,940,000 | 304,306,398,910,615,040 | 31.198312 | 114 | 0.528109 | false |
caffeinehit/yell | yell/backends/celery.py | 1 | 2316 | from __future__ import absolute_import
from celery.task import Task
from yell import Notification, notify, registry
class CeleryNotificationTask(Task):
""" Dispatch and run the notification. """
def run(self, name=None, backend=None, *args, **kwargs):
"""
The Celery task.
Delivers the notification via all backends returned by :param:`backend`.
"""
assert name is not None, "No 'name' specified to notify"
assert backend is not None, "No 'backend' specified to notify with"
backends = backend().get_backends(*args, **kwargs)
notify(name, backends=backends, *args, **kwargs)
class CeleryNotification(Notification):
"""
Delivers notifications through Celery.
:example:
::
from yell import notify, Notification
class EmailNotification(Notification):
name = 'async'
def notify(self, *args, **kwargs):
# Deliver email
class DBNotification(Notification):
name = 'async'
def notify(self, *args, **kwargs):
# Save to database
class AsyncNotification(CeleryNotification):
name = 'async'
notify('async', backends = [AsyncNotification],
text = "This notification is routed through Celery before being sent and saved")
    In the above example, calling :attr:`yell.notify` will invoke ``EmailNotification`` and
    ``DBNotification`` once the task has been delivered through Celery.
"""
name = None
"""
The name of this notification. Override in subclasses.
"""
def get_backends(self, *args, **kwargs):
"""
Return all backends the task should use to deliver notifications.
By default all backends with the same :attr:`name` except for subclasses
of :class:`CeleryNotifications` will be used.
"""
return filter(lambda cls: not issubclass(cls, self.__class__), registry.notifications[self.name])
def notify(self, *args, **kwargs):
"""
Dispatches the notification to Celery
"""
return CeleryNotificationTask.delay(name=self.name, backend=self.__class__, *args, **kwargs)
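# Editorial note, not part of the original module: at runtime
#
#     notify('async', text='hi')
#
# enqueues CeleryNotificationTask, and the Celery worker then re-runs
# notify() with the non-Celery backends returned by get_backends(), so the
# actual delivery happens on the worker.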
| mit | -8,640,716,685,448,208,000 | 6,029,433,265,600,117,000 | 32.565217 | 105 | 0.603195 | false |
CiNC0/Cartier | cartier-python-resign-linux/tests/test_versioning.py | 1 | 1194 | #!/usr/bin/env python
import os.path
import importlib
import unittest
tests_dir = os.path.abspath(os.path.dirname(__file__))
package_name = tests_dir.split(os.path.sep)[-2].replace('-', '_')
package = importlib.import_module(package_name)
class VersioningTestCase(unittest.TestCase):
def assert_proper_attribute(self, attribute):
try:
assert getattr(package, attribute), (
"{} improperly set".format(attribute))
except AttributeError:
assert False, "missing {}".format(attribute)
def test_version_attribute(self):
self.assert_proper_attribute("__version__")
# test major, minor, and patch are numbers
version_split = package.__version__.split(".")[:3]
assert version_split, "__version__ is not set"
for n in version_split:
try:
int(n)
except ValueError:
assert False, "'{}' is not an integer".format(n)
def test_commit_attribute(self):
self.assert_proper_attribute("__commit__")
def test_build_attribute(self):
self.assert_proper_attribute("__build__")
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -1,530,326,646,234,270,200 | -266,030,313,704,050,700 | 28.85 | 65 | 0.60804 | false |
ortylp/scipy | scipy/special/tests/test_basic.py | 4 | 122266 | # this program corresponds to special.py
### Means test is not done yet
# E Means test is giving error (E)
# F Means test is failing (F)
# EF Means test is giving error and Failing
#! Means test is segfaulting
# 8 Means test runs forever
### test_besselpoly
### test_mathieu_a
### test_mathieu_even_coef
### test_mathieu_odd_coef
### test_modfresnelp
### test_modfresnelm
# test_pbdv_seq
### test_pbvv_seq
### test_sph_harm
# test_sph_in
# test_sph_jn
# test_sph_kn
from __future__ import division, print_function, absolute_import
import itertools
import warnings
import numpy as np
from numpy import array, isnan, r_, arange, finfo, pi, sin, cos, tan, exp, \
log, zeros, sqrt, asarray, inf, nan_to_num, real, arctan, float_
from numpy.testing import assert_equal, assert_almost_equal, \
assert_array_equal, assert_array_almost_equal, assert_approx_equal, \
assert_, rand, dec, TestCase, run_module_suite, assert_allclose, \
assert_raises, assert_array_almost_equal_nulp
from scipy import special
import scipy.special._ufuncs as cephes
from scipy.special import ellipk
from scipy.special._testutils import assert_tol_equal, with_special_errors, \
assert_func_equal
class TestCephes(TestCase):
def test_airy(self):
cephes.airy(0)
def test_airye(self):
cephes.airye(0)
def test_binom(self):
n = np.array([0.264, 4, 5.2, 17])
k = np.array([2, 0.4, 7, 3.3])
nk = np.array(np.broadcast_arrays(n[:,None], k[None,:])
).reshape(2, -1).T
rknown = np.array([[-0.097152, 0.9263051596159367, 0.01858423645695389,
-0.007581020651518199],[6, 2.0214389119675666, 0, 2.9827344527963846],
[10.92, 2.22993515861399, -0.00585728, 10.468891352063146],
[136, 3.5252179590758828, 19448, 1024.5526916174495]])
assert_func_equal(cephes.binom, rknown.ravel(), nk, rtol=1e-13)
# Test branches in implementation
np.random.seed(1234)
n = np.r_[np.arange(-7, 30), 1000*np.random.rand(30) - 500]
k = np.arange(0, 102)
nk = np.array(np.broadcast_arrays(n[:,None], k[None,:])
).reshape(2, -1).T
assert_func_equal(cephes.binom,
cephes.binom(nk[:,0], nk[:,1] * (1 + 1e-15)),
nk,
atol=1e-10, rtol=1e-10)
def test_binom_2(self):
# Test branches in implementation
np.random.seed(1234)
n = np.r_[np.logspace(1, 300, 20)]
k = np.arange(0, 102)
nk = np.array(np.broadcast_arrays(n[:,None], k[None,:])
).reshape(2, -1).T
assert_func_equal(cephes.binom,
cephes.binom(nk[:,0], nk[:,1] * (1 + 1e-15)),
nk,
atol=1e-10, rtol=1e-10)
def test_binom_exact(self):
@np.vectorize
def binom_int(n, k):
n = int(n)
k = int(k)
num = int(1)
den = int(1)
for i in range(1, k+1):
num *= i + n - k
den *= i
return float(num/den)
np.random.seed(1234)
n = np.arange(1, 15)
k = np.arange(0, 15)
nk = np.array(np.broadcast_arrays(n[:,None], k[None,:])
).reshape(2, -1).T
nk = nk[nk[:,0] >= nk[:,1]]
assert_func_equal(cephes.binom,
binom_int(nk[:,0], nk[:,1]),
nk,
atol=0, rtol=0)
def test_bdtr(self):
assert_equal(cephes.bdtr(1,1,0.5),1.0)
def test_bdtri(self):
assert_equal(cephes.bdtri(1,3,0.5),0.5)
def test_bdtrc(self):
assert_equal(cephes.bdtrc(1,3,0.5),0.5)
def test_bdtrin(self):
assert_equal(cephes.bdtrin(1,0,1),5.0)
def test_bdtrik(self):
cephes.bdtrik(1,3,0.5)
def test_bei(self):
assert_equal(cephes.bei(0),0.0)
def test_beip(self):
assert_equal(cephes.beip(0),0.0)
def test_ber(self):
assert_equal(cephes.ber(0),1.0)
def test_berp(self):
assert_equal(cephes.berp(0),0.0)
def test_besselpoly(self):
assert_equal(cephes.besselpoly(0,0,0),1.0)
def test_beta(self):
assert_equal(cephes.beta(1,1),1.0)
assert_allclose(cephes.beta(-100.3, 1e-200), cephes.gamma(1e-200))
assert_allclose(cephes.beta(0.0342, 171), 24.070498359873497,
rtol=1e-13, atol=0)
def test_betainc(self):
assert_equal(cephes.betainc(1,1,1),1.0)
assert_allclose(cephes.betainc(0.0342, 171, 1e-10), 0.55269916901806648)
def test_betaln(self):
assert_equal(cephes.betaln(1,1),0.0)
assert_allclose(cephes.betaln(-100.3, 1e-200), cephes.gammaln(1e-200))
assert_allclose(cephes.betaln(0.0342, 170), 3.1811881124242447,
rtol=1e-14, atol=0)
def test_betaincinv(self):
assert_equal(cephes.betaincinv(1,1,1),1.0)
assert_allclose(cephes.betaincinv(0.0342, 171, 0.25),
8.4231316935498957e-21, rtol=3e-12, atol=0)
def test_beta_inf(self):
assert_(np.isinf(special.beta(-1, 2)))
def test_btdtr(self):
assert_equal(cephes.btdtr(1,1,1),1.0)
def test_btdtri(self):
assert_equal(cephes.btdtri(1,1,1),1.0)
def test_btdtria(self):
assert_equal(cephes.btdtria(1,1,1),5.0)
def test_btdtrib(self):
assert_equal(cephes.btdtrib(1,1,1),5.0)
def test_cbrt(self):
assert_approx_equal(cephes.cbrt(1),1.0)
def test_chdtr(self):
assert_equal(cephes.chdtr(1,0),0.0)
def test_chdtrc(self):
assert_equal(cephes.chdtrc(1,0),1.0)
def test_chdtri(self):
assert_equal(cephes.chdtri(1,1),0.0)
def test_chdtriv(self):
assert_equal(cephes.chdtriv(0,0),5.0)
def test_chndtr(self):
assert_equal(cephes.chndtr(0,1,0),0.0)
p = cephes.chndtr(np.linspace(20, 25, 5), 2, 1.07458615e+02)
assert_allclose(p, [1.21805009e-09, 2.81979982e-09, 6.25652736e-09,
1.33520017e-08, 2.74909967e-08],
rtol=1e-6, atol=0)
assert_almost_equal(cephes.chndtr(np.inf, np.inf, 0), 2.0)
assert_almost_equal(cephes.chndtr(2, 1, np.inf), 0.0)
assert_(np.isnan(cephes.chndtr(np.nan, 1, 2)))
assert_(np.isnan(cephes.chndtr(5, np.nan, 2)))
assert_(np.isnan(cephes.chndtr(5, 1, np.nan)))
def test_chndtridf(self):
assert_equal(cephes.chndtridf(0,0,1),5.0)
def test_chndtrinc(self):
assert_equal(cephes.chndtrinc(0,1,0),5.0)
def test_chndtrix(self):
assert_equal(cephes.chndtrix(0,1,0),0.0)
def test_cosdg(self):
assert_equal(cephes.cosdg(0),1.0)
def test_cosm1(self):
assert_equal(cephes.cosm1(0),0.0)
def test_cotdg(self):
assert_almost_equal(cephes.cotdg(45),1.0)
def test_dawsn(self):
assert_equal(cephes.dawsn(0),0.0)
assert_allclose(cephes.dawsn(1.23), 0.50053727749081767)
def test_diric(self):
# Test behavior near multiples of 2pi. Regression test for issue
# described in gh-4001.
n_odd = [1, 5, 25]
x = np.array(2*np.pi + 5e-5).astype(np.float32)
assert_almost_equal(special.diric(x, n_odd), 1.0, decimal=7)
x = np.array(2*np.pi + 1e-9).astype(np.float64)
assert_almost_equal(special.diric(x, n_odd), 1.0, decimal=15)
x = np.array(2*np.pi + 1e-15).astype(np.float64)
assert_almost_equal(special.diric(x, n_odd), 1.0, decimal=15)
if hasattr(np, 'float128'):
# No float128 available in 32-bit numpy
x = np.array(2*np.pi + 1e-12).astype(np.float128)
assert_almost_equal(special.diric(x, n_odd), 1.0, decimal=19)
n_even = [2, 4, 24]
x = np.array(2*np.pi + 1e-9).astype(np.float64)
assert_almost_equal(special.diric(x, n_even), -1.0, decimal=15)
# Test at some values not near a multiple of pi
x = np.arange(0.2*np.pi, 1.0*np.pi, 0.2*np.pi)
octave_result = [0.872677996249965, 0.539344662916632,
0.127322003750035, -0.206011329583298]
assert_almost_equal(special.diric(x, 3), octave_result, decimal=15)
def test_diric_broadcasting(self):
x = np.arange(5)
n = np.array([1, 3, 7])
assert_(special.diric(x[:, np.newaxis], n).shape == (x.size, n.size))
def test_ellipe(self):
assert_equal(cephes.ellipe(1),1.0)
def test_ellipeinc(self):
assert_equal(cephes.ellipeinc(0,1),0.0)
def test_ellipj(self):
cephes.ellipj(0,1)
def test_ellipk(self):
assert_allclose(ellipk(0), pi/2)
def test_ellipkinc(self):
assert_equal(cephes.ellipkinc(0,0),0.0)
def test_erf(self):
assert_equal(cephes.erf(0),0.0)
def test_erfc(self):
assert_equal(cephes.erfc(0),1.0)
def test_exp1(self):
cephes.exp1(1)
def test_expi(self):
cephes.expi(1)
def test_expn(self):
cephes.expn(1,1)
def test_exp1_reg(self):
# Regression for #834
a = cephes.exp1(-complex(19.9999990))
b = cephes.exp1(-complex(19.9999991))
assert_array_almost_equal(a.imag, b.imag)
def test_exp10(self):
assert_approx_equal(cephes.exp10(2),100.0)
def test_exp2(self):
assert_equal(cephes.exp2(2),4.0)
def test_expm1(self):
assert_equal(cephes.expm1(0),0.0)
def test_fdtr(self):
assert_equal(cephes.fdtr(1,1,0),0.0)
def test_fdtrc(self):
assert_equal(cephes.fdtrc(1,1,0),1.0)
def test_fdtri(self):
# cephes.fdtri(1,1,0.5) #BUG: gives NaN, should be 1
assert_allclose(cephes.fdtri(1, 1, [0.499, 0.501]),
array([0.9937365, 1.00630298]), rtol=1e-6)
def test_fdtridfd(self):
assert_equal(cephes.fdtridfd(1,0,0),5.0)
def test_fresnel(self):
assert_equal(cephes.fresnel(0),(0.0,0.0))
def test_gamma(self):
assert_equal(cephes.gamma(5),24.0)
def test_gammainc(self):
assert_equal(cephes.gammainc(5,0),0.0)
def test_gammaincc(self):
assert_equal(cephes.gammaincc(5,0),1.0)
def test_gammainccinv(self):
assert_equal(cephes.gammainccinv(5,1),0.0)
def test_gammaln(self):
cephes.gammaln(10)
def test_gammasgn(self):
vals = np.array([-4, -3.5, -2.3, 1, 4.2], np.float64)
assert_array_equal(cephes.gammasgn(vals), np.sign(cephes.rgamma(vals)))
def test_gdtr(self):
assert_equal(cephes.gdtr(1,1,0),0.0)
def test_gdtrc(self):
assert_equal(cephes.gdtrc(1,1,0),1.0)
def test_gdtria(self):
assert_equal(cephes.gdtria(0,1,1),0.0)
def test_gdtrib(self):
cephes.gdtrib(1,0,1)
# assert_equal(cephes.gdtrib(1,0,1),5.0)
def test_gdtrix(self):
cephes.gdtrix(1,1,.1)
def test_hankel1(self):
cephes.hankel1(1,1)
def test_hankel1e(self):
cephes.hankel1e(1,1)
def test_hankel2(self):
cephes.hankel2(1,1)
def test_hankel2e(self):
cephes.hankel2e(1,1)
def test_hyp1f1(self):
assert_approx_equal(cephes.hyp1f1(1,1,1), exp(1.0))
assert_approx_equal(cephes.hyp1f1(3,4,-6), 0.026056422099537251095)
cephes.hyp1f1(1,1,1)
def test_hyp1f2(self):
cephes.hyp1f2(1,1,1,1)
def test_hyp2f0(self):
cephes.hyp2f0(1,1,1,1)
def test_hyp2f1(self):
assert_equal(cephes.hyp2f1(1,1,1,0),1.0)
def test_hyp3f0(self):
assert_equal(cephes.hyp3f0(1,1,1,0),(1.0,0.0))
def test_hyperu(self):
assert_equal(cephes.hyperu(0,1,1),1.0)
def test_i0(self):
assert_equal(cephes.i0(0),1.0)
def test_i0e(self):
assert_equal(cephes.i0e(0),1.0)
def test_i1(self):
assert_equal(cephes.i1(0),0.0)
def test_i1e(self):
assert_equal(cephes.i1e(0),0.0)
def test_it2i0k0(self):
cephes.it2i0k0(1)
def test_it2j0y0(self):
cephes.it2j0y0(1)
def test_it2struve0(self):
cephes.it2struve0(1)
def test_itairy(self):
cephes.itairy(1)
def test_iti0k0(self):
assert_equal(cephes.iti0k0(0),(0.0,0.0))
def test_itj0y0(self):
assert_equal(cephes.itj0y0(0),(0.0,0.0))
def test_itmodstruve0(self):
assert_equal(cephes.itmodstruve0(0),0.0)
def test_itstruve0(self):
assert_equal(cephes.itstruve0(0),0.0)
def test_iv(self):
assert_equal(cephes.iv(1,0),0.0)
def _check_ive(self):
assert_equal(cephes.ive(1,0),0.0)
def test_j0(self):
assert_equal(cephes.j0(0),1.0)
def test_j1(self):
assert_equal(cephes.j1(0),0.0)
def test_jn(self):
assert_equal(cephes.jn(0,0),1.0)
def test_jv(self):
assert_equal(cephes.jv(0,0),1.0)
def _check_jve(self):
assert_equal(cephes.jve(0,0),1.0)
def test_k0(self):
cephes.k0(2)
def test_k0e(self):
cephes.k0e(2)
def test_k1(self):
cephes.k1(2)
def test_k1e(self):
cephes.k1e(2)
def test_kei(self):
cephes.kei(2)
def test_keip(self):
assert_equal(cephes.keip(0),0.0)
def test_ker(self):
cephes.ker(2)
def test_kerp(self):
cephes.kerp(2)
def _check_kelvin(self):
cephes.kelvin(2)
def test_kn(self):
cephes.kn(1,1)
def test_kolmogi(self):
assert_equal(cephes.kolmogi(1),0.0)
assert_(np.isnan(cephes.kolmogi(np.nan)))
def test_kolmogorov(self):
assert_equal(cephes.kolmogorov(0),1.0)
def _check_kv(self):
cephes.kv(1,1)
def _check_kve(self):
cephes.kve(1,1)
def test_log1p(self):
assert_equal(cephes.log1p(0),0.0)
def test_lpmv(self):
assert_equal(cephes.lpmv(0,0,1),1.0)
def test_mathieu_a(self):
assert_equal(cephes.mathieu_a(1,0),1.0)
def test_mathieu_b(self):
assert_equal(cephes.mathieu_b(1,0),1.0)
def test_mathieu_cem(self):
assert_equal(cephes.mathieu_cem(1,0,0),(1.0,0.0))
# Test AMS 20.2.27
@np.vectorize
def ce_smallq(m, q, z):
z *= np.pi/180
if m == 0:
return 2**(-0.5) * (1 - .5*q*cos(2*z)) # + O(q^2)
elif m == 1:
return cos(z) - q/8 * cos(3*z) # + O(q^2)
elif m == 2:
return cos(2*z) - q*(cos(4*z)/12 - 1/4) # + O(q^2)
else:
return cos(m*z) - q*(cos((m+2)*z)/(4*(m+1)) - cos((m-2)*z)/(4*(m-1))) # + O(q^2)
m = np.arange(0, 100)
q = np.r_[0, np.logspace(-30, -9, 10)]
assert_allclose(cephes.mathieu_cem(m[:,None], q[None,:], 0.123)[0],
ce_smallq(m[:,None], q[None,:], 0.123),
rtol=1e-14, atol=0)
def test_mathieu_sem(self):
assert_equal(cephes.mathieu_sem(1,0,0),(0.0,1.0))
# Test AMS 20.2.27
@np.vectorize
def se_smallq(m, q, z):
z *= np.pi/180
if m == 1:
return sin(z) - q/8 * sin(3*z) # + O(q^2)
elif m == 2:
return sin(2*z) - q*sin(4*z)/12 # + O(q^2)
else:
return sin(m*z) - q*(sin((m+2)*z)/(4*(m+1)) - sin((m-2)*z)/(4*(m-1))) # + O(q^2)
m = np.arange(1, 100)
q = np.r_[0, np.logspace(-30, -9, 10)]
assert_allclose(cephes.mathieu_sem(m[:,None], q[None,:], 0.123)[0],
se_smallq(m[:,None], q[None,:], 0.123),
rtol=1e-14, atol=0)
def test_mathieu_modcem1(self):
assert_equal(cephes.mathieu_modcem1(1,0,0),(0.0,0.0))
def test_mathieu_modcem2(self):
cephes.mathieu_modcem2(1,1,1)
# Test reflection relation AMS 20.6.19
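        # i.e. Mc2(m, q, -z) = -Mc2(m, q, z) - 2*fr*Mc1(m, q, z),
        # with fr = -Mc2(m, q, 0) / Mc1(m, q, 0), as coded below.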
m = np.arange(0, 4)[:,None,None]
q = np.r_[np.logspace(-2, 2, 10)][None,:,None]
z = np.linspace(0, 1, 7)[None,None,:]
y1 = cephes.mathieu_modcem2(m, q, -z)[0]
fr = -cephes.mathieu_modcem2(m, q, 0)[0] / cephes.mathieu_modcem1(m, q, 0)[0]
y2 = -cephes.mathieu_modcem2(m, q, z)[0] - 2*fr*cephes.mathieu_modcem1(m, q, z)[0]
assert_allclose(y1, y2, rtol=1e-10)
def test_mathieu_modsem1(self):
assert_equal(cephes.mathieu_modsem1(1,0,0),(0.0,0.0))
def test_mathieu_modsem2(self):
cephes.mathieu_modsem2(1,1,1)
# Test reflection relation AMS 20.6.20
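        # i.e. Ms2(m, q, -z) = Ms2(m, q, z) - 2*fr*Ms1(m, q, z),
        # with fr = Ms2'(m, q, 0) / Ms1'(m, q, 0), as coded below.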
m = np.arange(1, 4)[:,None,None]
q = np.r_[np.logspace(-2, 2, 10)][None,:,None]
z = np.linspace(0, 1, 7)[None,None,:]
y1 = cephes.mathieu_modsem2(m, q, -z)[0]
fr = cephes.mathieu_modsem2(m, q, 0)[1] / cephes.mathieu_modsem1(m, q, 0)[1]
y2 = cephes.mathieu_modsem2(m, q, z)[0] - 2*fr*cephes.mathieu_modsem1(m, q, z)[0]
assert_allclose(y1, y2, rtol=1e-10)
def test_mathieu_overflow(self):
# Check that these return NaNs instead of causing a SEGV
assert_equal(cephes.mathieu_cem(10000, 0, 1.3), (np.nan, np.nan))
assert_equal(cephes.mathieu_sem(10000, 0, 1.3), (np.nan, np.nan))
assert_equal(cephes.mathieu_cem(10000, 1.5, 1.3), (np.nan, np.nan))
assert_equal(cephes.mathieu_sem(10000, 1.5, 1.3), (np.nan, np.nan))
assert_equal(cephes.mathieu_modcem1(10000, 1.5, 1.3), (np.nan, np.nan))
assert_equal(cephes.mathieu_modsem1(10000, 1.5, 1.3), (np.nan, np.nan))
assert_equal(cephes.mathieu_modcem2(10000, 1.5, 1.3), (np.nan, np.nan))
assert_equal(cephes.mathieu_modsem2(10000, 1.5, 1.3), (np.nan, np.nan))
def test_mathieu_ticket_1847(self):
# Regression test --- this call had some out-of-bounds access
# and could return nan occasionally
for k in range(60):
v = cephes.mathieu_modsem2(2, 100, -1)
        # Values from ACM TOMS 804 (derivative by numerical differentiation)
assert_allclose(v[0], 0.1431742913063671074347, rtol=1e-10)
assert_allclose(v[1], 0.9017807375832909144719, rtol=1e-4)
def test_modfresnelm(self):
cephes.modfresnelm(0)
def test_modfresnelp(self):
cephes.modfresnelp(0)
def _check_modstruve(self):
assert_equal(cephes.modstruve(1,0),0.0)
def test_nbdtr(self):
assert_equal(cephes.nbdtr(1,1,1),1.0)
def test_nbdtrc(self):
assert_equal(cephes.nbdtrc(1,1,1),0.0)
def test_nbdtri(self):
assert_equal(cephes.nbdtri(1,1,1),1.0)
def __check_nbdtrik(self):
cephes.nbdtrik(1,.4,.5)
def test_nbdtrin(self):
assert_equal(cephes.nbdtrin(1,0,0),5.0)
def test_ncfdtr(self):
assert_equal(cephes.ncfdtr(1,1,1,0),0.0)
def test_ncfdtri(self):
assert_equal(cephes.ncfdtri(1,1,1,0),0.0)
def test_ncfdtridfd(self):
cephes.ncfdtridfd(1,0.5,0,1)
def __check_ncfdtridfn(self):
cephes.ncfdtridfn(1,0.5,0,1)
def __check_ncfdtrinc(self):
cephes.ncfdtrinc(1,0.5,0,1)
def test_nctdtr(self):
assert_equal(cephes.nctdtr(1,0,0),0.5)
assert_equal(cephes.nctdtr(9, 65536, 45), 0.0)
assert_approx_equal(cephes.nctdtr(np.inf, 1., 1.), 0.5, 5)
assert_(np.isnan(cephes.nctdtr(2., np.inf, 10.)))
assert_approx_equal(cephes.nctdtr(2., 1., np.inf), 1.)
assert_(np.isnan(cephes.nctdtr(np.nan, 1., 1.)))
assert_(np.isnan(cephes.nctdtr(2., np.nan, 1.)))
assert_(np.isnan(cephes.nctdtr(2., 1., np.nan)))
def __check_nctdtridf(self):
cephes.nctdtridf(1,0.5,0)
def test_nctdtrinc(self):
cephes.nctdtrinc(1,0,0)
def test_nctdtrit(self):
cephes.nctdtrit(.1,0.2,.5)
def test_ndtr(self):
assert_equal(cephes.ndtr(0), 0.5)
assert_almost_equal(cephes.ndtr(1), 0.84134474606)
def test_ndtri(self):
assert_equal(cephes.ndtri(0.5),0.0)
def test_nrdtrimn(self):
assert_approx_equal(cephes.nrdtrimn(0.5,1,1),1.0)
def test_nrdtrisd(self):
assert_tol_equal(cephes.nrdtrisd(0.5,0.5,0.5), 0.0,
atol=0, rtol=0)
def test_obl_ang1(self):
cephes.obl_ang1(1,1,1,0)
def test_obl_ang1_cv(self):
result = cephes.obl_ang1_cv(1,1,1,1,0)
assert_almost_equal(result[0],1.0)
assert_almost_equal(result[1],0.0)
def _check_obl_cv(self):
assert_equal(cephes.obl_cv(1,1,0),2.0)
def test_obl_rad1(self):
cephes.obl_rad1(1,1,1,0)
def test_obl_rad1_cv(self):
cephes.obl_rad1_cv(1,1,1,1,0)
def test_obl_rad2(self):
cephes.obl_rad2(1,1,1,0)
def test_obl_rad2_cv(self):
cephes.obl_rad2_cv(1,1,1,1,0)
def test_pbdv(self):
assert_equal(cephes.pbdv(1,0),(0.0,1.0))
def test_pbvv(self):
cephes.pbvv(1,0)
def test_pbwa(self):
cephes.pbwa(1,0)
def test_pdtr(self):
val = cephes.pdtr(0, 1)
assert_almost_equal(val, np.exp(-1))
# Edge case: m = 0.
val = cephes.pdtr([0, 1, 2], 0.0)
assert_array_equal(val, [1, 1, 1])
def test_pdtrc(self):
val = cephes.pdtrc(0, 1)
assert_almost_equal(val, 1 - np.exp(-1))
# Edge case: m = 0.
val = cephes.pdtrc([0, 1, 2], 0.0)
assert_array_equal(val, [0, 0, 0])
def test_pdtri(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", RuntimeWarning)
cephes.pdtri(0.5,0.5)
def test_pdtrik(self):
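        # pdtrik inverts the Poisson CDF in k; since pdtr(k, m) equals
        # gammaincc(k + 1, m), round-tripping through gammaincc checks it.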
k = cephes.pdtrik(0.5, 1)
assert_almost_equal(cephes.gammaincc(k + 1, 1), 0.5)
# Edge case: m = 0 or very small.
k = cephes.pdtrik([[0], [0.25], [0.95]], [0, 1e-20, 1e-6])
assert_array_equal(k, np.zeros((3, 3)))
def test_pro_ang1(self):
cephes.pro_ang1(1,1,1,0)
def test_pro_ang1_cv(self):
assert_array_almost_equal(cephes.pro_ang1_cv(1,1,1,1,0),
array((1.0,0.0)))
def _check_pro_cv(self):
assert_equal(cephes.pro_cv(1,1,0),2.0)
def test_pro_rad1(self):
cephes.pro_rad1(1,1,1,0.1)
def test_pro_rad1_cv(self):
cephes.pro_rad1_cv(1,1,1,1,0)
def test_pro_rad2(self):
cephes.pro_rad2(1,1,1,0)
def test_pro_rad2_cv(self):
cephes.pro_rad2_cv(1,1,1,1,0)
def test_psi(self):
cephes.psi(1)
def test_radian(self):
assert_equal(cephes.radian(0,0,0),0)
def test_rgamma(self):
assert_equal(cephes.rgamma(1),1.0)
def test_round(self):
assert_equal(cephes.round(3.4),3.0)
assert_equal(cephes.round(-3.4),-3.0)
assert_equal(cephes.round(3.6),4.0)
assert_equal(cephes.round(-3.6),-4.0)
assert_equal(cephes.round(3.5),4.0)
assert_equal(cephes.round(-3.5),-4.0)
def test_shichi(self):
cephes.shichi(1)
def test_sici(self):
cephes.sici(1)
s, c = cephes.sici(np.inf)
assert_almost_equal(s, np.pi * 0.5)
assert_almost_equal(c, 0)
s, c = cephes.sici(-np.inf)
assert_almost_equal(s, -np.pi * 0.5)
assert_(np.isnan(c), "cosine integral(-inf) is not nan")
def test_sindg(self):
assert_equal(cephes.sindg(90),1.0)
def test_smirnov(self):
assert_equal(cephes.smirnov(1,.1),0.9)
assert_(np.isnan(cephes.smirnov(1,np.nan)))
def test_smirnovi(self):
assert_almost_equal(cephes.smirnov(1,cephes.smirnovi(1,0.4)),0.4)
assert_almost_equal(cephes.smirnov(1,cephes.smirnovi(1,0.6)),0.6)
assert_(np.isnan(cephes.smirnovi(1,np.nan)))
def test_spence(self):
assert_equal(cephes.spence(1),0.0)
def test_stdtr(self):
assert_equal(cephes.stdtr(1,0),0.5)
assert_almost_equal(cephes.stdtr(1,1), 0.75)
assert_almost_equal(cephes.stdtr(1,2), 0.852416382349)
def test_stdtridf(self):
cephes.stdtridf(0.7,1)
def test_stdtrit(self):
cephes.stdtrit(1,0.7)
def test_struve(self):
assert_equal(cephes.struve(0,0),0.0)
def test_tandg(self):
assert_equal(cephes.tandg(45),1.0)
def test_tklmbda(self):
assert_almost_equal(cephes.tklmbda(1,1),1.0)
def test_y0(self):
cephes.y0(1)
def test_y1(self):
cephes.y1(1)
def test_yn(self):
cephes.yn(1,1)
def test_yv(self):
cephes.yv(1,1)
def _check_yve(self):
cephes.yve(1,1)
def test_zeta(self):
cephes.zeta(2,2)
def test_zetac(self):
assert_equal(cephes.zetac(0),-1.5)
def test_wofz(self):
z = [complex(624.2,-0.26123), complex(-0.4,3.), complex(0.6,2.),
complex(-1.,1.), complex(-1.,-9.), complex(-1.,9.),
complex(-0.0000000234545,1.1234), complex(-3.,5.1),
complex(-53,30.1), complex(0.0,0.12345),
complex(11,1), complex(-22,-2), complex(9,-28),
complex(21,-33), complex(1e5,1e5), complex(1e14,1e14)
]
w = [
complex(-3.78270245518980507452677445620103199303131110e-7,
0.000903861276433172057331093754199933411710053155),
complex(0.1764906227004816847297495349730234591778719532788,
-0.02146550539468457616788719893991501311573031095617),
complex(0.2410250715772692146133539023007113781272362309451,
0.06087579663428089745895459735240964093522265589350),
complex(0.30474420525691259245713884106959496013413834051768,
-0.20821893820283162728743734725471561394145872072738),
complex(7.317131068972378096865595229600561710140617977e34,
8.321873499714402777186848353320412813066170427e34),
complex(0.0615698507236323685519612934241429530190806818395,
-0.00676005783716575013073036218018565206070072304635),
complex(0.3960793007699874918961319170187598400134746631,
-5.593152259116644920546186222529802777409274656e-9),
complex(0.08217199226739447943295069917990417630675021771804,
-0.04701291087643609891018366143118110965272615832184),
complex(0.00457246000350281640952328010227885008541748668738,
-0.00804900791411691821818731763401840373998654987934),
complex(0.8746342859608052666092782112565360755791467973338452,
0.),
complex(0.00468190164965444174367477874864366058339647648741,
0.0510735563901306197993676329845149741675029197050),
complex(-0.0023193175200187620902125853834909543869428763219,
-0.025460054739731556004902057663500272721780776336),
complex(9.11463368405637174660562096516414499772662584e304,
3.97101807145263333769664875189354358563218932e305),
complex(-4.4927207857715598976165541011143706155432296e281,
-2.8019591213423077494444700357168707775769028e281),
complex(2.820947917809305132678577516325951485807107151e-6,
2.820947917668257736791638444590253942253354058e-6),
complex(2.82094791773878143474039725787438662716372268e-15,
2.82094791773878143474039725773333923127678361e-15)
]
assert_func_equal(cephes.wofz, w, z, rtol=1e-13)
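        # The reference values above follow the Faddeeva function definition
        # w(z) = exp(-z**2) * erfc(-1j*z) (cf. DLMF 7.2.3).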
class TestAiry(TestCase):
def test_airy(self):
        # This tests the airy function to ensure 8-decimal-place accuracy in computation
x = special.airy(.99)
assert_array_almost_equal(x,array([0.13689066,-0.16050153,1.19815925,0.92046818]),8)
x = special.airy(.41)
assert_array_almost_equal(x,array([0.25238916,-.23480512,0.80686202,0.51053919]),8)
x = special.airy(-.36)
assert_array_almost_equal(x,array([0.44508477,-0.23186773,0.44939534,0.48105354]),8)
def test_airye(self):
a = special.airye(0.01)
b = special.airy(0.01)
b1 = [None]*4
for n in range(2):
b1[n] = b[n]*exp(2.0/3.0*0.01*sqrt(0.01))
for n in range(2,4):
b1[n] = b[n]*exp(-abs(real(2.0/3.0*0.01*sqrt(0.01))))
assert_array_almost_equal(a,b1,6)
def test_bi_zeros(self):
bi = special.bi_zeros(2)
bia = (array([-1.17371322, -3.2710930]),
array([-2.29443968, -4.07315509]),
array([-0.45494438, 0.39652284]),
array([0.60195789, -0.76031014]))
assert_array_almost_equal(bi,bia,4)
bi = special.bi_zeros(5)
assert_array_almost_equal(bi[0],array([-1.173713222709127,
-3.271093302836352,
-4.830737841662016,
-6.169852128310251,
-7.376762079367764]),11)
assert_array_almost_equal(bi[1],array([-2.294439682614122,
-4.073155089071828,
-5.512395729663599,
-6.781294445990305,
-7.940178689168587]),10)
assert_array_almost_equal(bi[2],array([-0.454944383639657,
0.396522836094465,
-0.367969161486959,
0.349499116831805,
-0.336026240133662]),11)
assert_array_almost_equal(bi[3],array([0.601957887976239,
-0.760310141492801,
0.836991012619261,
-0.88947990142654,
0.929983638568022]),10)
def test_ai_zeros(self):
ai = special.ai_zeros(1)
assert_array_almost_equal(ai,(array([-2.33810741]),
array([-1.01879297]),
array([0.5357]),
array([0.7012])),4)
def test_ai_zeros_big(self):
z, zp, ai_zpx, aip_zx = special.ai_zeros(50000)
ai_z, aip_z, _, _ = special.airy(z)
ai_zp, aip_zp, _, _ = special.airy(zp)
ai_envelope = 1/abs(z)**(1./4)
aip_envelope = abs(zp)**(1./4)
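        # On the negative real axis Ai oscillates with amplitude ~ |z|**(-1/4)
        # and Ai' with ~ |z|**(1/4), so normalizing by these envelopes makes a
        # fixed absolute tolerance meaningful across all 50000 zeros.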
# Check values
assert_allclose(ai_zpx, ai_zp, rtol=1e-10)
assert_allclose(aip_zx, aip_z, rtol=1e-10)
# Check they are zeros
assert_allclose(ai_z/ai_envelope, 0, atol=1e-10, rtol=0)
assert_allclose(aip_zp/aip_envelope, 0, atol=1e-10, rtol=0)
# Check first zeros, DLMF 9.9.1
assert_allclose(z[:6],
[-2.3381074105, -4.0879494441, -5.5205598281,
-6.7867080901, -7.9441335871, -9.0226508533], rtol=1e-10)
assert_allclose(zp[:6],
[-1.0187929716, -3.2481975822, -4.8200992112,
-6.1633073556, -7.3721772550, -8.4884867340], rtol=1e-10)
def test_bi_zeros_big(self):
z, zp, bi_zpx, bip_zx = special.bi_zeros(50000)
_, _, bi_z, bip_z = special.airy(z)
_, _, bi_zp, bip_zp = special.airy(zp)
bi_envelope = 1/abs(z)**(1./4)
bip_envelope = abs(zp)**(1./4)
# Check values
assert_allclose(bi_zpx, bi_zp, rtol=1e-10)
assert_allclose(bip_zx, bip_z, rtol=1e-10)
# Check they are zeros
assert_allclose(bi_z/bi_envelope, 0, atol=1e-10, rtol=0)
assert_allclose(bip_zp/bip_envelope, 0, atol=1e-10, rtol=0)
# Check first zeros, DLMF 9.9.2
assert_allclose(z[:6],
[-1.1737132227, -3.2710933028, -4.8307378417,
-6.1698521283, -7.3767620794, -8.4919488465], rtol=1e-10)
assert_allclose(zp[:6],
[-2.2944396826, -4.0731550891, -5.5123957297,
-6.7812944460, -7.9401786892, -9.0195833588], rtol=1e-10)
class TestAssocLaguerre(TestCase):
def test_assoc_laguerre(self):
a1 = special.genlaguerre(11,1)
a2 = special.assoc_laguerre(.2,11,1)
assert_array_almost_equal(a2,a1(.2),8)
a2 = special.assoc_laguerre(1,11,1)
assert_array_almost_equal(a2,a1(1),8)
class TestBesselpoly(TestCase):
def test_besselpoly(self):
pass
class TestKelvin(TestCase):
def test_bei(self):
mbei = special.bei(2)
assert_almost_equal(mbei, 0.9722916273066613,5) # this may not be exact
def test_beip(self):
mbeip = special.beip(2)
assert_almost_equal(mbeip,0.91701361338403631,5) # this may not be exact
def test_ber(self):
mber = special.ber(2)
assert_almost_equal(mber,0.75173418271380821,5) # this may not be exact
def test_berp(self):
mberp = special.berp(2)
assert_almost_equal(mberp,-0.49306712470943909,5) # this may not be exact
def test_bei_zeros(self):
# Abramowitz & Stegun, Table 9.12
bi = special.bei_zeros(5)
assert_array_almost_equal(bi,array([5.02622,
9.45541,
13.89349,
18.33398,
22.77544]),4)
def test_beip_zeros(self):
bip = special.beip_zeros(5)
assert_array_almost_equal(bip,array([3.772673304934953,
8.280987849760042,
12.742147523633703,
17.193431752512542,
21.641143941167325]),8)
def test_ber_zeros(self):
ber = special.ber_zeros(5)
assert_array_almost_equal(ber,array([2.84892,
7.23883,
11.67396,
16.11356,
20.55463]),4)
def test_berp_zeros(self):
brp = special.berp_zeros(5)
assert_array_almost_equal(brp,array([6.03871,
10.51364,
14.96844,
19.41758,
23.86430]),4)
def test_kelvin(self):
mkelv = special.kelvin(2)
assert_array_almost_equal(mkelv,(special.ber(2) + special.bei(2)*1j,
special.ker(2) + special.kei(2)*1j,
special.berp(2) + special.beip(2)*1j,
special.kerp(2) + special.keip(2)*1j),8)
def test_kei(self):
mkei = special.kei(2)
assert_almost_equal(mkei,-0.20240006776470432,5)
def test_keip(self):
mkeip = special.keip(2)
assert_almost_equal(mkeip,0.21980790991960536,5)
def test_ker(self):
mker = special.ker(2)
assert_almost_equal(mker,-0.041664513991509472,5)
def test_kerp(self):
mkerp = special.kerp(2)
assert_almost_equal(mkerp,-0.10660096588105264,5)
def test_kei_zeros(self):
kei = special.kei_zeros(5)
assert_array_almost_equal(kei,array([3.91467,
8.34422,
12.78256,
17.22314,
21.66464]),4)
def test_keip_zeros(self):
keip = special.keip_zeros(5)
assert_array_almost_equal(keip,array([4.93181,
9.40405,
13.85827,
18.30717,
22.75379]),4)
# numbers come from 9.9 of A&S pg. 381
def test_kelvin_zeros(self):
tmp = special.kelvin_zeros(5)
berz,beiz,kerz,keiz,berpz,beipz,kerpz,keipz = tmp
assert_array_almost_equal(berz,array([2.84892,
7.23883,
11.67396,
16.11356,
20.55463]),4)
assert_array_almost_equal(beiz,array([5.02622,
9.45541,
13.89349,
18.33398,
22.77544]),4)
assert_array_almost_equal(kerz,array([1.71854,
6.12728,
10.56294,
15.00269,
19.44382]),4)
assert_array_almost_equal(keiz,array([3.91467,
8.34422,
12.78256,
17.22314,
21.66464]),4)
assert_array_almost_equal(berpz,array([6.03871,
10.51364,
14.96844,
19.41758,
23.86430]),4)
assert_array_almost_equal(beipz,array([3.77267,
# table from 1927 had 3.77320
# but this is more accurate
8.28099,
12.74215,
17.19343,
21.64114]),4)
assert_array_almost_equal(kerpz,array([2.66584,
7.17212,
11.63218,
16.08312,
20.53068]),4)
assert_array_almost_equal(keipz,array([4.93181,
9.40405,
13.85827,
18.30717,
22.75379]),4)
def test_ker_zeros(self):
ker = special.ker_zeros(5)
assert_array_almost_equal(ker,array([1.71854,
6.12728,
10.56294,
15.00269,
19.44381]),4)
def test_kerp_zeros(self):
kerp = special.kerp_zeros(5)
assert_array_almost_equal(kerp,array([2.66584,
7.17212,
11.63218,
16.08312,
20.53068]),4)
class TestBernoulli(TestCase):
def test_bernoulli(self):
brn = special.bernoulli(5)
assert_array_almost_equal(brn,array([1.0000,
-0.5000,
0.1667,
0.0000,
-0.0333,
0.0000]),4)
class TestBeta(TestCase):
def test_beta(self):
bet = special.beta(2,4)
betg = (special.gamma(2)*special.gamma(4))/special.gamma(6)
assert_almost_equal(bet,betg,8)
def test_betaln(self):
betln = special.betaln(2,4)
bet = log(abs(special.beta(2,4)))
assert_almost_equal(betln,bet,8)
def test_betainc(self):
btinc = special.betainc(1,1,.2)
assert_almost_equal(btinc,0.2,8)
def test_betaincinv(self):
y = special.betaincinv(2,4,.5)
comp = special.betainc(2,4,y)
assert_almost_equal(comp,.5,5)
class TestCombinatorics(TestCase):
def test_comb(self):
assert_array_almost_equal(special.comb([10, 10], [3, 4]), [120., 210.])
assert_almost_equal(special.comb(10, 3), 120.)
assert_equal(special.comb(10, 3, exact=True), 120)
assert_equal(special.comb(10, 3, exact=True, repetition=True), 220)
def test_comb_with_np_int64(self):
n = 70
k = 30
np_n = np.int64(n)
np_k = np.int64(k)
assert_equal(special.comb(np_n, np_k, exact=True),
special.comb(n, k, exact=True))
def test_comb_zeros(self):
assert_equal(special.comb(2, 3, exact=True), 0)
assert_equal(special.comb(-1, 3, exact=True), 0)
assert_equal(special.comb(2, -1, exact=True), 0)
assert_equal(special.comb(2, -1, exact=False), 0)
assert_array_almost_equal(special.comb([2, -1, 2, 10], [3, 3, -1, 3]),
[0., 0., 0., 120.])
def test_perm(self):
assert_array_almost_equal(special.perm([10, 10], [3, 4]), [720., 5040.])
assert_almost_equal(special.perm(10, 3), 720.)
assert_equal(special.perm(10, 3, exact=True), 720)
def test_perm_zeros(self):
assert_equal(special.perm(2, 3, exact=True), 0)
assert_equal(special.perm(-1, 3, exact=True), 0)
assert_equal(special.perm(2, -1, exact=True), 0)
assert_equal(special.perm(2, -1, exact=False), 0)
assert_array_almost_equal(special.perm([2, -1, 2, 10], [3, 3, -1, 3]),
[0., 0., 0., 720.])
class TestTrigonometric(TestCase):
def test_cbrt(self):
cb = special.cbrt(27)
cbrl = 27**(1.0/3.0)
assert_approx_equal(cb,cbrl)
def test_cbrtmore(self):
cb1 = special.cbrt(27.9)
cbrl1 = 27.9**(1.0/3.0)
assert_almost_equal(cb1,cbrl1,8)
def test_cosdg(self):
cdg = special.cosdg(90)
cdgrl = cos(pi/2.0)
assert_almost_equal(cdg,cdgrl,8)
def test_cosdgmore(self):
cdgm = special.cosdg(30)
cdgmrl = cos(pi/6.0)
assert_almost_equal(cdgm,cdgmrl,8)
def test_cosm1(self):
cs = (special.cosm1(0),special.cosm1(.3),special.cosm1(pi/10))
csrl = (cos(0)-1,cos(.3)-1,cos(pi/10)-1)
assert_array_almost_equal(cs,csrl,8)
def test_cotdg(self):
ct = special.cotdg(30)
ctrl = tan(pi/6.0)**(-1)
assert_almost_equal(ct,ctrl,8)
def test_cotdgmore(self):
ct1 = special.cotdg(45)
ctrl1 = tan(pi/4.0)**(-1)
assert_almost_equal(ct1,ctrl1,8)
def test_specialpoints(self):
assert_almost_equal(special.cotdg(45), 1.0, 14)
assert_almost_equal(special.cotdg(-45), -1.0, 14)
assert_almost_equal(special.cotdg(90), 0.0, 14)
assert_almost_equal(special.cotdg(-90), 0.0, 14)
assert_almost_equal(special.cotdg(135), -1.0, 14)
assert_almost_equal(special.cotdg(-135), 1.0, 14)
assert_almost_equal(special.cotdg(225), 1.0, 14)
assert_almost_equal(special.cotdg(-225), -1.0, 14)
assert_almost_equal(special.cotdg(270), 0.0, 14)
assert_almost_equal(special.cotdg(-270), 0.0, 14)
assert_almost_equal(special.cotdg(315), -1.0, 14)
assert_almost_equal(special.cotdg(-315), 1.0, 14)
assert_almost_equal(special.cotdg(765), 1.0, 14)
def test_sinc(self):
# the sinc implementation and more extensive sinc tests are in numpy
assert_array_equal(special.sinc([0]), 1)
assert_equal(special.sinc(0.0), 1.0)
def test_sindg(self):
sn = special.sindg(90)
assert_equal(sn,1.0)
def test_sindgmore(self):
snm = special.sindg(30)
snmrl = sin(pi/6.0)
assert_almost_equal(snm,snmrl,8)
snm1 = special.sindg(45)
snmrl1 = sin(pi/4.0)
assert_almost_equal(snm1,snmrl1,8)
class TestTandg(TestCase):
def test_tandg(self):
tn = special.tandg(30)
tnrl = tan(pi/6.0)
assert_almost_equal(tn,tnrl,8)
def test_tandgmore(self):
tnm = special.tandg(45)
tnmrl = tan(pi/4.0)
assert_almost_equal(tnm,tnmrl,8)
tnm1 = special.tandg(60)
tnmrl1 = tan(pi/3.0)
assert_almost_equal(tnm1,tnmrl1,8)
def test_specialpoints(self):
assert_almost_equal(special.tandg(0), 0.0, 14)
assert_almost_equal(special.tandg(45), 1.0, 14)
assert_almost_equal(special.tandg(-45), -1.0, 14)
assert_almost_equal(special.tandg(135), -1.0, 14)
assert_almost_equal(special.tandg(-135), 1.0, 14)
assert_almost_equal(special.tandg(180), 0.0, 14)
assert_almost_equal(special.tandg(-180), 0.0, 14)
assert_almost_equal(special.tandg(225), 1.0, 14)
assert_almost_equal(special.tandg(-225), -1.0, 14)
assert_almost_equal(special.tandg(315), -1.0, 14)
assert_almost_equal(special.tandg(-315), 1.0, 14)
class TestEllip(TestCase):
def test_ellipj_nan(self):
"""Regression test for #912."""
special.ellipj(0.5, np.nan)
def test_ellipj(self):
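        # for m = 0 the Jacobi elliptic functions reduce to circular ones:
        # sn = sin(u), cn = cos(u), dn = 1, ph = u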
el = special.ellipj(0.2,0)
rel = [sin(0.2),cos(0.2),1.0,0.20]
assert_array_almost_equal(el,rel,13)
def test_ellipk(self):
elk = special.ellipk(.2)
assert_almost_equal(elk,1.659623598610528,11)
assert_equal(special.ellipkm1(0.0), np.inf)
assert_equal(special.ellipkm1(1.0), pi/2)
assert_equal(special.ellipkm1(np.inf), 0.0)
assert_equal(special.ellipkm1(np.nan), np.nan)
assert_equal(special.ellipkm1(-1), np.nan)
assert_allclose(special.ellipk(-10), 0.7908718902387385)
def test_ellipkinc(self):
elkinc = special.ellipkinc(pi/2,.2)
elk = special.ellipk(0.2)
assert_almost_equal(elkinc,elk,15)
alpha = 20*pi/180
phi = 45*pi/180
m = sin(alpha)**2
elkinc = special.ellipkinc(phi,m)
assert_almost_equal(elkinc,0.79398143,8)
# From pg. 614 of A & S
assert_equal(special.ellipkinc(pi/2, 0.0), pi/2)
assert_equal(special.ellipkinc(pi/2, 1.0), np.inf)
assert_equal(special.ellipkinc(pi/2, -np.inf), 0.0)
assert_equal(special.ellipkinc(pi/2, np.nan), np.nan)
assert_equal(special.ellipkinc(pi/2, 2), np.nan)
assert_equal(special.ellipkinc(0, 0.5), 0.0)
assert_equal(special.ellipkinc(np.inf, 0.5), np.inf)
assert_equal(special.ellipkinc(-np.inf, 0.5), -np.inf)
assert_equal(special.ellipkinc(np.inf, np.inf), np.nan)
assert_equal(special.ellipkinc(np.inf, -np.inf), np.nan)
assert_equal(special.ellipkinc(-np.inf, -np.inf), np.nan)
assert_equal(special.ellipkinc(-np.inf, np.inf), np.nan)
assert_equal(special.ellipkinc(np.nan, 0.5), np.nan)
assert_equal(special.ellipkinc(np.nan, np.nan), np.nan)
assert_allclose(special.ellipkinc(0.38974112035318718, 1), 0.4, rtol=1e-14)
assert_allclose(special.ellipkinc(1.5707, -10), 0.79084284661724946)
def test_ellipkinc_2(self):
# Regression test for gh-3550
# ellipkinc(phi, mbad) was NaN and mvals[2:6] were twice the correct value
mbad = 0.68359375000000011
phi = 0.9272952180016123
m = np.nextafter(mbad, 0)
mvals = []
for j in range(10):
mvals.append(m)
m = np.nextafter(m, 1)
f = special.ellipkinc(phi, mvals)
assert_array_almost_equal_nulp(f, 1.0259330100195334 * np.ones_like(f), 1)
# this bug also appears at phi + n * pi for at least small n
f1 = special.ellipkinc(phi + pi, mvals)
assert_array_almost_equal_nulp(f1, 5.1296650500976675 * np.ones_like(f1), 2)
def test_ellipkinc_singular(self):
# ellipkinc(phi, 1) has closed form and is finite only for phi in (-pi/2, pi/2)
xlog = np.logspace(-300, -17, 25)
xlin = np.linspace(1e-17, 0.1, 25)
xlin2 = np.linspace(0.1, pi/2, 25, endpoint=False)
        assert_allclose(special.ellipkinc(xlog, 1), np.arcsinh(np.tan(xlog)), rtol=1e-14)
        assert_allclose(special.ellipkinc(xlin, 1), np.arcsinh(np.tan(xlin)), rtol=1e-14)
        assert_allclose(special.ellipkinc(xlin2, 1), np.arcsinh(np.tan(xlin2)), rtol=1e-14)
        assert_equal(special.ellipkinc(np.pi/2, 1), np.inf)
        assert_allclose(special.ellipkinc(-xlog, 1), np.arcsinh(np.tan(-xlog)), rtol=1e-14)
        assert_allclose(special.ellipkinc(-xlin, 1), np.arcsinh(np.tan(-xlin)), rtol=1e-14)
        assert_allclose(special.ellipkinc(-xlin2, 1), np.arcsinh(np.tan(-xlin2)), rtol=1e-14)
assert_equal(special.ellipkinc(-np.pi/2, 1), np.inf)
def test_ellipe(self):
ele = special.ellipe(.2)
assert_almost_equal(ele,1.4890350580958529,8)
assert_equal(special.ellipe(0.0), pi/2)
assert_equal(special.ellipe(1.0), 1.0)
assert_equal(special.ellipe(-np.inf), np.inf)
assert_equal(special.ellipe(np.nan), np.nan)
assert_equal(special.ellipe(2), np.nan)
assert_allclose(special.ellipe(-10), 3.6391380384177689)
def test_ellipeinc(self):
eleinc = special.ellipeinc(pi/2,.2)
ele = special.ellipe(0.2)
assert_almost_equal(eleinc,ele,14)
# pg 617 of A & S
alpha, phi = 52*pi/180,35*pi/180
m = sin(alpha)**2
eleinc = special.ellipeinc(phi,m)
assert_almost_equal(eleinc, 0.58823065, 8)
assert_equal(special.ellipeinc(pi/2, 0.0), pi/2)
assert_equal(special.ellipeinc(pi/2, 1.0), 1.0)
assert_equal(special.ellipeinc(pi/2, -np.inf), np.inf)
assert_equal(special.ellipeinc(pi/2, np.nan), np.nan)
assert_equal(special.ellipeinc(pi/2, 2), np.nan)
assert_equal(special.ellipeinc(0, 0.5), 0.0)
assert_equal(special.ellipeinc(np.inf, 0.5), np.inf)
assert_equal(special.ellipeinc(-np.inf, 0.5), -np.inf)
assert_equal(special.ellipeinc(np.inf, -np.inf), np.inf)
assert_equal(special.ellipeinc(-np.inf, -np.inf), -np.inf)
assert_equal(special.ellipeinc(np.inf, np.inf), np.nan)
assert_equal(special.ellipeinc(-np.inf, np.inf), np.nan)
assert_equal(special.ellipeinc(np.nan, 0.5), np.nan)
assert_equal(special.ellipeinc(np.nan, np.nan), np.nan)
assert_allclose(special.ellipeinc(1.5707, -10), 3.6388185585822876)
def test_ellipeinc_2(self):
# Regression test for gh-3550
# ellipeinc(phi, mbad) was NaN and mvals[2:6] were twice the correct value
mbad = 0.68359375000000011
phi = 0.9272952180016123
m = np.nextafter(mbad, 0)
mvals = []
for j in range(10):
mvals.append(m)
m = np.nextafter(m, 1)
f = special.ellipeinc(phi, mvals)
assert_array_almost_equal_nulp(f, 0.84442884574781019 * np.ones_like(f), 2)
# this bug also appears at phi + n * pi for at least small n
f1 = special.ellipeinc(phi + pi, mvals)
assert_array_almost_equal_nulp(f1, 3.3471442287390509 * np.ones_like(f1), 4)
class TestErf(TestCase):
def test_erf(self):
er = special.erf(.25)
assert_almost_equal(er,0.2763263902,8)
def test_erf_zeros(self):
erz = special.erf_zeros(5)
erzr = array([1.45061616+1.88094300j,
2.24465928+2.61657514j,
2.83974105+3.17562810j,
3.33546074+3.64617438j,
3.76900557+4.06069723j])
assert_array_almost_equal(erz,erzr,4)
def _check_variant_func(self, func, other_func, rtol, atol=0):
np.random.seed(1234)
n = 10000
x = np.random.pareto(0.02, n) * (2*np.random.randint(0, 2, n) - 1)
y = np.random.pareto(0.02, n) * (2*np.random.randint(0, 2, n) - 1)
z = x + 1j*y
old_errors = np.seterr(all='ignore')
try:
w = other_func(z)
w_real = other_func(x).real
mask = np.isfinite(w)
w = w[mask]
z = z[mask]
mask = np.isfinite(w_real)
w_real = w_real[mask]
x = x[mask]
# test both real and complex variants
assert_func_equal(func, w, z, rtol=rtol, atol=atol)
assert_func_equal(func, w_real, x, rtol=rtol, atol=atol)
finally:
np.seterr(**old_errors)
def test_erfc_consistent(self):
self._check_variant_func(
cephes.erfc,
lambda z: 1 - cephes.erf(z),
rtol=1e-12,
atol=1e-14 # <- the test function loses precision
)
def test_erfcx_consistent(self):
self._check_variant_func(
cephes.erfcx,
lambda z: np.exp(z*z) * cephes.erfc(z),
rtol=1e-12
)
def test_erfi_consistent(self):
self._check_variant_func(
cephes.erfi,
lambda z: -1j * cephes.erf(1j*z),
rtol=1e-12
)
def test_dawsn_consistent(self):
self._check_variant_func(
cephes.dawsn,
lambda z: sqrt(pi)/2 * np.exp(-z*z) * cephes.erfi(z),
rtol=1e-12
)
def test_erfcinv(self):
i = special.erfcinv(1)
        # Use assert_array_equal instead of assert_equal, so the comparison
# of -0.0 and 0.0 doesn't fail.
assert_array_equal(i, 0)
def test_erfinv(self):
i = special.erfinv(0)
assert_equal(i,0)
def test_errprint(self):
a = special.errprint()
        b = 1 - a  # a is the current state; 1 - a inverts it
        c = special.errprint(b)  # returns the previous state, i.e. a
        assert_equal(a,c)
        d = special.errprint(a)  # restore the original state
        assert_equal(d,b)  # check that the intermediate state was returned
# assert_equal(d,1-a)
class TestEuler(TestCase):
def test_euler(self):
eu0 = special.euler(0)
eu1 = special.euler(1)
eu2 = special.euler(2) # just checking segfaults
assert_almost_equal(eu0[0],1,8)
assert_almost_equal(eu2[2],-1,8)
eu24 = special.euler(24)
mathworld = [1,1,5,61,1385,50521,2702765,199360981,
19391512145,2404879675441,
370371188237525,69348874393137901,
15514534163557086905]
correct = zeros((25,),'d')
for k in range(0,13):
if (k % 2):
correct[2*k] = -float(mathworld[k])
else:
correct[2*k] = float(mathworld[k])
olderr = np.seterr(all='ignore')
try:
err = nan_to_num((eu24-correct)/correct)
errmax = max(err)
finally:
np.seterr(**olderr)
assert_almost_equal(errmax, 0.0, 14)
class TestExp(TestCase):
def test_exp2(self):
ex = special.exp2(2)
exrl = 2**2
assert_equal(ex,exrl)
def test_exp2more(self):
exm = special.exp2(2.5)
exmrl = 2**(2.5)
assert_almost_equal(exm,exmrl,8)
def test_exp10(self):
ex = special.exp10(2)
exrl = 10**2
assert_approx_equal(ex,exrl)
def test_exp10more(self):
exm = special.exp10(2.5)
exmrl = 10**(2.5)
assert_almost_equal(exm,exmrl,8)
def test_expm1(self):
ex = (special.expm1(2),special.expm1(3),special.expm1(4))
exrl = (exp(2)-1,exp(3)-1,exp(4)-1)
assert_array_almost_equal(ex,exrl,8)
def test_expm1more(self):
ex1 = (special.expm1(2),special.expm1(2.1),special.expm1(2.2))
exrl1 = (exp(2)-1,exp(2.1)-1,exp(2.2)-1)
assert_array_almost_equal(ex1,exrl1,8)
class TestFactorialFunctions(TestCase):
def test_factorial(self):
assert_array_almost_equal([6., 24., 120.],
special.factorial([3, 4, 5], exact=False))
assert_equal(special.factorial(5, exact=True), 120)
def test_factorial2(self):
assert_array_almost_equal([105., 384., 945.],
special.factorial2([7, 8, 9], exact=False))
assert_equal(special.factorial2(7, exact=True), 105)
def test_factorialk(self):
assert_equal(special.factorialk(5, 1, exact=True), 120)
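        # step-3 multifactorial: 5!!! = 5 * 2 = 10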
assert_equal(special.factorialk(5, 3, exact=True), 10)
class TestFresnel(TestCase):
def test_fresnel(self):
frs = array(special.fresnel(.5))
assert_array_almost_equal(frs,array([0.064732432859999287, 0.49234422587144644]),8)
# values from pg 329 Table 7.11 of A & S
# slightly corrected in 4th decimal place
def test_fresnel_zeros(self):
szo, czo = special.fresnel_zeros(5)
assert_array_almost_equal(szo,
array([2.0093+0.2885j,
2.8335+0.2443j,
3.4675+0.2185j,
4.0026+0.2009j,
4.4742+0.1877j]),3)
assert_array_almost_equal(czo,
array([1.7437+0.3057j,
2.6515+0.2529j,
3.3204+0.2240j,
3.8757+0.2047j,
4.3611+0.1907j]),3)
vals1 = special.fresnel(szo)[0]
vals2 = special.fresnel(czo)[1]
assert_array_almost_equal(vals1,0,14)
assert_array_almost_equal(vals2,0,14)
def test_fresnelc_zeros(self):
szo, czo = special.fresnel_zeros(6)
frc = special.fresnelc_zeros(6)
assert_array_almost_equal(frc,czo,12)
def test_fresnels_zeros(self):
szo, czo = special.fresnel_zeros(5)
frs = special.fresnels_zeros(5)
assert_array_almost_equal(frs,szo,12)
class TestGamma(TestCase):
def test_gamma(self):
gam = special.gamma(5)
assert_equal(gam,24.0)
def test_gammaln(self):
gamln = special.gammaln(3)
lngam = log(special.gamma(3))
assert_almost_equal(gamln,lngam,8)
def test_gammainc(self):
gama = special.gammainc(.5,.5)
assert_almost_equal(gama,.7,1)
def test_gammaincnan(self):
gama = special.gammainc(-1,1)
assert_(isnan(gama))
def test_gammainczero(self):
# bad arg but zero integration limit
gama = special.gammainc(-1,0)
assert_equal(gama,0.0)
def test_gammaincc(self):
gicc = special.gammaincc(.5,.5)
greal = 1 - special.gammainc(.5,.5)
assert_almost_equal(gicc,greal,8)
def test_gammainccnan(self):
gama = special.gammaincc(-1,1)
assert_(isnan(gama))
def test_gammainccinv(self):
gccinv = special.gammainccinv(.5,.5)
gcinv = special.gammaincinv(.5,.5)
assert_almost_equal(gccinv,gcinv,8)
@with_special_errors
def test_gammaincinv(self):
y = special.gammaincinv(.4,.4)
x = special.gammainc(.4,y)
assert_almost_equal(x,0.4,1)
y = special.gammainc(10, 0.05)
x = special.gammaincinv(10, 2.5715803516000736e-20)
assert_almost_equal(0.05, x, decimal=10)
assert_almost_equal(y, 2.5715803516000736e-20, decimal=10)
x = special.gammaincinv(50, 8.20754777388471303050299243573393e-18)
assert_almost_equal(11.0, x, decimal=10)
@with_special_errors
def test_975(self):
# Regression test for ticket #975 -- switch point in algorithm
# check that things work OK at the point, immediately next floats
# around it, and a bit further away
pts = [0.25,
np.nextafter(0.25, 0), 0.25 - 1e-12,
np.nextafter(0.25, 1), 0.25 + 1e-12]
for xp in pts:
y = special.gammaincinv(.4, xp)
x = special.gammainc(0.4, y)
assert_tol_equal(x, xp, rtol=1e-12)
def test_rgamma(self):
rgam = special.rgamma(8)
rlgam = 1/special.gamma(8)
assert_almost_equal(rgam,rlgam,8)
def test_infinity(self):
assert_(np.isinf(special.gamma(-1)))
assert_equal(special.rgamma(-1), 0)
class TestHankel(TestCase):
def test_negv1(self):
assert_almost_equal(special.hankel1(-3,2), -special.hankel1(3,2), 14)
def test_hankel1(self):
hank1 = special.hankel1(1,.1)
hankrl = (special.jv(1,.1) + special.yv(1,.1)*1j)
assert_almost_equal(hank1,hankrl,8)
def test_negv1e(self):
assert_almost_equal(special.hankel1e(-3,2), -special.hankel1e(3,2), 14)
def test_hankel1e(self):
hank1e = special.hankel1e(1,.1)
hankrle = special.hankel1(1,.1)*exp(-.1j)
assert_almost_equal(hank1e,hankrle,8)
def test_negv2(self):
assert_almost_equal(special.hankel2(-3,2), -special.hankel2(3,2), 14)
def test_hankel2(self):
hank2 = special.hankel2(1,.1)
hankrl2 = (special.jv(1,.1) - special.yv(1,.1)*1j)
assert_almost_equal(hank2,hankrl2,8)
def test_neg2e(self):
assert_almost_equal(special.hankel2e(-3,2), -special.hankel2e(3,2), 14)
    def test_hankel2e(self):
        hank2e = special.hankel2e(1,.1)
        hankrl2e = special.hankel2(1,.1)*exp(.1j)
        assert_almost_equal(hank2e,hankrl2e,8)
class TestHyper(TestCase):
def test_h1vp(self):
h1 = special.h1vp(1,.1)
h1real = (special.jvp(1,.1) + special.yvp(1,.1)*1j)
assert_almost_equal(h1,h1real,8)
def test_h2vp(self):
h2 = special.h2vp(1,.1)
h2real = (special.jvp(1,.1) - special.yvp(1,.1)*1j)
assert_almost_equal(h2,h2real,8)
def test_hyp0f1(self):
# scalar input
assert_allclose(special.hyp0f1(2.5, 0.5), 1.21482702689997, rtol=1e-12)
assert_allclose(special.hyp0f1(2.5, 0), 1.0, rtol=1e-15)
# float input, expected values match mpmath
x = special.hyp0f1(3.0, [-1.5, -1, 0, 1, 1.5])
expected = np.array([0.58493659229143, 0.70566805723127, 1.0,
1.37789689539747, 1.60373685288480])
assert_allclose(x, expected, rtol=1e-12)
# complex input
x = special.hyp0f1(3.0, np.array([-1.5, -1, 0, 1, 1.5]) + 0.j)
assert_allclose(x, expected.astype(complex), rtol=1e-12)
# test broadcasting
x1 = [0.5, 1.5, 2.5]
x2 = [0, 1, 0.5]
x = special.hyp0f1(x1, x2)
expected = [1.0, 1.8134302039235093, 1.21482702689997]
assert_allclose(x, expected, rtol=1e-12)
x = special.hyp0f1(np.row_stack([x1] * 2), x2)
assert_allclose(x, np.row_stack([expected] * 2), rtol=1e-12)
assert_raises(ValueError, special.hyp0f1,
np.row_stack([x1] * 3), [0, 1])
def test_hyp1f1(self):
hyp1 = special.hyp1f1(.1,.1,.3)
assert_almost_equal(hyp1, 1.3498588075760032,7)
# test contributed by Moritz Deger (2008-05-29)
# http://projects.scipy.org/scipy/scipy/ticket/659
    # reference data obtained from mathematica [a, b, x, M(a,b,x)]:
# produced with test_hyp1f1.nb
ref_data = array([[-8.38132975e+00, -1.28436461e+01, -2.91081397e+01, 1.04178330e+04],
[2.91076882e+00, -6.35234333e+00, -1.27083993e+01, 6.68132725e+00],
[-1.42938258e+01, 1.80869131e-01, 1.90038728e+01, 1.01385897e+05],
[5.84069088e+00, 1.33187908e+01, 2.91290106e+01, 1.59469411e+08],
[-2.70433202e+01, -1.16274873e+01, -2.89582384e+01, 1.39900152e+24],
[4.26344966e+00, -2.32701773e+01, 1.91635759e+01, 6.13816915e+21],
[1.20514340e+01, -3.40260240e+00, 7.26832235e+00, 1.17696112e+13],
[2.77372955e+01, -1.99424687e+00, 3.61332246e+00, 3.07419615e+13],
[1.50310939e+01, -2.91198675e+01, -1.53581080e+01, -3.79166033e+02],
[1.43995827e+01, 9.84311196e+00, 1.93204553e+01, 2.55836264e+10],
[-4.08759686e+00, 1.34437025e+01, -1.42072843e+01, 1.70778449e+01],
[8.05595738e+00, -1.31019838e+01, 1.52180721e+01, 3.06233294e+21],
[1.81815804e+01, -1.42908793e+01, 9.57868793e+00, -2.84771348e+20],
[-2.49671396e+01, 1.25082843e+01, -1.71562286e+01, 2.36290426e+07],
[2.67277673e+01, 1.70315414e+01, 6.12701450e+00, 7.77917232e+03],
[2.49565476e+01, 2.91694684e+01, 6.29622660e+00, 2.35300027e+02],
[6.11924542e+00, -1.59943768e+00, 9.57009289e+00, 1.32906326e+11],
[-1.47863653e+01, 2.41691301e+01, -1.89981821e+01, 2.73064953e+03],
[2.24070483e+01, -2.93647433e+00, 8.19281432e+00, -6.42000372e+17],
[8.04042600e-01, 1.82710085e+01, -1.97814534e+01, 5.48372441e-01],
[1.39590390e+01, 1.97318686e+01, 2.37606635e+00, 5.51923681e+00],
[-4.66640483e+00, -2.00237930e+01, 7.40365095e+00, 4.50310752e+00],
[2.76821999e+01, -6.36563968e+00, 1.11533984e+01, -9.28725179e+23],
[-2.56764457e+01, 1.24544906e+00, 1.06407572e+01, 1.25922076e+01],
[3.20447808e+00, 1.30874383e+01, 2.26098014e+01, 2.03202059e+04],
[-1.24809647e+01, 4.15137113e+00, -2.92265700e+01, 2.39621411e+08],
[2.14778108e+01, -2.35162960e+00, -1.13758664e+01, 4.46882152e-01],
[-9.85469168e+00, -3.28157680e+00, 1.67447548e+01, -1.07342390e+07],
[1.08122310e+01, -2.47353236e+01, -1.15622349e+01, -2.91733796e+03],
[-2.67933347e+01, -3.39100709e+00, 2.56006986e+01, -5.29275382e+09],
[-8.60066776e+00, -8.02200924e+00, 1.07231926e+01, 1.33548320e+06],
[-1.01724238e-01, -1.18479709e+01, -2.55407104e+01, 1.55436570e+00],
[-3.93356771e+00, 2.11106818e+01, -2.57598485e+01, 2.13467840e+01],
[3.74750503e+00, 1.55687633e+01, -2.92841720e+01, 1.43873509e-02],
[6.99726781e+00, 2.69855571e+01, -1.63707771e+01, 3.08098673e-02],
[-2.31996011e+01, 3.47631054e+00, 9.75119815e-01, 1.79971073e-02],
[2.38951044e+01, -2.91460190e+01, -2.50774708e+00, 9.56934814e+00],
[1.52730825e+01, 5.77062507e+00, 1.21922003e+01, 1.32345307e+09],
[1.74673917e+01, 1.89723426e+01, 4.94903250e+00, 9.90859484e+01],
[1.88971241e+01, 2.86255413e+01, 5.52360109e-01, 1.44165360e+00],
[1.02002319e+01, -1.66855152e+01, -2.55426235e+01, 6.56481554e+02],
[-1.79474153e+01, 1.22210200e+01, -1.84058212e+01, 8.24041812e+05],
[-1.36147103e+01, 1.32365492e+00, -7.22375200e+00, 9.92446491e+05],
[7.57407832e+00, 2.59738234e+01, -1.34139168e+01, 3.64037761e-02],
[2.21110169e+00, 1.28012666e+01, 1.62529102e+01, 1.33433085e+02],
[-2.64297569e+01, -1.63176658e+01, -1.11642006e+01, -2.44797251e+13],
[-2.46622944e+01, -3.02147372e+00, 8.29159315e+00, -3.21799070e+05],
[-1.37215095e+01, -1.96680183e+01, 2.91940118e+01, 3.21457520e+12],
[-5.45566105e+00, 2.81292086e+01, 1.72548215e-01, 9.66973000e-01],
[-1.55751298e+00, -8.65703373e+00, 2.68622026e+01, -3.17190834e+16],
[2.45393609e+01, -2.70571903e+01, 1.96815505e+01, 1.80708004e+37],
[5.77482829e+00, 1.53203143e+01, 2.50534322e+01, 1.14304242e+06],
[-1.02626819e+01, 2.36887658e+01, -2.32152102e+01, 7.28965646e+02],
[-1.30833446e+00, -1.28310210e+01, 1.87275544e+01, -9.33487904e+12],
[5.83024676e+00, -1.49279672e+01, 2.44957538e+01, -7.61083070e+27],
[-2.03130747e+01, 2.59641715e+01, -2.06174328e+01, 4.54744859e+04],
[1.97684551e+01, -2.21410519e+01, -2.26728740e+01, 3.53113026e+06],
[2.73673444e+01, 2.64491725e+01, 1.57599882e+01, 1.07385118e+07],
[5.73287971e+00, 1.21111904e+01, 1.33080171e+01, 2.63220467e+03],
[-2.82751072e+01, 2.08605881e+01, 9.09838900e+00, -6.60957033e-07],
[1.87270691e+01, -1.74437016e+01, 1.52413599e+01, 6.59572851e+27],
[6.60681457e+00, -2.69449855e+00, 9.78972047e+00, -2.38587870e+12],
[1.20895561e+01, -2.51355765e+01, 2.30096101e+01, 7.58739886e+32],
[-2.44682278e+01, 2.10673441e+01, -1.36705538e+01, 4.54213550e+04],
[-4.50665152e+00, 3.72292059e+00, -4.83403707e+00, 2.68938214e+01],
[-7.46540049e+00, -1.08422222e+01, -1.72203805e+01, -2.09402162e+02],
[-2.00307551e+01, -7.50604431e+00, -2.78640020e+01, 4.15985444e+19],
[1.99890876e+01, 2.20677419e+01, -2.51301778e+01, 1.23840297e-09],
[2.03183823e+01, -7.66942559e+00, 2.10340070e+01, 1.46285095e+31],
[-2.90315825e+00, -2.55785967e+01, -9.58779316e+00, 2.65714264e-01],
[2.73960829e+01, -1.80097203e+01, -2.03070131e+00, 2.52908999e+02],
[-2.11708058e+01, -2.70304032e+01, 2.48257944e+01, 3.09027527e+08],
[2.21959758e+01, 4.00258675e+00, -1.62853977e+01, -9.16280090e-09],
[1.61661840e+01, -2.26845150e+01, 2.17226940e+01, -8.24774394e+33],
[-3.35030306e+00, 1.32670581e+00, 9.39711214e+00, -1.47303163e+01],
[7.23720726e+00, -2.29763909e+01, 2.34709682e+01, -9.20711735e+29],
[2.71013568e+01, 1.61951087e+01, -7.11388906e-01, 2.98750911e-01],
[8.40057933e+00, -7.49665220e+00, 2.95587388e+01, 6.59465635e+29],
[-1.51603423e+01, 1.94032322e+01, -7.60044357e+00, 1.05186941e+02],
[-8.83788031e+00, -2.72018313e+01, 1.88269907e+00, 1.81687019e+00],
[-1.87283712e+01, 5.87479570e+00, -1.91210203e+01, 2.52235612e+08],
[-5.61338513e-01, 2.69490237e+01, 1.16660111e-01, 9.97567783e-01],
[-5.44354025e+00, -1.26721408e+01, -4.66831036e+00, 1.06660735e-01],
[-2.18846497e+00, 2.33299566e+01, 9.62564397e+00, 3.03842061e-01],
[6.65661299e+00, -2.39048713e+01, 1.04191807e+01, 4.73700451e+13],
[-2.57298921e+01, -2.60811296e+01, 2.74398110e+01, -5.32566307e+11],
[-1.11431826e+01, -1.59420160e+01, -1.84880553e+01, -1.01514747e+02],
[6.50301931e+00, 2.59859051e+01, -2.33270137e+01, 1.22760500e-02],
[-1.94987891e+01, -2.62123262e+01, 3.90323225e+00, 1.71658894e+01],
[7.26164601e+00, -1.41469402e+01, 2.81499763e+01, -2.50068329e+31],
[-1.52424040e+01, 2.99719005e+01, -2.85753678e+01, 1.31906693e+04],
[5.24149291e+00, -1.72807223e+01, 2.22129493e+01, 2.50748475e+25],
[3.63207230e-01, -9.54120862e-02, -2.83874044e+01, 9.43854939e-01],
[-2.11326457e+00, -1.25707023e+01, 1.17172130e+00, 1.20812698e+00],
[2.48513582e+00, 1.03652647e+01, -1.84625148e+01, 6.47910997e-02],
[2.65395942e+01, 2.74794672e+01, 1.29413428e+01, 2.89306132e+05],
[-9.49445460e+00, 1.59930921e+01, -1.49596331e+01, 3.27574841e+02],
[-5.89173945e+00, 9.96742426e+00, 2.60318889e+01, -3.15842908e-01],
[-1.15387239e+01, -2.21433107e+01, -2.17686413e+01, 1.56724718e-01],
[-5.30592244e+00, -2.42752190e+01, 1.29734035e+00, 1.31985534e+00]])
for a,b,c,expected in ref_data:
result = special.hyp1f1(a,b,c)
assert_(abs(expected - result)/expected < 1e-4)
def test_hyp1f1_gh2957(self):
hyp1 = special.hyp1f1(0.5, 1.5, -709.7827128933)
hyp2 = special.hyp1f1(0.5, 1.5, -709.7827128934)
assert_almost_equal(hyp1, hyp2, 12)
def test_hyp1f2(self):
pass
def test_hyp2f0(self):
pass
def test_hyp2f1(self):
# a collection of special cases taken from AMS 55
values = [[0.5, 1, 1.5, 0.2**2, 0.5/0.2*log((1+0.2)/(1-0.2))],
[0.5, 1, 1.5, -0.2**2, 1./0.2*arctan(0.2)],
[1, 1, 2, 0.2, -1/0.2*log(1-0.2)],
[3, 3.5, 1.5, 0.2**2,
0.5/0.2/(-5)*((1+0.2)**(-5)-(1-0.2)**(-5))],
[-3, 3, 0.5, sin(0.2)**2, cos(2*3*0.2)],
[3, 4, 8, 1, special.gamma(8)*special.gamma(8-4-3)/special.gamma(8-3)/special.gamma(8-4)],
[3, 2, 3-2+1, -1, 1./2**3*sqrt(pi) *
special.gamma(1+3-2)/special.gamma(1+0.5*3-2)/special.gamma(0.5+0.5*3)],
[5, 2, 5-2+1, -1, 1./2**5*sqrt(pi) *
special.gamma(1+5-2)/special.gamma(1+0.5*5-2)/special.gamma(0.5+0.5*5)],
[4, 0.5+4, 1.5-2*4, -1./3, (8./9)**(-2*4)*special.gamma(4./3) *
special.gamma(1.5-2*4)/special.gamma(3./2)/special.gamma(4./3-2*4)],
# and some others
# ticket #424
[1.5, -0.5, 1.0, -10.0, 4.1300097765277476484],
# negative integer a or b, with c-a-b integer and x > 0.9
[-2,3,1,0.95,0.715],
[2,-3,1,0.95,-0.007],
[-6,3,1,0.95,0.0000810625],
[2,-5,1,0.95,-0.000029375],
# huge negative integers
(10, -900, 10.5, 0.99, 1.91853705796607664803709475658e-24),
(10, -900, -10.5, 0.99, 3.54279200040355710199058559155e-18),
]
for i, (a, b, c, x, v) in enumerate(values):
cv = special.hyp2f1(a, b, c, x)
assert_almost_equal(cv, v, 8, err_msg='test #%d' % i)
def test_hyp3f0(self):
pass
def test_hyperu(self):
val1 = special.hyperu(1,0.1,100)
assert_almost_equal(val1,0.0098153,7)
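        # Cross-check against the M-to-U connection formula (AMS 13.1.3):
        # U(a,b,z) = pi/sin(pi*b) * [ M(a,b,z)/(Gamma(1+a-b)*Gamma(b))
        #                             - z**(1-b)*M(1+a-b,2-b,z)/(Gamma(a)*Gamma(2-b)) ]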
a,b = [0.3,0.6,1.2,-2.7],[1.5,3.2,-0.4,-3.2]
a,b = asarray(a), asarray(b)
z = 0.5
hypu = special.hyperu(a,b,z)
hprl = (pi/sin(pi*b))*(special.hyp1f1(a,b,z) /
(special.gamma(1+a-b)*special.gamma(b)) -
z**(1-b)*special.hyp1f1(1+a-b,2-b,z)
/ (special.gamma(a)*special.gamma(2-b)))
assert_array_almost_equal(hypu,hprl,12)
def test_hyperu_gh2287(self):
assert_almost_equal(special.hyperu(1, 1.5, 20.2),
0.048360918656699191, 12)
class TestBessel(TestCase):
def test_itj0y0(self):
it0 = array(special.itj0y0(.2))
assert_array_almost_equal(it0,array([0.19933433254006822, -0.34570883800412566]),8)
def test_it2j0y0(self):
it2 = array(special.it2j0y0(.2))
assert_array_almost_equal(it2,array([0.0049937546274601858, -0.43423067011231614]),8)
def test_negv_iv(self):
assert_equal(special.iv(3,2), special.iv(-3,2))
def test_j0(self):
oz = special.j0(.1)
ozr = special.jn(0,.1)
assert_almost_equal(oz,ozr,8)
def test_j1(self):
o1 = special.j1(.1)
o1r = special.jn(1,.1)
assert_almost_equal(o1,o1r,8)
def test_jn(self):
jnnr = special.jn(1,.2)
assert_almost_equal(jnnr,0.099500832639235995,8)
def test_negv_jv(self):
assert_almost_equal(special.jv(-3,2), -special.jv(3,2), 14)
def test_jv(self):
values = [[0, 0.1, 0.99750156206604002],
[2./3, 1e-8, 0.3239028506761532e-5],
[2./3, 1e-10, 0.1503423854873779e-6],
[3.1, 1e-10, 0.1711956265409013e-32],
[2./3, 4.0, -0.2325440850267039],
]
for i, (v, x, y) in enumerate(values):
yc = special.jv(v, x)
assert_almost_equal(yc, y, 8, err_msg='test #%d' % i)
def test_negv_jve(self):
assert_almost_equal(special.jve(-3,2), -special.jve(3,2), 14)
def test_jve(self):
jvexp = special.jve(1,.2)
assert_almost_equal(jvexp,0.099500832639235995,8)
jvexp1 = special.jve(1,.2+1j)
z = .2+1j
jvexpr = special.jv(1,z)*exp(-abs(z.imag))
assert_almost_equal(jvexp1,jvexpr,8)
def test_jn_zeros(self):
jn0 = special.jn_zeros(0,5)
jn1 = special.jn_zeros(1,5)
assert_array_almost_equal(jn0,array([2.4048255577,
5.5200781103,
8.6537279129,
11.7915344391,
14.9309177086]),4)
assert_array_almost_equal(jn1,array([3.83171,
7.01559,
10.17347,
13.32369,
16.47063]),4)
jn102 = special.jn_zeros(102,5)
assert_tol_equal(jn102, array([110.89174935992040343,
117.83464175788308398,
123.70194191713507279,
129.02417238949092824,
134.00114761868422559]), rtol=1e-13)
jn301 = special.jn_zeros(301,5)
assert_tol_equal(jn301, array([313.59097866698830153,
323.21549776096288280,
331.22338738656748796,
338.39676338872084500,
345.03284233056064157]), rtol=1e-13)
def test_jn_zeros_slow(self):
jn0 = special.jn_zeros(0, 300)
assert_tol_equal(jn0[260-1], 816.02884495068867280, rtol=1e-13)
assert_tol_equal(jn0[280-1], 878.86068707124422606, rtol=1e-13)
assert_tol_equal(jn0[300-1], 941.69253065317954064, rtol=1e-13)
jn10 = special.jn_zeros(10, 300)
assert_tol_equal(jn10[260-1], 831.67668514305631151, rtol=1e-13)
assert_tol_equal(jn10[280-1], 894.51275095371316931, rtol=1e-13)
assert_tol_equal(jn10[300-1], 957.34826370866539775, rtol=1e-13)
jn3010 = special.jn_zeros(3010,5)
assert_tol_equal(jn3010, array([3036.86590780927,
3057.06598526482,
3073.66360690272,
3088.37736494778,
3101.86438139042]), rtol=1e-8)
def test_jnjnp_zeros(self):
jn = special.jn
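        # helper: Jn'(x) from the derivative recurrence 2*Jn'(x) = J(n-1, x) - J(n+1, x)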
def jnp(n, x):
return (jn(n-1,x) - jn(n+1,x))/2
for nt in range(1, 30):
z, n, m, t = special.jnjnp_zeros(nt)
for zz, nn, tt in zip(z, n, t):
if tt == 0:
assert_allclose(jn(nn, zz), 0, atol=1e-6)
elif tt == 1:
assert_allclose(jnp(nn, zz), 0, atol=1e-6)
else:
raise AssertionError("Invalid t return for nt=%d" % nt)
def test_jnp_zeros(self):
jnp = special.jnp_zeros(1,5)
assert_array_almost_equal(jnp, array([1.84118,
5.33144,
8.53632,
11.70600,
14.86359]),4)
jnp = special.jnp_zeros(443,5)
assert_tol_equal(special.jvp(443, jnp), 0, atol=1e-15)
def test_jnyn_zeros(self):
jnz = special.jnyn_zeros(1,5)
assert_array_almost_equal(jnz,(array([3.83171,
7.01559,
10.17347,
13.32369,
16.47063]),
array([1.84118,
5.33144,
8.53632,
11.70600,
14.86359]),
array([2.19714,
5.42968,
8.59601,
11.74915,
14.89744]),
array([3.68302,
6.94150,
10.12340,
13.28576,
16.44006])),5)
def test_jvp(self):
jvprim = special.jvp(2,2)
jv0 = (special.jv(1,2)-special.jv(3,2))/2
assert_almost_equal(jvprim,jv0,10)
def test_k0(self):
ozk = special.k0(.1)
ozkr = special.kv(0,.1)
assert_almost_equal(ozk,ozkr,8)
def test_k0e(self):
ozke = special.k0e(.1)
ozker = special.kve(0,.1)
assert_almost_equal(ozke,ozker,8)
def test_k1(self):
o1k = special.k1(.1)
o1kr = special.kv(1,.1)
assert_almost_equal(o1k,o1kr,8)
def test_k1e(self):
o1ke = special.k1e(.1)
o1ker = special.kve(1,.1)
assert_almost_equal(o1ke,o1ker,8)
def test_jacobi(self):
a = 5*rand() - 1
b = 5*rand() - 1
P0 = special.jacobi(0,a,b)
P1 = special.jacobi(1,a,b)
P2 = special.jacobi(2,a,b)
P3 = special.jacobi(3,a,b)
assert_array_almost_equal(P0.c,[1],13)
assert_array_almost_equal(P1.c,array([a+b+2,a-b])/2.0,13)
cp = [(a+b+3)*(a+b+4), 4*(a+b+3)*(a+2), 4*(a+1)*(a+2)]
p2c = [cp[0],cp[1]-2*cp[0],cp[2]-cp[1]+cp[0]]
assert_array_almost_equal(P2.c,array(p2c)/8.0,13)
cp = [(a+b+4)*(a+b+5)*(a+b+6),6*(a+b+4)*(a+b+5)*(a+3),
12*(a+b+4)*(a+2)*(a+3),8*(a+1)*(a+2)*(a+3)]
p3c = [cp[0],cp[1]-3*cp[0],cp[2]-2*cp[1]+3*cp[0],cp[3]-cp[2]+cp[1]-cp[0]]
assert_array_almost_equal(P3.c,array(p3c)/48.0,13)
def test_kn(self):
kn1 = special.kn(0,.2)
assert_almost_equal(kn1,1.7527038555281462,8)
def test_negv_kv(self):
assert_equal(special.kv(3.0, 2.2), special.kv(-3.0, 2.2))
def test_kv0(self):
kv0 = special.kv(0,.2)
assert_almost_equal(kv0, 1.7527038555281462, 10)
def test_kv1(self):
kv1 = special.kv(1,0.2)
assert_almost_equal(kv1, 4.775972543220472, 10)
def test_kv2(self):
kv2 = special.kv(2,0.2)
assert_almost_equal(kv2, 49.51242928773287, 10)
def test_kn_largeorder(self):
assert_allclose(special.kn(32, 1), 1.7516596664574289e+43)
def test_kv_largearg(self):
assert_equal(special.kv(0, 1e19), 0)
def test_negv_kve(self):
assert_equal(special.kve(3.0, 2.2), special.kve(-3.0, 2.2))
def test_kve(self):
kve1 = special.kve(0,.2)
kv1 = special.kv(0,.2)*exp(.2)
assert_almost_equal(kve1,kv1,8)
z = .2+1j
kve2 = special.kve(0,z)
kv2 = special.kv(0,z)*exp(z)
assert_almost_equal(kve2,kv2,8)
def test_kvp_v0n1(self):
z = 2.2
assert_almost_equal(-special.kv(1,z), special.kvp(0,z, n=1), 10)
def test_kvp_n1(self):
v = 3.
z = 2.2
xc = -special.kv(v+1,z) + v/z*special.kv(v,z)
x = special.kvp(v,z, n=1)
assert_almost_equal(xc, x, 10) # this function (kvp) is broken
def test_kvp_n2(self):
v = 3.
z = 2.2
xc = (z**2+v**2-v)/z**2 * special.kv(v,z) + special.kv(v+1,z)/z
x = special.kvp(v, z, n=2)
assert_almost_equal(xc, x, 10)
def test_y0(self):
oz = special.y0(.1)
ozr = special.yn(0,.1)
assert_almost_equal(oz,ozr,8)
def test_y1(self):
o1 = special.y1(.1)
o1r = special.yn(1,.1)
assert_almost_equal(o1,o1r,8)
def test_y0_zeros(self):
yo,ypo = special.y0_zeros(2)
zo,zpo = special.y0_zeros(2,complex=1)
all = r_[yo,zo]
allval = r_[ypo,zpo]
assert_array_almost_equal(abs(special.yv(0.0,all)),0.0,11)
assert_array_almost_equal(abs(special.yv(1,all)-allval),0.0,11)
def test_y1_zeros(self):
y1 = special.y1_zeros(1)
assert_array_almost_equal(y1,(array([2.19714]),array([0.52079])),5)
def test_y1p_zeros(self):
y1p = special.y1p_zeros(1,complex=1)
assert_array_almost_equal(y1p,(array([0.5768+0.904j]), array([-0.7635+0.5892j])),3)
def test_yn_zeros(self):
an = special.yn_zeros(4,2)
assert_array_almost_equal(an,array([5.64515, 9.36162]),5)
an = special.yn_zeros(443,5)
assert_tol_equal(an, [450.13573091578090314, 463.05692376675001542,
472.80651546418663566, 481.27353184725625838,
488.98055964441374646], rtol=1e-15)
def test_ynp_zeros(self):
ao = special.ynp_zeros(0,2)
assert_array_almost_equal(ao,array([2.19714133, 5.42968104]),6)
ao = special.ynp_zeros(43,5)
assert_tol_equal(special.yvp(43, ao), 0, atol=1e-15)
ao = special.ynp_zeros(443,5)
assert_tol_equal(special.yvp(443, ao), 0, atol=1e-9)
def test_ynp_zeros_large_order(self):
ao = special.ynp_zeros(443,5)
assert_tol_equal(special.yvp(443, ao), 0, atol=1e-14)
def test_yn(self):
yn2n = special.yn(1,.2)
assert_almost_equal(yn2n,-3.3238249881118471,8)
def test_negv_yv(self):
assert_almost_equal(special.yv(-3,2), -special.yv(3,2), 14)
def test_yv(self):
yv2 = special.yv(1,.2)
assert_almost_equal(yv2,-3.3238249881118471,8)
def test_negv_yve(self):
assert_almost_equal(special.yve(-3,2), -special.yve(3,2), 14)
def test_yve(self):
yve2 = special.yve(1,.2)
assert_almost_equal(yve2,-3.3238249881118471,8)
yve2r = special.yv(1,.2+1j)*exp(-1)
yve22 = special.yve(1,.2+1j)
assert_almost_equal(yve22,yve2r,8)
def test_yvp(self):
yvpr = (special.yv(1,.2) - special.yv(3,.2))/2.0
yvp1 = special.yvp(2,.2)
assert_array_almost_equal(yvp1,yvpr,10)
def _cephes_vs_amos_points(self):
"""Yield points at which to compare Cephes implementation to AMOS"""
# check several points, including large-amplitude ones
for v in [-120, -100.3, -20., -10., -1., -.5,
0., 1., 12.49, 120., 301]:
for z in [-1300, -11, -10, -1, 1., 10., 200.5, 401., 600.5,
700.6, 1300, 10003]:
yield v, z
# check half-integers; these are problematic points at least
# for cephes/iv
for v in 0.5 + arange(-60, 60):
yield v, 3.5
def check_cephes_vs_amos(self, f1, f2, rtol=1e-11, atol=0, skip=None):
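        # f1 is evaluated on both code paths: a real argument selects the
        # Cephes implementation and a complex argument selects AMOS; f2 is
        # the integer-order variant, checked whenever v is integral.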
for v, z in self._cephes_vs_amos_points():
if skip is not None and skip(v, z):
continue
c1, c2, c3 = f1(v, z), f1(v,z+0j), f2(int(v), z)
if np.isinf(c1):
assert_(np.abs(c2) >= 1e300, (v, z))
elif np.isnan(c1):
assert_(c2.imag != 0, (v, z))
else:
assert_tol_equal(c1, c2, err_msg=(v, z), rtol=rtol, atol=atol)
if v == int(v):
assert_tol_equal(c3, c2, err_msg=(v, z),
rtol=rtol, atol=atol)
def test_jv_cephes_vs_amos(self):
self.check_cephes_vs_amos(special.jv, special.jn, rtol=1e-10, atol=1e-305)
def test_yv_cephes_vs_amos(self):
self.check_cephes_vs_amos(special.yv, special.yn, rtol=1e-11, atol=1e-305)
def test_yv_cephes_vs_amos_only_small_orders(self):
skipper = lambda v, z: (abs(v) > 50)
self.check_cephes_vs_amos(special.yv, special.yn, rtol=1e-11, atol=1e-305, skip=skipper)
def test_iv_cephes_vs_amos(self):
olderr = np.seterr(all='ignore')
try:
self.check_cephes_vs_amos(special.iv, special.iv, rtol=5e-9, atol=1e-305)
finally:
np.seterr(**olderr)
@dec.slow
def test_iv_cephes_vs_amos_mass_test(self):
N = 1000000
np.random.seed(1)
v = np.random.pareto(0.5, N) * (-1)**np.random.randint(2, size=N)
x = np.random.pareto(0.2, N) * (-1)**np.random.randint(2, size=N)
imsk = (np.random.randint(8, size=N) == 0)
v[imsk] = v[imsk].astype(int)
old_err = np.seterr(all='ignore')
try:
c1 = special.iv(v, x)
c2 = special.iv(v, x+0j)
# deal with differences in the inf and zero cutoffs
c1[abs(c1) > 1e300] = np.inf
c2[abs(c2) > 1e300] = np.inf
c1[abs(c1) < 1e-300] = 0
c2[abs(c2) < 1e-300] = 0
dc = abs(c1/c2 - 1)
dc[np.isnan(dc)] = 0
finally:
np.seterr(**old_err)
k = np.argmax(dc)
# Most error apparently comes from AMOS and not our implementation;
# there are some problems near integer orders there
assert_(dc[k] < 2e-7, (v[k], x[k], special.iv(v[k], x[k]), special.iv(v[k], x[k]+0j)))
def test_kv_cephes_vs_amos(self):
self.check_cephes_vs_amos(special.kv, special.kn, rtol=1e-9, atol=1e-305)
self.check_cephes_vs_amos(special.kv, special.kv, rtol=1e-9, atol=1e-305)
def test_ticket_623(self):
assert_tol_equal(special.jv(3, 4), 0.43017147387562193)
assert_tol_equal(special.jv(301, 1300), 0.0183487151115275)
assert_tol_equal(special.jv(301, 1296.0682), -0.0224174325312048)
def test_ticket_853(self):
"""Negative-order Bessels"""
# cephes
assert_tol_equal(special.jv(-1, 1), -0.4400505857449335)
assert_tol_equal(special.jv(-2, 1), 0.1149034849319005)
assert_tol_equal(special.yv(-1, 1), 0.7812128213002887)
assert_tol_equal(special.yv(-2, 1), -1.650682606816255)
assert_tol_equal(special.iv(-1, 1), 0.5651591039924851)
assert_tol_equal(special.iv(-2, 1), 0.1357476697670383)
assert_tol_equal(special.kv(-1, 1), 0.6019072301972347)
assert_tol_equal(special.kv(-2, 1), 1.624838898635178)
assert_tol_equal(special.jv(-0.5, 1), 0.43109886801837607952)
assert_tol_equal(special.yv(-0.5, 1), 0.6713967071418031)
assert_tol_equal(special.iv(-0.5, 1), 1.231200214592967)
assert_tol_equal(special.kv(-0.5, 1), 0.4610685044478945)
# amos
assert_tol_equal(special.jv(-1, 1+0j), -0.4400505857449335)
assert_tol_equal(special.jv(-2, 1+0j), 0.1149034849319005)
assert_tol_equal(special.yv(-1, 1+0j), 0.7812128213002887)
assert_tol_equal(special.yv(-2, 1+0j), -1.650682606816255)
assert_tol_equal(special.iv(-1, 1+0j), 0.5651591039924851)
assert_tol_equal(special.iv(-2, 1+0j), 0.1357476697670383)
assert_tol_equal(special.kv(-1, 1+0j), 0.6019072301972347)
assert_tol_equal(special.kv(-2, 1+0j), 1.624838898635178)
assert_tol_equal(special.jv(-0.5, 1+0j), 0.43109886801837607952)
assert_tol_equal(special.jv(-0.5, 1+1j), 0.2628946385649065-0.827050182040562j)
assert_tol_equal(special.yv(-0.5, 1+0j), 0.6713967071418031)
assert_tol_equal(special.yv(-0.5, 1+1j), 0.967901282890131+0.0602046062142816j)
assert_tol_equal(special.iv(-0.5, 1+0j), 1.231200214592967)
assert_tol_equal(special.iv(-0.5, 1+1j), 0.77070737376928+0.39891821043561j)
assert_tol_equal(special.kv(-0.5, 1+0j), 0.4610685044478945)
assert_tol_equal(special.kv(-0.5, 1+1j), 0.06868578341999-0.38157825981268j)
assert_tol_equal(special.jve(-0.5,1+0.3j), special.jv(-0.5, 1+0.3j)*exp(-0.3))
assert_tol_equal(special.yve(-0.5,1+0.3j), special.yv(-0.5, 1+0.3j)*exp(-0.3))
assert_tol_equal(special.ive(-0.5,0.3+1j), special.iv(-0.5, 0.3+1j)*exp(-0.3))
assert_tol_equal(special.kve(-0.5,0.3+1j), special.kv(-0.5, 0.3+1j)*exp(0.3+1j))
assert_tol_equal(special.hankel1(-0.5, 1+1j), special.jv(-0.5, 1+1j) + 1j*special.yv(-0.5,1+1j))
assert_tol_equal(special.hankel2(-0.5, 1+1j), special.jv(-0.5, 1+1j) - 1j*special.yv(-0.5,1+1j))
def test_ticket_854(self):
"""Real-valued Bessel domains"""
assert_(isnan(special.jv(0.5, -1)))
assert_(isnan(special.iv(0.5, -1)))
assert_(isnan(special.yv(0.5, -1)))
assert_(isnan(special.yv(1, -1)))
assert_(isnan(special.kv(0.5, -1)))
assert_(isnan(special.kv(1, -1)))
assert_(isnan(special.jve(0.5, -1)))
assert_(isnan(special.ive(0.5, -1)))
assert_(isnan(special.yve(0.5, -1)))
assert_(isnan(special.yve(1, -1)))
assert_(isnan(special.kve(0.5, -1)))
assert_(isnan(special.kve(1, -1)))
assert_(isnan(special.airye(-1)[0:2]).all(), special.airye(-1))
assert_(not isnan(special.airye(-1)[2:4]).any(), special.airye(-1))
def test_ticket_503(self):
"""Real-valued Bessel I overflow"""
assert_tol_equal(special.iv(1, 700), 1.528500390233901e302)
assert_tol_equal(special.iv(1000, 1120), 1.301564549405821e301)
def test_iv_hyperg_poles(self):
assert_tol_equal(special.iv(-0.5, 1), 1.231200214592967)
def iv_series(self, v, z, n=200):
k = arange(0, n).astype(float_)
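        # Terms of the ascending series I_v(z) = sum_k (z/2)**(v+2k) / (k! * Gamma(v+k+1)),
        # accumulated in log space via gammaln so large orders and arguments do not overflow.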
r = (v+2*k)*log(.5*z) - special.gammaln(k+1) - special.gammaln(v+k+1)
r[isnan(r)] = inf
r = exp(r)
err = abs(r).max() * finfo(float_).eps * n + abs(r[-1])*10
return r.sum(), err
def test_i0_series(self):
for z in [1., 10., 200.5]:
value, err = self.iv_series(0, z)
assert_tol_equal(special.i0(z), value, atol=err, err_msg=z)
def test_i1_series(self):
for z in [1., 10., 200.5]:
value, err = self.iv_series(1, z)
assert_tol_equal(special.i1(z), value, atol=err, err_msg=z)
def test_iv_series(self):
for v in [-20., -10., -1., 0., 1., 12.49, 120.]:
for z in [1., 10., 200.5, -1+2j]:
value, err = self.iv_series(v, z)
assert_tol_equal(special.iv(v, z), value, atol=err, err_msg=(v, z))
def test_i0(self):
values = [[0.0, 1.0],
[1e-10, 1.0],
[0.1, 0.9071009258],
[0.5, 0.6450352706],
[1.0, 0.4657596077],
[2.5, 0.2700464416],
[5.0, 0.1835408126],
[20.0, 0.0897803119],
]
for i, (x, v) in enumerate(values):
cv = special.i0(x) * exp(-x)
assert_almost_equal(cv, v, 8, err_msg='test #%d' % i)
def test_i0e(self):
oize = special.i0e(.1)
oizer = special.ive(0,.1)
assert_almost_equal(oize,oizer,8)
def test_i1(self):
values = [[0.0, 0.0],
[1e-10, 0.4999999999500000e-10],
[0.1, 0.0452984468],
[0.5, 0.1564208032],
[1.0, 0.2079104154],
[5.0, 0.1639722669],
[20.0, 0.0875062222],
]
for i, (x, v) in enumerate(values):
cv = special.i1(x) * exp(-x)
assert_almost_equal(cv, v, 8, err_msg='test #%d' % i)
def test_i1e(self):
oi1e = special.i1e(.1)
oi1er = special.ive(1,.1)
assert_almost_equal(oi1e,oi1er,8)
def test_iti0k0(self):
iti0 = array(special.iti0k0(5))
assert_array_almost_equal(iti0,array([31.848667776169801, 1.5673873907283657]),5)
def test_it2i0k0(self):
it2k = special.it2i0k0(.1)
assert_array_almost_equal(it2k,array([0.0012503906973464409, 3.3309450354686687]),6)
def test_iv(self):
iv1 = special.iv(0,.1)*exp(-.1)
assert_almost_equal(iv1,0.90710092578230106,10)
def test_negv_ive(self):
assert_equal(special.ive(3,2), special.ive(-3,2))
def test_ive(self):
ive1 = special.ive(0,.1)
iv1 = special.iv(0,.1)*exp(-.1)
assert_almost_equal(ive1,iv1,10)
def test_ivp0(self):
assert_almost_equal(special.iv(1,2), special.ivp(0,2), 10)
def test_ivp(self):
y = (special.iv(0,2) + special.iv(2,2))/2
x = special.ivp(1,2)
assert_almost_equal(x,y,10)
class TestLaguerre(TestCase):
def test_laguerre(self):
lag0 = special.laguerre(0)
lag1 = special.laguerre(1)
lag2 = special.laguerre(2)
lag3 = special.laguerre(3)
lag4 = special.laguerre(4)
lag5 = special.laguerre(5)
assert_array_almost_equal(lag0.c,[1],13)
assert_array_almost_equal(lag1.c,[-1,1],13)
assert_array_almost_equal(lag2.c,array([1,-4,2])/2.0,13)
assert_array_almost_equal(lag3.c,array([-1,9,-18,6])/6.0,13)
assert_array_almost_equal(lag4.c,array([1,-16,72,-96,24])/24.0,13)
assert_array_almost_equal(lag5.c,array([-1,25,-200,600,-600,120])/120.0,13)
def test_genlaguerre(self):
k = 5*rand()-0.9
lag0 = special.genlaguerre(0,k)
lag1 = special.genlaguerre(1,k)
lag2 = special.genlaguerre(2,k)
lag3 = special.genlaguerre(3,k)
assert_equal(lag0.c,[1])
assert_equal(lag1.c,[-1,k+1])
assert_almost_equal(lag2.c,array([1,-2*(k+2),(k+1.)*(k+2.)])/2.0)
assert_almost_equal(lag3.c,array([-1,3*(k+3),-3*(k+2)*(k+3),(k+1)*(k+2)*(k+3)])/6.0)
# Base polynomials come from Abramowitz and Stegun.
class TestLegendre(TestCase):
def test_legendre(self):
leg0 = special.legendre(0)
leg1 = special.legendre(1)
leg2 = special.legendre(2)
leg3 = special.legendre(3)
leg4 = special.legendre(4)
leg5 = special.legendre(5)
assert_equal(leg0.c, [1])
assert_equal(leg1.c, [1,0])
assert_almost_equal(leg2.c, array([3,0,-1])/2.0, decimal=13)
assert_almost_equal(leg3.c, array([5,0,-3,0])/2.0)
assert_almost_equal(leg4.c, array([35,0,-30,0,3])/8.0)
assert_almost_equal(leg5.c, array([63,0,-70,0,15,0])/8.0)
class TestLambda(TestCase):
def test_lmbda(self):
lam = special.lmbda(1,.1)
lamr = (array([special.jn(0,.1), 2*special.jn(1,.1)/.1]),
array([special.jvp(0,.1), -2*special.jv(1,.1)/.01 + 2*special.jvp(1,.1)/.1]))
assert_array_almost_equal(lam,lamr,8)
class TestLog1p(TestCase):
def test_log1p(self):
l1p = (special.log1p(10), special.log1p(11), special.log1p(12))
l1prl = (log(11), log(12), log(13))
assert_array_almost_equal(l1p,l1prl,8)
def test_log1pmore(self):
l1pm = (special.log1p(1), special.log1p(1.1), special.log1p(1.2))
l1pmrl = (log(2),log(2.1),log(2.2))
assert_array_almost_equal(l1pm,l1pmrl,8)
class TestLegendreFunctions(TestCase):
def test_clpmn(self):
z = 0.5+0.3j
clp = special.clpmn(2, 2, z, 3)
assert_array_almost_equal(clp,
(array([[1.0000, z, 0.5*(3*z*z-1)],
[0.0000, sqrt(z*z-1), 3*z*sqrt(z*z-1)],
[0.0000, 0.0000, 3*(z*z-1)]]),
array([[0.0000, 1.0000, 3*z],
[0.0000, z/sqrt(z*z-1), 3*(2*z*z-1)/sqrt(z*z-1)],
[0.0000, 0.0000, 6*z]])),
7)
def test_clpmn_close_to_real_2(self):
eps = 1e-10
m = 1
n = 3
x = 0.5
clp_plus = special.clpmn(m, n, x+1j*eps, 2)[0][m, n]
clp_minus = special.clpmn(m, n, x-1j*eps, 2)[0][m, n]
assert_array_almost_equal(array([clp_plus, clp_minus]),
array([special.lpmv(m, n, x),
special.lpmv(m, n, x)]),
7)
def test_clpmn_close_to_real_3(self):
eps = 1e-10
m = 1
n = 3
x = 0.5
clp_plus = special.clpmn(m, n, x+1j*eps, 3)[0][m, n]
clp_minus = special.clpmn(m, n, x-1j*eps, 3)[0][m, n]
assert_array_almost_equal(array([clp_plus, clp_minus]),
array([special.lpmv(m, n, x)*np.exp(-0.5j*m*np.pi),
special.lpmv(m, n, x)*np.exp(0.5j*m*np.pi)]),
7)
def test_clpmn_across_unit_circle(self):
eps = 1e-7
m = 1
n = 1
x = 1j
for type in [2, 3]:
assert_almost_equal(special.clpmn(m, n, x+1j*eps, type)[0][m, n],
special.clpmn(m, n, x-1j*eps, type)[0][m, n], 6)
def test_inf(self):
for z in (1, -1):
for n in range(4):
for m in range(1, n):
lp = special.clpmn(m, n, z)
assert_(np.isinf(lp[1][1,1:]).all())
lp = special.lpmn(m, n, z)
assert_(np.isinf(lp[1][1,1:]).all())
def test_deriv_clpmn(self):
# data inside and outside of the unit circle
zvals = [0.5+0.5j, -0.5+0.5j, -0.5-0.5j, 0.5-0.5j,
1+1j, -1+1j, -1-1j, 1-1j]
m = 2
n = 3
for type in [2, 3]:
for z in zvals:
for h in [1e-3, 1e-3j]:
approx_derivative = (special.clpmn(m, n, z+0.5*h, type)[0]
- special.clpmn(m, n, z-0.5*h, type)[0])/h
assert_allclose(special.clpmn(m, n, z, type)[1],
approx_derivative,
rtol=1e-4)
def test_lpmn(self):
lp = special.lpmn(0,2,.5)
assert_array_almost_equal(lp,(array([[1.00000,
0.50000,
-0.12500]]),
array([[0.00000,
1.00000,
1.50000]])),4)
def test_lpn(self):
lpnf = special.lpn(2,.5)
assert_array_almost_equal(lpnf,(array([1.00000,
0.50000,
-0.12500]),
array([0.00000,
1.00000,
1.50000])),4)
def test_lpmv(self):
lp = special.lpmv(0,2,.5)
assert_almost_equal(lp,-0.125,7)
lp = special.lpmv(0,40,.001)
assert_almost_equal(lp,0.1252678976534484,7)
# XXX: this is outside the domain of the current implementation,
# so ensure it returns a NaN rather than a wrong answer.
olderr = np.seterr(all='ignore')
try:
lp = special.lpmv(-1,-1,.001)
finally:
np.seterr(**olderr)
assert_(lp != 0 or np.isnan(lp))
def test_lqmn(self):
lqmnf = special.lqmn(0,2,.5)
lqf = special.lqn(2,.5)
assert_array_almost_equal(lqmnf[0][0],lqf[0],4)
assert_array_almost_equal(lqmnf[1][0],lqf[1],4)
def test_lqmn_gt1(self):
"""algorithm for real arguments changes at 1.0001
test against analytical result for m=2, n=1
"""
x0 = 1.0001
delta = 0.00002
for x in (x0-delta, x0+delta):
lq = special.lqmn(2, 1, x)[0][-1, -1]
expected = 2/(x*x-1)
assert_almost_equal(lq, expected)
def test_lqmn_shape(self):
a, b = special.lqmn(4, 4, 1.1)
assert_equal(a.shape, (5, 5))
assert_equal(b.shape, (5, 5))
a, b = special.lqmn(4, 0, 1.1)
assert_equal(a.shape, (5, 1))
assert_equal(b.shape, (5, 1))
def test_lqn(self):
lqf = special.lqn(2,.5)
assert_array_almost_equal(lqf,(array([0.5493, -0.7253, -0.8187]),
array([1.3333, 1.216, -0.8427])),4)
class TestMathieu(TestCase):
def test_mathieu_a(self):
pass
def test_mathieu_even_coef(self):
mc = special.mathieu_even_coef(2,5)
        # Q is not defined; the routine is broken and the proper reporting order could not be determined.
def test_mathieu_odd_coef(self):
# same problem as above
pass
class TestFresnelIntegral(TestCase):
def test_modfresnelp(self):
pass
def test_modfresnelm(self):
pass
class TestOblCvSeq(TestCase):
def test_obl_cv_seq(self):
obl = special.obl_cv_seq(0,3,1)
assert_array_almost_equal(obl,array([-0.348602,
1.393206,
5.486800,
11.492120]),5)
class TestParabolicCylinder(TestCase):
def test_pbdn_seq(self):
pb = special.pbdn_seq(1,.1)
assert_array_almost_equal(pb,(array([0.9975,
0.0998]),
array([-0.0499,
0.9925])),4)
def test_pbdv(self):
pbv = special.pbdv(1,.2)
derrl = 1/2*(.2)*special.pbdv(1,.2)[0] - special.pbdv(0,.2)[0]
def test_pbdv_seq(self):
pbn = special.pbdn_seq(1,.1)
pbv = special.pbdv_seq(1,.1)
assert_array_almost_equal(pbv,(real(pbn[0]),real(pbn[1])),4)
def test_pbdv_points(self):
# simple case
eta = np.linspace(-10, 10, 5)
z = 2**(eta/2)*np.sqrt(np.pi)/special.gamma(.5-.5*eta)
assert_tol_equal(special.pbdv(eta, 0.)[0], z, rtol=1e-14, atol=1e-14)
# some points
assert_tol_equal(special.pbdv(10.34, 20.44)[0], 1.3731383034455e-32, rtol=1e-12)
assert_tol_equal(special.pbdv(-9.53, 3.44)[0], 3.166735001119246e-8, rtol=1e-12)
def test_pbdv_gradient(self):
x = np.linspace(-4, 4, 8)[:,None]
eta = np.linspace(-10, 10, 5)[None,:]
p = special.pbdv(eta, x)
eps = 1e-7 + 1e-7*abs(x)
dp = (special.pbdv(eta, x + eps)[0] - special.pbdv(eta, x - eps)[0]) / eps / 2.
assert_tol_equal(p[1], dp, rtol=1e-6, atol=1e-6)
def test_pbvv_gradient(self):
x = np.linspace(-4, 4, 8)[:,None]
eta = np.linspace(-10, 10, 5)[None,:]
p = special.pbvv(eta, x)
eps = 1e-7 + 1e-7*abs(x)
dp = (special.pbvv(eta, x + eps)[0] - special.pbvv(eta, x - eps)[0]) / eps / 2.
assert_tol_equal(p[1], dp, rtol=1e-6, atol=1e-6)
class TestPolygamma(TestCase):
# from Table 6.2 (pg. 271) of A&S
def test_polygamma(self):
poly2 = special.polygamma(2,1)
poly3 = special.polygamma(3,1)
assert_almost_equal(poly2,-2.4041138063,10)
assert_almost_equal(poly3,6.4939394023,10)
# Test polygamma(0, x) == psi(x)
x = [2, 3, 1.1e14]
assert_almost_equal(special.polygamma(0, x), special.psi(x))
# Test broadcasting
n = [0, 1, 2]
x = [0.5, 1.5, 2.5]
expected = [-1.9635100260214238, 0.93480220054467933,
-0.23620405164172739]
assert_almost_equal(special.polygamma(n, x), expected)
expected = np.row_stack([expected]*2)
assert_almost_equal(special.polygamma(n, np.row_stack([x]*2)),
expected)
assert_almost_equal(special.polygamma(np.row_stack([n]*2), x),
expected)
class TestProCvSeq(TestCase):
def test_pro_cv_seq(self):
prol = special.pro_cv_seq(0,3,1)
assert_array_almost_equal(prol,array([0.319000,
2.593084,
6.533471,
12.514462]),5)
class TestPsi(TestCase):
def test_psi(self):
ps = special.psi(1)
assert_almost_equal(ps,-0.57721566490153287,8)
class TestRadian(TestCase):
def test_radian(self):
rad = special.radian(90,0,0)
assert_almost_equal(rad,pi/2.0,5)
def test_radianmore(self):
rad1 = special.radian(90,1,60)
assert_almost_equal(rad1,pi/2+0.0005816135199345904,5)
class TestRiccati(TestCase):
def test_riccati_jn(self):
jnrl = (special.sph_jn(1,.2)[0]*.2,special.sph_jn(1,.2)[0]+special.sph_jn(1,.2)[1]*.2)
ricjn = special.riccati_jn(1,.2)
assert_array_almost_equal(ricjn,jnrl,8)
def test_riccati_yn(self):
ynrl = (special.sph_yn(1,.2)[0]*.2,special.sph_yn(1,.2)[0]+special.sph_yn(1,.2)[1]*.2)
ricyn = special.riccati_yn(1,.2)
assert_array_almost_equal(ricyn,ynrl,8)
class TestRound(TestCase):
def test_round(self):
rnd = list(map(int,(special.round(10.1),special.round(10.4),special.round(10.5),special.round(10.6))))
# Note: According to the documentation, scipy.special.round is
# supposed to round to the nearest even number if the fractional
# part is exactly 0.5. On some platforms, this does not appear
# to work and thus this test may fail. However, this unit test is
# correctly written.
rndrl = (10,10,10,11)
assert_array_equal(rnd,rndrl)
def test_sph_harm():
# Tests derived from tables in
# http://en.wikipedia.org/wiki/Table_of_spherical_harmonics
sh = special.sph_harm
pi = np.pi
exp = np.exp
sqrt = np.sqrt
sin = np.sin
cos = np.cos
yield (assert_array_almost_equal, sh(0,0,0,0),
0.5/sqrt(pi))
yield (assert_array_almost_equal, sh(-2,2,0.,pi/4),
0.25*sqrt(15./(2.*pi)) *
(sin(pi/4))**2.)
yield (assert_array_almost_equal, sh(-2,2,0.,pi/2),
0.25*sqrt(15./(2.*pi)))
yield (assert_array_almost_equal, sh(2,2,pi,pi/2),
0.25*sqrt(15/(2.*pi)) *
exp(0+2.*pi*1j)*sin(pi/2.)**2.)
yield (assert_array_almost_equal, sh(2,4,pi/4.,pi/3.),
(3./8.)*sqrt(5./(2.*pi)) *
exp(0+2.*pi/4.*1j) *
sin(pi/3.)**2. *
(7.*cos(pi/3.)**2.-1))
yield (assert_array_almost_equal, sh(4,4,pi/8.,pi/6.),
(3./16.)*sqrt(35./(2.*pi)) *
exp(0+4.*pi/8.*1j)*sin(pi/6.)**4.)
def test_sph_harm_ufunc_loop_selection():
# see https://github.com/scipy/scipy/issues/4895
dt = np.dtype(np.complex128)
assert_equal(special.sph_harm(0, 0, 0, 0).dtype, dt)
assert_equal(special.sph_harm([0], 0, 0, 0).dtype, dt)
assert_equal(special.sph_harm(0, [0], 0, 0).dtype, dt)
assert_equal(special.sph_harm(0, 0, [0], 0).dtype, dt)
assert_equal(special.sph_harm(0, 0, 0, [0]).dtype, dt)
assert_equal(special.sph_harm([0], [0], [0], [0]).dtype, dt)
class TestSpherical(TestCase):
def test_sph_harm(self):
# see test_sph_harm function
pass
def test_sph_in(self):
i1n = special.sph_in(1,.2)
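        # Expected derivatives follow from the recurrences i0'(x) = i1(x) and
        # i1'(x) = i0(x) - (2/x)*i1(x).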
inp0 = (i1n[0][1])
inp1 = (i1n[0][0] - 2.0/0.2 * i1n[0][1])
assert_array_almost_equal(i1n[0],array([1.0066800127054699381,
0.066933714568029540839]),12)
assert_array_almost_equal(i1n[1],[inp0,inp1],12)
def test_sph_inkn(self):
spikn = r_[special.sph_in(1,.2) + special.sph_kn(1,.2)]
inkn = r_[special.sph_inkn(1,.2)]
assert_array_almost_equal(inkn,spikn,10)
def test_sph_in_kn_order0(self):
x = 1.
sph_i0 = special.sph_in(0, x)
sph_i0_expected = np.array([np.sinh(x)/x,
np.cosh(x)/x-np.sinh(x)/x**2])
assert_array_almost_equal(r_[sph_i0], sph_i0_expected)
sph_k0 = special.sph_kn(0, x)
sph_k0_expected = np.array([0.5*pi*exp(-x)/x,
-0.5*pi*exp(-x)*(1/x+1/x**2)])
assert_array_almost_equal(r_[sph_k0], sph_k0_expected)
sph_i0k0 = special.sph_inkn(0, x)
assert_array_almost_equal(r_[sph_i0+sph_k0],
r_[sph_i0k0],
10)
def test_sph_jn(self):
s1 = special.sph_jn(2,.2)
s10 = -s1[0][1]
s11 = s1[0][0]-2.0/0.2*s1[0][1]
s12 = s1[0][1]-3.0/0.2*s1[0][2]
assert_array_almost_equal(s1[0],[0.99334665397530607731,
0.066400380670322230863,
0.0026590560795273856680],12)
assert_array_almost_equal(s1[1],[s10,s11,s12],12)
def test_sph_jnyn(self):
jnyn = r_[special.sph_jn(1,.2) + special.sph_yn(1,.2)] # tuple addition
jnyn1 = r_[special.sph_jnyn(1,.2)]
assert_array_almost_equal(jnyn1,jnyn,9)
def test_sph_kn(self):
kn = special.sph_kn(2,.2)
kn0 = -kn[0][1]
kn1 = -kn[0][0]-2.0/0.2*kn[0][1]
kn2 = -kn[0][1]-3.0/0.2*kn[0][2]
assert_array_almost_equal(kn[0],[6.4302962978445670140,
38.581777787067402086,
585.15696310385559829],12)
assert_array_almost_equal(kn[1],[kn0,kn1,kn2],9)
def test_sph_yn(self):
sy1 = special.sph_yn(2,.2)[0][2]
sy2 = special.sph_yn(0,.2)[0][0]
        sphpy = (special.sph_yn(1,.2)[0][0]-2*special.sph_yn(2,.2)[0][2])/3  # analytically correct derivative value
        assert_almost_equal(sy1,-377.52483,5)  # previously computed reference values
        assert_almost_equal(sy2,-4.9003329,5)
        sy3 = special.sph_yn(1,.2)[1][1]
        assert_almost_equal(sy3,sphpy,4)  # derivative returned by sph_yn should match the analytic value
class TestStruve(object):
def _series(self, v, z, n=100):
"""Compute Struve function & error estimate from its power series."""
k = arange(0, n)
r = (-1)**k * (.5*z)**(2*k+v+1)/special.gamma(k+1.5)/special.gamma(k+v+1.5)
err = abs(r).max() * finfo(float_).eps * n
return r.sum(), err
def test_vs_series(self):
"""Check Struve function versus its power series"""
for v in [-20, -10, -7.99, -3.4, -1, 0, 1, 3.4, 12.49, 16]:
for z in [1, 10, 19, 21, 30]:
value, err = self._series(v, z)
assert_tol_equal(special.struve(v, z), value, rtol=0, atol=err), (v, z)
def test_some_values(self):
assert_tol_equal(special.struve(-7.99, 21), 0.0467547614113, rtol=1e-7)
assert_tol_equal(special.struve(-8.01, 21), 0.0398716951023, rtol=1e-8)
assert_tol_equal(special.struve(-3.0, 200), 0.0142134427432, rtol=1e-12)
assert_tol_equal(special.struve(-8.0, -41), 0.0192469727846, rtol=1e-11)
assert_equal(special.struve(-12, -41), -special.struve(-12, 41))
assert_equal(special.struve(+12, -41), -special.struve(+12, 41))
assert_equal(special.struve(-11, -41), +special.struve(-11, 41))
assert_equal(special.struve(+11, -41), +special.struve(+11, 41))
assert_(isnan(special.struve(-7.1, -1)))
assert_(isnan(special.struve(-10.1, -1)))
def test_regression_679(self):
"""Regression test for #679"""
assert_tol_equal(special.struve(-1.0, 20 - 1e-8), special.struve(-1.0, 20 + 1e-8))
assert_tol_equal(special.struve(-2.0, 20 - 1e-8), special.struve(-2.0, 20 + 1e-8))
assert_tol_equal(special.struve(-4.3, 20 - 1e-8), special.struve(-4.3, 20 + 1e-8))
def test_chi2_smalldf():
assert_almost_equal(special.chdtr(0.6,3), 0.957890536704110)
def test_chi2c_smalldf():
assert_almost_equal(special.chdtrc(0.6,3), 1-0.957890536704110)
def test_chi2_inv_smalldf():
assert_almost_equal(special.chdtri(0.6,1-0.957890536704110), 3)
def test_agm_simple():
assert_allclose(special.agm(24, 6), 13.4581714817)
assert_allclose(special.agm(1e30, 1), 2.2292230559453832047768593e28)
def test_legacy():
with warnings.catch_warnings():
warnings.simplefilter("ignore", RuntimeWarning)
# Legacy behavior: truncating arguments to integers
assert_equal(special.bdtrc(1, 2, 0.3), special.bdtrc(1.8, 2.8, 0.3))
assert_equal(special.bdtr(1, 2, 0.3), special.bdtr(1.8, 2.8, 0.3))
assert_equal(special.bdtri(1, 2, 0.3), special.bdtri(1.8, 2.8, 0.3))
assert_equal(special.expn(1, 0.3), special.expn(1.8, 0.3))
assert_equal(special.hyp2f0(1, 2, 0.3, 1), special.hyp2f0(1, 2, 0.3, 1.8))
assert_equal(special.nbdtrc(1, 2, 0.3), special.nbdtrc(1.8, 2.8, 0.3))
assert_equal(special.nbdtr(1, 2, 0.3), special.nbdtr(1.8, 2.8, 0.3))
assert_equal(special.nbdtri(1, 2, 0.3), special.nbdtri(1.8, 2.8, 0.3))
assert_equal(special.pdtrc(1, 0.3), special.pdtrc(1.8, 0.3))
assert_equal(special.pdtr(1, 0.3), special.pdtr(1.8, 0.3))
assert_equal(special.pdtri(1, 0.3), special.pdtri(1.8, 0.3))
assert_equal(special.kn(1, 0.3), special.kn(1.8, 0.3))
assert_equal(special.yn(1, 0.3), special.yn(1.8, 0.3))
assert_equal(special.smirnov(1, 0.3), special.smirnov(1.8, 0.3))
assert_equal(special.smirnovi(1, 0.3), special.smirnovi(1.8, 0.3))
@with_special_errors
def test_error_raising():
assert_raises(special.SpecialFunctionWarning, special.iv, 1, 1e99j)
def test_xlogy():
def xfunc(x, y):
if x == 0 and not np.isnan(y):
return x
else:
return x*np.log(y)
z1 = np.asarray([(0,0), (0, np.nan), (0, np.inf), (1.0, 2.0)], dtype=float)
z2 = np.r_[z1, [(0, 1j), (1, 1j)]]
w1 = np.vectorize(xfunc)(z1[:,0], z1[:,1])
assert_func_equal(special.xlogy, w1, z1, rtol=1e-13, atol=1e-13)
w2 = np.vectorize(xfunc)(z2[:,0], z2[:,1])
assert_func_equal(special.xlogy, w2, z2, rtol=1e-13, atol=1e-13)
def test_xlog1py():
def xfunc(x, y):
if x == 0 and not np.isnan(y):
return x
else:
return x * np.log1p(y)
z1 = np.asarray([(0,0), (0, np.nan), (0, np.inf), (1.0, 2.0),
(1, 1e-30)], dtype=float)
w1 = np.vectorize(xfunc)(z1[:,0], z1[:,1])
assert_func_equal(special.xlog1py, w1, z1, rtol=1e-13, atol=1e-13)
def test_entr():
def xfunc(x):
if x < 0:
return -np.inf
else:
return -special.xlogy(x, x)
values = (0, 0.5, 1.0, np.inf)
signs = [-1, 1]
arr = []
for sgn, v in itertools.product(signs, values):
arr.append(sgn * v)
z = np.array(arr, dtype=float)
w = np.vectorize(xfunc, otypes=[np.float64])(z)
assert_func_equal(special.entr, w, z, rtol=1e-13, atol=1e-13)
def test_kl_div():
def xfunc(x, y):
if x < 0 or y < 0 or (y == 0 and x != 0):
# extension of natural domain to preserve convexity
return np.inf
elif np.isposinf(x) or np.isposinf(y):
# limits within the natural domain
return np.inf
elif x == 0:
return y
else:
return special.xlogy(x, x/y) - x + y
values = (0, 0.5, 1.0)
signs = [-1, 1]
arr = []
for sgna, va, sgnb, vb in itertools.product(signs, values, signs, values):
arr.append((sgna*va, sgnb*vb))
z = np.array(arr, dtype=float)
w = np.vectorize(xfunc, otypes=[np.float64])(z[:,0], z[:,1])
assert_func_equal(special.kl_div, w, z, rtol=1e-13, atol=1e-13)
def test_rel_entr():
def xfunc(x, y):
if x > 0 and y > 0:
return special.xlogy(x, x/y)
elif x == 0 and y >= 0:
return 0
else:
return np.inf
values = (0, 0.5, 1.0)
signs = [-1, 1]
arr = []
for sgna, va, sgnb, vb in itertools.product(signs, values, signs, values):
arr.append((sgna*va, sgnb*vb))
z = np.array(arr, dtype=float)
w = np.vectorize(xfunc, otypes=[np.float64])(z[:,0], z[:,1])
assert_func_equal(special.rel_entr, w, z, rtol=1e-13, atol=1e-13)
def test_huber():
assert_equal(special.huber(-1, 1.5), np.inf)
assert_allclose(special.huber(2, 1.5), 0.5 * np.square(1.5))
assert_allclose(special.huber(2, 2.5), 2 * (2.5 - 0.5 * 2))
def xfunc(delta, r):
if delta < 0:
return np.inf
elif np.abs(r) < delta:
return 0.5 * np.square(r)
else:
return delta * (np.abs(r) - 0.5 * delta)
z = np.random.randn(10, 2)
w = np.vectorize(xfunc, otypes=[np.float64])(z[:,0], z[:,1])
assert_func_equal(special.huber, w, z, rtol=1e-13, atol=1e-13)
def test_pseudo_huber():
def xfunc(delta, r):
if delta < 0:
return np.inf
elif (not delta) or (not r):
return 0
else:
return delta**2 * (np.sqrt(1 + (r/delta)**2) - 1)
z = np.array(np.random.randn(10, 2).tolist() + [[0, 0.5], [0.5, 0]])
w = np.vectorize(xfunc, otypes=[np.float64])(z[:,0], z[:,1])
assert_func_equal(special.pseudo_huber, w, z, rtol=1e-13, atol=1e-13)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | -6,958,557,700,135,479,000 | 5,004,383,289,923,406,000 | 37.243979 | 110 | 0.52924 | false |
andykimpe/chromium-test-npapi | tools/telemetry/telemetry/core/backends/webdriver/webdriver_tab_list_backend.py | 13 | 1386 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends.webdriver import webdriver_tab_backend
class WebDriverTabListBackend(object):
def __init__(self, browser_backend):
self._browser_backend = browser_backend
# Stores the window handles.
self._tab_list = []
self._UpdateTabList()
def New(self, timeout=None):
# Webdriver API doesn't support tab controlling.
raise NotImplementedError()
def __iter__(self):
self._UpdateTabList()
return self._tab_list.__iter__()
def __len__(self):
self._UpdateTabList()
return len(self._tab_list)
def __getitem__(self, index):
self._UpdateTabList()
if len(self._tab_list) <= index:
raise IndexError('list index out of range')
return self._tab_list[index]
def _UpdateTabList(self):
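    # Reuse the existing tab backend for any window handle that is still open
    # so per-tab state is preserved; create new backends only for handles that
    # appeared since the last update.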
window_handles = self._browser_backend.driver.window_handles
old_tab_list = self._tab_list
self._tab_list = []
for window_handle in window_handles:
tab = None
for old_tab in old_tab_list:
if old_tab.window_handle == window_handle:
tab = old_tab
break
else:
tab = webdriver_tab_backend.WebDriverTabBackend(
self._browser_backend, window_handle)
self._tab_list.append(tab)
| bsd-3-clause | -1,393,256,278,469,408,500 | 1,465,974,926,253,565,700 | 29.8 | 72 | 0.663059 | false |
rgerkin/pyNeuroML | pyneuroml/tune/NeuroMLSimulation.py | 1 | 5357 | '''
A class for running a single instance of a NeuroML model by generating a
LEMS file and using pyNeuroML to run in a chosen simulator
'''
import sys
import time
from pyneuroml import pynml
from pyneuroml.lems import generate_lems_file_for_neuroml
try:
import pyelectro # Not used here, just for checking installation
except:
print('>> Note: pyelectro from https://github.com/pgleeson/pyelectro is required!')
exit()
try:
import neurotune # Not used here, just for checking installation
except:
print('>> Note: neurotune from https://github.com/pgleeson/neurotune is required!')
exit()
class NeuroMLSimulation(object):
def __init__(self,
reference,
neuroml_file,
target,
sim_time=1000,
dt=0.05,
simulator='jNeuroML',
generate_dir = './',
cleanup = True,
nml_doc = None):
self.sim_time = sim_time
self.dt = dt
self.simulator = simulator
self.generate_dir = generate_dir if generate_dir.endswith('/') else generate_dir+'/'
self.reference = reference
self.target = target
self.neuroml_file = neuroml_file
self.nml_doc = nml_doc
self.cleanup = cleanup
self.already_run = False
def show(self):
"""
        Plot the result of the simulation once it has been run.
"""
from matplotlib import pyplot as plt
if self.already_run:
for ref in self.volts.keys():
plt.plot(self.t, self.volts[ref], label=ref)
plt.title("Simulation voltage vs time")
plt.legend()
plt.xlabel("Time [ms]")
plt.ylabel("Voltage [mV]")
else:
pynml.print_comment("First you have to 'go()' the simulation.", True)
plt.show()
def go(self):
lems_file_name = 'LEMS_%s.xml'%(self.reference)
generate_lems_file_for_neuroml(self.reference,
self.neuroml_file,
self.target,
self.sim_time,
self.dt,
lems_file_name = lems_file_name,
target_dir = self.generate_dir,
nml_doc = self.nml_doc)
pynml.print_comment_v("Running a simulation of %s ms with timestep %s ms: %s"%(self.sim_time, self.dt, lems_file_name))
self.already_run = True
start = time.time()
if self.simulator == 'jNeuroML':
results = pynml.run_lems_with_jneuroml(lems_file_name,
nogui=True,
load_saved_data=True,
plot=False,
exec_in_dir = self.generate_dir,
verbose=False,
cleanup=self.cleanup)
elif self.simulator == 'jNeuroML_NEURON':
results = pynml.run_lems_with_jneuroml_neuron(lems_file_name,
nogui=True,
load_saved_data=True,
plot=False,
exec_in_dir = self.generate_dir,
verbose=False,
cleanup=self.cleanup)
else:
pynml.print_comment_v('Unsupported simulator: %s'%self.simulator)
exit()
secs = time.time()-start
pynml.print_comment_v("Ran simulation in %s in %f seconds (%f mins)\n\n"%(self.simulator, secs, secs/60.0))
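        # Simulators return SI units (seconds, volts); convert to ms and mV.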
self.t = [t*1000 for t in results['t']]
self.volts = {}
for key in results.keys():
if key != 't':
self.volts[key] = [v*1000 for v in results[key]]
if __name__ == '__main__':
sim_time = 700
dt = 0.05
if len(sys.argv) == 2 and sys.argv[1] == '-net':
sim = NeuroMLSimulation('TestNet',
'../../examples/test_data/simplenet.nml',
'simplenet',
sim_time,
dt,
'jNeuroML',
'temp/')
sim.go()
sim.show()
else:
sim = NeuroMLSimulation('TestHH',
'../../examples/test_data/HHCellNetwork.net.nml',
'HHCellNetwork',
sim_time,
dt,
'jNeuroML',
'temp')
sim.go()
sim.show()
| lgpl-3.0 | 6,708,183,505,584,520,000 | -8,529,624,273,215,800,000 | 31.271084 | 127 | 0.417958 | false |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/IPython/terminal/pt_inputhooks/wx.py | 8 | 5441 | """Enable wxPython to be used interacively in prompt_toolkit
"""
from __future__ import absolute_import
import sys
import signal
import time
from timeit import default_timer as clock
import wx
def inputhook_wx1(context):
"""Run the wx event loop by processing pending events only.
This approach seems to work, but its performance is not great as it
relies on having PyOS_InputHook called regularly.
"""
try:
app = wx.GetApp()
if app is not None:
assert wx.Thread_IsMain()
# Make a temporary event loop and process system events until
# there are no more waiting, then allow idle events (which
# will also deal with pending or posted wx events.)
evtloop = wx.EventLoop()
ea = wx.EventLoopActivator(evtloop)
while evtloop.Pending():
evtloop.Dispatch()
app.ProcessIdle()
del ea
except KeyboardInterrupt:
pass
return 0
class EventLoopTimer(wx.Timer):
def __init__(self, func):
self.func = func
wx.Timer.__init__(self)
def Notify(self):
self.func()
class EventLoopRunner(object):
def Run(self, time, input_is_ready):
self.input_is_ready = input_is_ready
self.evtloop = wx.EventLoop()
self.timer = EventLoopTimer(self.check_stdin)
self.timer.Start(time)
self.evtloop.Run()
def check_stdin(self):
if self.input_is_ready():
self.timer.Stop()
self.evtloop.Exit()
def inputhook_wx2(context):
"""Run the wx event loop, polling for stdin.
    This version runs the wx event loop for an undetermined amount of time,
during which it periodically checks to see if anything is ready on
stdin. If anything is ready on stdin, the event loop exits.
The argument to elr.Run controls how often the event loop looks at stdin.
This determines the responsiveness at the keyboard. A setting of 1000
enables a user to type at most 1 char per second. I have found that a
setting of 10 gives good keyboard response. We can shorten it further,
but eventually performance would suffer from calling select/kbhit too
often.
"""
try:
app = wx.GetApp()
if app is not None:
assert wx.Thread_IsMain()
elr = EventLoopRunner()
# As this time is made shorter, keyboard response improves, but idle
# CPU load goes up. 10 ms seems like a good compromise.
elr.Run(time=10, # CHANGE time here to control polling interval
input_is_ready=context.input_is_ready)
except KeyboardInterrupt:
pass
return 0
def inputhook_wx3(context):
"""Run the wx event loop by processing pending events only.
This is like inputhook_wx1, but it keeps processing pending events
until stdin is ready. After processing all pending events, a call to
    time.sleep is inserted. This is needed; otherwise, CPU usage sits at 100%.
    The sleep time may need further tuning for best performance.
"""
# We need to protect against a user pressing Control-C when IPython is
# idle and this is running. We trap KeyboardInterrupt and pass.
try:
app = wx.GetApp()
if app is not None:
assert wx.Thread_IsMain()
# The import of wx on Linux sets the handler for signal.SIGINT
# to 0. This is a bug in wx or gtk. We fix by just setting it
# back to the Python default.
if not callable(signal.getsignal(signal.SIGINT)):
signal.signal(signal.SIGINT, signal.default_int_handler)
evtloop = wx.EventLoop()
ea = wx.EventLoopActivator(evtloop)
t = clock()
while not context.input_is_ready():
while evtloop.Pending():
t = clock()
evtloop.Dispatch()
app.ProcessIdle()
# We need to sleep at this point to keep the idle CPU load
            # low. However, if we sleep too long, GUI response is poor. As
# a compromise, we watch how often GUI events are being processed
# and switch between a short and long sleep time. Here are some
# stats useful in helping to tune this.
# time CPU load
# 0.001 13%
# 0.005 3%
# 0.01 1.5%
# 0.05 0.5%
used_time = clock() - t
if used_time > 10.0:
# print 'Sleep for 1 s' # dbg
time.sleep(1.0)
elif used_time > 0.1:
# Few GUI events coming in, so we can sleep longer
# print 'Sleep for 0.05 s' # dbg
time.sleep(0.05)
else:
# Many GUI events coming in, so sleep only very little
time.sleep(0.001)
del ea
except KeyboardInterrupt:
pass
return 0
if sys.platform == 'darwin':
# On OSX, evtloop.Pending() always returns True, regardless of there being
# any events pending. As such we can't use implementations 1 or 3 of the
# inputhook as those depend on a pending/dispatch loop.
inputhook = inputhook_wx2
else:
# This is our default implementation
inputhook = inputhook_wx3
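# A minimal usage sketch (not part of the original module): each hook expects
# a context object exposing input_is_ready(), as prompt_toolkit supplies.
# _StdinContext below is a hypothetical stand-in, shown only for illustration.
#
#     import select
#     class _StdinContext(object):
#         def input_is_ready(self):
#             return bool(select.select([sys.stdin], [], [], 0)[0])
#
#     inputhook(_StdinContext())  # pump wx events until stdin has input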
| gpl-3.0 | 6,764,037,964,693,270,000 | 6,755,785,336,617,648,000 | 35.763514 | 81 | 0.596765 | false |
bearstech/ansible | test/units/module_utils/test_network_common.py | 31 | 5437 | # -*- coding: utf-8 -*-
#
# (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.module_utils.network_common import to_list, sort_list
from ansible.module_utils.network_common import dict_diff, dict_merge
from ansible.module_utils.network_common import conditional, Template
class TestModuleUtilsNetworkCommon(unittest.TestCase):
def test_to_list(self):
for scalar in ('string', 1, True, False, None):
self.assertTrue(isinstance(to_list(scalar), list))
for container in ([1, 2, 3], {'one': 1}):
self.assertTrue(isinstance(to_list(container), list))
test_list = [1, 2, 3]
self.assertNotEqual(id(test_list), id(to_list(test_list)))
def test_sort(self):
data = [3, 1, 2]
self.assertEqual([1, 2, 3], sort_list(data))
string_data = '123'
self.assertEqual(string_data, sort_list(string_data))
def test_dict_diff(self):
base = dict(obj2=dict(), b1=True, b2=False, b3=False,
one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
l1=[1, 3], l2=[1, 2, 3], l4=[4],
nested=dict(n1=dict(n2=2)))
other = dict(b1=True, b2=False, b3=True, b4=True,
one=1, three=4, four=4, obj1=dict(key1=2),
l1=[2, 1], l2=[3, 2, 1], l3=[1],
nested=dict(n1=dict(n2=2, n3=3)))
result = dict_diff(base, other)
# string assertions
self.assertNotIn('one', result)
self.assertNotIn('two', result)
self.assertEqual(result['three'], 4)
self.assertEqual(result['four'], 4)
# dict assertions
self.assertIn('obj1', result)
self.assertIn('key1', result['obj1'])
self.assertNotIn('key2', result['obj1'])
# list assertions
self.assertEqual(result['l1'], [2, 1])
self.assertNotIn('l2', result)
self.assertEqual(result['l3'], [1])
self.assertNotIn('l4', result)
# nested assertions
self.assertIn('obj1', result)
self.assertEqual(result['obj1']['key1'], 2)
self.assertNotIn('key2', result['obj1'])
# bool assertions
self.assertNotIn('b1', result)
self.assertNotIn('b2', result)
self.assertTrue(result['b3'])
self.assertTrue(result['b4'])
def test_dict_merge(self):
base = dict(obj2=dict(), b1=True, b2=False, b3=False,
one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
l1=[1, 3], l2=[1, 2, 3], l4=[4],
nested=dict(n1=dict(n2=2)))
other = dict(b1=True, b2=False, b3=True, b4=True,
one=1, three=4, four=4, obj1=dict(key1=2),
l1=[2, 1], l2=[3, 2, 1], l3=[1],
nested=dict(n1=dict(n2=2, n3=3)))
result = dict_merge(base, other)
# string assertions
self.assertIn('one', result)
self.assertIn('two', result)
self.assertEqual(result['three'], 4)
self.assertEqual(result['four'], 4)
# dict assertions
self.assertIn('obj1', result)
self.assertIn('key1', result['obj1'])
self.assertIn('key2', result['obj1'])
# list assertions
self.assertEqual(result['l1'], [1, 2, 3])
self.assertIn('l2', result)
self.assertEqual(result['l3'], [1])
self.assertIn('l4', result)
# nested assertions
self.assertIn('obj1', result)
self.assertEqual(result['obj1']['key1'], 2)
self.assertIn('key2', result['obj1'])
# bool assertions
self.assertIn('b1', result)
self.assertIn('b2', result)
self.assertTrue(result['b3'])
self.assertTrue(result['b4'])
def test_conditional(self):
self.assertTrue(conditional(10, 10))
self.assertTrue(conditional('10', '10'))
self.assertTrue(conditional('foo', 'foo'))
self.assertTrue(conditional(True, True))
self.assertTrue(conditional(False, False))
self.assertTrue(conditional(None, None))
self.assertTrue(conditional("ge(1)", 1))
self.assertTrue(conditional("gt(1)", 2))
self.assertTrue(conditional("le(2)", 2))
self.assertTrue(conditional("lt(3)", 2))
self.assertTrue(conditional("eq(1)", 1))
self.assertTrue(conditional("neq(0)", 1))
self.assertTrue(conditional("min(1)", 1))
self.assertTrue(conditional("max(1)", 1))
self.assertTrue(conditional("exactly(1)", 1))
def test_template(self):
tmpl = Template()
self.assertEqual('foo', tmpl('{{ test }}', {'test': 'foo'}))
| gpl-3.0 | -5,558,771,534,649,703,000 | -8,610,141,705,720,799,000 | 35.006623 | 70 | 0.589112 | false |
heke123/chromium-crosswalk | build/android/buildbot/bb_device_status_check.py | 7 | 14834 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A class to keep track of devices across builds and report state."""
import argparse
import json
import logging
import os
import psutil
import re
import signal
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import devil_chromium
from devil import devil_env
from devil.android import battery_utils
from devil.android import device_blacklist
from devil.android import device_errors
from devil.android import device_list
from devil.android import device_utils
from devil.android.sdk import adb_wrapper
from devil.constants import exit_codes
from devil.utils import lsusb
from devil.utils import reset_usb
from devil.utils import run_tests_helper
from pylib.constants import host_paths
_RE_DEVICE_ID = re.compile(r'Device ID = (\d+)')
def KillAllAdb():
def GetAllAdb():
for p in psutil.process_iter():
try:
if 'adb' in p.name:
yield p
except (psutil.NoSuchProcess, psutil.AccessDenied):
pass
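  # Escalate through progressively harsher signals so adb has a chance to
  # exit cleanly before being killed outright.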
for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
for p in GetAllAdb():
try:
logging.info('kill %d %d (%s [%s])', sig, p.pid, p.name,
' '.join(p.cmdline))
p.send_signal(sig)
except (psutil.NoSuchProcess, psutil.AccessDenied):
pass
for p in GetAllAdb():
try:
logging.error('Unable to kill %d (%s [%s])', p.pid, p.name,
' '.join(p.cmdline))
except (psutil.NoSuchProcess, psutil.AccessDenied):
pass
def _IsBlacklisted(serial, blacklist):
return blacklist and serial in blacklist.Read()
def _BatteryStatus(device, blacklist):
battery_info = {}
try:
battery = battery_utils.BatteryUtils(device)
battery_info = battery.GetBatteryInfo(timeout=5)
battery_level = int(battery_info.get('level', 100))
if battery_level < 15:
logging.error('Critically low battery level (%d)', battery_level)
battery = battery_utils.BatteryUtils(device)
if not battery.GetCharging():
battery.SetCharging(True)
if blacklist:
blacklist.Extend([device.adb.GetDeviceSerial()], reason='low_battery')
except device_errors.CommandFailedError:
logging.exception('Failed to get battery information for %s',
str(device))
return battery_info
def _IMEISlice(device):
imei_slice = ''
try:
for l in device.RunShellCommand(['dumpsys', 'iphonesubinfo'],
check_return=True, timeout=5):
m = _RE_DEVICE_ID.match(l)
if m:
imei_slice = m.group(1)[-6:]
except device_errors.CommandFailedError:
logging.exception('Failed to get IMEI slice for %s', str(device))
return imei_slice
def DeviceStatus(devices, blacklist):
"""Generates status information for the given devices.
Args:
devices: The devices to generate status for.
blacklist: The current device blacklist.
Returns:
A dict of the following form:
{
'<serial>': {
'serial': '<serial>',
'adb_status': str,
'usb_status': bool,
'blacklisted': bool,
# only if the device is connected and not blacklisted
'type': ro.build.product,
'build': ro.build.id,
'build_detail': ro.build.fingerprint,
'battery': {
...
},
'imei_slice': str,
'wifi_ip': str,
},
...
}
"""
adb_devices = {
a[0].GetDeviceSerial(): a
for a in adb_wrapper.AdbWrapper.Devices(desired_state=None, long_list=True)
}
usb_devices = set(lsusb.get_android_devices())
def blacklisting_device_status(device):
serial = device.adb.GetDeviceSerial()
adb_status = (
adb_devices[serial][1] if serial in adb_devices
else 'missing')
usb_status = bool(serial in usb_devices)
device_status = {
'serial': serial,
'adb_status': adb_status,
'usb_status': usb_status,
}
if not _IsBlacklisted(serial, blacklist):
if adb_status == 'device':
try:
build_product = device.build_product
build_id = device.build_id
build_fingerprint = device.GetProp('ro.build.fingerprint', cache=True)
wifi_ip = device.GetProp('dhcp.wlan0.ipaddress')
battery_info = _BatteryStatus(device, blacklist)
imei_slice = _IMEISlice(device)
if (device.product_name == 'mantaray' and
battery_info.get('AC powered', None) != 'true'):
logging.error('Mantaray device not connected to AC power.')
device_status.update({
'ro.build.product': build_product,
'ro.build.id': build_id,
'ro.build.fingerprint': build_fingerprint,
'battery': battery_info,
'imei_slice': imei_slice,
'wifi_ip': wifi_ip,
# TODO(jbudorick): Remove these once no clients depend on them.
'type': build_product,
'build': build_id,
'build_detail': build_fingerprint,
})
except device_errors.CommandFailedError:
logging.exception('Failure while getting device status for %s.',
str(device))
if blacklist:
blacklist.Extend([serial], reason='status_check_failure')
except device_errors.CommandTimeoutError:
logging.exception('Timeout while getting device status for %s.',
str(device))
if blacklist:
blacklist.Extend([serial], reason='status_check_timeout')
elif blacklist:
blacklist.Extend([serial],
reason=adb_status if usb_status else 'offline')
device_status['blacklisted'] = _IsBlacklisted(serial, blacklist)
return device_status
parallel_devices = device_utils.DeviceUtils.parallel(devices)
statuses = parallel_devices.pMap(blacklisting_device_status).pGet(None)
return statuses
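# Hypothetical example of consuming the dicts returned above (illustration
# only, not part of the original script):
#
#   for status in DeviceStatus(devices, blacklist):
#     if status['adb_status'] == 'device' and not status['blacklisted']:
#       print('%s is healthy (build %s)' % (status['serial'], status.get('build')))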
def RecoverDevices(devices, blacklist):
"""Attempts to recover any inoperable devices in the provided list.
Args:
devices: The list of devices to attempt to recover.
    blacklist: The current device blacklist, which will be used and then
reset.
Returns:
Nothing.
"""
statuses = DeviceStatus(devices, blacklist)
should_restart_usb = set(
status['serial'] for status in statuses
if (not status['usb_status']
or status['adb_status'] in ('offline', 'missing')))
should_restart_adb = should_restart_usb.union(set(
status['serial'] for status in statuses
if status['adb_status'] == 'unauthorized'))
should_reboot_device = should_restart_adb.union(set(
status['serial'] for status in statuses
if status['blacklisted']))
logging.debug('Should restart USB for:')
for d in should_restart_usb:
logging.debug(' %s', d)
logging.debug('Should restart ADB for:')
for d in should_restart_adb:
logging.debug(' %s', d)
logging.debug('Should reboot:')
for d in should_reboot_device:
logging.debug(' %s', d)
if blacklist:
blacklist.Reset()
if should_restart_adb:
KillAllAdb()
for serial in should_restart_usb:
try:
reset_usb.reset_android_usb(serial)
except IOError:
logging.exception('Unable to reset USB for %s.', serial)
if blacklist:
blacklist.Extend([serial], reason='usb_failure')
except device_errors.DeviceUnreachableError:
logging.exception('Unable to reset USB for %s.', serial)
if blacklist:
blacklist.Extend([serial], reason='offline')
def blacklisting_recovery(device):
if _IsBlacklisted(device.adb.GetDeviceSerial(), blacklist):
logging.debug('%s is blacklisted, skipping recovery.', str(device))
return
if str(device) in should_reboot_device:
try:
device.WaitUntilFullyBooted(retries=0)
return
except (device_errors.CommandTimeoutError,
device_errors.CommandFailedError):
logging.exception('Failure while waiting for %s. '
'Attempting to recover.', str(device))
try:
try:
device.Reboot(block=False, timeout=5, retries=0)
except device_errors.CommandTimeoutError:
logging.warning('Timed out while attempting to reboot %s normally.'
'Attempting alternative reboot.', str(device))
# The device drops offline before we can grab the exit code, so
# we don't check for status.
device.adb.Root()
device.adb.Shell('echo b > /proc/sysrq-trigger', expect_status=None,
timeout=5, retries=0)
except device_errors.CommandFailedError:
logging.exception('Failed to reboot %s.', str(device))
if blacklist:
blacklist.Extend([device.adb.GetDeviceSerial()],
reason='reboot_failure')
except device_errors.CommandTimeoutError:
logging.exception('Timed out while rebooting %s.', str(device))
if blacklist:
blacklist.Extend([device.adb.GetDeviceSerial()],
reason='reboot_timeout')
try:
device.WaitUntilFullyBooted(retries=0)
except device_errors.CommandFailedError:
logging.exception('Failure while waiting for %s.', str(device))
if blacklist:
blacklist.Extend([device.adb.GetDeviceSerial()],
reason='reboot_failure')
except device_errors.CommandTimeoutError:
logging.exception('Timed out while waiting for %s.', str(device))
if blacklist:
blacklist.Extend([device.adb.GetDeviceSerial()],
reason='reboot_timeout')
device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_recovery)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--out-dir',
help='Directory where the device path is stored',
default=os.path.join(host_paths.DIR_SOURCE_ROOT, 'out'))
parser.add_argument('--restart-usb', action='store_true',
help='DEPRECATED. '
'This script now always tries to reset USB.')
parser.add_argument('--json-output',
help='Output JSON information into a specified file.')
parser.add_argument('--adb-path',
help='Absolute path to the adb binary to use.')
parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
parser.add_argument('--known-devices-file', action='append', default=[],
dest='known_devices_files',
help='Path to known device lists.')
parser.add_argument('-v', '--verbose', action='count', default=1,
help='Log more information.')
args = parser.parse_args()
run_tests_helper.SetLogLevel(args.verbose)
devil_custom_deps = None
if args.adb_path:
devil_custom_deps = {
'adb': {
devil_env.GetPlatform(): [args.adb_path],
},
}
devil_chromium.Initialize(custom_deps=devil_custom_deps)
blacklist = (device_blacklist.Blacklist(args.blacklist_file)
if args.blacklist_file
else None)
last_devices_path = os.path.join(
args.out_dir, device_list.LAST_DEVICES_FILENAME)
args.known_devices_files.append(last_devices_path)
expected_devices = set()
try:
for path in args.known_devices_files:
if os.path.exists(path):
expected_devices.update(device_list.GetPersistentDeviceList(path))
except IOError:
logging.warning('Problem reading %s, skipping.', path)
logging.info('Expected devices:')
for device in expected_devices:
logging.info(' %s', device)
usb_devices = set(lsusb.get_android_devices())
devices = [device_utils.DeviceUtils(s)
for s in expected_devices.union(usb_devices)]
RecoverDevices(devices, blacklist)
statuses = DeviceStatus(devices, blacklist)
# Log the state of all devices.
for status in statuses:
logging.info(status['serial'])
adb_status = status.get('adb_status')
blacklisted = status.get('blacklisted')
logging.info(' USB status: %s',
'online' if status.get('usb_status') else 'offline')
logging.info(' ADB status: %s', adb_status)
logging.info(' Blacklisted: %s', str(blacklisted))
if adb_status == 'device' and not blacklisted:
logging.info(' Device type: %s', status.get('ro.build.product'))
logging.info(' OS build: %s', status.get('ro.build.id'))
logging.info(' OS build fingerprint: %s',
status.get('ro.build.fingerprint'))
logging.info(' Battery state:')
for k, v in status.get('battery', {}).iteritems():
logging.info(' %s: %s', k, v)
logging.info(' IMEI slice: %s', status.get('imei_slice'))
logging.info(' WiFi IP: %s', status.get('wifi_ip'))
# Update the last devices file(s).
for path in args.known_devices_files:
device_list.WritePersistentDeviceList(
path, [status['serial'] for status in statuses])
# Write device info to file for buildbot info display.
if os.path.exists('/home/chrome-bot'):
with open('/home/chrome-bot/.adb_device_info', 'w') as f:
for status in statuses:
try:
if status['adb_status'] == 'device':
f.write('{serial} {adb_status} {build_product} {build_id} '
'{temperature:.1f}C {level}%\n'.format(
serial=status['serial'],
adb_status=status['adb_status'],
build_product=status['type'],
build_id=status['build'],
temperature=float(status['battery']['temperature']) / 10,
level=status['battery']['level']
))
elif status.get('usb_status', False):
f.write('{serial} {adb_status}\n'.format(
serial=status['serial'],
adb_status=status['adb_status']
))
else:
f.write('{serial} offline\n'.format(
serial=status['serial']
))
except Exception: # pylint: disable=broad-except
pass
# Dump the device statuses to JSON.
if args.json_output:
with open(args.json_output, 'wb') as f:
f.write(json.dumps(statuses, indent=4))
live_devices = [status['serial'] for status in statuses
if (status['adb_status'] == 'device'
and not _IsBlacklisted(status['serial'], blacklist))]
# If all devices failed, or if there are no devices, it's an infra error.
return 0 if live_devices else exit_codes.INFRA
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 4,748,654,839,227,159,000 | -7,232,832,489,395,159,000 | 33.578089 | 80 | 0.620197 | false |
RonnyPfannschmidt/pytest | testing/test_faulthandler.py | 3 | 5123 | import io
import sys
import pytest
from _pytest.pytester import Pytester
def test_enabled(pytester: Pytester) -> None:
"""Test single crashing test displays a traceback."""
pytester.makepyfile(
"""
import faulthandler
def test_crash():
faulthandler._sigabrt()
"""
)
result = pytester.runpytest_subprocess()
result.stderr.fnmatch_lines(["*Fatal Python error*"])
assert result.ret != 0
def setup_crashing_test(pytester: Pytester) -> None:
pytester.makepyfile(
"""
import faulthandler
import atexit
def test_ok():
atexit.register(faulthandler._sigabrt)
"""
)
def test_crash_during_shutdown_captured(pytester: Pytester) -> None:
"""
Re-enable faulthandler if pytest encountered it enabled during configure.
We should be able to then see crashes during interpreter shutdown.
"""
setup_crashing_test(pytester)
args = (sys.executable, "-Xfaulthandler", "-mpytest")
result = pytester.run(*args)
result.stderr.fnmatch_lines(["*Fatal Python error*"])
assert result.ret != 0
def test_crash_during_shutdown_not_captured(pytester: Pytester) -> None:
"""
Check that pytest leaves faulthandler disabled if it was not enabled during configure.
This prevents us from seeing crashes during interpreter shutdown (see #8260).
"""
setup_crashing_test(pytester)
args = (sys.executable, "-mpytest")
result = pytester.run(*args)
result.stderr.no_fnmatch_line("*Fatal Python error*")
assert result.ret != 0
def test_disabled(pytester: Pytester) -> None:
"""Test option to disable fault handler in the command line."""
pytester.makepyfile(
"""
import faulthandler
def test_disabled():
assert not faulthandler.is_enabled()
"""
)
result = pytester.runpytest_subprocess("-p", "no:faulthandler")
result.stdout.fnmatch_lines(["*1 passed*"])
assert result.ret == 0
@pytest.mark.parametrize(
"enabled",
[
pytest.param(
True, marks=pytest.mark.skip(reason="sometimes crashes on CI (#7022)")
),
False,
],
)
def test_timeout(pytester: Pytester, enabled: bool) -> None:
"""Test option to dump tracebacks after a certain timeout.
If faulthandler is disabled, no traceback will be dumped.
"""
pytester.makepyfile(
"""
import os, time
def test_timeout():
time.sleep(1 if "CI" in os.environ else 0.1)
"""
)
pytester.makeini(
"""
[pytest]
faulthandler_timeout = 0.01
"""
)
args = ["-p", "no:faulthandler"] if not enabled else []
result = pytester.runpytest_subprocess(*args)
tb_output = "most recent call first"
if enabled:
result.stderr.fnmatch_lines(["*%s*" % tb_output])
else:
assert tb_output not in result.stderr.str()
result.stdout.fnmatch_lines(["*1 passed*"])
assert result.ret == 0
@pytest.mark.parametrize("hook_name", ["pytest_enter_pdb", "pytest_exception_interact"])
def test_cancel_timeout_on_hook(monkeypatch, hook_name) -> None:
"""Make sure that we are cancelling any scheduled traceback dumping due
to timeout before entering pdb (pytest-dev/pytest-faulthandler#12) or any
other interactive exception (pytest-dev/pytest-faulthandler#14)."""
import faulthandler
from _pytest import faulthandler as faulthandler_plugin
called = []
monkeypatch.setattr(
faulthandler, "cancel_dump_traceback_later", lambda: called.append(1)
)
# call our hook explicitly, we can trust that pytest will call the hook
# for us at the appropriate moment
hook_func = getattr(faulthandler_plugin, hook_name)
hook_func()
assert called == [1]
def test_already_initialized_crash(pytester: Pytester) -> None:
"""Even if faulthandler is already initialized, we still dump tracebacks on crashes (#8258)."""
pytester.makepyfile(
"""
def test():
import faulthandler
faulthandler._sigabrt()
"""
)
result = pytester.run(
sys.executable,
"-X",
"faulthandler",
"-mpytest",
pytester.path,
)
result.stderr.fnmatch_lines(["*Fatal Python error*"])
assert result.ret != 0
def test_get_stderr_fileno_invalid_fd() -> None:
"""Test for faulthandler being able to handle invalid file descriptors for stderr (#8249)."""
from _pytest.faulthandler import get_stderr_fileno
class StdErrWrapper(io.StringIO):
"""
Mimic ``twisted.logger.LoggingFile`` to simulate returning an invalid file descriptor.
https://github.com/twisted/twisted/blob/twisted-20.3.0/src/twisted/logger/_io.py#L132-L139
"""
def fileno(self):
return -1
wrapper = StdErrWrapper()
with pytest.MonkeyPatch.context() as mp:
mp.setattr("sys.stderr", wrapper)
# Even when the stderr wrapper signals an invalid file descriptor,
# ``_get_stderr_fileno()`` should return the real one.
assert get_stderr_fileno() == 2
| mit | -7,164,025,938,504,688,000 | 1,161,293,801,071,617,300 | 28.784884 | 99 | 0.643568 | false |
shiftcontrol/UnityOpenCV | opencv/tests/swig_python/highgui/match.py | 3 | 1348 | """
This script will compare two images and decide with a threshold
if these two images are "equal enough"
"""
# import the necessary things for OpenCV
from cv import *
from highgui import *
import frames
import sys
import os
PREFIX=os.path.join(os.environ["srcdir"],"../../opencv_extra/testdata/python/images/")
DisplayImages=False
if DisplayImages:
videowindow="video"
referencewindow="reference"
cvNamedWindow(videowindow,CV_WINDOW_AUTOSIZE)
cvNamedWindow(referencewindow,CV_WINDOW_AUTOSIZE)
# returns True/False if match/non-match
def match( image, index, thres ):
# load image from comparison set
QCIFcompare=cvLoadImage(PREFIX+frames.QCIF[index])
if QCIFcompare is None:
print "Couldn't open image "+PREFIX+frames.QCIF[index]+" for comparison!"
sys.exit(1)
# resize comparison image to input image dimensions
size=cvSize(image.width,image.height)
compare=cvCreateImage(size,IPL_DEPTH_8U,image.nChannels)
cvResize(QCIFcompare,compare)
# compare images
diff=cvNorm( image, compare, CV_RELATIVE_L2 )
if DisplayImages:
cvShowImage(videowindow,image)
cvShowImage(referencewindow,compare)
if diff<=thres:
cvWaitKey(200)
else:
print "index==",index,": max==",thres," is==",diff
cvWaitKey(5000)
cvReleaseImage(QCIFcompare)
cvReleaseImage(compare)
if diff<=thres:
return True
else:
return False
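# Usage sketch (added illustration; the frame index and threshold below are
# made-up values): check a loaded image against reference frame 3, allowing a
# relative L2 difference of up to 0.01.
if __name__ == "__main__":
    img=cvLoadImage(PREFIX+frames.QCIF[3])
    if match(img,3,0.01):
        print "images match"
    else:
        print "images differ"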
| gpl-3.0 | -1,210,844,899,795,200,300 | -3,646,950,104,578,152,000 | 21.466667 | 86 | 0.755935 | false |
klaus385/openpilot | selfdrive/crash.py | 2 | 1230 | """Install exception handler for process crash."""
import os
import sys
from selfdrive.version import version, dirty
from selfdrive.swaglog import cloudlog
if os.getenv("NOLOG") or os.getenv("NOCRASH"):
def capture_exception(*exc_info):
pass
def bind_user(**kwargs):
pass
def bind_extra(**kwargs):
pass
def install():
pass
else:
from raven import Client
from raven.transport.http import HTTPTransport
client = Client('https://1994756b5e6f41cf939a4c65de45f4f2:[email protected]/77924',
install_sys_hook=False, transport=HTTPTransport, release=version, tags={'dirty': dirty})
def capture_exception(*args, **kwargs):
client.captureException(*args, **kwargs)
cloudlog.error("crash", exc_info=kwargs.get('exc_info', 1))
def bind_user(**kwargs):
client.user_context(kwargs)
def bind_extra(**kwargs):
client.extra_context(kwargs)
def install():
# installs a sys.excepthook
__excepthook__ = sys.excepthook
def handle_exception(*exc_info):
if exc_info[0] not in (KeyboardInterrupt, SystemExit):
capture_exception(exc_info=exc_info)
__excepthook__(*exc_info)
sys.excepthook = handle_exception
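# Usage sketch (added illustration, not part of the original module): call
# install() once at process start-up; any uncaught exception is then captured
# before being handed to the previous excepthook. The user id is made up.
if __name__ == "__main__":
  bind_user(id="0000-example-dongle")
  install()
  raise RuntimeError("test crash")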
| mit | 8,554,870,870,883,396,000 | -6,868,141,802,157,949,000 | 29.75 | 118 | 0.702439 | false |
Azure/azure-sdk-for-python | sdk/powerbiembedded/azure-mgmt-powerbiembedded/azure/mgmt/powerbiembedded/models/operation_py3.py | 1 | 1163 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Operation(Model):
"""Operation.
:param name: The name of the operation being performed on this particular
object. This name should match the action name that appears in RBAC / the
event service.
:type name: str
:param display:
:type display: ~azure.mgmt.powerbiembedded.models.Display
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'Display'},
}
def __init__(self, *, name: str=None, display=None, **kwargs) -> None:
super(Operation, self).__init__(**kwargs)
self.name = name
self.display = display
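def _example_operation():
    """Construction sketch (added illustration; the name is a placeholder
    following the usual provider/resource/action RBAC pattern)."""
    return Operation(name='Microsoft.PowerBIEmbedded/workspaceCollections/read')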
| mit | -4,947,843,063,941,188,000 | -4,365,325,625,655,006,700 | 33.205882 | 78 | 0.574377 | false |
Anderson0026/mapproxy | mapproxy/script/conf/app.py | 1 | 6606 | # -:- encoding: utf-8 -:-
# This file is part of the MapProxy project.
# Copyright (C) 2013 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import sys
import os
import optparse
import logging
import textwrap
import datetime
import xml.etree.ElementTree
import yaml
from contextlib import contextmanager
from cStringIO import StringIO
from .sources import sources
from .layers import layers
from .caches import caches
from .seeds import seeds
from .utils import update_config, MapProxyYAMLDumper, download_capabilities
from mapproxy.config.loader import load_configuration
from mapproxy.util.ext.wmsparse import parse_capabilities
def setup_logging(level=logging.INFO):
mapproxy_log = logging.getLogger('mapproxy')
mapproxy_log.setLevel(level)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
"[%(asctime)s] %(name)s - %(levelname)s - %(message)s")
ch.setFormatter(formatter)
mapproxy_log.addHandler(ch)
def write_header(f, capabilities):
print >>f, '# MapProxy configuration automatically generated from:'
print >>f, '# %s' % capabilities
print >>f, '#'
print >>f, '# NOTE: The generated configuration can be highly inefficient,'
print >>f, '# especially when multiple layers and caches are requested at once.'
print >>f, '# Make sure you understand the generated configuration!'
print >>f, '#'
print >>f, '# Created on %s with:' % datetime.datetime.now()
print >>f, ' \\\n'.join(textwrap.wrap(' '.join(sys.argv), initial_indent='# ', subsequent_indent='# '))
print >>f, ''
@contextmanager
def file_or_stdout(name):
if name == '-':
yield sys.stdout
else:
with open(name, 'wb') as f:
yield f
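# Added illustration: this module normally runs as `mapproxy-util autoconfig`;
# a minimal programmatic call looks like the sketch below (the URL is a
# placeholder).
def _example_autoconfig():
    return config_command([
        '--capabilities', 'http://example.org/service?REQUEST=GetCapabilities',
        '--output', '-',
    ])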
def config_command(args):
parser = optparse.OptionParser("usage: %prog autoconfig [options]")
parser.add_option('--capabilities',
help="URL or filename of WMS 1.1.1/1.3.0 capabilities document")
parser.add_option('--output', help="filename for created MapProxy config [default: -]", default="-")
parser.add_option('--output-seed', help="filename for created seeding config")
parser.add_option('--base', help='base config to include in created MapProxy config')
parser.add_option('--overwrite',
help='YAML file with overwrites for the created MapProxy config')
parser.add_option('--overwrite-seed',
help='YAML file with overwrites for the created seeding config')
parser.add_option('--force', default=False, action='store_true',
help="overwrite existing files")
options, args = parser.parse_args(args)
if not options.capabilities:
parser.print_help()
print >>sys.stderr, "\nERROR: --capabilities required"
return 2
if not options.output and not options.output_seed:
parser.print_help()
print >>sys.stderr, "\nERROR: --output and/or --output-seed required"
return 2
if not options.force:
if options.output and options.output != '-' and os.path.exists(options.output):
print >>sys.stderr, "\nERROR: %s already exists, use --force to overwrite" % options.output
return 2
if options.output_seed and options.output_seed != '-' and os.path.exists(options.output_seed):
print >>sys.stderr, "\nERROR: %s already exists, use --force to overwrite" % options.output_seed
return 2
log = logging.getLogger('mapproxy_conf_cmd')
log.addHandler(logging.StreamHandler())
setup_logging(logging.WARNING)
srs_grids = {}
if options.base:
base = load_configuration(options.base)
for name, grid_conf in base.grids.iteritems():
if name.startswith('GLOBAL_'):
continue
srs_grids[grid_conf.tile_grid().srs.srs_code] = name
cap_doc = options.capabilities
if cap_doc.startswith(('http://', 'https://')):
cap_doc = download_capabilities(options.capabilities).read()
else:
cap_doc = open(cap_doc, 'rb').read()
try:
cap = parse_capabilities(StringIO(cap_doc))
except (xml.etree.ElementTree.ParseError, ValueError), ex:
print >>sys.stderr, ex
print >>sys.stderr, cap_doc[:1000] + ('...' if len(cap_doc) > 1000 else '')
return 3
overwrite = None
if options.overwrite:
with open(options.overwrite, 'rb') as f:
overwrite = yaml.load(f)
overwrite_seed = None
if options.overwrite_seed:
with open(options.overwrite_seed, 'rb') as f:
overwrite_seed = yaml.load(f)
conf = {}
if options.base:
conf['base'] = os.path.abspath(options.base)
conf['services'] = {'wms': {'md': {'title': cap.metadata()['title']}}}
if overwrite:
conf['services'] = update_config(conf['services'], overwrite.pop('service', {}))
conf['sources'] = sources(cap)
if overwrite:
conf['sources'] = update_config(conf['sources'], overwrite.pop('sources', {}))
conf['caches'] = caches(cap, conf['sources'], srs_grids=srs_grids)
if overwrite:
conf['caches'] = update_config(conf['caches'], overwrite.pop('caches', {}))
conf['layers'] = layers(cap, conf['caches'])
if overwrite:
conf['layers'] = update_config(conf['layers'], overwrite.pop('layers', {}))
if overwrite:
conf = update_config(conf, overwrite)
seed_conf = {}
seed_conf['seeds'], seed_conf['cleanups'] = seeds(cap, conf['caches'])
if overwrite_seed:
seed_conf = update_config(seed_conf, overwrite_seed)
if options.output:
with file_or_stdout(options.output) as f:
write_header(f, options.capabilities)
yaml.dump(conf, f, default_flow_style=False, Dumper=MapProxyYAMLDumper)
if options.output_seed:
with file_or_stdout(options.output_seed) as f:
write_header(f, options.capabilities)
yaml.dump(seed_conf, f, default_flow_style=False, Dumper=MapProxyYAMLDumper)
return 0 | apache-2.0 | -6,288,337,884,651,626,000 | -3,324,617,349,271,630,300 | 34.713514 | 110 | 0.65607 | false |
franciscod/python-telegram-bot | telegram/inlinequeryresultvideo.py | 2 | 2581 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains the classes that represent Telegram
InlineQueryResultVideo"""
from telegram import InlineQueryResult, InlineKeyboardMarkup, InputMessageContent
class InlineQueryResultVideo(InlineQueryResult):
def __init__(self,
id,
video_url,
mime_type,
thumb_url,
title,
caption=None,
video_width=None,
video_height=None,
video_duration=None,
description=None,
reply_markup=None,
input_message_content=None,
**kwargs):
# Required
super(InlineQueryResultVideo, self).__init__('video', id)
self.video_url = video_url
self.mime_type = mime_type
self.thumb_url = thumb_url
self.title = title
# Optional
if caption:
self.caption = caption
if video_width:
self.video_width = video_width
if video_height:
self.video_height = video_height
if video_duration:
self.video_duration = video_duration
if description:
self.description = description
if reply_markup:
self.reply_markup = reply_markup
if input_message_content:
self.input_message_content = input_message_content
@staticmethod
def de_json(data):
data = super(InlineQueryResultVideo, InlineQueryResultVideo).de_json(data)
data['reply_markup'] = InlineKeyboardMarkup.de_json(data.get('reply_markup'))
data['input_message_content'] = InputMessageContent.de_json(data.get(
'input_message_content'))
return InlineQueryResultVideo(**data)
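def _example_result():
    """Construction sketch (added illustration with placeholder values); such
    a result would be passed to ``bot.answerInlineQuery(query_id, [result])``."""
    return InlineQueryResultVideo(
        id='1',
        video_url='https://example.com/clip.mp4',
        mime_type='video/mp4',
        thumb_url='https://example.com/thumb.jpg',
        title='Example clip')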
| gpl-2.0 | -9,040,130,405,405,991,000 | 3,276,602,998,643,006,000 | 34.356164 | 85 | 0.633475 | false |
shingonoide/odoo | addons/purchase/report/purchase_report.py | 50 | 7689 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# Please note that these reports are not multi-currency !!!
#
from openerp.osv import fields,osv
from openerp import tools
class purchase_report(osv.osv):
_name = "purchase.report"
_description = "Purchases Orders"
_auto = False
_columns = {
'date': fields.datetime('Order Date', readonly=True, help="Date on which this document has been created"), # TDE FIXME master: rename into date_order
'state': fields.selection([('draft', 'Request for Quotation'),
('confirmed', 'Waiting Supplier Ack'),
('approved', 'Approved'),
('except_picking', 'Shipping Exception'),
('except_invoice', 'Invoice Exception'),
('done', 'Done'),
('cancel', 'Cancelled')],'Order Status', readonly=True),
'product_id':fields.many2one('product.product', 'Product', readonly=True),
'picking_type_id': fields.many2one('stock.warehouse', 'Warehouse', readonly=True),
'location_id': fields.many2one('stock.location', 'Destination', readonly=True),
'partner_id':fields.many2one('res.partner', 'Supplier', readonly=True),
'pricelist_id':fields.many2one('product.pricelist', 'Pricelist', readonly=True),
'date_approve':fields.date('Date Approved', readonly=True),
'expected_date':fields.date('Expected Date', readonly=True),
'validator' : fields.many2one('res.users', 'Validated By', readonly=True),
'product_uom' : fields.many2one('product.uom', 'Reference Unit of Measure', required=True),
'company_id':fields.many2one('res.company', 'Company', readonly=True),
'user_id':fields.many2one('res.users', 'Responsible', readonly=True),
'delay':fields.float('Days to Validate', digits=(16,2), readonly=True),
'delay_pass':fields.float('Days to Deliver', digits=(16,2), readonly=True),
'quantity': fields.integer('Unit Quantity', readonly=True), # TDE FIXME master: rename into unit_quantity
'price_total': fields.float('Total Price', readonly=True),
'price_average': fields.float('Average Price', readonly=True, group_operator="avg"),
'negociation': fields.float('Purchase-Standard Price', readonly=True, group_operator="avg"),
'price_standard': fields.float('Products Value', readonly=True, group_operator="sum"),
'nbr': fields.integer('# of Lines', readonly=True), # TDE FIXME master: rename into nbr_lines
'category_id': fields.many2one('product.category', 'Category', readonly=True)
}
_order = 'date desc, price_total desc'
def init(self, cr):
tools.sql.drop_view_if_exists(cr, 'purchase_report')
cr.execute("""
create or replace view purchase_report as (
WITH currency_rate (currency_id, rate, date_start, date_end) AS (
SELECT r.currency_id, r.rate, r.name AS date_start,
(SELECT name FROM res_currency_rate r2
WHERE r2.name > r.name AND
r2.currency_id = r.currency_id
ORDER BY r2.name ASC
LIMIT 1) AS date_end
FROM res_currency_rate r
)
select
min(l.id) as id,
s.date_order as date,
l.state,
s.date_approve,
s.minimum_planned_date as expected_date,
s.dest_address_id,
s.pricelist_id,
s.validator,
spt.warehouse_id as picking_type_id,
s.partner_id as partner_id,
s.create_uid as user_id,
s.company_id as company_id,
l.product_id,
t.categ_id as category_id,
t.uom_id as product_uom,
s.location_id as location_id,
sum(l.product_qty/u.factor*u2.factor) as quantity,
extract(epoch from age(s.date_approve,s.date_order))/(24*60*60)::decimal(16,2) as delay,
extract(epoch from age(l.date_planned,s.date_order))/(24*60*60)::decimal(16,2) as delay_pass,
count(*) as nbr,
sum(l.price_unit/cr.rate*l.product_qty)::decimal(16,2) as price_total,
avg(100.0 * (l.price_unit/cr.rate*l.product_qty) / NULLIF(ip.value_float*l.product_qty/u.factor*u2.factor, 0.0))::decimal(16,2) as negociation,
sum(ip.value_float*l.product_qty/u.factor*u2.factor)::decimal(16,2) as price_standard,
(sum(l.product_qty*l.price_unit/cr.rate)/NULLIF(sum(l.product_qty/u.factor*u2.factor),0.0))::decimal(16,2) as price_average
from purchase_order_line l
join purchase_order s on (l.order_id=s.id)
left join product_product p on (l.product_id=p.id)
left join product_template t on (p.product_tmpl_id=t.id)
LEFT JOIN ir_property ip ON (ip.name='standard_price' AND ip.res_id=CONCAT('product.template,',t.id) AND ip.company_id=s.company_id)
left join product_uom u on (u.id=l.product_uom)
left join product_uom u2 on (u2.id=t.uom_id)
left join stock_picking_type spt on (spt.id=s.picking_type_id)
join currency_rate cr on (cr.currency_id = s.currency_id and
cr.date_start <= coalesce(s.date_order, now()) and
(cr.date_end is null or cr.date_end > coalesce(s.date_order, now())))
group by
s.company_id,
s.create_uid,
s.partner_id,
u.factor,
s.location_id,
l.price_unit,
s.date_approve,
l.date_planned,
l.product_uom,
s.minimum_planned_date,
s.pricelist_id,
s.validator,
s.dest_address_id,
l.product_id,
t.categ_id,
s.date_order,
l.state,
spt.warehouse_id,
u.uom_type,
u.category_id,
t.uom_id,
u.id,
u2.factor
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -598,907,763,509,953,200 | -2,900,161,749,884,581,000 | 53.147887 | 163 | 0.5344 | false |
2ndQuadrant/ansible | test/runner/lib/docker_util.py | 29 | 7331 | """Functions for accessing docker via the docker cli."""
from __future__ import absolute_import, print_function
import json
import os
import time
from lib.executor import (
SubprocessError,
)
from lib.util import (
ApplicationError,
run_command,
common_environment,
display,
find_executable,
)
from lib.config import (
EnvironmentConfig,
)
BUFFER_SIZE = 256 * 256
def docker_available():
"""
:rtype: bool
"""
return find_executable('docker', required=False)
def get_docker_container_id():
"""
:rtype: str | None
"""
path = '/proc/self/cgroup'
if not os.path.exists(path):
return None
with open(path) as cgroup_fd:
contents = cgroup_fd.read()
paths = [line.split(':')[2] for line in contents.splitlines()]
container_ids = set(path.split('/')[2] for path in paths if path.startswith('/docker/'))
if not container_ids:
return None
if len(container_ids) == 1:
return container_ids.pop()
raise ApplicationError('Found multiple container_id candidates: %s\n%s' % (sorted(container_ids), contents))
def get_docker_container_ip(args, container_id):
"""
:type args: EnvironmentConfig
:type container_id: str
:rtype: str
"""
results = docker_inspect(args, container_id)
ipaddress = results[0]['NetworkSettings']['IPAddress']
return ipaddress
def get_docker_networks(args, container_id):
"""
    :type args: EnvironmentConfig
    :type container_id: str
:rtype: list[str]
"""
results = docker_inspect(args, container_id)
networks = sorted(results[0]['NetworkSettings']['Networks'])
return networks
def docker_pull(args, image):
"""
:type args: EnvironmentConfig
:type image: str
"""
if ('@' in image or ':' in image) and docker_images(args, image):
display.info('Skipping docker pull of existing image with tag or digest: %s' % image, verbosity=2)
return
if not args.docker_pull:
display.warning('Skipping docker pull for "%s". Image may be out-of-date.' % image)
return
for _ in range(1, 10):
try:
docker_command(args, ['pull', image])
return
except SubprocessError:
display.warning('Failed to pull docker image "%s". Waiting a few seconds before trying again.' % image)
time.sleep(3)
raise ApplicationError('Failed to pull docker image "%s".' % image)
def docker_put(args, container_id, src, dst):
"""
:type args: EnvironmentConfig
:type container_id: str
:type src: str
:type dst: str
"""
# avoid 'docker cp' due to a bug which causes 'docker rm' to fail
with open(src, 'rb') as src_fd:
docker_exec(args, container_id, ['dd', 'of=%s' % dst, 'bs=%s' % BUFFER_SIZE],
options=['-i'], stdin=src_fd, capture=True)
def docker_get(args, container_id, src, dst):
"""
:type args: EnvironmentConfig
:type container_id: str
:type src: str
:type dst: str
"""
# avoid 'docker cp' due to a bug which causes 'docker rm' to fail
with open(dst, 'wb') as dst_fd:
docker_exec(args, container_id, ['dd', 'if=%s' % src, 'bs=%s' % BUFFER_SIZE],
options=['-i'], stdout=dst_fd, capture=True)
def docker_run(args, image, options, cmd=None):
"""
:type args: EnvironmentConfig
:type image: str
:type options: list[str] | None
:type cmd: list[str] | None
:rtype: str | None, str | None
"""
if not options:
options = []
if not cmd:
cmd = []
for _ in range(1, 3):
try:
return docker_command(args, ['run'] + options + [image] + cmd, capture=True)
except SubprocessError as ex:
display.error(ex)
display.warning('Failed to run docker image "%s". Waiting a few seconds before trying again.' % image)
time.sleep(3)
raise ApplicationError('Failed to run docker image "%s".' % image)
def docker_images(args, image):
"""
    :type args: CommonConfig
    :type image: str
:rtype: list[dict[str, any]]
"""
stdout, _dummy = docker_command(args, ['images', image, '--format', '{{json .}}'], capture=True, always=True)
results = [json.loads(line) for line in stdout.splitlines()]
return results
def docker_rm(args, container_id):
"""
:type args: EnvironmentConfig
:type container_id: str
"""
docker_command(args, ['rm', '-f', container_id], capture=True)
def docker_inspect(args, container_id):
"""
:type args: EnvironmentConfig
:type container_id: str
:rtype: list[dict]
"""
if args.explain:
return []
try:
stdout, _ = docker_command(args, ['inspect', container_id], capture=True)
return json.loads(stdout)
except SubprocessError as ex:
try:
return json.loads(ex.stdout)
except Exception:
raise ex
def docker_network_disconnect(args, container_id, network):
"""
    :type args: EnvironmentConfig
    :type container_id: str
    :type network: str
"""
docker_command(args, ['network', 'disconnect', network, container_id], capture=True)
def docker_network_inspect(args, network):
"""
:type args: EnvironmentConfig
:type network: str
:rtype: list[dict]
"""
if args.explain:
return []
try:
stdout, _ = docker_command(args, ['network', 'inspect', network], capture=True)
return json.loads(stdout)
except SubprocessError as ex:
try:
return json.loads(ex.stdout)
except Exception:
raise ex
def docker_exec(args, container_id, cmd, options=None, capture=False, stdin=None, stdout=None):
"""
:type args: EnvironmentConfig
:type container_id: str
:type cmd: list[str]
:type options: list[str] | None
:type capture: bool
:type stdin: file | None
:type stdout: file | None
:rtype: str | None, str | None
"""
if not options:
options = []
return docker_command(args, ['exec'] + options + [container_id] + cmd, capture=capture, stdin=stdin, stdout=stdout)
def docker_info(args):
"""
    :type args: CommonConfig
:rtype: dict[str, any]
"""
stdout, _dummy = docker_command(args, ['info', '--format', '{{json .}}'], capture=True, always=True)
return json.loads(stdout)
def docker_version(args):
"""
    :type args: CommonConfig
:rtype: dict[str, any]
"""
stdout, _dummy = docker_command(args, ['version', '--format', '{{json .}}'], capture=True, always=True)
return json.loads(stdout)
def docker_command(args, cmd, capture=False, stdin=None, stdout=None, always=False):
"""
:type args: CommonConfig
:type cmd: list[str]
:type capture: bool
:type stdin: file | None
:type stdout: file | None
:type always: bool
:rtype: str | None, str | None
"""
env = docker_environment()
return run_command(args, ['docker'] + cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, always=always)
def docker_environment():
"""
:rtype: dict[str, str]
"""
env = common_environment()
env.update(dict((key, os.environ[key]) for key in os.environ if key.startswith('DOCKER_')))
return env
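def _example_flow(args):
    """Added illustration only (the image name is a placeholder); ``args`` is
    assumed to be an EnvironmentConfig as used by the helpers above."""
    docker_pull(args, 'ubuntu:16.04')
    stdout, _ = docker_run(args, 'ubuntu:16.04', ['--detach'], ['sleep', '60'])
    container_id = stdout.strip()
    ip_address = get_docker_container_ip(args, container_id)
    docker_rm(args, container_id)
    return ip_address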
| gpl-3.0 | 7,770,197,834,404,914,000 | -1,948,879,348,008,412,200 | 25.465704 | 119 | 0.609057 | false |
huongttlan/statsmodels | statsmodels/sandbox/examples/example_sysreg.py | 31 | 8043 | """Example: statsmodels.sandbox.sysreg
"""
#TODO: this is going to change significantly once we have a panel data structure
from statsmodels.compat.python import lmap, asbytes
import numpy as np
import statsmodels.api as sm
from statsmodels.sandbox.sysreg import *
# Seemingly Unrelated Regressions (SUR) Model
# This example uses the subset of the Grunfeld data in Greene's Econometric
# Analysis Chapter 14 (5th Edition)
grun_data = sm.datasets.grunfeld.load()
firms = ['General Motors', 'Chrysler', 'General Electric', 'Westinghouse',
'US Steel']
#for Python 3 compatibility
firms = lmap(asbytes, firms)
grun_exog = grun_data.exog
grun_endog = grun_data.endog
# Right now SUR takes a flat list of arrays that alternates between the LHS
# and the RHS of each equation, e.g. [y_eq1, X_eq1, y_eq2, X_eq2, ...]
# This is very likely to change
grun_sys = []
for i in firms:
index = grun_exog['firm'] == i
grun_sys.append(grun_endog[index])
exog = grun_exog[index][['value','capital']].view(float).reshape(-1,2)
exog = sm.add_constant(exog, prepend=True)
grun_sys.append(exog)
# Note that the results in Greene (5th edition) use a slightly different
# version of the Grunfeld data. To reproduce Table 14.1 the following changes
# are necessary.
grun_sys[-2][5] = 261.6
grun_sys[-2][-3] = 645.2
grun_sys[-1][11,2] = 232.6
grun_mod = SUR(grun_sys)
grun_res = grun_mod.fit()
print("Results for the 2-step GLS")
print("Compare to Greene Table 14.1, 5th edition")
print(grun_res.params)
# or you can do an iterative fit
# you have to define a new model for now, though this will be fixed
# TODO: note the above
print("Results for iterative GLS (equivalent to MLE)")
print("Compare to Greene Table 14.3")
#TODO: these are slightly off, could be a convergence issue
# or might use a different default DOF correction?
grun_imod = SUR(grun_sys)
grun_ires = grun_imod.fit(igls=True)
print(grun_ires.params)
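# Added for comparison (not in Greene): equation-by-equation OLS ignores the
# cross-equation error covariance, so its estimates differ from the SUR/GLS
# estimates above.
ols_params = [sm.OLS(grun_sys[2*i], grun_sys[2*i+1]).fit().params
              for i in range(len(firms))]
print("Equation-by-equation OLS estimates for comparison:")
print(ols_params)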
# Two-Stage Least Squares for Simultaneous Equations
#TODO: we are going to need *some kind* of formula framework
# This follows the simple macroeconomic model given in
# Greene Example 15.1 (5th Edition)
# The data however is from statsmodels and is not the same as
# Greene's
# The model is
# consumption: c_{t} = \alpha_{0} + \alpha_{1}y_{t} + \alpha_{2}c_{t-1} + \epsilon_{t1}
# investment: i_{t} = \beta_{0} + \beta_{1}r_{t} + \beta_{2}\left(y_{t}-y_{t-1}\right) + \epsilon_{t2}
# demand: y_{t} = c_{t} + I_{t} + g_{t}
# See Greene's Econometric Analysis for more information
# Load the data
macrodata = sm.datasets.macrodata.load().data
# Not needed, but make sure the data is sorted
macrodata = np.sort(macrodata, order=['year','quarter'])
# Impose the demand restriction
y = macrodata['realcons'] + macrodata['realinv'] + macrodata['realgovt']
# Build the system
macro_sys = []
# First equation LHS
macro_sys.append(macrodata['realcons'][1:]) # leave off first date
# First equation RHS
exog1 = np.column_stack((y[1:],macrodata['realcons'][:-1]))
#TODO: it might be nice to have "lag" and "lead" functions
exog1 = sm.add_constant(exog1, prepend=True)
macro_sys.append(exog1)
# Second equation LHS
macro_sys.append(macrodata['realinv'][1:])
# Second equation RHS
exog2 = np.column_stack((macrodata['tbilrate'][1:], np.diff(y)))
exog2 = sm.add_constant(exog2, prepend=True)
macro_sys.append(exog2)
# We need to say that y_{t} in the RHS of equation 1 is an endogenous regressor
# We will call these independent endogenous variables
# Right now, we use a dictionary to declare these
indep_endog = {0 : [1]}
# We also need to create a design of our instruments
# This will be done automatically in the future
instruments = np.column_stack((macrodata[['realgovt',
'tbilrate']][1:].view(float).reshape(-1,2),macrodata['realcons'][:-1],
y[:-1]))
instruments = sm.add_constant(instruments, prepend=True)
macro_mod = Sem2SLS(macro_sys, indep_endog=indep_endog, instruments=instruments)
# Right now this only returns parameters
macro_params = macro_mod.fit()
print("The parameters for the first equation are correct.")
print("The parameters for the second equation are not.")
print(macro_params)
#TODO: Note that the above is incorrect, because we have no way of telling the
# model that *part* of the y_{t} - y_{t-1} is an independent endogenous variable
# To correct for this we would have to do the following
y_instrumented = macro_mod.wexog[0][:,1]
whitened_ydiff = y_instrumented - y[:-1]
wexog = np.column_stack((macrodata['tbilrate'][1:],whitened_ydiff))
wexog = sm.add_constant(wexog, prepend=True)
correct_params = sm.GLS(macrodata['realinv'][1:], wexog).fit().params
print("If we correctly instrument everything, then these are the parameters")
print("for the second equation")
print(correct_params)
print("Compare to output of R script statsmodels/sandbox/tests/macrodata.s")
print('\nUsing IV2SLS')
from statsmodels.sandbox.regression.gmm import IV2SLS
miv = IV2SLS(macro_sys[0], macro_sys[1], instruments)
resiv = miv.fit()
print("equation 1")
print(resiv.params)
miv2 = IV2SLS(macro_sys[2], macro_sys[3], instruments)
resiv2 = miv2.fit()
print("equation 2")
print(resiv2.params)
### Below is the same example using Greene's data ###
run_greene = 0
if run_greene:
try:
data3 = np.genfromtxt('/home/skipper/school/MetricsII/Greene \
TableF5-1.txt', names=True)
except:
raise ValueError("Based on Greene TableF5-1. You should download it "
"from his web site and edit this script accordingly.")
# Example 15.1 in Greene 5th Edition
# c_t = constant + y_t + c_t-1
# i_t = constant + r_t + (y_t - y_t-1)
# y_t = c_t + i_t + g_t
sys3 = []
sys3.append(data3['realcons'][1:]) # have to leave off a beg. date
# impose 3rd equation on y
y = data3['realcons'] + data3['realinvs'] + data3['realgovt']
exog1 = np.column_stack((y[1:],data3['realcons'][:-1]))
exog1 = sm.add_constant(exog1, prepend=False)
sys3.append(exog1)
sys3.append(data3['realinvs'][1:])
exog2 = np.column_stack((data3['tbilrate'][1:],
np.diff(y)))
# realint is missing 1st observation
exog2 = sm.add_constant(exog2, prepend=False)
sys3.append(exog2)
indep_endog = {0 : [0]} # need to be able to say that y_1 is an instrument..
instruments = np.column_stack((data3[['realgovt',
'tbilrate']][1:].view(float).reshape(-1,2),data3['realcons'][:-1],
y[:-1]))
instruments = sm.add_constant(instruments, prepend=False)
sem_mod = Sem2SLS(sys3, indep_endog = indep_endog, instruments=instruments)
sem_params = sem_mod.fit() # first equation is right, but not second?
# should y_t in the diff be instrumented?
# how would R know this in the script?
# well, let's check...
y_instr = sem_mod.wexog[0][:,0]
wyd = y_instr - y[:-1]
wexog = np.column_stack((data3['tbilrate'][1:],wyd))
wexog = sm.add_constant(wexog, prepend=False)
params = sm.GLS(data3['realinvs'][1:], wexog).fit().params
print("These are the simultaneous equation estimates for Greene's \
example 13-1 (also application 13-1 in the 6th edition).")
print(sem_params)
print("The first set of parameters is correct. The second set is not.")
print("Compare to the solution manual at \
http://pages.stern.nyu.edu/~wgreene/Text/econometricanalysis.htm")
print("The reason is the restriction on (y_t - y_1)")
print("Compare to R script GreeneEx15_1.s")
print("Somehow R carries y.1 in yd to know that it needs to be \
instrumented")
print("If we replace our estimate with the instrumented one")
print(params)
print("We get the right estimate")
print("Without a formula framework we have to be able to do restrictions.")
# yep!, but how in the world does R know this when we just fed it yd??
# must be implicit in the formula framework...
# we are going to need to keep the two equations separate and use
# a restrictions matrix. Ugh, is a formula framework really, necessary to get
# around this?
| bsd-3-clause | 5,717,807,721,724,186,000 | 1,053,081,633,588,925,200 | 37.668269 | 102 | 0.695263 | false |
freakynit/kaggle-ndsb | configurations/bagging_15_convroll4_big_weightdecay_resume.py | 6 | 5502 | import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import dihedral_fast
import tmp_dnn
import tta
resume_path = "metadata/bagging_15_convroll4_big_weightdecay-schaap-20150306-105118.pkl"
validation_split_path = "splits/bagging_split_15.pkl"
patch_size = (95, 95)
augmentation_params = {
'zoom_range': (1 / 1.6, 1.6),
'rotation_range': (0, 360),
'shear_range': (-20, 20),
'translation_range': (-10, 10),
'do_flip': True,
'allow_stretch': 1.3,
}
batch_size = 128 // 4
chunk_size = 32768 // 4
num_chunks_train = 840
momentum = 0.9
learning_rate_schedule = {
0: 0.003,
700: 0.0003,
800: 0.00003,
}
validate_every = 20
save_every = 20
def estimate_scale(img):
return np.maximum(img.shape[0], img.shape[1]) / 85.0
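# Worked example (added note): a 170x120 crop gives max(170, 120) / 85.0 == 2.0,
# i.e. the data loader is assumed to rescale that image by a factor of two so
# organisms end up at comparable pixel sizes.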
# augmentation_transforms_test = []
# for flip in [True, False]:
# for zoom in [1/1.3, 1/1.2, 1/1.1, 1.0, 1.1, 1.2, 1.3]:
# for rot in np.linspace(0.0, 360.0, 5, endpoint=False):
# tf = data.build_augmentation_transform(zoom=(zoom, zoom), rotation=rot, flip=flip)
# augmentation_transforms_test.append(tf)
augmentation_transforms_test = tta.build_quasirandom_transforms(70, **{
'zoom_range': (1 / 1.4, 1.4),
'rotation_range': (0, 360),
'shear_range': (-10, 10),
'translation_range': (-8, 8),
'do_flip': True,
'allow_stretch': 1.2,
})
data_loader = load.ZmuvRescaledDataLoader(estimate_scale=estimate_scale, num_chunks_train=num_chunks_train,
patch_size=patch_size, chunk_size=chunk_size, augmentation_params=augmentation_params,
augmentation_transforms_test=augmentation_transforms_test, validation_split_path=validation_split_path)
# Conv2DLayer = nn.layers.cuda_convnet.Conv2DCCLayer
# MaxPool2DLayer = nn.layers.cuda_convnet.MaxPool2DCCLayer
Conv2DLayer = tmp_dnn.Conv2DDNNLayer
MaxPool2DLayer = tmp_dnn.MaxPool2DDNNLayer
def build_model():
l0 = nn.layers.InputLayer((batch_size, 1, patch_size[0], patch_size[1]))
l0c = dihedral.CyclicSliceLayer(l0)
l1a = Conv2DLayer(l0c, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l1b = Conv2DLayer(l1a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
l1r = dihedral_fast.CyclicConvRollLayer(l1)
l2a = Conv2DLayer(l1r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l2b = Conv2DLayer(l2a, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
l2r = dihedral_fast.CyclicConvRollLayer(l2)
l3a = Conv2DLayer(l2r, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l3b = Conv2DLayer(l3a, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l3c = Conv2DLayer(l3b, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
l3r = dihedral_fast.CyclicConvRollLayer(l3)
l4a = Conv2DLayer(l3r, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l4b = Conv2DLayer(l4a, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l4c = Conv2DLayer(l4b, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
l4 = MaxPool2DLayer(l4c, ds=(3, 3), strides=(2, 2))
l4r = dihedral_fast.CyclicConvRollLayer(l4)
l4f = nn.layers.flatten(l4r)
l5 = nn.layers.DenseLayer(nn.layers.dropout(l4f, p=0.5), num_units=512, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
l5r = dihedral_fast.CyclicRollLayer(l5)
l6 = nn.layers.DenseLayer(nn.layers.dropout(l5r, p=0.5), num_units=512, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
l6m = dihedral.CyclicPoolLayer(l6, pool_function=nn_plankton.rms)
l7 = nn.layers.DenseLayer(nn.layers.dropout(l6m, p=0.5), num_units=data.num_classes, nonlinearity=T.nnet.softmax, W=nn_plankton.Orthogonal(1.0))
return [l0], l7
def build_objective(l_ins, l_out):
lambda_reg = 0.0005
params = nn.layers.get_all_non_bias_params(l_out)
reg_term = sum(T.sum(p**2) for p in params)
def loss(y, t):
return nn_plankton.log_loss(y, t) + lambda_reg * reg_term
return nn.objectives.Objective(l_out, loss_function=loss)
| mit | 3,125,404,274,157,325,300 | 6,564,868,221,317,881,000 | 43.016 | 205 | 0.697928 | false |
stefan-jonasson/home-assistant | homeassistant/components/telegram_bot/__init__.py | 2 | 26538 | """
Component to send and receive Telegram messages.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/telegram_bot/
"""
import asyncio
import io
from functools import partial
import logging
import os
import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA, ATTR_MESSAGE, ATTR_TITLE)
from homeassistant.config import load_yaml_config_file
from homeassistant.const import (
ATTR_COMMAND, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_API_KEY,
CONF_PLATFORM, CONF_TIMEOUT, HTTP_DIGEST_AUTHENTICATION)
import homeassistant.helpers.config_validation as cv
from homeassistant.exceptions import TemplateError
from homeassistant.setup import async_prepare_setup_platform
REQUIREMENTS = ['python-telegram-bot==8.1.1']
_LOGGER = logging.getLogger(__name__)
ATTR_ARGS = 'args'
ATTR_AUTHENTICATION = 'authentication'
ATTR_CALLBACK_QUERY = 'callback_query'
ATTR_CALLBACK_QUERY_ID = 'callback_query_id'
ATTR_CAPTION = 'caption'
ATTR_CHAT_ID = 'chat_id'
ATTR_CHAT_INSTANCE = 'chat_instance'
ATTR_DISABLE_NOTIF = 'disable_notification'
ATTR_DISABLE_WEB_PREV = 'disable_web_page_preview'
ATTR_EDITED_MSG = 'edited_message'
ATTR_FILE = 'file'
ATTR_FROM_FIRST = 'from_first'
ATTR_FROM_LAST = 'from_last'
ATTR_KEYBOARD = 'keyboard'
ATTR_KEYBOARD_INLINE = 'inline_keyboard'
ATTR_MESSAGEID = 'message_id'
ATTR_MSG = 'message'
ATTR_MSGID = 'id'
ATTR_PARSER = 'parse_mode'
ATTR_PASSWORD = 'password'
ATTR_REPLY_TO_MSGID = 'reply_to_message_id'
ATTR_REPLYMARKUP = 'reply_markup'
ATTR_SHOW_ALERT = 'show_alert'
ATTR_TARGET = 'target'
ATTR_TEXT = 'text'
ATTR_URL = 'url'
ATTR_USER_ID = 'user_id'
ATTR_USERNAME = 'username'
CONF_ALLOWED_CHAT_IDS = 'allowed_chat_ids'
CONF_PROXY_URL = 'proxy_url'
CONF_PROXY_PARAMS = 'proxy_params'
DOMAIN = 'telegram_bot'
SERVICE_SEND_MESSAGE = 'send_message'
SERVICE_SEND_PHOTO = 'send_photo'
SERVICE_SEND_DOCUMENT = 'send_document'
SERVICE_SEND_LOCATION = 'send_location'
SERVICE_EDIT_MESSAGE = 'edit_message'
SERVICE_EDIT_CAPTION = 'edit_caption'
SERVICE_EDIT_REPLYMARKUP = 'edit_replymarkup'
SERVICE_ANSWER_CALLBACK_QUERY = 'answer_callback_query'
SERVICE_DELETE_MESSAGE = 'delete_message'
EVENT_TELEGRAM_CALLBACK = 'telegram_callback'
EVENT_TELEGRAM_COMMAND = 'telegram_command'
EVENT_TELEGRAM_TEXT = 'telegram_text'
PARSER_HTML = 'html'
PARSER_MD = 'markdown'
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
vol.Required(CONF_PLATFORM): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_ALLOWED_CHAT_IDS):
vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(ATTR_PARSER, default=PARSER_MD): cv.string,
vol.Optional(CONF_PROXY_URL): cv.string,
vol.Optional(CONF_PROXY_PARAMS): dict,
})
BASE_SERVICE_SCHEMA = vol.Schema({
vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(ATTR_PARSER): cv.string,
vol.Optional(ATTR_DISABLE_NOTIF): cv.boolean,
vol.Optional(ATTR_DISABLE_WEB_PREV): cv.boolean,
vol.Optional(ATTR_KEYBOARD): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list,
}, extra=vol.ALLOW_EXTRA)
SERVICE_SCHEMA_SEND_MESSAGE = BASE_SERVICE_SCHEMA.extend({
vol.Required(ATTR_MESSAGE): cv.template,
vol.Optional(ATTR_TITLE): cv.template,
})
SERVICE_SCHEMA_SEND_FILE = BASE_SERVICE_SCHEMA.extend({
vol.Optional(ATTR_URL): cv.template,
vol.Optional(ATTR_FILE): cv.template,
vol.Optional(ATTR_CAPTION): cv.template,
vol.Optional(ATTR_USERNAME): cv.string,
vol.Optional(ATTR_PASSWORD): cv.string,
vol.Optional(ATTR_AUTHENTICATION): cv.string,
})
SERVICE_SCHEMA_SEND_LOCATION = BASE_SERVICE_SCHEMA.extend({
vol.Required(ATTR_LONGITUDE): cv.template,
vol.Required(ATTR_LATITUDE): cv.template,
})
SERVICE_SCHEMA_EDIT_MESSAGE = SERVICE_SCHEMA_SEND_MESSAGE.extend({
vol.Required(ATTR_MESSAGEID):
vol.Any(cv.positive_int, vol.All(cv.string, 'last')),
vol.Required(ATTR_CHAT_ID): vol.Coerce(int),
})
SERVICE_SCHEMA_EDIT_CAPTION = vol.Schema({
vol.Required(ATTR_MESSAGEID):
vol.Any(cv.positive_int, vol.All(cv.string, 'last')),
vol.Required(ATTR_CHAT_ID): vol.Coerce(int),
vol.Required(ATTR_CAPTION): cv.template,
vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list,
}, extra=vol.ALLOW_EXTRA)
SERVICE_SCHEMA_EDIT_REPLYMARKUP = vol.Schema({
vol.Required(ATTR_MESSAGEID):
vol.Any(cv.positive_int, vol.All(cv.string, 'last')),
vol.Required(ATTR_CHAT_ID): vol.Coerce(int),
vol.Required(ATTR_KEYBOARD_INLINE): cv.ensure_list,
}, extra=vol.ALLOW_EXTRA)
SERVICE_SCHEMA_ANSWER_CALLBACK_QUERY = vol.Schema({
vol.Required(ATTR_MESSAGE): cv.template,
vol.Required(ATTR_CALLBACK_QUERY_ID): vol.Coerce(int),
vol.Optional(ATTR_SHOW_ALERT): cv.boolean,
}, extra=vol.ALLOW_EXTRA)
SERVICE_SCHEMA_DELETE_MESSAGE = vol.Schema({
vol.Required(ATTR_CHAT_ID): vol.Coerce(int),
vol.Required(ATTR_MESSAGEID):
vol.Any(cv.positive_int, vol.All(cv.string, 'last')),
}, extra=vol.ALLOW_EXTRA)
SERVICE_MAP = {
SERVICE_SEND_MESSAGE: SERVICE_SCHEMA_SEND_MESSAGE,
SERVICE_SEND_PHOTO: SERVICE_SCHEMA_SEND_FILE,
SERVICE_SEND_DOCUMENT: SERVICE_SCHEMA_SEND_FILE,
SERVICE_SEND_LOCATION: SERVICE_SCHEMA_SEND_LOCATION,
SERVICE_EDIT_MESSAGE: SERVICE_SCHEMA_EDIT_MESSAGE,
SERVICE_EDIT_CAPTION: SERVICE_SCHEMA_EDIT_CAPTION,
SERVICE_EDIT_REPLYMARKUP: SERVICE_SCHEMA_EDIT_REPLYMARKUP,
SERVICE_ANSWER_CALLBACK_QUERY: SERVICE_SCHEMA_ANSWER_CALLBACK_QUERY,
SERVICE_DELETE_MESSAGE: SERVICE_SCHEMA_DELETE_MESSAGE,
}
def load_data(hass, url=None, filepath=None, username=None, password=None,
authentication=None, num_retries=5):
"""Load photo/document into ByteIO/File container from a source."""
try:
if url is not None:
# Load photo from URL
params = {"timeout": 15}
if username is not None and password is not None:
if authentication == HTTP_DIGEST_AUTHENTICATION:
params["auth"] = HTTPDigestAuth(username, password)
else:
params["auth"] = HTTPBasicAuth(username, password)
retry_num = 0
while retry_num < num_retries:
req = requests.get(url, **params)
if not req.ok:
_LOGGER.warning("Status code %s (retry #%s) loading %s.",
req.status_code, retry_num + 1, url)
else:
data = io.BytesIO(req.content)
if data.read():
data.seek(0)
data.name = url
return data
_LOGGER.warning("Empty data (retry #%s) in %s).",
retry_num + 1, url)
retry_num += 1
_LOGGER.warning("Can't load photo in %s after %s retries.",
url, retry_num)
elif filepath is not None:
if hass.config.is_allowed_path(filepath):
return open(filepath, "rb")
_LOGGER.warning("'%s' are not secure to load data from!", filepath)
else:
_LOGGER.warning("Can't load photo. No photo found in params!")
except (OSError, TypeError) as error:
_LOGGER.error("Can't load photo into ByteIO: %s", error)
return None
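def _example_load(hass):
    """Sketch of the two supported sources (added illustration; URL,
    credentials and path are placeholders)."""
    remote = load_data(hass, url='https://example.com/cam.jpg',
                       username='user', password='secret')
    local = load_data(hass, filepath='/config/www/cam.jpg')
    return remote or local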
@asyncio.coroutine
def async_setup(hass, config):
"""Set up the Telegram bot component."""
if not config[DOMAIN]:
return False
p_config = config[DOMAIN][0]
descriptions = yield from hass.async_add_job(
load_yaml_config_file,
os.path.join(os.path.dirname(__file__), 'services.yaml'))
p_type = p_config.get(CONF_PLATFORM)
platform = yield from async_prepare_setup_platform(
hass, config, DOMAIN, p_type)
if platform is None:
return
_LOGGER.info("Setting up %s.%s", DOMAIN, p_type)
try:
receiver_service = yield from \
platform.async_setup_platform(hass, p_config)
if receiver_service is False:
_LOGGER.error(
"Failed to initialize Telegram bot %s", p_type)
return False
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error setting up platform %s", p_type)
return False
notify_service = TelegramNotificationService(
hass,
p_config.get(CONF_API_KEY),
p_config.get(CONF_ALLOWED_CHAT_IDS),
p_config.get(ATTR_PARSER),
p_config.get(CONF_PROXY_URL),
p_config.get(CONF_PROXY_PARAMS)
)
@asyncio.coroutine
def async_send_telegram_message(service):
"""Handle sending Telegram Bot message service calls."""
def _render_template_attr(data, attribute):
attribute_templ = data.get(attribute)
if attribute_templ:
if any([isinstance(attribute_templ, vtype)
for vtype in [float, int, str]]):
data[attribute] = attribute_templ
else:
attribute_templ.hass = hass
try:
data[attribute] = attribute_templ.async_render()
except TemplateError as exc:
_LOGGER.error(
"TemplateError in %s: %s -> %s",
attribute, attribute_templ.template, exc)
data[attribute] = attribute_templ.template
msgtype = service.service
kwargs = dict(service.data)
for attribute in [ATTR_MESSAGE, ATTR_TITLE, ATTR_URL, ATTR_FILE,
ATTR_CAPTION, ATTR_LONGITUDE, ATTR_LATITUDE]:
_render_template_attr(kwargs, attribute)
_LOGGER.debug("New telegram message %s: %s", msgtype, kwargs)
if msgtype == SERVICE_SEND_MESSAGE:
yield from hass.async_add_job(
partial(notify_service.send_message, **kwargs))
elif msgtype == SERVICE_SEND_PHOTO:
yield from hass.async_add_job(
partial(notify_service.send_file, True, **kwargs))
elif msgtype == SERVICE_SEND_DOCUMENT:
yield from hass.async_add_job(
partial(notify_service.send_file, False, **kwargs))
elif msgtype == SERVICE_SEND_LOCATION:
yield from hass.async_add_job(
partial(notify_service.send_location, **kwargs))
elif msgtype == SERVICE_ANSWER_CALLBACK_QUERY:
yield from hass.async_add_job(
partial(notify_service.answer_callback_query, **kwargs))
elif msgtype == SERVICE_DELETE_MESSAGE:
yield from hass.async_add_job(
partial(notify_service.delete_message, **kwargs))
else:
yield from hass.async_add_job(
partial(notify_service.edit_message, msgtype, **kwargs))
# Register notification services
for service_notif, schema in SERVICE_MAP.items():
hass.services.async_register(
DOMAIN, service_notif, async_send_telegram_message,
descriptions.get(service_notif), schema=schema)
return True
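# Example configuration.yaml entry for this component (added illustration;
# the chat id is a placeholder and `polling` is one of the bot platforms):
#
# telegram_bot:
#   - platform: polling
#     api_key: !secret telegram_api_key
#     allowed_chat_ids:
#       - 123456789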
class TelegramNotificationService:
"""Implement the notification services for the Telegram Bot domain."""
def __init__(self, hass, api_key, allowed_chat_ids, parser,
proxy_url=None, proxy_params=None):
"""Initialize the service."""
from telegram import Bot
from telegram.parsemode import ParseMode
from telegram.utils.request import Request
self.allowed_chat_ids = allowed_chat_ids
self._default_user = self.allowed_chat_ids[0]
self._last_message_id = {user: None for user in self.allowed_chat_ids}
self._parsers = {PARSER_HTML: ParseMode.HTML,
PARSER_MD: ParseMode.MARKDOWN}
self._parse_mode = self._parsers.get(parser)
request = None
if proxy_url is not None:
request = Request(proxy_url=proxy_url,
urllib3_proxy_kwargs=proxy_params)
self.bot = Bot(token=api_key, request=request)
self.hass = hass
def _get_msg_ids(self, msg_data, chat_id):
"""Get the message id to edit.
This can be one of (message_id, inline_message_id) from a msg dict,
returning a tuple.
**You can use 'last' as message_id** to edit
        the last sent message in the chat_id.
"""
message_id = inline_message_id = None
if ATTR_MESSAGEID in msg_data:
message_id = msg_data[ATTR_MESSAGEID]
if (isinstance(message_id, str) and (message_id == 'last') and
(self._last_message_id[chat_id] is not None)):
message_id = self._last_message_id[chat_id]
else:
inline_message_id = msg_data['inline_message_id']
return message_id, inline_message_id
def _get_target_chat_ids(self, target):
"""Validate chat_id targets or return default target (first).
:param target: optional list of integers ([12234, -12345])
:return list of chat_id targets (integers)
"""
if target is not None:
if isinstance(target, int):
target = [target]
chat_ids = [t for t in target if t in self.allowed_chat_ids]
if chat_ids:
return chat_ids
_LOGGER.warning("Unallowed targets: %s, using default: %s",
target, self._default_user)
return [self._default_user]
def _get_msg_kwargs(self, data):
"""Get parameters in message data kwargs."""
def _make_row_inline_keyboard(row_keyboard):
"""Make a list of InlineKeyboardButtons.
It can accept:
- a list of tuples like:
`[(text_b1, data_callback_b1),
                (text_b2, data_callback_b2), ...]`
- a string like: `/cmd1, /cmd2, /cmd3`
- or a string like: `text_b1:/cmd1, text_b2:/cmd2`
"""
from telegram import InlineKeyboardButton
buttons = []
if isinstance(row_keyboard, str):
for key in row_keyboard.split(","):
if ':/' in key:
# commands like: 'Label:/cmd' become ('Label', '/cmd')
label = key.split(':/')[0]
command = key[len(label) + 1:]
buttons.append(
InlineKeyboardButton(label, callback_data=command))
else:
# commands like: '/cmd' become ('CMD', '/cmd')
label = key.strip()[1:].upper()
buttons.append(
InlineKeyboardButton(label, callback_data=key))
elif isinstance(row_keyboard, list):
for entry in row_keyboard:
text_btn, data_btn = entry
buttons.append(
InlineKeyboardButton(text_btn, callback_data=data_btn))
else:
raise ValueError(str(row_keyboard))
return buttons
# Defaults
params = {
ATTR_PARSER: self._parse_mode,
ATTR_DISABLE_NOTIF: False,
ATTR_DISABLE_WEB_PREV: None,
ATTR_REPLY_TO_MSGID: None,
ATTR_REPLYMARKUP: None,
CONF_TIMEOUT: None
}
if data is not None:
if ATTR_PARSER in data:
params[ATTR_PARSER] = self._parsers.get(
data[ATTR_PARSER], self._parse_mode)
if CONF_TIMEOUT in data:
params[CONF_TIMEOUT] = data[CONF_TIMEOUT]
if ATTR_DISABLE_NOTIF in data:
params[ATTR_DISABLE_NOTIF] = data[ATTR_DISABLE_NOTIF]
if ATTR_DISABLE_WEB_PREV in data:
params[ATTR_DISABLE_WEB_PREV] = data[ATTR_DISABLE_WEB_PREV]
if ATTR_REPLY_TO_MSGID in data:
params[ATTR_REPLY_TO_MSGID] = data[ATTR_REPLY_TO_MSGID]
# Keyboards:
if ATTR_KEYBOARD in data:
from telegram import ReplyKeyboardMarkup
keys = data.get(ATTR_KEYBOARD)
keys = keys if isinstance(keys, list) else [keys]
params[ATTR_REPLYMARKUP] = ReplyKeyboardMarkup(
[[key.strip() for key in row.split(",")] for row in keys])
elif ATTR_KEYBOARD_INLINE in data:
from telegram import InlineKeyboardMarkup
keys = data.get(ATTR_KEYBOARD_INLINE)
keys = keys if isinstance(keys, list) else [keys]
params[ATTR_REPLYMARKUP] = InlineKeyboardMarkup(
[_make_row_inline_keyboard(row) for row in keys])
return params
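    # Hedged example of a service ``data`` payload handled above (the
    # literal key strings for the ATTR_* constants are assumptions):
    # data = {'parser': 'markdown',
    #         'keyboard_inline': ['Yes:/yes, No:/no']}
    # would select the Markdown parse mode and build an
    # InlineKeyboardMarkup row of two buttons, 'Yes' -> '/yes' and
    # 'No' -> '/no'.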
def _send_msg(self, func_send, msg_error, *args_msg, **kwargs_msg):
"""Send one message."""
from telegram.error import TelegramError
try:
out = func_send(*args_msg, **kwargs_msg)
if not isinstance(out, bool) and hasattr(out, ATTR_MESSAGEID):
chat_id = out.chat_id
self._last_message_id[chat_id] = out[ATTR_MESSAGEID]
_LOGGER.debug("Last message ID: %s (from chat_id %s)",
self._last_message_id, chat_id)
elif not isinstance(out, bool):
_LOGGER.warning("Update last message: out_type:%s, out=%s",
type(out), out)
return out
except TelegramError as exc:
_LOGGER.error("%s: %s. Args: %s, kwargs: %s",
msg_error, exc, args_msg, kwargs_msg)
def send_message(self, message="", target=None, **kwargs):
"""Send a message to one or multiple pre-allowed chat IDs."""
title = kwargs.get(ATTR_TITLE)
text = '{}\n{}'.format(title, message) if title else message
params = self._get_msg_kwargs(kwargs)
for chat_id in self._get_target_chat_ids(target):
_LOGGER.debug("Send message in chat ID %s with params: %s",
chat_id, params)
self._send_msg(self.bot.sendMessage,
"Error sending message",
chat_id, text, **params)
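    # Usage sketch (hypothetical chat id 111; assumes it is pre-allowed
    # and that ATTR_TITLE is the literal key 'title'):
    # notify_service.send_message('Backup finished', target=[111],
    #                             title='nas')
    # would deliver 'nas\nBackup finished' to chat 111.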
def delete_message(self, chat_id=None, **kwargs):
"""Delete a previously sent message."""
chat_id = self._get_target_chat_ids(chat_id)[0]
message_id, _ = self._get_msg_ids(kwargs, chat_id)
_LOGGER.debug("Delete message %s in chat ID %s", message_id, chat_id)
deleted = self._send_msg(self.bot.deleteMessage,
"Error deleting message",
chat_id, message_id)
        # Step the cached last message id back by one so a later 'last'
        # edit does not target the message that was just deleted:
        if self._last_message_id[chat_id] is not None:
            # TODO: keep a deque of recent message ids instead of a
            # single counter?
            self._last_message_id[chat_id] -= 1
return deleted
def edit_message(self, type_edit, chat_id=None, **kwargs):
"""Edit a previously sent message."""
chat_id = self._get_target_chat_ids(chat_id)[0]
message_id, inline_message_id = self._get_msg_ids(kwargs, chat_id)
params = self._get_msg_kwargs(kwargs)
_LOGGER.debug("Edit message %s in chat ID %s with params: %s",
message_id or inline_message_id, chat_id, params)
if type_edit == SERVICE_EDIT_MESSAGE:
message = kwargs.get(ATTR_MESSAGE)
title = kwargs.get(ATTR_TITLE)
text = '{}\n{}'.format(title, message) if title else message
_LOGGER.debug("Editing message with ID %s.",
message_id or inline_message_id)
return self._send_msg(self.bot.editMessageText,
"Error editing text message",
text, chat_id=chat_id, message_id=message_id,
inline_message_id=inline_message_id,
**params)
elif type_edit == SERVICE_EDIT_CAPTION:
func_send = self.bot.editMessageCaption
params[ATTR_CAPTION] = kwargs.get(ATTR_CAPTION)
else:
func_send = self.bot.editMessageReplyMarkup
return self._send_msg(func_send,
"Error editing message attributes",
chat_id=chat_id, message_id=message_id,
inline_message_id=inline_message_id,
**params)
def answer_callback_query(self, message, callback_query_id,
show_alert=False, **kwargs):
"""Answer a callback originated with a press in an inline keyboard."""
params = self._get_msg_kwargs(kwargs)
_LOGGER.debug("Answer callback query with callback ID %s: %s, "
"alert: %s.", callback_query_id, message, show_alert)
self._send_msg(self.bot.answerCallbackQuery,
"Error sending answer callback query",
callback_query_id,
text=message, show_alert=show_alert, **params)
def send_file(self, is_photo=True, target=None, **kwargs):
"""Send a photo or a document."""
params = self._get_msg_kwargs(kwargs)
caption = kwargs.get(ATTR_CAPTION)
func_send = self.bot.sendPhoto if is_photo else self.bot.sendDocument
file_content = load_data(
self.hass,
url=kwargs.get(ATTR_URL),
filepath=kwargs.get(ATTR_FILE),
username=kwargs.get(ATTR_USERNAME),
password=kwargs.get(ATTR_PASSWORD),
authentication=kwargs.get(ATTR_AUTHENTICATION),
)
if file_content:
for chat_id in self._get_target_chat_ids(target):
_LOGGER.debug("Send file to chat ID %s. Caption: %s.",
chat_id, caption)
self._send_msg(func_send, "Error sending file",
chat_id, file_content,
caption=caption, **params)
file_content.seek(0)
else:
_LOGGER.error("Can't send file with kwargs: %s", kwargs)
def send_location(self, latitude, longitude, target=None, **kwargs):
"""Send a location."""
latitude = float(latitude)
longitude = float(longitude)
params = self._get_msg_kwargs(kwargs)
for chat_id in self._get_target_chat_ids(target):
_LOGGER.debug("Send location %s/%s to chat ID %s.",
latitude, longitude, chat_id)
self._send_msg(self.bot.sendLocation,
"Error sending location",
chat_id=chat_id,
latitude=latitude, longitude=longitude, **params)
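# Minimal construction sketch for the service above (token and chat id
# are placeholders, not from the original source):
#
#     service = TelegramNotificationService(
#         hass, api_key='123456:ABC-DEF', allowed_chat_ids=[111],
#         parser=PARSER_MD)
#     service.send_message('hello world')
#     service.send_location(52.52, 13.40)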
class BaseTelegramBotEntity:
"""The base class for the telegram bot."""
def __init__(self, hass, allowed_chat_ids):
"""Initialize the bot base class."""
self.allowed_chat_ids = allowed_chat_ids
self.hass = hass
def _get_message_data(self, msg_data):
"""Return boolean msg_data_is_ok and dict msg_data."""
if not msg_data:
return False, None
bad_fields = ('text' not in msg_data and
'data' not in msg_data and
'chat' not in msg_data)
if bad_fields or 'from' not in msg_data:
# Message is not correct.
_LOGGER.error("Incoming message does not have required data (%s)",
msg_data)
return False, None
if (msg_data['from'].get('id') not in self.allowed_chat_ids or
('chat' in msg_data and
msg_data['chat'].get('id') not in self.allowed_chat_ids)):
# Origin is not allowed.
_LOGGER.error("Incoming message is not allowed (%s)", msg_data)
return True, None
data = {
ATTR_USER_ID: msg_data['from']['id'],
ATTR_FROM_FIRST: msg_data['from']['first_name']
}
if 'last_name' in msg_data['from']:
data[ATTR_FROM_LAST] = msg_data['from']['last_name']
if 'chat' in msg_data:
data[ATTR_CHAT_ID] = msg_data['chat']['id']
elif ATTR_MESSAGE in msg_data and 'chat' in msg_data[ATTR_MESSAGE]:
data[ATTR_CHAT_ID] = msg_data[ATTR_MESSAGE]['chat']['id']
return True, data
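    # Illustrative input/output (hypothetical ids; assumes the ATTR_*
    # constants map to the shown key names): for allowed_chat_ids = [111]
    # and msg_data = {'from': {'id': 111, 'first_name': 'Ann'},
    #                 'chat': {'id': 111}, 'text': '/start'},
    # this returns (True, {'user_id': 111, 'from_first': 'Ann',
    #                      'chat_id': 111}).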
def process_message(self, data):
"""Check for basic message rules and fire an event if message is ok."""
if ATTR_MSG in data or ATTR_EDITED_MSG in data:
event = EVENT_TELEGRAM_COMMAND
if ATTR_MSG in data:
data = data.get(ATTR_MSG)
else:
data = data.get(ATTR_EDITED_MSG)
message_ok, event_data = self._get_message_data(data)
if event_data is None:
return message_ok
if 'text' in data:
if data['text'][0] == '/':
pieces = data['text'].split(' ')
event_data[ATTR_COMMAND] = pieces[0]
event_data[ATTR_ARGS] = pieces[1:]
else:
event_data[ATTR_TEXT] = data['text']
event = EVENT_TELEGRAM_TEXT
else:
_LOGGER.warning("Message without text data received: %s", data)
event_data[ATTR_TEXT] = str(data)
event = EVENT_TELEGRAM_TEXT
self.hass.bus.async_fire(event, event_data)
return True
elif ATTR_CALLBACK_QUERY in data:
event = EVENT_TELEGRAM_CALLBACK
data = data.get(ATTR_CALLBACK_QUERY)
message_ok, event_data = self._get_message_data(data)
if event_data is None:
return message_ok
event_data[ATTR_DATA] = data[ATTR_DATA]
event_data[ATTR_MSG] = data[ATTR_MSG]
event_data[ATTR_CHAT_INSTANCE] = data[ATTR_CHAT_INSTANCE]
event_data[ATTR_MSGID] = data[ATTR_MSGID]
self.hass.bus.async_fire(event, event_data)
return True
else:
_LOGGER.warning("Message with unknown data received: %s", data)
return True
| mit | -958,591,407,280,862,800 | -8,761,220,672,489,176,000 | 40.272162 | 79 | 0.579094 | false |
zaxtax/scikit-learn | sklearn/utils/tests/test_seq_dataset.py | 47 | 2486 | # Author: Tom Dupre la Tour <[email protected]>
#
# License: BSD 3 clause
import numpy as np
import scipy.sparse as sp
from sklearn.utils.seq_dataset import ArrayDataset, CSRDataset
from sklearn.datasets import load_iris
from numpy.testing import assert_array_equal
from nose.tools import assert_equal
iris = load_iris()
X = iris.data.astype(np.float64)
y = iris.target.astype(np.float64)
X_csr = sp.csr_matrix(X)
sample_weight = np.arange(y.size, dtype=np.float64)
def assert_csr_equal(X, Y):
X.eliminate_zeros()
Y.eliminate_zeros()
assert_equal(X.shape[0], Y.shape[0])
assert_equal(X.shape[1], Y.shape[1])
assert_array_equal(X.data, Y.data)
assert_array_equal(X.indices, Y.indices)
assert_array_equal(X.indptr, Y.indptr)
def test_seq_dataset():
dataset1 = ArrayDataset(X, y, sample_weight, seed=42)
dataset2 = CSRDataset(X_csr.data, X_csr.indptr, X_csr.indices,
y, sample_weight, seed=42)
for dataset in (dataset1, dataset2):
for i in range(5):
# next sample
xi_, yi, swi, idx = dataset._next_py()
xi = sp.csr_matrix((xi_), shape=(1, X.shape[1]))
assert_csr_equal(xi, X_csr[idx])
assert_equal(yi, y[idx])
assert_equal(swi, sample_weight[idx])
# random sample
xi_, yi, swi, idx = dataset._random_py()
xi = sp.csr_matrix((xi_), shape=(1, X.shape[1]))
assert_csr_equal(xi, X_csr[idx])
assert_equal(yi, y[idx])
assert_equal(swi, sample_weight[idx])
def test_seq_dataset_shuffle():
dataset1 = ArrayDataset(X, y, sample_weight, seed=42)
dataset2 = CSRDataset(X_csr.data, X_csr.indptr, X_csr.indices,
y, sample_weight, seed=42)
# not shuffled
for i in range(5):
_, _, _, idx1 = dataset1._next_py()
_, _, _, idx2 = dataset2._next_py()
assert_equal(idx1, i)
assert_equal(idx2, i)
for i in range(5):
_, _, _, idx1 = dataset1._random_py()
_, _, _, idx2 = dataset2._random_py()
assert_equal(idx1, idx2)
seed = 77
dataset1._shuffle_py(seed)
dataset2._shuffle_py(seed)
for i in range(5):
_, _, _, idx1 = dataset1._next_py()
_, _, _, idx2 = dataset2._next_py()
assert_equal(idx1, idx2)
_, _, _, idx1 = dataset1._random_py()
_, _, _, idx2 = dataset2._random_py()
assert_equal(idx1, idx2)
| bsd-3-clause | 2,785,434,230,672,997,400 | 8,238,613,717,822,671,000 | 28.595238 | 66 | 0.581255 | false |
CydarLtd/ansible | lib/ansible/module_utils/connection.py | 61 | 2929 | #
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2017 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import socket
import struct
import signal
from ansible.module_utils.basic import get_exception
from ansible.module_utils._text import to_bytes, to_native
def send_data(s, data):
    packed_len = struct.pack('!Q', len(data))
return s.sendall(packed_len + data)
def recv_data(s):
header_len = 8 # size of a packed unsigned long long
data = to_bytes("")
while len(data) < header_len:
d = s.recv(header_len - len(data))
if not d:
return None
data += d
    data_len = struct.unpack('!Q', data[:header_len])[0]
data = data[header_len:]
while len(data) < data_len:
d = s.recv(data_len - len(data))
if not d:
return None
data += d
return data
def exec_command(module, command):
try:
sf = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sf.connect(module._socket_path)
data = "EXEC: %s" % command
send_data(sf, to_bytes(data.strip()))
rc = int(recv_data(sf), 10)
stdout = recv_data(sf)
stderr = recv_data(sf)
except socket.error:
exc = get_exception()
sf.close()
module.fail_json(msg='unable to connect to socket', err=str(exc))
sf.close()
return (rc, to_native(stdout), to_native(stderr))
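# Usage sketch (hypothetical module object with a valid _socket_path).
# The wire format implemented above is an 8-byte big-endian length
# prefix followed by the payload:
#
#     rc, out, err = exec_command(module, 'show version')
#     if rc != 0:
#         module.fail_json(msg='command failed', stderr=err)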
| gpl-3.0 | -5,940,241,916,715,124,000 | 8,273,707,839,135,634,000 | 38.053333 | 92 | 0.696825 | false |
SonarOpenCommunity/sonar-cxx | cxx-sensors/src/tools/clangsa_createrules.py | 1 | 6838 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SonarQube C++ Community Plugin (cxx plugin)
# Copyright (C) 2010-2021 SonarOpenCommunity
# http://github.com/SonarOpenCommunity/sonar-cxx
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
#
"""
Simple script to generate the rules xml file for SonarQube cxx plugin
from the Clang Static Analyzer checkers.
The clang compiler should be available in the PATH; alternatively, the
output of `clang -cc1 -analyzer-checker-help` can be passed in as an
input file.
"""
from xml.dom import minidom
import argparse
import re
import subprocess
import sys
import xml.etree.ElementTree as ET
def CDATA(text=None):
element = ET.Element('![CDATA[')
element.text = text
return element
ET._original_serialize_xml = ET._serialize_xml
def _serialize_xml(write, elem, qnames, namespaces,
short_empty_elements, **kwargs):
if elem.tag == '![CDATA[':
write("<%s%s]]>" % (elem.tag, elem.text))
return
return ET._original_serialize_xml(
write, elem, qnames, namespaces, short_empty_elements, **kwargs)
ET._serialize_xml = ET._serialize['xml'] = _serialize_xml
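# The stock ElementTree serializer would escape '<' and '&', so the
# '![CDATA[' pseudo-element created by CDATA() is special-cased above to
# emit a literal <![CDATA[...]]> section; every other element falls
# through to the original serializer.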
def collect_checkers(clangsa_output):
"""
Parse clang static analyzer output.
Return the list of checkers and the description.
"""
checkers_data = {}
# Checker name and description in one line.
pattern = re.compile(r'^\s\s(?P<checker_name>\S*)\s*(?P<description>.*)')
checker_name = None
for line in clangsa_output.splitlines():
line = line.decode(encoding='UTF-8')
if re.match(r'^CHECKERS:', line) or line == '':
continue
elif checker_name and not re.match(r'^\s\s\S', line):
# Collect description for the checker name.
checkers_data[checker_name] = line.strip()
checker_name = None
elif re.match(r'^\s\s\S+$', line.rstrip()):
# Only checker name is in the line.
checker_name = line.strip()
else:
# Checker name and description is in one line.
match = pattern.match(line.rstrip())
if match:
current = match.groupdict()
checkers_data[current['checker_name']] = current['description']
# Filter out debug checkers.
non_debug = {k: v for k, v in checkers_data.items() if 'debug' not in k}
return non_debug
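# Hedged example of the parsing above (two illustrative lines in the
# shape of `clang -cc1 -analyzer-checker-help` output):
#
#   CHECKERS:
#     core.DivideZero        Check for division by zero
#     debug.DumpCFG          Display Control-Flow Graphs
#
# collect_checkers() would return
# {'core.DivideZero': 'Check for division by zero'};
# the debug.* entry is filtered out as a debug checker.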
def main():
parser = argparse.ArgumentParser(
description="""Generate the rules xml file for cxx plugin
plugin from the Clang Static Analyzer checkers.
https://clang-analyzer.llvm.org/""",
usage='%(prog)s -o clangsa.xml')
parser.add_argument('-i', '--input', dest='input_file', action='store',
required=False,
help="""Input file to read rules.
If parameter does not exist
it tries to call clang.""")
parser.add_argument('-o', '--output', dest='output_file', action='store',
required=True,
help="""Output file to write the xml rules.
If the file already exists
it will be overwritten.""")
args = parser.parse_args()
clang_version = "clang version ???".encode('utf-8')
if args.input_file:
with open(args.input_file, 'r') as input:
checker_data = collect_checkers(input.read().encode('utf-8'))
else:
try:
            version_cmd = ['clang', '--version']
            version_info = subprocess.run(version_cmd,
                                          stdout=subprocess.PIPE,
                                          check=True).stdout
except subprocess.CalledProcessError as cpe:
sys.exit(cpe.returncode)
# Only the first line is interesting.
clang_version = version_info.splitlines()[0]
try:
clang_checkers = ['clang', '-cc1', '-analyzer-checker-help']
checkers_output = subprocess.run(clang_checkers,
stdout=subprocess.PIPE,
check=True).stdout
print("Collecting clang checkers ...", end='')
checker_data = collect_checkers(checkers_output)
except subprocess.CalledProcessError as cpe:
sys.exit(cpe.returncode)
if not checker_data:
print("No checkers could be processed.")
sys.exit(1)
print(" done.")
print("Generating rules xml ...", end='')
# build a tree structure
rules = ET.Element("rules")
comment = " C and C++ rules for Clang Static Analyzer. " \
"https://clang-analyzer.llvm.org/\n" + \
"Rules list was generated based on " + \
clang_version.decode("utf-8") + " "
rules.append(ET.Comment(comment))
for checker_name, description in checker_data.items():
rule = ET.SubElement(rules, "rule")
key = ET.SubElement(rule, "key")
name = ET.SubElement(rule, "name")
desc = ET.SubElement(rule, "description")
sev = ET.SubElement(rule, "severity")
c_type = ET.SubElement(rule, "type")
key.text = checker_name
name.text = checker_name
sev.text = "MAJOR"
c_type.text = "BUG"
if sev.text != 'INFO':
ET.SubElement(rule, 'remediationFunction').text = 'LINEAR'
ET.SubElement(rule, 'remediationFunctionGapMultiplier').text = '5min'
auto_tag = checker_name.split('.')[0]
tag = ET.SubElement(rule, "tag")
tag.text = auto_tag.lower()
cdata = CDATA('\n<p>' + description.strip() +
'\n</p>\n <h2>References</h2>'
' <p><a href="https://clang-analyzer.llvm.org/"'
' target="_blank">clang-analyzer.llvm.org</a></p> \n')
desc.append(cdata)
xmlstr = minidom.parseString(
ET.tostring(rules, method='xml')).toprettyxml(indent=" ")
print(" done.")
with open(args.output_file, 'w') as out:
out.write(xmlstr)
if __name__ == '__main__':
main()
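# For reference, each emitted <rule> element roughly looks like this
# (illustrative content):
#
#   <rule>
#     <key>core.DivideZero</key>
#     <name>core.DivideZero</name>
#     <description><![CDATA[ ... ]]></description>
#     <severity>MAJOR</severity>
#     <type>BUG</type>
#     <remediationFunction>LINEAR</remediationFunction>
#     <remediationFunctionGapMultiplier>5min</remediationFunctionGapMultiplier>
#     <tag>core</tag>
#   </rule>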
| lgpl-3.0 | 6,334,337,927,298,540,000 | -5,075,291,307,217,182,000 | 33.0199 | 81 | 0.580579 | false |
CingHu/neutron-ustack | neutron/db/migration/alembic_migrations/versions/1d6ee1ae5da5_db_healing.py | 6 | 1060 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Include all tables and make migrations unconditional.
Revision ID: db_healing
Revises: 5446f2a45467
Create Date: 2014-05-29 10:52:43.898980
"""
# revision identifiers, used by Alembic.
revision = 'db_healing'
down_revision = '5446f2a45467'
from neutron.db.migration.alembic_migrations import heal_script
def upgrade(active_plugins=None, options=None):
heal_script.heal()
def downgrade(active_plugins=None, options=None):
pass
| apache-2.0 | 737,224,979,423,159,800 | 662,132,994,463,898,000 | 28.444444 | 78 | 0.741509 | false |
TEAM-Gummy/platform_external_chromium_org | ppapi/generators/generator.py | 165 | 1776 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import traceback
# Note: some of these files are imported to register cmdline options.
from idl_generator import Generator
from idl_option import ParseOptions
from idl_outfile import IDLOutFile
from idl_parser import ParseFiles
from idl_c_header import HGen
from idl_thunk import TGen
from idl_gen_pnacl import PnaclGen
def Main(args):
# If no arguments are provided, assume we are trying to rebuild the
# C headers with warnings off.
try:
if not args:
args = [
'--wnone', '--cgen', '--range=start,end',
'--pnacl', '--pnaclshim',
'../native_client/src/untrusted/pnacl_irt_shim/pnacl_shim.c',
'--tgen',
]
current_dir = os.path.abspath(os.getcwd())
script_dir = os.path.abspath(os.path.dirname(__file__))
if current_dir != script_dir:
print '\nIncorrect CWD, default run skipped.'
print 'When running with no arguments set CWD to the scripts directory:'
print '\t' + script_dir + '\n'
print 'This ensures correct default paths and behavior.\n'
return 1
filenames = ParseOptions(args)
ast = ParseFiles(filenames)
if ast.errors:
print 'Found %d errors. Aborting build.\n' % ast.errors
return 1
return Generator.Run(ast)
except SystemExit, ec:
print 'Exiting with %d' % ec.code
sys.exit(ec.code)
except:
typeinfo, value, tb = sys.exc_info()
traceback.print_exception(typeinfo, value, tb)
print 'Called with: ' + ' '.join(sys.argv)
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))
| bsd-3-clause | -4,147,862,163,178,626,000 | 8,804,900,141,890,446,000 | 30.157895 | 80 | 0.659347 | false |
pyfidelity/rest-seed | backend/backrest/tests/test_change_password.py | 1 | 1714 | from pytest import fixture, mark
from transaction import commit
@fixture(scope='module')
def url(testing):
return testing.route_url('password-change')
@mark.user('alice')
def test_change_password(browser, url, alice):
data = dict(password='foo!', current='alice')
browser.put_json(url, data)
assert alice.validate_password('foo!')
@mark.user('alice')
def test_change_password_twice(browser, url, alice):
data = dict(password='foo!', current='alice')
browser.put_json(url, data)
assert alice.validate_password('foo!')
commit()
data = dict(password='alice', current='foo!')
browser.put_json(url, data)
alice = alice.query.one() # refetch alice after `commit`
assert alice.validate_password('alice')
@mark.user('alice')
def test_change_password_with_wrong_current_password(browser, url, alice):
data = dict(password='foo!', current='hurz?')
result = browser.put_json(url, data, status=400).json
assert [(e['name'], e['description']) for e in result['errors']] == [
('current', 'Password does not match')]
assert alice.validate_password('alice')
@mark.user('alice')
def test_change_password_without_current_password(browser, url, alice):
data = dict(password='foo!')
result = browser.put_json(url, data, status=400).json
assert [(e['name'], e['description']) for e in result['errors']] == [
('current', 'current is missing')]
assert alice.validate_password('alice')
@mark.user('alice')
def test_set_password_without_existing_password(browser, url, alice):
alice.password = None
data = dict(password='foo!', current=None)
browser.put_json(url, data)
assert alice.validate_password('foo!')
| bsd-2-clause | -4,015,997,059,038,244,000 | 7,768,768,278,348,925,000 | 31.961538 | 74 | 0.673279 | false |
gsnbng/erpnext | erpnext/buying/report/subcontracted_item_to_be_received/subcontracted_item_to_be_received.py | 9 | 2552 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
if filters.from_date >= filters.to_date:
frappe.msgprint(_("To Date must be greater than From Date"))
data = []
columns = get_columns()
	get_data(data, filters)
return columns, data
def get_columns():
return [
{
"label": _("Purchase Order"),
"fieldtype": "Link",
"fieldname": "purchase_order",
"options": "Purchase Order",
"width": 150
},
{
"label": _("Date"),
"fieldtype": "Date",
"fieldname": "date",
"hidden": 1,
"width": 150
},
{
"label": _("Supplier"),
"fieldtype": "Link",
"fieldname": "supplier",
"options": "Supplier",
"width": 150
},
{
"label": _("Finished Good Item Code"),
"fieldtype": "Data",
"fieldname": "fg_item_code",
"width": 100
},
{
"label": _("Item name"),
"fieldtype": "Data",
"fieldname": "item_name",
"width": 100
},
{
"label": _("Required Quantity"),
"fieldtype": "Float",
"fieldname": "required_qty",
"width": 100
},
{
"label": _("Received Quantity"),
"fieldtype": "Float",
"fieldname": "received_qty",
"width": 100
},
{
"label": _("Pending Quantity"),
"fieldtype": "Float",
"fieldname": "pending_qty",
"width": 100
}
]
def get_data(data, filters):
po = get_po(filters)
po_name = [v.name for v in po]
sub_items = get_purchase_order_item_supplied(po_name)
for item in sub_items:
for order in po:
if order.name == item.parent and item.received_qty < item.qty:
				row = {
'purchase_order': item.parent,
'date': order.transaction_date,
'supplier': order.supplier,
'fg_item_code': item.item_code,
'item_name': item.item_name,
'required_qty': item.qty,
					'received_qty': item.received_qty,
					'pending_qty': item.qty - item.received_qty
}
data.append(row)
def get_po(filters):
record_filters = [
["is_subcontracted", "=", "Yes"],
["supplier", "=", filters.supplier],
["transaction_date", "<=", filters.to_date],
["transaction_date", ">=", filters.from_date],
["docstatus", "=", 1]
]
return frappe.get_all("Purchase Order", filters=record_filters, fields=["name", "transaction_date", "supplier"])
def get_purchase_order_item_supplied(po):
return frappe.get_all("Purchase Order Item", filters=[
('parent', 'IN', po)
], fields=["parent", "item_code", "item_name", "qty", "received_qty"])
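# Illustrative report inputs/outputs (values are made up): with
# filters = {"supplier": "_Test Supplier", "from_date": "2021-01-01",
#            "to_date": "2021-01-31"},
# a submitted subcontracting PO item with qty 10 and received_qty 4
# yields a row with pending_qty = 6.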
| agpl-3.0 | 2,516,761,596,343,678,500 | -500,010,741,017,143,500 | 23.538462 | 113 | 0.605016 | false |
wscullin/spack | var/spack/repos/builtin/packages/py-pytz/package.py | 3 | 1982 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPytz(PythonPackage):
"""World timezone definitions, modern and historical."""
homepage = "http://pythonhosted.org/pytz"
url = "https://pypi.io/packages/source/p/pytz/pytz-2016.10.tar.gz"
import_modules = ['pytz']
version('2017.2', 'f89bde8a811c8a1a5bac17eaaa94383c',
url="https://pypi.io/packages/source/p/pytz/pytz-2017.2.zip")
version('2016.10', 'cc9f16ba436efabdcef3c4d32ae4919c')
version('2016.6.1', 'b6c28a3b968bc1d8badfb61b93874e03')
    version('2016.3', 'abae92c3301b27bd8a9f56b14f52cb29')
    version('2015.4', '417a47b1c432d90333e42084a605d3d8')
    version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7')
depends_on('py-setuptools', type='build')
| lgpl-2.1 | -8,730,758,055,892,100,000 | 5,824,485,266,261,690,000 | 44.045455 | 78 | 0.680626 | false |
weidnem/IntroPython2016 | students/baumel/session_07/HTML_lab/test_html_render.py | 3 | 1663 | """
test code for html_render
"""
import io
from html_render import Element
def test_init():
e = Element()
e = Element("this is some text")
def test_content():
    # FIXME: this tests internals!
e = Element("this is some text")
assert "this is some text" in e.content
def test_append():
e = Element("this is some text")
e.append("some more text, wooHoo!")
assert "some more text, wooHoo!" in e.content
def test_two_instances():
e = Element("this is some text")
e2 = Element("this is some text")
e.append("some more text")
assert "some more text" not in e2.content
def test_render():
outfile = io.StringIO()
e = Element("this is some text")
e.append("and this is some more text, WooHoo!!")
e.render(outfile)
outfile.seek(0)
file_contents = outfile.read()
open('test1.html', 'w').write(file_contents)
print(file_contents)
assert ("this is some text") in file_contents
assert ("and this is some more text, WooHoo!!") in file_contents
assert file_contents.startswith("<html>")
assert file_contents.strip().endswith("</html>")
def test_tag():
outfile = io.StringIO()
e = Element("this is some text", "body")
e.append("and this is some more text, WooHoo!!")
e.render(outfile)
outfile.seek(0)
file_contents = outfile.read()
open('test1.html', 'w').write(file_contents)
print(file_contents)
assert ("this is some text") in file_contents
assert ("and this is some more text, WooHoo!!") in file_contents
assert file_contents.startswith("<body>")
assert file_contents.strip().endswith("</body>")
| unlicense | -2,099,930,993,446,777,900 | -8,696,614,431,818,727,000 | 20.597403 | 65 | 0.675286 | false |
antb/TPT----My-old-mod | src/python/stdlib/plat-mac/Carbon/Appearance.py | 81 | 27268 | # Generated from 'Appearance.h'
def FOUR_CHAR_CODE(x): return x
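# In the original C headers FOUR_CHAR_CODE packs four characters into a
# 32-bit OSType; in this generated Python binding it is an identity
# function, so the constants below stay plain 4-character strings such
# as 'appr'.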
kAppearanceEventClass = FOUR_CHAR_CODE('appr')
kAEAppearanceChanged = FOUR_CHAR_CODE('thme')
kAESystemFontChanged = FOUR_CHAR_CODE('sysf')
kAESmallSystemFontChanged = FOUR_CHAR_CODE('ssfn')
kAEViewsFontChanged = FOUR_CHAR_CODE('vfnt')
kThemeDataFileType = FOUR_CHAR_CODE('thme')
kThemePlatinumFileType = FOUR_CHAR_CODE('pltn')
kThemeCustomThemesFileType = FOUR_CHAR_CODE('scen')
kThemeSoundTrackFileType = FOUR_CHAR_CODE('tsnd')
kThemeBrushDialogBackgroundActive = 1
kThemeBrushDialogBackgroundInactive = 2
kThemeBrushAlertBackgroundActive = 3
kThemeBrushAlertBackgroundInactive = 4
kThemeBrushModelessDialogBackgroundActive = 5
kThemeBrushModelessDialogBackgroundInactive = 6
kThemeBrushUtilityWindowBackgroundActive = 7
kThemeBrushUtilityWindowBackgroundInactive = 8
kThemeBrushListViewSortColumnBackground = 9
kThemeBrushListViewBackground = 10
kThemeBrushIconLabelBackground = 11
kThemeBrushListViewSeparator = 12
kThemeBrushChasingArrows = 13
kThemeBrushDragHilite = 14
kThemeBrushDocumentWindowBackground = 15
kThemeBrushFinderWindowBackground = 16
kThemeBrushScrollBarDelimiterActive = 17
kThemeBrushScrollBarDelimiterInactive = 18
kThemeBrushFocusHighlight = 19
kThemeBrushPopupArrowActive = 20
kThemeBrushPopupArrowPressed = 21
kThemeBrushPopupArrowInactive = 22
kThemeBrushAppleGuideCoachmark = 23
kThemeBrushIconLabelBackgroundSelected = 24
kThemeBrushStaticAreaFill = 25
kThemeBrushActiveAreaFill = 26
kThemeBrushButtonFrameActive = 27
kThemeBrushButtonFrameInactive = 28
kThemeBrushButtonFaceActive = 29
kThemeBrushButtonFaceInactive = 30
kThemeBrushButtonFacePressed = 31
kThemeBrushButtonActiveDarkShadow = 32
kThemeBrushButtonActiveDarkHighlight = 33
kThemeBrushButtonActiveLightShadow = 34
kThemeBrushButtonActiveLightHighlight = 35
kThemeBrushButtonInactiveDarkShadow = 36
kThemeBrushButtonInactiveDarkHighlight = 37
kThemeBrushButtonInactiveLightShadow = 38
kThemeBrushButtonInactiveLightHighlight = 39
kThemeBrushButtonPressedDarkShadow = 40
kThemeBrushButtonPressedDarkHighlight = 41
kThemeBrushButtonPressedLightShadow = 42
kThemeBrushButtonPressedLightHighlight = 43
kThemeBrushBevelActiveLight = 44
kThemeBrushBevelActiveDark = 45
kThemeBrushBevelInactiveLight = 46
kThemeBrushBevelInactiveDark = 47
kThemeBrushNotificationWindowBackground = 48
kThemeBrushMovableModalBackground = 49
kThemeBrushSheetBackgroundOpaque = 50
kThemeBrushDrawerBackground = 51
kThemeBrushToolbarBackground = 52
kThemeBrushSheetBackgroundTransparent = 53
kThemeBrushMenuBackground = 54
kThemeBrushMenuBackgroundSelected = 55
kThemeBrushSheetBackground = kThemeBrushSheetBackgroundOpaque
kThemeBrushBlack = -1
kThemeBrushWhite = -2
kThemeBrushPrimaryHighlightColor = -3
kThemeBrushSecondaryHighlightColor = -4
kThemeTextColorDialogActive = 1
kThemeTextColorDialogInactive = 2
kThemeTextColorAlertActive = 3
kThemeTextColorAlertInactive = 4
kThemeTextColorModelessDialogActive = 5
kThemeTextColorModelessDialogInactive = 6
kThemeTextColorWindowHeaderActive = 7
kThemeTextColorWindowHeaderInactive = 8
kThemeTextColorPlacardActive = 9
kThemeTextColorPlacardInactive = 10
kThemeTextColorPlacardPressed = 11
kThemeTextColorPushButtonActive = 12
kThemeTextColorPushButtonInactive = 13
kThemeTextColorPushButtonPressed = 14
kThemeTextColorBevelButtonActive = 15
kThemeTextColorBevelButtonInactive = 16
kThemeTextColorBevelButtonPressed = 17
kThemeTextColorPopupButtonActive = 18
kThemeTextColorPopupButtonInactive = 19
kThemeTextColorPopupButtonPressed = 20
kThemeTextColorIconLabel = 21
kThemeTextColorListView = 22
kThemeTextColorDocumentWindowTitleActive = 23
kThemeTextColorDocumentWindowTitleInactive = 24
kThemeTextColorMovableModalWindowTitleActive = 25
kThemeTextColorMovableModalWindowTitleInactive = 26
kThemeTextColorUtilityWindowTitleActive = 27
kThemeTextColorUtilityWindowTitleInactive = 28
kThemeTextColorPopupWindowTitleActive = 29
kThemeTextColorPopupWindowTitleInactive = 30
kThemeTextColorRootMenuActive = 31
kThemeTextColorRootMenuSelected = 32
kThemeTextColorRootMenuDisabled = 33
kThemeTextColorMenuItemActive = 34
kThemeTextColorMenuItemSelected = 35
kThemeTextColorMenuItemDisabled = 36
kThemeTextColorPopupLabelActive = 37
kThemeTextColorPopupLabelInactive = 38
kThemeTextColorTabFrontActive = 39
kThemeTextColorTabNonFrontActive = 40
kThemeTextColorTabNonFrontPressed = 41
kThemeTextColorTabFrontInactive = 42
kThemeTextColorTabNonFrontInactive = 43
kThemeTextColorIconLabelSelected = 44
kThemeTextColorBevelButtonStickyActive = 45
kThemeTextColorBevelButtonStickyInactive = 46
kThemeTextColorNotification = 47
kThemeTextColorBlack = -1
kThemeTextColorWhite = -2
kThemeStateInactive = 0
kThemeStateActive = 1
kThemeStatePressed = 2
kThemeStateRollover = 6
kThemeStateUnavailable = 7
kThemeStateUnavailableInactive = 8
kThemeStateDisabled = 0
kThemeStatePressedUp = 2
kThemeStatePressedDown = 3
kThemeArrowCursor = 0
kThemeCopyArrowCursor = 1
kThemeAliasArrowCursor = 2
kThemeContextualMenuArrowCursor = 3
kThemeIBeamCursor = 4
kThemeCrossCursor = 5
kThemePlusCursor = 6
kThemeWatchCursor = 7
kThemeClosedHandCursor = 8
kThemeOpenHandCursor = 9
kThemePointingHandCursor = 10
kThemeCountingUpHandCursor = 11
kThemeCountingDownHandCursor = 12
kThemeCountingUpAndDownHandCursor = 13
kThemeSpinningCursor = 14
kThemeResizeLeftCursor = 15
kThemeResizeRightCursor = 16
kThemeResizeLeftRightCursor = 17
kThemeMenuBarNormal = 0
kThemeMenuBarSelected = 1
kThemeMenuSquareMenuBar = (1 << 0)
kThemeMenuActive = 0
kThemeMenuSelected = 1
kThemeMenuDisabled = 3
kThemeMenuTypePullDown = 0
kThemeMenuTypePopUp = 1
kThemeMenuTypeHierarchical = 2
kThemeMenuTypeInactive = 0x0100
kThemeMenuItemPlain = 0
kThemeMenuItemHierarchical = 1
kThemeMenuItemScrollUpArrow = 2
kThemeMenuItemScrollDownArrow = 3
kThemeMenuItemAtTop = 0x0100
kThemeMenuItemAtBottom = 0x0200
kThemeMenuItemHierBackground = 0x0400
kThemeMenuItemPopUpBackground = 0x0800
kThemeMenuItemHasIcon = 0x8000
kThemeMenuItemNoBackground = 0x4000
kThemeBackgroundTabPane = 1
kThemeBackgroundPlacard = 2
kThemeBackgroundWindowHeader = 3
kThemeBackgroundListViewWindowHeader = 4
kThemeBackgroundSecondaryGroupBox = 5
kThemeNameTag = FOUR_CHAR_CODE('name')
kThemeVariantNameTag = FOUR_CHAR_CODE('varn')
kThemeVariantBaseTintTag = FOUR_CHAR_CODE('tint')
kThemeHighlightColorTag = FOUR_CHAR_CODE('hcol')
kThemeScrollBarArrowStyleTag = FOUR_CHAR_CODE('sbar')
kThemeScrollBarThumbStyleTag = FOUR_CHAR_CODE('sbth')
kThemeSoundsEnabledTag = FOUR_CHAR_CODE('snds')
kThemeDblClickCollapseTag = FOUR_CHAR_CODE('coll')
kThemeAppearanceFileNameTag = FOUR_CHAR_CODE('thme')
kThemeSystemFontTag = FOUR_CHAR_CODE('lgsf')
kThemeSmallSystemFontTag = FOUR_CHAR_CODE('smsf')
kThemeViewsFontTag = FOUR_CHAR_CODE('vfnt')
kThemeViewsFontSizeTag = FOUR_CHAR_CODE('vfsz')
kThemeDesktopPatternNameTag = FOUR_CHAR_CODE('patn')
kThemeDesktopPatternTag = FOUR_CHAR_CODE('patt')
kThemeDesktopPictureNameTag = FOUR_CHAR_CODE('dpnm')
kThemeDesktopPictureAliasTag = FOUR_CHAR_CODE('dpal')
kThemeDesktopPictureAlignmentTag = FOUR_CHAR_CODE('dpan')
kThemeHighlightColorNameTag = FOUR_CHAR_CODE('hcnm')
kThemeExamplePictureIDTag = FOUR_CHAR_CODE('epic')
kThemeSoundTrackNameTag = FOUR_CHAR_CODE('sndt')
kThemeSoundMaskTag = FOUR_CHAR_CODE('smsk')
kThemeUserDefinedTag = FOUR_CHAR_CODE('user')
kThemeSmoothFontEnabledTag = FOUR_CHAR_CODE('smoo')
kThemeSmoothFontMinSizeTag = FOUR_CHAR_CODE('smos')
kTiledOnScreen = 1
kCenterOnScreen = 2
kFitToScreen = 3
kFillScreen = 4
kUseBestGuess = 5
kThemeCheckBoxClassicX = 0
kThemeCheckBoxCheckMark = 1
kThemeScrollBarArrowsSingle = 0
kThemeScrollBarArrowsLowerRight = 1
kThemeScrollBarThumbNormal = 0
kThemeScrollBarThumbProportional = 1
kThemeSystemFont = 0
kThemeSmallSystemFont = 1
kThemeSmallEmphasizedSystemFont = 2
kThemeViewsFont = 3
kThemeEmphasizedSystemFont = 4
kThemeApplicationFont = 5
kThemeLabelFont = 6
kThemeMenuTitleFont = 100
kThemeMenuItemFont = 101
kThemeMenuItemMarkFont = 102
kThemeMenuItemCmdKeyFont = 103
kThemeWindowTitleFont = 104
kThemePushButtonFont = 105
kThemeUtilityWindowTitleFont = 106
kThemeAlertHeaderFont = 107
kThemeCurrentPortFont = 200
kThemeTabNonFront = 0
kThemeTabNonFrontPressed = 1
kThemeTabNonFrontInactive = 2
kThemeTabFront = 3
kThemeTabFrontInactive = 4
kThemeTabNonFrontUnavailable = 5
kThemeTabFrontUnavailable = 6
kThemeTabNorth = 0
kThemeTabSouth = 1
kThemeTabEast = 2
kThemeTabWest = 3
kThemeSmallTabHeight = 16
kThemeLargeTabHeight = 21
kThemeTabPaneOverlap = 3
kThemeSmallTabHeightMax = 19
kThemeLargeTabHeightMax = 24
kThemeMediumScrollBar = 0
kThemeSmallScrollBar = 1
kThemeMediumSlider = 2
kThemeMediumProgressBar = 3
kThemeMediumIndeterminateBar = 4
kThemeRelevanceBar = 5
kThemeSmallSlider = 6
kThemeLargeProgressBar = 7
kThemeLargeIndeterminateBar = 8
kThemeTrackActive = 0
kThemeTrackDisabled = 1
kThemeTrackNothingToScroll = 2
kThemeTrackInactive = 3
kThemeLeftOutsideArrowPressed = 0x01
kThemeLeftInsideArrowPressed = 0x02
kThemeLeftTrackPressed = 0x04
kThemeThumbPressed = 0x08
kThemeRightTrackPressed = 0x10
kThemeRightInsideArrowPressed = 0x20
kThemeRightOutsideArrowPressed = 0x40
kThemeTopOutsideArrowPressed = kThemeLeftOutsideArrowPressed
kThemeTopInsideArrowPressed = kThemeLeftInsideArrowPressed
kThemeTopTrackPressed = kThemeLeftTrackPressed
kThemeBottomTrackPressed = kThemeRightTrackPressed
kThemeBottomInsideArrowPressed = kThemeRightInsideArrowPressed
kThemeBottomOutsideArrowPressed = kThemeRightOutsideArrowPressed
kThemeThumbPlain = 0
kThemeThumbUpward = 1
kThemeThumbDownward = 2
kThemeTrackHorizontal = (1 << 0)
kThemeTrackRightToLeft = (1 << 1)
kThemeTrackShowThumb = (1 << 2)
kThemeTrackThumbRgnIsNotGhost = (1 << 3)
kThemeTrackNoScrollBarArrows = (1 << 4)
kThemeWindowHasGrow = (1 << 0)
kThemeWindowHasHorizontalZoom = (1 << 3)
kThemeWindowHasVerticalZoom = (1 << 4)
kThemeWindowHasFullZoom = kThemeWindowHasHorizontalZoom + kThemeWindowHasVerticalZoom
kThemeWindowHasCloseBox = (1 << 5)
kThemeWindowHasCollapseBox = (1 << 6)
kThemeWindowHasTitleText = (1 << 7)
kThemeWindowIsCollapsed = (1 << 8)
kThemeWindowHasDirty = (1 << 9)
kThemeDocumentWindow = 0
kThemeDialogWindow = 1
kThemeMovableDialogWindow = 2
kThemeAlertWindow = 3
kThemeMovableAlertWindow = 4
kThemePlainDialogWindow = 5
kThemeShadowDialogWindow = 6
kThemePopupWindow = 7
kThemeUtilityWindow = 8
kThemeUtilitySideWindow = 9
kThemeSheetWindow = 10
kThemeDrawerWindow = 11
kThemeWidgetCloseBox = 0
kThemeWidgetZoomBox = 1
kThemeWidgetCollapseBox = 2
kThemeWidgetDirtyCloseBox = 6
kThemeArrowLeft = 0
kThemeArrowDown = 1
kThemeArrowRight = 2
kThemeArrowUp = 3
kThemeArrow3pt = 0
kThemeArrow5pt = 1
kThemeArrow7pt = 2
kThemeArrow9pt = 3
kThemeGrowLeft = (1 << 0)
kThemeGrowRight = (1 << 1)
kThemeGrowUp = (1 << 2)
kThemeGrowDown = (1 << 3)
kThemePushButton = 0
kThemeCheckBox = 1
kThemeRadioButton = 2
kThemeBevelButton = 3
kThemeArrowButton = 4
kThemePopupButton = 5
kThemeDisclosureButton = 6
kThemeIncDecButton = 7
kThemeSmallBevelButton = 8
kThemeMediumBevelButton = 3
kThemeLargeBevelButton = 9
kThemeListHeaderButton = 10
kThemeRoundButton = 11
kThemeLargeRoundButton = 12
kThemeSmallCheckBox = 13
kThemeSmallRadioButton = 14
kThemeRoundedBevelButton = 15
kThemeNormalCheckBox = kThemeCheckBox
kThemeNormalRadioButton = kThemeRadioButton
kThemeButtonOff = 0
kThemeButtonOn = 1
kThemeButtonMixed = 2
kThemeDisclosureRight = 0
kThemeDisclosureDown = 1
kThemeDisclosureLeft = 2
kThemeAdornmentNone = 0
kThemeAdornmentDefault = (1 << 0)
kThemeAdornmentFocus = (1 << 2)
kThemeAdornmentRightToLeft = (1 << 4)
kThemeAdornmentDrawIndicatorOnly = (1 << 5)
kThemeAdornmentHeaderButtonLeftNeighborSelected = (1 << 6)
kThemeAdornmentHeaderButtonRightNeighborSelected = (1 << 7)
kThemeAdornmentHeaderButtonSortUp = (1 << 8)
kThemeAdornmentHeaderMenuButton = (1 << 9)
kThemeAdornmentHeaderButtonNoShadow = (1 << 10)
kThemeAdornmentHeaderButtonShadowOnly = (1 << 11)
kThemeAdornmentNoShadow = kThemeAdornmentHeaderButtonNoShadow
kThemeAdornmentShadowOnly = kThemeAdornmentHeaderButtonShadowOnly
kThemeAdornmentArrowLeftArrow = (1 << 6)
kThemeAdornmentArrowDownArrow = (1 << 7)
kThemeAdornmentArrowDoubleArrow = (1 << 8)
kThemeAdornmentArrowUpArrow = (1 << 9)
kThemeNoSounds = 0
kThemeWindowSoundsMask = (1 << 0)
kThemeMenuSoundsMask = (1 << 1)
kThemeControlSoundsMask = (1 << 2)
kThemeFinderSoundsMask = (1 << 3)
kThemeDragSoundNone = 0
kThemeDragSoundMoveWindow = FOUR_CHAR_CODE('wmov')
kThemeDragSoundGrowWindow = FOUR_CHAR_CODE('wgro')
kThemeDragSoundMoveUtilWindow = FOUR_CHAR_CODE('umov')
kThemeDragSoundGrowUtilWindow = FOUR_CHAR_CODE('ugro')
kThemeDragSoundMoveDialog = FOUR_CHAR_CODE('dmov')
kThemeDragSoundMoveAlert = FOUR_CHAR_CODE('amov')
kThemeDragSoundMoveIcon = FOUR_CHAR_CODE('imov')
kThemeDragSoundSliderThumb = FOUR_CHAR_CODE('slth')
kThemeDragSoundSliderGhost = FOUR_CHAR_CODE('slgh')
kThemeDragSoundScrollBarThumb = FOUR_CHAR_CODE('sbth')
kThemeDragSoundScrollBarGhost = FOUR_CHAR_CODE('sbgh')
kThemeDragSoundScrollBarArrowDecreasing = FOUR_CHAR_CODE('sbad')
kThemeDragSoundScrollBarArrowIncreasing = FOUR_CHAR_CODE('sbai')
kThemeDragSoundDragging = FOUR_CHAR_CODE('drag')
kThemeSoundNone = 0
kThemeSoundMenuOpen = FOUR_CHAR_CODE('mnuo')
kThemeSoundMenuClose = FOUR_CHAR_CODE('mnuc')
kThemeSoundMenuItemHilite = FOUR_CHAR_CODE('mnui')
kThemeSoundMenuItemRelease = FOUR_CHAR_CODE('mnus')
kThemeSoundWindowClosePress = FOUR_CHAR_CODE('wclp')
kThemeSoundWindowCloseEnter = FOUR_CHAR_CODE('wcle')
kThemeSoundWindowCloseExit = FOUR_CHAR_CODE('wclx')
kThemeSoundWindowCloseRelease = FOUR_CHAR_CODE('wclr')
kThemeSoundWindowZoomPress = FOUR_CHAR_CODE('wzmp')
kThemeSoundWindowZoomEnter = FOUR_CHAR_CODE('wzme')
kThemeSoundWindowZoomExit = FOUR_CHAR_CODE('wzmx')
kThemeSoundWindowZoomRelease = FOUR_CHAR_CODE('wzmr')
kThemeSoundWindowCollapsePress = FOUR_CHAR_CODE('wcop')
kThemeSoundWindowCollapseEnter = FOUR_CHAR_CODE('wcoe')
kThemeSoundWindowCollapseExit = FOUR_CHAR_CODE('wcox')
kThemeSoundWindowCollapseRelease = FOUR_CHAR_CODE('wcor')
kThemeSoundWindowDragBoundary = FOUR_CHAR_CODE('wdbd')
kThemeSoundUtilWinClosePress = FOUR_CHAR_CODE('uclp')
kThemeSoundUtilWinCloseEnter = FOUR_CHAR_CODE('ucle')
kThemeSoundUtilWinCloseExit = FOUR_CHAR_CODE('uclx')
kThemeSoundUtilWinCloseRelease = FOUR_CHAR_CODE('uclr')
kThemeSoundUtilWinZoomPress = FOUR_CHAR_CODE('uzmp')
kThemeSoundUtilWinZoomEnter = FOUR_CHAR_CODE('uzme')
kThemeSoundUtilWinZoomExit = FOUR_CHAR_CODE('uzmx')
kThemeSoundUtilWinZoomRelease = FOUR_CHAR_CODE('uzmr')
kThemeSoundUtilWinCollapsePress = FOUR_CHAR_CODE('ucop')
kThemeSoundUtilWinCollapseEnter = FOUR_CHAR_CODE('ucoe')
kThemeSoundUtilWinCollapseExit = FOUR_CHAR_CODE('ucox')
kThemeSoundUtilWinCollapseRelease = FOUR_CHAR_CODE('ucor')
kThemeSoundUtilWinDragBoundary = FOUR_CHAR_CODE('udbd')
kThemeSoundWindowOpen = FOUR_CHAR_CODE('wopn')
kThemeSoundWindowClose = FOUR_CHAR_CODE('wcls')
kThemeSoundWindowZoomIn = FOUR_CHAR_CODE('wzmi')
kThemeSoundWindowZoomOut = FOUR_CHAR_CODE('wzmo')
kThemeSoundWindowCollapseUp = FOUR_CHAR_CODE('wcol')
kThemeSoundWindowCollapseDown = FOUR_CHAR_CODE('wexp')
kThemeSoundWindowActivate = FOUR_CHAR_CODE('wact')
kThemeSoundUtilWindowOpen = FOUR_CHAR_CODE('uopn')
kThemeSoundUtilWindowClose = FOUR_CHAR_CODE('ucls')
kThemeSoundUtilWindowZoomIn = FOUR_CHAR_CODE('uzmi')
kThemeSoundUtilWindowZoomOut = FOUR_CHAR_CODE('uzmo')
kThemeSoundUtilWindowCollapseUp = FOUR_CHAR_CODE('ucol')
kThemeSoundUtilWindowCollapseDown = FOUR_CHAR_CODE('uexp')
kThemeSoundUtilWindowActivate = FOUR_CHAR_CODE('uact')
kThemeSoundDialogOpen = FOUR_CHAR_CODE('dopn')
kThemeSoundDialogClose = FOUR_CHAR_CODE('dlgc')
kThemeSoundAlertOpen = FOUR_CHAR_CODE('aopn')
kThemeSoundAlertClose = FOUR_CHAR_CODE('altc')
kThemeSoundPopupWindowOpen = FOUR_CHAR_CODE('pwop')
kThemeSoundPopupWindowClose = FOUR_CHAR_CODE('pwcl')
kThemeSoundButtonPress = FOUR_CHAR_CODE('btnp')
kThemeSoundButtonEnter = FOUR_CHAR_CODE('btne')
kThemeSoundButtonExit = FOUR_CHAR_CODE('btnx')
kThemeSoundButtonRelease = FOUR_CHAR_CODE('btnr')
kThemeSoundDefaultButtonPress = FOUR_CHAR_CODE('dbtp')
kThemeSoundDefaultButtonEnter = FOUR_CHAR_CODE('dbte')
kThemeSoundDefaultButtonExit = FOUR_CHAR_CODE('dbtx')
kThemeSoundDefaultButtonRelease = FOUR_CHAR_CODE('dbtr')
kThemeSoundCancelButtonPress = FOUR_CHAR_CODE('cbtp')
kThemeSoundCancelButtonEnter = FOUR_CHAR_CODE('cbte')
kThemeSoundCancelButtonExit = FOUR_CHAR_CODE('cbtx')
kThemeSoundCancelButtonRelease = FOUR_CHAR_CODE('cbtr')
kThemeSoundCheckboxPress = FOUR_CHAR_CODE('chkp')
kThemeSoundCheckboxEnter = FOUR_CHAR_CODE('chke')
kThemeSoundCheckboxExit = FOUR_CHAR_CODE('chkx')
kThemeSoundCheckboxRelease = FOUR_CHAR_CODE('chkr')
kThemeSoundRadioPress = FOUR_CHAR_CODE('radp')
kThemeSoundRadioEnter = FOUR_CHAR_CODE('rade')
kThemeSoundRadioExit = FOUR_CHAR_CODE('radx')
kThemeSoundRadioRelease = FOUR_CHAR_CODE('radr')
kThemeSoundScrollArrowPress = FOUR_CHAR_CODE('sbap')
kThemeSoundScrollArrowEnter = FOUR_CHAR_CODE('sbae')
kThemeSoundScrollArrowExit = FOUR_CHAR_CODE('sbax')
kThemeSoundScrollArrowRelease = FOUR_CHAR_CODE('sbar')
kThemeSoundScrollEndOfTrack = FOUR_CHAR_CODE('sbte')
kThemeSoundScrollTrackPress = FOUR_CHAR_CODE('sbtp')
kThemeSoundSliderEndOfTrack = FOUR_CHAR_CODE('slte')
kThemeSoundSliderTrackPress = FOUR_CHAR_CODE('sltp')
kThemeSoundBalloonOpen = FOUR_CHAR_CODE('blno')
kThemeSoundBalloonClose = FOUR_CHAR_CODE('blnc')
kThemeSoundBevelPress = FOUR_CHAR_CODE('bevp')
kThemeSoundBevelEnter = FOUR_CHAR_CODE('beve')
kThemeSoundBevelExit = FOUR_CHAR_CODE('bevx')
kThemeSoundBevelRelease = FOUR_CHAR_CODE('bevr')
kThemeSoundLittleArrowUpPress = FOUR_CHAR_CODE('laup')
kThemeSoundLittleArrowDnPress = FOUR_CHAR_CODE('ladp')
kThemeSoundLittleArrowEnter = FOUR_CHAR_CODE('lare')
kThemeSoundLittleArrowExit = FOUR_CHAR_CODE('larx')
kThemeSoundLittleArrowUpRelease = FOUR_CHAR_CODE('laur')
kThemeSoundLittleArrowDnRelease = FOUR_CHAR_CODE('ladr')
kThemeSoundPopupPress = FOUR_CHAR_CODE('popp')
kThemeSoundPopupEnter = FOUR_CHAR_CODE('pope')
kThemeSoundPopupExit = FOUR_CHAR_CODE('popx')
kThemeSoundPopupRelease = FOUR_CHAR_CODE('popr')
kThemeSoundDisclosurePress = FOUR_CHAR_CODE('dscp')
kThemeSoundDisclosureEnter = FOUR_CHAR_CODE('dsce')
kThemeSoundDisclosureExit = FOUR_CHAR_CODE('dscx')
kThemeSoundDisclosureRelease = FOUR_CHAR_CODE('dscr')
kThemeSoundTabPressed = FOUR_CHAR_CODE('tabp')
kThemeSoundTabEnter = FOUR_CHAR_CODE('tabe')
kThemeSoundTabExit = FOUR_CHAR_CODE('tabx')
kThemeSoundTabRelease = FOUR_CHAR_CODE('tabr')
kThemeSoundDragTargetHilite = FOUR_CHAR_CODE('dthi')
kThemeSoundDragTargetUnhilite = FOUR_CHAR_CODE('dtuh')
kThemeSoundDragTargetDrop = FOUR_CHAR_CODE('dtdr')
kThemeSoundEmptyTrash = FOUR_CHAR_CODE('ftrs')
kThemeSoundSelectItem = FOUR_CHAR_CODE('fsel')
kThemeSoundNewItem = FOUR_CHAR_CODE('fnew')
kThemeSoundReceiveDrop = FOUR_CHAR_CODE('fdrp')
kThemeSoundCopyDone = FOUR_CHAR_CODE('fcpd')
kThemeSoundResolveAlias = FOUR_CHAR_CODE('fral')
kThemeSoundLaunchApp = FOUR_CHAR_CODE('flap')
kThemeSoundDiskInsert = FOUR_CHAR_CODE('dski')
kThemeSoundDiskEject = FOUR_CHAR_CODE('dske')
kThemeSoundFinderDragOnIcon = FOUR_CHAR_CODE('fdon')
kThemeSoundFinderDragOffIcon = FOUR_CHAR_CODE('fdof')
kThemePopupTabNormalPosition = 0
kThemePopupTabCenterOnWindow = 1
kThemePopupTabCenterOnOffset = 2
kThemeMetricScrollBarWidth = 0
kThemeMetricSmallScrollBarWidth = 1
kThemeMetricCheckBoxHeight = 2
kThemeMetricRadioButtonHeight = 3
kThemeMetricEditTextWhitespace = 4
kThemeMetricEditTextFrameOutset = 5
kThemeMetricListBoxFrameOutset = 6
kThemeMetricFocusRectOutset = 7
kThemeMetricImageWellThickness = 8
kThemeMetricScrollBarOverlap = 9
kThemeMetricLargeTabHeight = 10
kThemeMetricLargeTabCapsWidth = 11
kThemeMetricTabFrameOverlap = 12
kThemeMetricTabIndentOrStyle = 13
kThemeMetricTabOverlap = 14
kThemeMetricSmallTabHeight = 15
kThemeMetricSmallTabCapsWidth = 16
kThemeMetricDisclosureButtonHeight = 17
kThemeMetricRoundButtonSize = 18
kThemeMetricPushButtonHeight = 19
kThemeMetricListHeaderHeight = 20
kThemeMetricSmallCheckBoxHeight = 21
kThemeMetricDisclosureButtonWidth = 22
kThemeMetricSmallDisclosureButtonHeight = 23
kThemeMetricSmallDisclosureButtonWidth = 24
kThemeMetricDisclosureTriangleHeight = 25
kThemeMetricDisclosureTriangleWidth = 26
kThemeMetricLittleArrowsHeight = 27
kThemeMetricLittleArrowsWidth = 28
kThemeMetricPaneSplitterHeight = 29
kThemeMetricPopupButtonHeight = 30
kThemeMetricSmallPopupButtonHeight = 31
kThemeMetricLargeProgressBarThickness = 32
kThemeMetricPullDownHeight = 33
kThemeMetricSmallPullDownHeight = 34
kThemeMetricSmallPushButtonHeight = 35
kThemeMetricSmallRadioButtonHeight = 36
kThemeMetricRelevanceIndicatorHeight = 37
kThemeMetricResizeControlHeight = 38
kThemeMetricSmallResizeControlHeight = 39
kThemeMetricLargeRoundButtonSize = 40
kThemeMetricHSliderHeight = 41
kThemeMetricHSliderTickHeight = 42
kThemeMetricSmallHSliderHeight = 43
kThemeMetricSmallHSliderTickHeight = 44
kThemeMetricVSliderWidth = 45
kThemeMetricVSliderTickWidth = 46
kThemeMetricSmallVSliderWidth = 47
kThemeMetricSmallVSliderTickWidth = 48
kThemeMetricTitleBarControlsHeight = 49
kThemeMetricCheckBoxWidth = 50
kThemeMetricSmallCheckBoxWidth = 51
kThemeMetricRadioButtonWidth = 52
kThemeMetricSmallRadioButtonWidth = 53
kThemeMetricSmallHSliderMinThumbWidth = 54
kThemeMetricSmallVSliderMinThumbHeight = 55
kThemeMetricSmallHSliderTickOffset = 56
kThemeMetricSmallVSliderTickOffset = 57
kThemeMetricNormalProgressBarThickness = 58
kThemeMetricProgressBarShadowOutset = 59
kThemeMetricSmallProgressBarShadowOutset = 60
kThemeMetricPrimaryGroupBoxContentInset = 61
kThemeMetricSecondaryGroupBoxContentInset = 62
kThemeMetricMenuMarkColumnWidth = 63
kThemeMetricMenuExcludedMarkColumnWidth = 64
kThemeMetricMenuMarkIndent = 65
kThemeMetricMenuTextLeadingEdgeMargin = 66
kThemeMetricMenuTextTrailingEdgeMargin = 67
kThemeMetricMenuIndentWidth = 68
kThemeMetricMenuIconTrailingEdgeMargin = 69
# appearanceBadBrushIndexErr = themeInvalidBrushErr
# appearanceProcessRegisteredErr = themeProcessRegisteredErr
# appearanceProcessNotRegisteredErr = themeProcessNotRegisteredErr
# appearanceBadTextColorIndexErr = themeBadTextColorErr
# appearanceThemeHasNoAccents = themeHasNoAccentsErr
# appearanceBadCursorIndexErr = themeBadCursorIndexErr
kThemeActiveDialogBackgroundBrush = kThemeBrushDialogBackgroundActive
kThemeInactiveDialogBackgroundBrush = kThemeBrushDialogBackgroundInactive
kThemeActiveAlertBackgroundBrush = kThemeBrushAlertBackgroundActive
kThemeInactiveAlertBackgroundBrush = kThemeBrushAlertBackgroundInactive
kThemeActiveModelessDialogBackgroundBrush = kThemeBrushModelessDialogBackgroundActive
kThemeInactiveModelessDialogBackgroundBrush = kThemeBrushModelessDialogBackgroundInactive
kThemeActiveUtilityWindowBackgroundBrush = kThemeBrushUtilityWindowBackgroundActive
kThemeInactiveUtilityWindowBackgroundBrush = kThemeBrushUtilityWindowBackgroundInactive
kThemeListViewSortColumnBackgroundBrush = kThemeBrushListViewSortColumnBackground
kThemeListViewBackgroundBrush = kThemeBrushListViewBackground
kThemeIconLabelBackgroundBrush = kThemeBrushIconLabelBackground
kThemeListViewSeparatorBrush = kThemeBrushListViewSeparator
kThemeChasingArrowsBrush = kThemeBrushChasingArrows
kThemeDragHiliteBrush = kThemeBrushDragHilite
kThemeDocumentWindowBackgroundBrush = kThemeBrushDocumentWindowBackground
kThemeFinderWindowBackgroundBrush = kThemeBrushFinderWindowBackground
kThemeActiveScrollBarDelimiterBrush = kThemeBrushScrollBarDelimiterActive
kThemeInactiveScrollBarDelimiterBrush = kThemeBrushScrollBarDelimiterInactive
kThemeFocusHighlightBrush = kThemeBrushFocusHighlight
kThemeActivePopupArrowBrush = kThemeBrushPopupArrowActive
kThemePressedPopupArrowBrush = kThemeBrushPopupArrowPressed
kThemeInactivePopupArrowBrush = kThemeBrushPopupArrowInactive
kThemeAppleGuideCoachmarkBrush = kThemeBrushAppleGuideCoachmark
kThemeActiveDialogTextColor = kThemeTextColorDialogActive
kThemeInactiveDialogTextColor = kThemeTextColorDialogInactive
kThemeActiveAlertTextColor = kThemeTextColorAlertActive
kThemeInactiveAlertTextColor = kThemeTextColorAlertInactive
kThemeActiveModelessDialogTextColor = kThemeTextColorModelessDialogActive
kThemeInactiveModelessDialogTextColor = kThemeTextColorModelessDialogInactive
kThemeActiveWindowHeaderTextColor = kThemeTextColorWindowHeaderActive
kThemeInactiveWindowHeaderTextColor = kThemeTextColorWindowHeaderInactive
kThemeActivePlacardTextColor = kThemeTextColorPlacardActive
kThemeInactivePlacardTextColor = kThemeTextColorPlacardInactive
kThemePressedPlacardTextColor = kThemeTextColorPlacardPressed
kThemeActivePushButtonTextColor = kThemeTextColorPushButtonActive
kThemeInactivePushButtonTextColor = kThemeTextColorPushButtonInactive
kThemePressedPushButtonTextColor = kThemeTextColorPushButtonPressed
kThemeActiveBevelButtonTextColor = kThemeTextColorBevelButtonActive
kThemeInactiveBevelButtonTextColor = kThemeTextColorBevelButtonInactive
kThemePressedBevelButtonTextColor = kThemeTextColorBevelButtonPressed
kThemeActivePopupButtonTextColor = kThemeTextColorPopupButtonActive
kThemeInactivePopupButtonTextColor = kThemeTextColorPopupButtonInactive
kThemePressedPopupButtonTextColor = kThemeTextColorPopupButtonPressed
kThemeIconLabelTextColor = kThemeTextColorIconLabel
kThemeListViewTextColor = kThemeTextColorListView
kThemeActiveDocumentWindowTitleTextColor = kThemeTextColorDocumentWindowTitleActive
kThemeInactiveDocumentWindowTitleTextColor = kThemeTextColorDocumentWindowTitleInactive
kThemeActiveMovableModalWindowTitleTextColor = kThemeTextColorMovableModalWindowTitleActive
kThemeInactiveMovableModalWindowTitleTextColor = kThemeTextColorMovableModalWindowTitleInactive
kThemeActiveUtilityWindowTitleTextColor = kThemeTextColorUtilityWindowTitleActive
kThemeInactiveUtilityWindowTitleTextColor = kThemeTextColorUtilityWindowTitleInactive
kThemeActivePopupWindowTitleColor = kThemeTextColorPopupWindowTitleActive
kThemeInactivePopupWindowTitleColor = kThemeTextColorPopupWindowTitleInactive
kThemeActiveRootMenuTextColor = kThemeTextColorRootMenuActive
kThemeSelectedRootMenuTextColor = kThemeTextColorRootMenuSelected
kThemeDisabledRootMenuTextColor = kThemeTextColorRootMenuDisabled
kThemeActiveMenuItemTextColor = kThemeTextColorMenuItemActive
kThemeSelectedMenuItemTextColor = kThemeTextColorMenuItemSelected
kThemeDisabledMenuItemTextColor = kThemeTextColorMenuItemDisabled
kThemeActivePopupLabelTextColor = kThemeTextColorPopupLabelActive
kThemeInactivePopupLabelTextColor = kThemeTextColorPopupLabelInactive
kAEThemeSwitch = kAEAppearanceChanged
kThemeNoAdornment = kThemeAdornmentNone
kThemeDefaultAdornment = kThemeAdornmentDefault
kThemeFocusAdornment = kThemeAdornmentFocus
kThemeRightToLeftAdornment = kThemeAdornmentRightToLeft
kThemeDrawIndicatorOnly = kThemeAdornmentDrawIndicatorOnly
kThemeBrushPassiveAreaFill = kThemeBrushStaticAreaFill
kThemeMetricCheckBoxGlyphHeight = kThemeMetricCheckBoxHeight
kThemeMetricRadioButtonGlyphHeight = kThemeMetricRadioButtonHeight
kThemeMetricDisclosureButtonSize = kThemeMetricDisclosureButtonHeight
kThemeMetricBestListHeaderHeight = kThemeMetricListHeaderHeight
kThemeMetricSmallProgressBarThickness = kThemeMetricNormalProgressBarThickness
kThemeMetricProgressBarThickness = kThemeMetricLargeProgressBarThickness
kThemeScrollBar = kThemeMediumScrollBar
kThemeSlider = kThemeMediumSlider
kThemeProgressBar = kThemeMediumProgressBar
kThemeIndeterminateBar = kThemeMediumIndeterminateBar
| gpl-2.0 | 4,977,986,203,471,583,000 | 275,194,366,849,830,880 | 41.080247 | 95 | 0.859946 | false |
alihalabyah/flexx | flexx/pyscript/parser3.py | 21 | 22006 | """
Python Builtins
---------------
Most builtin functions (that make sense in JS) are automatically
translated to JavaScript: isinstance, issubclass, callable, hasattr,
getattr, setattr, delattr, print, len, max, min, chr, ord, dict, list,
tuple, range, pow, sum, round, int, float, str, bool, abs, divmod, all,
any, enumerate, zip, reversed, sorted, filter, map.
.. pyscript_example::
# "self" is replaced with "this"
self.foo
# Printing just works
print('some test')
print(a, b, c, sep='-')
# Getting the length of a string or array
len(foo)
# Rounding and abs
round(foo) # round to nearest integer
int(foo) # round towards 0 as in Python
abs(foo)
# min and max
min(foo)
min(a, b, c)
max(foo)
max(a, b, c)
# divmod
a, b = divmod(100, 7) # -> 14, 2
# Aggregation
sum(foo)
all(foo)
any(foo)
# Turning things into numbers, bools and strings
str(s)
float(x)
bool(y)
int(z) # this rounds towards zero like in Python
chr(65) # -> 'A'
ord('A') # -> 65
# Turning things into lists and dicts
dict([['foo', 1], ['bar', 2]]) # -> {'foo': 1, 'bar': 2}
list('abc') # -> ['a', 'b', 'c']
dict(other_dict) # make a copy
list(other_list) # make copy
The isinstance function (and friends)
-------------------------------------
The ``isinstance()`` function works for all JS primitive types, but also
for user-defined classes.
.. pyscript_example::
# Basic types
isinstance(3, float) # in JS there are no ints
isinstance('', str)
isinstance([], list)
isinstance({}, dict)
isinstance(foo, types.FunctionType)
# Can also use JS strings
isinstance(3, 'number')
isinstance('', 'string')
isinstance([], 'array')
isinstance({}, 'object')
isinstance(foo, 'function')
# You can use it on your own types too ...
isinstance(x, MyClass)
isinstance(x, 'MyClass') # equivalent
isinstance(x, 'Object') # also yields true (subclass of Object)
# issubclass works too
issubclass(Foo, Bar)
# As well as callable
callable(foo)
hasattr, getattr, setattr and delattr
-------------------------------------
.. pyscript_example::
a = {'foo': 1, 'bar': 2}
hasattr(a, 'foo') # -> True
hasattr(a, 'fooo') # -> False
hasattr(null, 'foo') # -> False
getattr(a, 'foo') # -> 1
getattr(a, 'fooo') # -> raise AttributeError
getattr(a, 'fooo', 3) # -> 3
getattr(null, 'foo', 3) # -> 3
setattr(a, 'foo', 2)
delattr(a, 'foo')
Creating sequences
------------------
.. pyscript_example::
range(10)
range(2, 10, 2)
range(100, 0, -1)
reversed(foo)
sorted(foo)
enumerate(foo)
zip(foo, bar)
filter(func, foo)
map(func, foo)
List methods
------------
.. pyscript_example::
# Call a.append() if it exists, otherwise a.push()
a.append(x)
# Similar for remove()
a.remove(x)
Dict methods
------------
.. pyscript_example::
a = {'foo': 3}
a['foo']
a.get('foo', 0)
a.get('foo')
a.keys()
Str methods
-----------
.. pyscript_example::
"foobar".startswith('foo')
Additional sugar
----------------
.. pyscript_example::
# Get time (number of seconds since epoch)
print(time.time())
# High resolution timer (as in time.perf_counter on Python 3)
t0 = time.perf_counter()
do_something()
t1 = time.perf_counter()
print('this took me', t1-t0, 'seconds')
"""
import ast
from .parser2 import Parser2, JSError, unify # noqa
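# A minimal usage sketch (hedged: assumes the public `py2js` helper exported
# by flexx.pyscript, which drives parsers such as the one below; illustrative only):
#
#     from flexx.pyscript import py2js
#     print(py2js("print(len([1, 2, 3]))"))
#     # -> roughly: console.log([1, 2, 3].length);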
# List of possibly relevant builtin functions:
#
# abs all any bin bool callable chr complex delattr dict dir divmod
# enumerate eval exec filter float format getattr globals hasattr hash
# hex id int isinstance issubclass iter len list locals map max min next
# object oct ord pow print property range repr reversed round set setattr
# slice sorted str sum super tuple type vars zip
#
# Further, all methods of: list, dict, str, set?
# todo: make these more robust by not applying the Python version if a JS version exists.
class Parser3(Parser2):
""" Parser to transcompile Python to JS, allowing more Pythonic
code, like ``self``, ``print()``, ``len()``, list methods, etc.
"""
NAME_MAP = {'self': 'this', }
NAME_MAP.update(Parser2.NAME_MAP)
## Hardcore functions (hide JS functions with the same name)
def function_isinstance(self, node):
if len(node.args) != 2:
raise JSError('isinstance() expects two arguments.')
ob = unify(self.parse(node.args[0]))
cls = unify(self.parse(node.args[1]))
if cls[0] in '"\'':
cls = cls[1:-1] # remove quotes
BASIC_TYPES = ('number', 'boolean', 'string', 'function', 'array',
'object', 'null', 'undefined')
MAP = {'[int, float]': 'number', '[float, int]': 'number', 'float': 'number',
'str': 'string', 'basestring': 'string', 'string_types': 'string',
'bool': 'boolean',
'FunctionType': 'function', 'types.FunctionType': 'function',
'list': 'array', 'tuple': 'array',
'[list, tuple]': 'array', '[tuple, list]': 'array',
'dict': 'object',
}
cmp = MAP.get(cls, cls)
if cmp.lower() in BASIC_TYPES:
# Basic type, use Object.prototype.toString
# http://stackoverflow.com/questions/11108877
return ["({}).toString.call(",
ob,
").match(/\s([a-zA-Z]+)/)[1].toLowerCase() === ",
repr(cmp.lower())
]
else:
# User defined type, use instanceof
# http://tobyho.com/2011/01/28/checking-types-in-javascript/
cmp = unify(cls)
if cmp[0] == '(':
raise JSError('isinstance() can only compare to simple types')
return ob, " instanceof ", cmp
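# Illustrative note (derived from the branches above, hedged): isinstance(3, float)
# compiles to the basic-type check
#   ({}).toString.call(3).match(/\s([a-zA-Z]+)/)[1].toLowerCase() === 'number'
# while isinstance(x, MyClass) compiles to `x instanceof MyClass`.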
def function_issubclass(self, node):
# issubclass only needs to work on custom classes
if len(node.args) != 2:
raise JSError('issubclass() expects two arguments.')
cls1 = unify(self.parse(node.args[0]))
cls2 = unify(self.parse(node.args[1]))
if cls2 == 'object':
cls2 = 'Object'
return '(%s.prototype instanceof %s)' % (cls1, cls2)
def function_hasattr(self, node):
if len(node.args) == 2:
ob = unify(self.parse(node.args[0]))
name = unify(self.parse(node.args[1]))
dummy1 = self.dummy()
t = "((%s=%s) !== undefined && %s !== null && %s[%s] !== undefined)"
return t % (dummy1, ob, dummy1, dummy1, name)
else:
raise JSError('hasattr() expects two arguments.')
def function_getattr(self, node):
is_ok = "(ob !== undefined && ob !== null && ob[name] !== undefined)"
if len(node.args) == 2:
ob = unify(self.parse(node.args[0]))
name = unify(self.parse(node.args[1]))
func = "(function (ob, name) {if %s {return ob[name];} " % is_ok
func += "else {var e = Error(name); e.name='AttributeError'; throw e;}})"
return func + '(%s, %s)' % (ob, name)
elif len(node.args) == 3:
ob = unify(self.parse(node.args[0]))
name = unify(self.parse(node.args[1]))
default = unify(self.parse(node.args[2]))
func = "(function (ob, name, dflt) {if %s {return ob[name];} " % is_ok
func += "else {return dflt;}})"
return func + '(%s, %s, %s)' % (ob, name, default)
else:
raise JSError('getattr() expects two or three arguments.')
def function_setattr(self, node):
is_ok = "(ob !== undefined && ob !== null && ob[name] !== undefined)"
if len(node.args) == 3:
ob = unify(self.parse(node.args[0]))
name = unify(self.parse(node.args[1]))
value = unify(self.parse(node.args[2]))
return '%s[%s] = %s' % (ob, name, value)
else:
raise JSError('setattr() expects three arguments.')
def function_delattr(self, node):
if len(node.args) == 2:
ob = unify(self.parse(node.args[0]))
name = unify(self.parse(node.args[1]))
return 'delete %s[%s]' % (ob, name)
else:
raise JSError('delattr() expects two arguments.')
def function_print(self, node):
# Process keywords
sep, end = '" "', ''
for kw in node.keywords:
if kw.arg == 'sep':
sep = ''.join(self.parse(kw.value))
elif kw.arg == 'end':
end = ''.join(self.parse(kw.value))
elif kw.arg in ('file', 'flush'):
raise JSError('print() file and flush args not supported')
else:
raise JSError('Invalid argument for print(): %r' % kw.arg)
# Combine args
args = [unify(self.parse(arg)) for arg in node.args]
end = (" + %s" % end) if (args and end and end != '\n') else ''
combiner = ' + %s + ' % sep
args_concat = combiner.join(args)
return 'console.log(' + args_concat + end + ')'
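# Illustrative example (derived from the code above, hedged):
#   print(a, b, sep='-')  ->  console.log(a + '-' + b)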
def function_len(self, node):
if len(node.args) == 1:
return unify(self.parse(node.args[0])), '.length'
else:
return None # don't apply this feature
def function_max(self, node):
if len(node.args) == 0:
raise JSError('max() needs at least one argument')
elif len(node.args) == 1:
arg = ''.join(self.parse(node.args[0]))
return 'Math.max.apply(null, ', arg, ')'
else:
args = ', '.join([unify(self.parse(arg)) for arg in node.args])
return 'Math.max(', args, ')'
def function_min(self, node):
if len(node.args) == 0:
raise JSError('min() needs at least one argument')
elif len(node.args) == 1:
arg = ''.join(self.parse(node.args[0]))
return 'Math.min.apply(null, ', arg, ')'
else:
args = ', '.join([unify(self.parse(arg)) for arg in node.args])
return 'Math.min(', args, ')'
def function_callable(self, node):
if len(node.args) == 1:
arg = unify(self.parse(node.args[0]))
return '(typeof %s === "function")' % arg
else:
raise JSError('callable() needs exactly one argument')
def function_chr(self, node):
if len(node.args) == 1:
arg = ''.join(self.parse(node.args[0]))
return 'String.fromCharCode(%s)' % arg
else:
raise JSError('chr() needs exactly one argument')
def function_ord(self, node):
if len(node.args) == 1:
arg = ''.join(self.parse(node.args[0]))
return '%s.charCodeAt(0)' % arg
else:
raise JSError('ord() needs exactly one argument')
def function_dict(self, node):
if len(node.args) == 0:
return '{}'
if len(node.args) == 1:
code = '(function(x) {var t, i, keys, r={};'
code += 'if (Array.isArray(x)) {'
code += 'for (i=0; i<x.length; i++) {t=x[i]; r[t[0]] = t[1];} return r;'
code += '} else {'
code += 'keys = Object.keys(x); for (i=0; i<keys.length; i++) {t=keys[i]; r[t] = x[t];} return r;}})'
return code + '(%s)' % ''.join(self.parse(node.args[0]))
else:
raise JSError('dict() needs zero or one argument')
def function_list(self, node):
if len(node.args) == 0:
return '[]'
if len(node.args) == 1:
code = '(function(x) {var r=[];'
code += 'if (typeof x==="object" && !Array.isArray(x)) {x=Object.keys(x)}'
code += 'for (var i=0; i<x.length; i++) {r.push(x[i]);} return r;})'
return code + '(%s)' % ''.join(self.parse(node.args[0]))
else:
raise JSError('list() needs zero or one argument')
def function_tuple(self, node):
return self.function_list(node)
def function_range(self, node):
fun = 'function (start, end, step) {var i, res = []; for (i=start; i<end; i+=step) {res.push(i);} return res;}'
if len(node.args) == 1:
end = unify(self.parse(node.args[0]))
return '(%s)(0, %s, 1)' % (fun, end)
elif len(node.args) == 2:
start = unify(self.parse(node.args[0]))
end = unify(self.parse(node.args[1]))
return '(%s)(%s, %s, 1)' % (fun, start, end)
elif len(node.args) == 3:
start = unify(self.parse(node.args[0]))
end = unify(self.parse(node.args[1]))
step = ''.join(self.parse(node.args[2]))
if step.lstrip('+-').isnumeric() and float(step) < 0:
fun = fun.replace('<', '>')
return '(%s)(%s, %s, %s)' % (fun, start, end, step)
else:
raise JSError('range() needs 1, 2 or 3 arguments')
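# Illustrative example (derived from the template above, hedged):
#   range(2, 10, 2) compiles to
#   (function (start, end, step) {var i, res = []; for (i=start; i<end; i+=step)
#    {res.push(i);} return res;})(2, 10, 2)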
## Normal functions (can be overloaded)
def function_pow(self, node):
if len(node.args) == 2:
self.vars_for_functions['pow'] = 'Math.pow'
return None
else:
raise JSError('pow() needs exactly two arguments')
def function_sum(self, node):
if len(node.args) == 1:
code = 'function (x) {return x.reduce(function(a, b) {return a + b;});}'
self.vars_for_functions['sum'] = code
return None
else:
raise JSError('sum() needs exactly one argument')
def function_round(self, node):
if len(node.args) == 1:
self.vars_for_functions['round'] = 'Math.round'
else:
raise JSError('round() needs exactly one argument')
def function_int(self, node):
# No need to turn into number first
if len(node.args) == 1:
code = 'function (x) {return x<0 ? Math.ceil(x): Math.floor(x);}'
self.vars_for_functions['int'] = code
else:
raise JSError('int() needs one argument')
def function_float(self, node):
if len(node.args) == 1:
self.vars_for_functions['float'] = 'Number'
else:
raise JSError('float() needs one argument')
def function_str(self, node):
if len(node.args) in (0, 1):
self.vars_for_functions['str'] = 'String'
else:
raise JSError('str() needs zero or one argument')
def function_repr(self, node):
if len(node.args) == 1:
# code = 'function (x) {if (typeof x === "object") {return JSON.stringify(x);}'
# code += ' else if (typeof x === "string") {return "\'" + x + "\'";}'
# code += ' else {return x.toString();}}'
self.vars_for_functions['repr'] = 'JSON.stringify'
else:
raise JSError('repr() needs one argument')
def function_bool(self, node):
if len(node.args) == 1:
self._wrap_truthy(ast.Name('x', '')) # trigger _truthy function declaration
self.vars_for_functions['bool'] = 'function (x) {return Boolean(_truthy(x));}'
else:
raise JSError('bool() needs one argument')
def function_abs(self, node):
if len(node.args) == 1:
self.vars_for_functions['abs'] = 'Math.abs'
else:
raise JSError('abs() needs one argument')
def function_divmod(self, node):
if len(node.args) == 2:
code = 'function (x, y) {var m = x % y; return [(x-m)/y, m];}'
self.vars_for_functions['divmod'] = code
else:
raise JSError('divmod() needs two arguments')
def function_all(self, node):
if len(node.args) == 1:
self._wrap_truthy(ast.Name('x', '')) # trigger _truthy function declaration
code = 'function (x) {for (var i=0; i<x.length; i++) {if (!_truthy(x[i])){return false}} return true;}'
self.vars_for_functions['all'] = code
else:
raise JSError('all() needs one argument')
def function_any(self, node):
if len(node.args) == 1:
self._wrap_truthy(ast.Name('x', '')) # trigger _truthy function declaration
code = 'function (x) {for (var i=0; i<x.length; i++) {if (_truthy(x[i])){return true}} return false;}'
self.vars_for_functions['any'] = code
else:
raise JSError('any() needs one argument')
def function_enumerate(self, node):
if len(node.args) == 1:
code = 'function (iter) { var i, res=[];'
code += self._make_iterable('iter', 'iter', False)
code += 'for (i=0; i<iter.length; i++) {res.push([i, iter[i]]);}'
code += 'return res;}'
self.vars_for_functions['enumerate'] = code
else:
raise JSError('enumerate() needs one argument')
def function_zip(self, node):
if len(node.args) == 2:
code = 'function (iter1, iter2) { var i, res=[];'
code += self._make_iterable('iter1', 'iter1', False)
code += self._make_iterable('iter2', 'iter2', False)
code += 'var len = Math.min(iter1.length, iter2.length);'
code += 'for (i=0; i<len; i++) {res.push([iter1[i], iter2[i]]);}'
code += 'return res;}'
self.vars_for_functions['zip'] = code
else:
raise JSError('zip() needs two arguments')
def function_reversed(self, node):
if len(node.args) == 1:
code = 'function (iter) {'
code += self._make_iterable('iter', 'iter', False)
code += 'return iter.slice().reverse();}'
self.vars_for_functions['reversed'] = code
else:
raise JSError('reversed() needs one argument')
def function_sorted(self, node):
if len(node.args) == 1:
code = 'function (iter) {'
code += self._make_iterable('iter', 'iter', False)
code += 'return iter.slice().sort();}'
self.vars_for_functions['sorted'] = code
else:
raise JSError('sorted() needs one argument')
def function_filter(self, node):
if len(node.args) == 2:
code = 'function (func, iter) {'
code += 'if (typeof func === "undefined" || func === null) {func = function(x) {return x;}}'
code += 'return iter.filter(func);}'
self.vars_for_functions['filter'] = code
else:
raise JSError('filter() needs two arguments')
def function_map(self, node):
if len(node.args) == 2:
code = 'function (func, iter) {return iter.map(func);}'
self.vars_for_functions['map'] = code
else:
raise JSError('map() needs two arguments')
## List methods
def method_append(self, node, base):
if len(node.args) == 1:
code = []
code.append('(%s.append || %s.push).apply(%s, [' % (base, base, base))
code += self.parse(node.args[0])
code.append('])')
return code
def method_remove(self, node, base):
if len(node.args) == 1:
code = []
remove_func = 'function (x) {this.splice(this.indexOf(x), 1);}'
code.append('(%s.remove || %s).apply(%s, [' % (base, remove_func, base))
code += self.parse(node.args[0])
code.append('])')
return code
## Dict methods
def method_get(self, node, base):
if len(node.args) in (1, 2):
# Get name to call object - use simple name if we can
ob_name = base
ob_name1 = base
if not base.isalnum():
dummy = self.dummy()
ob_name = dummy
ob_name1 = '(%s=%s)' % (dummy, base)
# Get args
key = unify(self.parse(node.args[0]))
default = 'null'
normal_args = ''.join(self.parse(node.args[0]))
if len(node.args) == 2:
default = unify(self.parse(node.args[1]))
normal_args += ', ' + ''.join(self.parse(node.args[1]))
# Compose
dict_get = '(%s[%s] || %s)' % (ob_name, key, default)
normal_get = '%s.get(%s)' % (ob_name, normal_args)
return '(/*py-dict.get*/typeof %s.get==="function" ? %s : %s)' % (
ob_name1, normal_get, dict_get)
def method_keys(self, node, base):
if len(node.args) == 0:
return 'Object.keys(%s)' % base
## Str methods
def method_startswith(self, node, base):
if len(node.args) == 1:
arg = unify(self.parse(node.args[0]))
return unify(base), '.indexOf(', arg, ') == 0'
## Extra functions / methods
def method_time(self, node, base): # time.time()
if base == 'time':
if len(node.args) == 0:
return '((new Date()).getTime() / 1000)'
else:
raise JSError('time() needs no argument')
def method_perf_counter(self, node, base): # time.perf_counter()
if base == 'time':
if len(node.args) == 0:
# Work in nodejs and browser
dummy = self.dummy()
return '(typeof(process) === "undefined" ? performance.now()*1e-3 : ((%s=process.hrtime())[0] + %s[1]*1e-9))' % (dummy, dummy)
else:
raise JSError('perf_counter() needs no argument')
| bsd-2-clause | -4,930,930,521,436,102,000 | -7,552,931,461,307,124,000 | 33.81962 | 142 | 0.513087 | false |
jonycgn/scipy | scipy/stats/tests/test_binned_statistic.py | 50 | 8793 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_array_almost_equal, run_module_suite
from scipy.stats import (binned_statistic, binned_statistic_2d,
binned_statistic_dd)
from scipy._lib.six import u
from common_tests import check_named_results
class TestBinnedStatistic(object):
@classmethod
def setup_class(cls):
np.random.seed(9865)
cls.x = np.random.random(100)
cls.y = np.random.random(100)
cls.v = np.random.random(100)
cls.X = np.random.random((100, 3))
def test_1d_count(self):
x = self.x
v = self.v
count1, edges1, bc = binned_statistic(x, v, 'count', bins=10)
count2, edges2 = np.histogram(x, bins=10)
assert_array_almost_equal(count1, count2)
assert_array_almost_equal(edges1, edges2)
def test_1d_result_attributes(self):
x = self.x
v = self.v
res = binned_statistic(x, v, 'count', bins=10)
attributes = ('statistic', 'bin_edges', 'binnumber')
check_named_results(res, attributes)
def test_1d_sum(self):
x = self.x
v = self.v
sum1, edges1, bc = binned_statistic(x, v, 'sum', bins=10)
sum2, edges2 = np.histogram(x, bins=10, weights=v)
assert_array_almost_equal(sum1, sum2)
assert_array_almost_equal(edges1, edges2)
def test_1d_mean(self):
x = self.x
v = self.v
stat1, edges1, bc = binned_statistic(x, v, 'mean', bins=10)
stat2, edges2, bc = binned_statistic(x, v, np.mean, bins=10)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_1d_std(self):
x = self.x
v = self.v
stat1, edges1, bc = binned_statistic(x, v, 'std', bins=10)
stat2, edges2, bc = binned_statistic(x, v, np.std, bins=10)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_1d_median(self):
x = self.x
v = self.v
stat1, edges1, bc = binned_statistic(x, v, 'median', bins=10)
stat2, edges2, bc = binned_statistic(x, v, np.median, bins=10)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_1d_bincode(self):
x = self.x[:20]
v = self.v[:20]
count1, edges1, bc = binned_statistic(x, v, 'count', bins=3)
bc2 = np.array([3, 2, 1, 3, 2, 3, 3, 3, 3, 1, 1, 3, 3, 1, 2, 3, 1,
1, 2, 1])
bcount = [(bc == i).sum() for i in np.unique(bc)]
assert_array_almost_equal(bc, bc2)
assert_array_almost_equal(bcount, count1)
def test_1d_range_keyword(self):
# Regression test for gh-3063, range can be (min, max) or [(min, max)]
np.random.seed(9865)
x = np.arange(30)
data = np.random.random(30)
mean, bins, _ = binned_statistic(x[:15], data[:15])
mean_range, bins_range, _ = binned_statistic(x, data, range=[(0, 14)])
mean_range2, bins_range2, _ = binned_statistic(x, data, range=(0, 14))
assert_array_almost_equal(mean, mean_range)
assert_array_almost_equal(bins, bins_range)
assert_array_almost_equal(mean, mean_range2)
assert_array_almost_equal(bins, bins_range2)
def test_2d_count(self):
x = self.x
y = self.y
v = self.v
count1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'count', bins=5)
count2, binx2, biny2 = np.histogram2d(x, y, bins=5)
assert_array_almost_equal(count1, count2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_result_attributes(self):
x = self.x
y = self.y
v = self.v
res = binned_statistic_2d(x, y, v, 'count', bins=5)
attributes = ('statistic', 'x_edge', 'y_edge', 'binnumber')
check_named_results(res, attributes)
def test_2d_sum(self):
x = self.x
y = self.y
v = self.v
sum1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'sum', bins=5)
sum2, binx2, biny2 = np.histogram2d(x, y, bins=5, weights=v)
assert_array_almost_equal(sum1, sum2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_mean(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'mean', bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.mean, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_mean_unicode(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, u('mean'), bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.mean, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_std(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'std', bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.std, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_median(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'median', bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.median, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_bincode(self):
x = self.x[:20]
y = self.y[:20]
v = self.v[:20]
count1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'count', bins=3)
bc2 = np.array([17, 11, 6, 16, 11, 17, 18, 17, 17, 7, 6, 18, 16,
6, 11, 16, 6, 6, 11, 8])
bcount = [(bc == i).sum() for i in np.unique(bc)]
assert_array_almost_equal(bc, bc2)
count1adj = count1[count1.nonzero()]
assert_array_almost_equal(bcount, count1adj)
def test_dd_count(self):
X = self.X
v = self.v
count1, edges1, bc = binned_statistic_dd(X, v, 'count', bins=3)
count2, edges2 = np.histogramdd(X, bins=3)
assert_array_almost_equal(count1, count2)
assert_array_almost_equal(edges1, edges2)
def test_dd_result_attributes(self):
X = self.X
v = self.v
res = binned_statistic_dd(X, v, 'count', bins=3)
attributes = ('statistic', 'bin_edges', 'binnumber')
check_named_results(res, attributes)
def test_dd_sum(self):
X = self.X
v = self.v
sum1, edges1, bc = binned_statistic_dd(X, v, 'sum', bins=3)
sum2, edges2 = np.histogramdd(X, bins=3, weights=v)
assert_array_almost_equal(sum1, sum2)
assert_array_almost_equal(edges1, edges2)
def test_dd_mean(self):
X = self.X
v = self.v
stat1, edges1, bc = binned_statistic_dd(X, v, 'mean', bins=3)
stat2, edges2, bc = binned_statistic_dd(X, v, np.mean, bins=3)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_dd_std(self):
X = self.X
v = self.v
stat1, edges1, bc = binned_statistic_dd(X, v, 'std', bins=3)
stat2, edges2, bc = binned_statistic_dd(X, v, np.std, bins=3)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_dd_median(self):
X = self.X
v = self.v
stat1, edges1, bc = binned_statistic_dd(X, v, 'median', bins=3)
stat2, edges2, bc = binned_statistic_dd(X, v, np.median, bins=3)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_dd_bincode(self):
X = self.X[:20]
v = self.v[:20]
count1, edges1, bc = binned_statistic_dd(X, v, 'count', bins=3)
bc2 = np.array([63, 33, 86, 83, 88, 67, 57, 33, 42, 41, 82, 83, 92,
32, 36, 91, 43, 87, 81, 81])
bcount = [(bc == i).sum() for i in np.unique(bc)]
assert_array_almost_equal(bc, bc2)
count1adj = count1[count1.nonzero()]
assert_array_almost_equal(bcount, count1adj)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | -7,444,603,491,044,748,000 | -7,769,806,415,382,747,000 | 30.858696 | 81 | 0.573183 | false |
Shraddha512/servo | tests/wpt/run.py | 13 | 1745 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys, os, argparse
here = os.path.split(__file__)[0]
servo_root = os.path.abspath(os.path.join(here, "..", ".."))
def wptsubdir(*args):
return os.path.join(here, *args)
# Imports
sys.path.append(wptsubdir("web-platform-tests"))
sys.path.append(wptsubdir("web-platform-tests", "tools", "scripts"))
from wptrunner import wptrunner, wptcommandline
import manifest
def update_manifest():
manifest.update_manifest(wptsubdir("web-platform-tests"),
rebuild=False,
experimental_include_local_changes=True,
path=wptsubdir("metadata", "MANIFEST.json"))
return True
def run_tests(**kwargs):
if not os.path.isfile(wptsubdir("metadata", "MANIFEST.json")):
raise Exception("Manifest not found. Please use --update-manifest in WPTARGS to create one")
wptrunner.setup_logging(kwargs, {"raw": sys.stdout})
return wptrunner.run_tests(**kwargs)
def set_defaults(args):
args.include_manifest = args.include_manifest if args.include_manifest else wptsubdir("include.ini")
args.product = "servo"
rv = vars(args)
wptcommandline.check_args(rv)
return rv
def main():
parser = wptcommandline.create_parser()
parser.add_argument('--update-manifest', dest='update_manifest', action='store_true')
args = parser.parse_args()
if args.update_manifest:
return update_manifest()
kwargs = set_defaults(args)
return run_tests(**kwargs)
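# Illustrative invocations (hedged; --update-manifest is defined above, any
# other flags come from wptrunner's standard command line and are assumptions):
#   python run.py --update-manifest
#   python run.py --binary path/to/servo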
if __name__ == "__main__":
sys.exit(0 if main() else 1)
| mpl-2.0 | 6,261,788,276,603,492,000 | -7,393,095,126,382,772,000 | 34.612245 | 104 | 0.664756 | false |
mikedh/trimesh | trimesh/proximity.py | 1 | 19400 | """
proximity.py
---------------
Query mesh-point proximity.
"""
import numpy as np
from . import util
from .grouping import group_min
from .constants import tol, log_time
from .triangles import closest_point as closest_point_corresponding
from .triangles import points_to_barycentric
try:
from scipy.spatial import cKDTree
except BaseException as E:
from .exceptions import closure
cKDTree = closure(E)
def nearby_faces(mesh, points):
"""
For each point find nearby faces relatively quickly.
The closest point on the mesh to the queried point is guaranteed to be
on one of the faces listed.
Does this by finding the nearest vertex on the mesh to each point, and
then returns all the faces that intersect the axis aligned bounding box
centered at the queried point and extending to the nearest vertex.
Parameters
----------
mesh : trimesh.Trimesh
Mesh to query.
points : (n, 3) float
Points in space
Returns
-----------
candidates : (points,) int
Sequence of indexes for mesh.faces
"""
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(points, (-1, 3)):
raise ValueError('points must be (n,3)!')
# an r-tree containing the axis aligned bounding box for every triangle
rtree = mesh.triangles_tree
# a kd-tree containing every vertex of the mesh
kdtree = cKDTree(mesh.vertices[mesh.referenced_vertices])
# query the distance to the nearest vertex to get AABB of a sphere
distance_vertex = kdtree.query(points)[0].reshape((-1, 1))
distance_vertex += tol.merge
# axis aligned bounds
bounds = np.column_stack((points - distance_vertex,
points + distance_vertex))
# faces that intersect axis aligned bounding box
candidates = [list(rtree.intersection(b)) for b in bounds]
return candidates
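# Illustrative usage sketch (hedged; assumes an existing trimesh.Trimesh `mesh`):
#   idx_lists = nearby_faces(mesh, [[0.0, 0.0, 0.0]])
#   candidate_faces = mesh.faces[idx_lists[0]]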
def closest_point_naive(mesh, points):
"""
Given a mesh and a list of points find the closest point
on any triangle.
Does this by constructing a very large intermediate array and
comparing every point to every triangle.
Parameters
----------
mesh : Trimesh
Takes mesh to have same interfaces as `closest_point`
points : (m, 3) float
Points in space
Returns
----------
closest : (m, 3) float
Closest point on triangles for each point
distance : (m,) float
Distances between point and triangle
triangle_id : (m,) int
Index of triangle containing closest point
"""
# get triangles from mesh
triangles = mesh.triangles.view(np.ndarray)
# establish that input points are sane
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(triangles, (-1, 3, 3)):
raise ValueError('triangles shape incorrect')
if not util.is_shape(points, (-1, 3)):
raise ValueError('points must be (n,3)')
# create a giant tiled array of each point tiled len(triangles) times
points_tiled = np.tile(points, (1, len(triangles)))
on_triangle = np.array([closest_point_corresponding(
triangles, i.reshape((-1, 3))) for i in points_tiled])
# distance squared
distance_2 = [((i - q)**2).sum(axis=1)
for i, q in zip(on_triangle, points)]
triangle_id = np.array([i.argmin() for i in distance_2])
# closest cartesian point
closest = np.array([g[i] for i, g in zip(triangle_id, on_triangle)])
distance = np.array([g[i] for i, g in zip(triangle_id, distance_2)]) ** .5
return closest, distance, triangle_id
def closest_point(mesh, points):
"""
Given a mesh and a list of points find the closest point
on any triangle.
Parameters
----------
mesh : trimesh.Trimesh
Mesh to query
points : (m, 3) float
Points in space
Returns
----------
closest : (m, 3) float
Closest point on triangles for each point
distance : (m,) float
Distance to mesh.
triangle_id : (m,) int
Index of triangle containing closest point
"""
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(points, (-1, 3)):
raise ValueError('points must be (n,3)!')
# do a tree- based query for faces near each point
candidates = nearby_faces(mesh, points)
# view triangles as an ndarray so we don't have to recompute
# the MD5 during all of the subsequent advanced indexing
triangles = mesh.triangles.view(np.ndarray)
# create the corresponding list of triangles
# and query points to send to the closest_point function
all_candidates = np.concatenate(candidates)
num_candidates = list(map(len, candidates))
tile_idxs = np.repeat(np.arange(len(points)), num_candidates)
query_point = points[tile_idxs, :]
query_tri = triangles[all_candidates]
# do the computation for closest point
query_close = closest_point_corresponding(query_tri, query_point)
query_group = np.cumsum(num_candidates)[:-1]
# vectors and distances for
# closest point to query point
query_vector = query_point - query_close
query_distance = util.diagonal_dot(query_vector, query_vector)
# get best two candidate indices by arg-sorting the per-query_distances
qds = np.array_split(query_distance, query_group)
idxs = np.int32([qd.argsort()[:2] if len(qd) > 1 else [0, 0] for qd in qds])
idxs[1:] += query_group.reshape(-1, 1)
# points, distances and triangle ids for best two candidates
two_points = query_close[idxs]
two_dists = query_distance[idxs]
two_candidates = all_candidates[idxs]
# the first candidate is the best result for unambiguous cases
result_close = query_close[idxs[:, 0]]
result_tid = two_candidates[:, 0]
result_distance = two_dists[:, 0]
# however: same closest point on two different faces
# find the best one and correct triangle ids if necessary
check_distance = two_dists.ptp(axis=1) < tol.merge
check_magnitude = np.all(np.abs(two_dists) > tol.merge, axis=1)
# mask results where corrections may apply
c_mask = np.bitwise_and(check_distance, check_magnitude)
# get two face normals for the candidate points
normals = mesh.face_normals[two_candidates[c_mask]]
# compute normalized surface-point to query-point vectors
vectors = (query_vector[idxs[c_mask]] /
two_dists[c_mask].reshape(-1, 2, 1) ** 0.5)
# compare enclosed angle for both face normals
dots = (normals * vectors).sum(axis=2)
# take the idx with the most positive angle
# allows for selecting the correct candidate triangle id
c_idxs = dots.argmax(axis=1)
# correct triangle ids where necessary
# closest point and distance remain valid
result_tid[c_mask] = two_candidates[c_mask, c_idxs]
result_distance[c_mask] = two_dists[c_mask, c_idxs]
result_close[c_mask] = two_points[c_mask, c_idxs]
# we were comparing the distance squared so
# now take the square root in one vectorized operation
result_distance **= .5
return result_close, result_distance, result_tid
def signed_distance(mesh, points):
"""
Find the signed distance from a mesh to a list of points.
* Points OUTSIDE the mesh will have NEGATIVE distance
* Points within tol.merge of the surface will have POSITIVE distance
* Points INSIDE the mesh will have POSITIVE distance
Parameters
-----------
mesh : trimesh.Trimesh
Mesh to query.
points : (n, 3) float
Points in space
Returns
----------
signed_distance : (n,) float
Signed distance from point to mesh
"""
# make sure we have a numpy array
points = np.asanyarray(points, dtype=np.float64)
# find the closest point on the mesh to the queried points
closest, distance, triangle_id = closest_point(mesh, points)
# we only care about nonzero distances
nonzero = distance > tol.merge
if not nonzero.any():
return distance
# For closest points that project directly into the triangle, compute the sign
# from the triangle normal. Project each point onto the closest triangle's plane.
nonzero = np.where(nonzero)[0]
normals = mesh.face_normals[triangle_id]
projection = (points[nonzero] -
(normals[nonzero].T * np.einsum(
"ij,ij->i",
points[nonzero] - closest[nonzero],
normals[nonzero])).T)
# Determine if the projection lies within the closest triangle
barycentric = points_to_barycentric(
mesh.triangles[triangle_id[nonzero]],
projection)
ontriangle = ~((
(barycentric < -tol.merge) | (barycentric > 1 + tol.merge)
).any(axis=1))
# Where the projection does lie in the triangle, compare the vector from the
# projection to the query point against the triangle normal to compute the sign
sign = np.sign(np.einsum(
"ij,ij->i",
normals[nonzero[ontriangle]],
points[nonzero[ontriangle]] - projection[ontriangle]))
distance[nonzero[ontriangle]] *= -1.0 * sign
# For all other triangles, resort to raycasting against the entire mesh
inside = mesh.ray.contains_points(points[nonzero[~ontriangle]])
sign = (inside.astype(int) * 2) - 1.0
# apply sign to previously computed distance
distance[nonzero[~ontriangle]] *= sign
return distance
class ProximityQuery(object):
"""
Proximity queries for the current mesh.
"""
def __init__(self, mesh):
self._mesh = mesh
@log_time
def on_surface(self, points):
"""
Given list of points, for each point find the closest point
on any triangle of the mesh.
Parameters
----------
points : (m, 3) float
Points in space
Returns
----------
closest : (m, 3) float
Closest point on triangles for each point
distance : (m,) float
Distance to surface
triangle_id : (m,) int
Index of closest triangle for each point.
"""
return closest_point(mesh=self._mesh,
points=points)
def vertex(self, points):
"""
Given a set of points, return the closest vertex index to each point
Parameters
----------
points : (n, 3) float
Points in space
Returns
----------
distance : (n,) float
Distance from source point to vertex.
vertex_id : (n,) int
Index of mesh.vertices for closest vertex.
"""
tree = self._mesh.kdtree
return tree.query(points)
def signed_distance(self, points):
"""
Find the signed distance from a mesh to a list of points.
* Points OUTSIDE the mesh will have NEGATIVE distance
* Points within tol.merge of the surface will have POSITIVE distance
* Points INSIDE the mesh will have POSITIVE distance
Parameters
-----------
points : (n, 3) float
Points in space
Returns
----------
signed_distance : (n,) float
Signed distance from point to mesh.
"""
return signed_distance(self._mesh, points)
def longest_ray(mesh, points, directions):
"""
Find the lengths of the longest rays which do not intersect the mesh
cast from a list of points in the provided directions.
Parameters
-----------
points : (n, 3) float
Points in space.
directions : (n, 3) float
Directions of rays.
Returns
----------
signed_distance : (n,) float
Length of rays.
"""
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(points, (-1, 3)):
raise ValueError('points must be (n,3)!')
directions = np.asanyarray(directions, dtype=np.float64)
if not util.is_shape(directions, (-1, 3)):
raise ValueError('directions must be (n,3)!')
if len(points) != len(directions):
raise ValueError('number of points must equal number of directions!')
faces, rays, locations = mesh.ray.intersects_id(points, directions,
return_locations=True,
multiple_hits=True)
if len(rays) > 0:
distances = np.linalg.norm(locations - points[rays],
axis=1)
else:
distances = np.array([])
# Reject intersections at distance less than tol.planar
rays = rays[distances > tol.planar]
distances = distances[distances > tol.planar]
# Add infinite length for those with no valid intersection
no_intersections = np.setdiff1d(np.arange(len(points)), rays)
rays = np.concatenate((rays, no_intersections))
distances = np.concatenate((distances,
np.repeat(np.inf,
len(no_intersections))))
return group_min(rays, distances)
def max_tangent_sphere(mesh,
points,
inwards=True,
normals=None,
threshold=1e-6,
max_iter=100):
"""
Find the center and radius of the sphere which is tangent to
the mesh at the given point and at least one more point with no
non-tangential intersections with the mesh.
Masatomo Inui, Nobuyuki Umezu & Ryohei Shimane (2016)
Shrinking sphere:
A parallel algorithm for computing the thickness of 3D objects,
Computer-Aided Design and Applications, 13:2, 199-207,
DOI: 10.1080/16864360.2015.1084186
Parameters
----------
points : (n, 3) float
Points in space.
inwards : bool
Whether to have the sphere inside or outside the mesh.
normals : (n, 3) float or None
Normals of the mesh at the given points
if is None computed automatically.
Returns
----------
centers : (n,3) float
Centers of spheres
radii : (n,) float
Radii of spheres
"""
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(points, (-1, 3)):
raise ValueError('points must be (n,3)!')
if normals is not None:
normals = np.asanyarray(normals, dtype=np.float64)
if not util.is_shape(normals, (-1, 3)):
raise ValueError('normals must be (n,3)!')
if len(points) != len(normals):
raise ValueError('number of points must equal number of normals!')
else:
normals = mesh.face_normals[closest_point(mesh, points)[2]]
if inwards:
normals = -normals
# Find initial tangent spheres
distances = longest_ray(mesh, points, normals)
radii = distances * 0.5
not_converged = np.ones(len(points), dtype=bool) # boolean mask
# If ray is infinite, find the vertex which is furthest from our point
# when projected onto the ray. I.e. find v which maximises
# (v-p).n = v.n - p.n.
# We use a loop rather than a vectorised approach to reduce memory cost;
# it also seems to run faster.
for i in np.where(np.isinf(distances))[0]:
projections = np.dot(mesh.vertices - points[i], normals[i])
# If no points lie outside the tangent plane, then the radius is infinite
# otherwise we have a point outside the tangent plane, take the one with maximal
# projection
if projections.max() < tol.planar:
radii[i] = np.inf
not_converged[i] = False
else:
vertex = mesh.vertices[projections.argmax()]
radii[i] = (np.dot(vertex - points[i], vertex - points[i]) /
(2 * np.dot(vertex - points[i], normals[i])))
# Compute centers
centers = points + normals * np.nan_to_num(radii.reshape(-1, 1))
centers[np.isinf(radii)] = [np.nan, np.nan, np.nan]
# Our iterative process terminates when the difference in sphere
# radius is less than threshold*D
D = np.linalg.norm(mesh.bounds[1] - mesh.bounds[0])
convergence_threshold = threshold * D
n_iter = 0
while not_converged.sum() > 0 and n_iter < max_iter:
n_iter += 1
n_points, n_dists, n_faces = mesh.nearest.on_surface(
centers[not_converged])
# If the distance to the nearest point is the same as the distance
# to the start point then we are done.
done = np.abs(
n_dists -
np.linalg.norm(
centers[not_converged] -
points[not_converged],
axis=1)) < tol.planar
not_converged[np.where(not_converged)[0][done]] = False
# Otherwise find the radius and center of the sphere tangent to the mesh
# at the point and the nearest point.
diff = n_points[~done] - points[not_converged]
old_radii = radii[not_converged].copy()
# np.einsum produces element wise dot product
radii[not_converged] = (np.einsum('ij, ij->i',
diff,
diff) /
(2 * np.einsum('ij, ij->i',
diff,
normals[not_converged])))
centers[not_converged] = points[not_converged] + \
normals[not_converged] * radii[not_converged].reshape(-1, 1)
# If change in radius is less than threshold we have converged
cvged = old_radii - radii[not_converged] < convergence_threshold
not_converged[np.where(not_converged)[0][cvged]] = False
return centers, radii
def thickness(mesh,
points,
exterior=False,
normals=None,
method='max_sphere'):
"""
Find the thickness of the mesh at the given points.
Parameters
----------
points : (n, 3) float
Points in space
exterior : bool
Whether to compute the exterior thickness
(a.k.a. reach)
normals : (n, 3) float
Normals of the mesh at the given points
If is None computed automatically.
method : string
One of 'max_sphere' or 'ray'
Returns
----------
thickness : (n,) float
Thickness at given points.
"""
points = np.asanyarray(points, dtype=np.float64)
if not util.is_shape(points, (-1, 3)):
raise ValueError('points must be (n,3)!')
if normals is not None:
normals = np.asanyarray(normals, dtype=np.float64)
if not util.is_shape(normals, (-1, 3)):
raise ValueError('normals must be (n,3)!')
if len(points) != len(normals):
raise ValueError('number of points must equal number of normals!')
else:
normals = mesh.face_normals[closest_point(mesh, points)[2]]
if method == 'max_sphere':
centers, radius = max_tangent_sphere(mesh=mesh,
points=points,
inwards=not exterior,
normals=normals)
thickness = radius * 2
return thickness
elif method == 'ray':
if exterior:
return longest_ray(mesh, points, normals)
else:
return longest_ray(mesh, points, -normals)
else:
raise ValueError('Invalid method, use "max_sphere" or "ray"')
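# A minimal, hedged usage sketch (illustrative only; assumes the public trimesh
# API, e.g. trimesh.creation.icosphere and the cached `nearest` ProximityQuery):
if __name__ == '__main__':
    import trimesh
    mesh = trimesh.creation.icosphere()
    query = np.array([[2.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
    # closest surface points, distances and triangle indexes
    close, dist, tid = mesh.nearest.on_surface(query)
    # negative outside the mesh, positive inside, per the convention above
    signed = signed_distance(mesh, query)
    print(close, dist, tid, signed)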
| mit | 2,464,667,304,722,029,000 | -3,442,155,847,384,628,000 | 32.448276 | 88 | 0.609227 | false |
MSOpenTech/edx-platform | lms/djangoapps/bulk_email/tests/test_err_handling.py | 12 | 17239 | # -*- coding: utf-8 -*-
"""
Unit tests for handling email sending errors
"""
from itertools import cycle
from celery.states import SUCCESS, RETRY
from django.conf import settings
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.db import DatabaseError
import json
from mock import patch, Mock
from smtplib import SMTPDataError, SMTPServerDisconnected, SMTPConnectError
from bulk_email.models import CourseEmail, SEND_TO_ALL
from bulk_email.tasks import perform_delegate_email_batches, send_course_email
from instructor_task.models import InstructorTask
from instructor_task.subtasks import (
initialize_subtask_info,
SubtaskStatus,
check_subtask_is_valid,
update_subtask_status,
DuplicateTaskException,
MAX_DATABASE_LOCK_RETRIES,
)
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from student.tests.factories import UserFactory, AdminFactory, CourseEnrollmentFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class EmailTestException(Exception):
"""Mock exception for email testing."""
pass
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message'))
@patch.dict(settings.FEATURES, {'ENABLE_INSTRUCTOR_EMAIL': True, 'REQUIRE_COURSE_EMAIL_AUTH': False})
class TestEmailErrors(ModuleStoreTestCase):
"""
Test that errors from sending email are handled properly.
"""
def setUp(self):
super(TestEmailErrors, self).setUp()
course_title = u"ẗëṡẗ title イ乇丂イ ᄊ乇丂丂ムg乇 キo尺 ムレレ тэѕт мэѕѕаБэ"
self.course = CourseFactory.create(display_name=course_title)
self.instructor = AdminFactory.create()
self.client.login(username=self.instructor.username, password="test")
# load initial content (since we don't run migrations as part of tests):
call_command("loaddata", "course_email_template.json")
self.url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.send_mail_url = reverse('send_email', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.success_content = {
'course_id': self.course.id.to_deprecated_string(),
'success': True,
}
@patch('bulk_email.tasks.get_connection', autospec=True)
@patch('bulk_email.tasks.send_course_email.retry')
def test_data_err_retry(self, retry, get_conn):
"""
Test that celery handles transient SMTPDataErrors by retrying.
"""
get_conn.return_value.send_messages.side_effect = SMTPDataError(455, "Throttling: Sending rate exceeded")
test_email = {
'action': 'Send email',
'send_to': 'myself',
'subject': 'test subject for myself',
'message': 'test message for myself'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
# Test that we retry upon hitting a 4xx error
self.assertTrue(retry.called)
(__, kwargs) = retry.call_args
exc = kwargs['exc']
self.assertIsInstance(exc, SMTPDataError)
@patch('bulk_email.tasks.get_connection', autospec=True)
@patch('bulk_email.tasks.update_subtask_status')
@patch('bulk_email.tasks.send_course_email.retry')
def test_data_err_fail(self, retry, result, get_conn):
"""
Test that celery handles permanent SMTPDataErrors by failing and not retrying.
"""
# have every fourth email fail due to blacklisting:
get_conn.return_value.send_messages.side_effect = cycle([SMTPDataError(554, "Email address is blacklisted"),
None, None, None])
students = [UserFactory() for _ in xrange(settings.BULK_EMAIL_EMAILS_PER_TASK)]
for student in students:
CourseEnrollmentFactory.create(user=student, course_id=self.course.id)
test_email = {
'action': 'Send email',
'send_to': 'all',
'subject': 'test subject for all',
'message': 'test message for all'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
# We shouldn't retry when hitting a 5xx error
self.assertFalse(retry.called)
# Test that after the rejected email, the rest still successfully send
((_entry_id, _current_task_id, subtask_status), _kwargs) = result.call_args
self.assertEquals(subtask_status.skipped, 0)
expected_fails = int((settings.BULK_EMAIL_EMAILS_PER_TASK + 3) / 4.0)
self.assertEquals(subtask_status.failed, expected_fails)
self.assertEquals(subtask_status.succeeded, settings.BULK_EMAIL_EMAILS_PER_TASK - expected_fails)
@patch('bulk_email.tasks.get_connection', autospec=True)
@patch('bulk_email.tasks.send_course_email.retry')
def test_disconn_err_retry(self, retry, get_conn):
"""
Test that celery handles SMTPServerDisconnected by retrying.
"""
get_conn.return_value.open.side_effect = SMTPServerDisconnected(425, "Disconnecting")
test_email = {
'action': 'Send email',
'send_to': 'myself',
'subject': 'test subject for myself',
'message': 'test message for myself'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertTrue(retry.called)
(__, kwargs) = retry.call_args
exc = kwargs['exc']
self.assertIsInstance(exc, SMTPServerDisconnected)
@patch('bulk_email.tasks.get_connection', autospec=True)
@patch('bulk_email.tasks.send_course_email.retry')
def test_conn_err_retry(self, retry, get_conn):
"""
Test that celery handles SMTPConnectError by retrying.
"""
get_conn.return_value.open.side_effect = SMTPConnectError(424, "Bad Connection")
test_email = {
'action': 'Send email',
'send_to': 'myself',
'subject': 'test subject for myself',
'message': 'test message for myself'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertTrue(retry.called)
(__, kwargs) = retry.call_args
exc = kwargs['exc']
self.assertIsInstance(exc, SMTPConnectError)
@patch('bulk_email.tasks.SubtaskStatus.increment')
@patch('bulk_email.tasks.log')
def test_nonexistent_email(self, mock_log, result):
"""
Tests retries when the email doesn't exist
"""
# create an InstructorTask object to pass through
course_id = self.course.id
entry = InstructorTask.create(course_id, "task_type", "task_key", "task_input", self.instructor)
task_input = {"email_id": -1}
with self.assertRaises(CourseEmail.DoesNotExist):
perform_delegate_email_batches(entry.id, course_id, task_input, "action_name") # pylint: disable=no-member
((log_str, __, email_id), __) = mock_log.warning.call_args
self.assertTrue(mock_log.warning.called)
self.assertIn('Failed to get CourseEmail with id', log_str)
self.assertEqual(email_id, -1)
self.assertFalse(result.called)
def test_nonexistent_course(self):
"""
Tests exception when the course in the email doesn't exist
"""
course_id = SlashSeparatedCourseKey("I", "DONT", "EXIST")
email = CourseEmail(course_id=course_id)
email.save()
entry = InstructorTask.create(course_id, "task_type", "task_key", "task_input", self.instructor)
task_input = {"email_id": email.id} # pylint: disable=no-member
# (?i) is an inline regex flag for case-insensitive matching
with self.assertRaisesRegexp(ValueError, r"(?i)course not found"):
perform_delegate_email_batches(entry.id, course_id, task_input, "action_name") # pylint: disable=no-member
def test_nonexistent_to_option(self):
"""
Tests exception when the to_option in the email doesn't exist
"""
email = CourseEmail(course_id=self.course.id, to_option="IDONTEXIST")
email.save()
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
task_input = {"email_id": email.id} # pylint: disable=no-member
with self.assertRaisesRegexp(Exception, 'Unexpected bulk email TO_OPTION found: IDONTEXIST'):
perform_delegate_email_batches(entry.id, self.course.id, task_input, "action_name") # pylint: disable=no-member
def test_wrong_course_id_in_task(self):
"""
Tests exception when the course_id in task is not the same as one explicitly passed in.
"""
email = CourseEmail(course_id=self.course.id, to_option=SEND_TO_ALL)
email.save()
entry = InstructorTask.create("bogus/task/id", "task_type", "task_key", "task_input", self.instructor)
task_input = {"email_id": email.id} # pylint: disable=no-member
with self.assertRaisesRegexp(ValueError, 'does not match task value'):
perform_delegate_email_batches(entry.id, self.course.id, task_input, "action_name") # pylint: disable=no-member
def test_wrong_course_id_in_email(self):
"""
Tests exception when the course_id in CourseEmail is not the same as one explicitly passed in.
"""
email = CourseEmail(course_id=SlashSeparatedCourseKey("bogus", "course", "id"), to_option=SEND_TO_ALL)
email.save()
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
task_input = {"email_id": email.id} # pylint: disable=no-member
with self.assertRaisesRegexp(ValueError, 'does not match email value'):
perform_delegate_email_batches(entry.id, self.course.id, task_input, "action_name") # pylint: disable=no-member
def test_send_email_undefined_subtask(self):
# test at a lower level, to ensure that the course gets checked down below too.
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
entry_id = entry.id # pylint: disable=no-member
to_list = ['[email protected]']
global_email_context = {'course_title': 'dummy course'}
subtask_id = "subtask-id-value"
subtask_status = SubtaskStatus.create(subtask_id)
email_id = 1001
with self.assertRaisesRegexp(DuplicateTaskException, 'unable to find subtasks of instructor task'):
send_course_email(entry_id, email_id, to_list, global_email_context, subtask_status.to_dict())
def test_send_email_missing_subtask(self):
# test at a lower level, to ensure that the course gets checked down below too.
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
entry_id = entry.id # pylint: disable=no-member
to_list = ['[email protected]']
global_email_context = {'course_title': 'dummy course'}
subtask_id = "subtask-id-value"
initialize_subtask_info(entry, "emailed", 100, [subtask_id])
different_subtask_id = "bogus-subtask-id-value"
subtask_status = SubtaskStatus.create(different_subtask_id)
bogus_email_id = 1001
with self.assertRaisesRegexp(DuplicateTaskException, 'unable to find status for subtask of instructor task'):
send_course_email(entry_id, bogus_email_id, to_list, global_email_context, subtask_status.to_dict())
def test_send_email_completed_subtask(self):
# test at a lower level, to ensure that the course gets checked down below too.
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
entry_id = entry.id # pylint: disable=no-member
subtask_id = "subtask-id-value"
initialize_subtask_info(entry, "emailed", 100, [subtask_id])
subtask_status = SubtaskStatus.create(subtask_id, state=SUCCESS)
update_subtask_status(entry_id, subtask_id, subtask_status)
bogus_email_id = 1001
to_list = ['[email protected]']
global_email_context = {'course_title': 'dummy course'}
new_subtask_status = SubtaskStatus.create(subtask_id)
with self.assertRaisesRegexp(DuplicateTaskException, 'already completed'):
send_course_email(entry_id, bogus_email_id, to_list, global_email_context, new_subtask_status.to_dict())
def test_send_email_running_subtask(self):
# test at a lower level, to ensure that the course gets checked down below too.
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
entry_id = entry.id # pylint: disable=no-member
subtask_id = "subtask-id-value"
initialize_subtask_info(entry, "emailed", 100, [subtask_id])
subtask_status = SubtaskStatus.create(subtask_id)
update_subtask_status(entry_id, subtask_id, subtask_status)
check_subtask_is_valid(entry_id, subtask_id, subtask_status)
bogus_email_id = 1001
to_list = ['[email protected]']
global_email_context = {'course_title': 'dummy course'}
with self.assertRaisesRegexp(DuplicateTaskException, 'already being executed'):
send_course_email(entry_id, bogus_email_id, to_list, global_email_context, subtask_status.to_dict())
def test_send_email_retried_subtask(self):
# test at a lower level, to ensure that the course gets checked down below too.
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
entry_id = entry.id # pylint: disable=no-member
subtask_id = "subtask-id-value"
initialize_subtask_info(entry, "emailed", 100, [subtask_id])
subtask_status = SubtaskStatus.create(subtask_id, state=RETRY, retried_nomax=2)
update_subtask_status(entry_id, subtask_id, subtask_status)
bogus_email_id = 1001
to_list = ['[email protected]']
global_email_context = {'course_title': 'dummy course'}
# try running with a clean subtask:
new_subtask_status = SubtaskStatus.create(subtask_id)
with self.assertRaisesRegexp(DuplicateTaskException, 'already retried'):
send_course_email(entry_id, bogus_email_id, to_list, global_email_context, new_subtask_status.to_dict())
# try again, with a retried subtask with lower count:
new_subtask_status = SubtaskStatus.create(subtask_id, state=RETRY, retried_nomax=1)
with self.assertRaisesRegexp(DuplicateTaskException, 'already retried'):
send_course_email(entry_id, bogus_email_id, to_list, global_email_context, new_subtask_status.to_dict())
def test_send_email_with_locked_instructor_task(self):
# test at a lower level, to ensure that the course gets checked down below too.
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
entry_id = entry.id # pylint: disable=no-member
subtask_id = "subtask-id-locked-model"
initialize_subtask_info(entry, "emailed", 100, [subtask_id])
subtask_status = SubtaskStatus.create(subtask_id)
bogus_email_id = 1001
to_list = ['[email protected]']
global_email_context = {'course_title': 'dummy course'}
with patch('instructor_task.subtasks.InstructorTask.save') as mock_task_save:
mock_task_save.side_effect = DatabaseError
with self.assertRaises(DatabaseError):
send_course_email(entry_id, bogus_email_id, to_list, global_email_context, subtask_status.to_dict())
self.assertEquals(mock_task_save.call_count, MAX_DATABASE_LOCK_RETRIES)
def test_send_email_undefined_email(self):
# test at a lower level, to ensure that the course gets checked down below too.
entry = InstructorTask.create(self.course.id, "task_type", "task_key", "task_input", self.instructor)
entry_id = entry.id # pylint: disable=no-member
to_list = ['[email protected]']
global_email_context = {'course_title': 'dummy course'}
subtask_id = "subtask-id-undefined-email"
initialize_subtask_info(entry, "emailed", 100, [subtask_id])
subtask_status = SubtaskStatus.create(subtask_id)
bogus_email_id = 1001
with self.assertRaises(CourseEmail.DoesNotExist):
# we skip the call that updates subtask status, since we've not set up the InstructorTask
# for the subtask, and it's not important to the test.
with patch('bulk_email.tasks.update_subtask_status'):
send_course_email(entry_id, bogus_email_id, to_list, global_email_context, subtask_status.to_dict())
| agpl-3.0 | 3,503,622,457,813,171,000 | 5,324,569,236,675,777,000 | 51.411585 | 124 | 0.66273 | false |
kidaa/avmplus | test/performance/metricinfo.py | 8 | 2932 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This file contains information about the different performance metrics.
# It is a Python file that is imported by runtests.py.
# Only one variable, metric_info, is to be defined in this file.
# metric_info is a dictionary keyed by metric name; each value is another
# dictionary.
# This secondary dictionary MUST define the following (string) keys:
#    best : when analyzing multiple iterations, how the "best" value is calculated
#           valid values are one of [ min | max | mean | median ]
#           note that these are NOT strings, but the functions themselves
# The following are optional (string) keys:
# desc : A string description of the metric
# name : Display this name instead of the metric name
# unit : Metric Unit
# largerIsFaster : Boolean indicating whether larger values are considered
# to be faster. Defaults to False
# If a test reports a metric not defined in the metric_info dictionary, min is used as the default.
import sys
# add parent dir to python module search path
sys.path.append('..')
from util.runtestUtils import mean, median
metric_info = {
'time': {
'best':min,
'unit':'milliseconds',
},
'compile_time': {
'best':min,
'unit':'seconds',
},
'memory':{
'best':max,
'unit':'k',
},
'size':{
'best':min,
'unit':'bytes',
},
'v8': {
'best':max,
'desc': 'custom v8 normalized metric (hardcoded in the test)',
'largerIsFaster':True
},
'iterations/second':{
'best':max,
'largerIsFaster':True,
},
# steps is a metric output by the avm when compiled with --enable-count-steps
'steps':{
'best':mean,
        'desc':'internal steps reported by the VM, composed of call_count+loop_count. See Bug 568933 for details'
},
# vprof / perfm metrics
'vprof-compile-time': {
'best':min,
'name':'vprof: compile (time)'
},
'vprof-code-size' : {
'best':min,
'name':'vprof: code size (bytes)'
},
'vprof-verify-time' : {
'best':min,
'name':'vprof: verify & IR gen (time)'
},
'vprof-ir-bytes': {
'best':min,
'name':'vprof: mir/lir bytes'
},
'vprof-ir-time': {
'best':min,
'name':'vprof: mir/lir (# of inst)'
},
'vprof-count': {
'best':min,
'name':'vprof: count'
}
}
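# A minimal, hypothetical self-test; the real consumer is runtests.py, whose
# lookup logic may differ. It assumes min as the documented default "best"
# function for metrics missing from metric_info.
if __name__ == "__main__":
    samples = [102.0, 98.5, 101.2]
    for metric in ("time", "v8", "some-unknown-metric"):
        info = metric_info.get(metric, {"best": min})
        best = info["best"](samples)
        print("%s: best=%s %s" % (metric, best, info.get("unit", "")))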
| mpl-2.0 | 7,557,336,448,874,468,000 | -4,612,894,731,306,633,000 | 32.701149 | 113 | 0.540246 | false |
IptvBrasilGroup/Cleitonleonelcreton.repository | plugin.video.armagedompirata/mechanize/_firefox3cookiejar.py | 134 | 8345 | """Firefox 3 "cookies.sqlite" cookie persistence.
Copyright 2008 John J Lee <[email protected]>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
import logging
import time
from _clientcookie import CookieJar, Cookie, MappingIterator
from _util import isstringlike, experimental
debug = logging.getLogger("mechanize.cookies").debug
class Firefox3CookieJar(CookieJar):
"""Firefox 3 cookie jar.
The cookies are stored in Firefox 3's "cookies.sqlite" format.
Constructor arguments:
filename: filename of cookies.sqlite (typically found at the top level
of a firefox profile directory)
autoconnect: as a convenience, connect to the SQLite cookies database at
Firefox3CookieJar construction time (default True)
policy: an object satisfying the mechanize.CookiePolicy interface
Note that this is NOT a FileCookieJar, and there are no .load(),
.save() or .restore() methods. The database is in sync with the
cookiejar object's state after each public method call.
Following Firefox's own behaviour, session cookies are never saved to
the database.
The file is created, and an sqlite database written to it, if it does
not already exist. The moz_cookies database table is created if it does
not already exist.
"""
# XXX
# handle DatabaseError exceptions
# add a FileCookieJar (explicit .save() / .revert() / .load() methods)
def __init__(self, filename, autoconnect=True, policy=None):
experimental("Firefox3CookieJar is experimental code")
CookieJar.__init__(self, policy)
if filename is not None and not isstringlike(filename):
raise ValueError("filename must be string-like")
self.filename = filename
self._conn = None
if autoconnect:
self.connect()
def connect(self):
import sqlite3 # not available in Python 2.4 stdlib
self._conn = sqlite3.connect(self.filename)
self._conn.isolation_level = "DEFERRED"
self._create_table_if_necessary()
def close(self):
self._conn.close()
def _transaction(self, func):
try:
cur = self._conn.cursor()
try:
result = func(cur)
finally:
cur.close()
except:
self._conn.rollback()
raise
else:
self._conn.commit()
return result
def _execute(self, query, params=()):
return self._transaction(lambda cur: cur.execute(query, params))
def _query(self, query, params=()):
# XXX should we bother with a transaction?
cur = self._conn.cursor()
try:
cur.execute(query, params)
return cur.fetchall()
finally:
cur.close()
def _create_table_if_necessary(self):
self._execute("""\
CREATE TABLE IF NOT EXISTS moz_cookies (id INTEGER PRIMARY KEY, name TEXT,
    value TEXT, host TEXT, path TEXT, expiry INTEGER,
lastAccessed INTEGER, isSecure INTEGER, isHttpOnly INTEGER)""")
def _cookie_from_row(self, row):
(pk, name, value, domain, path, expires,
last_accessed, secure, http_only) = row
version = 0
domain = domain.encode("ascii", "ignore")
path = path.encode("ascii", "ignore")
name = name.encode("ascii", "ignore")
value = value.encode("ascii", "ignore")
secure = bool(secure)
# last_accessed isn't a cookie attribute, so isn't added to rest
rest = {}
if http_only:
rest["HttpOnly"] = None
if name == "":
name = value
value = None
initial_dot = domain.startswith(".")
domain_specified = initial_dot
discard = False
if expires == "":
expires = None
discard = True
return Cookie(version, name, value,
None, False,
domain, domain_specified, initial_dot,
path, False,
secure,
expires,
discard,
None,
None,
rest)
def clear(self, domain=None, path=None, name=None):
CookieJar.clear(self, domain, path, name)
where_parts = []
sql_params = []
if domain is not None:
where_parts.append("host = ?")
sql_params.append(domain)
if path is not None:
where_parts.append("path = ?")
sql_params.append(path)
if name is not None:
where_parts.append("name = ?")
sql_params.append(name)
where = " AND ".join(where_parts)
if where:
where = " WHERE " + where
def clear(cur):
cur.execute("DELETE FROM moz_cookies%s" % where,
tuple(sql_params))
self._transaction(clear)
def _row_from_cookie(self, cookie, cur):
expires = cookie.expires
if cookie.discard:
expires = ""
domain = unicode(cookie.domain)
path = unicode(cookie.path)
name = unicode(cookie.name)
value = unicode(cookie.value)
secure = bool(int(cookie.secure))
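        # mirror _cookie_from_row(): a cookie without a name is stored with
        # its value in the name column and an empty name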
if value is None:
value = name
name = ""
last_accessed = int(time.time())
http_only = cookie.has_nonstandard_attr("HttpOnly")
query = cur.execute("""SELECT MAX(id) + 1 from moz_cookies""")
pk = query.fetchone()[0]
if pk is None:
pk = 1
return (pk, name, value, domain, path, expires,
last_accessed, secure, http_only)
def set_cookie(self, cookie):
if cookie.discard:
CookieJar.set_cookie(self, cookie)
return
def set_cookie(cur):
# XXX
# is this RFC 2965-correct?
# could this do an UPDATE instead?
row = self._row_from_cookie(cookie, cur)
name, unused, domain, path = row[1:5]
cur.execute("""\
DELETE FROM moz_cookies WHERE host = ? AND path = ? AND name = ?""",
(domain, path, name))
cur.execute("""\
INSERT INTO moz_cookies VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""", row)
self._transaction(set_cookie)
def __iter__(self):
# session (non-persistent) cookies
for cookie in MappingIterator(self._cookies):
yield cookie
# persistent cookies
for row in self._query("""\
SELECT * FROM moz_cookies ORDER BY name, path, host"""):
yield self._cookie_from_row(row)
def _cookies_for_request(self, request):
session_cookies = CookieJar._cookies_for_request(self, request)
def get_cookies(cur):
query = cur.execute("SELECT host from moz_cookies")
domains = [row[0] for row in query.fetchall()]
cookies = []
for domain in domains:
cookies += self._persistent_cookies_for_domain(domain,
request, cur)
return cookies
        persistent_cookies = self._transaction(get_cookies)
        return session_cookies + persistent_cookies
def _persistent_cookies_for_domain(self, domain, request, cur):
cookies = []
if not self._policy.domain_return_ok(domain, request):
return []
debug("Checking %s for cookies to return", domain)
query = cur.execute("""\
SELECT * from moz_cookies WHERE host = ? ORDER BY path""",
(domain,))
cookies = [self._cookie_from_row(row) for row in query.fetchall()]
last_path = None
r = []
for cookie in cookies:
if (cookie.path != last_path and
not self._policy.path_return_ok(cookie.path, request)):
last_path = cookie.path
continue
if not self._policy.return_ok(cookie, request):
debug(" not returning cookie")
continue
debug(" it's a match")
r.append(cookie)
return r
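# A minimal usage sketch (hypothetical profile path; mechanize.Browser is just
# one possible consumer -- any urllib2-style opener that accepts a cookiejar
# works the same way):
#
#     cj = Firefox3CookieJar("/path/to/profile/cookies.sqlite")
#     browser = mechanize.Browser()
#     browser.set_cookiejar(cj)
#     browser.open("http://example.com/")  # cookies sync to the sqlite file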
| gpl-2.0 | 5,886,294,877,068,132,000 | 2,958,039,414,237,476,000 | 32.649194 | 76 | 0.563811 | false |
JonnyWong16/plexpy | lib/apscheduler/triggers/cron/expressions.py | 3 | 9184 | """This module contains the expressions applicable for CronTrigger's fields."""
from calendar import monthrange
import re
from apscheduler.util import asint
__all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression',
'WeekdayPositionExpression', 'LastDayOfMonthExpression')
WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
class AllExpression(object):
value_re = re.compile(r'\*(?:/(?P<step>\d+))?$')
def __init__(self, step=None):
self.step = asint(step)
if self.step == 0:
raise ValueError('Increment must be higher than 0')
def validate_range(self, field_name):
from apscheduler.triggers.cron.fields import MIN_VALUES, MAX_VALUES
value_range = MAX_VALUES[field_name] - MIN_VALUES[field_name]
if self.step and self.step > value_range:
raise ValueError('the step value ({}) is higher than the total range of the '
'expression ({})'.format(self.step, value_range))
def get_next_value(self, date, field):
start = field.get_value(date)
minval = field.get_min(date)
maxval = field.get_max(date)
start = max(start, minval)
if not self.step:
next = start
else:
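            # advance start to the next multiple of step, counted from minval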
distance_to_next = (self.step - (start - minval)) % self.step
next = start + distance_to_next
if next <= maxval:
return next
def __eq__(self, other):
return isinstance(other, self.__class__) and self.step == other.step
def __str__(self):
if self.step:
return '*/%d' % self.step
return '*'
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.step)
class RangeExpression(AllExpression):
value_re = re.compile(
r'(?P<first>\d+)(?:-(?P<last>\d+))?(?:/(?P<step>\d+))?$')
def __init__(self, first, last=None, step=None):
super(RangeExpression, self).__init__(step)
first = asint(first)
last = asint(last)
if last is None and step is None:
last = first
if last is not None and first > last:
raise ValueError('The minimum value in a range must not be higher than the maximum')
self.first = first
self.last = last
def validate_range(self, field_name):
from apscheduler.triggers.cron.fields import MIN_VALUES, MAX_VALUES
super(RangeExpression, self).validate_range(field_name)
if self.first < MIN_VALUES[field_name]:
raise ValueError('the first value ({}) is lower than the minimum value ({})'
.format(self.first, MIN_VALUES[field_name]))
if self.last is not None and self.last > MAX_VALUES[field_name]:
raise ValueError('the last value ({}) is higher than the maximum value ({})'
.format(self.last, MAX_VALUES[field_name]))
value_range = (self.last or MAX_VALUES[field_name]) - self.first
if self.step and self.step > value_range:
raise ValueError('the step value ({}) is higher than the total range of the '
'expression ({})'.format(self.step, value_range))
def get_next_value(self, date, field):
startval = field.get_value(date)
minval = field.get_min(date)
maxval = field.get_max(date)
# Apply range limits
minval = max(minval, self.first)
maxval = min(maxval, self.last) if self.last is not None else maxval
nextval = max(minval, startval)
# Apply the step if defined
if self.step:
distance_to_next = (self.step - (nextval - minval)) % self.step
nextval += distance_to_next
return nextval if nextval <= maxval else None
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.first == other.first and
self.last == other.last)
def __str__(self):
if self.last != self.first and self.last is not None:
range = '%d-%d' % (self.first, self.last)
else:
range = str(self.first)
if self.step:
return '%s/%d' % (range, self.step)
return range
def __repr__(self):
args = [str(self.first)]
        if (self.last != self.first and self.last is not None) or self.step:
args.append(str(self.last))
if self.step:
args.append(str(self.step))
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
class MonthRangeExpression(RangeExpression):
value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?', re.IGNORECASE)
def __init__(self, first, last=None):
try:
first_num = MONTHS.index(first.lower()) + 1
except ValueError:
raise ValueError('Invalid month name "%s"' % first)
if last:
try:
last_num = MONTHS.index(last.lower()) + 1
except ValueError:
raise ValueError('Invalid month name "%s"' % last)
else:
last_num = None
super(MonthRangeExpression, self).__init__(first_num, last_num)
def __str__(self):
if self.last != self.first and self.last is not None:
return '%s-%s' % (MONTHS[self.first - 1], MONTHS[self.last - 1])
return MONTHS[self.first - 1]
def __repr__(self):
args = ["'%s'" % MONTHS[self.first]]
if self.last != self.first and self.last is not None:
args.append("'%s'" % MONTHS[self.last - 1])
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
class WeekdayRangeExpression(RangeExpression):
value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?', re.IGNORECASE)
def __init__(self, first, last=None):
try:
first_num = WEEKDAYS.index(first.lower())
except ValueError:
raise ValueError('Invalid weekday name "%s"' % first)
if last:
try:
last_num = WEEKDAYS.index(last.lower())
except ValueError:
raise ValueError('Invalid weekday name "%s"' % last)
else:
last_num = None
super(WeekdayRangeExpression, self).__init__(first_num, last_num)
def __str__(self):
if self.last != self.first and self.last is not None:
return '%s-%s' % (WEEKDAYS[self.first], WEEKDAYS[self.last])
return WEEKDAYS[self.first]
def __repr__(self):
args = ["'%s'" % WEEKDAYS[self.first]]
if self.last != self.first and self.last is not None:
args.append("'%s'" % WEEKDAYS[self.last])
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
class WeekdayPositionExpression(AllExpression):
options = ['1st', '2nd', '3rd', '4th', '5th', 'last']
value_re = re.compile(r'(?P<option_name>%s) +(?P<weekday_name>(?:\d+|\w+))' %
'|'.join(options), re.IGNORECASE)
def __init__(self, option_name, weekday_name):
super(WeekdayPositionExpression, self).__init__(None)
try:
self.option_num = self.options.index(option_name.lower())
except ValueError:
raise ValueError('Invalid weekday position "%s"' % option_name)
try:
self.weekday = WEEKDAYS.index(weekday_name.lower())
except ValueError:
raise ValueError('Invalid weekday name "%s"' % weekday_name)
def get_next_value(self, date, field):
# Figure out the weekday of the month's first day and the number of days in that month
first_day_wday, last_day = monthrange(date.year, date.month)
# Calculate which day of the month is the first of the target weekdays
first_hit_day = self.weekday - first_day_wday + 1
if first_hit_day <= 0:
first_hit_day += 7
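        # e.g. for "2nd fri" in a month that starts on a Sunday
        # (first_day_wday=6 with Monday=0, weekday=4): 4 - 6 + 1 = -1 -> +7,
        # so the first Friday is day 6 and "2nd" adds one more week (day 13)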
# Calculate what day of the month the target weekday would be
if self.option_num < 5:
target_day = first_hit_day + self.option_num * 7
else:
target_day = first_hit_day + ((last_day - first_hit_day) // 7) * 7
if target_day <= last_day and target_day >= date.day:
return target_day
def __eq__(self, other):
return (super(WeekdayPositionExpression, self).__eq__(other) and
self.option_num == other.option_num and self.weekday == other.weekday)
def __str__(self):
return '%s %s' % (self.options[self.option_num], WEEKDAYS[self.weekday])
def __repr__(self):
return "%s('%s', '%s')" % (self.__class__.__name__, self.options[self.option_num],
WEEKDAYS[self.weekday])
class LastDayOfMonthExpression(AllExpression):
value_re = re.compile(r'last', re.IGNORECASE)
def __init__(self):
super(LastDayOfMonthExpression, self).__init__(None)
def get_next_value(self, date, field):
return monthrange(date.year, date.month)[1]
def __str__(self):
return 'last'
def __repr__(self):
return "%s()" % self.__class__.__name__
| gpl-3.0 | -7,488,871,801,446,758,000 | -6,003,239,361,865,483,000 | 35.589641 | 96 | 0.565658 | false |
aforalee/RRally | tests/unit/plugins/openstack/context/keystone/test_roles.py | 13 | 5273 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally import exceptions
from rally.plugins.openstack.context.keystone import roles
from tests.unit import fakes
from tests.unit import test
CTX = "rally.plugins.openstack.context.keystone.roles"
class RoleGeneratorTestCase(test.TestCase):
def create_default_roles_and_patch_add_remove_functions(self, fc):
fc.keystone().roles.add_user_role = mock.MagicMock()
fc.keystone().roles.remove_user_role = mock.MagicMock()
fc.keystone().roles.create("r1", "test_role1")
fc.keystone().roles.create("r2", "test_role2")
self.assertEqual(2, len(fc.keystone().roles.list()))
@property
def context(self):
return {
"config": {
"roles": [
"test_role1",
"test_role2"
]
},
"admin": {"endpoint": mock.MagicMock()},
"task": mock.MagicMock()
}
@mock.patch("%s.osclients" % CTX)
def test_add_role(self, mock_osclients):
fc = fakes.FakeClients()
mock_osclients.Clients.return_value = fc
self.create_default_roles_and_patch_add_remove_functions(fc)
ctx = roles.RoleGenerator(self.context)
ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"},
{"id": "u2", "tenant_id": "t2"}]
result = ctx._add_role(mock.MagicMock(),
self.context["config"]["roles"][0])
expected = {"id": "r1", "name": "test_role1"}
self.assertEqual(expected, result)
@mock.patch("%s.osclients" % CTX)
def test_add_role_which_does_not_exist(self, mock_osclients):
fc = fakes.FakeClients()
mock_osclients.Clients.return_value = fc
self.create_default_roles_and_patch_add_remove_functions(fc)
ctx = roles.RoleGenerator(self.context)
ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"},
{"id": "u2", "tenant_id": "t2"}]
ex = self.assertRaises(exceptions.NoSuchRole, ctx._add_role,
mock.MagicMock(), "unknown_role")
expected = "There is no role with name `unknown_role`."
self.assertEqual(expected, str(ex))
@mock.patch("%s.osclients" % CTX)
def test_remove_role(self, mock_osclients):
role = mock.MagicMock()
fc = fakes.FakeClients()
mock_osclients.Clients.return_value = fc
self.create_default_roles_and_patch_add_remove_functions(fc)
ctx = roles.RoleGenerator(self.context)
ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"},
{"id": "u2", "tenant_id": "t2"}]
ctx._remove_role(mock.MagicMock(), role)
calls = [
mock.call("u1", role["id"], tenant="t1"),
mock.call("u2", role["id"], tenant="t2"),
]
mock_keystone = mock_osclients.Clients().keystone()
mock_keystone.roles.remove_user_role.assert_has_calls(calls)
@mock.patch("%s.osclients" % CTX)
def test_setup_and_cleanup(self, mock_osclients):
fc = fakes.FakeClients()
mock_osclients.Clients.return_value = fc
self.create_default_roles_and_patch_add_remove_functions(fc)
with roles.RoleGenerator(self.context) as ctx:
ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"},
{"id": "u2", "tenant_id": "t2"}]
ctx.setup()
calls = [
mock.call("u1", "r1", tenant="t1"),
mock.call("u2", "r1", tenant="t2"),
mock.call("u1", "r2", tenant="t1"),
mock.call("u2", "r2", tenant="t2")
]
fc.keystone().roles.add_user_role.assert_has_calls(calls)
self.assertEqual(
4, fc.keystone().roles.add_user_role.call_count)
self.assertEqual(
0, fc.keystone().roles.remove_user_role.call_count)
self.assertEqual(2, len(ctx.context["roles"]))
self.assertEqual(2, len(fc.keystone().roles.list()))
        # Cleanup (called by context manager)
self.assertEqual(2, len(fc.keystone().roles.list()))
self.assertEqual(4, fc.keystone().roles.add_user_role.call_count)
self.assertEqual(4, fc.keystone().roles.remove_user_role.call_count)
calls = [
mock.call("u1", "r1", tenant="t1"),
mock.call("u2", "r1", tenant="t2"),
mock.call("u1", "r2", tenant="t1"),
mock.call("u2", "r2", tenant="t2")
]
fc.keystone().roles.remove_user_role.assert_has_calls(calls)
| apache-2.0 | -6,980,644,476,911,731,000 | 5,154,040,377,536,328,000 | 39.251908 | 78 | 0.571591 | false |