repo_name (stringlengths 5–100) | path (stringlengths 4–299) | copies (stringclasses 990 values) | size (stringlengths 4–7) | content (stringlengths 666–1.03M) | license (stringclasses 15 values) | hash (int64 -9,223,351,895,964,839,000–9,223,297,778B) | line_mean (float64 3.17–100) | line_max (int64 7–1k) | alpha_frac (float64 0.25–0.98) | autogenerated (bool 1 class)
---|---|---|---|---|---|---|---|---|---|---|
scieloorg/wayta | setup.py | 1 | 1219 | import os
from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
    README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
    CHANGES = f.read()

requires = [
    'elasticsearch>=1.1.1',
    'pyramid>=1.5.2',
    'pyramid_chameleon>=0.3',
    'pyramid_debugtoolbar>=2.1'
    ]

setup(name='wayta',
      version='1.3.1',
      description='A tool to suggest the name of an institution or country in the original form and language.',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
          "Framework :: Pyramid",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
          ],
      author='SciELO',
      author_email='[email protected]',
      url='http://docs.scielo.org',
      keywords='web pyramid pylons',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      install_requires=requires,
      entry_points="""\
      [paste.app_factory]
      main = wayta:main
      [console_scripts]
      wayta_loaddata=processing.loaddata:main
      """,
      )
| bsd-2-clause | -1,527,593,102,391,974,000 | 28.02381 | 111 | 0.607055 | false |
zorroz/microblog | flask/lib/python2.7/site-packages/setuptools/sandbox.py | 221 | 9994 | import os
import sys
import tempfile
import operator
import functools
import itertools
import re

import pkg_resources

if os.name == "java":
    import org.python.modules.posix.PosixModule as _os
else:
    _os = sys.modules[os.name]
try:
    _file = file
except NameError:
    _file = None
_open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set

from setuptools.compat import builtins, execfile

__all__ = [
    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]
def run_setup(setup_script, args):
    """Run a distutils setup script, sandboxed in its directory"""
    old_dir = os.getcwd()
    save_argv = sys.argv[:]
    save_path = sys.path[:]
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    temp_dir = os.path.join(setup_dir,'temp')
    if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
    save_tmp = tempfile.tempdir
    save_modules = sys.modules.copy()
    pr_state = pkg_resources.__getstate__()
    try:
        tempfile.tempdir = temp_dir
        os.chdir(setup_dir)
        try:
            sys.argv[:] = [setup_script]+list(args)
            sys.path.insert(0, setup_dir)
            # reset to include setup dir, w/clean callback list
            working_set.__init__()
            working_set.callbacks.append(lambda dist:dist.activate())
            DirectorySandbox(setup_dir).run(
                lambda: execfile(
                    "setup.py",
                    {'__file__':setup_script, '__name__':'__main__'}
                )
            )
        except SystemExit:
            v = sys.exc_info()[1]
            if v.args and v.args[0]:
                raise
            # Normal exit, just return
    finally:
        pkg_resources.__setstate__(pr_state)
        sys.modules.update(save_modules)
        # remove any modules imported within the sandbox
        del_modules = [
            mod_name for mod_name in sys.modules
            if mod_name not in save_modules
            # exclude any encodings modules. See #285
            and not mod_name.startswith('encodings.')
        ]
        list(map(sys.modules.__delitem__, del_modules))
        os.chdir(old_dir)
        sys.path[:] = save_path
        sys.argv[:] = save_argv
        tempfile.tempdir = save_tmp
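
# Illustrative usage (added comment, not part of the original module; the
# path below is hypothetical): run an untrusted setup script so it can only
# write inside its own directory.
#
#     run_setup('/tmp/somepkg/setup.py', ['--version'])
#
# Any attempt by the script to write outside /tmp/somepkg raises
# SandboxViolation instead of touching the filesystem.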
class AbstractSandbox:
    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""

    _active = False

    def __init__(self):
        self._attrs = [
            name for name in dir(_os)
            if not name.startswith('_') and hasattr(self,name)
        ]

    def _copy(self, source):
        for name in self._attrs:
            setattr(os, name, getattr(source,name))

    def run(self, func):
        """Run 'func' under os sandboxing"""
        try:
            self._copy(self)
            if _file:
                builtins.file = self._file
            builtins.open = self._open
            self._active = True
            return func()
        finally:
            self._active = False
            if _file:
                builtins.file = _file
            builtins.open = _open
            self._copy(_os)

    def _mk_dual_path_wrapper(name):
        original = getattr(_os,name)
        def wrap(self,src,dst,*args,**kw):
            if self._active:
                src,dst = self._remap_pair(name,src,dst,*args,**kw)
            return original(src,dst,*args,**kw)
        return wrap

    for name in ["rename", "link", "symlink"]:
        if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)

    def _mk_single_path_wrapper(name, original=None):
        original = original or getattr(_os,name)
        def wrap(self,path,*args,**kw):
            if self._active:
                path = self._remap_input(name,path,*args,**kw)
            return original(path,*args,**kw)
        return wrap

    if _file:
        _file = _mk_single_path_wrapper('file', _file)
    _open = _mk_single_path_wrapper('open', _open)
    for name in [
        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
        "startfile", "mkfifo", "mknod", "pathconf", "access"
    ]:
        if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)

    def _mk_single_with_return(name):
        original = getattr(_os,name)
        def wrap(self,path,*args,**kw):
            if self._active:
                path = self._remap_input(name,path,*args,**kw)
                return self._remap_output(name, original(path,*args,**kw))
            return original(path,*args,**kw)
        return wrap

    for name in ['readlink', 'tempnam']:
        if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)

    def _mk_query(name):
        original = getattr(_os,name)
        def wrap(self,*args,**kw):
            retval = original(*args,**kw)
            if self._active:
                return self._remap_output(name, retval)
            return retval
        return wrap

    for name in ['getcwd', 'tmpnam']:
        if hasattr(_os,name): locals()[name] = _mk_query(name)

    def _validate_path(self,path):
        """Called to remap or validate any path, whether input or output"""
        return path

    def _remap_input(self,operation,path,*args,**kw):
        """Called for path inputs"""
        return self._validate_path(path)

    def _remap_output(self,operation,path):
        """Called for path outputs"""
        return self._validate_path(path)

    def _remap_pair(self,operation,src,dst,*args,**kw):
        """Called for path pairs like rename, link, and symlink operations"""
        return (
            self._remap_input(operation+'-from',src,*args,**kw),
            self._remap_input(operation+'-to',dst,*args,**kw)
        )
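
# Note (added comment, not in the original source): the _mk_*_wrapper
# factories above run once, at class-construction time; each `for name in
# [...]` loop uses locals() to attach one wrapped method per os function, so
# e.g. os.rename is routed through _remap_pair whenever a sandbox is active.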
if hasattr(os, 'devnull'):
    _EXCEPTIONS = [os.devnull,]
else:
    _EXCEPTIONS = []

try:
    from win32com.client.gencache import GetGeneratePath
    _EXCEPTIONS.append(GetGeneratePath())
    del GetGeneratePath
except ImportError:
    # it appears pywin32 is not installed, so no need to exclude.
    pass
class DirectorySandbox(AbstractSandbox):
    """Restrict operations to a single subdirectory - pseudo-chroot"""

    write_ops = dict.fromkeys([
        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
    ])

    _exception_patterns = [
        # Allow lib2to3 to attempt to save a pickled grammar object (#121)
        '.*lib2to3.*\.pickle$',
    ]
    "exempt writing to paths that match the pattern"

    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
        self._prefix = os.path.join(self._sandbox,'')
        self._exceptions = [
            os.path.normcase(os.path.realpath(path))
            for path in exceptions
        ]
        AbstractSandbox.__init__(self)

    def _violation(self, operation, *args, **kw):
        raise SandboxViolation(operation, args, kw)

    if _file:
        def _file(self, path, mode='r', *args, **kw):
            if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
                self._violation("file", path, mode, *args, **kw)
            return _file(path,mode,*args,**kw)

    def _open(self, path, mode='r', *args, **kw):
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("open", path, mode, *args, **kw)
        return _open(path,mode,*args,**kw)

    def tmpnam(self):
        self._violation("tmpnam")

    def _ok(self, path):
        active = self._active
        try:
            self._active = False
            realpath = os.path.normcase(os.path.realpath(path))
            return (
                self._exempted(realpath)
                or realpath == self._sandbox
                or realpath.startswith(self._prefix)
            )
        finally:
            self._active = active

    def _exempted(self, filepath):
        start_matches = (
            filepath.startswith(exception)
            for exception in self._exceptions
        )
        pattern_matches = (
            re.match(pattern, filepath)
            for pattern in self._exception_patterns
        )
        candidates = itertools.chain(start_matches, pattern_matches)
        return any(candidates)

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        if operation in self.write_ops and not self._ok(path):
            self._violation(operation, os.path.realpath(path), *args, **kw)
        return path

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        if not self._ok(src) or not self._ok(dst):
            self._violation(operation, src, dst, *args, **kw)
        return (src,dst)

    def open(self, file, flags, mode=0x1FF, *args, **kw):  # 0777
        """Called for low-level os.open()"""
        if flags & WRITE_FLAGS and not self._ok(file):
            self._violation("os.open", file, flags, mode, *args, **kw)
        return _os.open(file,flags,mode, *args, **kw)
WRITE_FLAGS = functools.reduce(
    operator.or_, [getattr(_os, a, 0) for a in
        "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)
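
# Added comment (not in the original source): WRITE_FLAGS ORs together every
# os.open() flag that can create or modify a file; flags missing on the
# current platform fall back to 0 via getattr. So in DirectorySandbox.open()
# the test `flags & WRITE_FLAGS` is non-zero exactly when write access is
# requested, e.g. os.O_WRONLY | os.O_CREAT is checked against _ok() while a
# plain os.O_RDONLY open is allowed anywhere.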
class SandboxViolation(DistutilsError):
    """A setup script attempted to modify the filesystem outside the sandbox"""

    def __str__(self):
        return """SandboxViolation: %s%r %s

The package setup script has attempted to modify files on your system
that are not within the EasyInstall build area, and has been aborted.

This package cannot be safely installed by EasyInstall, and may not
support alternate installation locations even if you run its setup
script by hand. Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.""" % self.args

#
| bsd-3-clause | -4,998,048,370,257,236,000 | 30.037267 | 79 | 0.577747 | false |
uglyboxer/linear_neuron | net-p3/lib/python3.5/site-packages/setuptools/compat.py | 456 | 2094 | import sys
import itertools

PY3 = sys.version_info >= (3,)
PY2 = not PY3

if PY2:
    basestring = basestring
    import __builtin__ as builtins
    import ConfigParser
    from StringIO import StringIO
    BytesIO = StringIO
    func_code = lambda o: o.func_code
    func_globals = lambda o: o.func_globals
    im_func = lambda o: o.im_func
    from htmlentitydefs import name2codepoint
    import httplib
    from BaseHTTPServer import HTTPServer
    from SimpleHTTPServer import SimpleHTTPRequestHandler
    from BaseHTTPServer import BaseHTTPRequestHandler
    iteritems = lambda o: o.iteritems()
    long_type = long
    maxsize = sys.maxint
    unichr = unichr
    unicode = unicode
    bytes = str
    from urllib import url2pathname, splittag, pathname2url
    import urllib2
    from urllib2 import urlopen, HTTPError, URLError, unquote, splituser
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
    filterfalse = itertools.ifilterfalse

    exec("""def reraise(tp, value, tb=None):
    raise tp, value, tb""")

if PY3:
    basestring = str
    import builtins
    import configparser as ConfigParser
    from io import StringIO, BytesIO
    func_code = lambda o: o.__code__
    func_globals = lambda o: o.__globals__
    im_func = lambda o: o.__func__
    from html.entities import name2codepoint
    import http.client as httplib
    from http.server import HTTPServer, SimpleHTTPRequestHandler
    from http.server import BaseHTTPRequestHandler
    iteritems = lambda o: o.items()
    long_type = int
    maxsize = sys.maxsize
    unichr = chr
    unicode = str
    bytes = bytes
    from urllib.error import HTTPError, URLError
    import urllib.request as urllib2
    from urllib.request import urlopen, url2pathname, pathname2url
    from urllib.parse import (
        urlparse, urlunparse, unquote, splituser, urljoin, urlsplit,
        urlunsplit, splittag,
    )
    filterfalse = itertools.filterfalse

    def reraise(tp, value, tb=None):
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
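
# Illustrative usage (added comment, not part of the original module):
# reraise re-raises an exception with its original traceback on both
# Python 2 and 3, e.g.
#
#     try:
#         something()
#     except Exception:
#         tp, value, tb = sys.exc_info()
#         reraise(tp, value, tb)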
| mit | -5,642,556,743,955,036,000 | 30.727273 | 76 | 0.69914 | false |
guewen/OpenUpgrade | addons/account_payment/wizard/__init__.py | 436 | 1144 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_payment_order
import account_payment_populate_statement
import account_payment_pay
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -6,422,384,616,153,183,000 | 43 | 78 | 0.632867 | false |
zhuwenping/python-for-android | python-build/python-libs/gdata/build/lib/gdata/health/service.py | 263 | 10007 | #!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""HealthService extends GDataService to streamline Google Health API access.
HealthService: Provides methods to interact with the profile, profile list,
and register/notices feeds. Extends GDataService.
HealthProfileQuery: Queries the Google Health Profile feed.
HealthProfileListQuery: Queries the Google Health Profile list feed.
"""
__author__ = '[email protected] (Eric Bidelman)'
import atom
import gdata.health
import gdata.service
class HealthService(gdata.service.GDataService):

  """Client extension for the Google Health service Document List feed."""

  def __init__(self, email=None, password=None, source=None,
               use_h9_sandbox=False, server='www.google.com',
               additional_headers=None, **kwargs):
    """Creates a client for the Google Health service.

    Args:
      email: string (optional) The user's email address, used for
          authentication.
      password: string (optional) The user's password.
      source: string (optional) The name of the user's application.
      use_h9_sandbox: boolean (optional) True to issue requests against the
          /h9 developer's sandbox.
      server: string (optional) The name of the server to which a connection
          will be opened.
      additional_headers: dictionary (optional) Any additional headers which
          should be included with CRUD operations.
      kwargs: The other parameters to pass to gdata.service.GDataService
          constructor.
    """
    service = use_h9_sandbox and 'weaver' or 'health'
    gdata.service.GDataService.__init__(
        self, email=email, password=password, service=service, source=source,
        server=server, additional_headers=additional_headers, **kwargs)
    self.ssl = True
    self.use_h9_sandbox = use_h9_sandbox

  def __get_service(self):
    return self.use_h9_sandbox and 'h9' or 'health'

  def GetProfileFeed(self, query=None, profile_id=None):
    """Fetches the users Google Health profile feed.

    Args:
      query: HealthProfileQuery or string (optional) A query to use on the
          profile feed. If None, a HealthProfileQuery is constructed.
      profile_id: string (optional) The profile id to query the profile feed
          with when using ClientLogin. Note: this parameter is ignored if
          query is set.

    Returns:
      A gdata.health.ProfileFeed object containing the user's Health profile.
    """
    if query is None:
      projection = profile_id and 'ui' or 'default'
      uri = HealthProfileQuery(
          service=self.__get_service(), projection=projection,
          profile_id=profile_id).ToUri()
    elif isinstance(query, HealthProfileQuery):
      uri = query.ToUri()
    else:
      uri = query

    return self.GetFeed(uri, converter=gdata.health.ProfileFeedFromString)

  def GetProfileListFeed(self, query=None):
    """Fetches the users Google Health profile feed.

    Args:
      query: HealthProfileListQuery or string (optional) A query to use
          on the profile list feed. If None, a HealthProfileListQuery is
          constructed to /health/feeds/profile/list or /h9/feeds/profile/list.

    Returns:
      A gdata.health.ProfileListFeed object containing the user's list
      of profiles.
    """
    if not query:
      uri = HealthProfileListQuery(service=self.__get_service()).ToUri()
    elif isinstance(query, HealthProfileListQuery):
      uri = query.ToUri()
    else:
      uri = query

    return self.GetFeed(uri, converter=gdata.health.ProfileListFeedFromString)
  def SendNotice(self, subject, body=None, content_type='html',
                 ccr=None, profile_id=None):
    """Sends (posts) a notice to the user's Google Health profile.

    Args:
      subject: A string representing the message's subject line.
      body: string (optional) The message body.
      content_type: string (optional) The content type of the notice message
          body. This parameter is only honored when a message body is
          specified.
      ccr: string (optional) The CCR XML document to reconcile into the
          user's profile.
      profile_id: string (optional) The profile id to work with when using
          ClientLogin. Note: this parameter is ignored if query is set.

    Returns:
      A gdata.health.ProfileEntry object of the posted entry.
    """
    if body:
      content = atom.Content(content_type=content_type, text=body)
    else:
      content = body

    entry = gdata.GDataEntry(
        title=atom.Title(text=subject), content=content,
        extension_elements=[atom.ExtensionElementFromString(ccr)])

    projection = profile_id and 'ui' or 'default'
    query = HealthRegisterQuery(service=self.__get_service(),
                                projection=projection, profile_id=profile_id)
    return self.Post(entry, query.ToUri(),
                     converter=gdata.health.ProfileEntryFromString)
class HealthProfileQuery(gdata.service.Query):

  """Object used to construct a URI to query the Google Health profile feed."""

  def __init__(self, service='health', feed='feeds/profile',
               projection='default', profile_id=None, text_query=None,
               params=None, categories=None):
    """Constructor for Health profile feed query.

    Args:
      service: string (optional) The service to query. Either 'health' or 'h9'.
      feed: string (optional) The path for the feed. The default value is
          'feeds/profile'.
      projection: string (optional) The visibility of the data. Possible values
          are 'default' for AuthSub and 'ui' for ClientLogin. If this value
          is set to 'ui', the profile_id parameter should also be set.
      profile_id: string (optional) The profile id to query. This should only
          be used when using ClientLogin.
      text_query: str (optional) The contents of the q query parameter. The
          contents of the text_query are URL escaped upon conversion to a URI.
          Note: this parameter can only be used on the register feed using
          ClientLogin.
      params: dict (optional) Parameter value string pairs which become URL
          params when translated to a URI. These parameters are added to
          the query's items.
      categories: list (optional) List of category strings which should be
          included as query categories. See gdata.service.Query for
          additional documentation.
    """
    self.service = service
    self.profile_id = profile_id
    self.projection = projection
    gdata.service.Query.__init__(self, feed=feed, text_query=text_query,
                                 params=params, categories=categories)

  def ToUri(self):
    """Generates a URI from the query parameters set in the object.

    Returns:
      A string containing the URI used to retrieve entries from the Health
      profile feed.
    """
    old_feed = self.feed
    self.feed = '/'.join([self.service, old_feed, self.projection])

    if self.profile_id:
      self.feed += '/' + self.profile_id
    self.feed = '/%s' % (self.feed,)

    new_feed = gdata.service.Query.ToUri(self)
    self.feed = old_feed
    return new_feed
class HealthProfileListQuery(gdata.service.Query):

  """Object used to construct a URI to query a Health profile list feed."""

  def __init__(self, service='health', feed='feeds/profile/list'):
    """Constructor for Health profile list feed query.

    Args:
      service: string (optional) The service to query. Either 'health' or 'h9'.
      feed: string (optional) The path for the feed. The default value is
          'feeds/profile/list'.
    """
    gdata.service.Query.__init__(self, feed)
    self.service = service

  def ToUri(self):
    """Generates a URI from the query parameters set in the object.

    Returns:
      A string containing the URI used to retrieve entries from the
      profile list feed.
    """
    return '/%s' % ('/'.join([self.service, self.feed]),)
class HealthRegisterQuery(gdata.service.Query):

  """Object used to construct a URI to query a Health register/notice feed."""

  def __init__(self, service='health', feed='feeds/register',
               projection='default', profile_id=None):
    """Constructor for Health profile list feed query.

    Args:
      service: string (optional) The service to query. Either 'health' or 'h9'.
      feed: string (optional) The path for the feed. The default value is
          'feeds/register'.
      projection: string (optional) The visibility of the data. Possible values
          are 'default' for AuthSub and 'ui' for ClientLogin. If this value
          is set to 'ui', the profile_id parameter should also be set.
      profile_id: string (optional) The profile id to query. This should only
          be used when using ClientLogin.
    """
    gdata.service.Query.__init__(self, feed)
    self.service = service
    self.projection = projection
    self.profile_id = profile_id

  def ToUri(self):
    """Generates a URI from the query parameters set in the object.

    Returns:
      A string containing the URI needed to interact with the register feed.
    """
    old_feed = self.feed
    self.feed = '/'.join([self.service, old_feed, self.projection])
    new_feed = gdata.service.Query.ToUri(self)
    self.feed = old_feed

    if self.profile_id:
      new_feed += '/' + self.profile_id
    return '/%s' % (new_feed,)
| apache-2.0 | -5,420,982,813,106,255,000 | 37.04943 | 79 | 0.674328 | false |
pp-mo/iris | lib/iris/quickplot.py | 2 | 9074 | # Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
High-level plotting extensions to :mod:`iris.plot`.
These routines work much like their :mod:`iris.plot` counterparts, but they
automatically add a plot title, axis titles, and a colour bar when appropriate.
See also: :ref:`matplotlib <matplotlib:users-guide-index>`.
"""
import cf_units
import matplotlib.pyplot as plt
import iris.config
import iris.coords
import iris.plot as iplt
def _use_symbol(units):
    # For non-time units use the shortest unit representation.
    # E.g. prefer 'K' over 'kelvin', but not '0.0174532925199433 rad'
    # over 'degrees'
    return (
        not units.is_time()
        and not units.is_time_reference()
        and len(units.symbol) < len(str(units))
    )
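
# Illustrative example (added comment, not part of the original module):
# _use_symbol(cf_units.Unit('kelvin')) is True because the symbol 'K' is
# shorter than the name, while _use_symbol(cf_units.Unit('degrees')) is
# False because the symbol '0.0174532925199433 rad' is longer than
# 'degrees'.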
def _title(cube_or_coord, with_units):
    if cube_or_coord is None or isinstance(cube_or_coord, int):
        title = ""
    else:
        title = cube_or_coord.name().replace("_", " ").capitalize()
        units = cube_or_coord.units
        if with_units and not (
            units.is_unknown()
            or units.is_no_unit()
            or units == cf_units.Unit("1")
        ):
            if _use_symbol(units):
                units = units.symbol
            title += " / {}".format(units)
    return title


def _label(cube, mode, result=None, ndims=2, coords=None, axes=None):
    """Puts labels on the current plot using the given cube."""
    if axes is None:
        axes = plt.gca()

    axes.set_title(_title(cube, with_units=False))

    if result is not None:
        draw_edges = mode == iris.coords.POINT_MODE
        bar = plt.colorbar(
            result, orientation="horizontal", drawedges=draw_edges
        )
        has_known_units = not (
            cube.units.is_unknown() or cube.units.is_no_unit()
        )
        if has_known_units and cube.units != cf_units.Unit("1"):
            # Use shortest unit representation for anything other than time
            if _use_symbol(cube.units):
                bar.set_label(cube.units.symbol)
            else:
                bar.set_label(cube.units)
        # Remove the tick which is put on the colorbar by default.
        bar.ax.tick_params(length=0)

    if coords is None:
        plot_defn = iplt._get_plot_defn(cube, mode, ndims)
    else:
        plot_defn = iplt._get_plot_defn_custom_coords_picked(
            cube, coords, mode, ndims=ndims
        )

    if ndims == 2:
        if not iplt._can_draw_map(plot_defn.coords):
            axes.set_ylabel(_title(plot_defn.coords[0], with_units=True))
            axes.set_xlabel(_title(plot_defn.coords[1], with_units=True))
    elif ndims == 1:
        axes.set_xlabel(_title(plot_defn.coords[0], with_units=True))
        axes.set_ylabel(_title(cube, with_units=True))
    else:
        msg = (
            "Unexpected number of dimensions ({}) given to "
            "_label.".format(ndims)
        )
        raise ValueError(msg)


def _label_with_bounds(cube, result=None, ndims=2, coords=None, axes=None):
    _label(cube, iris.coords.BOUND_MODE, result, ndims, coords, axes)


def _label_with_points(cube, result=None, ndims=2, coords=None, axes=None):
    _label(cube, iris.coords.POINT_MODE, result, ndims, coords, axes)


def _get_titles(u_object, v_object):
    if u_object is None:
        u_object = iplt._u_object_from_v_object(v_object)
    xunits = u_object is not None and not u_object.units.is_time_reference()
    yunits = not v_object.units.is_time_reference()
    xlabel = _title(u_object, with_units=xunits)
    ylabel = _title(v_object, with_units=yunits)
    title = ""
    if u_object is None:
        title = _title(v_object, with_units=False)
    elif isinstance(u_object, iris.cube.Cube) and not isinstance(
        v_object, iris.cube.Cube
    ):
        title = _title(u_object, with_units=False)
    elif isinstance(v_object, iris.cube.Cube) and not isinstance(
        u_object, iris.cube.Cube
    ):
        title = _title(v_object, with_units=False)
    return xlabel, ylabel, title


def _label_1d_plot(*args, **kwargs):
    if len(args) > 1 and isinstance(
        args[1], (iris.cube.Cube, iris.coords.Coord)
    ):
        xlabel, ylabel, title = _get_titles(*args[:2])
    else:
        xlabel, ylabel, title = _get_titles(None, args[0])

    axes = kwargs.pop("axes", None)

    if len(kwargs) != 0:
        msg = "Unexpected kwargs {} given to _label_1d_plot".format(
            kwargs.keys()
        )
        raise ValueError(msg)

    if axes is None:
        axes = plt.gca()

    axes.set_title(title)
    axes.set_xlabel(xlabel)
    axes.set_ylabel(ylabel)
def contour(cube, *args, **kwargs):
    """
    Draws contour lines on a labelled plot based on the given Cube.

    With the basic call signature, contour "level" values are chosen
    automatically::

        contour(cube)

    Supply a number to use *N* automatically chosen levels::

        contour(cube, N)

    Supply a sequence *V* to use explicitly defined levels::

        contour(cube, V)

    See :func:`iris.plot.contour` for details of valid keyword arguments.

    """
    coords = kwargs.get("coords")
    axes = kwargs.get("axes")
    result = iplt.contour(cube, *args, **kwargs)
    _label_with_points(cube, coords=coords, axes=axes)
    return result


def contourf(cube, *args, **kwargs):
    """
    Draws filled contours on a labelled plot based on the given Cube.

    With the basic call signature, contour "level" values are chosen
    automatically::

        contour(cube)

    Supply a number to use *N* automatically chosen levels::

        contour(cube, N)

    Supply a sequence *V* to use explicitly defined levels::

        contour(cube, V)

    See :func:`iris.plot.contourf` for details of valid keyword arguments.

    """
    coords = kwargs.get("coords")
    axes = kwargs.get("axes")
    result = iplt.contourf(cube, *args, **kwargs)
    _label_with_points(cube, result, coords=coords, axes=axes)
    return result


def outline(cube, coords=None, color="k", linewidth=None, axes=None):
    """
    Draws cell outlines on a labelled plot based on the given Cube.

    Kwargs:

    * coords: list of :class:`~iris.coords.Coord` objects or coordinate names
        Use the given coordinates as the axes for the plot. The order of the
        given coordinates indicates which axis to use for each, where the
        first element is the horizontal axis of the plot and the second
        element is the vertical axis of the plot.

    * color: None or mpl color
        The color of the cell outlines. If None, the matplotlibrc setting
        patch.edgecolor is used by default.

    * linewidth: None or number
        The width of the lines showing the cell outlines. If None, the
        default width in patch.linewidth in matplotlibrc is used.

    """
    result = iplt.outline(
        cube, color=color, linewidth=linewidth, coords=coords, axes=axes
    )
    _label_with_bounds(cube, coords=coords, axes=axes)
    return result


def pcolor(cube, *args, **kwargs):
    """
    Draws a labelled pseudocolor plot based on the given Cube.

    See :func:`iris.plot.pcolor` for details of valid keyword arguments.

    """
    coords = kwargs.get("coords")
    axes = kwargs.get("axes")
    result = iplt.pcolor(cube, *args, **kwargs)
    _label_with_bounds(cube, result, coords=coords, axes=axes)
    return result


def pcolormesh(cube, *args, **kwargs):
    """
    Draws a labelled pseudocolour plot based on the given Cube.

    See :func:`iris.plot.pcolormesh` for details of valid keyword arguments.

    """
    coords = kwargs.get("coords")
    axes = kwargs.get("axes")
    result = iplt.pcolormesh(cube, *args, **kwargs)
    _label_with_bounds(cube, result, coords=coords, axes=axes)
    return result


def points(cube, *args, **kwargs):
    """
    Draws sample point positions on a labelled plot based on the given Cube.

    See :func:`iris.plot.points` for details of valid keyword arguments.

    """
    coords = kwargs.get("coords")
    axes = kwargs.get("axes")
    result = iplt.points(cube, *args, **kwargs)
    _label_with_points(cube, coords=coords, axes=axes)
    return result


def plot(*args, **kwargs):
    """
    Draws a labelled line plot based on the given cube(s) or
    coordinate(s).

    See :func:`iris.plot.plot` for details of valid arguments and
    keyword arguments.

    """
    axes = kwargs.get("axes")
    result = iplt.plot(*args, **kwargs)
    _label_1d_plot(*args, axes=axes)
    return result


def scatter(x, y, *args, **kwargs):
    """
    Draws a labelled scatter plot based on the given cubes or
    coordinates.

    See :func:`iris.plot.scatter` for details of valid arguments and
    keyword arguments.

    """
    axes = kwargs.get("axes")
    result = iplt.scatter(x, y, *args, **kwargs)
    _label_1d_plot(x, y, axes=axes)
    return result


# Provide a convenience show method from pyplot.
show = plt.show
| lgpl-3.0 | -4,965,416,833,127,718,000 | 28.270968 | 79 | 0.634009 | false |
suizokukan/urwid | urwid/tests/test_str_util.py | 20 | 1258 | import unittest
from urwid.compat import B
from urwid.escape import str_util
class DecodeOneTest(unittest.TestCase):
    def gwt(self, ch, exp_ord, exp_pos):
        ch = B(ch)
        o, pos = str_util.decode_one(ch,0)
        assert o==exp_ord, " got:%r expected:%r" % (o, exp_ord)
        assert pos==exp_pos, " got:%r expected:%r" % (pos, exp_pos)

    def test1byte(self):
        self.gwt("ab", ord("a"), 1)
        self.gwt("\xc0a", ord("?"), 1) # error

    def test2byte(self):
        self.gwt("\xc2", ord("?"), 1) # error
        self.gwt("\xc0\x80", ord("?"), 1) # error
        self.gwt("\xc2\x80", 0x80, 2)
        self.gwt("\xdf\xbf", 0x7ff, 2)

    def test3byte(self):
        self.gwt("\xe0", ord("?"), 1) # error
        self.gwt("\xe0\xa0", ord("?"), 1) # error
        self.gwt("\xe0\x90\x80", ord("?"), 1) # error
        self.gwt("\xe0\xa0\x80", 0x800, 3)
        self.gwt("\xef\xbf\xbf", 0xffff, 3)

    def test4byte(self):
        self.gwt("\xf0", ord("?"), 1) # error
        self.gwt("\xf0\x90", ord("?"), 1) # error
        self.gwt("\xf0\x90\x80", ord("?"), 1) # error
        self.gwt("\xf0\x80\x80\x80", ord("?"), 1) # error
        self.gwt("\xf0\x90\x80\x80", 0x10000, 4)
        self.gwt("\xf3\xbf\xbf\xbf", 0xfffff, 4)
| lgpl-2.1 | 6,452,453,846,512,600,000 | 33 | 67 | 0.520668 | false |
igabriel85/dmon-adp | misc/keras_test.py | 1 | 1530 | import numpy
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.utils import np_utils
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import LabelEncoder
from sklearn.pipeline import Pipeline
import os, sys
# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)
dataDir = os.path.join(os.path.dirname(os.path.abspath('')), 'data')
# load dataset
dataframe = pd.read_csv(os.path.join(dataDir, 'iris.csv'))
dataset = dataframe.values
X = dataset[:,0:4].astype(float)
Y = dataset[:,4]
# print Y
# encode class values as integers
encoder = LabelEncoder()
encoder.fit(Y)
encoded_Y = encoder.transform(Y)
# convert integers to dummy variables (i.e. one hot encoded)
dummy_y = np_utils.to_categorical(encoded_Y)
# print dummy_y
# define baseline model
def baseline_model():
    # create model
    model = Sequential()
    model.add(Dense(8, input_dim=4, activation='relu'))
    model.add(Dense(3, activation='softmax'))
    # Compile model
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
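
# Note (added comment, not in the original script): the 4-8-3 layout matches
# the data above -- four iris features in, eight hidden ReLU units, and a
# 3-way softmax over the one-hot classes produced by
# np_utils.to_categorical, which is why categorical_crossentropy is used.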
estimator = KerasClassifier(build_fn=baseline_model, epochs=200, batch_size=20, verbose=1)
kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
results = cross_val_score(estimator, X, dummy_y, cv=kfold)
print("Baseline: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100)) | apache-2.0 | -2,858,827,367,040,694,300 | 28.442308 | 90 | 0.74183 | false |
dreadworks/college-cg | t2-objv/parser.py | 1 | 10996 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import numpy as np
from util import LOG
log = LOG.out.info
"""
Parser Library.
Every class gets instantiated with the
name of the file that should be parsed.
Every instance of a parser is bound to
a file and thus maintains the files data.
The classes should handle their parsing
internally without the need to invoke an
extra method that handles that actual parsing.
Even though this makes error handling of
the parsing more complicated (from the users
perspective), it enables the possibility
to work on very large data without the need
to store everything in memory at once.
"""
class ParserException(Exception):

    def __str__(self):
        return self.msg

    def __init__(self, msg):
        self.msg = msg
#
#
#
#
class ObjParser(object):
    """
    Parses wavefront obj files.

    Currently supported:
        v, vn, f, s, o

    Ignored:
        usemtl

    """

    class Obj(object):
        """
        Parser for obj entity definitions. This
        is the whole obj file with zero or one
        "o" directives or the part of the obj
        file denoted by "o <name>".

        """

        def _parse(self, line):
            """
            Takes a line and saves the data
            found into the internal data structures.

            :param line: One line of the obj file
            :returns: None
            :rtype: None

            """
            dtype, data = re.split(r' ', line, maxsplit=1)
            data = data.strip()

            #
            # VERTICES
            #
            if dtype == 'v':
                data = map(float, data.split())
                self._vertices.append(np.array(data, 'f'))
                return

            #
            # NORMALS
            #
            if dtype == 'vn':
                data = map(float, data.split())
                self._normals.append(np.array(data, 'f'))
                return

            #
            # FACES
            #
            # save indices of vertices and normals
            # in self._vertices & self._normals
            # and map the vertex to a set of points
            # in self._v2vn
            if dtype == 'f':
                data = map(lambda s: s.split('/'), data.split())
                vertices = map(lambda l: int(l[0]), data)
                normals = []
                normal = None  # cache

                for i, pair in enumerate(data):

                    # 'f v/vt/vn' case
                    if len(pair) == 3:
                        index = int(pair[2])

                    #
                    # calculate new surface normal
                    #
                    else:
                        if normal is not None:
                            index = len(self._normals) - 1
                        else:
                            self.stats['calculated normals'] += 1

                            # retrieve point coordinates
                            v = map(lambda j: self._vertices[j], vertices)

                            # create normalized vectors spanning a plane
                            vectors = (v[0] - v[1]), (v[0] - v[2])

                            # the surface normal is the cross product
                            # of the two vectors spanning the plane
                            normal = np.cross(*vectors)
                            normal = normal / np.linalg.norm(normal)

                            index = len(self._normals)
                            self._normals.append(normal)

                    # save normals index
                    normals.append(index)
                    vnstore = self._v2vn.setdefault(vertices[i], [])
                    vnstore.append(index)

                self._faces += zip(vertices, normals)
                return

            #
            # SMOOTHING GROUPS
            #
            if dtype == 's':
                g = self._smoothing[-1]
                facecount = len(self._faces)

                if data == 'off':
                    if len(g) == 1:
                        self._smoothing[-1] += (facecount,)
                    return

                if data == 'on':
                    if len(g) == 2:
                        self._smoothing.append((facecount,))
                    return

            #
            # IGNORE
            #
            if dtype == 'usemtl':
                fmt = dtype, data
                log('ignoring directive %s with data %s' % fmt)
                return

            #
            # NOT FOUND
            #
            msg = 'Could not map directive "%s"'
            raise ParserException(msg % dtype)

        def _smooth(self, group):
            """
            Calculates a new normal for a vertex
            based on all the surface normals on
            that particular point. The provided
            group determines the range of faces
            where the smoothing has to be applied.

            :param group: Tuple denoting the smoothing range
            :returns: None
            :rtype: None

            """
            log('smoothing normals between %d and %d' % group)
            for i in range(*group):
                v = self._faces[i][0]

                # cache miss
                if not type(self._v2vn[v]) is int:

                    # obtain normals
                    normals = self._v2vn[v]
                    normals = map(lambda i: self._normals[i], normals)

                    # calculate average normal
                    smoothed = sum(normals) / len(normals)
                    smoothed = smoothed / np.linalg.norm(smoothed)

                    # save smoothed normal
                    self._normals.append(smoothed)
                    self._v2vn[v] = len(self._normals) - 1
                    self.stats['smoothed normals'] += 1

                # save new vn to the faces (v, vn) tuple
                self._faces[i] = self._faces[i][0], self._v2vn[v]
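
        # Worked example (added comment, not in the original source): if a
        # vertex is shared by two faces with surface normals (1, 0, 0) and
        # (0, 1, 0), smoothing stores their average (0.5, 0.5, 0),
        # renormalised to roughly (0.7071, 0.7071, 0), and every face corner
        # at that vertex then reuses the cached index in self._v2vn.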
        def __init__(self, name, data):
            """
            Initialize the object parser and
            parse the file provided.

            :param name: Name of the object
            :param data: List of strings with obj definitions
            :returns: self
            :rtype: parser.ObjParser.Obj

            """
            self._name = name

            # used for an efficient calculation
            # of smoothed surfaces and to retrieve
            # normals per vertex when serving faces
            self._v2vn = {}

            # just for statistics
            self.stats = {
                'calculated normals': 0,
                'smoothed normals': 0}

            # enumerations in obj's begin
            # with value 1 (for whatever reason...)
            # hence the None element.
            self._vertices = [None]   # [None, (x, y, z)₀, ...] ()₀ is np.array
            self._normals = [None]    # [None, (x, y, z)₀, ...] ()₀ is np.array
            self._smoothing = [(0,)]  # Ranges of faces where
                                      # smoothing is activated
            self._faces = []          # [(v₀, vn₀), (v₁, vn₁), ...],
                                      # v and vn as indices of elements in
                                      # self._vertices and self._normals

            # split data line-wise and remove
            # empty lines and comments
            sanitize = lambda s: s and not s.startswith('#')
            data = filter(sanitize, data.split('\n'))
            log('analyzing %d lines of raw data' % len(data))

            # analyze data line by line
            # note: len is an O(1) operation
            for line in data:
                try:
                    self._parse(line)
                except Exception as e:
                    msg = 'Could not parse line "%s"\nbecause of %s: %s'
                    fmt = (line, type(e), str(e))
                    raise ParserException(msg % fmt)

            # if smoothing never got
            # explicitly turned off
            if len(self._smoothing[-1]) == 1:
                self._smoothing[-1] += (len(self._faces),)

            # smooth if necessary
            for group in self._smoothing:
                self._smooth(group)

            # for -verbose
            fmt = [len(self._vertices), len(self._normals)]
            fmt = tuple(map(lambda x: x - 1, fmt))
            log('got %d vertices and %d normals' % fmt)

            fmt = len(self._faces)
            log('got %d vertex/vertex normal pairs for faces' % fmt)

            fmt = self.stats['calculated normals']
            log('calculated %d normals' % fmt)

            fmt = self.stats['smoothed normals']
            fmt = fmt, sum([y - x for x, y in self._smoothing])
            log('calculated %d smoothed normals of %d definitions' % fmt)
        #
        # PROPERTIES
        #
        # @property
        def name(self):
            """
            Returns the objects name

            :returns: The name
            :rtype: string

            """
            return self._name

        @property
        def vertices(self):
            """
            Returns a numpy array of vertex coordinates.

            :returns: Vertex coordinates
            :rtype: numpy.Array

            """
            return np.array(self._vertices[1:], 'f')

        @property
        def faces(self):
            """
            Returns the faces as a numpy array consisting
            of v, vn pairs, where v are vertex coordinates
            and vn surface normal coordinates.

            :returns: Geometrical description of faces
            :rtype: numpy.Array

            """
            v, vn = zip(*self._faces)
            v = map(lambda i: self._vertices[i], v)
            vn = map(lambda i: self._normals[i], vn)
            return np.array(zip(v, vn), 'f')
    #
    #
    #
    #
    #
    def __init__(self, fname):
        """
        Parses an wavefront obj file. For every
        defined object an ObjParser.Obj object
        is created.

        :param fname: File name
        :returns: self
        :rtype: parser.ObjParser

        """
        log('parsing %s' % fname)

        with open(fname) as f:
            data = f.read()
            self._objects = []
            add = self._objects.append

            log('parsing data of size %d' % len(data))

            objs = re.split(r'^o (.*)', data)

            # no 'o'-directive found
            if len(objs) == 1:
                add(ObjParser.Obj(fname.rstrip('.obj'), objs[0]))

            # multiple objects per obj
            for name, data in zip(objs[1::2], objs[2::2]):
                add(ObjParser.Obj(name, data))

            return

        raise ParserException("Could not open file")

    @property
    def objects(self):
        """
        Return all parsed objects

        :returns: Parsed objects
        :rtype: List of parse.ObjParser.Obj objects

        """
        return self._objects
| mit | 1,192,911,812,575,661,000 | 28.202128 | 79 | 0.463388 | false |
fujunwei/chromium-crosswalk | tools/telemetry/telemetry/core/platform/power_monitor/power_monitor_controller.py | 69 | 1214 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import telemetry.core.platform.power_monitor as power_monitor
class PowerMonitorController(power_monitor.PowerMonitor):
  """
  PowerMonitor that acts as facade for a list of PowerMonitor objects and uses
  the first available one.
  """
  def __init__(self, power_monitors):
    super(PowerMonitorController, self).__init__()
    self._cascading_power_monitors = power_monitors
    self._active_monitor = None

  def _AsyncPowerMonitor(self):
    return next(
        (x for x in self._cascading_power_monitors if x.CanMonitorPower()),
        None)
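
  # Added comment (not in the original source): monitors earlier in the list
  # take precedence; the facade picks the first monitor whose
  # CanMonitorPower() is True, e.g. PowerMonitorController([battery_monitor,
  # fallback_monitor]) prefers the battery monitor whenever it is available.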
  def CanMonitorPower(self):
    return bool(self._AsyncPowerMonitor())

  def StartMonitoringPower(self, browser):
    self._active_monitor = self._AsyncPowerMonitor()
    assert self._active_monitor, 'No available monitor.'
    self._active_monitor.StartMonitoringPower(browser)

  def StopMonitoringPower(self):
    assert self._active_monitor, 'StartMonitoringPower() not called.'
    try:
      return self._active_monitor.StopMonitoringPower()
    finally:
      self._active_monitor = None
| bsd-3-clause | 4,080,641,003,020,368,400 | 32.722222 | 78 | 0.726524 | false |
globocom/oauth2u | tests/helpers.py | 1 | 2130 | import sys
import urllib
import cgi
import base64
import json
from functools import partial
import requests
__all__ = ('TEST_SERVER_HOST',
           'build_root_url',
           'build_basic_authorization_header',
           'build_access_token_url',
           'parse_json_response',
           'parse_query_string',
           'get_code_from_url',
           'request_authorization_code')

TEST_SERVER_HOST = 'http://localhost:8888'


def build_url(host, path, query=None):
    query = query or {}
    return u'{0}/{1}?{2}'.format(host.rstrip('/'),
                                 path.lstrip('/'),
                                 urllib.urlencode(query))

build_root_url = partial(build_url, TEST_SERVER_HOST)
build_authorize_url = partial(build_url, TEST_SERVER_HOST, '/authorize')
build_access_token_url = partial(build_url, TEST_SERVER_HOST, '/access-token')


def parse_json_response(response):
    assert 'application/json; charset=UTF-8' == response.headers['content-type']
    return json.loads(response.content)


def parse_query_string(url):
    url, query_string = url.split('?')
    query = dict(cgi.parse_qsl(query_string))
    return url, query


def get_code_from_url(url):
    ''' Given an url returns the 'code' GET parameter '''
    query = dict(cgi.parse_qsl(url.split('?')[1]))
    return query['code']


def request_authorization_code(client_id='123',
                               redirect_uri='http://callback'):
    ''' Performs a GET on the authorization request url, waits for the
    redirect and returns the code provided
    '''
    url = build_authorize_url({'client_id': client_id,
                               'response_type': 'code',
                               'redirect_uri': redirect_uri})
    resp = requests.get(url, allow_redirects=False)
    assert 302 == resp.status_code

    code = get_code_from_url(resp.headers['Location'])
    return code


def build_basic_authorization_header(client_id, code):
    ''' Build the value for a Basic ``Authorization`` HTTP header '''
    digest = base64.b64encode('{0}:{1}'.format(client_id, code))
    return 'Basic {0}'.format(digest)
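
# Illustrative values (added comment, not part of the original helpers):
#
#     build_basic_authorization_header('client-id', 'code')
#     -> 'Basic ' + base64('client-id:code')
#
# i.e. the standard HTTP Basic scheme with the client id as the username and
# the authorization code as the password.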
| mit | 7,794,158,819,076,828,000 | 29.869565 | 80 | 0.613146 | false |
Conchylicultor/DeepQA | chatbot/corpus/scotusdata.py | 10 | 1602 | # Copyright 2015 Conchylicultor. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
"""
Load transcripts from the Supreme Court of the USA.
Available from here:
https://github.com/pender/chatbot-rnn
"""
class ScotusData:
    """
    """

    def __init__(self, dirName):
        """
        Args:
            dirName (string): directory where to load the corpus
        """
        self.lines = self.loadLines(os.path.join(dirName, "scotus"))
        self.conversations = [{"lines": self.lines}]

    def loadLines(self, fileName):
        """
        Args:
            fileName (str): file to load
        Return:
            list<dict<str>>: the extracted fields for each line
        """
        lines = []

        with open(fileName, 'r') as f:
            for line in f:
                l = line[line.index(":")+1:].strip()  # Strip name of speaker.
                lines.append({"text": l})

        return lines

    def getConversations(self):
        return self.conversations
| apache-2.0 | -52,163,720,397,949,330 | 26.62069 | 80 | 0.594881 | false |
DevMine/devmine-core | devmine/__init__.py | 1 | 1683 | __devmine_version__ = '0.1.0'
__api_version__ = '1'
import logging
import bottle
from bottle.ext import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from devmine.app.models import Base
from devmine.config import routes
from devmine.lib import composition
class Devmine:

    def __init__(self,
                 server='auto',
                 host='0.0.0.0',
                 port=8080,
                 db_url='sqlite:///:memory:',
                 db_echo=False,
                 reloader=False,
                 debug=False):
        self.server_type = server
        self.host = host
        self.port = port
        self.reloader = reloader
        self.debug = debug
        self.api_version = __api_version__
        self.devmine_version = __devmine_version__

        self.app = bottle.Bottle()
        routes.setup_routing(self.app)
        bottle.debug(self.debug)

        engine = create_engine(db_url, echo=db_echo)
        sqlalchemy_plugin = sqlalchemy.Plugin(
            engine,
            Base.metadata,
            keyword='db',
            create=True,
            commit=True,
            use_kwargs=False
        )
        self.app.install(sqlalchemy_plugin)

        create_session = sessionmaker(bind=engine)
        session = create_session()
        logging.info('Prefetching the scores matrix...')
        composition.get_scores_matrix(session)
        session.close()

    @staticmethod
    def get_version():
        """Return devmine version."""
        return __devmine_version__

    @staticmethod
    def get_api_version():
        """Return devmine API version."""
        return __api_version__
| bsd-3-clause | 3,464,901,256,400,185,300 | 24.119403 | 56 | 0.571598 | false |
walterreade/scikit-learn | sklearn/externals/joblib/my_exceptions.py | 31 | 3690 | """
Exceptions
"""
# Author: Gael Varoquaux < gael dot varoquaux at normalesup dot org >
# Copyright: 2010, Gael Varoquaux
# License: BSD 3 clause
import sys
from ._compat import PY3_OR_LATER
class JoblibException(Exception):
    """A simple exception with an error message that you can get to."""

    def __init__(self, *args):
        # We need to implement __init__ so that it is picked in the
        # multiple inheritance hierarchy in the class created in
        # _mk_exception. Note: in Python 2, if you implement __init__
        # in your exception class you need to set .args correctly,
        # otherwise you can dump an exception instance with pickle but
        # not load it (at load time an empty .args will be passed to
        # the constructor). Also we want to be explicit and not use
        # 'super' here. Using 'super' can cause a sibling class method
        # to be called and we have no control over the sibling class
        # method's constructor signature in the exception returned by
        # _mk_exception.
        Exception.__init__(self, *args)

    def __repr__(self):
        if hasattr(self, 'args') and len(self.args) > 0:
            message = self.args[0]
        else:
            message = ''

        name = self.__class__.__name__
        return '%s\n%s\n%s\n%s' % (name, 75 * '_', message, 75 * '_')

    __str__ = __repr__


class TransportableException(JoblibException):
    """An exception containing all the info to wrap an original
    exception and recreate it.
    """

    def __init__(self, message, etype):
        # The next line sets the .args correctly. This is needed to
        # make the exception loadable with pickle
        JoblibException.__init__(self, message, etype)
        self.message = message
        self.etype = etype
_exception_mapping = dict()
def _mk_exception(exception, name=None):
    # Create an exception inheriting from both JoblibException
    # and that exception
    if name is None:
        name = exception.__name__
    this_name = 'Joblib%s' % name
    if this_name in _exception_mapping:
        # Avoid creating twice the same exception
        this_exception = _exception_mapping[this_name]
    else:
        if exception is Exception:
            # JoblibException is already a subclass of Exception. No
            # need to use multiple inheritance
            return JoblibException, this_name
        try:
            this_exception = type(
                this_name, (JoblibException, exception), {})
            _exception_mapping[this_name] = this_exception
        except TypeError:
            # This happens if "Cannot create a consistent method
            # resolution order", e.g. because 'exception' is a
            # subclass of JoblibException or 'exception' is not an
            # acceptable base class
            this_exception = JoblibException

    return this_exception, this_name
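
# Illustrative usage (added comment, not part of the original module):
#
#     JoblibValueError, name = _mk_exception(ValueError)
#     # name == 'JoblibValueError'; the generated class subclasses both
#     # JoblibException and ValueError, so it can be caught as either one.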
def _mk_common_exceptions():
    namespace = dict()
    if PY3_OR_LATER:
        import builtins as _builtin_exceptions
        common_exceptions = filter(
            lambda x: x.endswith('Error'),
            dir(_builtin_exceptions))
    else:
        import exceptions as _builtin_exceptions
        common_exceptions = dir(_builtin_exceptions)

    for name in common_exceptions:
        obj = getattr(_builtin_exceptions, name)
        if isinstance(obj, type) and issubclass(obj, BaseException):
            this_obj, this_name = _mk_exception(obj, name=name)
            namespace[this_name] = this_obj
    return namespace
# Updating module locals so that the exceptions pickle right. AFAIK this
# works only at module-creation time
locals().update(_mk_common_exceptions())
| bsd-3-clause | 1,029,027,122,358,540,800 | 34.142857 | 72 | 0.630623 | false |
alianmohammad/pd-gem5 | src/arch/micro_asm_test.py | 86 | 3195 | # Copyright (c) 2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from micro_asm import MicroAssembler, Combinational_Macroop, Rom_Macroop, Rom

class Bah(object):
    def __init__(self):
        self.mnemonic = "bah"

class Bah_Tweaked(object):
    def __init__(self):
        self.mnemonic = "bah_tweaked"

class Hoop(object):
    def __init__(self, first_param, second_param):
        self.mnemonic = "hoop_%s_%s" % (first_param, second_param)
    def __str__(self):
        return "%s" % self.mnemonic

class Dah(object):
    def __init__(self):
        self.mnemonic = "dah"

microops = {
    "bah": Bah,
    "hoop": Hoop,
    "dah": Dah
}

class TestMacroop(Combinational_Macroop):
    def tweak(self):
        microops["bah"] = Bah_Tweaked
    def untweak(self):
        microops["bah"] = Bah
    def print_debug(self, message):
        print message

    def __init__(self, name):
        super(TestMacroop, self).__init__(name)
        self.directives = {
            "tweak": self.tweak,
            "untweak": self.untweak,
            "print": self.print_debug
        }

assembler = MicroAssembler(TestMacroop, microops, Rom('main ROM'), Rom_Macroop)

testAssembly = '''
# Single line comment

def rom {
    goo: bah
    extern la: hoop 4*8, "a"
}; /* multiline comment on one line */

/* multi line comment across lines
   to make sure they work */

def macroop squishy {
    .tweak
    bah
    .untweak
    .print "In the midst"
    bah
    dah # single line comment after something
    .tweak
};

#Extending the rom...
def rom
{
    #Here's more stuff for the rom
    bah
};

def macroop squashy {
    bah
};

def macroop jumper (bar);
'''
assembler.assemble(testAssembly)
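
# Note (added comment, not in the original test): assembling the text above
# exercises both container types -- the two `def rom` blocks extend the
# shared Rom('main ROM'), while each `def macroop` produces a TestMacroop
# whose .tweak/.untweak directives swap the "bah" microop class between Bah
# and Bah_Tweaked mid-assembly.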
| bsd-3-clause | -4,477,015,458,987,570,700 | 28.859813 | 79 | 0.696088 | false |
akash1808/nova_test_latest | nova/tests/unit/scheduler/weights/test_weights_ioopsweight.py | 73 | 2785 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler IoOpsWeigher weights
"""
from nova.scheduler import weights
from nova.scheduler.weights import io_ops
from nova import test
from nova.tests.unit.scheduler import fakes
class IoOpsWeigherTestCase(test.NoDBTestCase):

    def setUp(self):
        super(IoOpsWeigherTestCase, self).setUp()
        self.weight_handler = weights.HostWeightHandler()
        self.weighers = [io_ops.IoOpsWeigher()]

    def _get_weighed_host(self, hosts, io_ops_weight_multiplier):
        if io_ops_weight_multiplier is not None:
            self.flags(io_ops_weight_multiplier=io_ops_weight_multiplier)
        return self.weight_handler.get_weighed_objects(self.weighers,
                                                       hosts, {})[0]

    def _get_all_hosts(self):
        host_values = [
            ('host1', 'node1', {'num_io_ops': 1}),
            ('host2', 'node2', {'num_io_ops': 2}),
            ('host3', 'node3', {'num_io_ops': 0}),
            ('host4', 'node4', {'num_io_ops': 4})
        ]
        return [fakes.FakeHostState(host, node, values)
                for host, node, values in host_values]

    def _do_test(self, io_ops_weight_multiplier, expected_weight,
                 expected_host):
        hostinfo_list = self._get_all_hosts()
        weighed_host = self._get_weighed_host(hostinfo_list,
                                              io_ops_weight_multiplier)
        self.assertEqual(weighed_host.weight, expected_weight)
        if expected_host:
            self.assertEqual(weighed_host.obj.host, expected_host)

    def test_io_ops_weight_multiplier_by_default(self):
        self._do_test(io_ops_weight_multiplier=None,
                      expected_weight=0.0,
                      expected_host='host3')

    def test_io_ops_weight_multiplier_zero_value(self):
        # We do not know the host, all have same weight.
        self._do_test(io_ops_weight_multiplier=0.0,
                      expected_weight=0.0,
                      expected_host=None)

    def test_io_ops_weight_multiplier_positive_value(self):
        self._do_test(io_ops_weight_multiplier=2.0,
                      expected_weight=2.0,
                      expected_host='host4')
| apache-2.0 | -8,470,592,831,959,123,000 | 39.955882 | 78 | 0.61149 | false |
jabez1314/shadowsocks | tests/coverage_server.py | 1072 | 1655 | #!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
if __name__ == '__main__':
    import tornado.ioloop
    import tornado.web
    import urllib

    class MainHandler(tornado.web.RequestHandler):
        def get(self, project):
            try:
                with open('/tmp/%s-coverage' % project, 'rb') as f:
                    coverage = f.read().strip()
                    n = int(coverage.strip('%'))
                    if n >= 80:
                        color = 'brightgreen'
                    else:
                        color = 'yellow'
                    self.redirect(('https://img.shields.io/badge/'
                                   'coverage-%s-%s.svg'
                                   '?style=flat') %
                                  (urllib.quote(coverage), color))
            except IOError:
                raise tornado.web.HTTPError(404)

    application = tornado.web.Application([
        (r"/([a-zA-Z0-9\-_]+)", MainHandler),
    ])

    if __name__ == "__main__":
        application.listen(8888, address='127.0.0.1')
        tornado.ioloop.IOLoop.instance().start()
| apache-2.0 | -2,438,028,363,540,857,300 | 35.777778 | 75 | 0.555287 | false |
BeATz-UnKNoWN/python-for-android | python-build/python-libs/gdata/src/gdata/alt/app_engine.py | 136 | 3386 | #!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides functions to persist serialized auth tokens in the datastore.
The get_token and set_token functions should be used in conjunction with
gdata.gauth's token_from_blob and token_to_blob to allow auth token objects
to be reused across requests. It is up to your own code to ensure that the
token keys are unique.
"""
__author__ = '[email protected] (Jeff Scudder)'
from google.appengine.ext import db
from google.appengine.api import memcache
class Token(db.Model):
"""Datastore Model which stores a serialized auth token."""
t = db.BlobProperty()
def get_token(unique_key):
"""Searches for a stored token with the desired key.
Checks memcache and then the datastore if required.
Args:
unique_key: str which uniquely identifies the desired auth token.
Returns:
A string encoding the auth token data. Use gdata.gauth.token_from_blob to
convert back into a usable token object. None if the token was not found
in memcache or the datastore.
"""
token_string = memcache.get(unique_key)
if token_string is None:
# The token wasn't in memcache, so look in the datastore.
token = Token.get_by_key_name(unique_key)
if token is None:
return None
return token.t
return token_string
def set_token(unique_key, token_str):
"""Saves the serialized auth token in the datastore.
The token is also stored in memcache to speed up retrieval on a cache hit.
Args:
unique_key: The unique name for this token as a string. It is up to your
code to ensure that this token value is unique in your application.
    Previous values will be silently overwritten.
token_str: A serialized auth token as a string. I expect that this string
will be generated by gdata.gauth.token_to_blob.
Returns:
    True if the token was stored successfully, False if the token could not be
safely cached (if an old value could not be cleared). If the token was
set in memcache, but not in the datastore, this function will return None.
However, in that situation an exception will likely be raised.
Raises:
Datastore exceptions may be raised from the App Engine SDK in the event of
failure.
"""
# First try to save in memcache.
result = memcache.set(unique_key, token_str)
# If memcache fails to save the value, clear the cached value.
if not result:
result = memcache.delete(unique_key)
# If we could not clear the cached value for this token, refuse to save.
if result == 0:
return False
# Save to the datastore.
if Token(key_name=unique_key, t=token_str).put():
return True
return None
def delete_token(unique_key):
# Clear from memcache.
memcache.delete(unique_key)
# Clear from the datastore.
Token(key_name=unique_key).delete()
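# Round-trip sketch (illustrative; `client` and the key naming are
# hypothetical, while token_to_blob/token_from_blob are the gdata.gauth
# helpers the module docstring refers to):
def _example_round_trip(client, unique_key):
  import gdata.gauth
  # Persist the client's current auth token for later requests.
  set_token(unique_key, gdata.gauth.token_to_blob(client.auth_token))
  # On a later request, restore it if present.
  blob = get_token(unique_key)
  if blob is not None:
    client.auth_token = gdata.gauth.token_from_blob(blob)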
| apache-2.0 | 4,032,265,594,405,438,500 | 32.524752 | 78 | 0.723272 | false |
leafclick/intellij-community | python/helpers/py2only/docutils/languages/fr.py | 148 | 1893 | # $Id: fr.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Stefane Fermigier <[email protected]>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
French-language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
labels = {
u'author': u'Auteur',
u'authors': u'Auteurs',
u'organization': u'Organisation',
u'address': u'Adresse',
u'contact': u'Contact',
u'version': u'Version',
u'revision': u'R\u00e9vision',
u'status': u'Statut',
u'date': u'Date',
u'copyright': u'Copyright',
u'dedication': u'D\u00e9dicace',
u'abstract': u'R\u00e9sum\u00e9',
u'attention': u'Attention!',
u'caution': u'Avertissement!',
u'danger': u'!DANGER!',
u'error': u'Erreur',
u'hint': u'Indication',
u'important': u'Important',
u'note': u'Note',
u'tip': u'Astuce',
u'warning': u'Avis',
u'contents': u'Sommaire'}
"""Mapping of node class name to label text."""
bibliographic_fields = {
u'auteur': u'author',
u'auteurs': u'authors',
u'organisation': u'organization',
u'adresse': u'address',
u'contact': u'contact',
u'version': u'version',
u'r\u00e9vision': u'revision',
u'statut': u'status',
u'date': u'date',
u'copyright': u'copyright',
u'd\u00e9dicace': u'dedication',
u'r\u00e9sum\u00e9': u'abstract'}
"""French (lowcased) to canonical name mapping for bibliographic fields."""
author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
| apache-2.0 | -3,634,663,531,247,253,000 | 31.637931 | 76 | 0.628104 | false |
play113/swer | heekscnc-read-only/nc/nc.py | 25 | 20718 | ################################################################################
# nc.py
#
# Base class for NC code creation
# And global functions for calling current creator
#
# Hirutso Enni, 2009-01-13
# altered by Dan Falck 2010-08-04
# added tap() arguments Michael Haberler 2010-10-07
################################################################################
ncOFF = 0
ncLEFT = -1
ncRIGHT = +1
ncCW = -1
ncCCW = +1
ncMIST = 1
ncFLOOD = 2
################################################################################
class Creator:
def __init__(self):
pass
############################################################################
## Internals
def file_open(self, name):
self.file = open(name, 'w')
self.filename = name
def file_close(self):
self.file.close()
def write(self, s):
self.file.write(s)
############################################################################
## Programs
def program_begin(self, id, name=''):
"""Begin a program"""
pass
def add_stock(self, type_name, params):
pass
def program_stop(self, optional=False):
"""Stop the machine"""
pass
def program_end(self):
"""End the program"""
pass
def flush_nc(self):
"""Flush all pending codes"""
pass
############################################################################
## Subprograms
def sub_begin(self, id, name=''):
"""Begin a subprogram"""
pass
def sub_call(self, id):
"""Call a subprogram"""
pass
def sub_end(self):
"""Return from a subprogram"""
pass
############################################################################
## Settings
def imperial(self):
"""Set imperial units"""
pass
def metric(self):
"""Set metric units"""
pass
def absolute(self):
"""Set absolute coordinates"""
pass
def incremental(self):
"""Set incremental coordinates"""
pass
def polar(self, on=True):
"""Set polar coordinates"""
pass
def set_plane(self, plane):
"""Set plane"""
pass
def set_temporary_origin(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Set temporary origin G92"""
pass
def remove_temporary_origin(self):
"""Remote temporary origin G92.1"""
pass
############################################################################
## Tools
def tool_change(self, id):
"""Change the tool"""
pass
def tool_defn(self, id, name='', params=None):
"""Define a tool"""
pass
def offset_radius(self, id, radius=None):
"""Set tool radius offsetting"""
pass
def offset_length(self, id, length=None):
"""Set tool length offsetting"""
pass
def current_tool(self):
return None
############################################################################
## Datums
def datum_shift(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Shift the datum"""
pass
def datum_set(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Set the datum"""
pass
def workplane(self, id):
"""Set the workplane"""
pass
def clearanceplane(self,z=None):
"""set clearance plane"""
pass
############################################################################
## APT360 like Transformation Definitions
## These definitions were created while looking at Irvin Kraal's book on APT
## - Numerical Control Progamming in APT - page 211
def matrix(self,a1=None,b1=None,c1=None,a2=None,b2=None,c2=None,a3=None,b3=None,c3=None):
"""Create a matrix for transformations"""
pass
def translate(self,x=None,y=None,z=None):
"""Translate in x,y,z direction"""
pass
def rotate(self,xyrot=None,yzrot=None,zxrot=None,angle=None):
"""Rotate about a coordinate axis"""
pass
def scale(self,k=None):
"""Scale by factor k"""
pass
def matrix_product(self,matrix1=None,matrix2=None):
"""Create matrix that is the product of two other matrices"""
pass
def mirror_plane(self,plane1=None,plane2=None,plane3=None):
"""Mirror image about one or more coordinate planes"""
pass
def mirror_line(self,line=None):
"""Mirror about a line"""
pass
############################################################################
## Rates + Modes
def feedrate(self, f):
"""Set the feedrate"""
pass
def feedrate_hv(self, fh, fv):
"""Set the horizontal and vertical feedrates"""
pass
def spindle(self, s, clockwise=True):
"""Set the spindle speed"""
pass
def coolant(self, mode=0):
"""Set the coolant mode"""
pass
def gearrange(self, gear=0):
"""Set the gear range"""
pass
############################################################################
## Moves
def rapid(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Rapid move"""
pass
def feed(self, x=None, y=None, z=None, a = None, b = None, c = None):
"""Feed move"""
pass
def arc_cw(self, x=None, y=None, z=None, i=None, j=None, k=None, r=None):
"""Clockwise arc move"""
pass
def arc_ccw(self, x=None, y=None, z=None, i=None, j=None, k=None, r=None):
"""Counterclockwise arc move"""
pass
def dwell(self, t):
"""Dwell"""
pass
def rapid_home(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Rapid relative to home position"""
pass
def rapid_unhome(self):
"""Return from rapid home"""
pass
def set_machine_coordinates(self):
"""Set machine coordinates"""
pass
############################################################################
## Cutter radius compensation
def use_CRC(self):
"""CRC"""
return False
############################################################################
## Cycles
def pattern(self):
"""Simple pattern eg. circle, rect"""
pass
def pocket(self):
"""Pocket routine"""
pass
def profile(self):
"""Profile routine"""
pass
def drill(self, x=None, y=None, dwell=None, depthparams = None, retract_mode=None, spindle_mode=None, internal_coolant_on=None, rapid_to_clearance=None):
"""Drilling routines"""
pass
# original prototype was:
# def tap(self, x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, pitch=None, stoppos=None, spin_in=None, spin_out=None):
#
# current call is like so:
# tap(x=10, y=10, z=0, tap_mode=0, depth=12.7, standoff=6.35, direction=0, pitch=1.25)
# just add tap_mode & direction parameters
def tap(self, x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, pitch=None, stoppos=None, spin_in=None, spin_out=None, tap_mode=None, direction=None):
"""Tapping routines"""
pass
def bore(self, x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, feed_in=None, feed_out=None, stoppos=None, shift_back=None, shift_right=None, backbore=False, stop=False):
"""Boring routines"""
pass
def end_canned_cycle(self):
pass
############################################################################
## Misc
def comment(self, text):
"""Insert a comment"""
pass
def insert(self, text):
"""APT style INSERT statement"""
pass
def block_delete(self, on=False):
"""block to ignore if block delete switch is on"""
pass
def variable(self, id):
"""Insert a variable"""
pass
def variable_set(self, id, value):
"""Set a variable"""
pass
def probe_linear_centre_outside(self, x1=None, y1=None, depth=None, x2=None, y2=None ):
pass
def probe_single_point(self, point_along_edge_x=None, point_along_edge_y=None, depth=None, retracted_point_x=None, retracted_point_y=None, destination_point_x=None, destination_point_y=None, intersection_variable_x=None, intersection_variable_y=None, probe_offset_x_component=None, probe_offset_y_component=None ):
pass
def probe_downward_point(self, x=None, y=None, depth=None, intersection_variable_z=None):
pass
def report_probe_results(self, x1=None, y1=None, z1=None, x2=None, y2=None, z2=None, x3=None, y3=None, z3=None, x4=None, y4=None, z4=None, x5=None, y5=None, z5=None, x6=None, y6=None, z6=None, xml_file_name=None ):
pass
def open_log_file(self, xml_file_name=None ):
pass
def log_coordinate(self, x=None, y=None, z=None):
pass
def log_message(self, message=None):
pass
def close_log_file(self):
pass
def rapid_to_midpoint(self, x1=None, y1=None, z1=None, x2=None, y2=None, z2=None):
pass
def rapid_to_intersection(self, x1, y1, x2, y2, x3, y3, x4, y4, intersection_x, intersection_y, ua_numerator, ua_denominator, ua, ub_numerator, ub):
pass
def rapid_to_rotated_coordinate(self, x1, y1, x2, y2, ref_x, ref_y, x_current, y_current, x_final, y_final):
pass
def set_path_control_mode(self, mode, motion_blending_tolerance, naive_cam_tolerance ):
pass
############################################################################
## NC code creator for additive machines like RepRap
def wipe(self):
"""wipe routine"""
pass
def extruder_on(self):
"""Turn on the extruder"""
pass
def extruder_off(self):
"""turn off the extruder"""
pass
def set_extruder_flowrate(self, flowrate):
"""Set the flowrate for the extruder"""
pass
def extruder_temp(self, temp):
"""Set the extruder temp in celsius"""
pass
def fan_on(self):
"""turn on the cooling fan"""
pass
def fan_off(self):
"""turn off the cooling fan"""
pass
def build_bed_temp(self, temp):
"""Set the bed temp in celsius"""
pass
def chamber_temp(self, temp):
"""Set the chamber temp in celsius"""
pass
def begin_ncblock(self):
# if the moves have come from backplotting nc code, then the nc code text can be given with these three functions
pass
def end_ncblock(self):
pass
def add_text(self, s, col, cdata):
pass
################################################################################
creator = Creator()
############################################################################
## Internals
def write(s):
creator.write(s)
def output(filename):
creator.file_open(filename)
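# How this module is normally driven (illustrative sketch; MyMachineCreator
# is a hypothetical name): a machine-specific post-processor subclasses
# Creator, overrides the moves it cares about, and installs itself as the
# module-level `creator`, after which the plain functions below emit that
# machine's codes.
#
#   import nc
#   class MyMachineCreator(nc.Creator):
#       def rapid(self, x=None, y=None, z=None, a=None, b=None, c=None):
#           self.write('G00 X%.3f Y%.3f\n' % (x or 0, y or 0))
#   nc.creator = MyMachineCreator()
#   nc.output('part.ngc')
#   nc.rapid(x=10, y=20)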
############################################################################
## Programs
def program_begin(id, name=''):
creator.program_begin(id, name)
def add_stock(type_name, params):
creator.add_stock(type_name, params)
def program_stop(optional=False):
creator.program_stop(optional)
def program_end():
creator.program_end()
def flush_nc():
creator.flush_nc()
############################################################################
## Subprograms
def sub_begin(id, name=''):
creator.sub_begin(id, name)
def sub_call(id):
creator.sub_call(id)
def sub_end():
creator.sub_end()
############################################################################
## Settings
def imperial():
creator.imperial()
def metric():
creator.metric()
def absolute():
creator.absolute()
def incremental():
creator.incremental()
def polar(on=True):
creator.polar(on)
def set_plane(plane):
creator.set_plane(plane)
def set_temporary_origin(x=None, y=None, z=None, a=None, b=None, c=None):
creator.set_temporary_origin(x,y,z,a,b,c)
def remove_temporary_origin():
creator.remove_temporary_origin()
############################################################################
## Tools
def tool_change(id):
creator.tool_change(id)
def tool_defn(id, name='', params=None):
creator.tool_defn(id, name, params)
def offset_radius(id, radius=None):
creator.offset_radius(id, radius)
def offset_length(id, length=None):
creator.offset_length(id, length)
def current_tool():
    return creator.current_tool()
############################################################################
## Datums
def datum_shift(x=None, y=None, z=None, a=None, b=None, c=None):
creator.datum_shift(x, y, z, a, b, c)
def datum_set(x=None, y=None, z=None, a=None, b=None, c=None):
creator.datum_set(x, y, z, a, b, c)
def workplane(id):
creator.workplane(id)
def clearanceplane(z=None):
creator.clearanceplane(z)
############################################################################
## APT360 like Transformation Definitions
## These definitions were created while looking at Irvin Kraal's book on APT
## - Numerical Control Progamming in APT - page 211
def matrix(a1=None,b1=None,c1=None,a2=None,b2=None,c2=None,a3=None,b3=None,c3=None):
creator.matrix(a1,b1,c1,a2,b2,c2,a3,b3,c3)
def translate(x=None,y=None,z=None):
creator.translate(x,y,z)
def rotate(xyrot=None,yzrot=None,zxrot=None,angle=None):
creator.rotate(xyrot,yzrot,zxrot,angle)
def scale(k=None):
creator.scale(k)
def matrix_product(matrix1=None,matrix2=None):
creator.matrix_product(matrix1,matrix2)
def mirror_plane(plane1=None,plane2=None,plane3=None):
creator.mirror_plane(plane1,plane2,plane3)
def mirror_line(line=None):
creator.mirror_line(line)
############################################################################
## Rates + Modes
def feedrate(f):
creator.feedrate(f)
def feedrate_hv(fh, fv):
creator.feedrate_hv(fh, fv)
def spindle(s, clockwise=True):
creator.spindle(s, clockwise)
def coolant(mode=0):
creator.coolant(mode)
def gearrange(gear=0):
creator.gearrange(gear)
############################################################################
## Moves
def rapid(x=None, y=None, z=None, a=None, b=None, c=None):
creator.rapid(x, y, z, a, b, c)
def feed(x=None, y=None, z=None, a = None, b = None, c = None):
    creator.feed(x, y, z, a, b, c)
def arc_cw(x=None, y=None, z=None, i=None, j=None, k=None, r=None):
creator.arc_cw(x, y, z, i, j, k, r)
def arc_ccw(x=None, y=None, z=None, i=None, j=None, k=None, r=None):
creator.arc_ccw(x, y, z, i, j, k, r)
def dwell(t):
creator.dwell(t)
def rapid_home(x=None, y=None, z=None, a=None, b=None, c=None):
creator.rapid_home(x, y, z, a, b, c)
def rapid_unhome():
creator.rapid_unhome()
def set_machine_coordinates():
creator.set_machine_coordinates()
############################################################################
## Cutter radius compensation
def use_CRC():
return creator.use_CRC()
def CRC_nominal_path():
return creator.CRC_nominal_path()
def start_CRC(left = True, radius = 0.0):
creator.start_CRC(left, radius)
def end_CRC():
creator.end_CRC()
############################################################################
## Cycles
def pattern():
creator.pattern()
def pocket():
creator.pocket()
def profile():
creator.profile()
def drill(x=None, y=None, dwell=None, depthparams = None, retract_mode=None, spindle_mode=None, internal_coolant_on=None, rapid_to_clearance=None):
creator.drill(x, y, dwell, depthparams, retract_mode, spindle_mode, internal_coolant_on, rapid_to_clearance)
def tap(x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, pitch=None, stoppos=None, spin_in=None, spin_out=None, tap_mode=None, direction=None):
creator.tap(x, y, z, zretract, depth, standoff, dwell_bottom, pitch, stoppos, spin_in, spin_out, tap_mode, direction)
def bore(x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, feed_in=None, feed_out=None, stoppos=None, shift_back=None, shift_right=None, backbore=False, stop=False):
    creator.bore(x, y, z, zretract, depth, standoff, dwell_bottom, feed_in, feed_out, stoppos, shift_back, shift_right, backbore, stop)
def end_canned_cycle():
creator.end_canned_cycle()
def peck(count, first, last=None, step=0.0):
    pecks = []
    peck = first
    if last is None:
        last = first
    for i in range(0, count):
        pecks.append(peck)
        if peck - step > last:
            peck -= step
    return pecks
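# Example (worked by hand from the loop above): peck(4, 5.0, last=2.0,
# step=1.0) returns [5.0, 4.0, 3.0, 3.0] - each pass steps down by `step`
# while the next peck would still be above `last`, then the final depth is
# repeated for the remaining count.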
############################################################################
## Misc
def comment(text):
creator.comment(text)
def insert(text):
creator.insert(text)
def block_delete(on=False):
creator.block_delete(on)
def variable(id):
creator.variable(id)
def variable_set(id, value):
creator.variable_set(id, value)
def probe_single_point(point_along_edge_x=None, point_along_edge_y=None, depth=None, retracted_point_x=None, retracted_point_y=None, destination_point_x=None, destination_point_y=None, intersection_variable_x=None, intersection_variable_y=None, probe_offset_x_component=None, probe_offset_y_component=None ):
creator.probe_single_point(point_along_edge_x, point_along_edge_y, depth, retracted_point_x, retracted_point_y, destination_point_x, destination_point_y, intersection_variable_x, intersection_variable_y, probe_offset_x_component, probe_offset_y_component )
def probe_downward_point(x=None, y=None, depth=None, intersection_variable_z=None):
creator.probe_downward_point(x, y, depth, intersection_variable_z)
def report_probe_results(x1=None, y1=None, z1=None, x2=None, y2=None, z2=None, x3=None, y3=None, z3=None, x4=None, y4=None, z4=None, x5=None, y5=None, z5=None, x6=None, y6=None, z6=None, xml_file_name=None ):
creator.report_probe_results(x1, y1, z1, x2, y2, z2, x3, y3, z3, x4, y4, z4, x5, y5, z5, x6, y6, z6, xml_file_name)
def open_log_file(xml_file_name=None ):
creator.open_log_file(xml_file_name)
def log_coordinate(x=None, y=None, z=None):
creator.log_coordinate(x, y, z)
def log_message(message=None):
creator.log_message(message)
def close_log_file():
creator.close_log_file()
def rapid_to_midpoint(x1=None, y1=None, z1=None, x2=None, y2=None, z2=None):
creator.rapid_to_midpoint(x1, y1, z1, x2, y2, z2)
def rapid_to_intersection(x1, y1, x2, y2, x3, y3, x4, y4, intersection_x, intersection_y, ua_numerator, ua_denominator, ua, ub_numerator, ub):
creator.rapid_to_intersection(x1, y1, x2, y2, x3, y3, x4, y4, intersection_x, intersection_y, ua_numerator, ua_denominator, ua, ub_numerator, ub)
def rapid_to_rotated_coordinate(x1, y1, x2, y2, ref_x, ref_y, x_current, y_current, x_final, y_final):
creator.rapid_to_rotated_coordinate(x1, y1, x2, y2, ref_x, ref_y, x_current, y_current, x_final, y_final)
def set_path_control_mode(mode, motion_blending_tolerance, naive_cam_tolerance ):
creator.set_path_control_mode(mode, motion_blending_tolerance, naive_cam_tolerance )
############################################################################
## NC code creator for additive machines like RepRap
def wipe():
creator.wipe()
def extruder_on():
creator.extruder_on()
def extruder_off():
creator.extruder_off()
def set_extruder_flowrate(flowrate):
creator.set_extruder_flowrate(flowrate)
def extruder_temp(temp=None):
creator.extruder_temp(temp)
def fan_on():
creator.fan_on()
def fan_off():
creator.fan_off()
def build_bed_temp(temp=None):
creator.build_bed_temp(temp)
def chamber_temp(temp=None):
creator.chamber_temp(temp)
| mit | 6,023,234,747,818,765,000 | 27.597143 | 318 | 0.533884 | false |
ParticulateSolutions/django-paydirekt | django_paydirekt/migrations/0001_initial.py | 1 | 4278 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='PaydirektCapture',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('amount', models.DecimalField(verbose_name='amount', max_digits=9, decimal_places=2)),
('transaction_id', models.CharField(unique=True, max_length=255, verbose_name='transaction id')),
('final', models.BooleanField(default=False, verbose_name='final')),
('link', models.URLField(verbose_name='link')),
('status', models.CharField(max_length=255, verbose_name='status', blank=True)),
('capture_type', models.CharField(max_length=255, verbose_name='capture type', blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('last_modified', models.DateTimeField(auto_now=True, verbose_name='last modified')),
],
options={
'verbose_name': 'Paydirekt Capture',
'verbose_name_plural': 'Paydirekt Captures',
},
),
migrations.CreateModel(
name='PaydirektCheckout',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('checkout_id', models.CharField(unique=True, max_length=255, verbose_name='checkout id')),
('payment_type', models.CharField(max_length=255, verbose_name='payment type')),
('total_amount', models.DecimalField(verbose_name='total amount', max_digits=9, decimal_places=2)),
('status', models.CharField(max_length=255, verbose_name='status', blank=True)),
('link', models.URLField(verbose_name='link')),
('approve_link', models.URLField(verbose_name='approve link')),
('close_link', models.URLField(verbose_name='close link', blank=True)),
('captures_link', models.URLField(verbose_name='captures link', blank=True)),
('refunds_link', models.URLField(verbose_name='refunds link', blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('last_modified', models.DateTimeField(auto_now=True, verbose_name='last modified')),
],
options={
'verbose_name': 'Paydirekt Checkout',
'verbose_name_plural': 'Paydirekt Checkouts',
},
),
migrations.CreateModel(
name='PaydirektRefund',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('amount', models.DecimalField(verbose_name='amount', max_digits=9, decimal_places=2)),
('transaction_id', models.CharField(unique=True, max_length=255, verbose_name='transaction id')),
('link', models.URLField(verbose_name='link')),
('status', models.CharField(max_length=255, verbose_name='status', blank=True)),
('refund_type', models.CharField(max_length=255, verbose_name='refund type', blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('last_modified', models.DateTimeField(auto_now=True, verbose_name='last modified')),
('checkout', models.ForeignKey(related_name='refunds', verbose_name='checkout', to='django_paydirekt.PaydirektCheckout', on_delete=models.CASCADE)),
],
options={
'verbose_name': 'Paydirekt Refund',
'verbose_name_plural': 'Paydirekt Refund',
},
),
migrations.AddField(
model_name='paydirektcapture',
name='checkout',
field=models.ForeignKey(related_name='captures', verbose_name='checkout', to='django_paydirekt.PaydirektCheckout', on_delete=models.CASCADE),
),
]
| mit | -895,930,965,108,189,400 | 56.04 | 164 | 0.588125 | false |
MrLoick/python-for-android | python-modules/twisted/twisted/test/test_stringtransport.py | 56 | 9941 | # Copyright (c) 2009-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.test.proto_helpers}.
"""
from zope.interface.verify import verifyObject
from twisted.internet.interfaces import (ITransport, IPushProducer, IConsumer,
IReactorTCP, IReactorSSL, IReactorUNIX, IAddress, IListeningPort,
IConnector)
from twisted.internet.address import IPv4Address
from twisted.trial.unittest import TestCase
from twisted.test.proto_helpers import (StringTransport, MemoryReactor,
RaisingMemoryReactor)
from twisted.internet.protocol import ClientFactory, Factory
class StringTransportTests(TestCase):
"""
Tests for L{twisted.test.proto_helpers.StringTransport}.
"""
def setUp(self):
self.transport = StringTransport()
def test_interfaces(self):
"""
L{StringTransport} instances provide L{ITransport}, L{IPushProducer},
and L{IConsumer}.
"""
self.assertTrue(verifyObject(ITransport, self.transport))
self.assertTrue(verifyObject(IPushProducer, self.transport))
self.assertTrue(verifyObject(IConsumer, self.transport))
def test_registerProducer(self):
"""
L{StringTransport.registerProducer} records the arguments supplied to
it as instance attributes.
"""
producer = object()
streaming = object()
self.transport.registerProducer(producer, streaming)
self.assertIdentical(self.transport.producer, producer)
self.assertIdentical(self.transport.streaming, streaming)
def test_disallowedRegisterProducer(self):
"""
L{StringTransport.registerProducer} raises L{RuntimeError} if a
producer is already registered.
"""
producer = object()
self.transport.registerProducer(producer, True)
self.assertRaises(
RuntimeError, self.transport.registerProducer, object(), False)
self.assertIdentical(self.transport.producer, producer)
self.assertTrue(self.transport.streaming)
def test_unregisterProducer(self):
"""
L{StringTransport.unregisterProducer} causes the transport to forget
about the registered producer and makes it possible to register a new
one.
"""
oldProducer = object()
newProducer = object()
self.transport.registerProducer(oldProducer, False)
self.transport.unregisterProducer()
self.assertIdentical(self.transport.producer, None)
self.transport.registerProducer(newProducer, True)
self.assertIdentical(self.transport.producer, newProducer)
self.assertTrue(self.transport.streaming)
def test_invalidUnregisterProducer(self):
"""
L{StringTransport.unregisterProducer} raises L{RuntimeError} if called
when no producer is registered.
"""
self.assertRaises(RuntimeError, self.transport.unregisterProducer)
def test_initialProducerState(self):
"""
L{StringTransport.producerState} is initially C{'producing'}.
"""
self.assertEqual(self.transport.producerState, 'producing')
def test_pauseProducing(self):
"""
L{StringTransport.pauseProducing} changes the C{producerState} of the
transport to C{'paused'}.
"""
self.transport.pauseProducing()
self.assertEqual(self.transport.producerState, 'paused')
def test_resumeProducing(self):
"""
L{StringTransport.resumeProducing} changes the C{producerState} of the
transport to C{'producing'}.
"""
self.transport.pauseProducing()
self.transport.resumeProducing()
self.assertEqual(self.transport.producerState, 'producing')
def test_stopProducing(self):
"""
L{StringTransport.stopProducing} changes the C{'producerState'} of the
transport to C{'stopped'}.
"""
self.transport.stopProducing()
self.assertEqual(self.transport.producerState, 'stopped')
def test_stoppedTransportCannotPause(self):
"""
L{StringTransport.pauseProducing} raises L{RuntimeError} if the
transport has been stopped.
"""
self.transport.stopProducing()
self.assertRaises(RuntimeError, self.transport.pauseProducing)
def test_stoppedTransportCannotResume(self):
"""
L{StringTransport.resumeProducing} raises L{RuntimeError} if the
transport has been stopped.
"""
self.transport.stopProducing()
self.assertRaises(RuntimeError, self.transport.resumeProducing)
def test_disconnectingTransportCannotPause(self):
"""
L{StringTransport.pauseProducing} raises L{RuntimeError} if the
transport is being disconnected.
"""
self.transport.loseConnection()
self.assertRaises(RuntimeError, self.transport.pauseProducing)
def test_disconnectingTransportCannotResume(self):
"""
L{StringTransport.resumeProducing} raises L{RuntimeError} if the
transport is being disconnected.
"""
self.transport.loseConnection()
self.assertRaises(RuntimeError, self.transport.resumeProducing)
def test_loseConnectionSetsDisconnecting(self):
"""
L{StringTransport.loseConnection} toggles the C{disconnecting} instance
variable to C{True}.
"""
self.assertFalse(self.transport.disconnecting)
self.transport.loseConnection()
self.assertTrue(self.transport.disconnecting)
def test_specifiedHostAddress(self):
"""
If a host address is passed to L{StringTransport.__init__}, that
value is returned from L{StringTransport.getHost}.
"""
address = object()
self.assertIdentical(StringTransport(address).getHost(), address)
def test_specifiedPeerAddress(self):
"""
If a peer address is passed to L{StringTransport.__init__}, that
value is returned from L{StringTransport.getPeer}.
"""
address = object()
self.assertIdentical(
StringTransport(peerAddress=address).getPeer(), address)
def test_defaultHostAddress(self):
"""
If no host address is passed to L{StringTransport.__init__}, an
L{IPv4Address} is returned from L{StringTransport.getHost}.
"""
address = StringTransport().getHost()
self.assertIsInstance(address, IPv4Address)
def test_defaultPeerAddress(self):
"""
If no peer address is passed to L{StringTransport.__init__}, an
L{IPv4Address} is returned from L{StringTransport.getPeer}.
"""
address = StringTransport().getPeer()
self.assertIsInstance(address, IPv4Address)
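# Outside these self-tests, StringTransport is typically handed to a real
# protocol under test (illustrative sketch; EchoProtocol is hypothetical):
#
#   transport = StringTransport()
#   proto = EchoProtocol()
#   proto.makeConnection(transport)
#   proto.dataReceived('ping')
#   assert transport.value() == 'ping'   # everything the protocol wrote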
class ReactorTests(TestCase):
"""
Tests for L{MemoryReactor} and L{RaisingMemoryReactor}.
"""
def test_memoryReactorProvides(self):
"""
L{MemoryReactor} provides all of the attributes described by the
interfaces it advertises.
"""
memoryReactor = MemoryReactor()
verifyObject(IReactorTCP, memoryReactor)
verifyObject(IReactorSSL, memoryReactor)
verifyObject(IReactorUNIX, memoryReactor)
def test_raisingReactorProvides(self):
"""
L{RaisingMemoryReactor} provides all of the attributes described by the
interfaces it advertises.
"""
raisingReactor = RaisingMemoryReactor()
verifyObject(IReactorTCP, raisingReactor)
verifyObject(IReactorSSL, raisingReactor)
verifyObject(IReactorUNIX, raisingReactor)
def test_connectDestination(self):
"""
L{MemoryReactor.connectTCP}, L{MemoryReactor.connectSSL}, and
L{MemoryReactor.connectUNIX} will return an L{IConnector} whose
C{getDestination} method returns an L{IAddress} with attributes which
reflect the values passed.
"""
memoryReactor = MemoryReactor()
for connector in [memoryReactor.connectTCP(
"test.example.com", 8321, ClientFactory()),
memoryReactor.connectSSL(
"test.example.com", 8321, ClientFactory(),
None)]:
verifyObject(IConnector, connector)
address = connector.getDestination()
verifyObject(IAddress, address)
self.assertEquals(address.host, "test.example.com")
self.assertEquals(address.port, 8321)
connector = memoryReactor.connectUNIX("/fake/path", ClientFactory())
verifyObject(IConnector, connector)
address = connector.getDestination()
verifyObject(IAddress, address)
self.assertEquals(address.name, "/fake/path")
def test_listenDefaultHost(self):
"""
L{MemoryReactor.listenTCP}, L{MemoryReactor.listenSSL} and
L{MemoryReactor.listenUNIX} will return an L{IListeningPort} whose
C{getHost} method returns an L{IAddress}; C{listenTCP} and C{listenSSL}
will have a default host of C{'0.0.0.0'}, and a port that reflects the
value passed, and C{listenUNIX} will have a name that reflects the path
passed.
"""
memoryReactor = MemoryReactor()
for port in [memoryReactor.listenTCP(8242, Factory()),
memoryReactor.listenSSL(8242, Factory(), None)]:
verifyObject(IListeningPort, port)
address = port.getHost()
verifyObject(IAddress, address)
self.assertEquals(address.host, '0.0.0.0')
self.assertEquals(address.port, 8242)
port = memoryReactor.listenUNIX("/path/to/socket", Factory())
verifyObject(IListeningPort, port)
address = port.getHost()
verifyObject(IAddress, address)
self.assertEquals(address.name, "/path/to/socket") | apache-2.0 | 5,605,581,684,867,703,000 | 34.634409 | 79 | 0.659692 | false |
jagg81/translate-toolkit | translate/lang/si.py | 4 | 1048 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents Sinhala language.
For more information, see U{http://en.wikipedia.org/wiki/Sinhala_language}
"""
from translate.lang import common
class si(common.Common):
"""This class represents Sinhala."""
ignoretests = ["startcaps", "simplecaps"]
| gpl-2.0 | -6,726,473,792,437,943,000 | 31.75 | 74 | 0.743321 | false |
geodrinx/gearthview | ext-libs/twisted/internet/_posixserialport.py | 42 | 2068 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Serial Port Protocol
"""
# system imports
import os, errno
# dependent on pyserial ( http://pyserial.sf.net/ )
# only tested w/ 1.18 (5 Dec 2002)
import serial
from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD
from serial import STOPBITS_ONE, STOPBITS_TWO
from serial import FIVEBITS, SIXBITS, SEVENBITS, EIGHTBITS
from serialport import BaseSerialPort
# twisted imports
from twisted.internet import abstract, fdesc, main
class SerialPort(BaseSerialPort, abstract.FileDescriptor):
"""
A select()able serial device, acting as a transport.
"""
connected = 1
def __init__(self, protocol, deviceNameOrPortNumber, reactor,
baudrate = 9600, bytesize = EIGHTBITS, parity = PARITY_NONE,
stopbits = STOPBITS_ONE, timeout = 0, xonxoff = 0, rtscts = 0):
abstract.FileDescriptor.__init__(self, reactor)
self._serial = self._serialFactory(
deviceNameOrPortNumber, baudrate=baudrate, bytesize=bytesize,
parity=parity, stopbits=stopbits, timeout=timeout,
xonxoff=xonxoff, rtscts=rtscts)
self.reactor = reactor
self.flushInput()
self.flushOutput()
self.protocol = protocol
self.protocol.makeConnection(self)
self.startReading()
def fileno(self):
return self._serial.fd
def writeSomeData(self, data):
"""
Write some data to the serial device.
"""
return fdesc.writeToFD(self.fileno(), data)
def doRead(self):
"""
Some data's readable from serial device.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def connectionLost(self, reason):
"""
Called when the serial port disconnects.
Will call C{connectionLost} on the protocol that is handling the
serial data.
"""
abstract.FileDescriptor.connectionLost(self, reason)
self._serial.close()
self.protocol.connectionLost(reason)
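# Usage sketch (illustrative; the device path and protocol are assumptions,
# and applications normally import SerialPort from
# twisted.internet.serialport rather than this private module):
#
#   from twisted.internet import reactor, protocol
#   from twisted.internet.serialport import SerialPort
#
#   class Telemetry(protocol.Protocol):
#       def dataReceived(self, data):
#           print 'serial said: %r' % (data,)
#
#   SerialPort(Telemetry(), '/dev/ttyUSB0', reactor, baudrate=115200)
#   reactor.run()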
| gpl-3.0 | 8,425,912,776,426,885,000 | 26.945946 | 74 | 0.659574 | false |
GeoNode/geonode | geonode/monitoring/views.py | 4 | 27048 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2017 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
import pytz
from datetime import datetime, timedelta
from django.shortcuts import render
from django import forms
from django.contrib import auth
from django.conf import settings
from django.views.generic.base import View
from django.urls import reverse
from django.core.management import call_command
from django.views.decorators.csrf import csrf_exempt
from geonode.decorators import view_decorator, superuser_protected
from geonode.utils import json_response
from geonode.monitoring.collector import CollectorAPI
from geonode.monitoring.models import (
Service,
Host,
Metric,
ServiceTypeMetric,
MetricLabel,
MonitoredResource,
ExceptionEvent,
EventType,
NotificationCheck,
MetricNotificationCheck,
)
from geonode.monitoring.models import do_autoconfigure
from geonode.monitoring.utils import TypeChecks, dump, extend_datetime_input_formats
from geonode.monitoring.service_handlers import exposes
# Create your views here.
capi = CollectorAPI()
class MetricsList(View):
def get(self, *args, **kwargs):
_metrics = capi.get_metric_names()
out = []
for srv, mlist in _metrics:
out.append({'service': srv.name,
'metrics': [{'name': m.name, 'unit': m.unit, 'type': m.type}
for m in mlist]})
return json_response({'metrics': out})
class ServicesList(View):
def get_queryset(self):
return Service.objects.filter(active=True).select_related()
def get(self, *args, **kwargs):
q = self.get_queryset()
out = []
for item in q:
out.append({'name': item.name,
'host': item.host.name,
'id': item.id,
'type': item.service_type.name,
'check_interval': item.check_interval.total_seconds(),
'last_check': item.last_check})
return json_response({'services': out})
class HostsList(View):
def get_queryset(self):
return Host.objects.filter(active=True).select_related()
def get(self, *args, **kwargs):
q = self.get_queryset()
out = []
for item in q:
out.append({'name': item.name, 'ip': item.ip})
return json_response({'hosts': out})
class _ValidFromToLastForm(forms.Form):
valid_from = forms.DateTimeField(
required=False,
input_formats=extend_datetime_input_formats(['%Y-%m-%dT%H:%M:%S.%fZ'])
)
valid_to = forms.DateTimeField(
required=False,
input_formats=extend_datetime_input_formats(['%Y-%m-%dT%H:%M:%S.%fZ'])
)
interval = forms.IntegerField(min_value=60, required=False)
last = forms.IntegerField(min_value=60, required=False)
def _check_timestamps(self):
last = self.cleaned_data.get('last')
vf = self.cleaned_data.get('valid_from')
vt = self.cleaned_data.get('valid_to')
if last and (vf or vt):
raise forms.ValidationError(
'Cannot use last and valid_from/valid_to at the same time')
def clean(self):
super(_ValidFromToLastForm, self).clean()
self._check_timestamps()
class CheckTypeForm(_ValidFromToLastForm):
"""
Special form class to validate values from specific db dictionaries
(services, resources, ows services etc)
"""
def _check_type(self, tname):
"""
Returns tname-specific object instance from db.
Internally it uses geonode.monotoring.utils.TypeChecks
to resolve field's value to object.
"""
d = self.cleaned_data
if not d:
return
val = d[tname]
if not val:
return
tcheck = getattr(TypeChecks, f'{tname}_type', None)
if not tcheck:
raise forms.ValidationError(f"No type check for {tname}")
try:
return tcheck(val)
except (Exception,) as err:
raise forms.ValidationError(err)
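# For reference, a sketch of the convention _check_type relies on (an
# assumption about geonode.monitoring.utils.TypeChecks inferred from the
# getattr above, not its verbatim source): each `<field>_type` callable
# resolves a raw request value to a model instance and raises if it cannot.
#
#   class TypeChecks(object):
#       @staticmethod
#       def service_type(val):
#           return Service.objects.get(name=val)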
class MetricsFilters(CheckTypeForm):
GROUP_BY_RESOURCE = 'resource'
GROUP_BY_RESOURCE_ON_LABEL = 'resource_on_label'
GROUP_BY_RESOURCE_ON_USER = 'resource_on_user'
GROUP_BY_COUNT_ON_RESOURCE = 'count_on_resource'
GROUP_BY_LABEL = 'label'
GROUP_BY_USER = 'user'
GROUP_BY_USER_ON_LABEL = 'user_on_label'
GROUP_BY_EVENT_TYPE = 'event_type'
GROUP_BY_EVENT_TYPE_ON_LABEL = 'event_type_on_label'
GROUP_BY_EVENT_TYPE_ON_USER = 'event_type_on_user'
GROUP_BY_CHOICES = ((GROUP_BY_RESOURCE, "By resource",),
(GROUP_BY_RESOURCE_ON_LABEL, "By resource on label",),
(GROUP_BY_RESOURCE_ON_USER, "By resource on user",),
(GROUP_BY_COUNT_ON_RESOURCE, "By resource with count",),
(GROUP_BY_LABEL, "By label",),
(GROUP_BY_USER, "By user",),
(GROUP_BY_USER_ON_LABEL, "By user on label",),
(GROUP_BY_EVENT_TYPE, "By event type",),
(GROUP_BY_EVENT_TYPE_ON_LABEL, "By event type on label",),
(GROUP_BY_EVENT_TYPE_ON_USER, "By event type on user",),)
service = forms.CharField(required=False)
label = forms.CharField(required=False)
user = forms.CharField(required=False)
resource = forms.CharField(required=False)
resource_type = forms.ChoiceField(
choices=MonitoredResource.TYPES, required=False)
event_type = forms.CharField(required=False)
service_type = forms.CharField(required=False)
group_by = forms.ChoiceField(choices=GROUP_BY_CHOICES, required=False)
def clean_resource(self):
return self._check_type('resource')
def clean_service(self):
return self._check_type('service')
def clean_label(self):
return self._check_type('label')
def clean_user(self):
return self._check_type('user')
def clean_event_type(self):
return self._check_type('event_type')
def clean_service_type(self):
return self._check_type('service_type')
def _check_services(self):
s = self.cleaned_data.get('service')
st = self.cleaned_data.get('service_type')
if st and s:
raise forms.ValidationError(
"Cannot use service and service type at the same time")
def clean(self):
super(MetricsFilters, self).clean()
self._check_services()
class LabelsFilterForm(CheckTypeForm):
metric_name = forms.CharField(required=False)
def clean_metric(self):
return self._check_type('metric_name')
class ResourcesFilterForm(LabelsFilterForm):
resource_type = forms.CharField(required=False)
def clean_resource_type(self):
return self._check_type('resource_type')
class EventTypesFilterForm(CheckTypeForm):
ows_service = forms.CharField(required=False)
def clean_ows_service(self):
return self._check_type('ows_service')
class FilteredView(View):
# form which validates request.GET for get_queryset()
filter_form = None
# iterable of pairs (from model field, to key name) to map
# fields from model to elements of output data
fields_map = tuple()
# key name for output ({output_name: data})
output_name = None
def get_filter_args(self, request):
self.errors = None
if not self.filter_form:
return {}
f = self.filter_form(data=request.GET)
if not f.is_valid():
self.errors = f.errors
return f.cleaned_data
def get(self, request, *args, **kwargs):
qargs = self.get_filter_args(request)
if self.errors:
return json_response({'success': False,
'status': 'errors',
'errors': self.errors},
status=400)
q = self.get_queryset(**qargs)
from_fields = [f[0] for f in self.fields_map]
to_fields = [f[1] for f in self.fields_map]
out = [dict(zip(to_fields, (getattr(item, f)
for f in from_fields))) for item in q]
data = {self.output_name: out,
'success': True,
'errors': {},
'status': 'ok'}
if self.output_name != 'data':
data['data'] = {'key': self.output_name}
return json_response(data)
@view_decorator(superuser_protected, subclass=True)
class ResourcesList(FilteredView):
filter_form = ResourcesFilterForm
fields_map = (('id', 'id',),
('type', 'type',),
('name', 'name',),)
output_name = 'resources'
def get_queryset(self, metric_name=None,
resource_type=None,
valid_from=None,
valid_to=None,
last=None,
interval=None):
q = MonitoredResource.objects.all().distinct()
qparams = {}
if resource_type:
qparams['type'] = resource_type
if metric_name:
sm = ServiceTypeMetric.objects.filter(metric__name=metric_name)
qparams['metric_values__service_metric__in'] = sm
if last:
_from = datetime.utcnow().replace(tzinfo=pytz.utc) - timedelta(seconds=last)
if interval is None:
interval = 60
if not isinstance(interval, timedelta):
interval = timedelta(seconds=interval)
valid_from = _from
if valid_from:
qparams['metric_values__valid_from__gte'] = valid_from
if valid_to:
qparams['metric_values__valid_to__lte'] = valid_to
if qparams:
q = q.filter(**qparams)
return q
@view_decorator(superuser_protected, subclass=True)
class ResourceTypesList(FilteredView):
output_name = 'resource_types'
def get(self, request, *args, **kwargs):
if self.filter_form:
f = self.filter_form(data=request.GET)
if not f.is_valid():
return json_response({'success': False,
'status': 'errors',
'errors': f.errors},
status=400)
out = [{"name": mrt[0], "type_label": mrt[1]} for mrt in MonitoredResource.TYPES]
data = {self.output_name: out,
'success': True,
'errors': {},
'status': 'ok'}
if self.output_name != 'data':
data['data'] = {'key': self.output_name}
return json_response(data)
@view_decorator(superuser_protected, subclass=True)
class LabelsList(FilteredView):
filter_form = LabelsFilterForm
fields_map = (('id', 'id',),
('name', 'name',),)
output_name = 'labels'
def get_queryset(self, metric_name, valid_from,
valid_to, interval=None, last=None):
q = MetricLabel.objects.all().distinct()
qparams = {}
if metric_name:
sm = ServiceTypeMetric.objects.filter(metric__name=metric_name)
qparams['metric_values__service_metric__in'] = sm
if last:
_from = datetime.utcnow().replace(tzinfo=pytz.utc) - timedelta(seconds=last)
if interval is None:
interval = 60
if not isinstance(interval, timedelta):
interval = timedelta(seconds=interval)
valid_from = _from
if valid_from:
qparams['metric_values__valid_from__gte'] = valid_from
if valid_to:
qparams['metric_values__valid_to__lte'] = valid_to
if qparams:
q = q.filter(**qparams)
return q
@view_decorator(superuser_protected, subclass=True)
class EventTypeList(FilteredView):
filter_form = EventTypesFilterForm
fields_map = (('name', 'name',), ('type_label', 'type_label',),)
output_name = 'event_types'
def get_queryset(self, **kwargs):
if "ows_service" in kwargs and kwargs["ows_service"] is not None:
if kwargs["ows_service"]:
return EventType.objects.filter(name__icontains="OWS")
else:
return EventType.objects.exclude(name__icontains="OWS")
return EventType.objects.all()
def get(self, request, *args, **kwargs):
qargs = self.get_filter_args(request)
if self.errors:
return json_response({'success': False,
'status': 'errors',
'errors': self.errors},
status=400)
q = self.get_queryset(**qargs)
from_fields = [f[0] for f in self.fields_map]
to_fields = [f[1] for f in self.fields_map]
labels = dict(EventType.EVENT_TYPES)
out = [dict(zip(
to_fields,
(getattr(item, f) if f != 'type_label' else labels[getattr(item, 'name')] for f in from_fields)
)) for item in q]
data = {self.output_name: out,
'success': True,
'errors': {},
'status': 'ok'}
if self.output_name != 'data':
data['data'] = {'key': self.output_name}
return json_response(data)
@view_decorator(superuser_protected, subclass=True)
class MetricDataView(View):
def get_filters(self, **kwargs):
out = {}
self.errors = None
f = MetricsFilters(data=self.request.GET)
if not f.is_valid():
self.errors = f.errors
else:
out.update(f.cleaned_data)
return out
def get(self, request, *args, **kwargs):
filters = self.get_filters(**kwargs)
if self.errors:
return json_response({'status': 'error',
'success': False,
'errors': self.errors},
status=400)
metric_name = kwargs['metric_name']
last = filters.pop('last', None)
if last:
td = timedelta(seconds=last)
now = datetime.utcnow().replace(tzinfo=pytz.utc)
filters['valid_from'] = now - td
filters['valid_to'] = now
out = capi.get_metrics_for(metric_name, **filters)
return json_response({'data': out})
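# Example request against the view above (illustrative; the URL prefix and
# metric name are assumptions): asking for the last hour of data at one
# minute resolution,
#
#   GET .../api/metric_data/request.count/?last=3600&interval=60
#
# get() turns `last` into valid_from = now - 1h and valid_to = now before
# delegating to CollectorAPI.get_metrics_for().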
class ExceptionsListForm(CheckTypeForm):
error_type = forms.CharField(required=False)
service_name = forms.CharField(required=False)
service_type = forms.CharField(required=False)
resource = forms.CharField(required=False)
def clean_resource(self):
return self._check_type('resource')
def clean_service(self):
return self._check_type('service')
class ExceptionsListView(FilteredView):
filter_form = ExceptionsListForm
fields_map = (('id', 'id',),
('created', 'created',),
('url', 'url',),
('service_data', 'service',),
('error_type', 'error_type',),)
output_name = 'exceptions'
def get_queryset(self, error_type=None,
valid_from=None,
valid_to=None,
interval=None,
last=None,
service_name=None,
service_type=None,
resource=None):
q = ExceptionEvent.objects.all().select_related()
if error_type:
q = q.filter(error_type=error_type)
if last:
_from = datetime.utcnow().replace(tzinfo=pytz.utc) - timedelta(seconds=last)
if interval is None:
interval = 60
if not isinstance(interval, timedelta):
interval = timedelta(seconds=interval)
valid_from = _from
if valid_from:
q = q.filter(created__gte=valid_from)
if valid_to:
q = q.filter(created__lte=valid_to)
if service_name:
q = q.filter(service__name=service_name)
if service_type:
q = q.filter(service__service_type__name=service_type)
if resource:
q = q.filter(request__resources__in=(resource,))
return q
class ExceptionDataView(View):
def get_object(self, exception_id):
try:
return ExceptionEvent.objects.get(id=exception_id)
except ExceptionEvent.DoesNotExist:
return
def get(self, request, exception_id, *args, **kwargs):
e = self.get_object(exception_id)
if not e:
return json_response(
errors={'exception_id': "Object not found"}, status=404)
data = e.expose()
return json_response(data)
class BeaconView(View):
def get(self, request, *args, **kwargs):
service = kwargs.get('exposed')
if not service:
data = [{'name': s, 'url': reverse(
'monitoring:api_beacon_exposed', args=(s,))} for s in exposes.keys()]
return json_response({'exposed': data})
try:
ex = exposes[service]()
except KeyError:
return json_response(
errors={'exposed': f'No service for {service}'}, status=404)
out = {'data': ex.expose(),
'timestamp': datetime.utcnow().replace(tzinfo=pytz.utc)}
return json_response(out)
def index(request):
if auth.get_user(request).is_superuser:
return render(request, 'monitoring/index.html')
return render(request, 'monitoring/non_superuser.html')
class NotificationCheckForm(forms.ModelForm):
class Meta:
model = NotificationCheck
fields = ('name', 'description', 'severity', 'user_threshold',)
class MetricNotificationCheckForm(forms.ModelForm):
metric = forms.CharField(required=True)
service = forms.CharField(required=False)
resource = forms.CharField(required=False)
label = forms.CharField(required=False)
event_type = forms.CharField(required=False)
class Meta:
model = MetricNotificationCheck
fields = (
'notification_check',
'min_value',
'max_value',
'max_timeout',
)
def _get_clean_model(self, cls, name):
val = self.cleaned_data.get(name)
if not self.fields[name].required:
if not val:
return
try:
return cls.objects.get(name=val)
except cls.DoesNotExist:
raise forms.ValidationError(f"Invalid {name}: {val}")
def clean_metric(self):
return self._get_clean_model(Metric, 'metric')
def clean_service(self):
return self._get_clean_model(Service, 'service')
def clean_label(self):
return self._get_clean_model(MetricLabel, 'label')
def clean_event_type(self):
return self._get_clean_model(EventType, 'event_type')
def clean_resource(self):
val = self.cleaned_data.get('resource')
if not val:
return
try:
vtype, vname = val.split('=')
except IndexError:
raise forms.ValidationError(
f"Invalid resource name: {val}")
try:
return MonitoredResource.objects.get(name=vname, type=vtype)
except MonitoredResource.DoesNotExist:
raise forms.ValidationError(f"Invalid resource: {val}")
class UserNotificationConfigView(View):
def get_object(self):
pk = self.kwargs['pk']
return NotificationCheck.objects.get(pk=pk)
def get(self, request, *args, **kwargs):
out = {'success': False, 'status': 'error', 'data': [], 'errors': {}}
fields = ('field_name',
'steps',
'current_value',
'steps_calculated',
'unit',
'is_enabled',)
if auth.get_user(request).is_authenticated:
obj = self.get_object()
out['success'] = True
out['status'] = 'ok'
form = obj.get_user_form()
fields = [dump(r, fields) for r in obj.definitions.all()]
out['data'] = {'form': form.as_table(),
'fields': fields,
'emails': obj.emails,
'notification': dump(obj)}
status = 200
else:
out['errors']['user'] = ['User is not authenticated']
status = 401
return json_response(out, status=status)
def post(self, request, *args, **kwargs):
out = {'success': False, 'status': 'error', 'data': [], 'errors': {}}
if auth.get_user(request).is_authenticated:
obj = self.get_object()
try:
is_json = True
data = json.loads(request.body)
except (TypeError, ValueError,):
is_json = False
data = request.POST.copy()
try:
configs = obj.process_user_form(data, is_json=is_json)
out['success'] = True
out['status'] = 'ok'
out['data'] = [dump(c) for c in configs]
status = 200
except forms.ValidationError as err:
out['errors'] = err.errors
status = 400
else:
out['errors']['user'] = ['User is not authenticated']
status = 401
return json_response(out, status=status)
if settings.MONITORING_DISABLE_CSRF:
post = csrf_exempt(post)
class NotificationsList(FilteredView):
filter_form = None
fields_map = (('id', 'id',),
('url', 'url',),
('name', 'name',),
('active', 'active',),
('severity', 'severity',),
('description', 'description',),
)
output_name = 'data'
def get_filter_args(self, *args, **kwargs):
self.errors = {}
if not auth.get_user(self.request).is_authenticated:
self.errors = {'user': ['User is not authenticated']}
return {}
def get_queryset(self, *args, **kwargs):
return NotificationCheck.objects.all()
def create(self, request, *args, **kwargs):
        f = NotificationCheckForm(data=request.POST)
if f.is_valid():
d = f.cleaned_data
return NotificationCheck.create(**d)
self.errors = f.errors
def post(self, request, *args, **kwargs):
out = {'success': False, 'status': 'error', 'data': [], 'errors': {}}
d = self.create(request, *args, **kwargs)
if d is None:
out['errors'] = self.errors
status = 400
else:
out['data'] = dump(d)
out['success'] = True
out['status'] = 'ok'
status = 200
return json_response(out, status=status)
class StatusCheckView(View):
fields = ('name',
'severity',
'offending_value',
'threshold_value',
'spotted_at',
'valid_from',
'valid_to',
'check_url',
'check_id',
'description',
'message',)
def get(self, request, *args, **kwargs):
capi = CollectorAPI()
checks = capi.get_notifications()
data = {'status': 'ok', 'success': True, 'data': {}}
d = data['data']
d['problems'] = problems = []
d['health_level'] = 'ok'
_levels = ('fatal', 'error', 'warning',)
levels = set([])
for nc, ncdata in checks:
for ncd in ncdata:
levels.add(ncd.severity)
problems.append(dump(ncd, self.fields))
if levels:
for lyr in _levels:
if lyr in levels:
d['health_level'] = lyr
break
return json_response(data)
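# Shape of the payload produced above (illustrative values): health_level
# reports the worst severity among the triggered checks, in fatal > error >
# warning order.
#
#   {"status": "ok", "success": true,
#    "data": {"health_level": "error",
#             "problems": [{"name": "...", "severity": "error", ...}]}}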
class AutoconfigureView(View):
def post(self, request, *args, **kwargs):
if not auth.get_user(request).is_authenticated:
out = {'success': False,
'status': 'error',
'errors': {'user': ['User is not authenticated']}
}
return json_response(out, status=401)
if not (auth.get_user(request).is_superuser or auth.get_user(request).is_staff):
out = {'success': False,
'status': 'error',
'errors': {'user': ['User is not permitted']}
}
return json_response(out, status=401)
do_autoconfigure()
out = {'success': True,
'status': 'ok',
'errors': {}
}
return json_response(out)
class CollectMetricsView(View):
"""
- Run command "collect_metrics -n -t xml" via web
"""
authkey = 'OzhVMECJUn9vDu2oLv1HjGPKByuTBwF8'
def get(self, request, *args, **kwargs):
authkey = kwargs.get('authkey')
if not authkey or authkey != self.authkey:
out = {'success': False,
'status': 'error',
'errors': {'denied': ['Call is not permitted']}
}
return json_response(out, status=401)
else:
call_command(
'collect_metrics', '-n', '-t', 'xml')
out = {'success': True,
'status': 'ok',
'errors': {}
}
return json_response(out)
api_metrics = MetricsList.as_view()
api_services = ServicesList.as_view()
api_hosts = HostsList.as_view()
api_labels = LabelsList.as_view()
api_resources = ResourcesList.as_view()
api_resource_types = ResourceTypesList.as_view()
api_event_types = EventTypeList.as_view()
api_metric_data = MetricDataView.as_view()
api_metric_collect = CollectMetricsView.as_view()
api_exceptions = ExceptionsListView.as_view()
api_exception = ExceptionDataView.as_view()
api_beacon = BeaconView.as_view()
api_user_notification_config = UserNotificationConfigView.as_view()
api_user_notifications = NotificationsList.as_view()
api_status = StatusCheckView.as_view()
api_autoconfigure = AutoconfigureView.as_view()
| gpl-3.0 | -8,006,961,202,023,900,000 | 32.979899 | 107 | 0.556603 | false |
rackerlabs/ironic | ironic/drivers/modules/deploy_utils.py | 1 | 17645 | # Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import socket
import stat
import time
from oslo.config import cfg
from oslo.utils import excutils
from oslo_concurrency import processutils
from ironic.common import disk_partitioner
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common import images
from ironic.common import states
from ironic.common import utils
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules import image_cache
from ironic.openstack.common import log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
# All functions are called from deploy() directly or indirectly.
# They are split for stub-out.
def discovery(portal_address, portal_port):
"""Do iSCSI discovery on portal."""
utils.execute('iscsiadm',
'-m', 'discovery',
'-t', 'st',
'-p', '%s:%s' % (portal_address, portal_port),
run_as_root=True,
check_exit_code=[0],
attempts=5,
delay_on_retry=True)
def login_iscsi(portal_address, portal_port, target_iqn):
"""Login to an iSCSI target."""
utils.execute('iscsiadm',
'-m', 'node',
'-p', '%s:%s' % (portal_address, portal_port),
'-T', target_iqn,
'--login',
run_as_root=True,
check_exit_code=[0],
attempts=5,
delay_on_retry=True)
# Ensure the login complete
time.sleep(3)
def logout_iscsi(portal_address, portal_port, target_iqn):
"""Logout from an iSCSI target."""
utils.execute('iscsiadm',
'-m', 'node',
'-p', '%s:%s' % (portal_address, portal_port),
'-T', target_iqn,
'--logout',
run_as_root=True,
check_exit_code=[0],
attempts=5,
delay_on_retry=True)
def delete_iscsi(portal_address, portal_port, target_iqn):
"""Delete the iSCSI target."""
# Retry delete until it succeeds (exit code 0) or until there is
# no longer a target to delete (exit code 21).
utils.execute('iscsiadm',
'-m', 'node',
'-p', '%s:%s' % (portal_address, portal_port),
'-T', target_iqn,
'-o', 'delete',
run_as_root=True,
check_exit_code=[0, 21],
attempts=5,
delay_on_retry=True)
def make_partitions(dev, root_mb, swap_mb, ephemeral_mb, commit=True):
"""Create partitions for root, swap and ephemeral on a disk device.
:param root_mb: Size of the root partition in mebibytes (MiB).
:param swap_mb: Size of the swap partition in mebibytes (MiB). If 0,
no swap partition will be created.
:param ephemeral_mb: Size of the ephemeral partition in mebibytes (MiB).
If 0, no ephemeral partition will be created.
:param commit: True/False. Default for this setting is True. If False
partitions will not be written to disk.
:returns: A dictionary containing the partition type as Key and partition
path as Value for the partitions created by this method.
"""
part_template = dev + '-part%d'
part_dict = {}
dp = disk_partitioner.DiskPartitioner(dev)
if ephemeral_mb:
part_num = dp.add_partition(ephemeral_mb)
part_dict['ephemeral'] = part_template % part_num
if swap_mb:
part_num = dp.add_partition(swap_mb, fs_type='linux-swap')
part_dict['swap'] = part_template % part_num
# NOTE(lucasagomes): Make the root partition the last partition. This
# enables tools like cloud-init's growroot utility to expand the root
# partition until the end of the disk.
part_num = dp.add_partition(root_mb)
part_dict['root'] = part_template % part_num
if commit:
# write to the disk
dp.commit()
return part_dict
def is_block_device(dev):
"""Check whether a device is block or not."""
s = os.stat(dev)
return stat.S_ISBLK(s.st_mode)
def dd(src, dst):
"""Execute dd from src to dst."""
utils.dd(src, dst, 'bs=1M', 'oflag=direct')
def populate_image(src, dst):
data = images.qemu_img_info(src)
if data.file_format == 'raw':
dd(src, dst)
else:
images.convert_image(src, dst, 'raw', True)
def mkswap(dev, label='swap1'):
"""Execute mkswap on a device."""
utils.mkfs('swap', dev, label)
def mkfs_ephemeral(dev, ephemeral_format, label="ephemeral0"):
utils.mkfs(ephemeral_format, dev, label)
def block_uuid(dev):
"""Get UUID of a block device."""
out, _err = utils.execute('blkid', '-s', 'UUID', '-o', 'value', dev,
run_as_root=True,
check_exit_code=[0])
return out.strip()
def switch_pxe_config(path, root_uuid, boot_mode):
"""Switch a pxe config from deployment mode to service mode."""
with open(path) as f:
lines = f.readlines()
root = 'UUID=%s' % root_uuid
rre = re.compile(r'\{\{ ROOT \}\}')
if boot_mode == 'uefi':
dre = re.compile('^default=.*$')
boot_line = 'default=boot'
else:
pxe_cmd = 'goto' if CONF.pxe.ipxe_enabled else 'default'
dre = re.compile('^%s .*$' % pxe_cmd)
boot_line = '%s boot' % pxe_cmd
with open(path, 'w') as f:
for line in lines:
line = rre.sub(root, line)
line = dre.sub(boot_line, line)
f.write(line)
def notify(address, port):
"""Notify a node that it becomes ready to reboot."""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((address, port))
s.send('done')
finally:
s.close()
def get_dev(address, port, iqn, lun):
"""Returns a device path for given parameters."""
dev = ("/dev/disk/by-path/ip-%s:%s-iscsi-%s-lun-%s"
% (address, port, iqn, lun))
return dev
def get_image_mb(image_path, virtual_size=True):
"""Get size of an image in Megabyte."""
mb = 1024 * 1024
if not virtual_size:
image_byte = os.path.getsize(image_path)
else:
image_byte = images.converted_size(image_path)
# round up size to MB
image_mb = int((image_byte + mb - 1) / mb)
return image_mb
def get_dev_block_size(dev):
"""Get the device size in 512 byte sectors."""
block_sz, cmderr = utils.execute('blockdev', '--getsz', dev,
run_as_root=True, check_exit_code=[0])
return int(block_sz)
def destroy_disk_metadata(dev, node_uuid):
"""Destroy metadata structures on node's disk.
Ensure that node's disk appears to be blank without zeroing the entire
drive. To do this we will zero:
- the first 18KiB to clear MBR / GPT data
- the last 18KiB to clear GPT and other metadata like: LVM, veritas,
MDADM, DMRAID, ...
"""
# NOTE(NobodyCam): This is needed to work around bug:
# https://bugs.launchpad.net/ironic/+bug/1317647
try:
utils.execute('dd', 'if=/dev/zero', 'of=%s' % dev,
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0])
except processutils.ProcessExecutionError as err:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to erase beginning of disk for node "
"%(node)s. Command: %(command)s. Error: %(error)s."),
{'node': node_uuid,
'command': err.cmd,
'error': err.stderr})
# now wipe the end of the disk.
# get end of disk seek value
try:
block_sz = get_dev_block_size(dev)
except processutils.ProcessExecutionError as err:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to get disk block count for node %(node)s. "
"Command: %(command)s. Error: %(error)s."),
{'node': node_uuid,
'command': err.cmd,
'error': err.stderr})
else:
seek_value = block_sz - 36
try:
utils.execute('dd', 'if=/dev/zero', 'of=%s' % dev,
'bs=512', 'count=36', 'seek=%d' % seek_value,
run_as_root=True, check_exit_code=[0])
except processutils.ProcessExecutionError as err:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to erase the end of the disk on node "
"%(node)s. Command: %(command)s. "
"Error: %(error)s."),
{'node': node_uuid,
'command': err.cmd,
'error': err.stderr})
def work_on_disk(dev, root_mb, swap_mb, ephemeral_mb, ephemeral_format,
image_path, node_uuid, preserve_ephemeral=False):
"""Create partitions and copy an image to the root partition.
:param dev: Path for the device to work on.
:param root_mb: Size of the root partition in megabytes.
:param swap_mb: Size of the swap partition in megabytes.
:param ephemeral_mb: Size of the ephemeral partition in megabytes. If 0,
no ephemeral partition will be created.
:param ephemeral_format: The type of file system to format the ephemeral
partition.
:param image_path: Path for the instance's disk image.
:param node_uuid: node's uuid. Used for logging.
:param preserve_ephemeral: If True, no filesystem is written to the
ephemeral block device, preserving whatever content it had (if the
partition table has not changed).
:returns: the UUID of the root partition.
"""
if not is_block_device(dev):
raise exception.InstanceDeployFailure(_("Parent device '%s' not found")
% dev)
# the only way for preserve_ephemeral to be set to true is if we are
# rebuilding an instance with --preserve_ephemeral.
commit = not preserve_ephemeral
# now if we are committing the changes to disk clean first.
if commit:
destroy_disk_metadata(dev, node_uuid)
part_dict = make_partitions(dev, root_mb, swap_mb, ephemeral_mb,
commit=commit)
ephemeral_part = part_dict.get('ephemeral')
swap_part = part_dict.get('swap')
root_part = part_dict.get('root')
if not is_block_device(root_part):
raise exception.InstanceDeployFailure(_("Root device '%s' not found")
% root_part)
if swap_part and not is_block_device(swap_part):
raise exception.InstanceDeployFailure(_("Swap device '%s' not found")
% swap_part)
if ephemeral_part and not is_block_device(ephemeral_part):
raise exception.InstanceDeployFailure(
_("Ephemeral device '%s' not found") % ephemeral_part)
populate_image(image_path, root_part)
if swap_part:
mkswap(swap_part)
if ephemeral_part and not preserve_ephemeral:
mkfs_ephemeral(ephemeral_part, ephemeral_format)
try:
root_uuid = block_uuid(root_part)
except processutils.ProcessExecutionError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to detect root device UUID."))
return root_uuid
def deploy(address, port, iqn, lun, image_path,
root_mb, swap_mb, ephemeral_mb, ephemeral_format, node_uuid,
preserve_ephemeral=False):
"""All-in-one function to deploy a node.
:param address: The iSCSI IP address.
:param port: The iSCSI port number.
:param iqn: The iSCSI qualified name.
:param lun: The iSCSI logical unit number.
:param image_path: Path for the instance's disk image.
:param root_mb: Size of the root partition in megabytes.
:param swap_mb: Size of the swap partition in megabytes.
:param ephemeral_mb: Size of the ephemeral partition in megabytes. If 0,
no ephemeral partition will be created.
:param ephemeral_format: The type of file system to format the ephemeral
partition.
:param node_uuid: node's uuid. Used for logging.
:param preserve_ephemeral: If True, no filesystem is written to the
ephemeral block device, preserving whatever content it had (if the
partition table has not changed).
:returns: the UUID of the root partition.
"""
dev = get_dev(address, port, iqn, lun)
image_mb = get_image_mb(image_path)
if image_mb > root_mb:
root_mb = image_mb
discovery(address, port)
login_iscsi(address, port, iqn)
try:
root_uuid = work_on_disk(dev, root_mb, swap_mb, ephemeral_mb,
ephemeral_format, image_path, node_uuid,
preserve_ephemeral)
except processutils.ProcessExecutionError as err:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Deploy to address %s failed."), address)
LOG.error(_LE("Command: %s"), err.cmd)
LOG.error(_LE("StdOut: %r"), err.stdout)
LOG.error(_LE("StdErr: %r"), err.stderr)
except exception.InstanceDeployFailure as e:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Deploy to address %s failed."), address)
LOG.error(e)
finally:
logout_iscsi(address, port, iqn)
delete_iscsi(address, port, iqn)
return root_uuid
def notify_deploy_complete(address):
"""Notifies the completion of deployment to the baremetal node.
:param address: The IP address of the node.
"""
# Ensure the node started netcat on the port after POST the request.
time.sleep(3)
notify(address, 10000)
def check_for_missing_params(info_dict, error_msg, param_prefix=''):
"""Check for empty params in the provided dictionary.
:param info_dict: The dictionary to inspect.
:param error_msg: The error message to prefix before printing the
information about missing parameters.
:param param_prefix: Add this prefix to each parameter for error messages
:raises: MissingParameterValue, if one or more parameters are
empty in the provided dictionary.
"""
missing_info = []
for label, value in info_dict.items():
if not value:
missing_info.append(param_prefix + label)
if missing_info:
exc_msg = _("%(error_msg)s. Missing are: %(missing_info)s")
raise exception.MissingParameterValue(exc_msg %
{'error_msg': error_msg, 'missing_info': missing_info})
def fetch_images(ctx, cache, images_info, force_raw=True):
"""Check for available disk space and fetch images using ImageCache.
:param ctx: context
:param cache: ImageCache instance to use for fetching
:param images_info: list of tuples (image href, destination path)
:param force_raw: boolean value, whether to convert the image to raw
format
:raises: InstanceDeployFailure if unable to find enough disk space
"""
try:
image_cache.clean_up_caches(ctx, cache.master_dir, images_info)
except exception.InsufficientDiskSpace as e:
raise exception.InstanceDeployFailure(reason=e)
# NOTE(dtantsur): This code can suffer from race condition,
# if disk space is used between the check and actual download.
# This is probably unavoidable, as we can't control other
# (probably unrelated) processes
for href, path in images_info:
cache.fetch_image(href, path, ctx=ctx, force_raw=force_raw)
def set_failed_state(task, msg):
"""Sets the deploy status as failed with relevant messages.
This method sets the deployment as fail with the given message.
It sets node's provision_state to DEPLOYFAIL and updates last_error
with the given error message. It also powers off the baremetal node.
:param task: a TaskManager instance containing the node to act on.
:param msg: the message to set in last_error of the node.
"""
node = task.node
node.provision_state = states.DEPLOYFAIL
node.target_provision_state = states.NOSTATE
node.save()
try:
manager_utils.node_power_action(task, states.POWER_OFF)
except Exception:
msg2 = (_LE('Node %s failed to power off while handling deploy '
'failure. This may be a serious condition. Node '
'should be removed from Ironic or put in maintenance '
'mode until the problem is resolved.') % node.uuid)
LOG.exception(msg2)
finally:
# NOTE(deva): node_power_action() erases node.last_error
# so we need to set it again here.
node.last_error = msg
node.save()
| apache-2.0 | -5,319,612,824,135,378,000 | 36.225738 | 79 | 0.608274 | false |
xzturn/tensorflow | tensorflow/tools/docs/generate_lib.py | 4 | 22402 | # Lint as: python2, python3
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate docs for the TensorFlow Python API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import fnmatch
import os
import shutil
import tempfile
import six
from tensorflow.python.util import tf_inspect
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
from tensorflow.tools.docs import doc_controls
from tensorflow.tools.docs import doc_generator_visitor
from tensorflow.tools.docs import parser
from tensorflow.tools.docs import pretty_docs
from tensorflow.tools.docs import py_guide_parser
def write_docs(output_dir,
parser_config,
yaml_toc,
root_title='TensorFlow',
search_hints=True,
site_api_path='api_docs/python'):
"""Write previously extracted docs to disk.
Write a docs page for each symbol included in the indices of parser_config to
a tree of docs at `output_dir`.
Symbols with multiple aliases will have only one page written about
them, which is referenced for all aliases.
Args:
output_dir: Directory to write documentation markdown files to. Will be
created if it doesn't exist.
parser_config: A `parser.ParserConfig` object, containing all the necessary
indices.
yaml_toc: Set to `True` to generate a "_toc.yaml" file.
root_title: The title name for the root level index.md.
search_hints: (bool) include meta-data search hints at the top of each
output file.
site_api_path: The output path relative to the site root. Used in the
`_toc.yaml` and `_redirects.yaml` files.
Raises:
ValueError: if `output_dir` is not an absolute path
"""
# Make output_dir.
if not os.path.isabs(output_dir):
raise ValueError("'output_dir' must be an absolute path.\n"
" output_dir='%s'" % output_dir)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
# These dictionaries are used for table-of-contents generation below
# They will contain, after the for-loop below::
# - module name(string):classes and functions the module contains(list)
module_children = {}
# - symbol name(string):pathname (string)
symbol_to_file = {}
# Collect redirects for an api _redirects.yaml file.
redirects = []
# Parse and write Markdown pages, resolving cross-links (@{symbol}).
for full_name, py_object in six.iteritems(parser_config.index):
parser_config.reference_resolver.current_doc_full_name = full_name
if full_name in parser_config.duplicate_of:
continue
# Methods and some routines are documented only as part of their class.
if not (tf_inspect.ismodule(py_object) or tf_inspect.isclass(py_object) or
parser.is_free_function(py_object, full_name, parser_config.index)):
continue
sitepath = os.path.join(parser.documentation_path(full_name)[:-3])
# For TOC, we need to store a mapping from full_name to the file
# we're generating
symbol_to_file[full_name] = sitepath
# For a module, remember the module for the table-of-contents
if tf_inspect.ismodule(py_object):
if full_name in parser_config.tree:
module_children.setdefault(full_name, [])
# For something else that's documented,
# figure out what module it lives in
else:
subname = str(full_name)
while True:
subname = subname[:subname.rindex('.')]
if tf_inspect.ismodule(parser_config.index[subname]):
module_children.setdefault(subname, []).append(full_name)
break
# Generate docs for `py_object`, resolving references.
page_info = parser.docs_for_object(full_name, py_object, parser_config)
path = os.path.join(output_dir, parser.documentation_path(full_name))
directory = os.path.dirname(path)
try:
if not os.path.exists(directory):
os.makedirs(directory)
# This function returns raw bytes in PY2 or unicode in PY3.
if search_hints:
content = [page_info.get_metadata_html()]
else:
content = ['']
content.append(pretty_docs.build_md_page(page_info))
text = '\n'.join(content)
if six.PY3:
text = text.encode('utf-8')
with open(path, 'wb') as f:
f.write(text)
except OSError:
raise OSError(
'Cannot write documentation for %s to %s' % (full_name, directory))
duplicates = parser_config.duplicates.get(full_name, [])
if not duplicates:
continue
duplicates = [item for item in duplicates if item != full_name]
for dup in duplicates:
from_path = os.path.join(site_api_path,
six.ensure_str(dup).replace('.', '/'))
to_path = os.path.join(site_api_path,
six.ensure_str(full_name).replace('.', '/'))
redirects.append((
os.path.join('/', from_path),
os.path.join('/', to_path)))
if redirects:
redirects = sorted(redirects)
template = ('- from: {}\n'
' to: {}\n')
redirects = [template.format(f, t) for f, t in redirects]
api_redirects_path = os.path.join(output_dir, '_redirects.yaml')
with open(api_redirects_path, 'w') as redirect_file:
redirect_file.write('redirects:\n')
redirect_file.write(''.join(redirects))
if yaml_toc:
# Generate table of contents
# Put modules in alphabetical order, case-insensitive
modules = sorted(list(module_children.keys()), key=lambda a: a.upper())
leftnav_path = os.path.join(output_dir, '_toc.yaml')
with open(leftnav_path, 'w') as f:
# Generate header
f.write('# Automatically generated file; please do not edit\ntoc:\n')
for module in modules:
indent_num = module.count('.')
# Don't list `tf.submodule` inside `tf`
indent_num = max(indent_num, 1)
indent = ' '*indent_num
if indent_num > 1:
# tf.contrib.baysflow.entropy will be under
# tf.contrib->baysflow->entropy
title = six.ensure_str(module).split('.')[-1]
else:
title = module
header = [
'- title: ' + six.ensure_str(title), ' section:',
' - title: Overview', ' path: ' +
os.path.join('/', site_api_path, symbol_to_file[module])
]
header = ''.join([indent+line+'\n' for line in header])
f.write(header)
symbols_in_module = module_children.get(module, [])
# Sort case-insensitive, if equal sort case sensitive (upper first)
symbols_in_module.sort(key=lambda a: (a.upper(), a))
for full_name in symbols_in_module:
item = [
' - title: ' + full_name[len(module) + 1:],
' path: ' + os.path.join('/', site_api_path,
symbol_to_file[full_name])]
item = ''.join([indent+line+'\n' for line in item])
f.write(item)
# Write a global index containing all full names with links.
with open(os.path.join(output_dir, 'index.md'), 'w') as f:
f.write(
six.ensure_str(
parser.generate_global_index(root_title, parser_config.index,
parser_config.reference_resolver)))
def add_dict_to_dict(add_from, add_to):
for key in add_from:
if key in add_to:
add_to[key].extend(add_from[key])
else:
add_to[key] = add_from[key]
# Exclude some libraries in contrib from the documentation altogether.
def _get_default_private_map():
return {
'tf.test': ['mock'],
'tf': ['contrib'],
'tf.compat': ['v1', 'v2'],
}
# Exclude members of some libraries.
def _get_default_do_not_descend_map():
# TODO(markdaoust): Use docs_controls decorators, locally, instead.
return {
'tf': ['cli', 'lib', 'wrappers'],
}
class DocControlsAwareCrawler(public_api.PublicAPIVisitor):
"""A `docs_controls` aware API-crawler."""
def _is_private(self, path, name, obj):
if doc_controls.should_skip(obj):
return True
return super(DocControlsAwareCrawler, self)._is_private(path, name, obj)
def extract(py_modules,
private_map,
do_not_descend_map,
visitor_cls=doc_generator_visitor.DocGeneratorVisitor):
"""Extract docs from tf namespace and write them to disk."""
# Traverse the first module.
visitor = visitor_cls(py_modules[0][0])
api_visitor = DocControlsAwareCrawler(visitor)
api_visitor.set_root_name(py_modules[0][0])
add_dict_to_dict(private_map, api_visitor.private_map)
add_dict_to_dict(do_not_descend_map, api_visitor.do_not_descend_map)
traverse.traverse(py_modules[0][1], api_visitor)
# Traverse all py_modules after the first:
for module_name, module in py_modules[1:]:
visitor.set_root_name(module_name)
api_visitor.set_root_name(module_name)
traverse.traverse(module, api_visitor)
return visitor
class _GetMarkdownTitle(py_guide_parser.PyGuideParser):
"""Extract the title from a .md file."""
def __init__(self):
self.title = None
py_guide_parser.PyGuideParser.__init__(self)
def process_title(self, _, title):
if self.title is None: # only use the first title
self.title = title
class _DocInfo(object):
"""A simple struct for holding a doc's url and title."""
def __init__(self, url, title):
self.url = url
self.title = title
def build_doc_index(src_dir):
"""Build an index from a keyword designating a doc to _DocInfo objects."""
doc_index = {}
if not os.path.isabs(src_dir):
raise ValueError("'src_dir' must be an absolute path.\n"
" src_dir='%s'" % src_dir)
if not os.path.exists(src_dir):
raise ValueError("'src_dir' path must exist.\n"
" src_dir='%s'" % src_dir)
for dirpath, _, filenames in os.walk(src_dir):
suffix = os.path.relpath(path=dirpath, start=src_dir)
for base_name in filenames:
if not six.ensure_str(base_name).endswith('.md'):
continue
title_parser = _GetMarkdownTitle()
title_parser.process(os.path.join(dirpath, base_name))
if title_parser.title is None:
msg = ('`{}` has no markdown title (# title)'.format(
os.path.join(dirpath, base_name)))
raise ValueError(msg)
key_parts = six.ensure_str(os.path.join(suffix,
base_name[:-3])).split('/')
if key_parts[-1] == 'index':
key_parts = key_parts[:-1]
doc_info = _DocInfo(os.path.join(suffix, base_name), title_parser.title)
doc_index[key_parts[-1]] = doc_info
if len(key_parts) > 1:
doc_index['/'.join(key_parts[-2:])] = doc_info
return doc_index
class _GuideRef(object):
def __init__(self, base_name, title, section_title, section_tag):
self.url = 'api_guides/python/' + six.ensure_str(
(('%s#%s' % (base_name, section_tag)) if section_tag else base_name))
self.link_text = (('%s > %s' % (title, section_title))
if section_title else title)
def make_md_link(self, url_prefix):
return '[%s](%s%s)' % (self.link_text, url_prefix, self.url)
class _GenerateGuideIndex(py_guide_parser.PyGuideParser):
"""Turn guide files into an index from symbol name to a list of _GuideRefs."""
def __init__(self):
self.index = {}
py_guide_parser.PyGuideParser.__init__(self)
def process(self, full_path, base_name):
"""Index a file, reading from `full_path`, with `base_name` as the link."""
self.full_path = full_path
self.base_name = base_name
self.title = None
self.section_title = None
self.section_tag = None
py_guide_parser.PyGuideParser.process(self, full_path)
def process_title(self, _, title):
if self.title is None: # only use the first title
self.title = title
def process_section(self, _, section_title, tag):
self.section_title = section_title
self.section_tag = tag
def process_line(self, _, line):
"""Index the file and section of each `symbol` reference."""
for match in parser.AUTO_REFERENCE_RE.finditer(line):
val = self.index.get(match.group(1), [])
val.append(
_GuideRef(self.base_name, self.title, self.section_title,
self.section_tag))
self.index[match.group(1)] = val
def _build_guide_index(guide_src_dir):
"""Return dict: symbol name -> _GuideRef from the files in `guide_src_dir`."""
index_generator = _GenerateGuideIndex()
if os.path.exists(guide_src_dir):
for full_path, base_name in py_guide_parser.md_files_in_dir(guide_src_dir):
index_generator.process(full_path, base_name)
return index_generator.index
class _UpdateTags(py_guide_parser.PyGuideParser):
"""Rewrites a Python guide so that each section has an explicit id tag.
"section" here refers to blocks delimited by second level headings.
"""
def process_section(self, line_number, section_title, tag):
self.replace_line(line_number, '<h2 id="%s">%s</h2>' % (tag, section_title))
def update_id_tags_inplace(src_dir):
"""Set explicit ids on all second-level headings to ensure back-links work.
Args:
src_dir: The directory of md-files to convert (inplace).
"""
tag_updater = _UpdateTags()
for dirpath, _, filenames in os.walk(src_dir):
for base_name in filenames:
if not base_name.endswith('.md'):
continue
full_path = os.path.join(src_dir, dirpath, base_name)
# Tag updater loads the file, makes the replacements, and returns the
# modified file contents
content = tag_updater.process(full_path)
with open(full_path, 'w') as f:
f.write(six.ensure_str(content))
EXCLUDED = set(['__init__.py', 'OWNERS', 'README.txt'])
def replace_refs(src_dir,
output_dir,
reference_resolver,
file_pattern='*.md',
api_docs_relpath='api_docs'):
"""Fix @{} references in all files under `src_dir` matching `file_pattern`.
A matching directory structure, with the modified files is
written to `output_dir`.
`{"__init__.py","OWNERS","README.txt"}` are skipped.
Files not matching `file_pattern` (using `fnmatch`) are copied with no change.
Also, files in the `api_guides/python` directory get explicit ids set on all
heading-2s to ensure back-links work.
Args:
src_dir: The directory to convert files from.
output_dir: The root directory to write the resulting files to.
reference_resolver: A `parser.ReferenceResolver` to make the replacements.
file_pattern: Only replace references in files matching file_patters,
using fnmatch. Non-matching files are copied unchanged.
api_docs_relpath: Relative-path string to the api_docs, from the src_dir.
"""
# Iterate through all the source files and process them.
for dirpath, _, filenames in os.walk(src_dir):
depth = os.path.relpath(src_dir, start=dirpath)
# How to get from `dirpath` to api_docs/python/
relative_path_to_root = os.path.join(depth, api_docs_relpath, 'python')
# Make the directory under output_dir.
new_dir = os.path.join(output_dir,
os.path.relpath(path=dirpath, start=src_dir))
if not os.path.exists(new_dir):
os.makedirs(new_dir)
for base_name in filenames:
if base_name in EXCLUDED:
continue
full_in_path = os.path.join(dirpath, base_name)
# Set the `current_doc_full_name` so bad files can be reported on errors.
reference_resolver.current_doc_full_name = full_in_path
suffix = os.path.relpath(path=full_in_path, start=src_dir)
full_out_path = os.path.join(output_dir, suffix)
# Copy files that do not match the file_pattern, unmodified.
if not fnmatch.fnmatch(base_name, file_pattern):
if full_in_path != full_out_path:
shutil.copyfile(full_in_path, full_out_path)
continue
with open(full_in_path, 'rb') as f:
content = f.read().decode('utf-8')
content = reference_resolver.replace_references(content,
relative_path_to_root)
with open(full_out_path, 'wb') as f:
f.write(six.ensure_binary(content, 'utf-8'))
class DocGenerator(object):
"""Main entry point for generating docs."""
def __init__(self):
self.argument_parser = argparse.ArgumentParser()
self._py_modules = None
self._private_map = _get_default_private_map()
self._do_not_descend_map = _get_default_do_not_descend_map()
self.yaml_toc = True
self.argument_parser.add_argument(
'--no_search_hints',
dest='search_hints',
action='store_false',
default=True)
self.argument_parser.add_argument(
'--site_api_path',
type=str, default='api_docs/python',
help='The path from the site-root to api_docs'
'directory for this project')
self.argument_parser.add_argument(
'--api_cache_out_path',
type=str,
default=None,
help='Path to store a json-serialized api-index, so links can be '
'inserted into docs without rebuilding the api_docs')
def add_output_dir_argument(self):
self.argument_parser.add_argument(
'--output_dir',
type=str,
default=None,
required=True,
help='Directory to write docs to.')
def add_src_dir_argument(self):
self.argument_parser.add_argument(
'--src_dir',
type=str,
default=tempfile.mkdtemp(),
required=False,
help='Optional directory of source docs to add api_docs links to')
def add_base_dir_argument(self, default_base_dir):
self.argument_parser.add_argument(
'--base_dir',
type=str,
default=default_base_dir,
help='Base directory to strip from file names referenced in docs.')
def parse_known_args(self):
flags, _ = self.argument_parser.parse_known_args()
return flags
def add_to_private_map(self, d):
add_dict_to_dict(d, self._private_map)
def add_to_do_not_descend_map(self, d):
add_dict_to_dict(d, self._do_not_descend_map)
def set_private_map(self, d):
self._private_map = d
def set_do_not_descend_map(self, d):
self._do_not_descend_map = d
def set_py_modules(self, py_modules):
self._py_modules = py_modules
def py_module_names(self):
if self._py_modules is None:
raise RuntimeError(
'Must call set_py_modules() before running py_module_names().')
return [name for (name, _) in self._py_modules]
def make_reference_resolver(self, visitor, doc_index):
return parser.ReferenceResolver.from_visitor(
visitor, doc_index, py_module_names=self.py_module_names())
def make_parser_config(self, visitor, reference_resolver, guide_index,
base_dir):
return parser.ParserConfig(
reference_resolver=reference_resolver,
duplicates=visitor.duplicates,
duplicate_of=visitor.duplicate_of,
tree=visitor.tree,
index=visitor.index,
reverse_index=visitor.reverse_index,
guide_index=guide_index,
base_dir=base_dir)
def run_extraction(self):
return extract(self._py_modules, self._private_map,
self._do_not_descend_map)
def build(self, flags):
"""Build all the docs.
This produces two outputs
python api docs:
* generated from modules set with `set_py_modules`.
* written to '{FLAGS.output_dir}/api_docs/python/'
non-api docs:
* Everything in '{FLAGS.src_dir}' is copied to '{FLAGS.output_dir}'.
* '@{}' references in '.md' files are replaced with links.
* '.md' files under 'api_guides/python' have explicit ids set for their
second level headings.
Args:
flags:
* src_dir: Where to fetch the non-api-docs.
* base_dir: Base of the docs directory (Used to build correct
relative links).
* output_dir: Where to write the resulting docs.
Returns:
The number of errors encountered while processing.
"""
# Extract the python api from the _py_modules
doc_index = build_doc_index(flags.src_dir)
visitor = self.run_extraction()
reference_resolver = self.make_reference_resolver(visitor, doc_index)
if getattr(flags, 'api_cache_out_path', None):
reference_resolver.to_json_file(flags.api_cache_out_path)
# Build the guide_index for the api_docs back links.
root_title = getattr(flags, 'root_title', 'TensorFlow')
guide_index = _build_guide_index(
os.path.join(flags.src_dir, 'api_guides/python'))
# Write the api docs.
parser_config = self.make_parser_config(visitor, reference_resolver,
guide_index, flags.base_dir)
output_dir = os.path.join(flags.output_dir, 'api_docs/python')
write_docs(
output_dir,
parser_config,
yaml_toc=self.yaml_toc,
root_title=root_title,
search_hints=getattr(flags, 'search_hints', True),
site_api_path=getattr(flags, 'site_api_path', ''))
# Replace all the @{} references in files under `FLAGS.src_dir`
replace_refs(flags.src_dir, flags.output_dir, reference_resolver, '*.md')
# Fix the tags in the guide dir.
guide_dir = os.path.join(flags.output_dir, 'api_guides/python')
if os.path.exists(guide_dir):
update_id_tags_inplace(guide_dir)
# Report all errors found by the reference resolver, and return the error
# code.
parser_config.reference_resolver.log_errors()
return parser_config.reference_resolver.num_errors()
| apache-2.0 | -1,247,658,788,023,999,200 | 33.785714 | 80 | 0.63847 | false |
Mevlock/xbmc | lib/gtest/test/gtest_env_var_test.py | 184 | 3546 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
if expected != actual:
print 'Expected: %s' % (expected,)
print ' Actual: %s' % (actual,)
raise AssertionError
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def GetFlag(flag):
"""Runs gtest_env_var_test_ and returns its output."""
args = [COMMAND]
if flag is not None:
args += [flag]
return gtest_test_utils.Subprocess(args, env=environ,
capture_stderr=False).output
def TestFlag(flag, test_val, default_val):
"""Verifies that the given flag is affected by the corresponding env var."""
env_var = 'GTEST_' + flag.upper()
SetEnvVar(env_var, test_val)
AssertEq(test_val, GetFlag(flag))
SetEnvVar(env_var, None)
AssertEq(default_val, GetFlag(flag))
class GTestEnvVarTest(gtest_test_utils.TestCase):
def testEnvVarAffectsFlag(self):
"""Tests that environment variable should affect the corresponding flag."""
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
TestFlag('filter', 'FooTest.Bar', '*')
TestFlag('output', 'xml:tmp/foo.xml', '')
TestFlag('print_time', '0', '1')
TestFlag('repeat', '999', '1')
TestFlag('throw_on_failure', '1', '0')
TestFlag('death_test_style', 'threadsafe', 'fast')
TestFlag('catch_exceptions', '0', '1')
if IS_LINUX:
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
if __name__ == '__main__':
gtest_test_utils.Main()
| gpl-2.0 | 6,911,166,276,109,264,000 | 33.096154 | 79 | 0.701918 | false |
sudiptpa/google-diff-match-patch | python2/diff_match_patch_test.py | 319 | 41744 | #!/usr/bin/python2.4
"""Test harness for diff_match_patch.py
Copyright 2006 Google Inc.
http://code.google.com/p/google-diff-match-patch/
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import time
import unittest
import diff_match_patch as dmp_module
# Force a module reload. Allows one to edit the DMP module and rerun the tests
# without leaving the Python interpreter.
reload(dmp_module)
class DiffMatchPatchTest(unittest.TestCase):
def setUp(self):
"Test harness for dmp_module."
self.dmp = dmp_module.diff_match_patch()
def diff_rebuildtexts(self, diffs):
# Construct the two texts which made up the diff originally.
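    # Illustrative round-trip (hypothetical values): for any diff produced by
    # diff_main, e.g. diffs = dmp.diff_main("fruit", "flute"), this helper
    # returns the original inputs, ("fruit", "flute").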
text1 = ""
text2 = ""
for x in range(0, len(diffs)):
if diffs[x][0] != dmp_module.diff_match_patch.DIFF_INSERT:
text1 += diffs[x][1]
if diffs[x][0] != dmp_module.diff_match_patch.DIFF_DELETE:
text2 += diffs[x][1]
return (text1, text2)
class DiffTest(DiffMatchPatchTest):
"""DIFF TEST FUNCTIONS"""
def testDiffCommonPrefix(self):
# Detect any common prefix.
# Null case.
self.assertEquals(0, self.dmp.diff_commonPrefix("abc", "xyz"))
# Non-null case.
self.assertEquals(4, self.dmp.diff_commonPrefix("1234abcdef", "1234xyz"))
# Whole case.
self.assertEquals(4, self.dmp.diff_commonPrefix("1234", "1234xyz"))
def testDiffCommonSuffix(self):
# Detect any common suffix.
# Null case.
self.assertEquals(0, self.dmp.diff_commonSuffix("abc", "xyz"))
# Non-null case.
self.assertEquals(4, self.dmp.diff_commonSuffix("abcdef1234", "xyz1234"))
# Whole case.
self.assertEquals(4, self.dmp.diff_commonSuffix("1234", "xyz1234"))
def testDiffCommonOverlap(self):
# Null case.
self.assertEquals(0, self.dmp.diff_commonOverlap("", "abcd"))
# Whole case.
self.assertEquals(3, self.dmp.diff_commonOverlap("abc", "abcd"))
# No overlap.
self.assertEquals(0, self.dmp.diff_commonOverlap("123456", "abcd"))
# Overlap.
self.assertEquals(3, self.dmp.diff_commonOverlap("123456xxx", "xxxabcd"))
# Unicode.
# Some overly clever languages (C#) may treat ligatures as equal to their
# component letters. E.g. U+FB01 == 'fi'
self.assertEquals(0, self.dmp.diff_commonOverlap("fi", u"\ufb01i"))
def testDiffHalfMatch(self):
# Detect a halfmatch.
self.dmp.Diff_Timeout = 1
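    # diff_halfMatch is a speedup heuristic that may return a non-optimal
    # diff, so it is only used when a timeout is in effect (Diff_Timeout > 0).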
# No match.
self.assertEquals(None, self.dmp.diff_halfMatch("1234567890", "abcdef"))
self.assertEquals(None, self.dmp.diff_halfMatch("12345", "23"))
# Single Match.
self.assertEquals(("12", "90", "a", "z", "345678"), self.dmp.diff_halfMatch("1234567890", "a345678z"))
self.assertEquals(("a", "z", "12", "90", "345678"), self.dmp.diff_halfMatch("a345678z", "1234567890"))
self.assertEquals(("abc", "z", "1234", "0", "56789"), self.dmp.diff_halfMatch("abc56789z", "1234567890"))
self.assertEquals(("a", "xyz", "1", "7890", "23456"), self.dmp.diff_halfMatch("a23456xyz", "1234567890"))
# Multiple Matches.
self.assertEquals(("12123", "123121", "a", "z", "1234123451234"), self.dmp.diff_halfMatch("121231234123451234123121", "a1234123451234z"))
self.assertEquals(("", "-=-=-=-=-=", "x", "", "x-=-=-=-=-=-=-="), self.dmp.diff_halfMatch("x-=-=-=-=-=-=-=-=-=-=-=-=", "xx-=-=-=-=-=-=-="))
self.assertEquals(("-=-=-=-=-=", "", "", "y", "-=-=-=-=-=-=-=y"), self.dmp.diff_halfMatch("-=-=-=-=-=-=-=-=-=-=-=-=y", "-=-=-=-=-=-=-=yy"))
# Non-optimal halfmatch.
# Optimal diff would be -q+x=H-i+e=lloHe+Hu=llo-Hew+y not -qHillo+x=HelloHe-w+Hulloy
self.assertEquals(("qHillo", "w", "x", "Hulloy", "HelloHe"), self.dmp.diff_halfMatch("qHilloHelloHew", "xHelloHeHulloy"))
# Optimal no halfmatch.
self.dmp.Diff_Timeout = 0
self.assertEquals(None, self.dmp.diff_halfMatch("qHilloHelloHew", "xHelloHeHulloy"))
def testDiffLinesToChars(self):
# Convert lines down to characters.
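    # Each unique line is mapped to a one-character code (its index in the
    # returned line array): here "\x01" stands for "alpha\n" and "\x02" for
    # "beta\n".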
self.assertEquals(("\x01\x02\x01", "\x02\x01\x02", ["", "alpha\n", "beta\n"]), self.dmp.diff_linesToChars("alpha\nbeta\nalpha\n", "beta\nalpha\nbeta\n"))
self.assertEquals(("", "\x01\x02\x03\x03", ["", "alpha\r\n", "beta\r\n", "\r\n"]), self.dmp.diff_linesToChars("", "alpha\r\nbeta\r\n\r\n\r\n"))
self.assertEquals(("\x01", "\x02", ["", "a", "b"]), self.dmp.diff_linesToChars("a", "b"))
# More than 256 to reveal any 8-bit limitations.
n = 300
lineList = []
charList = []
for x in range(1, n + 1):
lineList.append(str(x) + "\n")
charList.append(unichr(x))
self.assertEquals(n, len(lineList))
lines = "".join(lineList)
chars = "".join(charList)
self.assertEquals(n, len(chars))
lineList.insert(0, "")
self.assertEquals((chars, "", lineList), self.dmp.diff_linesToChars(lines, ""))
def testDiffCharsToLines(self):
    # Convert chars up to lines (the inverse of diff_linesToChars).
diffs = [(self.dmp.DIFF_EQUAL, "\x01\x02\x01"), (self.dmp.DIFF_INSERT, "\x02\x01\x02")]
self.dmp.diff_charsToLines(diffs, ["", "alpha\n", "beta\n"])
self.assertEquals([(self.dmp.DIFF_EQUAL, "alpha\nbeta\nalpha\n"), (self.dmp.DIFF_INSERT, "beta\nalpha\nbeta\n")], diffs)
# More than 256 to reveal any 8-bit limitations.
n = 300
lineList = []
charList = []
for x in range(1, n + 1):
lineList.append(str(x) + "\n")
charList.append(unichr(x))
self.assertEquals(n, len(lineList))
lines = "".join(lineList)
chars = "".join(charList)
self.assertEquals(n, len(chars))
lineList.insert(0, "")
diffs = [(self.dmp.DIFF_DELETE, chars)]
self.dmp.diff_charsToLines(diffs, lineList)
self.assertEquals([(self.dmp.DIFF_DELETE, lines)], diffs)
def testDiffCleanupMerge(self):
# Cleanup a messy diff.
# Null case.
diffs = []
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([], diffs)
# No change case.
diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_INSERT, "c")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_INSERT, "c")], diffs)
# Merge equalities.
diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_EQUAL, "c")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "abc")], diffs)
# Merge deletions.
diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_DELETE, "c")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abc")], diffs)
# Merge insertions.
diffs = [(self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_INSERT, "c")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_INSERT, "abc")], diffs)
# Merge interweave.
diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_DELETE, "c"), (self.dmp.DIFF_INSERT, "d"), (self.dmp.DIFF_EQUAL, "e"), (self.dmp.DIFF_EQUAL, "f")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "ac"), (self.dmp.DIFF_INSERT, "bd"), (self.dmp.DIFF_EQUAL, "ef")], diffs)
# Prefix and suffix detection.
diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "abc"), (self.dmp.DIFF_DELETE, "dc")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "d"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_EQUAL, "c")], diffs)
# Prefix and suffix detection with equalities.
diffs = [(self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "abc"), (self.dmp.DIFF_DELETE, "dc"), (self.dmp.DIFF_EQUAL, "y")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "xa"), (self.dmp.DIFF_DELETE, "d"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_EQUAL, "cy")], diffs)
# Slide edit left.
diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_INSERT, "ba"), (self.dmp.DIFF_EQUAL, "c")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_INSERT, "ab"), (self.dmp.DIFF_EQUAL, "ac")], diffs)
# Slide edit right.
diffs = [(self.dmp.DIFF_EQUAL, "c"), (self.dmp.DIFF_INSERT, "ab"), (self.dmp.DIFF_EQUAL, "a")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "ca"), (self.dmp.DIFF_INSERT, "ba")], diffs)
# Slide edit left recursive.
diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_EQUAL, "c"), (self.dmp.DIFF_DELETE, "ac"), (self.dmp.DIFF_EQUAL, "x")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_EQUAL, "acx")], diffs)
# Slide edit right recursive.
diffs = [(self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "ca"), (self.dmp.DIFF_EQUAL, "c"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_EQUAL, "a")]
self.dmp.diff_cleanupMerge(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "xca"), (self.dmp.DIFF_DELETE, "cba")], diffs)
def testDiffCleanupSemanticLossless(self):
# Slide diffs to match logical boundaries.
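    # Single edits flanked by equalities are shifted sideways (without
    # changing the texts they describe) so that edit boundaries land on
    # blank lines, line ends, or word breaks where possible.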
# Null case.
diffs = []
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([], diffs)
# Blank lines.
diffs = [(self.dmp.DIFF_EQUAL, "AAA\r\n\r\nBBB"), (self.dmp.DIFF_INSERT, "\r\nDDD\r\n\r\nBBB"), (self.dmp.DIFF_EQUAL, "\r\nEEE")]
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "AAA\r\n\r\n"), (self.dmp.DIFF_INSERT, "BBB\r\nDDD\r\n\r\n"), (self.dmp.DIFF_EQUAL, "BBB\r\nEEE")], diffs)
# Line boundaries.
diffs = [(self.dmp.DIFF_EQUAL, "AAA\r\nBBB"), (self.dmp.DIFF_INSERT, " DDD\r\nBBB"), (self.dmp.DIFF_EQUAL, " EEE")]
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "AAA\r\n"), (self.dmp.DIFF_INSERT, "BBB DDD\r\n"), (self.dmp.DIFF_EQUAL, "BBB EEE")], diffs)
# Word boundaries.
diffs = [(self.dmp.DIFF_EQUAL, "The c"), (self.dmp.DIFF_INSERT, "ow and the c"), (self.dmp.DIFF_EQUAL, "at.")]
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "The "), (self.dmp.DIFF_INSERT, "cow and the "), (self.dmp.DIFF_EQUAL, "cat.")], diffs)
# Alphanumeric boundaries.
diffs = [(self.dmp.DIFF_EQUAL, "The-c"), (self.dmp.DIFF_INSERT, "ow-and-the-c"), (self.dmp.DIFF_EQUAL, "at.")]
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "The-"), (self.dmp.DIFF_INSERT, "cow-and-the-"), (self.dmp.DIFF_EQUAL, "cat.")], diffs)
# Hitting the start.
diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "ax")]
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "aax")], diffs)
# Hitting the end.
diffs = [(self.dmp.DIFF_EQUAL, "xa"), (self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "a")]
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "xaa"), (self.dmp.DIFF_DELETE, "a")], diffs)
# Sentence boundaries.
diffs = [(self.dmp.DIFF_EQUAL, "The xxx. The "), (self.dmp.DIFF_INSERT, "zzz. The "), (self.dmp.DIFF_EQUAL, "yyy.")]
self.dmp.diff_cleanupSemanticLossless(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "The xxx."), (self.dmp.DIFF_INSERT, " The zzz."), (self.dmp.DIFF_EQUAL, " The yyy.")], diffs)
def testDiffCleanupSemantic(self):
# Cleanup semantically trivial equalities.
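    # Short equalities surrounded by large edits are factored out into the
    # edits, and overlaps between a deletion and the following insertion are
    # extracted as new equalities.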
# Null case.
diffs = []
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([], diffs)
# No elimination #1.
diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "cd"), (self.dmp.DIFF_EQUAL, "12"), (self.dmp.DIFF_DELETE, "e")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "cd"), (self.dmp.DIFF_EQUAL, "12"), (self.dmp.DIFF_DELETE, "e")], diffs)
# No elimination #2.
diffs = [(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "ABC"), (self.dmp.DIFF_EQUAL, "1234"), (self.dmp.DIFF_DELETE, "wxyz")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "ABC"), (self.dmp.DIFF_EQUAL, "1234"), (self.dmp.DIFF_DELETE, "wxyz")], diffs)
# Simple elimination.
diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_DELETE, "c")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "b")], diffs)
# Backpass elimination.
diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_EQUAL, "cd"), (self.dmp.DIFF_DELETE, "e"), (self.dmp.DIFF_EQUAL, "f"), (self.dmp.DIFF_INSERT, "g")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abcdef"), (self.dmp.DIFF_INSERT, "cdfg")], diffs)
# Multiple eliminations.
diffs = [(self.dmp.DIFF_INSERT, "1"), (self.dmp.DIFF_EQUAL, "A"), (self.dmp.DIFF_DELETE, "B"), (self.dmp.DIFF_INSERT, "2"), (self.dmp.DIFF_EQUAL, "_"), (self.dmp.DIFF_INSERT, "1"), (self.dmp.DIFF_EQUAL, "A"), (self.dmp.DIFF_DELETE, "B"), (self.dmp.DIFF_INSERT, "2")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "AB_AB"), (self.dmp.DIFF_INSERT, "1A2_1A2")], diffs)
# Word boundaries.
diffs = [(self.dmp.DIFF_EQUAL, "The c"), (self.dmp.DIFF_DELETE, "ow and the c"), (self.dmp.DIFF_EQUAL, "at.")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_EQUAL, "The "), (self.dmp.DIFF_DELETE, "cow and the "), (self.dmp.DIFF_EQUAL, "cat.")], diffs)
# No overlap elimination.
diffs = [(self.dmp.DIFF_DELETE, "abcxx"), (self.dmp.DIFF_INSERT, "xxdef")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abcxx"), (self.dmp.DIFF_INSERT, "xxdef")], diffs)
# Overlap elimination.
diffs = [(self.dmp.DIFF_DELETE, "abcxxx"), (self.dmp.DIFF_INSERT, "xxxdef")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_EQUAL, "xxx"), (self.dmp.DIFF_INSERT, "def")], diffs)
# Reverse overlap elimination.
diffs = [(self.dmp.DIFF_DELETE, "xxxabc"), (self.dmp.DIFF_INSERT, "defxxx")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_INSERT, "def"), (self.dmp.DIFF_EQUAL, "xxx"), (self.dmp.DIFF_DELETE, "abc")], diffs)
# Two overlap eliminations.
diffs = [(self.dmp.DIFF_DELETE, "abcd1212"), (self.dmp.DIFF_INSERT, "1212efghi"), (self.dmp.DIFF_EQUAL, "----"), (self.dmp.DIFF_DELETE, "A3"), (self.dmp.DIFF_INSERT, "3BC")]
self.dmp.diff_cleanupSemantic(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abcd"), (self.dmp.DIFF_EQUAL, "1212"), (self.dmp.DIFF_INSERT, "efghi"), (self.dmp.DIFF_EQUAL, "----"), (self.dmp.DIFF_DELETE, "A"), (self.dmp.DIFF_EQUAL, "3"), (self.dmp.DIFF_INSERT, "BC")], diffs)
def testDiffCleanupEfficiency(self):
# Cleanup operationally trivial equalities.
self.dmp.Diff_EditCost = 4
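    # Equalities shorter than Diff_EditCost that are flanked by edits cost
    # more to keep than to fold into the surrounding edits, so they are
    # eliminated.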
# Null case.
diffs = []
self.dmp.diff_cleanupEfficiency(diffs)
self.assertEquals([], diffs)
# No elimination.
diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "wxyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
self.dmp.diff_cleanupEfficiency(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "wxyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")], diffs)
# Four-edit elimination.
diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "xyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
self.dmp.diff_cleanupEfficiency(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abxyzcd"), (self.dmp.DIFF_INSERT, "12xyz34")], diffs)
# Three-edit elimination.
diffs = [(self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
self.dmp.diff_cleanupEfficiency(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "xcd"), (self.dmp.DIFF_INSERT, "12x34")], diffs)
# Backpass elimination.
diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "xy"), (self.dmp.DIFF_INSERT, "34"), (self.dmp.DIFF_EQUAL, "z"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "56")]
self.dmp.diff_cleanupEfficiency(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abxyzcd"), (self.dmp.DIFF_INSERT, "12xy34z56")], diffs)
# High cost elimination.
self.dmp.Diff_EditCost = 5
diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "wxyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
self.dmp.diff_cleanupEfficiency(diffs)
self.assertEquals([(self.dmp.DIFF_DELETE, "abwxyzcd"), (self.dmp.DIFF_INSERT, "12wxyz34")], diffs)
self.dmp.Diff_EditCost = 4
def testDiffPrettyHtml(self):
# Pretty print.
diffs = [(self.dmp.DIFF_EQUAL, "a\n"), (self.dmp.DIFF_DELETE, "<B>b</B>"), (self.dmp.DIFF_INSERT, "c&d")]
self.assertEquals("<span>a¶<br></span><del style=\"background:#ffe6e6;\"><B>b</B></del><ins style=\"background:#e6ffe6;\">c&d</ins>", self.dmp.diff_prettyHtml(diffs))
def testDiffText(self):
# Compute the source and destination texts.
diffs = [(self.dmp.DIFF_EQUAL, "jump"), (self.dmp.DIFF_DELETE, "s"), (self.dmp.DIFF_INSERT, "ed"), (self.dmp.DIFF_EQUAL, " over "), (self.dmp.DIFF_DELETE, "the"), (self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_EQUAL, " lazy")]
self.assertEquals("jumps over the lazy", self.dmp.diff_text1(diffs))
self.assertEquals("jumped over a lazy", self.dmp.diff_text2(diffs))
def testDiffDelta(self):
# Convert a diff into delta string.
diffs = [(self.dmp.DIFF_EQUAL, "jump"), (self.dmp.DIFF_DELETE, "s"), (self.dmp.DIFF_INSERT, "ed"), (self.dmp.DIFF_EQUAL, " over "), (self.dmp.DIFF_DELETE, "the"), (self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_EQUAL, " lazy"), (self.dmp.DIFF_INSERT, "old dog")]
text1 = self.dmp.diff_text1(diffs)
self.assertEquals("jumps over the lazy", text1)
delta = self.dmp.diff_toDelta(diffs)
self.assertEquals("=4\t-1\t+ed\t=6\t-3\t+a\t=5\t+old dog", delta)
# Convert delta string into a diff.
self.assertEquals(diffs, self.dmp.diff_fromDelta(text1, delta))
# Generates error (19 != 20).
try:
self.dmp.diff_fromDelta(text1 + "x", delta)
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
# Generates error (19 != 18).
try:
self.dmp.diff_fromDelta(text1[1:], delta)
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
# Generates error (%c3%xy invalid Unicode).
try:
self.dmp.diff_fromDelta("", "+%c3xy")
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
# Test deltas with special characters.
diffs = [(self.dmp.DIFF_EQUAL, u"\u0680 \x00 \t %"), (self.dmp.DIFF_DELETE, u"\u0681 \x01 \n ^"), (self.dmp.DIFF_INSERT, u"\u0682 \x02 \\ |")]
text1 = self.dmp.diff_text1(diffs)
self.assertEquals(u"\u0680 \x00 \t %\u0681 \x01 \n ^", text1)
delta = self.dmp.diff_toDelta(diffs)
self.assertEquals("=7\t-7\t+%DA%82 %02 %5C %7C", delta)
# Convert delta string into a diff.
self.assertEquals(diffs, self.dmp.diff_fromDelta(text1, delta))
# Verify pool of unchanged characters.
diffs = [(self.dmp.DIFF_INSERT, "A-Z a-z 0-9 - _ . ! ~ * ' ( ) ; / ? : @ & = + $ , # ")]
text2 = self.dmp.diff_text2(diffs)
self.assertEquals("A-Z a-z 0-9 - _ . ! ~ * \' ( ) ; / ? : @ & = + $ , # ", text2)
delta = self.dmp.diff_toDelta(diffs)
self.assertEquals("+A-Z a-z 0-9 - _ . ! ~ * \' ( ) ; / ? : @ & = + $ , # ", delta)
# Convert delta string into a diff.
self.assertEquals(diffs, self.dmp.diff_fromDelta("", delta))
def testDiffXIndex(self):
# Translate a location in text1 to text2.
self.assertEquals(5, self.dmp.diff_xIndex([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "1234"), (self.dmp.DIFF_EQUAL, "xyz")], 2))
# Translation on deletion.
self.assertEquals(1, self.dmp.diff_xIndex([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "1234"), (self.dmp.DIFF_EQUAL, "xyz")], 3))
def testDiffLevenshtein(self):
# Levenshtein with trailing equality.
self.assertEquals(4, self.dmp.diff_levenshtein([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "1234"), (self.dmp.DIFF_EQUAL, "xyz")]))
# Levenshtein with leading equality.
self.assertEquals(4, self.dmp.diff_levenshtein([(self.dmp.DIFF_EQUAL, "xyz"), (self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "1234")]))
# Levenshtein with middle equality.
self.assertEquals(7, self.dmp.diff_levenshtein([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_EQUAL, "xyz"), (self.dmp.DIFF_INSERT, "1234")]))
def testDiffBisect(self):
# Normal.
a = "cat"
b = "map"
# Since the resulting diff hasn't been normalized, it would be ok if
# the insertion and deletion pairs are swapped.
# If the order changes, tweak this test as required.
self.assertEquals([(self.dmp.DIFF_DELETE, "c"), (self.dmp.DIFF_INSERT, "m"), (self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "t"), (self.dmp.DIFF_INSERT, "p")], self.dmp.diff_bisect(a, b, sys.maxint))
# Timeout.
self.assertEquals([(self.dmp.DIFF_DELETE, "cat"), (self.dmp.DIFF_INSERT, "map")], self.dmp.diff_bisect(a, b, 0))
def testDiffMain(self):
# Perform a trivial diff.
# Null case.
self.assertEquals([], self.dmp.diff_main("", "", False))
# Equality.
self.assertEquals([(self.dmp.DIFF_EQUAL, "abc")], self.dmp.diff_main("abc", "abc", False))
# Simple insertion.
self.assertEquals([(self.dmp.DIFF_EQUAL, "ab"), (self.dmp.DIFF_INSERT, "123"), (self.dmp.DIFF_EQUAL, "c")], self.dmp.diff_main("abc", "ab123c", False))
# Simple deletion.
self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "123"), (self.dmp.DIFF_EQUAL, "bc")], self.dmp.diff_main("a123bc", "abc", False))
# Two insertions.
self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_INSERT, "123"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_INSERT, "456"), (self.dmp.DIFF_EQUAL, "c")], self.dmp.diff_main("abc", "a123b456c", False))
# Two deletions.
self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "123"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_DELETE, "456"), (self.dmp.DIFF_EQUAL, "c")], self.dmp.diff_main("a123b456c", "abc", False))
# Perform a real diff.
# Switch off the timeout.
self.dmp.Diff_Timeout = 0
# Simple cases.
self.assertEquals([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "b")], self.dmp.diff_main("a", "b", False))
self.assertEquals([(self.dmp.DIFF_DELETE, "Apple"), (self.dmp.DIFF_INSERT, "Banana"), (self.dmp.DIFF_EQUAL, "s are a"), (self.dmp.DIFF_INSERT, "lso"), (self.dmp.DIFF_EQUAL, " fruit.")], self.dmp.diff_main("Apples are a fruit.", "Bananas are also fruit.", False))
self.assertEquals([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, u"\u0680"), (self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "\t"), (self.dmp.DIFF_INSERT, "\x00")], self.dmp.diff_main("ax\t", u"\u0680x\x00", False))
# Overlaps.
self.assertEquals([(self.dmp.DIFF_DELETE, "1"), (self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "y"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_DELETE, "2"), (self.dmp.DIFF_INSERT, "xab")], self.dmp.diff_main("1ayb2", "abxab", False))
self.assertEquals([(self.dmp.DIFF_INSERT, "xaxcx"), (self.dmp.DIFF_EQUAL, "abc"), (self.dmp.DIFF_DELETE, "y")], self.dmp.diff_main("abcy", "xaxcxabc", False))
self.assertEquals([(self.dmp.DIFF_DELETE, "ABCD"), (self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "="), (self.dmp.DIFF_INSERT, "-"), (self.dmp.DIFF_EQUAL, "bcd"), (self.dmp.DIFF_DELETE, "="), (self.dmp.DIFF_INSERT, "-"), (self.dmp.DIFF_EQUAL, "efghijklmnopqrs"), (self.dmp.DIFF_DELETE, "EFGHIJKLMNOefg")], self.dmp.diff_main("ABCDa=bcd=efghijklmnopqrsEFGHIJKLMNOefg", "a-bcd-efghijklmnopqrs", False))
# Large equality.
self.assertEquals([(self.dmp.DIFF_INSERT, " "), (self.dmp.DIFF_EQUAL,"a"), (self.dmp.DIFF_INSERT,"nd"), (self.dmp.DIFF_EQUAL," [[Pennsylvania]]"), (self.dmp.DIFF_DELETE," and [[New")], self.dmp.diff_main("a [[Pennsylvania]] and [[New", " and [[Pennsylvania]]", False))
# Timeout.
self.dmp.Diff_Timeout = 0.1 # 100ms
a = "`Twas brillig, and the slithy toves\nDid gyre and gimble in the wabe:\nAll mimsy were the borogoves,\nAnd the mome raths outgrabe.\n"
b = "I am the very model of a modern major general,\nI've information vegetable, animal, and mineral,\nI know the kings of England, and I quote the fights historical,\nFrom Marathon to Waterloo, in order categorical.\n"
# Increase the text lengths by 1024 times to ensure a timeout.
for x in range(10):
a = a + a
b = b + b
startTime = time.time()
self.dmp.diff_main(a, b)
endTime = time.time()
# Test that we took at least the timeout period.
self.assertTrue(self.dmp.Diff_Timeout <= endTime - startTime)
# Test that we didn't take forever (be forgiving).
# Theoretically this test could fail very occasionally if the
# OS task swaps or locks up for a second at the wrong moment.
self.assertTrue(self.dmp.Diff_Timeout * 2 > endTime - startTime)
self.dmp.Diff_Timeout = 0
# Test the linemode speedup.
# Must be long to pass the 100 char cutoff.
# Simple line-mode.
a = "1234567890\n" * 13
b = "abcdefghij\n" * 13
self.assertEquals(self.dmp.diff_main(a, b, False), self.dmp.diff_main(a, b, True))
# Single line-mode.
a = "1234567890" * 13
b = "abcdefghij" * 13
self.assertEquals(self.dmp.diff_main(a, b, False), self.dmp.diff_main(a, b, True))
# Overlap line-mode.
a = "1234567890\n" * 13
b = "abcdefghij\n1234567890\n1234567890\n1234567890\nabcdefghij\n1234567890\n1234567890\n1234567890\nabcdefghij\n1234567890\n1234567890\n1234567890\nabcdefghij\n"
texts_linemode = self.diff_rebuildtexts(self.dmp.diff_main(a, b, True))
texts_textmode = self.diff_rebuildtexts(self.dmp.diff_main(a, b, False))
self.assertEquals(texts_textmode, texts_linemode)
# Test null inputs.
try:
self.dmp.diff_main(None, None)
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
class MatchTest(DiffMatchPatchTest):
"""MATCH TEST FUNCTIONS"""
def testMatchAlphabet(self):
# Initialise the bitmasks for Bitap.
self.assertEquals({"a":4, "b":2, "c":1}, self.dmp.match_alphabet("abc"))
self.assertEquals({"a":37, "b":18, "c":8}, self.dmp.match_alphabet("abcaba"))
def testMatchBitap(self):
self.dmp.Match_Distance = 100
self.dmp.Match_Threshold = 0.5
# Exact matches.
self.assertEquals(5, self.dmp.match_bitap("abcdefghijk", "fgh", 5))
self.assertEquals(5, self.dmp.match_bitap("abcdefghijk", "fgh", 0))
# Fuzzy matches.
self.assertEquals(4, self.dmp.match_bitap("abcdefghijk", "efxhi", 0))
self.assertEquals(2, self.dmp.match_bitap("abcdefghijk", "cdefxyhijk", 5))
self.assertEquals(-1, self.dmp.match_bitap("abcdefghijk", "bxy", 1))
# Overflow.
self.assertEquals(2, self.dmp.match_bitap("123456789xx0", "3456789x0", 2))
self.assertEquals(0, self.dmp.match_bitap("abcdef", "xxabc", 4))
self.assertEquals(3, self.dmp.match_bitap("abcdef", "defyy", 4))
self.assertEquals(0, self.dmp.match_bitap("abcdef", "xabcdefy", 0))
# Threshold test.
self.dmp.Match_Threshold = 0.4
self.assertEquals(4, self.dmp.match_bitap("abcdefghijk", "efxyhi", 1))
self.dmp.Match_Threshold = 0.3
self.assertEquals(-1, self.dmp.match_bitap("abcdefghijk", "efxyhi", 1))
self.dmp.Match_Threshold = 0.0
self.assertEquals(1, self.dmp.match_bitap("abcdefghijk", "bcdef", 1))
self.dmp.Match_Threshold = 0.5
# Multiple select.
self.assertEquals(0, self.dmp.match_bitap("abcdexyzabcde", "abccde", 3))
self.assertEquals(8, self.dmp.match_bitap("abcdexyzabcde", "abccde", 5))
# Distance test.
self.dmp.Match_Distance = 10 # Strict location.
self.assertEquals(-1, self.dmp.match_bitap("abcdefghijklmnopqrstuvwxyz", "abcdefg", 24))
self.assertEquals(0, self.dmp.match_bitap("abcdefghijklmnopqrstuvwxyz", "abcdxxefg", 1))
self.dmp.Match_Distance = 1000 # Loose location.
self.assertEquals(0, self.dmp.match_bitap("abcdefghijklmnopqrstuvwxyz", "abcdefg", 24))
def testMatchMain(self):
# Full match.
# Shortcut matches.
self.assertEquals(0, self.dmp.match_main("abcdef", "abcdef", 1000))
self.assertEquals(-1, self.dmp.match_main("", "abcdef", 1))
self.assertEquals(3, self.dmp.match_main("abcdef", "", 3))
self.assertEquals(3, self.dmp.match_main("abcdef", "de", 3))
self.assertEquals(3, self.dmp.match_main("abcdef", "defy", 4))
self.assertEquals(0, self.dmp.match_main("abcdef", "abcdefy", 0))
# Complex match.
self.dmp.Match_Threshold = 0.7
self.assertEquals(4, self.dmp.match_main("I am the very model of a modern major general.", " that berry ", 5))
self.dmp.Match_Threshold = 0.5
# Test null inputs.
try:
self.dmp.match_main(None, None, 0)
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
class PatchTest(DiffMatchPatchTest):
"""PATCH TEST FUNCTIONS"""
def testPatchObj(self):
# Patch Object.
p = dmp_module.patch_obj()
p.start1 = 20
p.start2 = 21
p.length1 = 18
p.length2 = 17
p.diffs = [(self.dmp.DIFF_EQUAL, "jump"), (self.dmp.DIFF_DELETE, "s"), (self.dmp.DIFF_INSERT, "ed"), (self.dmp.DIFF_EQUAL, " over "), (self.dmp.DIFF_DELETE, "the"), (self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_EQUAL, "\nlaz")]
strp = str(p)
self.assertEquals("@@ -21,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n %0Alaz\n", strp)
def testPatchFromText(self):
self.assertEquals([], self.dmp.patch_fromText(""))
strp = "@@ -21,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n %0Alaz\n"
self.assertEquals(strp, str(self.dmp.patch_fromText(strp)[0]))
self.assertEquals("@@ -1 +1 @@\n-a\n+b\n", str(self.dmp.patch_fromText("@@ -1 +1 @@\n-a\n+b\n")[0]))
self.assertEquals("@@ -1,3 +0,0 @@\n-abc\n", str(self.dmp.patch_fromText("@@ -1,3 +0,0 @@\n-abc\n")[0]))
self.assertEquals("@@ -0,0 +1,3 @@\n+abc\n", str(self.dmp.patch_fromText("@@ -0,0 +1,3 @@\n+abc\n")[0]))
# Generates error.
try:
self.dmp.patch_fromText("Bad\nPatch\n")
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
def testPatchToText(self):
strp = "@@ -21,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n laz\n"
p = self.dmp.patch_fromText(strp)
self.assertEquals(strp, self.dmp.patch_toText(p))
strp = "@@ -1,9 +1,9 @@\n-f\n+F\n oo+fooba\n@@ -7,9 +7,9 @@\n obar\n-,\n+.\n tes\n"
p = self.dmp.patch_fromText(strp)
self.assertEquals(strp, self.dmp.patch_toText(p))
def testPatchAddContext(self):
self.dmp.Patch_Margin = 4
p = self.dmp.patch_fromText("@@ -21,4 +21,10 @@\n-jump\n+somersault\n")[0]
self.dmp.patch_addContext(p, "The quick brown fox jumps over the lazy dog.")
self.assertEquals("@@ -17,12 +17,18 @@\n fox \n-jump\n+somersault\n s ov\n", str(p))
# Same, but not enough trailing context.
p = self.dmp.patch_fromText("@@ -21,4 +21,10 @@\n-jump\n+somersault\n")[0]
self.dmp.patch_addContext(p, "The quick brown fox jumps.")
self.assertEquals("@@ -17,10 +17,16 @@\n fox \n-jump\n+somersault\n s.\n", str(p))
# Same, but not enough leading context.
p = self.dmp.patch_fromText("@@ -3 +3,2 @@\n-e\n+at\n")[0]
self.dmp.patch_addContext(p, "The quick brown fox jumps.")
self.assertEquals("@@ -1,7 +1,8 @@\n Th\n-e\n+at\n qui\n", str(p))
# Same, but with ambiguity.
p = self.dmp.patch_fromText("@@ -3 +3,2 @@\n-e\n+at\n")[0]
self.dmp.patch_addContext(p, "The quick brown fox jumps. The quick brown fox crashes.")
self.assertEquals("@@ -1,27 +1,28 @@\n Th\n-e\n+at\n quick brown fox jumps. \n", str(p))
def testPatchMake(self):
# Null case.
patches = self.dmp.patch_make("", "")
self.assertEquals("", self.dmp.patch_toText(patches))
text1 = "The quick brown fox jumps over the lazy dog."
text2 = "That quick brown fox jumped over a lazy dog."
# Text2+Text1 inputs.
expectedPatch = "@@ -1,8 +1,7 @@\n Th\n-at\n+e\n qui\n@@ -21,17 +21,18 @@\n jump\n-ed\n+s\n over \n-a\n+the\n laz\n"
# The second patch must be "-21,17 +21,18", not "-22,17 +21,18" due to rolling context.
patches = self.dmp.patch_make(text2, text1)
self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
# Text1+Text2 inputs.
expectedPatch = "@@ -1,11 +1,12 @@\n Th\n-e\n+at\n quick b\n@@ -22,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n laz\n"
patches = self.dmp.patch_make(text1, text2)
self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
# Diff input.
diffs = self.dmp.diff_main(text1, text2, False)
patches = self.dmp.patch_make(diffs)
self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
# Text1+Diff inputs.
patches = self.dmp.patch_make(text1, diffs)
self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
# Text1+Text2+Diff inputs (deprecated).
patches = self.dmp.patch_make(text1, text2, diffs)
self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
# Character encoding.
patches = self.dmp.patch_make("`1234567890-=[]\\;',./", "~!@#$%^&*()_+{}|:\"<>?")
self.assertEquals("@@ -1,21 +1,21 @@\n-%601234567890-=%5B%5D%5C;',./\n+~!@#$%25%5E&*()_+%7B%7D%7C:%22%3C%3E?\n", self.dmp.patch_toText(patches))
# Character decoding.
diffs = [(self.dmp.DIFF_DELETE, "`1234567890-=[]\\;',./"), (self.dmp.DIFF_INSERT, "~!@#$%^&*()_+{}|:\"<>?")]
self.assertEquals(diffs, self.dmp.patch_fromText("@@ -1,21 +1,21 @@\n-%601234567890-=%5B%5D%5C;',./\n+~!@#$%25%5E&*()_+%7B%7D%7C:%22%3C%3E?\n")[0].diffs)
# Long string with repeats.
text1 = ""
for x in range(100):
text1 += "abcdef"
text2 = text1 + "123"
expectedPatch = "@@ -573,28 +573,31 @@\n cdefabcdefabcdefabcdefabcdef\n+123\n"
patches = self.dmp.patch_make(text1, text2)
self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
# Test null inputs.
try:
self.dmp.patch_make(None, None)
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
def testPatchSplitMax(self):
# Assumes that Match_MaxBits is 32.
patches = self.dmp.patch_make("abcdefghijklmnopqrstuvwxyz01234567890", "XabXcdXefXghXijXklXmnXopXqrXstXuvXwxXyzX01X23X45X67X89X0")
self.dmp.patch_splitMax(patches)
self.assertEquals("@@ -1,32 +1,46 @@\n+X\n ab\n+X\n cd\n+X\n ef\n+X\n gh\n+X\n ij\n+X\n kl\n+X\n mn\n+X\n op\n+X\n qr\n+X\n st\n+X\n uv\n+X\n wx\n+X\n yz\n+X\n 012345\n@@ -25,13 +39,18 @@\n zX01\n+X\n 23\n+X\n 45\n+X\n 67\n+X\n 89\n+X\n 0\n", self.dmp.patch_toText(patches))
patches = self.dmp.patch_make("abcdef1234567890123456789012345678901234567890123456789012345678901234567890uvwxyz", "abcdefuvwxyz")
oldToText = self.dmp.patch_toText(patches)
self.dmp.patch_splitMax(patches)
self.assertEquals(oldToText, self.dmp.patch_toText(patches))
patches = self.dmp.patch_make("1234567890123456789012345678901234567890123456789012345678901234567890", "abc")
self.dmp.patch_splitMax(patches)
self.assertEquals("@@ -1,32 +1,4 @@\n-1234567890123456789012345678\n 9012\n@@ -29,32 +1,4 @@\n-9012345678901234567890123456\n 7890\n@@ -57,14 +1,3 @@\n-78901234567890\n+abc\n", self.dmp.patch_toText(patches))
patches = self.dmp.patch_make("abcdefghij , h : 0 , t : 1 abcdefghij , h : 0 , t : 1 abcdefghij , h : 0 , t : 1", "abcdefghij , h : 1 , t : 1 abcdefghij , h : 1 , t : 1 abcdefghij , h : 0 , t : 1")
self.dmp.patch_splitMax(patches)
self.assertEquals("@@ -2,32 +2,32 @@\n bcdefghij , h : \n-0\n+1\n , t : 1 abcdef\n@@ -29,32 +29,32 @@\n bcdefghij , h : \n-0\n+1\n , t : 1 abcdef\n", self.dmp.patch_toText(patches))
def testPatchAddPadding(self):
# Both edges full.
patches = self.dmp.patch_make("", "test")
self.assertEquals("@@ -0,0 +1,4 @@\n+test\n", self.dmp.patch_toText(patches))
self.dmp.patch_addPadding(patches)
self.assertEquals("@@ -1,8 +1,12 @@\n %01%02%03%04\n+test\n %01%02%03%04\n", self.dmp.patch_toText(patches))
# Both edges partial.
patches = self.dmp.patch_make("XY", "XtestY")
self.assertEquals("@@ -1,2 +1,6 @@\n X\n+test\n Y\n", self.dmp.patch_toText(patches))
self.dmp.patch_addPadding(patches)
self.assertEquals("@@ -2,8 +2,12 @@\n %02%03%04X\n+test\n Y%01%02%03\n", self.dmp.patch_toText(patches))
# Both edges none.
patches = self.dmp.patch_make("XXXXYYYY", "XXXXtestYYYY")
self.assertEquals("@@ -1,8 +1,12 @@\n XXXX\n+test\n YYYY\n", self.dmp.patch_toText(patches))
self.dmp.patch_addPadding(patches)
self.assertEquals("@@ -5,8 +5,12 @@\n XXXX\n+test\n YYYY\n", self.dmp.patch_toText(patches))
def testPatchApply(self):
self.dmp.Match_Distance = 1000
self.dmp.Match_Threshold = 0.5
self.dmp.Patch_DeleteThreshold = 0.5
# Null case.
patches = self.dmp.patch_make("", "")
results = self.dmp.patch_apply(patches, "Hello world.")
self.assertEquals(("Hello world.", []), results)
# Exact match.
patches = self.dmp.patch_make("The quick brown fox jumps over the lazy dog.", "That quick brown fox jumped over a lazy dog.")
results = self.dmp.patch_apply(patches, "The quick brown fox jumps over the lazy dog.")
self.assertEquals(("That quick brown fox jumped over a lazy dog.", [True, True]), results)
# Partial match.
results = self.dmp.patch_apply(patches, "The quick red rabbit jumps over the tired tiger.")
self.assertEquals(("That quick red rabbit jumped over a tired tiger.", [True, True]), results)
# Failed match.
results = self.dmp.patch_apply(patches, "I am the very model of a modern major general.")
self.assertEquals(("I am the very model of a modern major general.", [False, False]), results)
# Big delete, small change.
patches = self.dmp.patch_make("x1234567890123456789012345678901234567890123456789012345678901234567890y", "xabcy")
results = self.dmp.patch_apply(patches, "x123456789012345678901234567890-----++++++++++-----123456789012345678901234567890y")
self.assertEquals(("xabcy", [True, True]), results)
# Big delete, big change 1.
patches = self.dmp.patch_make("x1234567890123456789012345678901234567890123456789012345678901234567890y", "xabcy")
results = self.dmp.patch_apply(patches, "x12345678901234567890---------------++++++++++---------------12345678901234567890y")
self.assertEquals(("xabc12345678901234567890---------------++++++++++---------------12345678901234567890y", [False, True]), results)
# Big delete, big change 2.
self.dmp.Patch_DeleteThreshold = 0.6
patches = self.dmp.patch_make("x1234567890123456789012345678901234567890123456789012345678901234567890y", "xabcy")
results = self.dmp.patch_apply(patches, "x12345678901234567890---------------++++++++++---------------12345678901234567890y")
self.assertEquals(("xabcy", [True, True]), results)
self.dmp.Patch_DeleteThreshold = 0.5
# Compensate for failed patch.
self.dmp.Match_Threshold = 0.0
self.dmp.Match_Distance = 0
patches = self.dmp.patch_make("abcdefghijklmnopqrstuvwxyz--------------------1234567890", "abcXXXXXXXXXXdefghijklmnopqrstuvwxyz--------------------1234567YYYYYYYYYY890")
results = self.dmp.patch_apply(patches, "ABCDEFGHIJKLMNOPQRSTUVWXYZ--------------------1234567890")
self.assertEquals(("ABCDEFGHIJKLMNOPQRSTUVWXYZ--------------------1234567YYYYYYYYYY890", [False, True]), results)
self.dmp.Match_Threshold = 0.5
self.dmp.Match_Distance = 1000
# No side effects.
patches = self.dmp.patch_make("", "test")
patchstr = self.dmp.patch_toText(patches)
results = self.dmp.patch_apply(patches, "")
self.assertEquals(patchstr, self.dmp.patch_toText(patches))
# No side effects with major delete.
patches = self.dmp.patch_make("The quick brown fox jumps over the lazy dog.", "Woof")
patchstr = self.dmp.patch_toText(patches)
self.dmp.patch_apply(patches, "The quick brown fox jumps over the lazy dog.")
self.assertEquals(patchstr, self.dmp.patch_toText(patches))
# Edge exact match.
patches = self.dmp.patch_make("", "test")
results = self.dmp.patch_apply(patches, "")
self.assertEquals(("test", [True]), results)
# Near edge exact match.
patches = self.dmp.patch_make("XY", "XtestY")
results = self.dmp.patch_apply(patches, "XY")
self.assertEquals(("XtestY", [True]), results)
# Edge partial match.
patches = self.dmp.patch_make("y", "y123")
results = self.dmp.patch_apply(patches, "x")
self.assertEquals(("x123", [True]), results)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | 7,225,473,876,815,896,000 | 47.036824 | 408 | 0.645578 | false |
chaluemwut/fbserver | venv/lib/python2.7/site-packages/scipy/io/matlab/tests/test_mio_funcs.py | 17 | 1816 | #!/usr/bin/env python
''' Jottings to work out format for __function_workspace__ matrix at end
of mat file.
'''
from __future__ import division, print_function, absolute_import
from os.path import join as pjoin, dirname
import sys
from io import BytesIO
from numpy.testing import \
assert_array_equal, \
assert_array_almost_equal, \
assert_equal, \
assert_raises, run_module_suite
from nose.tools import assert_true
import numpy as np
from numpy.compat import asstr
from scipy.io.matlab.mio5 import MatlabObject, MatFile5Writer, \
MatFile5Reader, MatlabFunction
test_data_path = pjoin(dirname(__file__), 'data')
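# The helper below walks an already-positioned mini-MAT stream one variable
# header at a time, naming anonymous entries var_0, var_1, ... and collecting
# global names under '__globals__'.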
def read_minimat_vars(rdr):
rdr.initialize_read()
mdict = {'__globals__': []}
i = 0
while not rdr.end_of_stream():
hdr, next_position = rdr.read_var_header()
name = asstr(hdr.name)
if name == '':
name = 'var_%d' % i
i += 1
res = rdr.read_var_array(hdr, process=False)
rdr.mat_stream.seek(next_position)
mdict[name] = res
if hdr.is_global:
mdict['__globals__'].append(name)
return mdict
def read_workspace_vars(fname):
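# Extract the __function_workspace__ matrix from a MAT file and re-read its
# payload as an embedded mini-MAT stream.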
fp = open(fname, 'rb')
rdr = MatFile5Reader(fp, struct_as_record=True)
vars = rdr.get_variables()
fws = vars['__function_workspace__']
ws_bs = BytesIO(fws.tostring())
ws_bs.seek(2)
rdr.mat_stream = ws_bs
# Guess byte order.
mi = rdr.mat_stream.read(2)
rdr.byte_order = '<' if mi == b'IM' else '>'
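# (b'IM' is the byte-swapped form of the MAT-file endian indicator 'MI',
# which marks the embedded stream as little-endian)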
rdr.mat_stream.read(4) # presumably byte padding
mdict = read_minimat_vars(rdr)
fp.close()
return mdict
def test_jottings():
# example
fname = pjoin(test_data_path, 'parabola.mat')
ws_vars = read_workspace_vars(fname)
if __name__ == "__main__":
run_module_suite()
| apache-2.0 | 795,994,972,358,193,200 | 24.577465 | 72 | 0.624449 | false |
uclaros/QGIS | tests/src/python/test_qgsoptional.py | 74 | 2145 | # -*- coding: utf-8 -*-
'''
test_qgsoptional.py
--------------------------------------
Date : September 2016
Copyright : (C) 2016 Matthias Kuhn
email : [email protected]
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
'''
import qgis # NOQA
from qgis.testing import unittest
from qgis.core import QgsOptionalExpression, QgsExpression
class TestQgsOptional(unittest.TestCase):
def setUp(self):
"""Run before each test."""
pass
def tearDown(self):
"""Run after each test."""
pass
def testQgsOptionalExpression(self):
opt = QgsOptionalExpression()
self.assertFalse(opt.enabled())
opt = QgsOptionalExpression(QgsExpression('true'))
self.assertTrue(opt.enabled())
self.assertEqual(opt.data().expression(), 'true')
opt.setEnabled(False)
self.assertFalse(opt.enabled())
# boolean operator not yet working in python
# self.assertFalse(opt)
self.assertEqual(opt.data().expression(), 'true')
opt.setEnabled(True)
self.assertTrue(opt.enabled())
# self.assertTrue(opt)
self.assertEqual(opt.data().expression(), 'true')
opt.setData(QgsExpression('xyz'))
self.assertTrue(opt.enabled())
self.assertEqual(opt.data().expression(), 'xyz')
opt = QgsOptionalExpression(QgsExpression('true'), False)
self.assertFalse(opt.enabled())
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 2,118,830,919,897,886,700 | 35.355932 | 77 | 0.498834 | false |
JingZhou0404/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/build.py | 119 | 2636 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
_log = logging.getLogger(__name__)
class Build(AbstractStep):
@classmethod
def options(cls):
return AbstractStep.options() + [
Options.build,
Options.quiet,
Options.build_style,
]
def build(self, build_style):
environment = self._tool.copy_current_environment()
environment.disable_gcc_smartquotes()
env = environment.to_dictionary()
build_webkit_command = self._tool.deprecated_port().build_webkit_command(build_style=build_style)
self._tool.executive.run_and_throw_if_fail(build_webkit_command, self._options.quiet,
cwd=self._tool.scm().checkout_root, env=env)
def run(self, state):
if not self._options.build:
return
_log.info("Building WebKit")
if self._options.build_style == "both":
self.build("debug")
self.build("release")
else:
self.build(self._options.build_style)
| bsd-3-clause | 5,639,353,447,083,814,000 | 40.84127 | 105 | 0.716995 | false |
simartin/servo | python/servo/build_commands.py | 2 | 45345 | # Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
from __future__ import print_function, unicode_literals
import datetime
import locale
import os
import os.path as path
import platform
import shutil
import subprocess
import sys
import six.moves.urllib as urllib
import zipfile
import stat
from time import time
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from mach.registrar import Registrar
from mach_bootstrap import _get_exec_path
from servo.command_base import CommandBase, cd, call, check_call, append_to_path_env, gstreamer_root
from servo.gstreamer import windows_dlls, windows_plugins, macos_dylibs, macos_plugins
from servo.util import host_triple
def format_duration(seconds):
return str(datetime.timedelta(seconds=int(seconds)))
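# e.g. format_duration(3723) -> '1:02:03'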
def notify_linux(title, text):
try:
import dbus
bus = dbus.SessionBus()
notify_obj = bus.get_object("org.freedesktop.Notifications", "/org/freedesktop/Notifications")
method = notify_obj.get_dbus_method("Notify", "org.freedesktop.Notifications")
method(title, 0, "", text, "", [], {"transient": True}, -1)
except ImportError:
raise Exception("Optional Python module 'dbus' is not installed.")
def notify_win(title, text):
try:
from servo.win32_toast import WindowsToast
w = WindowsToast()
w.balloon_tip(title, text)
except WindowsError:
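# Fall back to flashing the console window's taskbar entry if the
# toast helper is unavailable.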
from ctypes import Structure, windll, POINTER, sizeof
from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT
class FLASHWINDOW(Structure):
_fields_ = [("cbSize", UINT),
("hwnd", HANDLE),
("dwFlags", DWORD),
("uCount", UINT),
("dwTimeout", DWORD)]
FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
FlashWindowEx = FlashWindowExProto(("FlashWindowEx", windll.user32))
FLASHW_CAPTION = 0x01
FLASHW_TRAY = 0x02
FLASHW_TIMERNOFG = 0x0C
params = FLASHWINDOW(sizeof(FLASHWINDOW),
windll.kernel32.GetConsoleWindow(),
FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
FlashWindowEx(params)
def notify_darwin(title, text):
try:
import Foundation
bundleDict = Foundation.NSBundle.mainBundle().infoDictionary()
bundleIdentifier = 'CFBundleIdentifier'
if bundleIdentifier not in bundleDict:
bundleDict[bundleIdentifier] = 'mach'
note = Foundation.NSUserNotification.alloc().init()
note.setTitle_(title)
note.setInformativeText_(text)
now = Foundation.NSDate.dateWithTimeInterval_sinceDate_(0, Foundation.NSDate.date())
note.setDeliveryDate_(now)
centre = Foundation.NSUserNotificationCenter.defaultUserNotificationCenter()
centre.scheduleNotification_(note)
except ImportError:
raise Exception("Optional Python module 'pyobjc' is not installed.")
def notify_with_command(command):
def notify(title, text):
if call([command, title, text]) != 0:
raise Exception("Could not run '%s'." % command)
return notify
def notify_build_done(config, elapsed, success=True):
"""Generate desktop notification when build is complete and the
elapsed build time was longer than 30 seconds."""
if elapsed > 30:
notify(config, "Servo build",
"%s in %s" % ("Completed" if success else "FAILED", format_duration(elapsed)))
def notify(config, title, text):
"""Generate a desktop notification using appropriate means on
supported platforms Linux, Windows, and Mac OS. On unsupported
platforms, this function acts as a no-op.
If notify-command is set in the [tools] section of the configuration,
that is used instead."""
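# Hypothetical example: with the following in the mach config file
# (.servobuild), notifications are routed through notify-send:
#
#   [tools]
#   notify-command = "notify-send"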
notify_command = config["tools"].get("notify-command")
if notify_command:
func = notify_with_command(notify_command)
else:
platforms = {
"linux": notify_linux,
"linux2": notify_linux,
"win32": notify_win,
"darwin": notify_darwin
}
func = platforms.get(sys.platform)
if func is not None:
try:
func(title, text)
except Exception as e:
extra = getattr(e, "message", "")
print("[Warning] Could not generate notification! %s" % extra, file=sys.stderr)
@CommandProvider
class MachCommands(CommandBase):
@Command('build',
description='Build Servo',
category='build')
@CommandArgument('--release', '-r',
action='store_true',
help='Build in release mode')
@CommandArgument('--dev', '-d',
action='store_true',
help='Build in development mode')
@CommandArgument('--jobs', '-j',
default=None,
help='Number of jobs to run in parallel')
@CommandArgument('--no-package',
action='store_true',
help='For Android, disable packaging into a .apk after building')
@CommandArgument('--verbose', '-v',
action='store_true',
help='Print verbose output')
@CommandArgument('--very-verbose', '-vv',
action='store_true',
help='Print very verbose output')
@CommandArgument('--uwp',
action='store_true',
help='Build for HoloLens (x64)')
@CommandArgument('--win-arm64', action='store_true', help="Use arm64 Windows target")
@CommandArgument('params', nargs='...',
help="Command-line arguments to be passed through to Cargo")
@CommandBase.build_like_command_arguments
def build(self, release=False, dev=False, jobs=None, params=None, media_stack=None,
no_package=False, verbose=False, very_verbose=False,
target=None, android=False, magicleap=False, libsimpleservo=False,
features=None, uwp=False, win_arm64=False, **kwargs):
# Force the UWP-enabled target if the convenience UWP flags are passed.
if uwp and not target:
if win_arm64:
target = 'aarch64-uwp-windows-msvc'
else:
target = 'x86_64-uwp-windows-msvc'
opts = params or []
features = features or []
target, android = self.pick_target_triple(target, android, magicleap)
# Infer UWP build if only provided a target.
if not uwp:
uwp = target and 'uwp' in target
features += self.pick_media_stack(media_stack, target)
target_path = base_path = self.get_target_dir()
if android:
target_path = path.join(target_path, "android")
base_path = path.join(target_path, target)
elif magicleap:
target_path = path.join(target_path, "magicleap")
base_path = path.join(target_path, target)
release_path = path.join(base_path, "release", "servo")
dev_path = path.join(base_path, "debug", "servo")
release_exists = path.exists(release_path)
dev_exists = path.exists(dev_path)
if not (release or dev):
if self.config["build"]["mode"] == "dev":
dev = True
elif self.config["build"]["mode"] == "release":
release = True
elif release_exists and not dev_exists:
release = True
elif dev_exists and not release_exists:
dev = True
else:
print("Please specify either --dev (-d) for a development")
print(" build, or --release (-r) for an optimized build.")
sys.exit(1)
if release and dev:
print("Please specify either --dev or --release.")
sys.exit(1)
if release:
opts += ["--release"]
servo_path = release_path
else:
servo_path = dev_path
if jobs is not None:
opts += ["-j", jobs]
if verbose:
opts += ["-v"]
if very_verbose:
opts += ["-vv"]
env = self.build_env(target=target, is_build=True, uwp=uwp, features=features)
self.ensure_bootstrapped(target=target)
self.ensure_clobbered()
build_start = time()
env["CARGO_TARGET_DIR"] = target_path
host = host_triple()
target_triple = target or host_triple()
if 'apple-darwin' in host and target_triple == host:
if 'CXXFLAGS' not in env:
env['CXXFLAGS'] = ''
env["CXXFLAGS"] += "-mmacosx-version-min=10.10"
if 'windows' in host:
vs_dirs = self.vs_dirs()
if host != target_triple and 'windows' in target_triple:
if os.environ.get('VisualStudioVersion') or os.environ.get('VCINSTALLDIR'):
print("Can't cross-compile for Windows inside of a Visual Studio shell.\n"
"Please run `python mach build [arguments]` to bypass automatic "
"Visual Studio shell, and make sure the VisualStudioVersion and "
"VCINSTALLDIR environment variables are not set.")
sys.exit(1)
vcinstalldir = vs_dirs['vcdir']
if not os.path.exists(vcinstalldir):
print("Can't find Visual C++ %s installation at %s." % (vs_dirs['vs_version'], vcinstalldir))
sys.exit(1)
env['PKG_CONFIG_ALLOW_CROSS'] = "1"
if uwp:
# Ensure libstd is ready for the new UWP target.
check_call(["rustup", "component", "add", "rust-src"])
env['RUST_SYSROOT'] = path.expanduser('~\\.xargo')
# Don't try and build a desktop port.
libsimpleservo = True
arches = {
"aarch64": {
"angle": "arm64",
"gst": "ARM64",
"gst_root": "arm64",
},
"x86_64": {
"angle": "x64",
"gst": "X86_64",
"gst_root": "x64",
},
}
arch = arches.get(target_triple.split('-')[0])
if not arch:
print("Unsupported UWP target.")
sys.exit(1)
# Ensure that the NuGet ANGLE package containing libEGL is accessible
# to the Rust linker.
append_to_path_env(angle_root(target_triple, env), env, "LIB")
# Don't want to mix non-UWP libraries with vendored UWP libraries.
if "gstreamer" in env['LIB']:
print("Found existing GStreamer library path in LIB. Please remove it.")
sys.exit(1)
# Override any existing GStreamer installation with the vendored libraries.
env["GSTREAMER_1_0_ROOT_" + arch['gst']] = path.join(
self.msvc_package_dir("gstreamer-uwp"), arch['gst_root']
)
env["PKG_CONFIG_PATH"] = path.join(
self.msvc_package_dir("gstreamer-uwp"), arch['gst_root'],
"lib", "pkgconfig"
)
if 'windows' in host:
process = subprocess.Popen('("%s" %s > nul) && "python" -c "import os; print(repr(os.environ))"' %
(os.path.join(vs_dirs['vcdir'], "Auxiliary", "Build", "vcvarsall.bat"), "x64"),
stdout=subprocess.PIPE, shell=True)
stdout, stderr = process.communicate()
exitcode = process.wait()
encoding = locale.getpreferredencoding() # See https://stackoverflow.com/a/9228117
if exitcode == 0:
decoded = stdout.decode(encoding)
if decoded.startswith("environ("):
decoded = decoded.strip()[8:-1]
os.environ.update(eval(decoded))
else:
print("Failed to run vcvarsall. stderr:")
print(stderr.decode(encoding))
exit(1)
# Ensure that GStreamer libraries are accessible when linking.
if 'windows' in target_triple:
gst_root = gstreamer_root(target_triple, env)
if gst_root:
append_to_path_env(os.path.join(gst_root, "lib"), env, "LIB")
if android:
if "ANDROID_NDK" not in env:
print("Please set the ANDROID_NDK environment variable.")
sys.exit(1)
if "ANDROID_SDK" not in env:
print("Please set the ANDROID_SDK environment variable.")
sys.exit(1)
android_platform = self.config["android"]["platform"]
android_toolchain_name = self.config["android"]["toolchain_name"]
android_toolchain_prefix = self.config["android"]["toolchain_prefix"]
android_lib = self.config["android"]["lib"]
android_arch = self.config["android"]["arch"]
# Build OpenSSL for android
env["OPENSSL_VERSION"] = "1.1.1d"
make_cmd = ["make"]
if jobs is not None:
make_cmd += ["-j" + jobs]
openssl_dir = path.join(target_path, target, "native", "openssl")
if not path.exists(openssl_dir):
os.makedirs(openssl_dir)
shutil.copy(path.join(self.android_support_dir(), "openssl.makefile"), openssl_dir)
shutil.copy(path.join(self.android_support_dir(), "openssl.sh"), openssl_dir)
# Check if the NDK version is 15
if not os.path.isfile(path.join(env["ANDROID_NDK"], 'source.properties')):
print("ANDROID_NDK should have file `source.properties`.")
print("The environment variable ANDROID_NDK may be set at a wrong path.")
sys.exit(1)
with open(path.join(env["ANDROID_NDK"], 'source.properties')) as ndk_properties:
lines = ndk_properties.readlines()
if lines[1].split(' = ')[1].split('.')[0] != '15':
print("Currently only support NDK 15. Please re-run `./mach bootstrap-android`.")
sys.exit(1)
env["RUST_TARGET"] = target
with cd(openssl_dir):
status = call(
make_cmd + ["-f", "openssl.makefile"],
env=env,
verbose=verbose)
if status:
return status
openssl_dir = path.join(openssl_dir, "openssl-{}".format(env["OPENSSL_VERSION"]))
env['OPENSSL_LIB_DIR'] = openssl_dir
env['OPENSSL_INCLUDE_DIR'] = path.join(openssl_dir, "include")
env['OPENSSL_STATIC'] = 'TRUE'
# Android builds also require having the gcc bits on the PATH and various INCLUDE
# path munging if you do not want to install a standalone NDK. See:
# https://dxr.mozilla.org/mozilla-central/source/build/autoconf/android.m4#139-161
os_type = platform.system().lower()
if os_type not in ["linux", "darwin"]:
raise Exception("Android cross builds are only supported on Linux and macOS.")
cpu_type = platform.machine().lower()
host_suffix = "unknown"
if cpu_type in ["i386", "i486", "i686", "i768", "x86"]:
host_suffix = "x86"
elif cpu_type in ["x86_64", "x86-64", "x64", "amd64"]:
host_suffix = "x86_64"
host = os_type + "-" + host_suffix
host_cc = env.get('HOST_CC') or _get_exec_path(["clang"]) or _get_exec_path(["gcc"])
host_cxx = env.get('HOST_CXX') or _get_exec_path(["clang++"]) or _get_exec_path(["g++"])
llvm_toolchain = path.join(env['ANDROID_NDK'], "toolchains", "llvm", "prebuilt", host)
gcc_toolchain = path.join(env['ANDROID_NDK'], "toolchains",
android_toolchain_prefix + "-4.9", "prebuilt", host)
gcc_libs = path.join(gcc_toolchain, "lib", "gcc", android_toolchain_name, "4.9.x")
env['PATH'] = (path.join(llvm_toolchain, "bin") + ':' + env['PATH'])
env['ANDROID_SYSROOT'] = path.join(env['ANDROID_NDK'], "sysroot")
support_include = path.join(env['ANDROID_NDK'], "sources", "android", "support", "include")
cpufeatures_include = path.join(env['ANDROID_NDK'], "sources", "android", "cpufeatures")
cxx_include = path.join(env['ANDROID_NDK'], "sources", "cxx-stl",
"llvm-libc++", "include")
clang_include = path.join(llvm_toolchain, "lib64", "clang", "3.8", "include")
cxxabi_include = path.join(env['ANDROID_NDK'], "sources", "cxx-stl",
"llvm-libc++abi", "include")
sysroot_include = path.join(env['ANDROID_SYSROOT'], "usr", "include")
arch_include = path.join(sysroot_include, android_toolchain_name)
android_platform_dir = path.join(env['ANDROID_NDK'], "platforms", android_platform, "arch-" + android_arch)
arch_libs = path.join(android_platform_dir, "usr", "lib")
clang_include = path.join(llvm_toolchain, "lib64", "clang", "5.0", "include")
android_api = android_platform.replace('android-', '')
env['HOST_CC'] = host_cc
env['HOST_CXX'] = host_cxx
env['HOST_CFLAGS'] = ''
env['HOST_CXXFLAGS'] = ''
env['CC'] = path.join(llvm_toolchain, "bin", "clang")
env['CPP'] = path.join(llvm_toolchain, "bin", "clang") + " -E"
env['CXX'] = path.join(llvm_toolchain, "bin", "clang++")
env['ANDROID_TOOLCHAIN'] = gcc_toolchain
env['ANDROID_TOOLCHAIN_DIR'] = gcc_toolchain
env['ANDROID_VERSION'] = android_api
env['ANDROID_PLATFORM_DIR'] = android_platform_dir
env['GCC_TOOLCHAIN'] = gcc_toolchain
gcc_toolchain_bin = path.join(gcc_toolchain, android_toolchain_name, "bin")
env['AR'] = path.join(gcc_toolchain_bin, "ar")
env['RANLIB'] = path.join(gcc_toolchain_bin, "ranlib")
env['OBJCOPY'] = path.join(gcc_toolchain_bin, "objcopy")
env['YASM'] = path.join(env['ANDROID_NDK'], 'prebuilt', host, 'bin', 'yasm')
# A cheat-sheet for some of the build errors caused by getting the search path wrong...
#
# fatal error: 'limits' file not found
# -- add -I cxx_include
# unknown type name '__locale_t' (when running bindgen in mozjs_sys)
# -- add -isystem sysroot_include
# error: use of undeclared identifier 'UINTMAX_C'
# -- add -D__STDC_CONSTANT_MACROS
#
# Also worth remembering: autoconf uses C for its configuration,
# even for C++ builds, so the C flags need to line up with the C++ flags.
env['CFLAGS'] = ' '.join([
"--target=" + target,
"--sysroot=" + env['ANDROID_SYSROOT'],
"--gcc-toolchain=" + gcc_toolchain,
"-isystem", sysroot_include,
"-I" + arch_include,
"-B" + arch_libs,
"-L" + arch_libs,
"-D__ANDROID_API__=" + android_api,
])
env['CXXFLAGS'] = ' '.join([
"--target=" + target,
"--sysroot=" + env['ANDROID_SYSROOT'],
"--gcc-toolchain=" + gcc_toolchain,
"-I" + cpufeatures_include,
"-I" + cxx_include,
"-I" + clang_include,
"-isystem", sysroot_include,
"-I" + cxxabi_include,
"-I" + clang_include,
"-I" + arch_include,
"-I" + support_include,
"-L" + gcc_libs,
"-B" + arch_libs,
"-L" + arch_libs,
"-D__ANDROID_API__=" + android_api,
"-D__STDC_CONSTANT_MACROS",
"-D__NDK_FPABI__=",
])
env['CPPFLAGS'] = ' '.join([
"--target=" + target,
"--sysroot=" + env['ANDROID_SYSROOT'],
"-I" + arch_include,
])
env["NDK_ANDROID_VERSION"] = android_api
env["ANDROID_ABI"] = android_lib
env["ANDROID_PLATFORM"] = android_platform
env["NDK_CMAKE_TOOLCHAIN_FILE"] = path.join(env['ANDROID_NDK'], "build", "cmake", "android.toolchain.cmake")
env["CMAKE_TOOLCHAIN_FILE"] = path.join(self.android_support_dir(), "toolchain.cmake")
# Set output dir for gradle aar files
aar_out_dir = self.android_aar_dir()
if not os.path.exists(aar_out_dir):
os.makedirs(aar_out_dir)
env["AAR_OUT_DIR"] = aar_out_dir
# GStreamer and its dependencies use pkg-config and this flag is required
# to make it work in a cross-compilation context.
env["PKG_CONFIG_ALLOW_CROSS"] = '1'
# Build the name of the package containing all GStreamer dependencies
# according to the build target.
gst_lib = "gst-build-{}".format(self.config["android"]["lib"])
gst_lib_zip = "gstreamer-{}-1.16.0-20190517-095630.zip".format(self.config["android"]["lib"])
gst_dir = os.path.join(target_path, "gstreamer")
gst_lib_path = os.path.join(gst_dir, gst_lib)
pkg_config_path = os.path.join(gst_lib_path, "pkgconfig")
env["PKG_CONFIG_PATH"] = pkg_config_path
if not os.path.exists(gst_lib_path):
# Download GStreamer dependencies if they have not already been downloaded
# This bundle is generated with `libgstreamer_android_gen`
# Follow these instructions to build and deploy new binaries
# https://github.com/servo/libgstreamer_android_gen#build
print("Downloading GStreamer dependencies")
gst_url = "https://servo-deps-2.s3.amazonaws.com/gstreamer/%s" % gst_lib_zip
print(gst_url)
urllib.request.urlretrieve(gst_url, gst_lib_zip)
zip_ref = zipfile.ZipFile(gst_lib_zip, "r")
zip_ref.extractall(gst_dir)
os.remove(gst_lib_zip)
# Change pkgconfig info to make all GStreamer dependencies point
# to the libgstreamer_android.so bundle.
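# e.g. a `libdir=/usr/lib` line in glib-2.0.pc is rewritten to point at
# gst_lib_path, so pkg-config resolves every dependency against the
# vendored bundle.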
for each in os.listdir(pkg_config_path):
if each.endswith('.pc'):
print("Setting pkgconfig info for %s" % each)
pc = os.path.join(pkg_config_path, each)
expr = "s#libdir=.*#libdir=%s#g" % gst_lib_path
subprocess.call(["perl", "-i", "-pe", expr, pc])
if magicleap:
if platform.system() not in ["Darwin"]:
raise Exception("Magic Leap builds are only supported on macOS. "
"If you only wish to test if your code builds, "
"run ./mach build -p libmlservo.")
ml_sdk = env.get("MAGICLEAP_SDK")
if not ml_sdk:
raise Exception("Magic Leap builds need the MAGICLEAP_SDK environment variable")
if not os.path.exists(ml_sdk):
raise Exception("Path specified by MAGICLEAP_SDK does not exist.")
ml_support = path.join(self.get_top_dir(), "support", "magicleap")
# We pretend to be an Android build
env.setdefault("ANDROID_VERSION", "21")
env.setdefault("ANDROID_NDK", env["MAGICLEAP_SDK"])
env.setdefault("ANDROID_NDK_VERSION", "16.0.0")
env.setdefault("ANDROID_PLATFORM_DIR", path.join(env["MAGICLEAP_SDK"], "lumin"))
env.setdefault("ANDROID_TOOLCHAIN_DIR", path.join(env["MAGICLEAP_SDK"], "tools", "toolchains"))
env.setdefault("ANDROID_CLANG", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "clang"))
# A random collection of search paths
env.setdefault("STLPORT_LIBS", " ".join([
"-L" + path.join(env["MAGICLEAP_SDK"], "lumin", "stl", "libc++-lumin", "lib"),
"-lc++"
]))
env.setdefault("STLPORT_CPPFLAGS", " ".join([
"-I" + path.join(env["MAGICLEAP_SDK"], "lumin", "stl", "libc++-lumin", "include")
]))
env.setdefault("CPPFLAGS", " ".join([
"--no-standard-includes",
"--sysroot=" + env["ANDROID_PLATFORM_DIR"],
"-I" + path.join(env["ANDROID_PLATFORM_DIR"], "usr", "include"),
"-isystem" + path.join(env["ANDROID_TOOLCHAIN_DIR"], "lib64", "clang", "3.8", "include"),
]))
env.setdefault("CFLAGS", " ".join([
env["CPPFLAGS"],
"-L" + path.join(env["ANDROID_TOOLCHAIN_DIR"], "lib", "gcc", target, "4.9.x"),
]))
env.setdefault("CXXFLAGS", " ".join([
# Sigh, Angle gets confused if there's another EGL around
"-I./gfx/angle/checkout/include",
env["STLPORT_CPPFLAGS"],
env["CFLAGS"]
]))
# The toolchain commands
env.setdefault("AR", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-ar"))
env.setdefault("AS", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-clang"))
env.setdefault("CC", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-clang"))
env.setdefault("CPP", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-clang -E"))
env.setdefault("CXX", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-clang++"))
env.setdefault("LD", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-ld"))
env.setdefault("OBJCOPY", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-objcopy"))
env.setdefault("OBJDUMP", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-objdump"))
env.setdefault("RANLIB", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-ranlib"))
env.setdefault("STRIP", path.join(env["ANDROID_TOOLCHAIN_DIR"], "bin", "aarch64-linux-android-strip"))
# Undo all of that when compiling build tools for the host
env.setdefault("HOST_CFLAGS", "")
env.setdefault("HOST_CXXFLAGS", "")
env.setdefault("HOST_CC", "/usr/local/opt/llvm/bin/clang")
env.setdefault("HOST_CXX", "/usr/local/opt/llvm/bin/clang++")
env.setdefault("HOST_LD", "ld")
# Some random build configurations
env.setdefault("HARFBUZZ_SYS_NO_PKG_CONFIG", "1")
env.setdefault("PKG_CONFIG_ALLOW_CROSS", "1")
env.setdefault("CMAKE_TOOLCHAIN_FILE", path.join(ml_support, "toolchain.cmake"))
env.setdefault("_LIBCPP_INLINE_VISIBILITY", "__attribute__((__always_inline__))")
# The Open SSL configuration
env.setdefault("OPENSSL_DIR", path.join(target_path, target, "native", "openssl"))
env.setdefault("OPENSSL_VERSION", "1.1.1d")
env.setdefault("OPENSSL_STATIC", "1")
# GStreamer configuration
env.setdefault("GSTREAMER_DIR", path.join(target_path, target, "native", "gstreamer-1.16.0"))
env.setdefault("GSTREAMER_URL", "https://servo-deps-2.s3.amazonaws.com/gstreamer/gstreamer-magicleap-1.16.0-20190823-104505.tgz")
env.setdefault("PKG_CONFIG_PATH", path.join(env["GSTREAMER_DIR"], "system", "lib64", "pkgconfig"))
# Override the linker set in .cargo/config
env.setdefault("CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER", path.join(ml_support, "fake-ld.sh"))
# Only build libmlservo
opts += ["--package", "libmlservo"]
# Download and build OpenSSL if necessary
status = call(path.join(ml_support, "openssl.sh"), env=env, verbose=verbose)
if status:
return status
# Download prebuilt Gstreamer if necessary
if not os.path.exists(path.join(env["GSTREAMER_DIR"], "system")):
if not os.path.exists(env["GSTREAMER_DIR"] + ".tgz"):
check_call([
'curl',
'-L',
'-f',
'-o', env["GSTREAMER_DIR"] + ".tgz",
env["GSTREAMER_URL"],
])
check_call([
'mkdir',
'-p',
env["GSTREAMER_DIR"],
])
check_call([
'tar',
'xzf',
env["GSTREAMER_DIR"] + ".tgz",
'-C', env["GSTREAMER_DIR"],
])
# https://internals.rust-lang.org/t/exploring-crate-graph-build-times-with-cargo-build-ztimings/10975
# Prepend so that e.g. `-Ztimings` (which means `-Ztimings=info,html`)
# given on the command line can override it
opts = ["-Ztimings=info"] + opts
if very_verbose:
print(["Calling", "cargo", "build"] + opts)
for key in env:
print((key, env[key]))
if sys.platform == "win32":
env.setdefault("CC", "clang-cl.exe")
env.setdefault("CXX", "clang-cl.exe")
if uwp:
env.setdefault("TARGET_CFLAGS", "")
env.setdefault("TARGET_CXXFLAGS", "")
env["TARGET_CFLAGS"] += " -DWINAPI_FAMILY=WINAPI_FAMILY_APP"
env["TARGET_CXXFLAGS"] += " -DWINAPI_FAMILY=WINAPI_FAMILY_APP"
else:
env.setdefault("CC", "clang")
env.setdefault("CXX", "clang++")
status = self.run_cargo_build_like_command(
"build", opts, env=env, verbose=verbose,
target=target, android=android, magicleap=magicleap, libsimpleservo=libsimpleservo, uwp=uwp,
features=features, **kwargs
)
elapsed = time() - build_start
# Do some additional things if the build succeeded
if status == 0:
if android and not no_package:
flavor = None
if "googlevr" in features:
flavor = "googlevr"
elif "oculusvr" in features:
flavor = "oculusvr"
rv = Registrar.dispatch("package", context=self.context,
release=release, dev=dev, target=target, flavor=flavor)
if rv:
return rv
if sys.platform == "win32":
servo_exe_dir = os.path.dirname(
self.get_binary_path(release, dev, target=target, simpleservo=libsimpleservo)
)
assert os.path.exists(servo_exe_dir)
# on msvc builds, use editbin to change the subsystem to windows, but only
# on release builds -- on debug builds, it hides log output
if not dev and not libsimpleservo:
call(["editbin", "/nologo", "/subsystem:windows", path.join(servo_exe_dir, "servo.exe")],
verbose=verbose)
# on msvc, we need to copy in some DLLs in to the servo.exe dir and the directory for unit tests.
for ssl_lib in ["libssl.dll", "libcrypto.dll"]:
ssl_path = path.join(env['OPENSSL_LIB_DIR'], "../bin", ssl_lib)
shutil.copy(ssl_path, servo_exe_dir)
shutil.copy(ssl_path, path.join(servo_exe_dir, "deps"))
build_path = path.join(servo_exe_dir, "build")
assert os.path.exists(build_path)
def package_generated_shared_libraries(libs, build_path, servo_exe_dir):
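# Search the cargo build tree for each named library and copy it next
# to servo.exe; returns True once every library has been found.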
for root, dirs, files in os.walk(build_path):
remaining_libs = list(libs)
for lib in libs:
if lib in files:
shutil.copy(path.join(root, lib), servo_exe_dir)
remaining_libs.remove(lib)
continue
libs = remaining_libs
if not libs:
return True
for lib in libs:
print("WARNING: could not find " + lib)
# UWP build has its own ANGLE library that it packages.
if not uwp:
print("Packaging EGL DLLs")
egl_libs = ["libEGL.dll", "libGLESv2.dll"]
if not package_generated_shared_libraries(egl_libs, build_path, servo_exe_dir):
status = 1
# copy needed gstreamer DLLs in to servo.exe dir
print("Packaging gstreamer DLLs")
if not package_gstreamer_dlls(env, servo_exe_dir, target_triple, uwp):
status = 1
# UWP app packaging already bundles all required DLLs for us.
print("Packaging MSVC DLLs")
if not package_msvc_dlls(servo_exe_dir, target_triple, vs_dirs['vcdir'], vs_dirs['vs_version']):
status = 1
elif sys.platform == "darwin":
servo_exe_dir = os.path.dirname(
self.get_binary_path(release, dev, target=target, simpleservo=libsimpleservo)
)
assert os.path.exists(servo_exe_dir)
if not package_gstreamer_dylibs(servo_exe_dir):
return 1
# On the Mac, set a lovely icon. This makes it easier to pick out the Servo binary in tools
# like Instruments.app.
try:
import Cocoa
icon_path = path.join(self.get_top_dir(), "resources", "servo_1024.png")
icon = Cocoa.NSImage.alloc().initWithContentsOfFile_(icon_path)
if icon is not None:
Cocoa.NSWorkspace.sharedWorkspace().setIcon_forFile_options_(icon,
servo_path,
0)
except ImportError:
pass
# Generate Desktop Notification if elapsed-time > some threshold value
notify_build_done(self.config, elapsed, status == 0)
print("Build %s in %s" % ("Completed" if status == 0 else "FAILED", format_duration(elapsed)))
return status
@Command('clean',
description='Clean the build directory.',
category='build')
@CommandArgument('--manifest-path',
default=None,
help='Path to the manifest to the package to clean')
@CommandArgument('--verbose', '-v',
action='store_true',
help='Print verbose output')
@CommandArgument('params', nargs='...',
help="Command-line arguments to be passed through to Cargo")
def clean(self, manifest_path=None, params=[], verbose=False):
self.ensure_bootstrapped()
virtualenv_fname = '_virtualenv%d.%d' % (sys.version_info[0], sys.version_info[1])
virtualenv_path = path.join(self.get_top_dir(), 'python', virtualenv_fname)
if path.exists(virtualenv_path):
print('Removing virtualenv directory: %s' % virtualenv_path)
shutil.rmtree(virtualenv_path)
self.clean_uwp()
opts = ["--manifest-path", manifest_path or path.join(self.context.topdir, "Cargo.toml")]
if verbose:
opts += ["-v"]
opts += params
return check_call(["cargo", "clean"] + opts, env=self.build_env(), verbose=verbose)
@Command('clean-uwp',
description='Clean the support/hololens/ directory.',
category='build')
def clean_uwp(self):
uwp_artifacts = [
"support/hololens/x64/",
"support/hololens/ARM/",
"support/hololens/ARM64/",
"support/hololens/ServoApp/x64/",
"support/hololens/ServoApp/ARM/",
"support/hololens/ServoApp/ARM64/",
"support/hololens/ServoApp/Generated Files/",
"support/hololens/ServoApp/BundleArtifacts/",
"support/hololens/ServoApp/support/",
"support/hololens/ServoApp/Debug/",
"support/hololens/ServoApp/Release/",
"support/hololens/packages/",
"support/hololens/AppPackages/",
"support/hololens/ServoApp/ServoApp.vcxproj.user",
]
for uwp_artifact in uwp_artifacts:
artifact = path.join(self.get_top_dir(), uwp_artifact)
if path.exists(artifact):
if path.isdir(artifact):
shutil.rmtree(artifact)
else:
os.remove(artifact)
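# Locate the vendored UWP ANGLE binaries. The package version is read from
# ServoApp's packages.config so the Rust build links against the same ANGLE
# build that app packaging ships.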
def angle_root(target, nuget_env):
arch = {
"aarch64": "arm64",
"x86_64": "x64",
}
angle_arch = arch[target.split('-')[0]]
package_name = "ANGLE.WindowsStore.Servo"
import xml.etree.ElementTree as ET
tree = ET.parse(os.path.join('support', 'hololens', 'ServoApp', 'packages.config'))
root = tree.getroot()
for package in root.iter('package'):
if package.get('id') == package_name:
package_version = package.get('version')
break
else:
raise Exception("Couldn't locate ANGLE package")
angle_default_path = path.join(os.getcwd(), "support", "hololens", "packages",
package_name + "." + package_version, "bin", "UAP", angle_arch)
# Nuget executable command
nuget_app = path.join(os.getcwd(), "support", "hololens", "ServoApp.sln")
if not os.path.exists(angle_default_path):
check_call(['nuget.exe', 'restore', nuget_app], env=nuget_env)
return angle_default_path
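# Illustrative sketch (values are hypothetical): angle_root() expects an entry
# in support/hololens/ServoApp/packages.config along the lines of
#
#   <package id="ANGLE.WindowsStore.Servo" version="2.1.19" targetFramework="native" />
#
# which, for an aarch64-uwp target triple, would resolve to
#   support/hololens/packages/ANGLE.WindowsStore.Servo.2.1.19/bin/UAP/arm64
# The real version number comes from packages.config; 2.1.19 above is made up.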
def package_gstreamer_dylibs(servo_exe_dir):
missing = []
gst_dylibs = macos_dylibs() + macos_plugins()
for gst_lib in gst_dylibs:
try:
dest_path = os.path.join(servo_exe_dir, os.path.basename(gst_lib))
if os.path.isfile(dest_path):
os.remove(dest_path)
shutil.copy(gst_lib, servo_exe_dir)
except Exception as e:
print(e)
missing += [str(gst_lib)]
for gst_lib in missing:
print("ERROR: could not find required GStreamer DLL: " + gst_lib)
return not missing
def package_gstreamer_dlls(env, servo_exe_dir, target, uwp):
gst_root = gstreamer_root(target, env)
if not gst_root:
print("Could not find GStreamer installation directory.")
return False
# All the shared libraries required for starting up and loading plugins.
gst_dlls = [
"avcodec-58.dll",
"avfilter-7.dll",
"avformat-58.dll",
"avutil-56.dll",
"bz2.dll",
"ffi-7.dll",
"gio-2.0-0.dll",
"glib-2.0-0.dll",
"gmodule-2.0-0.dll",
"gobject-2.0-0.dll",
"intl-8.dll",
"orc-0.4-0.dll",
"swresample-3.dll",
"z-1.dll",
]
gst_dlls += windows_dlls(uwp)
if uwp:
# These come from a more recent version of ffmpeg and
# aren't present in the official GStreamer 1.16 release.
gst_dlls += [
"avresample-4.dll",
"postproc-55.dll",
"swscale-5.dll",
"x264-157.dll",
]
else:
# These are built with MinGW and are not yet compatible
# with UWP's restrictions.
gst_dlls += [
"graphene-1.0-0.dll",
"libcrypto-1_1-x64.dll",
"libgmp-10.dll",
"libgnutls-30.dll",
"libhogweed-4.dll",
"libjpeg-8.dll",
"libnettle-6.dll.",
"libogg-0.dll",
"libopus-0.dll",
"libpng16-16.dll",
"libssl-1_1-x64.dll",
"libtasn1-6.dll",
"libtheora-0.dll",
"libtheoradec-1.dll",
"libtheoraenc-1.dll",
"libusrsctp-1.dll",
"libvorbis-0.dll",
"libvorbisenc-2.dll",
"libwinpthread-1.dll",
"nice-10.dll",
]
missing = []
for gst_lib in gst_dlls:
try:
shutil.copy(path.join(gst_root, "bin", gst_lib), servo_exe_dir)
except Exception:
missing += [str(gst_lib)]
for gst_lib in missing:
print("ERROR: could not find required GStreamer DLL: " + gst_lib)
if missing:
return False
# Only copy a subset of the available plugins.
gst_dlls = windows_plugins(uwp)
gst_plugin_path_root = os.environ.get("GSTREAMER_PACKAGE_PLUGIN_PATH") or gst_root
gst_plugin_path = path.join(gst_plugin_path_root, "lib", "gstreamer-1.0")
if not os.path.exists(gst_plugin_path):
print("ERROR: couldn't find gstreamer plugins at " + gst_plugin_path)
return False
missing = []
for gst_lib in gst_dlls:
try:
shutil.copy(path.join(gst_plugin_path, gst_lib), servo_exe_dir)
except Exception:
missing += [str(gst_lib)]
for gst_lib in missing:
print("ERROR: could not find required GStreamer DLL: " + gst_lib)
return not missing
def package_msvc_dlls(servo_exe_dir, target, vcinstalldir, vs_version):
# copy some MSVC DLLs to servo.exe dir
msvc_redist_dir = None
vs_platforms = {
"x86_64": "x64",
"i686": "x86",
"aarch64": "arm64",
}
target_arch = target.split('-')[0]
vs_platform = vs_platforms[target_arch]
vc_dir = vcinstalldir or os.environ.get("VCINSTALLDIR", "")
if not vs_version:
vs_version = os.environ.get("VisualStudioVersion", "")
msvc_deps = [
"msvcp140.dll",
"vcruntime140.dll",
]
if target_arch != "aarch64" and "uwp" not in target and vs_version in ("14.0", "15.0", "16.0"):
msvc_deps += ["api-ms-win-crt-runtime-l1-1-0.dll"]
# Check if it's Visual C++ Build Tools or Visual Studio 2015
vs14_vcvars = path.join(vc_dir, "vcvarsall.bat")
is_vs14 = True if os.path.isfile(vs14_vcvars) or vs_version == "14.0" else False
if is_vs14:
msvc_redist_dir = path.join(vc_dir, "redist", vs_platform, "Microsoft.VC140.CRT")
elif vs_version in ("15.0", "16.0"):
redist_dir = path.join(vc_dir, "Redist", "MSVC")
if os.path.isdir(redist_dir):
for p in os.listdir(redist_dir)[::-1]:
redist_path = path.join(redist_dir, p)
for v in ["VC141", "VC142", "VC150", "VC160"]:
# there are two possible paths
# `x64\Microsoft.VC*.CRT` or `onecore\x64\Microsoft.VC*.CRT`
redist1 = path.join(redist_path, vs_platform, "Microsoft.{}.CRT".format(v))
redist2 = path.join(redist_path, "onecore", vs_platform, "Microsoft.{}.CRT".format(v))
if os.path.isdir(redist1):
msvc_redist_dir = redist1
break
elif os.path.isdir(redist2):
msvc_redist_dir = redist2
break
if msvc_redist_dir:
break
if not msvc_redist_dir:
print("Couldn't locate MSVC redistributable directory")
return False
redist_dirs = [
msvc_redist_dir,
]
if "WindowsSdkDir" in os.environ:
redist_dirs += [path.join(os.environ["WindowsSdkDir"], "Redist", "ucrt", "DLLs", vs_platform)]
missing = []
for msvc_dll in msvc_deps:
for dll_dir in redist_dirs:
dll = path.join(dll_dir, msvc_dll)
servo_dir_dll = path.join(servo_exe_dir, msvc_dll)
if os.path.isfile(dll):
if os.path.isfile(servo_dir_dll):
                    # avoid a permission-denied error when overwriting a DLL in the servo build directory
os.chmod(servo_dir_dll, stat.S_IWUSR)
shutil.copy(dll, servo_exe_dir)
break
else:
missing += [msvc_dll]
for msvc_dll in missing:
print("DLL file `{}` not found!".format(msvc_dll))
return not missing
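# Illustrative note (directory layout assumed from MSVC redist conventions,
# not guaranteed): for VS2017/VS2019 the search above typically resolves to a
# path such as
#   <VCINSTALLDIR>\Redist\MSVC\14.16.27012\x64\Microsoft.VC141.CRT
# or, for OneCore-based toolsets,
#   <VCINSTALLDIR>\Redist\MSVC\14.16.27012\onecore\x64\Microsoft.VC141.CRT
# where 14.16.27012 stands in for whatever toolset version is installed.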
| mpl-2.0 | -9,109,003,718,326,073,000 | 42.896418 | 141 | 0.54264 | false |
MalkIPP/ipp_work | ipp_work/simulations/ir_marg_rate.py | 1 | 8481 | # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pandas
import logging
import openfisca_france_data
from openfisca_france_data.input_data_builders import get_input_data_frame
from openfisca_france_data.surveys import SurveyScenario
from openfisca_core.rates import average_rate
from ipp_work.utils import from_simulation_to_data_frame_by_entity_key_plural
log = logging.getLogger(__name__)
def from_input_df_to_entity_key_plural_df(input_data_frame, tax_benefit_system, simulation, used_as_input_variables = None):
'''
    Required inputs:
        an input_data_frame
        the list of required variables and their entities => hence the tax_benefit_system is needed
    Goal: build an input_data_frame_by_entity_key_plural
    A second function must then be written to turn this dataframe into an Array
'''
assert input_data_frame is not None
assert tax_benefit_system is not None
id_variables = [
entity.index_for_person_variable_name for entity in simulation.entity_by_key_singular.values()
if not entity.is_persons_entity]
role_variables = [
entity.role_for_person_variable_name for entity in simulation.entity_by_key_singular.values()
if not entity.is_persons_entity]
column_by_name = tax_benefit_system.column_by_name
    # Check 1 (here or in the class method?)
for column_name in input_data_frame:
if column_name not in column_by_name:
log.info('Unknown column "{}" in survey, dropped from input table'.format(column_name))
# waiting for the new pandas version to hit Travis repo
input_data_frame = input_data_frame.drop(column_name, axis = 1)
            # , inplace = True) # TODO: side effects?
    # Check 2 (here or in the class method?)
for column_name in input_data_frame:
if column_name in id_variables + role_variables:
continue
        # TODO: make that work? (MG, May 15)
# if column_by_name[column_name].formula_class.function is not None:
# if column_name in column_by_name.used_as_input_variables:
# log.info(
#                     'Column "{}" not dropped because present in used_as_input_variables'.format(column_name))
# continue
#
# log.info('Column "{}" in survey set to be calculated, dropped from input table'.format(column_name))
# input_data_frame = input_data_frame.drop(column_name, axis = 1)
#             # , inplace = True) # TODO: side effects?
# Work on entities
for entity in simulation.entity_by_key_singular.values():
if entity.is_persons_entity:
entity.count = entity.step_size = len(input_data_frame)
else:
entity.count = entity.step_size = (input_data_frame[entity.role_for_person_variable_name] == 0).sum()
entity.roles_count = input_data_frame[entity.role_for_person_variable_name].max() + 1
# Classify column by entity:
columns_by_entity = {}
columns_by_entity['individu'] = []
columns_by_entity['quifam'] = []
columns_by_entity['quifoy'] = []
columns_by_entity['quimen'] = []
for column_name, column_serie in input_data_frame.iteritems():
holder = simulation.get_or_new_holder(column_name)
entity = holder.entity
if entity.is_persons_entity:
columns_by_entity['individu'].append(column_name)
else:
columns_by_entity[entity.role_for_person_variable_name].append(column_name)
input_data_frame_by_entity_key_plural = {}
for entity in simulation.entity_by_key_singular.values():
if entity.is_persons_entity:
input_data_frame_by_entity_key_plural['individus'] = \
input_data_frame[columns_by_entity['individu']]
entity.count = entity.step_size = len(input_data_frame)
else:
input_data_frame_by_entity_key_plural[entity.index_for_person_variable_name] = \
input_data_frame[columns_by_entity[entity.role_for_person_variable_name]][input_data_frame[entity.role_for_person_variable_name] == 0]
return input_data_frame_by_entity_key_plural
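# Rough shape of the returned mapping, as a sketch only (actual keys come from
# each entity's index_for_person_variable_name; the names below assume the
# standard OpenFisca-France entities):
#
#   {
#       'individus': <DataFrame of person-level columns>,
#       'idfoy':     <DataFrame with one row per tax unit, i.e. quifoy == 0>,
#       'idfam':     <DataFrame with one row per family, i.e. quifam == 0>,
#       'idmen':     <DataFrame with one row per household, i.e. quimen == 0>,
#   }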
def marginal_rate_survey(df, target = None, target_2 = None, varying = None, varying_2 = None):
# target: numerator, varying: denominator
return 1 - (df[target] - df[target_2]) / (df[varying] - df[varying_2])
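# Worked example with made-up numbers: if a household's disposable-income-like
# target moves from 15000 to 15007 while varying moves from 20000 to 20010,
# the formula gives 1 - (15000 - 15007) / (20000 - 20010) = 1 - 0.7 = 0.3,
# i.e. a 30% marginal rate. (For a negatively-signed target such as irpp the
# sign conventions deserve extra care.)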
def varying_survey_simulation(year = 2009, increment = 10, target = 'irpp', varying = 'rni', used_as_input_variables = None):
TaxBenefitSystem = openfisca_france_data.init_country()
tax_benefit_system = TaxBenefitSystem()
input_data_frame = get_input_data_frame(year)
    # Simulation 1: get varying and target
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame,
used_as_input_variables = used_as_input_variables,
year = year,
tax_benefit_system = tax_benefit_system
)
simulation = survey_scenario.new_simulation(debug = False)
output_data_frame = pandas.DataFrame(
dict([(name, simulation.calculate_add(name)) for name in [
target, varying, 'idfoy_original'
]]))
# Make input_data_frame_by_entity_key_plural from the previous input_data_frame and simulation
input_data_frames_by_entity_key_plural = \
from_input_df_to_entity_key_plural_df(input_data_frame, tax_benefit_system, simulation)
foyers = input_data_frames_by_entity_key_plural['idfoy']
foyers = pandas.merge(foyers, output_data_frame, on = 'idfoy_original')
    # Increment the varying variable:
foyers[varying] = foyers[varying] + increment
    # Put the updated dataframe back into the dictionary
input_data_frames_by_entity_key_plural['idfoy'] = foyers
    # Second simulation, built from input_data_frame_by_entity_key_plural:
    # TODO: fix used_as_input_variables in the from_input_df_to_entity_key_plural_df() function
used_as_input_variables = used_as_input_variables + [varying]
TaxBenefitSystem = openfisca_france_data.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = None,
input_data_frames_by_entity_key_plural = input_data_frames_by_entity_key_plural,
used_as_input_variables = used_as_input_variables,
year = year,
tax_benefit_system = tax_benefit_system,
)
simulation = survey_scenario.new_simulation(debug = False)
output_data_frame2 = pandas.DataFrame(
dict([(name, simulation.calculate_add(name)) for name in [
target, varying, 'idfoy_original'
]]))
output_data_frame2.rename(columns = {varying: '{}_2'.format(varying),
target: '{}_2'.format(target)}, inplace = True)
merged = pandas.merge(output_data_frame, output_data_frame2, on = 'idfoy_original')
merged['marginal_rate'] = marginal_rate_survey(merged, '{}'.format(target), '{}_2'.format(target), 'rni', 'rni_2')
merged['average_rate'] = average_rate(target = merged[target], varying = merged[varying])
return merged
if __name__ == '__main__':
import logging
import time
log = logging.getLogger(__name__)
import sys
logging.basicConfig(level = logging.INFO, stream = sys.stdout)
start = time.time()
used_as_input_variables = ['salaire_imposable', 'cho', 'rst', 'age_en_mois', 'smic55']
merged = varying_survey_simulation(year = 2009, increment = 10, target = 'irpp', varying = 'rni',
used_as_input_variables = used_as_input_variables)
| agpl-3.0 | 4,238,916,866,469,250,000 | 44.069149 | 150 | 0.66883 | false |
agx/git-buildpackage | gbp/scripts/import_ref.py | 1 | 8733 | # vim: set fileencoding=utf-8 :
#
# (C) 2018 Michael Stapelberg <[email protected]>
# 2018 Guido Günther <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, please see
# <http://www.gnu.org/licenses/>
#
"""Import a new upstream version from a git branch onto the Debian branch"""
import os
import sys
import gbp.command_wrappers as gbpc
from gbp.deb.git import GitRepositoryError
from gbp.config import GbpOptionParserDebian, GbpOptionGroup
from gbp.errors import GbpError
import gbp.log
from gbp.scripts.common import ExitCodes
from gbp.deb.rollbackgit import RollbackDebianGitRepository
from gbp.scripts.import_orig import (debian_branch_merge,
postimport_hook,
set_bare_repo_options,
rollback)
def get_commit_and_version_to_merge(repo, options):
"""
Get the commit and version we want to merge based on the
--upstream-tag setting
"""
version = options.version
if options.upstream_tree.upper() == 'VERSION':
# Determine tag name from given version
if not options.version:
raise GbpError("No upstream version given, try -u<version>")
commit = repo.version_to_tag(options.upstream_tag, options.version)
elif options.upstream_tree.upper() == 'BRANCH':
        # Use the head of the upstream branch
if not repo.has_branch(options.upstream_branch):
raise GbpError("%s is not a valid branch" % options.upstream_branch)
commit = options.upstream_branch
else:
# Use whatever is passed in as commitish
commit = "%s^{commit}" % options.upstream_tree
return commit, version
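# Example (sketch): with gbp's stock tag template upstream-tag =
# 'upstream/%(version)s' and -u1.2.3, the VERSION branch above resolves the
# commit to the tag 'upstream/1.2.3'; BRANCH mode uses the tip of
# --upstream-branch; any other value is treated as a committish, wrapped as
# '<value>^{commit}'.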
def build_parser(name):
try:
parser = GbpOptionParserDebian(command=os.path.basename(name), prefix='',
usage='%prog [options] -u<upstream-version>')
except GbpError as err:
gbp.log.err(err)
return None
import_group = GbpOptionGroup(parser, "import options",
"import related options")
tag_group = GbpOptionGroup(parser, "tag options",
"tag related options ")
branch_group = GbpOptionGroup(parser, "version and branch naming options",
"version number and branch layout options")
cmd_group = GbpOptionGroup(parser, "external command options",
"how and when to invoke external commands and hooks")
for group in [import_group, branch_group, tag_group, cmd_group]:
parser.add_option_group(group)
branch_group.add_option("-u", "--upstream-version", dest="version",
help="The version number to use for the new version, "
"default is ''", default='')
branch_group.add_config_file_option(option_name="debian-branch",
dest="debian_branch")
branch_group.add_config_file_option(option_name="upstream-branch",
dest="upstream_branch")
branch_group.add_config_file_option(option_name="upstream-tree",
dest="upstream_tree",
help="Where to merge the upstream changes from.",
default="VERSION")
branch_group.add_config_file_option(option_name="merge-mode", dest="merge_mode")
tag_group.add_boolean_config_file_option(option_name="sign-tags",
dest="sign_tags")
tag_group.add_config_file_option(option_name="keyid",
dest="keyid")
tag_group.add_config_file_option(option_name="upstream-tag",
dest="upstream_tag")
import_group.add_config_file_option(option_name="import-msg",
dest="import_msg")
cmd_group.add_config_file_option(option_name="postimport", dest="postimport")
parser.add_boolean_config_file_option(option_name="rollback",
dest="rollback")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
help="verbose command execution")
parser.add_config_file_option(option_name="color", dest="color", type='tristate')
parser.add_config_file_option(option_name="color-scheme",
dest="color_scheme")
return parser
def parse_args(argv):
"""Parse the command line arguments
@return: options and arguments
"""
parser = build_parser(argv[0])
if not parser:
return None, None
(options, args) = parser.parse_args(argv[1:])
gbp.log.setup(options.color, options.verbose, options.color_scheme)
return options, args
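# Typical invocation (illustrative; flag spellings follow the options declared
# in build_parser() above):
#
#   gbp import-ref -u1.2.3 --upstream-tree=VERSION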
def main(argv):
ret = 0
repo = None
(options, args) = parse_args(argv)
if not options:
return ExitCodes.parse_error
# TODO: honor --filter option
# TODO: add --filter-with-copyright which takes d/copyright into account
# TODO: handle automatic versions based on timestamp + sha1
# TODO: handle updating of upstream branch from remote
gbp.log.warn("This script is experimental, it might change incompatibly between versions.")
try:
try:
repo = RollbackDebianGitRepository('.')
except GitRepositoryError:
raise GbpError("%s is not a git repository" % (os.path.abspath('.')))
commit, version = get_commit_and_version_to_merge(repo, options)
is_empty = repo.is_empty()
(clean, out) = repo.is_clean()
if not clean and not is_empty:
gbp.log.err("Repository has uncommitted changes, commit these first: ")
raise GbpError(out)
if repo.bare:
set_bare_repo_options(options)
try:
tag = repo.version_to_tag(options.upstream_tag, version)
if not repo.has_tag(tag):
gbp.log.info("Upstream tag '%s' not found. Creating it for you." % tag)
repo.create_tag(name=tag,
msg="Upstream version %s" % version,
commit="%s^0" % commit,
sign=options.sign_tags,
keyid=options.keyid)
if is_empty:
repo.create_branch(branch=options.debian_branch, rev=commit)
repo.force_head(options.debian_branch, hard=True)
            # In an empty repo, avoid the master branch that git defaults
            # to and check out the debian branch instead.
if not repo.bare:
cur = repo.branch
if cur != options.debian_branch:
repo.set_branch(options.debian_branch)
repo.delete_branch(cur)
else:
repo.rrr_branch(options.debian_branch)
debian_branch_merge(repo, tag, version, options)
# Update working copy and index if we've possibly updated the
# checked out branch
current_branch = repo.get_branch()
if current_branch in [options.upstream_branch,
repo.pristine_tar_branch]:
repo.force_head(current_branch, hard=True)
postimport_hook(repo, tag, version, options)
except (gbpc.CommandExecFailed, GitRepositoryError) as err:
msg = str(err) or 'Unknown error, please report a bug'
raise GbpError("Import of %s failed: %s" % (commit, msg))
except KeyboardInterrupt:
raise GbpError("Import of %s failed: aborted by user" % (options.git_ref))
except GbpError as err:
if str(err):
gbp.log.err(err)
ret = 1
rollback(repo, options)
if not ret:
gbp.log.info("Successfully imported version %s" % (version))
return ret
if __name__ == "__main__":
sys.exit(main(sys.argv))
# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
| gpl-2.0 | -2,448,606,281,932,488,000 | 40.966346 | 95 | 0.590446 | false |
yodalee/servo | tests/wpt/web-platform-tests/tools/py/testing/log/test_warning.py | 161 | 2253 | import pytest
import py
mypath = py.path.local(__file__).new(ext=".py")
@pytest.mark.xfail
def test_forwarding_to_warnings_module():
pytest.deprecated_call(py.log._apiwarn, "1.3", "..")
def test_apiwarn_functional(recwarn):
capture = py.io.StdCapture()
py.log._apiwarn("x.y.z", "something", stacklevel=1)
out, err = capture.reset()
py.builtin.print_("out", out)
py.builtin.print_("err", err)
assert err.find("x.y.z") != -1
lno = py.code.getrawcode(test_apiwarn_functional).co_firstlineno + 2
exp = "%s:%s" % (mypath, lno)
assert err.find(exp) != -1
def test_stacklevel(recwarn):
def f():
py.log._apiwarn("x", "some", stacklevel=2)
# 3
# 4
capture = py.io.StdCapture()
f()
out, err = capture.reset()
lno = py.code.getrawcode(test_stacklevel).co_firstlineno + 6
warning = str(err)
assert warning.find(":%s" % lno) != -1
def test_stacklevel_initpkg_with_resolve(testdir, recwarn):
testdir.makepyfile(modabc="""
import py
def f():
py.log._apiwarn("x", "some", stacklevel="apipkg123")
""")
testdir.makepyfile(apipkg123="""
def __getattr__():
import modabc
modabc.f()
""")
p = testdir.makepyfile("""
import apipkg123
apipkg123.__getattr__()
""")
capture = py.io.StdCapture()
p.pyimport()
out, err = capture.reset()
warning = str(err)
loc = 'test_stacklevel_initpkg_with_resolve.py:2'
assert warning.find(loc) != -1
def test_stacklevel_initpkg_no_resolve(recwarn):
def f():
py.log._apiwarn("x", "some", stacklevel="apipkg")
capture = py.io.StdCapture()
f()
out, err = capture.reset()
lno = py.code.getrawcode(test_stacklevel_initpkg_no_resolve).co_firstlineno + 2
warning = str(err)
assert warning.find(":%s" % lno) != -1
def test_function(recwarn):
capture = py.io.StdCapture()
py.log._apiwarn("x.y.z", "something", function=test_function)
out, err = capture.reset()
py.builtin.print_("out", out)
py.builtin.print_("err", err)
assert err.find("x.y.z") != -1
lno = py.code.getrawcode(test_function).co_firstlineno
exp = "%s:%s" % (mypath, lno)
assert err.find(exp) != -1
| mpl-2.0 | 7,405,839,485,959,459,000 | 28.644737 | 83 | 0.600976 | false |
quasiben/bokeh | examples/plotting/server/selection_histogram.py | 4 | 3752 | # You must first run "bokeh serve" to view this example
import numpy as np
from bokeh.client import push_session
from bokeh.models import BoxSelectTool, LassoSelectTool, Paragraph
from bokeh.plotting import curdoc, figure, hplot, vplot
# create three normal population samples with different parameters
x1 = np.random.normal(loc=5.0, size=400) * 100
y1 = np.random.normal(loc=10.0, size=400) * 10
x2 = np.random.normal(loc=5.0, size=800) * 50
y2 = np.random.normal(loc=5.0, size=800) * 10
x3 = np.random.normal(loc=55.0, size=200) * 10
y3 = np.random.normal(loc=4.0, size=200) * 10
x = np.concatenate((x1, x2, x3))
y = np.concatenate((y1, y2, y3))
TOOLS="pan,wheel_zoom,box_select,lasso_select"
# create the scatter plot
p = figure(tools=TOOLS, plot_width=600, plot_height=600, title=None, min_border=10, min_border_left=50)
r = p.scatter(x, y, size=3, color="#3A5785", alpha=0.6)
p.select(BoxSelectTool).select_every_mousemove = False
p.select(LassoSelectTool).select_every_mousemove = False
# create the horizontal histogram
hhist, hedges = np.histogram(x, bins=20)
hzeros = np.zeros(len(hedges)-1)
hmax = max(hhist)*1.1
LINE_ARGS = dict(color="#3A5785", line_color=None)
ph = figure(toolbar_location=None, plot_width=p.plot_width, plot_height=200, x_range=p.x_range,
y_range=(-hmax, hmax), title=None, min_border=10, min_border_left=50)
ph.xgrid.grid_line_color = None
ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hhist, color="white", line_color="#3A5785")
hh1 = ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hzeros, alpha=0.5, **LINE_ARGS)
hh2 = ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hzeros, alpha=0.1, **LINE_ARGS)
# create the vertical histogram
vhist, vedges = np.histogram(y, bins=20)
vzeros = np.zeros(len(vedges)-1)
vmax = max(vhist)*1.1
th = 42 # need to adjust for toolbar height, unfortunately
pv = figure(toolbar_location=None, plot_width=200, plot_height=p.plot_height+th-10, x_range=(-vmax, vmax),
y_range=p.y_range, title=None, min_border=10, min_border_top=th)
pv.ygrid.grid_line_color = None
pv.xaxis.major_label_orientation = -3.14/2
pv.quad(left=0, bottom=vedges[:-1], top=vedges[1:], right=vhist, color="white", line_color="#3A5785")
vh1 = pv.quad(left=0, bottom=vedges[:-1], top=vedges[1:], right=vzeros, alpha=0.5, **LINE_ARGS)
vh2 = pv.quad(left=0, bottom=vedges[:-1], top=vedges[1:], right=vzeros, alpha=0.1, **LINE_ARGS)
# NOTE: Version 0.11 has introduced auto spacing by default on VBox/HBox/vplot/hplot
# so for now we must tweak some spacing and borders to have it closely
# aligned.
pv.min_border_top = 80
pv.min_border_left = 0
ph.min_border_top = 10
ph.min_border_right = 10
p.min_border_right = 10
layout = vplot(hplot(p, pv), hplot(ph, Paragraph(width=200)), width=800, height=800)
# open a session to keep our local document in sync with server
session = push_session(curdoc())
def update(attr, old, new):
inds = np.array(new['1d']['indices'])
if len(inds) == 0 or len(inds) == len(x):
hhist1, hhist2 = hzeros, hzeros
vhist1, vhist2 = vzeros, vzeros
else:
neg_inds = np.ones_like(x, dtype=np.bool)
neg_inds[inds] = False
hhist1, _ = np.histogram(x[inds], bins=hedges)
vhist1, _ = np.histogram(y[inds], bins=vedges)
hhist2, _ = np.histogram(x[neg_inds], bins=hedges)
vhist2, _ = np.histogram(y[neg_inds], bins=vedges)
hh1.data_source.data["top"] = hhist1
hh2.data_source.data["top"] = -hhist2
vh1.data_source.data["right"] = vhist1
vh2.data_source.data["right"] = -vhist2
r.data_source.on_change('selected', update)
session.show(layout) # open the document in a browser
session.loop_until_closed() # run forever
| bsd-3-clause | -1,259,513,667,026,541,800 | 38.083333 | 106 | 0.685501 | false |
shownomercy/django | django/conf/locale/lt/formats.py | 504 | 1830 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. E j \d.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = r'Y \m. E j \d., H:i'
YEAR_MONTH_FORMAT = r'Y \m. F'
MONTH_DAY_FORMAT = r'E j \d.'
SHORT_DATE_FORMAT = 'Y-m-d'
SHORT_DATETIME_FORMAT = 'Y-m-d H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06'
]
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '14:30:59'
'%H:%M:%S.%f', # '14:30:59.000200'
'%H:%M', # '14:30'
'%H.%M.%S', # '14.30.59'
'%H.%M.%S.%f', # '14.30.59.000200'
'%H.%M', # '14.30'
]
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y %H.%M.%S', # '25.10.06 14.30.59'
'%d.%m.%y %H.%M.%S.%f', # '25.10.06 14.30.59.000200'
'%d.%m.%y %H.%M', # '25.10.06 14.30'
'%d.%m.%y', # '25.10.06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
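# With the separators above, a value such as 1234567.89 would render as
# '1.234.567,89' once USE_THOUSAND_SEPARATOR and L10N are enabled (an
# illustrative example; actual output depends on project-level settings).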
| bsd-3-clause | -5,178,753,615,758,317,000 | 37.125 | 81 | 0.505464 | false |
uwcirg/true_nth_usa_portal | tests/test_portal.py | 1 | 11562 | """Unit test module for portal views"""
from datetime import datetime
import tempfile
import urllib
from flask_swagger import swagger
from flask_webtest import SessionScope
from swagger_spec_validator import validate_spec_url
from portal.config.config import TestConfig
from portal.extensions import db
from portal.factories.app import create_app
from portal.models.intervention import INTERVENTION, UserIntervention
from portal.models.message import EmailMessage
from portal.models.organization import Organization
from portal.models.role import ROLE
from portal.models.user import User, get_user
from tests import OAUTH_INFO_PROVIDER_LOGIN, TEST_USER_ID, TestCase
class TestPortal(TestCase):
"""Portal view tests"""
def test_card_html(self):
"""Interventions can customize the button text """
client = self.add_client()
intervention = INTERVENTION.DECISION_SUPPORT_P3P
intervention.public_access = True # make the card avail for the test
client.intervention = intervention
intervention.card_html = "Custom Label"
self.login()
self.add_required_clinical_data()
self.bless_with_basics(make_patient=False)
response = self.client.get('/home')
assert response.status_code == 200
assert 'Custom Label' in response.get_data(as_text=True)
intervention = db.session.merge(intervention)
assert intervention.card_html in response.get_data(as_text=True)
def test_user_card_html(self):
"""Interventions can further customize per user"""
client = self.add_client()
intervention = INTERVENTION.DECISION_SUPPORT_P3P
intervention.public_access = True # make the card avail for the test
client.intervention = intervention
ui = UserIntervention(
user_id=TEST_USER_ID, intervention_id=intervention.id)
ui.card_html = "<b>Bold Card Text</b>"
ui.link_label = "Custom User Label"
ui.link_url = 'http://example.com/?test=param1'
with SessionScope(db):
db.session.add(ui)
db.session.commit()
self.login()
self.add_required_clinical_data()
self.bless_with_basics(make_patient=False)
user = db.session.merge(self.test_user)
response = self.client.get('/home')
assert response.status_code == 200
ui = db.session.merge(ui)
assert ui.card_html in response.get_data(as_text=True)
assert ui.link_label in response.get_data(as_text=True)
assert ui.link_url in response.get_data(as_text=True)
intervention = db.session.merge(intervention)
assert (
intervention.display_for_user(user).link_label
in response.get_data(as_text=True))
def test_staff_html(self):
"""Interventions can customize the staff text """
client = self.add_client()
intervention = INTERVENTION.sexual_recovery
client.intervention = intervention
ui = UserIntervention(
user_id=TEST_USER_ID,
intervention_id=intervention.id)
ui.staff_html = "Custom text for <i>staff</i>"
with SessionScope(db):
db.session.add(ui)
db.session.commit()
self.bless_with_basics()
self.login()
self.promote_user(role_name=ROLE.INTERVENTION_STAFF.value)
        # This test requires that PATIENT_LIST_ADDL_FIELDS include the
        # 'reports' field
self.app.config['PATIENT_LIST_ADDL_FIELDS'] = ['reports']
response = self.client.get('/patients/')
ui = db.session.merge(ui)
results = response.get_data(as_text=True)
assert ui.staff_html in results
def test_public_access(self):
"""Interventions w/o public access should be hidden"""
client = self.add_client()
intervention = INTERVENTION.sexual_recovery
client.intervention = intervention
intervention.public_access = False
self.login()
self.add_required_clinical_data()
self.bless_with_basics()
response = self.client.get('/home')
assert 'Sexual Recovery' not in response.get_data(as_text=True)
# now give just the test user access
intervention = db.session.merge(intervention)
ui = UserIntervention(
user_id=TEST_USER_ID,
intervention_id=intervention.id,
access="granted")
with SessionScope(db):
db.session.add(ui)
db.session.commit()
response = self.client.get('/home')
assert 'Sexual Recovery' in response.get_data(as_text=True)
def test_admin_list(self):
"""Test admin view lists all users"""
# Generate a few users with a smattering of roles
u1 = self.add_user(username='[email protected]')
u2 = self.add_user(username='[email protected]')
self.promote_user(u1, role_name=ROLE.ADMIN.value)
self.promote_user(u2, role_name=ROLE.APPLICATION_DEVELOPER.value)
# Test user needs admin role to view list
self.promote_user(role_name=ROLE.ADMIN.value)
self.login()
response = self.client.get('/admin')
# Should at least see an entry per user in system
assert (response.get_data(as_text=True).count('/profile')
>= User.query.count())
def test_invite(self):
"""Test email invite form"""
test_user = User.query.get(TEST_USER_ID)
test_user.email = '[email protected]'
db.session.add(test_user)
db.session.commit()
self.login()
postdata = {
'subject': 'unittest subject',
'recipients': '[email protected] [email protected]',
'body': "Ode to joy"}
response = self.client.post('/invite', data=postdata,
follow_redirects=True)
assert "Email Invite Sent" in response.get_data(as_text=True)
def test_message_sent(self):
"""Email invites - test view for sent messages"""
sent_at = datetime.strptime(
"2000/01/01 12:31:00", "%Y/%m/%d %H:%M:%S")
message = EmailMessage(
subject='a subject', user_id=TEST_USER_ID,
sender="[email protected]",
body='Welcome to testing \u2713',
sent_at=sent_at,
recipients="[email protected] [email protected]")
db.session.add(message)
db.session.commit()
        # confirm that message styling handles unicode content
body = message.style_message(message.body)
assert 'DOCTYPE' in body
assert 'style' in body
assert isinstance(body, str)
self.login()
response = self.client.get('/invite/{0}'.format(message.id))
assert (response.get_data(as_text=True).find(
sent_at.strftime('%m/%d/%Y %H:%M:%S')) > 0)
assert (response.get_data(as_text=True).find('[email protected] [email protected]')
> 0)
def test_missing_message(self):
"""Request to view non existant message should 404"""
self.login()
response = self.client.get('/invite/404')
assert response.status_code == 404
def test_swagger_docgen(self):
"""Build swagger docs for entire project"""
expected_keys = (
'info',
'paths',
'swagger',
'definitions',
)
swag = swagger(self.client.application)
for key in expected_keys:
assert key in swag
def test_swagger_validation(self):
"""Ensure our swagger spec matches swagger schema"""
with tempfile.NamedTemporaryFile(
prefix='swagger_test_',
suffix='.json',
delete=True,
) as temp_spec:
temp_spec.write(self.client.get('/spec').data)
temp_spec.seek(0)
validate_spec_url("file:%s" % temp_spec.name)
def test_report_error(self):
self.login()
params = {
'subject_id': 112,
'page_url': '/not/real',
'message': 'creative test string'
}
response = self.client.get('/report-error?{}'.format(
urllib.parse.urlencode(params)))
assert response.status_code == 200
def test_configuration_settings(self):
self.login()
lr_group = self.app.config['LR_GROUP']
response = self.client.get('/api/settings/lr_group')
assert response.status_code == 200
assert response.json.get('LR_GROUP') == lr_group
response2 = self.client.get('/api/settings/bad_value')
assert response2.status_code == 400
def test_configuration_secrets(self):
"""Ensure config keys containing secrets are not exposed"""
blacklist = (
'SECRET',
'URI',
'SQL',
)
response = self.client.get('/api/settings')
assert response.status_code == 200
assert not any(
any(k in config_key for k in blacklist)
for config_key in response.json
)
class TestPortalEproms(TestCase):
"""Portal views depending on eproms blueprint"""
def create_app(self):
"""
Overload base version to hide the GIL (allows registration of ePROMs)
"""
tc = TestConfig()
setattr(tc, 'HIDE_GIL', True)
self._app = create_app(tc)
return self._app
def test_redirect_validation(self):
self.promote_user(role_name=ROLE.ADMIN.value)
self.promote_user(role_name=ROLE.STAFF.value)
org = Organization(name='test org')
user = get_user(TEST_USER_ID)
with SessionScope(db):
db.session.add(org)
user.organizations.append(org)
db.session.commit()
self.login()
client = self.add_client()
client_url = client._redirect_uris
local_url = "http://{}/home?test".format(
self.app.config.get('SERVER_NAME'))
invalid_url = 'http://invalid.org'
# validate redirect of /website-consent-script GET
response = self.client.get(
'/website-consent-script/{}'.format(TEST_USER_ID),
query_string={'redirect_url': local_url}
)
assert response.status_code == 200
response2 = self.client.get(
'/website-consent-script/{}'.format(TEST_USER_ID),
query_string={'redirect_url': invalid_url}
)
assert response2.status_code == 401
# validate session login redirect with valid url
oauth_info = {
'user_id': TEST_USER_ID,
'next': client_url,
}
response3 = self.login(oauth_info=oauth_info)
assert response3.status_code == 200
# validate session login redirect with invalid url
oauth_info['next'] = invalid_url
response4 = self.login(oauth_info=oauth_info)
assert response4.status_code == 401
# validate provider login redirect with invalid url
oauth_info = dict(OAUTH_INFO_PROVIDER_LOGIN)
oauth_info['next'] = invalid_url
response5 = self.login(oauth_info=oauth_info)
assert response5.status_code == 401
# validate redirect of /challenge POST
formdata = {'user_id': TEST_USER_ID, 'next_url': local_url}
response6 = self.client.post('/challenge', data=formdata)
assert response6.status_code == 200
formdata['next_url'] = invalid_url
response7 = self.client.post('/challenge', data=formdata)
assert response7.status_code == 401
| bsd-3-clause | -5,025,709,329,483,542,000 | 34.466258 | 79 | 0.606729 | false |
andersinno/foosball | config/settings/production.py | 1 | 4603 | # -*- coding: utf-8 -*-
"""
Production Configurations
"""
from __future__ import absolute_import, unicode_literals
from django.utils import six
from .common import * # noqa
# Enable social integration plugins
SOCIAL_ACCOUNTS = (
'allauth.socialaccount.providers.google',
'allauth.socialaccount.providers.facebook',
)
INSTALLED_APPS += SOCIAL_ACCOUNTS
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env("DJANGO_SECRET_KEY")
# This ensures that Django will be able to detect a secure connection
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURITY_MIDDLEWARE = (
'django.middleware.security.SecurityMiddleware',
)
# Make sure SecurityMiddleware is listed first
MIDDLEWARE_CLASSES = SECURITY_MIDDLEWARE + MIDDLEWARE_CLASSES
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = True
ACCOUNT_DEFAULT_HTTP_PROTOCOL = "https"
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['foosball.anders.fi'])
# END SITE CONFIGURATION
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='foosball <[email protected]>')
EMAIL_SUBJECT_PREFIX = env("DJANGO_EMAIL_SUBJECT_PREFIX", default='[foosball] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[1]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db("DATABASE_URL")
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "{0}/{1}".format(env('REDIS_URL', default="redis://127.0.0.1:6379"), 0),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"IGNORE_EXCEPTIONS": True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
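# Illustrative environment for the settings above (hypothetical values, not
# part of this repository):
#
#   DJANGO_SECRET_KEY=change-me
#   DATABASE_URL=postgres://foosball:secret@localhost:5432/foosball
#   REDIS_URL=redis://127.0.0.1:6379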
# LOGGING CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True
},
'django.security.DisallowedHost': {
'level': 'ERROR',
'handlers': ['console', 'mail_admins'],
'propagate': True
}
}
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env('DJANGO_ADMIN_URL')
| mit | 1,731,769,611,805,305,600 | 33.096296 | 117 | 0.563546 | false |
ducthien1490/youtube-dl | youtube_dl/extractor/instagram.py | 93 | 4498 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
limit_length,
)
class InstagramIE(InfoExtractor):
_VALID_URL = r'https://instagram\.com/p/(?P<id>[\da-zA-Z]+)'
_TEST = {
'url': 'https://instagram.com/p/aye83DjauH/?foo=bar#abc',
'md5': '0d2da106a9d2631273e192b372806516',
'info_dict': {
'id': 'aye83DjauH',
'ext': 'mp4',
'uploader_id': 'naomipq',
'title': 'Video by naomipq',
'description': 'md5:1f17f0ab29bd6fe2bfad705f58de3cb8',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
uploader_id = self._search_regex(r'"owner":{"username":"(.+?)"',
webpage, 'uploader id', fatal=False)
desc = self._search_regex(r'"caption":"(.*?)"', webpage, 'description',
fatal=False)
return {
'id': video_id,
'url': self._og_search_video_url(webpage, secure=False),
'ext': 'mp4',
'title': 'Video by %s' % uploader_id,
'thumbnail': self._og_search_thumbnail(webpage),
'uploader_id': uploader_id,
'description': desc,
}
class InstagramUserIE(InfoExtractor):
_VALID_URL = r'https://instagram\.com/(?P<username>[^/]{2,})/?(?:$|[?#])'
IE_DESC = 'Instagram user profile'
IE_NAME = 'instagram:user'
_TEST = {
'url': 'https://instagram.com/porsche',
'info_dict': {
'id': 'porsche',
'title': 'porsche',
},
'playlist_mincount': 2,
'playlist': [{
'info_dict': {
'id': '614605558512799803_462752227',
'ext': 'mp4',
'title': '#Porsche Intelligent Performance.',
'thumbnail': 're:^https?://.*\.jpg',
'uploader': 'Porsche',
'uploader_id': 'porsche',
'timestamp': 1387486713,
'upload_date': '20131219',
},
}],
'params': {
'extract_flat': True,
'skip_download': True,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
uploader_id = mobj.group('username')
entries = []
page_count = 0
media_url = 'http://instagram.com/%s/media' % uploader_id
while True:
page = self._download_json(
media_url, uploader_id,
note='Downloading page %d ' % (page_count + 1),
)
page_count += 1
for it in page['items']:
if it.get('type') != 'video':
continue
like_count = int_or_none(it.get('likes', {}).get('count'))
user = it.get('user', {})
formats = [{
'format_id': k,
'height': v.get('height'),
'width': v.get('width'),
'url': v['url'],
} for k, v in it['videos'].items()]
self._sort_formats(formats)
thumbnails_el = it.get('images', {})
thumbnail = thumbnails_el.get('thumbnail', {}).get('url')
# In some cases caption is null, which corresponds to None
# in python. As a result, it.get('caption', {}) gives None
title = (it.get('caption') or {}).get('text', it['id'])
entries.append({
'id': it['id'],
'title': limit_length(title, 80),
'formats': formats,
'thumbnail': thumbnail,
'webpage_url': it.get('link'),
'uploader': user.get('full_name'),
'uploader_id': user.get('username'),
'like_count': like_count,
'timestamp': int_or_none(it.get('created_time')),
})
if not page['items']:
break
max_id = page['items'][-1]['id']
media_url = (
'http://instagram.com/%s/media?max_id=%s' % (
uploader_id, max_id))
return {
'_type': 'playlist',
'entries': entries,
'id': uploader_id,
'title': uploader_id,
}
| unlicense | 54,199,603,054,088,460 | 32.567164 | 79 | 0.450867 | false |
bloopletech/Comix | src/preferences.py | 6 | 22727 | """preferences.py - Preference handler."""
import os
import cPickle
import gtk
import pango
import constants
import labels
ZOOM_MODE_BEST = 0
ZOOM_MODE_WIDTH = 1
ZOOM_MODE_HEIGHT = 2
ZOOM_MODE_MANUAL = 3
# All the preferences are stored here.
prefs = {
'comment extensions': ['txt', 'nfo'],
'auto load last file': False,
'page of last file': 1,
'path to last file': '',
'auto open next archive': True,
'bg colour': (5000, 5000, 5000),
'checkered bg for transparent images': True,
'cache': True,
'stretch': False,
'default double page': False,
'default fullscreen': False,
'default zoom mode': ZOOM_MODE_BEST,
'default manga mode': False,
'lens magnification': 2,
'lens size': 200,
'no double page for wide images': False,
'double step in double page mode': True,
'show page numbers on thumbnails': True,
'thumbnail size': 80,
'create thumbnails': True,
'slideshow delay': 3000,
'smart space scroll': True,
'flip with wheel': False,
'smart bg': False,
'store recent file info': True,
'hide all': False,
'hide all in fullscreen': True,
'stored hide all values': (True, True, True, True, True),
'path of last browsed in filechooser': constants.HOME_DIR,
'last filter in main filechooser': 0,
'last filter in library filechooser': 1,
'show menubar': True,
'show scrollbar': True,
'show statusbar': True,
'show toolbar': True,
'show thumbnails': True,
'rotation': 0,
'auto rotate from exif': True,
'vertical flip': False,
'horizontal flip': False,
'keep transformation': False,
'window height': gtk.gdk.screen_get_default().get_height() * 3 // 4,
'window width': min(gtk.gdk.screen_get_default().get_width() * 3 // 4,
gtk.gdk.screen_get_default().get_height() * 5 // 8),
'library cover size': 128,
'auto add books into collections': True,
'last library collection': None,
'lib window height': gtk.gdk.screen_get_default().get_height() * 3 // 4,
'lib window width': gtk.gdk.screen_get_default().get_width() * 3 // 4
}
_config_path = os.path.join(constants.CONFIG_DIR, 'preferences.pickle')
_dialog = None
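# Minimal sketch of how the prefs dict round-trips through _config_path
# (Comix's actual load/save helpers are defined elsewhere in this module and
# may differ in detail):
#
#   cPickle.dump(prefs, open(_config_path, 'wb'), cPickle.HIGHEST_PROTOCOL)
#   prefs.update(cPickle.load(open(_config_path, 'rb')))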
class _PreferencesDialog(gtk.Dialog):
"""The preferences dialog where most (but not all) settings that are
saved between sessions are presented to the user.
"""
def __init__(self, window):
self._window = window
gtk.Dialog.__init__(self, _('Preferences'), window, 0,
(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE))
self.connect('response', self._response)
self.set_has_separator(False)
self.set_resizable(True)
self.set_default_response(gtk.RESPONSE_CLOSE)
notebook = gtk.Notebook()
self.vbox.pack_start(notebook)
self.set_border_width(4)
notebook.set_border_width(6)
# ----------------------------------------------------------------
# The "Appearance" tab.
# ----------------------------------------------------------------
page = _PreferencePage(80)
page.new_section(_('Background'))
fixed_bg_button = gtk.RadioButton(None, '%s:' %
_('Use this colour as background'))
fixed_bg_button.set_tooltip_text(
_('Always use this selected colour as the background colour.'))
color_button = gtk.ColorButton(gtk.gdk.Color(*prefs['bg colour']))
color_button.connect('color_set', self._color_button_cb)
page.add_row(fixed_bg_button, color_button)
dynamic_bg_button = gtk.RadioButton(fixed_bg_button,
_('Use dynamic background colour.'))
dynamic_bg_button.set_active(prefs['smart bg'])
dynamic_bg_button.connect('toggled', self._check_button_cb, 'smart bg')
dynamic_bg_button.set_tooltip_text(
_('Automatically pick a background colour that fits the viewed image.'))
page.add_row(dynamic_bg_button)
page.new_section(_('Thumbnails'))
label = gtk.Label('%s:' % _('Thumbnail size (in pixels)'))
adjustment = gtk.Adjustment(prefs['thumbnail size'], 20, 128, 1, 10)
thumb_size_spinner = gtk.SpinButton(adjustment)
thumb_size_spinner.connect('value_changed', self._spinner_cb,
'thumbnail size')
page.add_row(label, thumb_size_spinner)
thumb_number_button = gtk.CheckButton(
_('Show page numbers on thumbnails.'))
thumb_number_button.set_active(
prefs['show page numbers on thumbnails'])
thumb_number_button.connect('toggled', self._check_button_cb,
'show page numbers on thumbnails')
page.add_row(thumb_number_button)
page.new_section(_('Magnifying Glass'))
label = gtk.Label('%s:' % _('Magnifying glass size (in pixels)'))
adjustment = gtk.Adjustment(prefs['lens size'], 50, 400, 1, 10)
glass_size_spinner = gtk.SpinButton(adjustment)
glass_size_spinner.connect('value_changed', self._spinner_cb,
'lens size')
glass_size_spinner.set_tooltip_text(
_('Set the size of the magnifying glass. It is a square with a side of this many pixels.'))
page.add_row(label, glass_size_spinner)
label = gtk.Label('%s:' % _('Magnification factor'))
adjustment = gtk.Adjustment(prefs['lens magnification'], 1.1, 10.0,
0.1, 1.0)
glass_magnification_spinner = gtk.SpinButton(adjustment, digits=1)
glass_magnification_spinner.connect('value_changed', self._spinner_cb,
'lens magnification')
glass_magnification_spinner.set_tooltip_text(
_('Set the magnification factor of the magnifying glass.'))
page.add_row(label, glass_magnification_spinner)
page.new_section(_('Image scaling'))
stretch_button = gtk.CheckButton(_('Stretch small images.'))
stretch_button.set_active(prefs['stretch'])
stretch_button.connect('toggled', self._check_button_cb, 'stretch')
stretch_button.set_tooltip_text(
_('Stretch images to a size that is larger than their original size if the current zoom mode requests it. If this preference is unset, images are never scaled to be larger than their original size.'))
page.add_row(stretch_button)
page.new_section(_('Transparency'))
checkered_bg_button = gtk.CheckButton(
_('Use checkered background for transparent images.'))
checkered_bg_button.set_active(
prefs['checkered bg for transparent images'])
checkered_bg_button.connect('toggled', self._check_button_cb,
'checkered bg for transparent images')
checkered_bg_button.set_tooltip_text(
_('Use a grey checkered background for transparent images. If this preference is unset, the background is plain white instead.'))
page.add_row(checkered_bg_button)
notebook.append_page(page, gtk.Label(_('Appearance')))
# ----------------------------------------------------------------
# The "Behaviour" tab.
# ----------------------------------------------------------------
page = _PreferencePage(150)
page.new_section(_('Scroll'))
smart_space_button = gtk.CheckButton(
_('Use smart space key scrolling.'))
smart_space_button.set_active(prefs['smart space scroll'])
smart_space_button.connect('toggled', self._check_button_cb,
'smart space scroll')
smart_space_button.set_tooltip_text(
_('Use smart scrolling with the space key. Normally the space key scrolls only right down (or up when shift is pressed), but with this preference set it also scrolls sideways and so tries to follow the natural reading order of the comic book.'))
page.add_row(smart_space_button)
flip_with_wheel_button = gtk.CheckButton(
_('Flip pages when scrolling off the edges of the page.'))
flip_with_wheel_button.set_active(prefs['flip with wheel'])
flip_with_wheel_button.connect('toggled', self._check_button_cb,
'flip with wheel')
flip_with_wheel_button.set_tooltip_text(
_('Flip pages when scrolling "off the page" with the scroll wheel or with the arrow keys. It takes three consecutive "steps" with the scroll wheel or the arrow keys for the pages to be flipped.'))
page.add_row(flip_with_wheel_button)
page.new_section(_('Double page mode'))
step_length_button = gtk.CheckButton(
_('Flip two pages in double page mode.'))
step_length_button.set_active(prefs['double step in double page mode'])
step_length_button.connect('toggled', self._check_button_cb,
'double step in double page mode')
step_length_button.set_tooltip_text(
_('Flip two pages, instead of one, each time we flip pages in double page mode.'))
page.add_row(step_length_button)
virtual_double_button = gtk.CheckButton(
_('Show only one wide image in double page mode.'))
virtual_double_button.set_active(
prefs['no double page for wide images'])
virtual_double_button.connect('toggled', self._check_button_cb,
'no double page for wide images')
virtual_double_button.set_tooltip_text(
_("Display only one image in double page mode, if the image's width exceeds its height. The result of this is that scans that span two pages are displayed properly (i.e. alone) also in double page mode."))
page.add_row(virtual_double_button)
page.new_section(_('Files'))
auto_open_next_button = gtk.CheckButton(
_('Automatically open the next archive.'))
auto_open_next_button.set_active(prefs['auto open next archive'])
auto_open_next_button.connect('toggled', self._check_button_cb,
'auto open next archive')
auto_open_next_button.set_tooltip_text(
_('Automatically open the next archive in the directory when flipping past the last page, or the previous archive when flipping past the first page.'))
page.add_row(auto_open_next_button)
auto_open_last_button = gtk.CheckButton(
_('Automatically open the last viewed file on startup.'))
auto_open_last_button.set_active(prefs['auto load last file'])
auto_open_last_button.connect('toggled', self._check_button_cb,
'auto load last file')
auto_open_last_button.set_tooltip_text(
_('Automatically open, on startup, the file that was open when Comix was last closed.'))
page.add_row(auto_open_last_button)
store_recent_button = gtk.CheckButton(
_('Store information about recently opened files.'))
store_recent_button.set_active(prefs['store recent file info'])
store_recent_button.connect('toggled', self._check_button_cb,
'store recent file info')
store_recent_button.set_tooltip_text(
_('Add information about all files opened from within Comix to the shared recent files list.'))
page.add_row(store_recent_button)
create_thumbs_button = gtk.CheckButton(
_('Store thumbnails for opened files.'))
create_thumbs_button.set_active(prefs['create thumbnails'])
create_thumbs_button.connect('toggled', self._check_button_cb,
'create thumbnails')
create_thumbs_button.set_tooltip_text(
_('Store thumbnails for opened files according to the freedesktop.org specification. These thumbnails are shared by many other applications, such as most file managers.'))
page.add_row(create_thumbs_button)
page.new_section(_('Cache'))
cache_button = gtk.CheckButton(_('Use a cache to speed up browsing.'))
cache_button.set_active(prefs['cache'])
cache_button.connect('toggled', self._check_button_cb, 'cache')
cache_button.set_tooltip_text(
_('Cache the images that are next to the currently viewed image in order to speed up browsing. Since the speed improvements are quite big, it is recommended that you have this preference set, unless you are running short on free RAM.'))
page.add_row(cache_button)
notebook.append_page(page, gtk.Label(_('Behaviour')))
# ----------------------------------------------------------------
# The "Display" tab.
# ----------------------------------------------------------------
page = _PreferencePage(180)
page.new_section(_('Default modes'))
double_page_button = gtk.CheckButton(
_('Use double page mode by default.'))
double_page_button.set_active(prefs['default double page'])
double_page_button.connect('toggled', self._check_button_cb,
'default double page')
page.add_row(double_page_button)
fullscreen_button = gtk.CheckButton(_('Use fullscreen by default.'))
fullscreen_button.set_active(prefs['default fullscreen'])
fullscreen_button.connect('toggled', self._check_button_cb,
'default fullscreen')
page.add_row(fullscreen_button)
manga_button = gtk.CheckButton(_('Use manga mode by default.'))
manga_button.set_active(prefs['default manga mode'])
manga_button.connect('toggled', self._check_button_cb,
'default manga mode')
page.add_row(manga_button)
label = gtk.Label('%s:' % _('Default zoom mode'))
zoom_combo = gtk.combo_box_new_text()
zoom_combo.append_text(_('Best fit mode'))
zoom_combo.append_text(_('Fit width mode'))
zoom_combo.append_text(_('Fit height mode'))
zoom_combo.append_text(_('Manual zoom mode'))
# Change this if the combobox entries are reordered.
zoom_combo.set_active(prefs['default zoom mode'])
zoom_combo.connect('changed', self._combo_box_cb)
page.add_row(label, zoom_combo)
page.new_section(_('Fullscreen'))
hide_in_fullscreen_button = gtk.CheckButton(
_('Automatically hide all toolbars in fullscreen.'))
hide_in_fullscreen_button.set_active(prefs['hide all in fullscreen'])
hide_in_fullscreen_button.connect('toggled', self._check_button_cb,
'hide all in fullscreen')
page.add_row(hide_in_fullscreen_button)
page.new_section(_('Slideshow'))
label = gtk.Label('%s:' % _('Slideshow delay (in seconds)'))
adjustment = gtk.Adjustment(prefs['slideshow delay'] / 1000.0,
0.5, 3600.0, 0.1, 1)
delay_spinner = gtk.SpinButton(adjustment, digits=1)
delay_spinner.connect('value_changed', self._spinner_cb,
'slideshow delay')
page.add_row(label, delay_spinner)
page.new_section(_('Comments'))
label = gtk.Label('%s:' % _('Comment extensions'))
extensions_entry = gtk.Entry()
extensions_entry.set_text(', '.join(prefs['comment extensions']))
extensions_entry.connect('activate', self._entry_cb)
extensions_entry.connect('focus_out_event', self._entry_cb)
extensions_entry.set_tooltip_text(
_('Treat all files found within archives, that have one of these file endings, as comments.'))
page.add_row(label, extensions_entry)
page.new_section(_('Rotation'))
auto_rotate_button = gtk.CheckButton(
_('Automatically rotate images according to their metadata.'))
auto_rotate_button.set_active(prefs['auto rotate from exif'])
auto_rotate_button.connect('toggled', self._check_button_cb,
'auto rotate from exif')
auto_rotate_button.set_tooltip_text(
_('Automatically rotate images when an orientation is specified in the image metadata, such as in an Exif tag.'))
page.add_row(auto_rotate_button)
notebook.append_page(page, gtk.Label(_('Display')))
self.show_all()
def _check_button_cb(self, button, preference):
"""Callback for all checkbutton-type preferences."""
prefs[preference] = button.get_active()
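        # Preferences that affect rendering take effect immediately, either
        # by resetting the background colour or by forcing a redraw.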
if preference == 'smart bg':
if not prefs[preference]:
self._window.set_bg_colour(prefs['bg colour'])
else:
self._window.draw_image(scroll=False)
elif preference in ('stretch', 'checkered bg for transparent images',
'no double page for wide images', 'auto rotate from exif'):
self._window.draw_image(scroll=False)
elif (preference == 'hide all in fullscreen' and
self._window.is_fullscreen):
self._window.draw_image(scroll=False)
elif preference == 'show page numbers on thumbnails':
self._window.thumbnailsidebar.clear()
self._window.thumbnailsidebar.load_thumbnails()
def _color_button_cb(self, colorbutton):
"""Callback for the background colour selection button."""
colour = colorbutton.get_color()
prefs['bg colour'] = colour.red, colour.green, colour.blue
if not prefs['smart bg'] or not self._window.file_handler.file_loaded:
self._window.set_bg_colour(prefs['bg colour'])
def _spinner_cb(self, spinbutton, preference):
"""Callback for spinner-type preferences."""
value = spinbutton.get_value()
if preference == 'lens size':
prefs[preference] = int(value)
elif preference == 'lens magnification':
prefs[preference] = value
elif preference == 'slideshow delay':
prefs[preference] = int(value * 1000)
self._window.slideshow.update_delay()
elif preference == 'thumbnail size':
prefs[preference] = int(value)
self._window.thumbnailsidebar.resize()
self._window.draw_image(scroll=False)
def _combo_box_cb(self, combobox):
"""Callback for combobox-type preferences."""
zoom_mode = combobox.get_active()
prefs['default zoom mode'] = zoom_mode
def _entry_cb(self, entry, event=None):
"""Callback for entry-type preferences."""
text = entry.get_text()
extensions = [e.strip() for e in text.split(',')]
prefs['comment extensions'] = [e for e in extensions if e]
self._window.file_handler.update_comment_extensions()
def _response(self, dialog, response):
_close_dialog()
class _PreferencePage(gtk.VBox):
"""The _PreferencePage is a conveniece class for making one "page"
in a preferences-style dialog that contains one or more
_PreferenceSections.
"""
def __init__(self, right_column_width):
"""Create a new page where any possible right columns have the
width request <right_column_width>.
"""
gtk.VBox.__init__(self, False, 12)
self.set_border_width(12)
self._right_column_width = right_column_width
self._section = None
def new_section(self, header):
"""Start a new section in the page, with the header text from
<header>.
"""
self._section = _PreferenceSection(header, self._right_column_width)
self.pack_start(self._section, False, False)
def add_row(self, left_item, right_item=None):
"""Add a row to the page (in the latest section), containing one
or two items. If the left item is a label it is automatically
aligned properly.
"""
if isinstance(left_item, gtk.Label):
left_item.set_alignment(0, 0.5)
if right_item is None:
self._section.contentbox.pack_start(left_item)
else:
left_box, right_box = self._section.new_split_vboxes()
left_box.pack_start(left_item)
right_box.pack_start(right_item)
class _PreferenceSection(gtk.VBox):
"""The _PreferenceSection is a convenience class for making one
"section" of a preference-style dialog, e.g. it has a bold header
and a number of rows which are indented with respect to that header.
"""
def __init__(self, header, right_column_width=150):
"""Contruct a new section with the header set to the text in
<header>, and the width request of the (possible) right columns
set to that of <right_column_width>.
"""
gtk.VBox.__init__(self, False, 0)
self._right_column_width = right_column_width
self.contentbox = gtk.VBox(False, 6)
label = labels.BoldLabel(header)
label.set_alignment(0, 0.5)
hbox = gtk.HBox(False, 0)
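        # The empty HBox, packed with 6 px padding, indents the section
        # content with respect to the header.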
hbox.pack_start(gtk.HBox(), False, False, 6)
hbox.pack_start(self.contentbox)
self.pack_start(label, False, False)
self.pack_start(hbox, False, False, 6)
def new_split_vboxes(self):
"""Return two new VBoxes that are automatically put in the section
after the previously added items. The right one has a width request
        equal to the right_column_width value passed to the class constructor,
in order to make it easy for all "right column items" in a page to
line up nicely.
"""
left_box = gtk.VBox(False, 6)
right_box = gtk.VBox(False, 6)
right_box.set_size_request(self._right_column_width, -1)
hbox = gtk.HBox(False, 12)
hbox.pack_start(left_box)
hbox.pack_start(right_box, False, False)
self.contentbox.pack_start(hbox)
return left_box, right_box
def open_dialog(action, window):
global _dialog
if _dialog is None:
_dialog = _PreferencesDialog(window)
else:
_dialog.present()
def _close_dialog(*args):
global _dialog
if _dialog is not None:
_dialog.destroy()
_dialog = None
def read_preferences_file():
"""Read preferences data from disk."""
if os.path.isfile(_config_path):
config = None
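        # The file holds two pickled objects: the Comix version (read only
        # to advance past it) followed by the preferences dict itself.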
try:
config = open(_config_path, 'rb')
version = cPickle.load(config)
old_prefs = cPickle.load(config)
config.close()
except Exception:
print '! Corrupt preferences file "%s", deleting...' % _config_path
if config is not None:
config.close()
os.remove(_config_path)
else:
for key in old_prefs:
if key in prefs:
prefs[key] = old_prefs[key]
def write_preferences_file():
"""Write preference data to disk."""
config = open(_config_path, 'wb')
cPickle.dump(constants.VERSION, config, cPickle.HIGHEST_PROTOCOL)
cPickle.dump(prefs, config, cPickle.HIGHEST_PROTOCOL)
config.close()
| gpl-2.0 | -1,446,618,445,956,264,400 | 44.72837 | 257 | 0.616887 | false |
ghxandsky/zstack-utility | kvmagent/kvmagent/plugins/network_plugin.py | 1 | 7289 | '''
@author: frank
'''
from kvmagent import kvmagent
from zstacklib.utils import jsonobject
from zstacklib.utils import http
from zstacklib.utils import log
from zstacklib.utils import lock
from zstacklib.utils import shell
from zstacklib.utils import linux
import os
import traceback
CHECK_PHYSICAL_NETWORK_INTERFACE_PATH = '/network/checkphysicalnetworkinterface'
KVM_REALIZE_L2NOVLAN_NETWORK_PATH = "/network/l2novlan/createbridge"
KVM_REALIZE_L2VLAN_NETWORK_PATH = "/network/l2vlan/createbridge"
KVM_CHECK_L2NOVLAN_NETWORK_PATH = "/network/l2novlan/checkbridge"
KVM_CHECK_L2VLAN_NETWORK_PATH = "/network/l2vlan/checkbridge"
logger = log.get_logger(__name__)
class CheckPhysicalNetworkInterfaceCmd(kvmagent.AgentCommand):
def __init__(self):
super(CheckPhysicalNetworkInterfaceCmd, self).__init__()
self.interfaceNames = None
class CheckPhysicalNetworkInterfaceResponse(kvmagent.AgentResponse):
def __init__(self):
super(CheckPhysicalNetworkInterfaceResponse, self).__init__()
self.failedInterfaceNames = None
class CreateBridgeCmd(kvmagent.AgentCommand):
def __init__(self):
super(CreateBridgeCmd, self).__init__()
self.physicalInterfaceName = None
self.bridgeName = None
class CreateBridgeResponse(kvmagent.AgentResponse):
def __init__(self):
super(CreateBridgeResponse, self).__init__()
class CreateVlanBridgeCmd(kvmagent.AgentCommand):
def __init__(self):
super(CreateVlanBridgeCmd, self).__init__()
self.vlan = None
class CreateVlanBridgeResponse(kvmagent.AgentResponse):
def __init__(self):
super(CreateVlanBridgeResponse, self).__init__()
class CheckBridgeResponse(kvmagent.AgentResponse):
def __init__(self):
super(CheckBridgeResponse, self).__init__()
class CheckVlanBridgeResponse(kvmagent.AgentResponse):
def __init__(self):
super(CheckVlanBridgeResponse, self).__init__()
class NetworkPlugin(kvmagent.KvmAgent):
'''
classdocs
'''
def _ifup_device_if_down(self, device_name):
state_path = '/sys/class/net/%s/operstate' % device_name
if not os.path.exists(state_path):
raise Exception('cannot find %s' % state_path)
with open(state_path, 'r') as fd:
state = fd.read()
if 'up' in state:
return
shell.call('ip link set %s up' % device_name)
def _configure_bridge(self):
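        # bridge-nf-call-iptables makes bridged frames traverse iptables;
        # conf/default/forwarding turns on IPv4 forwarding for new interfaces.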
shell.call('echo 1 > /proc/sys/net/bridge/bridge-nf-call-iptables')
shell.call('echo 1 > /proc/sys/net/ipv4/conf/default/forwarding')
@kvmagent.replyerror
def check_physical_network_interface(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = CheckPhysicalNetworkInterfaceResponse()
for i in cmd.interfaceNames:
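            # A non-zero return code from grep means the interface is absent.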
shell_cmd = shell.ShellCmd("ip link | grep '%s'" % i)
shell_cmd(False)
if shell_cmd.return_code != 0:
rsp.failedInterfaceNames = [i]
rsp.success = False
return jsonobject.dumps(rsp)
for i in cmd.interfaceNames:
self._ifup_device_if_down(i)
logger.debug(http.path_msg(CHECK_PHYSICAL_NETWORK_INTERFACE_PATH, 'checked physical interfaces: %s' % cmd.interfaceNames))
return jsonobject.dumps(rsp)
@lock.lock('create_bridge')
@kvmagent.replyerror
def create_bridge(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = CreateBridgeResponse()
self._ifup_device_if_down(cmd.physicalInterfaceName)
if linux.is_vif_on_bridge(cmd.bridgeName, cmd.physicalInterfaceName):
logger.debug('%s is a bridge device. Interface %s is attached to bridge. No need to create bridge or attach device interface' % (cmd.bridgeName, cmd.physicalInterfaceName))
self._configure_bridge()
return jsonobject.dumps(rsp)
try:
linux.create_bridge(cmd.bridgeName, cmd.physicalInterfaceName)
self._configure_bridge()
logger.debug('successfully realize bridge[%s] from device[%s]' % (cmd.bridgeName, cmd.physicalInterfaceName))
except Exception as e:
logger.warning(traceback.format_exc())
rsp.error = 'unable to create bridge[%s] from device[%s], because %s' % (cmd.bridgeName, cmd.physicalInterfaceName, str(e))
rsp.success = False
return jsonobject.dumps(rsp)
@lock.lock('create_bridge')
@kvmagent.replyerror
def create_vlan_bridge(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = CreateVlanBridgeResponse()
if linux.is_bridge(cmd.bridgeName):
logger.debug('%s is a bridge device, no need to create bridge' % cmd.bridgeName)
self._ifup_device_if_down('%s.%s' % (cmd.physicalInterfaceName, cmd.vlan))
self._configure_bridge()
return jsonobject.dumps(rsp)
try:
linux.create_vlan_bridge(cmd.bridgeName, cmd.physicalInterfaceName, cmd.vlan)
self._configure_bridge()
logger.debug('successfully realize vlan bridge[name:%s, vlan:%s] from device[%s]' % (cmd.bridgeName, cmd.vlan, cmd.physicalInterfaceName))
except Exception as e:
logger.warning(traceback.format_exc())
rsp.error = 'unable to create vlan bridge[name:%s, vlan:%s] from device[%s], because %s' % (cmd.bridgeName, cmd.vlan, cmd.physicalInterfaceName, str(e))
rsp.success = False
return jsonobject.dumps(rsp)
@lock.lock('create_bridge')
@kvmagent.replyerror
def check_bridge(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = CheckBridgeResponse()
if not linux.is_bridge(cmd.bridgeName):
rsp.error = "can not find bridge[%s]" % cmd.bridgeName
rsp.success = False
else:
self._ifup_device_if_down(cmd.physicalInterfaceName)
return jsonobject.dumps(rsp)
@lock.lock('create_bridge')
@kvmagent.replyerror
def check_vlan_bridge(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = CheckVlanBridgeResponse()
if not linux.is_bridge(cmd.bridgeName):
rsp.error = "can not find vlan bridge[%s]" % cmd.bridgeName
rsp.success = False
else:
self._ifup_device_if_down(cmd.physicalInterfaceName)
return jsonobject.dumps(rsp)
def start(self):
http_server = kvmagent.get_http_server()
http_server.register_sync_uri(CHECK_PHYSICAL_NETWORK_INTERFACE_PATH, self.check_physical_network_interface)
http_server.register_async_uri(KVM_REALIZE_L2NOVLAN_NETWORK_PATH, self.create_bridge)
http_server.register_async_uri(KVM_REALIZE_L2VLAN_NETWORK_PATH, self.create_vlan_bridge)
http_server.register_async_uri(KVM_CHECK_L2NOVLAN_NETWORK_PATH, self.check_bridge)
http_server.register_async_uri(KVM_CHECK_L2VLAN_NETWORK_PATH, self.check_vlan_bridge)
def stop(self):
pass
| apache-2.0 | 3,011,938,923,725,352,000 | 37.61413 | 184 | 0.641377 | false |
KanoComputing/nush | cherrypy/test/test_auth_basic.py | 54 | 2853 | # This file is part of CherryPy <http://www.cherrypy.org/>
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
import cherrypy
from cherrypy._cpcompat import md5, ntob
from cherrypy.lib import auth_basic
from cherrypy.test import helper
class BasicAuthTest(helper.CPWebCase):
def setup_server():
class Root:
def index(self):
return "This is public."
index.exposed = True
class BasicProtected:
def index(self):
return "Hello %s, you've been authorized." % cherrypy.request.login
index.exposed = True
class BasicProtected2:
def index(self):
return "Hello %s, you've been authorized." % cherrypy.request.login
index.exposed = True
userpassdict = {'xuser' : 'xpassword'}
userhashdict = {'xuser' : md5(ntob('xpassword')).hexdigest()}
def checkpasshash(realm, user, password):
p = userhashdict.get(user)
return p and p == md5(ntob(password)).hexdigest() or False
conf = {'/basic': {'tools.auth_basic.on': True,
'tools.auth_basic.realm': 'wonderland',
'tools.auth_basic.checkpassword': auth_basic.checkpassword_dict(userpassdict)},
'/basic2': {'tools.auth_basic.on': True,
'tools.auth_basic.realm': 'wonderland',
'tools.auth_basic.checkpassword': checkpasshash},
}
root = Root()
root.basic = BasicProtected()
root.basic2 = BasicProtected2()
cherrypy.tree.mount(root, config=conf)
setup_server = staticmethod(setup_server)
def testPublic(self):
self.getPage("/")
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html;charset=utf-8')
self.assertBody('This is public.')
def testBasic(self):
self.getPage("/basic/")
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="wonderland"')
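        # 'eHVzZXI6eHBhc3N3b3JX' is base64 for 'xuser:xpassworW' (a wrong
        # password), while 'eHVzZXI6eHBhc3N3b3Jk' decodes to 'xuser:xpassword'.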
self.getPage('/basic/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')])
self.assertStatus(401)
self.getPage('/basic/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')])
self.assertStatus('200 OK')
self.assertBody("Hello xuser, you've been authorized.")
def testBasic2(self):
self.getPage("/basic2/")
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="wonderland"')
self.getPage('/basic2/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')])
self.assertStatus(401)
self.getPage('/basic2/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')])
self.assertStatus('200 OK')
self.assertBody("Hello xuser, you've been authorized.")
| gpl-3.0 | 6,043,323,925,622,085,000 | 35.113924 | 106 | 0.596916 | false |
ghickman/tvrenamr | tvrenamr/logs.py | 2 | 2377 | import logging
import logging.handlers
import os
def convert_log_level(level=26):
"""
Get a numeric log level from a string. The default 26 is for SHORT logs.
    :param level: log level given as a name or a number
    :return: the numeric log level
"""
# annoying but the level can be passed in as None
if not level:
level = 26
levels = {'notset': 0, 'debug': 10, 'info': 20, 'minimal': 22,
'short': 26, 'warning': 30, 'error': 40, 'critical': 50}
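    # MINIMAL (22) and SHORT (26) are custom levels between INFO and WARNING.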
if isinstance(level, str):
level = levels.get(level)
return level
def get_log_file(filename=None):
# make sure the log directory exists and place the log file there
if filename is None:
filename = os.path.join(
os.path.expanduser('~'),
'.tvrenamr',
'tvrenamr.log'
)
filename = filename.replace('~', os.path.expanduser('~'))
try:
os.makedirs(os.path.split(filename)[0])
except OSError:
pass
return filename
def start_logging(filename, log_level, quiet=False):
"""
Setup the file logging and start the root logger
"""
filename = get_log_file(filename)
log_level = convert_log_level(log_level)
# add the custom levels
logging.addLevelName(22, 'MINIMAL')
logging.addLevelName(26, 'SHORT')
# setup log file
file_format = '%(asctime)-15s %(levelname)-8s %(name)-11s %(message)s'
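    # Rotate the log file at 1 MiB, keeping up to 10 backups.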
handler = logging.handlers.RotatingFileHandler(filename, maxBytes=1048576, backupCount=10)
handler.setFormatter(logging.Formatter(file_format, '%Y-%m-%dT%H:%M'))
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(logging.DEBUG)
if not quiet:
# setup the console logs to debug
# debug
        if log_level == 10:
console_format = '%(asctime)-15s %(levelname)-8s %(name)-11s %(message)s'
console_datefmt = '%Y-%m-%d %H:%M'
else:
console_format = '%(message)s'
console_datefmt = ''
console_formatter = logging.Formatter(console_format, console_datefmt)
# define a Handler with the given level and outputs to the console
console = logging.StreamHandler()
console.setLevel(log_level)
# set the console format & attach the handler to the root logger with it.
console.setFormatter(console_formatter)
logging.getLogger().addHandler(console)
| mit | 6,956,430,640,036,286,000 | 28.7125 | 94 | 0.622213 | false |
simleo/openmicroscopy | components/tools/OmeroPy/test/integration/test_chgrp.py | 2 | 46241 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2015 University of Dundee & Open Microscopy Environment.
# All rights reserved. Use is subject to license terms supplied in LICENSE.txt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Integration test for moving objects between groups.
"""
import omero
import omero.gateway
from omero.testlib import ITest
import pytest
from omero.cmd import Chgrp2
from omero.cmd.graphs import ChildOption
from omero.model import DatasetI, DatasetImageLinkI, ExperimenterGroupI, ImageI
from omero.model import TagAnnotationI
from omero.model import ProjectDatasetLinkI, ProjectI, PlateI, ScreenI
from omero.model import ExperimenterI
from omero.rtypes import rstring, unwrap
from omero.api import Save
PRIVATE = 'rw----'
READONLY = 'rwr---'
READANNOTATE = 'rwra--'
COLLAB = 'rwrw--'
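# The strings above are OMERO permissions: read/write pairs for owner,
# group and world ('a' grants the group annotate rights).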
class TestChgrp(ITest):
def testChgrpImportedImage(self):
"""
Tests chgrp for an imported image, moving to a collaborative group
"""
# One user in two groups
client, exp = self.new_client_and_user()
grp = self.new_group(experimenters=[exp], perms=COLLAB)
gid = grp.id.val
client.sf.getAdminService().getEventContext() # Reset session
# Import an image into the client context
images = self.import_fake_file(name="testChgrpImportedImage",
client=client)
image = images[0]
# Chgrp
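        # Chgrp2 maps each graph type name to a list of object ids and
        # names the destination group.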
chgrp = Chgrp2(targetObjects={'Image': [image.id.val]}, groupId=gid)
self.do_submit(chgrp, client)
# Change our context to new group...
admin = client.sf.getAdminService()
admin.setDefaultGroup(exp, ExperimenterGroupI(gid, False))
self.set_context(client, gid)
# ...check image
img = client.sf.getQueryService().get("Image", image.id.val)
assert img.details.group.id.val == gid
def testChgrpImage(self):
"""
        Tests chgrp for a dummy image object (no Pixels)
"""
# One user in two groups
client, exp = self.new_client_and_user()
grp = self.new_group([exp])
gid = grp.id.val
client.sf.getAdminService().getEventContext() # Reset session
update = client.sf.getUpdateService()
query = client.sf.getQueryService()
admin = client.sf.getAdminService()
first_gid = admin.getEventContext().groupId
# Create a dataset in the 'first group'
ds = self.make_dataset(name="testChgrpImage_target", client=client)
ds_id = ds.id.val
# Change our context to new group and create image
admin.setDefaultGroup(exp, ExperimenterGroupI(gid, False))
self.set_context(client, gid)
update = client.sf.getUpdateService() # do we need to get this again?
img = self.new_image()
img = update.saveAndReturnObject(img)
# Move image to new group
chgrp = Chgrp2(
targetObjects={'Image': [img.id.val]}, groupId=first_gid)
# Link to Save
link = DatasetImageLinkI()
link.child = ImageI(img.id.val, False)
link.parent = DatasetI(ds_id, False)
save = Save()
save.obj = link
requests = [chgrp, save] # we're going to chgrp THEN save DIlink
# Change our context to original group...
admin.setDefaultGroup(exp, ExperimenterGroupI(first_gid, False))
self.set_context(client, first_gid)
# We have to be in destination group for link Save to work
self.do_submit(requests, client)
# ...check image
img = client.sf.getQueryService().get("Image", img.id.val)
assert img.details.group.id.val == first_gid
# check Dataset
query = "select link from DatasetImageLink link\
where link.child.id=%s" % img.id.val
        dil = client.sf.getQueryService().findByQuery(query, None)
        assert dil is not None, "New DatasetImageLink on image not found"
        assert dil.details.group.id.val == first_gid,\
            "Link created in same group as Image target"
def testChgrpPDI(self):
"""
Tests chgrp for a Project, Dataset, Image hierarchy
"""
# One user in two groups
client, exp = self.new_client_and_user()
grp = self.new_group([exp])
gid = grp.id.val
client.sf.getAdminService().getEventContext() # Reset session
# Data Setup (image in the P/D hierarchy)
img = self.make_image(client=client)
project = self.make_project(name="chgrp-test", client=client)
dataset = self.make_dataset(name="chgrp-test", client=client)
self.link(dataset, img, client=client)
self.link(project, dataset, client=client)
# Move Project to new group
chgrp = Chgrp2(
targetObjects={'Project': [project.id.val]}, groupId=gid)
self.do_submit(chgrp, client)
# Change our context to new group...
admin = client.sf.getAdminService()
admin.setDefaultGroup(exp, ExperimenterGroupI(gid, False))
self.set_context(client, gid)
# ...check image
img = client.sf.getQueryService().get("Image", img.id.val)
assert img.details.group.id.val == gid
# check Project
prj = client.sf.getQueryService().get("Project", project.id.val)
assert prj.details.group.id.val == gid
def testChgrpRdef7825(self):
# One user in two groups
owner, owner_obj = self.new_client_and_user(perms="rwrw--")
admin = owner.sf.getAdminService()
ec = admin.getEventContext()
source_grp = admin.getGroup(ec.groupId)
target_grp = self.new_group([owner])
target_gid = target_grp.id.val
ec = admin.getEventContext() # Refresh
# Add another user to the source group
member = self.new_client(group=source_grp)
# Create an image as the owner
images = self.import_fake_file(name="testChgrpRdef7825",
client=owner)
image = images[0]
# Render as both users
owner_g = omero.gateway.BlitzGateway(client_obj=owner)
member_g = omero.gateway.BlitzGateway(client_obj=member)
def render(g):
g.getObject("Image", image.id.val).getThumbnail()
render(owner_g)
render(member_g)
# Now chgrp and try to delete
chgrp = Chgrp2(
targetObjects={'Image': [image.id.val]}, groupId=target_gid)
self.do_submit(chgrp, owner)
# Shouldn't be necessary to change group, but we're gonna
owner_g.SERVICE_OPTS.setOmeroGroup("-1")
handle = owner_g.deleteObjects("Image", [image.id.val])
self.wait_on_cmd(owner_g.c, handle)
def testChgrpOneImageFilesetErr(self):
"""
Simple example of the MIF chgrp bad case:
A single fileset containing 2 images - we try to chgrp ONE image.
Each sibling CANNOT be moved independently of the other.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
# 2 images sharing a fileset
images = self.import_fake_file(2, client=client)
# Now chgrp
chgrp = Chgrp2(
targetObjects={'Image': [images[0].id.val]}, groupId=target_gid)
self.do_submit(chgrp, client, test_should_pass=False)
def testChgrpAllImagesFilesetOK(self):
"""
Simple example of the MIF chgrp bad case:
A single fileset containing 2 images
can be moved to the same group together.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
images = self.import_fake_file(2, client=client)
# chgrp should succeed
ids = [images[0].id.val, images[1].id.val]
chgrp = Chgrp2(targetObjects={'Image': ids}, groupId=target_gid)
self.do_submit(chgrp, client)
# Check both Images moved
query_service = client.sf.getQueryService()
ctx = {'omero.group': '-1'} # query across groups
for i in images:
image = query_service.get('Image', i.id.val, ctx)
img_gid = image.details.group.id.val
assert target_gid == img_gid,\
"Image should be in group: %s, NOT %s" % (target_gid, img_gid)
def testChgrpAllImagesFilesetTwoCommandsErr(self):
"""
Simple example of the MIF chgrp bad case with Chgrp2:
A single fileset containing 2 images cannot be moved
to the same group together using two commands
See testChgrpAllImagesFilesetOK for the good.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
images = self.import_fake_file(2, client=client)
# chgrp should succeed
chgrp1 = Chgrp2(
targetObjects={'Image': [images[0].id.val]}, groupId=target_gid)
chgrp2 = Chgrp2(
targetObjects={'Image': [images[1].id.val]}, groupId=target_gid)
self.do_submit([chgrp1, chgrp2], client, test_should_pass=False)
def testChgrpOneDatasetFilesetErr(self):
"""
Simple example of the MIF chgrp bad case:
A single fileset containing 2 images is split among 2 datasets.
We try to chgrp ONE Dataset.
Each dataset CANNOT be moved independently of the other.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
datasets = self.create_datasets(
2, "testChgrpOneDatasetFilesetErr", client=client)
images = self.import_fake_file(2, client=client)
for i in range(2):
self.link(datasets[i], images[i], client=client)
# chgrp should succeed with the first Dataset only
chgrp = Chgrp2(
targetObjects={"Dataset": [datasets[0].id.val]},
groupId=target_gid)
self.do_submit(chgrp, client)
query_service = client.sf.getQueryService()
# Check Images not moved
for i in range(2):
image = query_service.get('Image', images[i].id.val)
assert target_gid != image.details.group.id.val,\
"Image should not be in group: %s" % target_gid
# Check second Dataset not moved
dataset = query_service.get('Dataset', datasets[1].id.val)
assert target_gid != dataset.details.group.id.val,\
"Dataset should not be in group: %s" % target_gid
ctx = {'omero.group': str(target_gid)} # query in the target group
# Check first Dataset moved
dataset = query_service.get('Dataset', datasets[0].id.val, ctx)
assert target_gid == dataset.details.group.id.val,\
"Dataset should be in group: %s" % target_gid
def testChgrpAllDatasetsFilesetOK(self):
"""
Simple example of the MIF chgrp bad case:
a single fileset containing 2 images is split among 2 datasets.
Datasets can be moved to the same group together.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
datasets = self.create_datasets(
2, "testChgrpAllDatasetsFilesetOK", client=client)
images = self.import_fake_file(2, client=client)
for i in range(2):
self.link(datasets[i], images[i], client=client)
# Now chgrp, should succeed
ids = [datasets[0].id.val, datasets[1].id.val]
chgrp = Chgrp2(targetObjects={"Dataset": ids}, groupId=target_gid)
self.do_submit(chgrp, client)
# Check both Datasets and Images moved
query_service = client.sf.getQueryService()
ctx = {'omero.group': str(target_gid)} # query in the target group
for i in range(2):
dataset = query_service.get('Dataset', datasets[i].id.val, ctx)
image = query_service.get('Image', images[i].id.val, ctx)
assert target_gid == dataset.details.group.id.val,\
"Dataset should be in group: %s" % target_gid
assert target_gid == image.details.group.id.val,\
"Image should be in group: %s" % target_gid
def testChgrpOneDatasetFilesetOK(self):
"""
Simple example of the MIF chgrp good case:
a single fileset containing 2 images in one dataset.
The dataset can be moved.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
ds = self.make_dataset(name="testChgrpOneDatasetFilesetOK",
client=client)
images = self.import_fake_file(2, client=client)
for i in range(2):
self.link(ds, images[i], client=client)
# Now chgrp, should succeed
chgrp = Chgrp2(
targetObjects={"Dataset": [ds.id.val]}, groupId=target_gid)
self.do_submit(chgrp, client)
# Check Dataset and both Images moved
query_service = client.sf.getQueryService()
ctx = {'omero.group': '-1'} # query across groups
dataset = query_service.get('Dataset', ds.id.val, ctx)
assert target_gid == dataset.details.group.id.val,\
"Dataset should be in group: %s" % target_gid
for i in range(2):
image = query_service.get('Image', images[i].id.val, ctx)
img_gid = image.details.group.id.val
assert target_gid == img_gid,\
"Image should be in group: %s, NOT %s" % (target_gid, img_gid)
def testChgrpImagesTwoFilesetsErr(self):
"""
If we try to 'split' 2 Filesets, both should be returned
by the chgrp error
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
images_fs_one = self.import_fake_file(2, client=client)
images_fs_two = self.import_fake_file(2, client=client)
# chgrp should fail...
ids = [images_fs_one[0].id.val, images_fs_two[0].id.val]
chgrp = Chgrp2(targetObjects={"Image": ids}, groupId=target_gid)
self.do_submit(chgrp, client, test_should_pass=False)
def testChgrpDatasetTwoFilesetsErr(self):
"""
If we try to 'split' 2 Filesets, both should be returned
by the chgrp error
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
images_fs_one = self.import_fake_file(2, client=client)
images_fs_two = self.import_fake_file(2, client=client)
ds = self.make_dataset(name="testChgrpDatasetTwoFilesetsErr",
client=client)
self.import_fake_file(2, client=client)
for i in (images_fs_one, images_fs_two):
self.link(ds, i[0], client=client)
# chgrp should succeed with the Dataset only
chgrp = Chgrp2(
targetObjects={"Dataset": [ds.id.val]}, groupId=target_gid)
self.do_submit(chgrp, client)
query_service = client.sf.getQueryService()
# Check Images not moved
for i in (images_fs_one[0], images_fs_two[0]):
image = query_service.get('Image', i.id.val)
assert target_gid != image.details.group.id.val,\
"Image should not be in group: %s" % target_gid
ctx = {'omero.group': str(target_gid)} # query in the target group
# Check Dataset moved
dataset = query_service.get('Dataset', ds.id.val, ctx)
assert target_gid == dataset.details.group.id.val,\
"Dataset should be in group: %s" % target_gid
def testChgrpDatasetCheckFsGroup(self):
"""
Move a Dataset of MIF images into a new group,
then check that the Fileset group is the same as the target group.
From 'Security Violation'
Bug https://github.com/openmicroscopy/openmicroscopy/pull/1139
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
ds = self.make_dataset(name="testChgrpDatasetCheckFsGroup",
client=client)
images = self.import_fake_file(2, client=client)
for i in range(2):
self.link(ds, images[i], client=client)
# Now chgrp, should succeed
chgrp = Chgrp2(
targetObjects={"Dataset": [ds.id.val]}, groupId=target_gid)
self.do_submit(chgrp, client)
# Check the group of the fileset is in sync with image.
ctx = {'omero.group': '-1'}
qs = client.sf.getQueryService()
image1 = qs.get("Image", images[0].id.val, ctx)
fs_id = image1.fileset.id.val
image_gid = image1.details.group.id.val
fileset_gid = qs.get("Fileset", fs_id, ctx).details.group.id.val
assert image_gid == fileset_gid,\
"Image group: %s and Fileset group: %s don't match" %\
(image_gid, fileset_gid)
def testChgrpFilesetOK(self):
"""
Move a Fileset of MIF images into a new group,
then check that the Fileset group is the same as the target group.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
query = client.sf.getQueryService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
images = self.import_fake_file(2, client=client)
fs_id = query.get("Image", images[0].id.val).fileset.id.val
# Now chgrp, should succeed
chgrp = Chgrp2(targetObjects={"Fileset": [fs_id]}, groupId=target_gid)
self.do_submit(chgrp, client)
# Check Fileset and both Images moved and
# thus the Fileset is in sync with Images.
ctx = {'omero.group': '-1'} # query across groups
fileset = query.get('Fileset', fs_id, ctx)
assert target_gid == fileset.details.group.id.val,\
"Fileset should be in group: %s" % target_gid
for i in range(2):
image = query.get('Image', images[i].id.val, ctx)
img_gid = image.details.group.id.val
assert target_gid == img_gid,\
"Image should be in group: %s, NOT %s" % (target_gid, img_gid)
def testChgrp11000(self):
"""
Move a Dataset of MIF images *with a companion file* into a new group.
Note: once FakeReader supports companion files this logic can be
simplified.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
ds = self.make_dataset(name="testChgrp11000", client=client)
images = self.import_fake_file(2, client=client)
for i in range(2):
self.link(ds, images[i], client=client)
# Perform the extra companion file logic
fs = client.sf.getQueryService().findByQuery("""
select fs from Image i
join i.fileset fs
join fetch fs.usedFiles as uf
join fetch uf.originalFile
where i.id = %s
""" % images[0].id.val, None)
entry1 = fs.getFilesetEntry(0)
ofile = entry1.getOriginalFile()
for i in range(2):
ann = omero.model.FileAnnotationI()
ann.file = ofile.proxy()
self.link(images[i], ann, client=client)
def testChgrp11109(self):
"""
Place a plate in a single screen and attempt to move it.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
update = client.sf.getUpdateService()
plate = PlateI()
plate.name = rstring("testChgrp11109")
screen = ScreenI()
screen.name = rstring("testChgrp11109")
link = screen.linkPlate(plate)
link = update.saveAndReturnObject(link)
# Now chgrp, should succeed
chgrp = Chgrp2(
targetObjects={"Plate": [link.child.id.val]}, groupId=target_gid)
self.do_submit(chgrp, client)
# Check that the links have been destroyed
query = client.sf.getQueryService()
with pytest.raises(omero.ValidationException):
query.get("ScreenPlateLink", link.id.val, {"omero.group": "-1"})
def testChgrpDatasetWithImage(self):
"""
D->I
ChGrp D
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
d = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(d, i, client=client)
self.change_group([d], target_gid, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Image",
i.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d.id.val, ctx).details.group.id.val
def testChgrpPDIReverseLinkOrder(self):
"""
P->D->I
ChGrp P
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p = self.make_project(client=client)
d = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(p, d, client=client)
self.link(d, i, client=client)
self.change_group([p], target_gid, client=client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d.id.val, ctx).details.group.id.val
assert target_gid == query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpTwoDatasetsLinkedToSingleImageDefault(self):
"""
D1->I
D2->I
ChGrp D1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
d1 = self.make_dataset(client=client)
d2 = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(d1, i, client=client)
self.link(d2, i, client=client)
self.change_group([d1], target_gid, client=client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Dataset",
d1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Dataset",
d2.id.val, ctx).details.group.id.val
assert target_gid != query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpTwoDatasetsLinkedToSingleImageHard(self):
"""
D1->I
D2->I
ChGrp D1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
d1 = self.make_dataset(client=client)
d2 = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(d1, i, client=client)
self.link(d2, i, client=client)
hard = ChildOption(includeType=["Image"])
chgrp = Chgrp2(
targetObjects={"Dataset": [d1.id.val]}, childOptions=[hard],
groupId=target_gid)
self.do_submit(chgrp, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Dataset",
d1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Dataset",
d2.id.val, ctx).details.group.id.val
assert target_gid == query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpProjectWithDatasetLinkedToImageWithOtherDatasetDefault(self):
"""
P->D1->I
D2->I
ChGrp P
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p = self.make_project(client=client)
d1 = self.make_dataset(client=client)
d2 = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(d1, i, client=client)
self.link(d2, i, client=client)
self.link(p, d1, client=client)
self.change_group([p], target_gid, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpProjectWithDatasetLinkedToImageWithOtherDatasetHard(self):
"""
P->D1->I
D2->I
ChGrp P
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p = self.make_project(client=client)
d1 = self.make_dataset(client=client)
d2 = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(d1, i, client=client)
self.link(d2, i, client=client)
self.link(p, d1, client=client)
hard = ChildOption(includeType=["Image"])
chgrp = Chgrp2(
targetObjects={"Project": [p.id.val]}, childOptions=[hard],
groupId=target_gid)
self.do_submit(chgrp, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Dataset",
d2.id.val, ctx).details.group.id.val
assert target_gid == query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpDatasetWithImageLinkedToTwoProjects(self):
"""
P1->D->I
P2->D->I
ChGrp D
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p1 = self.make_project(client=client)
p2 = self.make_project(client=client)
d = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(p1, d, client=client)
self.link(p2, d, client=client)
self.link(d, i, client=client)
self.change_group([d], target_gid, client)
ctx = {'omero.group': '-1'}
assert not target_gid == query.get("Project",
p1.id.val, ctx).details.group.id.val
assert not target_gid == query.get("Project",
p2.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d.id.val, ctx).details.group.id.val
assert target_gid == query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpProjectLinkedToDatasetAndImageDefault(self):
"""
P1->D->I
P2->D->I
ChGrp P1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p1 = self.make_project(client=client)
p2 = self.make_project(client=client)
d = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(p1, d, client=client)
self.link(p2, d, client=client)
self.link(d, i, client=client)
self.change_group([p1], target_gid, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Dataset",
d.id.val, ctx).details.group.id.val
assert target_gid != query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpProjectLinkedToDatasetAndImageHard(self):
"""
P1->D->I
P2->D->I
ChGrp P1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p1 = self.make_project(client=client)
p2 = self.make_project(client=client)
d = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(p1, d, client=client)
self.link(p2, d, client=client)
self.link(d, i, client=client)
hard = ChildOption(includeType=["Dataset"])
chgrp = Chgrp2(
targetObjects={"Project": [p1.id.val]}, childOptions=[hard],
groupId=target_gid)
self.do_submit(chgrp, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Project",
p2.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d.id.val, ctx).details.group.id.val
assert target_gid == query.get("Image",
i.id.val, ctx).details.group.id.val
def testChgrpProjectLinkedToDatasetDefault(self):
"""
P1->D
P2->D
ChGrp P1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p1 = self.make_project(client=client)
p2 = self.make_project(client=client)
d = self.make_dataset(client=client)
self.link(p1, d, client=client)
self.link(p2, d, client=client)
self.change_group([p1], target_gid, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Dataset",
d.id.val, ctx).details.group.id.val
def testChgrpProjectLinkedToDatasetHard(self):
"""
P1->D
P2->D
ChGrp P1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p1 = self.make_project(client=client)
p2 = self.make_project(client=client)
d = self.make_dataset(client=client)
self.link(p1, d, client=client)
self.link(p2, d, client=client)
hard = ChildOption(includeType=["Dataset"])
chgrp = Chgrp2(
targetObjects={"Project": [p1.id.val]}, childOptions=[hard],
groupId=target_gid)
self.do_submit(chgrp, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p1.id.val, ctx).details.group.id.val
assert target_gid != query.get("Project",
p2.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d.id.val, ctx).details.group.id.val
def testChgrpProjectLinkedToTwoDatasetsAndImage(self):
"""
P->D1->I
P->D2->I
ChGrp P
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
admin.getEventContext() # Refresh
query = client.sf.getQueryService()
p = self.make_project(client=client)
d1 = self.make_dataset(client=client)
d2 = self.make_dataset(client=client)
i = self.make_image(client=client)
self.link(p, d1, client=client)
self.link(p, d2, client=client)
self.link(d1, i, client=client)
self.link(d2, i, client=client)
self.change_group([p], target_gid, client)
ctx = {'omero.group': '-1'}
assert target_gid == query.get("Project",
p.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d1.id.val, ctx).details.group.id.val
assert target_gid == query.get("Dataset",
d2.id.val, ctx).details.group.id.val
assert target_gid == query.get("Image",
i.id.val, ctx).details.group.id.val
def testIntergroupLinks(self):
# create read-annotate group 'read-annotate' with implicit owner
ra_group = self.new_group(perms=READANNOTATE)
self.new_user(group=ra_group, owner=True)
# create private group 'private' with implicit owner
p_group = self.new_group(perms=PRIVATE)
self.new_user(group=p_group, owner=True)
# create new user 'image-owner' who is a member of both 'read-annotate'
# and 'private'
io_client, image_owner = self.new_client_and_user(group=ra_group)
self.add_groups(image_owner, [p_group])
# create new user 'tag-owner' who is a member of both 'read-annotate'
# and 'private'
to_client, tag_owner = self.new_client_and_user(group=ra_group)
self.add_groups(tag_owner, [p_group])
# switch user to 'image-owner'
# import two images into 'read-annotate'
images = []
for x in range(0, 2):
values = self.import_fake_file(client=io_client)
images.append(values[0])
image = io_client.sf.getQueryService().get("Image",
images[x].id.val)
assert ra_group.id.val == image.details.group.id.val
# switch user to tag-owner
# tag both image-owner's images with the same new tag
tag = self.new_object(
TagAnnotationI, name="tag from user %s" % tag_owner.omeName.val)
tag = to_client.sf.getUpdateService().saveAndReturnObject(tag)
assert tag_owner.id.val == tag.details.owner.id.val
links = []
for image in images:
links.append(self.link(image, tag, client=to_client))
# (shell) as root
# run bin/omero hql --all 'select parent.details.group.id,
        # child.details.group.id from ImageAnnotationLink'
# and observe that for each row
# the group ID in Col1 matches that in Col2
for link in links:
assert link.parent.details.group.id == link.child.details.group.id
# switch user to image-owner
# right-click one of the images and move it to private
self.change_group([images[0]], p_group.id.val, io_client)
# (shell) as root
# run bin/omero hql --all 'select parent.details.group.id,
# child.details.group.id from ImageAnnotationLink' and recoil in horror
params = omero.sys.ParametersI()
params.addId(tag.id.val)
ctx = {"omero.group": "-1"}
query = "select parent.details.group.id,"
query += " child.details.group.id from ImageAnnotationLink"
query += " where child.id = :id"
links = unwrap(self.root.sf.getQueryService().projection(query, params,
ctx))
assert links is not None
for link in links:
assert link[0] == link[1]
class TestChgrpTarget(ITest):
def createDSInGroup(self, gid, name=None, client=None):
if name is None:
name = self.uuid()
if client is None:
client = self.client
ctx = {'omero.group': str(gid)}
update = client.sf.getUpdateService()
ds = self.new_dataset(name)
return update.saveAndReturnObject(ds, ctx)
def chgrpImagesToTargetDataset(self, img_count):
"""
Helper method to test chgrp of image(s) to target Dataset
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
admin = client.sf.getAdminService()
target_grp = self.new_group([user], perms=PRIVATE)
target_gid = target_grp.id.val
images = self.import_fake_file(img_count, client=client)
ds = self.createDSInGroup(target_gid, client=client)
# each chgrp includes a 'save' link to target dataset
saves = []
ids = []
for i in images:
ids.append(i.id.val)
link = DatasetImageLinkI()
link.child = ImageI(i.id.val, False)
link.parent = DatasetI(ds.id.val, False)
save = Save()
save.obj = link
saves.append(save)
chgrp = Chgrp2(
targetObjects={"Image": ids}, groupId=target_gid)
requests = [chgrp]
requests.extend(saves)
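        # The chgrp and the link saves are submitted as one chain, run in
        # the target group's context.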
self.do_submit(requests, client, omero_group=target_gid)
# Check Images moved to correct group
query_service = client.sf.getQueryService()
ctx = {'omero.group': '-1'} # query across groups
for i in images:
image = query_service.get('Image', i.id.val, ctx)
img_gid = image.details.group.id.val
assert target_gid == img_gid,\
"Image should be in group: %s, NOT %s" % (target_gid, img_gid)
# Check Dataset has images linked
ds_imgs = client.sf.getContainerService().getImages(
'Dataset', [ds.id.val], None, ctx)
assert len(ds_imgs) == len(images),\
"All Images should be in target Dataset"
previous_gid = admin.getEventContext().groupId
return (ds, images, client, user, previous_gid, target_gid)
def testChgrpImageToTargetDataset(self):
""" Chgrp a single Image to target Dataset """
self.chgrpImagesToTargetDataset(1)
def testChgrpMifImagesToTargetDataset(self):
""" Chgrp 2 images in a MIF to target Dataset """
self.chgrpImagesToTargetDataset(2)
def testChgrpImageToTargetDatasetAndBackNoDS(self):
"""
Chgrp a single Image to target Dataset and then back
No target is provided on the way back.
see ticket:11118
"""
ds, images, client, user, old_gid, new_gid =\
self.chgrpImagesToTargetDataset(1)
chgrp = Chgrp2(
targetObjects={"Image": [images[0].id.val]}, groupId=old_gid)
self.do_submit(chgrp, client, omero_group=old_gid)
def testChgrpImageToTargetDatasetAndBackDS(self):
"""
Chgrp a single Image to target Dataset and then back
see ticket:11118
"""
new_ds, images, client, user, old_gid, new_gid =\
self.chgrpImagesToTargetDataset(1)
# create Dataset in original group
old_ds = self.createDSInGroup(old_gid, client=client)
link = DatasetImageLinkI()
link.parent = old_ds.proxy()
link.child = images[0].proxy()
chgrp = Chgrp2(
targetObjects={"Image": [images[0].id.val]}, groupId=old_gid)
save = Save(link)
self.do_submit([chgrp, save], client, omero_group=old_gid)
dils = client.sf.getQueryService().findAllByQuery(
"select dil from DatasetImageLink dil where dil.child.id = :id",
omero.sys.ParametersI().addId(images[0].id.val),
{"omero.group": "-1"})
assert 1 == len(dils)
@pytest.mark.parametrize("credentials", ["user", "admin"])
def testChgrpDatasetToTargetProject(self, credentials):
"""
Tests that an Admin can move a user's Dataset to a private
group and link it to an existing user's Project there.
Also tests that the user can do the same chgrp themselves.
"""
# One user in two groups
client, user = self.new_client_and_user(perms=PRIVATE)
target_grp = self.new_group([user], perms=PRIVATE)
e_ctx = client.sf.getAdminService().getEventContext() # Reset session
user_id = e_ctx.userId
target_gid = target_grp.id.val
# User creates Dataset in current group...
update = client.sf.getUpdateService()
ds = self.make_dataset(client=client)
# ...and Project in target group
ctx = {'omero.group': str(target_gid)}
pr = self.new_project()
pr = update.saveAndReturnObject(pr, ctx)
requests = []
saves = []
chgrp = Chgrp2(
targetObjects={"Dataset": [ds.id.val]}, groupId=target_gid)
requests.append(chgrp)
link = ProjectDatasetLinkI()
link.details.owner = ExperimenterI(user_id, False)
link.child = DatasetI(ds.id.val, False)
link.parent = ProjectI(pr.id.val, False)
save = Save()
save.obj = link
saves.append(save)
requests.extend(saves)
if credentials == "user":
c = client
else:
c = self.root
self.do_submit(requests, c, omero_group=target_gid)
query_service = client.sf.getQueryService()
ctx = {'omero.group': '-1'} # query across groups
dataset = query_service.get('Dataset', ds.id.val, ctx)
ds_gid = dataset.details.group.id.val
assert target_gid == ds_gid,\
"Dataset should be in group: %s, NOT %s" % (target_gid, ds_gid)
| gpl-2.0 | 1,454,220,204,188,468,700 | 38.021941 | 79 | 0.586038 | false |
MarcosCommunity/odoo | addons/base_report_designer/plugin/openerp_report_designer/bin/script/Repeatln.py | 293 | 13228 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer [email protected]
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import uno
import string
import unohelper
import xmlrpclib
from com.sun.star.task import XJobExecutor
if __name__<>"package":
from lib.gui import *
from lib.error import ErrorDialog
from lib.functions import *
from ServerParameter import *
from lib.logreport import *
from lib.rpc import *
from LoginTest import *
database="test_db1"
uid = 3
#class RepeatIn:
class RepeatIn( unohelper.Base, XJobExecutor ):
def __init__(self, sObject="", sVariable="", sFields="", sDisplayName="", bFromModify=False):
# Interface Design
LoginTest()
self.logobj=Logger()
if not loginstatus and __name__=="package":
exit(1)
self.win = DBModalDialog(60, 50, 180, 250, "RepeatIn Builder")
self.win.addFixedText("lblVariable", 2, 12, 60, 15, "Objects to loop on :")
self.win.addComboBox("cmbVariable", 180-120-2, 10, 120, 15,True, itemListenerProc=self.cmbVariable_selected)
self.insVariable = self.win.getControl( "cmbVariable" )
self.win.addFixedText("lblFields", 10, 32, 60, 15, "Field to loop on :")
self.win.addComboListBox("lstFields", 180-120-2, 30, 120, 150, False,itemListenerProc=self.lstbox_selected)
self.insField = self.win.getControl( "lstFields" )
self.win.addFixedText("lblName", 12, 187, 60, 15, "Variable name :")
self.win.addEdit("txtName", 180-120-2, 185, 120, 15,)
self.win.addFixedText("lblUName", 8, 207, 60, 15, "Displayed name :")
self.win.addEdit("txtUName", 180-120-2, 205, 120, 15,)
self.win.addButton('btnOK',-2 ,-10,45,15,'Ok', actionListenerProc = self.btnOk_clicked )
self.win.addButton('btnCancel',-2 - 45 - 5 ,-10,45,15,'Cancel', actionListenerProc = self.btnCancel_clicked )
global passwd
self.password = passwd
global url
self.sock=RPCSession(url)
# Variable Declaration
self.sValue=None
self.sObj=None
self.aSectionList=[]
self.sGVariable=sVariable
self.sGDisplayName=sDisplayName
self.aItemList=[]
self.aComponentAdd=[]
self.aObjectList=[]
self.aListRepeatIn=[]
self.aVariableList=[]
        # Call method to perform Enumeration on Report Document
EnumDocument(self.aItemList,self.aComponentAdd)
        # Check whether Field-1 and Field-4 are available; if so, also get the
        # combobox filled
desktop = getDesktop()
doc = desktop.getCurrentComponent()
docinfo = doc.getDocumentInfo()
        # Check whether Field-1 is available; if not, exit from the application
self.sMyHost= ""
if not docinfo.getUserFieldValue(3) == "" and not docinfo.getUserFieldValue(0)=="":
self.sMyHost= docinfo.getUserFieldValue(0)
self.count=0
oParEnum = doc.getTextFields().createEnumeration()
while oParEnum.hasMoreElements():
oPar = oParEnum.nextElement()
if oPar.supportsService("com.sun.star.text.TextField.DropDown"):
self.count += 1
getList(self.aObjectList, self.sMyHost,self.count)
cursor = doc.getCurrentController().getViewCursor()
text = cursor.getText()
tcur = text.createTextCursorByRange(cursor)
self.aVariableList.extend( filter( lambda obj: obj[:obj.find(" ")] == "List", self.aObjectList ) )
for i in range(len(self.aItemList)):
try:
anItem = self.aItemList[i][1]
component = self.aComponentAdd[i]
if component == "Document":
sLVal = anItem[anItem.find(",'") + 2:anItem.find("')")]
self.aVariableList.extend( filter( lambda obj: obj[:obj.find("(")] == sLVal, self.aObjectList ) )
if tcur.TextSection:
getRecersiveSection(tcur.TextSection,self.aSectionList)
if component in self.aSectionList:
sLVal = anItem[anItem.find(",'") + 2:anItem.find("')")]
self.aVariableList.extend( filter( lambda obj: obj[:obj.find("(")] == sLVal, self.aObjectList ) )
if tcur.TextTable:
if not component == "Document" and component[component.rfind(".") + 1:] == tcur.TextTable.Name:
VariableScope( tcur, self.aVariableList, self.aObjectList, self.aComponentAdd, self.aItemList, component )
except :
import traceback,sys
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
self.logobj.log_write('RepeatIn', LOG_ERROR, info)
self.bModify=bFromModify
if self.bModify==True:
if sObject=="":
self.insVariable.setText("List of "+docinfo.getUserFieldValue(3))
self.insField.addItem("objects",self.win.getListBoxItemCount("lstFields"))
self.win.setEditText("txtName", sVariable)
self.win.setEditText("txtUName",sDisplayName)
self.sValue= "objects"
else:
sItem=""
for anObject in self.aObjectList:
if anObject[:anObject.find("(")] == sObject:
sItem = anObject
self.insVariable.setText( sItem )
genTree(
sItem[sItem.find("(")+1:sItem.find(")")],
self.aListRepeatIn,
self.insField,
self.sMyHost,
2,
ending=['one2many','many2many'],
recur=['one2many','many2many']
)
self.sValue= self.win.getListBoxItem("lstFields",self.aListRepeatIn.index(sFields))
for var in self.aVariableList:
if var[:8] <> 'List of ':
self.model_ids = self.sock.execute(database, uid, self.password, 'ir.model' , 'search', [('model','=',var[var.find("(")+1:var.find(")")])])
else:
self.model_ids = self.sock.execute(database, uid, self.password, 'ir.model' , 'search', [('model','=',var[8:])])
fields=['name','model']
self.model_res = self.sock.execute(database, uid, self.password, 'ir.model', 'read', self.model_ids,fields)
if self.model_res <> []:
if var[:8]<>'List of ':
self.insVariable.addItem(var[:var.find("(")+1] + self.model_res[0]['name'] + ")" ,self.insVariable.getItemCount())
else:
self.insVariable.addItem('List of ' + self.model_res[0]['name'] ,self.insVariable.getItemCount())
else:
self.insVariable.addItem(var ,self.insVariable.getItemCount())
self.win.doModalDialog("lstFields",self.sValue)
else:
ErrorDialog("Please Select Appropriate module" ,"Create new report from: \nOdoo -> Open a New Report")
self.win.endExecute()
def lstbox_selected(self, oItemEvent):
sItem=self.win.getListBoxSelectedItem("lstFields")
sMain=self.aListRepeatIn[self.win.getListBoxSelectedItemPos("lstFields")]
if self.bModify==True:
self.win.setEditText("txtName", self.sGVariable)
self.win.setEditText("txtUName",self.sGDisplayName)
else:
self.win.setEditText("txtName",sMain[sMain.rfind("/")+1:])
self.win.setEditText("txtUName","|-."+sItem[sItem.rfind("/")+1:]+".-|")
def cmbVariable_selected(self, oItemEvent):
if self.count > 0 :
desktop=getDesktop()
doc =desktop.getCurrentComponent()
docinfo=doc.getDocumentInfo()
self.win.removeListBoxItems("lstFields", 0, self.win.getListBoxItemCount("lstFields"))
sItem=self.win.getComboBoxText("cmbVariable")
for var in self.aVariableList:
if var[:8]=='List of ':
if var[:8]==sItem[:8]:
sItem = var
elif var[:var.find("(")+1] == sItem[:sItem.find("(")+1]:
sItem = var
self.aListRepeatIn=[]
data = ( sItem[sItem.rfind(" ") + 1:] == docinfo.getUserFieldValue(3) ) and docinfo.getUserFieldValue(3) or sItem[sItem.find("(")+1:sItem.find(")")]
genTree( data, self.aListRepeatIn, self.insField, self.sMyHost, 2, ending=['one2many','many2many'], recur=['one2many','many2many'] )
self.win.selectListBoxItemPos("lstFields", 0, True )
else:
sItem=self.win.getComboBoxText("cmbVariable")
for var in self.aVariableList:
if var[:8]=='List of ' and var[:8] == sItem[:8]:
sItem = var
if sItem.find(".")==-1:
temp=sItem[sItem.rfind("x_"):]
else:
temp=sItem[sItem.rfind(".")+1:]
self.win.setEditText("txtName",temp)
self.win.setEditText("txtUName","|-."+temp+".-|")
self.insField.addItem("objects",self.win.getListBoxItemCount("lstFields"))
self.win.selectListBoxItemPos("lstFields", 0, True )
def btnOk_clicked(self, oActionEvent):
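        # Build the [[ repeatIn(...) ]] placeholder expression and insert it
        # into the document as a DropDown text field; when modifying an
        # existing field, only its Items tuple is updated.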
desktop=getDesktop()
doc = desktop.getCurrentComponent()
cursor = doc.getCurrentController().getViewCursor()
selectedItem = self.win.getListBoxSelectedItem( "lstFields" )
selectedItemPos = self.win.getListBoxSelectedItemPos( "lstFields" )
txtName = self.win.getEditText( "txtName" )
txtUName = self.win.getEditText( "txtUName" )
if selectedItem != "" and txtName != "" and txtUName != "":
sKey=u""+ txtUName
if selectedItem == "objects":
sValue=u"[[ repeatIn(" + selectedItem + ",'" + txtName + "') ]]"
else:
sObjName=self.win.getComboBoxText("cmbVariable")
sObjName=sObjName[:sObjName.find("(")]
sValue=u"[[ repeatIn(" + sObjName + self.aListRepeatIn[selectedItemPos].replace("/",".") + ",'" + txtName +"') ]]"
if self.bModify == True:
oCurObj = cursor.TextField
oCurObj.Items = (sKey,sValue)
oCurObj.update()
else:
oInputList = doc.createInstance("com.sun.star.text.TextField.DropDown")
if self.win.getListBoxSelectedItem("lstFields") == "objects":
oInputList.Items = (sKey,sValue)
doc.Text.insertTextContent(cursor,oInputList,False)
else:
sValue=u"[[ repeatIn(" + sObjName + self.aListRepeatIn[selectedItemPos].replace("/",".") + ",'" + txtName +"') ]]"
if cursor.TextTable==None:
oInputList.Items = (sKey,sValue)
doc.Text.insertTextContent(cursor,oInputList,False)
else:
oInputList.Items = (sKey,sValue)
widget = ( cursor.TextTable or selectedItem <> 'objects' ) and cursor.TextTable.getCellByName( cursor.Cell.CellName ) or doc.Text
widget.insertTextContent(cursor,oInputList,False)
self.win.endExecute()
else:
ErrorDialog("Please fill appropriate data in Object Field or Name field \nor select particular value from the list of fields.")
def btnCancel_clicked(self, oActionEvent):
self.win.endExecute()
if __name__<>"package" and __name__=="__main__":
RepeatIn()
elif __name__=="package":
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation( RepeatIn, "org.openoffice.openerp.report.repeatln", ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -1,155,928,816,165,303,000 | 46.412186 | 160 | 0.571591 | false |
NitroKK/kernel_lge_iproj | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
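		# A prev task still in the runnable state ("R") stays on the queue;
		# any other prev_state means it went to sleep and leaves the queue.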
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
def __repr__(self):
ret = self.tasks.__repr__()
ret += self.origin_tostring()
return ret
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
def __init__(self, arg = []):
self.data = arg
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
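		# Binary search over the ordered, non-overlapping slices; returns the
		# index of the slice containing ts, or -1 if no slice does.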
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
| gpl-2.0 | 8,364,869,242,313,903,000 | 24.314534 | 88 | 0.675407 | false |
CLVsol/oehealth | oehealth_pharmacy/oehealth_annotation.py | 1 | 1638 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp.osv import orm, fields
class oehealth_annotation(orm.Model):
_inherit = 'oehealth.annotation'
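    # Extends the base annotation model with a link back to the pharmacy the
    # annotation belongs to.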
_columns = {
'pharmacy_id' : fields.many2one ('oehealth.pharmacy', 'Pharmacy'),
}
oehealth_annotation()
| agpl-3.0 | 4,323,486,043,866,246,700 | 55.482759 | 80 | 0.440171 | false |
SrNetoChan/Quantum-GIS | python/plugins/processing/algs/qgis/Heatmap.py | 15 | 11238 | # -*- coding: utf-8 -*-
"""
***************************************************************************
Heatmap.py
---------------------
Date : November 2016
Copyright : (C) 2016 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'November 2016'
__copyright__ = '(C) 2016, Nyall Dawson'
import os
from collections import OrderedDict
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsApplication,
QgsFeatureRequest,
QgsRasterFileWriter,
QgsProcessing,
QgsProcessingException,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterNumber,
QgsProcessingParameterDistance,
QgsProcessingParameterField,
QgsProcessingParameterEnum,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterDestination)
from qgis.analysis import QgsKernelDensityEstimation
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class Heatmap(QgisAlgorithm):
INPUT = 'INPUT'
RADIUS = 'RADIUS'
RADIUS_FIELD = 'RADIUS_FIELD'
WEIGHT_FIELD = 'WEIGHT_FIELD'
PIXEL_SIZE = 'PIXEL_SIZE'
KERNEL = 'KERNEL'
DECAY = 'DECAY'
OUTPUT_VALUE = 'OUTPUT_VALUE'
OUTPUT = 'OUTPUT'
def icon(self):
return QgsApplication.getThemeIcon("/heatmap.svg")
def tags(self):
return self.tr('heatmap,kde,hotspot').split(',')
def group(self):
return self.tr('Interpolation')
def groupId(self):
return 'interpolation'
def name(self):
return 'heatmapkerneldensityestimation'
def displayName(self):
return self.tr('Heatmap (Kernel Density Estimation)')
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.KERNELS = OrderedDict([(self.tr('Quartic'), QgsKernelDensityEstimation.KernelQuartic),
(self.tr('Triangular'), QgsKernelDensityEstimation.KernelTriangular),
(self.tr('Uniform'), QgsKernelDensityEstimation.KernelUniform),
(self.tr('Triweight'), QgsKernelDensityEstimation.KernelTriweight),
(self.tr('Epanechnikov'), QgsKernelDensityEstimation.KernelEpanechnikov)])
self.OUTPUT_VALUES = OrderedDict([(self.tr('Raw'), QgsKernelDensityEstimation.OutputRaw),
(self.tr('Scaled'), QgsKernelDensityEstimation.OutputScaled)])
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Point layer'),
[QgsProcessing.TypeVectorPoint]))
self.addParameter(QgsProcessingParameterDistance(self.RADIUS,
self.tr('Radius'),
100.0, self.INPUT, False, 0.0))
radius_field_param = QgsProcessingParameterField(self.RADIUS_FIELD,
self.tr('Radius from field'),
None,
self.INPUT,
QgsProcessingParameterField.Numeric,
optional=True
)
radius_field_param.setFlags(radius_field_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(radius_field_param)
class ParameterHeatmapPixelSize(QgsProcessingParameterNumber):
def __init__(self, name='', description='', parent_layer=None, radius_param=None, radius_field_param=None, minValue=None,
default=None, optional=False):
QgsProcessingParameterNumber.__init__(self, name, description, QgsProcessingParameterNumber.Double, default, optional, minValue)
self.parent_layer = parent_layer
self.radius_param = radius_param
self.radius_field_param = radius_field_param
def clone(self):
                copy = ParameterHeatmapPixelSize(self.name(), self.description(), self.parent_layer, self.radius_param, self.radius_field_param, self.minimum(), self.defaultValue(), self.flags() & QgsProcessingParameterDefinition.FlagOptional)
return copy
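        # The Processing framework duplicates parameter definitions via
        # clone(), so the subclass must carry its extra attributes (parent
        # layer and the two radius parameters) across the copy.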
pixel_size_param = ParameterHeatmapPixelSize(self.PIXEL_SIZE,
self.tr('Output raster size'),
parent_layer=self.INPUT,
radius_param=self.RADIUS,
radius_field_param=self.RADIUS_FIELD,
minValue=0.0,
default=0.1)
pixel_size_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.qgis.ui.HeatmapWidgets.HeatmapPixelSizeWidgetWrapper'}})
self.addParameter(pixel_size_param)
weight_field_param = QgsProcessingParameterField(self.WEIGHT_FIELD,
self.tr('Weight from field'),
None,
self.INPUT,
QgsProcessingParameterField.Numeric,
optional=True
)
weight_field_param.setFlags(weight_field_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(weight_field_param)
keys = list(self.KERNELS.keys())
kernel_shape_param = QgsProcessingParameterEnum(self.KERNEL,
self.tr('Kernel shape'),
keys,
allowMultiple=False,
defaultValue=0)
kernel_shape_param.setFlags(kernel_shape_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(kernel_shape_param)
decay_ratio = QgsProcessingParameterNumber(self.DECAY,
self.tr('Decay ratio (Triangular kernels only)'),
QgsProcessingParameterNumber.Double,
0.0, True, -100.0, 100.0)
decay_ratio.setFlags(decay_ratio.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(decay_ratio)
keys = list(self.OUTPUT_VALUES.keys())
output_scaling = QgsProcessingParameterEnum(self.OUTPUT_VALUE,
self.tr('Output value scaling'),
keys,
allowMultiple=False,
defaultValue=0)
output_scaling.setFlags(output_scaling.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(output_scaling)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT, self.tr('Heatmap')))
def processAlgorithm(self, parameters, context, feedback):
source = self.parameterAsSource(parameters, self.INPUT, context)
if source is None:
raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
radius = self.parameterAsDouble(parameters, self.RADIUS, context)
kernel_shape = self.parameterAsEnum(parameters, self.KERNEL, context)
pixel_size = self.parameterAsDouble(parameters, self.PIXEL_SIZE, context)
decay = self.parameterAsDouble(parameters, self.DECAY, context)
output_values = self.parameterAsEnum(parameters, self.OUTPUT_VALUE, context)
outputFile = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
output_format = QgsRasterFileWriter.driverForExtension(os.path.splitext(outputFile)[1])
weight_field = self.parameterAsString(parameters, self.WEIGHT_FIELD, context)
radius_field = self.parameterAsString(parameters, self.RADIUS_FIELD, context)
attrs = []
kde_params = QgsKernelDensityEstimation.Parameters()
kde_params.source = source
kde_params.radius = radius
kde_params.pixelSize = pixel_size
# radius field
if radius_field:
kde_params.radiusField = radius_field
attrs.append(source.fields().lookupField(radius_field))
# weight field
if weight_field:
kde_params.weightField = weight_field
attrs.append(source.fields().lookupField(weight_field))
kde_params.shape = kernel_shape
kde_params.decayRatio = decay
kde_params.outputValues = output_values
kde = QgsKernelDensityEstimation(kde_params, outputFile, output_format)
if kde.prepare() != QgsKernelDensityEstimation.Success:
raise QgsProcessingException(
self.tr('Could not create destination layer'))
request = QgsFeatureRequest()
request.setSubsetOfAttributes(attrs)
features = source.getFeatures(request)
total = 100.0 / source.featureCount() if source.featureCount() else 0
for current, f in enumerate(features):
if feedback.isCanceled():
break
if kde.addFeature(f) != QgsKernelDensityEstimation.Success:
feedback.reportError(self.tr('Error adding feature with ID {} to heatmap').format(f.id()))
feedback.setProgress(int(current * total))
if kde.finalise() != QgsKernelDensityEstimation.Success:
raise QgsProcessingException(
self.tr('Could not save destination layer'))
return {self.OUTPUT: outputFile}
| gpl-2.0 | 5,786,339,362,994,566,000 | 48.289474 | 261 | 0.532657 | false |
SnabbCo/neutron | neutron/tests/unit/services/firewall/agents/l3reference/test_firewall_l3_agent.py | 3 | 16283 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, [email protected], Big Switch Networks, Inc.
# @author: Sridar Kandaswamy, [email protected], Cisco Systems, Inc.
# @author: Dan Florea, [email protected], Cisco Systems, Inc.
import contextlib
import uuid
import mock
from oslo.config import cfg
from neutron.agent.common import config as agent_config
from neutron.agent import l3_agent
from neutron.agent.linux import ip_lib
from neutron.common import config as base_config
from neutron import context
from neutron.plugins.common import constants
from neutron.services.firewall.agents.l3reference import firewall_l3_agent
from neutron.tests import base
from neutron.tests.unit.services.firewall.agents import test_firewall_agent_api
class FWaasHelper(object):
def __init__(self, host):
pass
class FWaasAgent(firewall_l3_agent.FWaaSL3AgentRpcCallback, FWaasHelper):
def __init__(self, conf=None):
super(FWaasAgent, self).__init__(conf)
class TestFwaasL3AgentRpcCallback(base.BaseTestCase):
def setUp(self):
super(TestFwaasL3AgentRpcCallback, self).setUp()
self.conf = cfg.ConfigOpts()
self.conf.register_opts(base_config.core_opts)
self.conf.register_opts(l3_agent.L3NATAgent.OPTS)
agent_config.register_use_namespaces_opts_helper(self.conf)
agent_config.register_root_helper(self.conf)
self.conf.root_helper = 'sudo'
self.api = FWaasAgent(self.conf)
self.api.fwaas_driver = test_firewall_agent_api.NoopFwaasDriver()
def test_create_firewall(self):
fake_firewall = {'id': 0}
with mock.patch.object(
self.api,
'_invoke_driver_for_plugin_api'
) as mock_driver:
self.assertEqual(
self.api.create_firewall(
mock.sentinel.context,
fake_firewall,
'host'),
mock_driver.return_value)
def test_update_firewall(self):
fake_firewall = {'id': 0}
with mock.patch.object(
self.api,
'_invoke_driver_for_plugin_api'
) as mock_driver:
self.assertEqual(
self.api.update_firewall(
mock.sentinel.context,
fake_firewall,
'host'),
mock_driver.return_value)
def test_delete_firewall(self):
fake_firewall = {'id': 0}
with mock.patch.object(
self.api,
'_invoke_driver_for_plugin_api'
) as mock_driver:
self.assertEqual(
self.api.delete_firewall(
mock.sentinel.context,
fake_firewall,
'host'),
mock_driver.return_value)
def test_invoke_driver_for_plugin_api(self):
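        # Happy path: the agent fetches the tenant's routers, hands them to
        # the driver, then reports ACTIVE back to the plugin.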
fake_firewall = {'id': 0, 'tenant_id': 1,
'admin_state_up': True}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'create_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'set_firewall_status')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_create_firewall,
mock_set_firewall_status):
mock_driver_create_firewall.return_value = True
self.api.create_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_set_firewall_status.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'],
'ACTIVE')
def test_invoke_driver_for_plugin_api_admin_state_down(self):
fake_firewall = {'id': 0, 'tenant_id': 1,
'admin_state_up': False}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'update_firewall'),
mock.patch.object(self.api.fwplugin_rpc,
'get_firewalls_for_tenant'),
mock.patch.object(self.api.fwplugin_rpc, 'set_firewall_status')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_update_firewall,
mock_get_firewalls_for_tenant,
mock_set_firewall_status):
mock_driver_update_firewall.return_value = True
self.api.update_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_set_firewall_status.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'],
'DOWN')
def test_invoke_driver_for_plugin_api_delete(self):
fake_firewall = {'id': 0, 'tenant_id': 1,
'admin_state_up': True}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'delete_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'firewall_deleted')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_delete_firewall,
mock_firewall_deleted):
mock_driver_delete_firewall.return_value = True
self.api.delete_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_firewall_deleted.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'])
def test_delete_firewall_no_router(self):
fake_firewall = {'id': 0, 'tenant_id': 1}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwplugin_rpc, 'firewall_deleted')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_firewall_deleted):
mock_get_router_info_list_for_tenant.return_value = []
self.api.delete_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_firewall_deleted.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'])
def test_process_router_add_fw_update(self):
fake_firewall_list = [{'id': 0, 'tenant_id': 1,
'status': constants.PENDING_UPDATE,
'admin_state_up': True}]
fake_router = {'id': 1111, 'tenant_id': 2}
self.api.plugin_rpc = mock.Mock()
ri = mock.Mock()
ri.router = fake_router
routers = [ri.router]
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'update_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'set_firewall_status'),
mock.patch.object(self.api.fwplugin_rpc,
'get_firewalls_for_tenant'),
mock.patch.object(context, 'Context')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_update_firewall,
mock_set_firewall_status,
mock_get_firewalls_for_tenant,
mock_Context):
mock_driver_update_firewall.return_value = True
ctx = mock.sentinel.context
mock_Context.return_value = ctx
mock_get_router_info_list_for_tenant.return_value = routers
mock_get_firewalls_for_tenant.return_value = fake_firewall_list
self.api._process_router_add(ri)
mock_get_router_info_list_for_tenant.assert_called_with(
routers,
ri.router['tenant_id'])
mock_get_firewalls_for_tenant.assert_called_once_with(ctx)
mock_driver_update_firewall.assert_called_once_with(
routers,
fake_firewall_list[0])
mock_set_firewall_status.assert_called_once_with(
ctx,
fake_firewall_list[0]['id'],
constants.ACTIVE)
def test_process_router_add_fw_delete(self):
fake_firewall_list = [{'id': 0, 'tenant_id': 1,
'status': constants.PENDING_DELETE}]
fake_router = {'id': 1111, 'tenant_id': 2}
self.api.plugin_rpc = mock.Mock()
ri = mock.Mock()
ri.router = fake_router
routers = [ri.router]
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'delete_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'firewall_deleted'),
mock.patch.object(self.api.fwplugin_rpc,
'get_firewalls_for_tenant'),
mock.patch.object(context, 'Context')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_delete_firewall,
mock_firewall_deleted,
mock_get_firewalls_for_tenant,
mock_Context):
mock_driver_delete_firewall.return_value = True
ctx = mock.sentinel.context
mock_Context.return_value = ctx
mock_get_router_info_list_for_tenant.return_value = routers
mock_get_firewalls_for_tenant.return_value = fake_firewall_list
self.api._process_router_add(ri)
mock_get_router_info_list_for_tenant.assert_called_with(
routers,
ri.router['tenant_id'])
mock_get_firewalls_for_tenant.assert_called_once_with(ctx)
mock_driver_delete_firewall.assert_called_once_with(
routers,
fake_firewall_list[0])
mock_firewall_deleted.assert_called_once_with(
ctx,
fake_firewall_list[0]['id'])
def _prepare_router_data(self, use_namespaces):
router = {'id': str(uuid.uuid4()), 'tenant_id': str(uuid.uuid4())}
return l3_agent.RouterInfo(router['id'], self.conf.root_helper,
use_namespaces, router=router)
def _get_router_info_list_with_namespace_helper(self,
router_use_namespaces):
self.conf.set_override('use_namespaces', True)
ri = self._prepare_router_data(
use_namespaces=router_use_namespaces)
routers = [ri.router]
self.api.router_info = {ri.router_id: ri}
with mock.patch.object(ip_lib.IPWrapper,
'get_namespaces') as mock_get_namespaces:
mock_get_namespaces.return_value = ri.ns_name
router_info_list = self.api._get_router_info_list_for_tenant(
routers,
ri.router['tenant_id'])
self.assertEqual([ri], router_info_list)
mock_get_namespaces.assert_called_once_with(
self.conf.root_helper)
def _get_router_info_list_without_namespace_helper(self,
router_use_namespaces):
self.conf.set_override('use_namespaces', False)
ri = self._prepare_router_data(
use_namespaces=router_use_namespaces)
routers = [ri.router]
self.api.router_info = {ri.router_id: ri}
router_info_list = self.api._get_router_info_list_for_tenant(
routers,
ri.router['tenant_id'])
if router_use_namespaces:
self.assertFalse(router_info_list)
else:
self.assertEqual([ri], router_info_list)
def test_get_router_info_list_for_tenant_for_namespaces_enabled(self):
self._get_router_info_list_with_namespace_helper(
router_use_namespaces=True)
def test_get_router_info_list_for_tenant_for_namespaces_disabled(self):
self._get_router_info_list_without_namespace_helper(
router_use_namespaces=False)
def test_get_router_info_list_tenant_with_namespace_router_without(self):
self._get_router_info_list_with_namespace_helper(
router_use_namespaces=False)
def test_get_router_info_list_tenant_without_namespace_router_with(self):
self._get_router_info_list_without_namespace_helper(
router_use_namespaces=True)
def _get_router_info_list_router_without_router_info_helper(self,
rtr_with_ri):
self.conf.set_override('use_namespaces', True)
# ri.router with associated router_info (ri)
# rtr2 has no router_info
ri = self._prepare_router_data(use_namespaces=True)
rtr2 = {'id': str(uuid.uuid4()), 'tenant_id': ri.router['tenant_id']}
routers = [rtr2]
self.api.router_info = {}
ri_expected = []
if rtr_with_ri:
self.api.router_info[ri.router_id] = ri
routers.append(ri.router)
ri_expected.append(ri)
with mock.patch.object(ip_lib.IPWrapper,
'get_namespaces') as mock_get_namespaces:
mock_get_namespaces.return_value = ri.ns_name
router_info_list = self.api._get_router_info_list_for_tenant(
routers,
ri.router['tenant_id'])
self.assertEqual(ri_expected, router_info_list)
def test_get_router_info_list_router_without_router_info(self):
self._get_router_info_list_router_without_router_info_helper(
rtr_with_ri=False)
def test_get_router_info_list_two_routers_one_without_router_info(self):
self._get_router_info_list_router_without_router_info_helper(
rtr_with_ri=True)
| apache-2.0 | 812,300,187,931,894,500 | 40.644501 | 79 | 0.586071 | false |
nathanielvarona/airflow | airflow/providers/google/cloud/hooks/functions.py | 1 | 10000 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a Google Cloud Functions Hook."""
import time
from typing import Any, Dict, List, Optional, Sequence, Union
import requests
from googleapiclient.discovery import build
from airflow.exceptions import AirflowException
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
# Time to sleep between active checks of the operation results
TIME_TO_SLEEP_IN_SECONDS = 1
class CloudFunctionsHook(GoogleBaseHook):
"""
Hook for the Google Cloud Functions APIs.
All the methods in the hook where project_id is used must be called with
keyword arguments rather than positional.
"""
_conn = None # type: Optional[Any]
def __init__(
self,
api_version: str,
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
) -> None:
super().__init__(
gcp_conn_id=gcp_conn_id,
delegate_to=delegate_to,
impersonation_chain=impersonation_chain,
)
self.api_version = api_version
@staticmethod
def _full_location(project_id: str, location: str) -> str:
"""
Retrieve full location of the function in the form of
``projects/<GCP_PROJECT_ID>/locations/<GCP_LOCATION>``
:param project_id: The Google Cloud Project project_id where the function belongs.
:type project_id: str
:param location: The location where the function is created.
:type location: str
        :return: The full location string, in the format above.
"""
return f'projects/{project_id}/locations/{location}'
def get_conn(self) -> build:
"""
Retrieves the connection to Cloud Functions.
:return: Google Cloud Function services object.
:rtype: dict
"""
if not self._conn:
http_authorized = self._authorize()
self._conn = build(
'cloudfunctions', self.api_version, http=http_authorized, cache_discovery=False
)
return self._conn
def get_function(self, name: str) -> dict:
"""
Returns the Cloud Function with the given name.
:param name: Name of the function.
:type name: str
:return: A Cloud Functions object representing the function.
:rtype: dict
"""
# fmt: off
return self.get_conn().projects().locations().functions().get( # pylint: disable=no-member
name=name).execute(num_retries=self.num_retries)
# fmt: on
@GoogleBaseHook.fallback_to_default_project_id
def create_new_function(self, location: str, body: dict, project_id: str) -> None:
"""
Creates a new function in Cloud Function in the location specified in the body.
:param location: The location of the function.
:type location: str
:param body: The body required by the Cloud Functions insert API.
:type body: dict
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:type project_id: str
:return: None
"""
# fmt: off
response = self.get_conn().projects().locations().functions().create( # pylint: disable=no-member
location=self._full_location(project_id, location),
body=body
).execute(num_retries=self.num_retries)
# fmt: on
operation_name = response["name"]
self._wait_for_operation_to_complete(operation_name=operation_name)
def update_function(self, name: str, body: dict, update_mask: List[str]) -> None:
"""
Updates Cloud Functions according to the specified update mask.
:param name: The name of the function.
:type name: str
:param body: The body required by the cloud function patch API.
:type body: dict
:param update_mask: The update mask - array of fields that should be patched.
:type update_mask: [str]
:return: None
"""
# fmt: off
response = self.get_conn().projects().locations().functions().patch( # pylint: disable=no-member
updateMask=",".join(update_mask),
name=name,
body=body
).execute(num_retries=self.num_retries)
# fmt: on
operation_name = response["name"]
self._wait_for_operation_to_complete(operation_name=operation_name)
@GoogleBaseHook.fallback_to_default_project_id
def upload_function_zip(self, location: str, zip_path: str, project_id: str) -> str:
"""
Uploads zip file with sources.
:param location: The location where the function is created.
:type location: str
:param zip_path: The path of the valid .zip file to upload.
:type zip_path: str
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:type project_id: str
:return: The upload URL that was returned by generateUploadUrl method.
:rtype: str
"""
# fmt: off
# pylint: disable=no-member # noqa
response = \
self.get_conn().projects().locations().functions().generateUploadUrl(
parent=self._full_location(project_id, location)
).execute(num_retries=self.num_retries)
# fmt: on
upload_url = response.get('uploadUrl')
with open(zip_path, 'rb') as file:
requests.put(
url=upload_url,
data=file,
# Those two headers needs to be specified according to:
# https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl
# nopep8
headers={
'Content-type': 'application/zip',
'x-goog-content-length-range': '0,104857600',
},
)
return upload_url
def delete_function(self, name: str) -> None:
"""
Deletes the specified Cloud Function.
:param name: The name of the function.
:type name: str
:return: None
"""
# fmt: off
response = self.get_conn().projects().locations().functions().delete( # pylint: disable=no-member
name=name).execute(num_retries=self.num_retries)
# fmt: on
operation_name = response["name"]
self._wait_for_operation_to_complete(operation_name=operation_name)
@GoogleBaseHook.fallback_to_default_project_id
def call_function(
self,
function_id: str,
input_data: Dict,
location: str,
project_id: str,
) -> dict:
"""
Synchronously invokes a deployed Cloud Function. To be used for testing
purposes as very limited traffic is allowed.
:param function_id: ID of the function to be called
:type function_id: str
:param input_data: Input to be passed to the function
:type input_data: Dict
:param location: The location where the function is located.
:type location: str
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:type project_id: str
        :return: The response returned by the function invocation.
"""
name = f"projects/{project_id}/locations/{location}/functions/{function_id}"
# fmt: off
response = self.get_conn().projects().locations().functions().call( # pylint: disable=no-member
name=name,
body=input_data
).execute(num_retries=self.num_retries)
# fmt: on
if 'error' in response:
raise AirflowException(response['error'])
return response
def _wait_for_operation_to_complete(self, operation_name: str) -> dict:
"""
Waits for the named operation to complete - checks status of the
asynchronous call.
:param operation_name: The name of the operation.
:type operation_name: str
:return: The response returned by the operation.
:rtype: dict
:exception: AirflowException in case error is returned.
"""
service = self.get_conn()
while True:
# fmt: off
operation_response = service.operations().get( # pylint: disable=no-member
name=operation_name,
).execute(num_retries=self.num_retries)
# fmt: on
if operation_response.get("done"):
response = operation_response.get("response")
error = operation_response.get("error")
# Note, according to documentation always either response or error is
# set when "done" == True
if error:
raise AirflowException(str(error))
return response
time.sleep(TIME_TO_SLEEP_IN_SECONDS)
| apache-2.0 | -1,905,960,405,674,623,000 | 38.0625 | 122 | 0.6187 | false |
rorasa/KeeTerm | Crypto/SelfTest/Random/Fortuna/__init__.py | 118 | 1825 | # -*- coding: utf-8 -*-
#
# SelfTest/Random/Fortuna/__init__.py: Self-test for Fortuna modules
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test for the Crypto.Random.Fortuna package"""
__revision__ = "$Id$"
import os
def get_tests(config={}):
tests = []
from Crypto.SelfTest.Random.Fortuna import test_FortunaAccumulator; tests += test_FortunaAccumulator.get_tests(config=config)
from Crypto.SelfTest.Random.Fortuna import test_FortunaGenerator; tests += test_FortunaGenerator.get_tests(config=config)
from Crypto.SelfTest.Random.Fortuna import test_SHAd256; tests += test_SHAd256.get_tests(config=config)
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| mit | 4,642,142,392,486,135,000 | 40.477273 | 129 | 0.682192 | false |
argonemyth/sentry | src/sentry/migrations/0168_unfill_projectkey_user.py | 34 | 36774 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
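        # Data-only migration: clear any user associations on project keys
        # (the reverse of the earlier "fill" step, hence "unfill"); there is
        # no schema change, so backwards() is a no-op.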
ProjectKey = orm['sentry.ProjectKey']
ProjectKey.objects.filter(
user__isnull=False,
).update(user=None)
def backwards(self, orm):
pass
models = {
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'audit_actors'", 'to': "orm['sentry.User']"}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'badge': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group', 'datetime'),)"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'storage': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'storage_options': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.helppage': {
'Meta': {'object_name': 'HelpPage'},
'content': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True'}),
'priority': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.release': {
'Meta': {'unique_together': "(('project', 'version'),)", 'object_name': 'Release'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
}
}
complete_apps = ['sentry']
symmetrical = True
| bsd-3-clause | -5,341,465,913,821,492,000 | 87.399038 | 223 | 0.571382 | false |
jakubtyniecki/pact | tests/test_merge_rec.py | 1 | 1117 |
""" merge rec sort tests module """
import unittest
import random
from sort import merge_rec
from tests import helper
class MergeSortTests(unittest.TestCase):
""" merge rec sort unit tests class """
max = 100
arr = []
def setUp(self):
""" setting up for the test """
self.arr = random.sample(range(self.max), self.max)
def test_null_input(self):
""" should raise when input array is None """
# arrange
inp = None
# act
with self.assertRaises(TypeError) as ex:
merge_rec.sort(inp)
# assert
self.assertEqual("'NoneType' object is not iterable", str(ex.exception))
def test_empty_input(self):
""" should return [] when input array is empty """
# arrange
inp = []
# act
res = merge_rec.sort(inp)
# assert
self.assertEqual(len(inp), len(res))
def test_sort_a_given_array(self):
""" should sort a given array """
# act
res = merge_rec.sort(self.arr[:])
# assert
self.assertTrue(helper.is_sorted(res))
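    # For context, a sketch of the is_sorted helper imported from tests.helper
    # (an assumption -- the real implementation may differ):
    #
    #   def is_sorted(arr):
    #       return all(arr[i] <= arr[i + 1] for i in range(len(arr) - 1))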
| mit | -4,817,606,431,334,600,000 | 21.795918 | 80 | 0.569382 | false |
git-cola/git-cola | cola/widgets/log.py | 3 | 2617 | from __future__ import absolute_import, division, unicode_literals
import time
from qtpy import QtGui
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from qtpy.QtCore import Signal
from .. import core
from .. import qtutils
from ..i18n import N_
from . import defs
from .text import VimTextEdit
class LogWidget(QtWidgets.QFrame):
"""A simple dialog to display command logs."""
channel = Signal(object)
def __init__(self, context, parent=None, output=None):
QtWidgets.QFrame.__init__(self, parent)
self.output_text = VimTextEdit(context, parent=self)
self.highlighter = LogSyntaxHighlighter(self.output_text.document())
if output:
self.set_output(output)
self.main_layout = qtutils.vbox(defs.no_margin, defs.spacing, self.output_text)
self.setLayout(self.main_layout)
self.channel.connect(self.append, type=Qt.QueuedConnection)
def clear(self):
self.output_text.clear()
def set_output(self, output):
self.output_text.set_value(output)
def log_status(self, status, out, err=None):
msg = []
if out:
msg.append(out)
if err:
msg.append(err)
if status:
msg.append(N_('exit code %s') % status)
self.log('\n'.join(msg))
def append(self, msg):
"""Append to the end of the log message"""
if not msg:
return
msg = core.decode(msg)
cursor = self.output_text.textCursor()
cursor.movePosition(cursor.End)
text = self.output_text
        # NOTE: the ': ' colon-SP suffix is what the syntax highlighter keys on
prefix = core.decode(time.strftime('%Y-%m-%d %H:%M:%S: ')) # ISO-8601
for line in msg.split('\n'):
cursor.insertText(prefix + line + '\n')
cursor.movePosition(cursor.End)
text.setTextCursor(cursor)
def log(self, msg):
"""Add output to the log window"""
# Funnel through a Qt queued to allow thread-safe logging from
# asynchronous QRunnables, filesystem notification, etc.
self.channel.emit(msg)
class LogSyntaxHighlighter(QtGui.QSyntaxHighlighter):
"""Implements the log syntax highlighting"""
def __init__(self, doc):
QtGui.QSyntaxHighlighter.__init__(self, doc)
palette = QtGui.QPalette()
QPalette = QtGui.QPalette
self.disabled_color = palette.color(QPalette.Disabled, QPalette.Text)
def highlightBlock(self, text):
end = text.find(': ')
if end > 0:
self.setFormat(0, end + 1, self.disabled_color)
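            # e.g. for "2021-01-01 10:20:30: git fetch" this dims everything up
            # to and including the first ': ', i.e. the timestamp prefix that
            # append() adds above.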
| gpl-2.0 | 2,925,479,753,435,552,300 | 30.914634 | 87 | 0.625908 | false |
hassoon3/odoo | addons/l10n_us/__openerp__.py | 341 | 1763 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'United States - Chart of accounts',
'version': '1.1',
'author': 'OpenERP SA',
'category': 'Localization/Account Charts',
'description': """
United States - Chart of accounts.
==================================
""",
'website': 'http://www.openerp.com',
'depends': ['account_chart', 'account_anglo_saxon'],
'data': [
'l10n_us_account_type.xml',
'account_chart_template.xml',
'account.account.template.csv',
'account_tax_code_template.xml',
'account_tax_template.xml',
'account_chart_template_after.xml',
'l10n_us_wizard.xml'
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -1,198,873,749,042,053,600 | 37.326087 | 78 | 0.581395 | false |
ResByte/graph_slam | scripts/robot.py | 1 | 1487 | #!/usr/bin/env python
import roslib
import rospy
import sys
from geometry_msgs.msg import Twist
import numpy as np
from nav_msgs.msg import Odometry
from tf.transformations import euler_from_quaternion
import matplotlib.pyplot as plt
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
import itertools
class Robot():
"""This is a generic robot class to implement various machine learning algorithms and """
def __init__(self):
self.pose = [] #check if this needs to be initialized
rospy.init_node('robot',anonymous = False)
def odomCb(self,msg):
#print msg.pose.pose
quaternion = (msg.pose.pose.orientation.x,msg.pose.pose.orientation.y,msg.pose.pose.orientation.z,msg.pose.pose.orientation.w)
euler = euler_from_quaternion(quaternion)
#print euler
self.pose = [msg.pose.pose.position.x,msg.pose.pose.position.y,euler[2]]
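        # pose is stored as [x, y, yaw]; euler[2] is the heading about the z-axis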
print self.pose[0],self.pose[1],self.pose[2]
def odomSub(self):
rospy.Subscriber('/odom',Odometry,self.odomCb)
    def cloudCb(self, data):
        # PointCloud2 messages cannot be handed to np.asarray directly;
        # unpack the (x, y, z) fields first, skipping NaN points.
        data_out = pc2.read_points(data, field_names=('x', 'y', 'z'), skip_nans=True)
        cloud = np.asarray(list(itertools.islice(data_out, 0, 100)))
        print cloud.shape
def cloudSub(self):
rospy.Subscriber('/camera/depth/points',PointCloud2,self.cloudCb)
if __name__ == '__main__':
print "init_node"
try:
        robot = Robot()
        #robot.odomSub()
        robot.cloudSub()  # register the subscriber once, not inside a loop
        rospy.spin()  # rospy dispatches callbacks until shutdown
    except rospy.ROSInterruptException:
rospy.loginfo("node terminated.") | gpl-2.0 | -2,485,843,108,175,711,000 | 30.659574 | 128 | 0.729657 | false |
erinspace/osf.io | tests/test_utils.py | 1 | 18008 | # -*- coding: utf-8 -*-
import datetime
import mock
import os
import pytest
import time
import unittest
from django.utils import timezone
from flask import Flask
from nose.tools import * # noqa (PEP8 asserts)
import blinker
from tests.base import OsfTestCase, DbTestCase
from osf_tests.factories import RegistrationFactory, UserFactory, fake_email
from framework.auth.utils import generate_csl_given_name
from framework.routing import Rule, json_renderer
from framework.utils import secure_filename, throttle_period_expired
from api.base.utils import waterbutler_api_url_for
from osf.utils.functional import rapply
from website.routes import process_rules, OsfWebRenderer
from website import settings
from website.util import paths
from website.util import web_url_for, api_url_for, is_json_request, conjunct, api_v2_url
from website.project import utils as project_utils
from website.profile import utils as profile_utils
try:
import magic # noqa
LIBMAGIC_AVAILABLE = True
except ImportError:
LIBMAGIC_AVAILABLE = False
HERE = os.path.dirname(os.path.abspath(__file__))
class TestTimeUtils(unittest.TestCase):
def test_throttle_period_expired_no_timestamp(self):
is_expired = throttle_period_expired(timestamp=None, throttle=30)
assert_true(is_expired)
def test_throttle_period_expired_using_datetime(self):
timestamp = timezone.now()
is_expired = throttle_period_expired(timestamp=(timestamp + datetime.timedelta(seconds=29)), throttle=30)
assert_false(is_expired)
is_expired = throttle_period_expired(timestamp=(timestamp - datetime.timedelta(seconds=31)), throttle=30)
assert_true(is_expired)
def test_throttle_period_expired_using_timestamp_in_seconds(self):
timestamp = int(time.time())
is_expired = throttle_period_expired(timestamp=(timestamp + 29), throttle=30)
assert_false(is_expired)
is_expired = throttle_period_expired(timestamp=(timestamp - 31), throttle=30)
assert_true(is_expired)
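# For reference, a minimal sketch of the helper exercised above; the real
# implementation lives in framework.utils, so the details here are assumptions:
#
#   def throttle_period_expired(timestamp, throttle):
#       if not timestamp:
#           return True
#       if isinstance(timestamp, datetime.datetime):
#           return (timezone.now() - timestamp).total_seconds() > throttle
#       return (int(time.time()) - timestamp) > throttle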
class TestUrlForHelpers(unittest.TestCase):
def setUp(self):
def dummy_view():
return {}
def dummy_guid_project_view():
return {}
def dummy_guid_profile_view():
return {}
self.app = Flask(__name__)
api_rule = Rule([
'/api/v1/<pid>/',
'/api/v1/<pid>/component/<nid>/'
], 'get', dummy_view, json_renderer)
web_rule = Rule([
'/<pid>/',
'/<pid>/component/<nid>/'
], 'get', dummy_view, OsfWebRenderer)
web_guid_project_rule = Rule([
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
], 'get', dummy_guid_project_view, OsfWebRenderer)
web_guid_profile_rule = Rule([
'/profile/<pid>/',
], 'get', dummy_guid_profile_view, OsfWebRenderer)
process_rules(self.app, [api_rule, web_rule, web_guid_project_rule, web_guid_profile_rule])
def test_api_url_for(self):
with self.app.test_request_context():
assert api_url_for('dummy_view', pid='123') == '/api/v1/123/'
def test_api_v2_url_with_port(self):
full_url = api_v2_url('/nodes/abcd3/contributors/',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, 'http://localhost:8000/v2/nodes/abcd3/contributors/')
# Handles URL the same way whether or not user enters a leading slash
full_url = api_v2_url('nodes/abcd3/contributors/',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, 'http://localhost:8000/v2/nodes/abcd3/contributors/')
def test_api_v2_url_with_params(self):
"""Handles- and encodes- URLs with parameters (dict and kwarg) correctly"""
full_url = api_v2_url('/nodes/abcd3/contributors/',
params={'filter[fullname]': 'bob'},
base_route='https://api.osf.io/',
base_prefix='v2/',
page_size=10)
assert_equal(full_url, 'https://api.osf.io/v2/nodes/abcd3/contributors/?filter%5Bfullname%5D=bob&page_size=10')
def test_api_v2_url_base_path(self):
"""Given a blank string, should return the base path (domain + port + prefix) with no extra cruft at end"""
full_url = api_v2_url('',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, 'http://localhost:8000/v2/')
def test_web_url_for(self):
with self.app.test_request_context():
assert web_url_for('dummy_view', pid='123') == '/123/'
def test_web_url_for_guid(self):
with self.app.test_request_context():
# check /project/<pid>
assert_equal('/pid123/', web_url_for('dummy_guid_project_view', pid='pid123', _guid=True))
assert_equal('/project/pid123/', web_url_for('dummy_guid_project_view', pid='pid123', _guid=False))
assert_equal('/project/pid123/', web_url_for('dummy_guid_project_view', pid='pid123'))
# check /project/<pid>/node/<nid>
assert_equal('/nid321/', web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321', _guid=True))
assert_equal(
'/project/pid123/node/nid321/',
web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321', _guid=False))
assert_equal(
'/project/pid123/node/nid321/',
web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321'))
# check /profile/<pid>
assert_equal('/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123', _guid=True))
assert_equal('/profile/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123', _guid=False))
assert_equal('/profile/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123'))
def test_web_url_for_guid_regex_conditions(self):
with self.app.test_request_context():
# regex matches limit keys to a minimum of 5 alphanumeric characters.
# check /project/<pid>
assert_not_equal('/123/', web_url_for('dummy_guid_project_view', pid='123', _guid=True))
assert_equal('/123456/', web_url_for('dummy_guid_project_view', pid='123456', _guid=True))
# check /project/<pid>/node/<nid>
assert_not_equal('/321/', web_url_for('dummy_guid_project_view', pid='123', nid='321', _guid=True))
assert_equal('/654321/', web_url_for('dummy_guid_project_view', pid='123456', nid='654321', _guid=True))
# check /profile/<pid>
assert_not_equal('/123/', web_url_for('dummy_guid_profile_view', pid='123', _guid=True))
assert_equal('/123456/', web_url_for('dummy_guid_profile_view', pid='123456', _guid=True))
def test_web_url_for_guid_case_sensitive(self):
with self.app.test_request_context():
# check /project/<pid>
assert_equal('/ABCdef/', web_url_for('dummy_guid_project_view', pid='ABCdef', _guid=True))
# check /project/<pid>/node/<nid>
assert_equal('/GHIjkl/', web_url_for('dummy_guid_project_view', pid='ABCdef', nid='GHIjkl', _guid=True))
# check /profile/<pid>
assert_equal('/MNOpqr/', web_url_for('dummy_guid_profile_view', pid='MNOpqr', _guid=True))
def test_web_url_for_guid_invalid_unicode(self):
with self.app.test_request_context():
            # unicode ids are not supported when encoding guid urls.
# check /project/<pid>
assert_not_equal('/ø∆≤µ©/', web_url_for('dummy_guid_project_view', pid='ø∆≤µ©', _guid=True))
assert_equal(
'/project/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/',
web_url_for('dummy_guid_project_view', pid='øˆ∆≤µˆ', _guid=True))
# check /project/<pid>/node/<nid>
assert_not_equal(
'/ø∆≤µ©/',
web_url_for('dummy_guid_project_view', pid='ø∆≤µ©', nid='©µ≤∆ø', _guid=True))
assert_equal(
'/project/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/node/%C2%A9%C2%B5%E2%89%A4%E2%88%86%C3%B8/',
web_url_for('dummy_guid_project_view', pid='øˆ∆≤µˆ', nid='©µ≤∆ø', _guid=True))
# check /profile/<pid>
assert_not_equal('/ø∆≤µ©/', web_url_for('dummy_guid_profile_view', pid='ø∆≤µ©', _guid=True))
assert_equal(
'/profile/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/',
web_url_for('dummy_guid_profile_view', pid='øˆ∆≤µˆ', _guid=True))
def test_api_url_for_with_multiple_urls(self):
with self.app.test_request_context():
url = api_url_for('dummy_view', pid='123', nid='abc')
assert url == '/api/v1/123/component/abc/'
def test_web_url_for_with_multiple_urls(self):
with self.app.test_request_context():
url = web_url_for('dummy_view', pid='123', nid='abc')
assert url == '/123/component/abc/'
def test_is_json_request(self):
with self.app.test_request_context(content_type='application/json'):
assert_true(is_json_request())
with self.app.test_request_context(content_type=None):
assert_false(is_json_request())
with self.app.test_request_context(content_type='application/json;charset=UTF-8'):
assert_true(is_json_request())
def test_waterbutler_api_url_for(self):
with self.app.test_request_context():
url = waterbutler_api_url_for('fakeid', 'provider', '/path')
assert_in('/fakeid/', url)
assert_in('/path', url)
assert_in('/providers/provider/', url)
assert_in(settings.WATERBUTLER_URL, url)
def test_waterbutler_api_url_for_internal(self):
settings.WATERBUTLER_INTERNAL_URL = 'http://1.2.3.4:7777'
with self.app.test_request_context():
url = waterbutler_api_url_for('fakeid', 'provider', '/path', _internal=True)
assert_not_in(settings.WATERBUTLER_URL, url)
assert_in(settings.WATERBUTLER_INTERNAL_URL, url)
assert_in('/fakeid/', url)
assert_in('/path', url)
assert_in('/providers/provider', url)
class TestFrameworkUtils(unittest.TestCase):
def test_leading_underscores(self):
assert_equal(
'__init__.py',
secure_filename('__init__.py')
)
def test_werkzeug_cases(self):
"""Test that Werkzeug's tests still pass for our wrapped version"""
# Copied from Werkzeug
# BSD licensed - original at github.com/mitsuhiko/werkzeug,
# /tests/test_utils.py, line 282, commit 811b438
assert_equal(
'My_cool_movie.mov',
secure_filename('My cool movie.mov')
)
assert_equal(
'etc_passwd',
secure_filename('../../../etc/passwd')
)
assert_equal(
'i_contain_cool_umlauts.txt',
secure_filename(u'i contain cool \xfcml\xe4uts.txt')
)
class TestWebpackFilter(unittest.TestCase):
def setUp(self):
self.asset_paths = {'assets': 'assets.07123e.js'}
def test_resolve_asset(self):
asset = paths.webpack_asset('assets.js', self.asset_paths, debug=False)
assert_equal(asset, '/static/public/js/assets.07123e.js')
def test_resolve_asset_not_found_and_not_in_debug_mode(self):
with assert_raises(KeyError):
paths.webpack_asset('bundle.js', self.asset_paths, debug=False)
class TestWebsiteUtils(unittest.TestCase):
def test_conjunct(self):
words = []
assert_equal(conjunct(words), '')
words = ['a']
assert_equal(conjunct(words), 'a')
words = ['a', 'b']
assert_equal(conjunct(words), 'a and b')
words = ['a', 'b', 'c']
assert_equal(conjunct(words), 'a, b, and c')
assert_equal(conjunct(words, conj='or'), 'a, b, or c')
def test_rapply(self):
inputs = {
'foo': 'bar',
'baz': {
'boom': ['kapow'],
'bang': 'bam'
},
'bat': ['man']
}
outputs = rapply(inputs, str.upper)
assert_equal(outputs['foo'], 'bar'.upper())
assert_equal(outputs['baz']['boom'], ['kapow'.upper()])
assert_equal(outputs['baz']['bang'], 'bam'.upper())
assert_equal(outputs['bat'], ['man'.upper()])
r_assert = lambda s: assert_equal(s.upper(), s)
rapply(outputs, r_assert)
def test_rapply_on_list(self):
inputs = range(5)
add_one = lambda n: n + 1
outputs = rapply(inputs, add_one)
for i in inputs:
assert_equal(outputs[i], i + 1)
def test_rapply_on_tuple(self):
inputs = tuple(i for i in range(5))
add_one = lambda n: n + 1
outputs = rapply(inputs, add_one)
for i in inputs:
assert_equal(outputs[i], i + 1)
assert_equal(type(outputs), tuple)
def test_rapply_on_set(self):
inputs = set(i for i in range(5))
add_one = lambda n: n + 1
outputs = rapply(inputs, add_one)
for i in inputs:
assert_in(i + 1, outputs)
assert_true(isinstance(outputs, set))
def test_rapply_on_str(self):
input = 'bob'
convert = lambda s: s.upper()
outputs = rapply(input, convert)
assert_equal('BOB', outputs)
assert_true(isinstance(outputs, basestring))
def test_rapply_preserves_args_and_kwargs(self):
def zero_if_not_check(item, check, checkFn=lambda n: n):
if check and checkFn(item):
return item
return 0
inputs = range(5)
outputs = rapply(inputs, zero_if_not_check, True, checkFn=lambda n: n % 2)
assert_equal(outputs, [0, 1, 0, 3, 0])
outputs = rapply(inputs, zero_if_not_check, False, checkFn=lambda n: n % 2)
assert_equal(outputs, [0, 0, 0, 0, 0])
class TestProjectUtils(OsfTestCase):
def set_registered_date(self, reg, date):
reg.registered_date = date
reg.save()
def test_get_recent_public_registrations(self):
count = 0
for i in range(5):
reg = RegistrationFactory()
reg.is_public = True
count = count + 1
tdiff = timezone.now() - datetime.timedelta(days=count)
self.set_registered_date(reg, tdiff)
regs = [r for r in project_utils.recent_public_registrations()]
assert_equal(len(regs), 5)
for i in range(4):
assert_true(regs[i].registered_date > regs[i + 1].registered_date)
for i in range(5):
reg = RegistrationFactory()
reg.is_public = True
count = count + 1
tdiff = timezone.now() - datetime.timedelta(days=count)
self.set_registered_date(reg, tdiff)
regs = [r for r in project_utils.recent_public_registrations(7)]
assert_equal(len(regs), 7)
class TestProfileUtils(DbTestCase):
def setUp(self):
self.user = UserFactory()
def test_get_other_user_profile_image_default_size(self):
profile_image = profile_utils.get_profile_image_url(self.user)
assert_true(profile_image)
def test_get_other_user_profile_image(self):
profile_image = profile_utils.get_profile_image_url(self.user, size=25)
assert_true(profile_image)
class TestSignalUtils(unittest.TestCase):
def setUp(self):
self.signals = blinker.Namespace()
self.signal_ = self.signals.signal('signal-')
self.mock_listener = mock.MagicMock()
def listener(self, signal):
self.mock_listener()
def test_signal(self):
self.signal_.connect(self.listener)
self.signal_.send()
assert_true(self.mock_listener.called)
class TestUserUtils(unittest.TestCase):
def test_generate_csl_given_name_with_given_middle_suffix(self):
given_name = 'Cause'
middle_names = 'Awesome'
suffix = 'Jr.'
csl_given_name = generate_csl_given_name(
given_name=given_name, middle_names=middle_names, suffix=suffix
)
assert_equal(csl_given_name, 'Cause A, Jr.')
def test_generate_csl_given_name_with_given_middle(self):
given_name = 'Cause'
middle_names = 'Awesome'
csl_given_name = generate_csl_given_name(
given_name=given_name, middle_names=middle_names
)
assert_equal(csl_given_name, 'Cause A')
def test_generate_csl_given_name_with_given_suffix(self):
given_name = 'Cause'
suffix = 'Jr.'
csl_given_name = generate_csl_given_name(
given_name=given_name, suffix=suffix
)
assert_equal(csl_given_name, 'Cause, Jr.')
def test_generate_csl_given_name_with_given(self):
given_name = 'Cause'
csl_given_name = generate_csl_given_name(given_name)
assert_equal(csl_given_name, 'Cause')
@pytest.mark.django_db
class TestUserFactoryConflict:
def test_build_create_user_time_conflict(self):
        # Test that the build and create user factories do not produce
        # conflicting usernames when invoked in quick succession
user_email_one = fake_email()
user_email_two = fake_email()
assert user_email_one != user_email_two
user_one_build = UserFactory.build()
user_two_build = UserFactory.build()
assert user_one_build.username != user_two_build.username
user_one_create = UserFactory()
user_two_create = UserFactory()
assert user_one_create.username != user_two_create.username
| apache-2.0 | -7,115,528,908,618,472,000 | 38.751663 | 119 | 0.593429 | false |
aerophile/django | tests/view_tests/tests/test_specials.py | 66 | 1428 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, override_settings
@override_settings(ROOT_URLCONF='view_tests.generic_urls')
class URLHandling(TestCase):
"""
Tests for URL handling in views and responses.
"""
redirect_target = "/%E4%B8%AD%E6%96%87/target/"
def test_combining_redirect(self):
"""
Tests that redirecting to an IRI, requiring encoding before we use it
in an HTTP response, is handled correctly. In this case the arg to
HttpRedirect is ASCII but the current request path contains non-ASCII
characters so this test ensures the creation of the full path with a
base non-ASCII part is handled correctly.
"""
response = self.client.get('/中文/')
self.assertRedirects(response, self.redirect_target)
def test_nonascii_redirect(self):
"""
Tests that a non-ASCII argument to HttpRedirect is handled properly.
"""
response = self.client.get('/nonascii_redirect/')
self.assertRedirects(response, self.redirect_target)
def test_permanent_nonascii_redirect(self):
"""
Tests that a non-ASCII argument to HttpPermanentRedirect is handled
properly.
"""
response = self.client.get('/permanent_nonascii_redirect/')
self.assertRedirects(response, self.redirect_target, status_code=301)
| bsd-3-clause | 5,085,993,936,269,425,000 | 36.473684 | 77 | 0.668539 | false |
lochiiconnectivity/boto | boto/ec2/autoscale/__init__.py | 2 | 33259 | # Copyright (c) 2009-2011 Reza Lotun http://reza.lotun.name/
# Copyright (c) 2011 Jann Kleen
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
This module provides an interface to the Elastic Compute Cloud (EC2)
Auto Scaling service.
"""
import base64
import boto
from boto.connection import AWSQueryConnection
from boto.ec2.regioninfo import RegionInfo
from boto.ec2.autoscale.request import Request
from boto.ec2.autoscale.launchconfig import LaunchConfiguration
from boto.ec2.autoscale.group import AutoScalingGroup
from boto.ec2.autoscale.group import ProcessType
from boto.ec2.autoscale.activity import Activity
from boto.ec2.autoscale.policy import AdjustmentType
from boto.ec2.autoscale.policy import MetricCollectionTypes
from boto.ec2.autoscale.policy import ScalingPolicy
from boto.ec2.autoscale.policy import TerminationPolicies
from boto.ec2.autoscale.instance import Instance
from boto.ec2.autoscale.scheduled import ScheduledUpdateGroupAction
from boto.ec2.autoscale.tag import Tag
RegionData = {
'us-east-1': 'autoscaling.us-east-1.amazonaws.com',
'us-west-1': 'autoscaling.us-west-1.amazonaws.com',
'us-west-2': 'autoscaling.us-west-2.amazonaws.com',
'sa-east-1': 'autoscaling.sa-east-1.amazonaws.com',
'eu-west-1': 'autoscaling.eu-west-1.amazonaws.com',
'ap-northeast-1': 'autoscaling.ap-northeast-1.amazonaws.com',
'ap-southeast-1': 'autoscaling.ap-southeast-1.amazonaws.com',
'ap-southeast-2': 'autoscaling.ap-southeast-2.amazonaws.com',
}
def regions():
"""
Get all available regions for the Auto Scaling service.
:rtype: list
:return: A list of :class:`boto.RegionInfo` instances
"""
regions = []
for region_name in RegionData:
region = RegionInfo(name=region_name,
endpoint=RegionData[region_name],
connection_cls=AutoScaleConnection)
regions.append(region)
return regions
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.ec2.autoscale.AutoScaleConnection`.
:param str region_name: The name of the region to connect to.
:rtype: :class:`boto.ec2.AutoScaleConnection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
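# Minimal usage sketch (assumes AWS credentials are available through the
# usual boto configuration or environment variables):
#
#   import boto.ec2.autoscale
#   conn = boto.ec2.autoscale.connect_to_region('us-east-1')
#   groups = conn.get_all_groups()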
class AutoScaleConnection(AWSQueryConnection):
APIVersion = boto.config.get('Boto', 'autoscale_version', '2011-01-01')
DefaultRegionEndpoint = boto.config.get('Boto', 'autoscale_endpoint',
'autoscaling.us-east-1.amazonaws.com')
DefaultRegionName = boto.config.get('Boto', 'autoscale_region_name',
'us-east-1')
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
security_token=None, validate_certs=True):
"""
Init method to create a new connection to the AutoScaling service.
B{Note:} The host argument is overridden by the host specified in the
boto configuration file.
"""
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint,
AutoScaleConnection)
self.region = region
AWSQueryConnection.__init__(self, aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass,
self.region.endpoint, debug,
https_connection_factory, path=path,
security_token=security_token,
validate_certs=validate_certs)
def _required_auth_capability(self):
return ['hmac-v4']
def build_list_params(self, params, items, label):
"""
Items is a list of dictionaries or strings::
[
{
'Protocol' : 'HTTP',
'LoadBalancerPort' : '80',
'InstancePort' : '80'
},
..
] etc.
or::
['us-east-1b',...]
"""
# different from EC2 list params
for i in xrange(1, len(items) + 1):
if isinstance(items[i - 1], dict):
for k, v in items[i - 1].iteritems():
if isinstance(v, dict):
for kk, vv in v.iteritems():
params['%s.member.%d.%s.%s' % (label, i, k, kk)] = vv
else:
params['%s.member.%d.%s' % (label, i, k)] = v
elif isinstance(items[i - 1], basestring):
params['%s.member.%d' % (label, i)] = items[i - 1]
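        # For illustration: items=[{'Protocol': 'HTTP', 'LoadBalancerPort': '80'}]
        # with label='Listeners' yields
        #   params['Listeners.member.1.Protocol'] = 'HTTP'
        #   params['Listeners.member.1.LoadBalancerPort'] = '80'
        # while items=['us-east-1b'] with label='AvailabilityZones' yields
        #   params['AvailabilityZones.member.1'] = 'us-east-1b'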
def _update_group(self, op, as_group):
params = {'AutoScalingGroupName': as_group.name,
'LaunchConfigurationName': as_group.launch_config_name,
'MinSize': as_group.min_size,
'MaxSize': as_group.max_size}
# get availability zone information (required param)
zones = as_group.availability_zones
self.build_list_params(params, zones, 'AvailabilityZones')
if as_group.desired_capacity:
params['DesiredCapacity'] = as_group.desired_capacity
if as_group.vpc_zone_identifier:
params['VPCZoneIdentifier'] = as_group.vpc_zone_identifier
if as_group.health_check_period:
params['HealthCheckGracePeriod'] = as_group.health_check_period
if as_group.health_check_type:
params['HealthCheckType'] = as_group.health_check_type
if as_group.default_cooldown:
params['DefaultCooldown'] = as_group.default_cooldown
if as_group.placement_group:
params['PlacementGroup'] = as_group.placement_group
if as_group.termination_policies:
self.build_list_params(params, as_group.termination_policies,
'TerminationPolicies')
if op.startswith('Create'):
# you can only associate load balancers with an autoscale
# group at creation time
if as_group.load_balancers:
self.build_list_params(params, as_group.load_balancers,
'LoadBalancerNames')
if as_group.tags:
for i, tag in enumerate(as_group.tags):
tag.build_params(params, i + 1)
return self.get_object(op, params, Request)
def create_auto_scaling_group(self, as_group):
"""
Create auto scaling group.
"""
return self._update_group('CreateAutoScalingGroup', as_group)
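        # Usage sketch (illustrative values; assumes an existing launch
        # configuration object ``lc``):
        #
        #   ag = AutoScalingGroup(name='my-group',
        #                         availability_zones=['us-east-1b'],
        #                         launch_config=lc, min_size=1, max_size=4)
        #   conn.create_auto_scaling_group(ag)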
def delete_auto_scaling_group(self, name, force_delete=False):
"""
Deletes the specified auto scaling group if the group has no instances
and no scaling activities in progress.
"""
        if force_delete:
params = {'AutoScalingGroupName': name, 'ForceDelete': 'true'}
else:
params = {'AutoScalingGroupName': name}
return self.get_object('DeleteAutoScalingGroup', params, Request)
def create_launch_configuration(self, launch_config):
"""
Creates a new Launch Configuration.
:type launch_config: :class:`boto.ec2.autoscale.launchconfig.LaunchConfiguration`
:param launch_config: LaunchConfiguration object.
"""
params = {'ImageId': launch_config.image_id,
'LaunchConfigurationName': launch_config.name,
'InstanceType': launch_config.instance_type}
if launch_config.key_name:
params['KeyName'] = launch_config.key_name
if launch_config.user_data:
params['UserData'] = base64.b64encode(launch_config.user_data)
if launch_config.kernel_id:
params['KernelId'] = launch_config.kernel_id
if launch_config.ramdisk_id:
params['RamdiskId'] = launch_config.ramdisk_id
if launch_config.block_device_mappings:
self.build_list_params(params, launch_config.block_device_mappings,
'BlockDeviceMappings')
if launch_config.security_groups:
self.build_list_params(params, launch_config.security_groups,
'SecurityGroups')
if launch_config.instance_monitoring:
params['InstanceMonitoring.Enabled'] = 'true'
else:
params['InstanceMonitoring.Enabled'] = 'false'
if launch_config.spot_price is not None:
params['SpotPrice'] = str(launch_config.spot_price)
if launch_config.instance_profile_name is not None:
params['IamInstanceProfile'] = launch_config.instance_profile_name
if launch_config.ebs_optimized:
params['EbsOptimized'] = 'true'
else:
params['EbsOptimized'] = 'false'
return self.get_object('CreateLaunchConfiguration', params,
Request, verb='POST')
def create_scaling_policy(self, scaling_policy):
"""
Creates a new Scaling Policy.
:type scaling_policy: :class:`boto.ec2.autoscale.policy.ScalingPolicy`
:param scaling_policy: ScalingPolicy object.
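
        A usage sketch (``conn`` is an assumed AutoScaleConnection; the
        group and policy names are made up)::

            policy = ScalingPolicy(name='scale-up',
                                   adjustment_type='ChangeInCapacity',
                                   as_name='my-asg',
                                   scaling_adjustment=2,
                                   cooldown=300)
            conn.create_scaling_policy(policy)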
"""
params = {'AdjustmentType': scaling_policy.adjustment_type,
'AutoScalingGroupName': scaling_policy.as_name,
'PolicyName': scaling_policy.name,
'ScalingAdjustment': scaling_policy.scaling_adjustment}
if scaling_policy.cooldown is not None:
params['Cooldown'] = scaling_policy.cooldown
return self.get_object('PutScalingPolicy', params, Request)
def delete_launch_configuration(self, launch_config_name):
"""
Deletes the specified LaunchConfiguration.
The specified launch configuration must not be attached to an Auto
Scaling group. Once this call completes, the launch configuration is no
longer available for use.
"""
params = {'LaunchConfigurationName': launch_config_name}
return self.get_object('DeleteLaunchConfiguration', params, Request)
def get_all_groups(self, names=None, max_records=None, next_token=None):
"""
Returns a full description of each Auto Scaling group in the given
list. This includes all Amazon EC2 instances that are members of the
group. If a list of names is not provided, the service returns the full
details of all Auto Scaling groups.
This action supports pagination by returning a token if there are more
pages to retrieve. To get the next page, call this action again with
the returned token as the NextToken parameter.
:type names: list
:param names: List of group names which should be searched for.
:type max_records: int
        :param max_records: Maximum number of groups to return.
:rtype: list
:returns: List of :class:`boto.ec2.autoscale.group.AutoScalingGroup`
instances.
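
        A pagination sketch (``conn`` is an assumed AutoScaleConnection;
        assumes the returned list exposes a ``next_token`` attribute, as
        boto result sets typically do)::

            groups = conn.get_all_groups(max_records=50)
            token = groups.next_token
            while token:
                page = conn.get_all_groups(max_records=50, next_token=token)
                groups.extend(page)
                token = page.next_token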
"""
params = {}
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
if names:
self.build_list_params(params, names, 'AutoScalingGroupNames')
return self.get_list('DescribeAutoScalingGroups', params,
[('member', AutoScalingGroup)])
def get_all_launch_configurations(self, **kwargs):
"""
Returns a full description of the launch configurations given the
specified names.
If no names are specified, then the full details of all launch
configurations are returned.
:type names: list
:param names: List of configuration names which should be searched for.
:type max_records: int
        :param max_records: Maximum number of configurations to return.
:type next_token: str
:param next_token: If you have more results than can be returned
at once, pass in this parameter to page through all results.
:rtype: list
:returns: List of
:class:`boto.ec2.autoscale.launchconfig.LaunchConfiguration`
instances.
"""
params = {}
max_records = kwargs.get('max_records', None)
names = kwargs.get('names', None)
if max_records is not None:
params['MaxRecords'] = max_records
if names:
self.build_list_params(params, names, 'LaunchConfigurationNames')
next_token = kwargs.get('next_token')
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeLaunchConfigurations', params,
[('member', LaunchConfiguration)])
def get_all_activities(self, autoscale_group, activity_ids=None,
max_records=None, next_token=None):
"""
Get all activities for the given autoscaling group.
This action supports pagination by returning a token if there are more
pages to retrieve. To get the next page, call this action again with
        the returned token as the NextToken parameter.
:type autoscale_group: str or
:class:`boto.ec2.autoscale.group.AutoScalingGroup` object
:param autoscale_group: The auto scaling group to get activities on.
:type max_records: int
        :param max_records: Maximum number of activities to return.
:rtype: list
:returns: List of
:class:`boto.ec2.autoscale.activity.Activity` instances.
"""
name = autoscale_group
if isinstance(autoscale_group, AutoScalingGroup):
name = autoscale_group.name
params = {'AutoScalingGroupName': name}
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
if activity_ids:
self.build_list_params(params, activity_ids, 'ActivityIds')
return self.get_list('DescribeScalingActivities',
params, [('member', Activity)])
def get_termination_policies(self):
"""Gets all valid termination policies.
These values can then be used as the termination_policies arg
when creating and updating autoscale groups.
"""
return self.get_object('DescribeTerminationPolicyTypes',
{}, TerminationPolicies)
def delete_scheduled_action(self, scheduled_action_name,
autoscale_group=None):
"""
Deletes a previously scheduled action.
:type scheduled_action_name: str
:param scheduled_action_name: The name of the action you want
to delete.
:type autoscale_group: str
:param autoscale_group: The name of the autoscale group.
"""
params = {'ScheduledActionName': scheduled_action_name}
if autoscale_group:
params['AutoScalingGroupName'] = autoscale_group
return self.get_status('DeleteScheduledAction', params)
def terminate_instance(self, instance_id, decrement_capacity=True):
"""
        Terminates the specified instance. The desired group size can
        also be decremented at the same time.
:type instance_id: str
:param instance_id: The ID of the instance to be terminated.
        :type decrement_capacity: bool
:param decrement_capacity: Whether to decrement the size of the
autoscaling group or not.
"""
params = {'InstanceId': instance_id}
if decrement_capacity:
params['ShouldDecrementDesiredCapacity'] = 'true'
else:
params['ShouldDecrementDesiredCapacity'] = 'false'
return self.get_object('TerminateInstanceInAutoScalingGroup', params,
Activity)
def delete_policy(self, policy_name, autoscale_group=None):
"""
Delete a policy.
:type policy_name: str
:param policy_name: The name or ARN of the policy to delete.
:type autoscale_group: str
:param autoscale_group: The name of the autoscale group.
"""
params = {'PolicyName': policy_name}
if autoscale_group:
params['AutoScalingGroupName'] = autoscale_group
return self.get_status('DeletePolicy', params)
def get_all_adjustment_types(self):
return self.get_list('DescribeAdjustmentTypes', {},
[('member', AdjustmentType)])
def get_all_autoscaling_instances(self, instance_ids=None,
max_records=None, next_token=None):
"""
Returns a description of each Auto Scaling instance in the instance_ids
list. If a list is not provided, the service returns the full details
of all instances up to a maximum of fifty.
This action supports pagination by returning a token if there are more
pages to retrieve. To get the next page, call this action again with
the returned token as the NextToken parameter.
:type instance_ids: list
:param instance_ids: List of Autoscaling Instance IDs which should be
searched for.
:type max_records: int
:param max_records: Maximum number of results to return.
:rtype: list
:returns: List of
:class:`boto.ec2.autoscale.instance.Instance` objects.
"""
params = {}
if instance_ids:
self.build_list_params(params, instance_ids, 'InstanceIds')
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeAutoScalingInstances',
params, [('member', Instance)])
def get_all_metric_collection_types(self):
"""
Returns a list of metrics and a corresponding list of granularities
for each metric.
"""
return self.get_object('DescribeMetricCollectionTypes',
{}, MetricCollectionTypes)
def get_all_policies(self, as_group=None, policy_names=None,
max_records=None, next_token=None):
"""
Returns descriptions of what each policy does. This action supports
pagination. If the response includes a token, there are more records
available. To get the additional records, repeat the request with the
response token as the NextToken parameter.
        If no group name or list of policy names is provided, all
available policies are returned.
        :type as_group: str
        :param as_group: The name of the
            :class:`boto.ec2.autoscale.group.AutoScalingGroup` to filter for.
        :type policy_names: list
        :param policy_names: List of policy names which should be searched for.
:type max_records: int
        :param max_records: Maximum number of policies to return.
"""
params = {}
if as_group:
params['AutoScalingGroupName'] = as_group
if policy_names:
self.build_list_params(params, policy_names, 'PolicyNames')
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribePolicies', params,
[('member', ScalingPolicy)])
def get_all_scaling_process_types(self):
"""
Returns scaling process types for use in the ResumeProcesses and
SuspendProcesses actions.
"""
return self.get_list('DescribeScalingProcessTypes', {},
[('member', ProcessType)])
def suspend_processes(self, as_group, scaling_processes=None):
"""
Suspends Auto Scaling processes for an Auto Scaling group.
:type as_group: string
:param as_group: The auto scaling group to suspend processes on.
:type scaling_processes: list
:param scaling_processes: Processes you want to suspend. If omitted,
all processes will be suspended.
"""
params = {'AutoScalingGroupName': as_group}
if scaling_processes:
self.build_list_params(params, scaling_processes,
'ScalingProcesses')
return self.get_status('SuspendProcesses', params)
def resume_processes(self, as_group, scaling_processes=None):
"""
Resumes Auto Scaling processes for an Auto Scaling group.
:type as_group: string
:param as_group: The auto scaling group to resume processes on.
:type scaling_processes: list
:param scaling_processes: Processes you want to resume. If omitted, all
processes will be resumed.
"""
params = {'AutoScalingGroupName': as_group}
if scaling_processes:
self.build_list_params(params, scaling_processes,
'ScalingProcesses')
return self.get_status('ResumeProcesses', params)
def create_scheduled_group_action(self, as_group, name, time=None,
desired_capacity=None,
min_size=None, max_size=None,
start_time=None, end_time=None,
recurrence=None):
"""
        Creates a scheduled scaling action for an Auto Scaling group. If you
leave a parameter unspecified, the corresponding value remains
unchanged in the affected Auto Scaling group.
:type as_group: string
        :param as_group: The Auto Scaling group to schedule the action on.
:type name: string
:param name: Scheduled action name.
:type time: datetime.datetime
        :param time: The time for this action to start. (Deprecated)
:type desired_capacity: int
:param desired_capacity: The number of EC2 instances that should
be running in this group.
:type min_size: int
:param min_size: The minimum size for the new auto scaling group.
:type max_size: int
        :param max_size: The maximum size for the new auto scaling group.
        :type start_time: datetime.datetime
        :param start_time: The time for this action to start. When StartTime
            and EndTime are specified with Recurrence, they form the
            boundaries of when the recurring action will start and stop.
        :type end_time: datetime.datetime
        :param end_time: The time for this action to end. When StartTime and
            EndTime are specified with Recurrence, they form the boundaries
            of when the recurring action will start and stop.
        :type recurrence: string
        :param recurrence: The time when recurring future actions will
            start, specified in Unix cron syntax format, e.g. '0 10 * * *'.
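
        A usage sketch (``conn`` is an assumed AutoScaleConnection; the
        group and action names are hypothetical)::

            conn.create_scheduled_group_action('my-asg', 'morning-scale-up',
                                               desired_capacity=4,
                                               recurrence='0 10 * * *')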
"""
params = {'AutoScalingGroupName': as_group,
'ScheduledActionName': name}
if start_time is not None:
params['StartTime'] = start_time.isoformat()
if end_time is not None:
params['EndTime'] = end_time.isoformat()
if recurrence is not None:
params['Recurrence'] = recurrence
if time:
params['Time'] = time.isoformat()
if desired_capacity is not None:
params['DesiredCapacity'] = desired_capacity
if min_size is not None:
params['MinSize'] = min_size
if max_size is not None:
params['MaxSize'] = max_size
return self.get_status('PutScheduledUpdateGroupAction', params)
def get_all_scheduled_actions(self, as_group=None, start_time=None,
end_time=None, scheduled_actions=None,
max_records=None, next_token=None):
params = {}
if as_group:
params['AutoScalingGroupName'] = as_group
if scheduled_actions:
self.build_list_params(params, scheduled_actions,
'ScheduledActionNames')
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeScheduledActions', params,
[('member', ScheduledUpdateGroupAction)])
def disable_metrics_collection(self, as_group, metrics=None):
"""
Disables monitoring of group metrics for the Auto Scaling group
specified in AutoScalingGroupName. You can specify the list of affected
metrics with the Metrics parameter.
"""
params = {'AutoScalingGroupName': as_group}
if metrics:
self.build_list_params(params, metrics, 'Metrics')
return self.get_status('DisableMetricsCollection', params)
def enable_metrics_collection(self, as_group, granularity, metrics=None):
"""
Enables monitoring of group metrics for the Auto Scaling group
specified in AutoScalingGroupName. You can specify the list of enabled
metrics with the Metrics parameter.
Auto scaling metrics collection can be turned on only if the
InstanceMonitoring.Enabled flag, in the Auto Scaling group's launch
configuration, is set to true.
        :type as_group: string
        :param as_group: The Auto Scaling group to enable metrics
            collection on.
:type granularity: string
:param granularity: The granularity to associate with the metrics to
collect. Currently, the only legal granularity is "1Minute".
:type metrics: string list
:param metrics: The list of metrics to collect. If no metrics are
specified, all metrics are enabled.
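
        A usage sketch (``conn`` is an assumed AutoScaleConnection; the
        group name is hypothetical)::

            conn.enable_metrics_collection('my-asg', '1Minute')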
"""
params = {'AutoScalingGroupName': as_group,
'Granularity': granularity}
if metrics:
self.build_list_params(params, metrics, 'Metrics')
return self.get_status('EnableMetricsCollection', params)
def execute_policy(self, policy_name, as_group=None, honor_cooldown=None):
params = {'PolicyName': policy_name}
if as_group:
params['AutoScalingGroupName'] = as_group
if honor_cooldown:
params['HonorCooldown'] = honor_cooldown
return self.get_status('ExecutePolicy', params)
def put_notification_configuration(self, autoscale_group, topic, notification_types):
"""
Configures an Auto Scaling group to send notifications when
specified events take place.
        :type autoscale_group: str or
            :class:`boto.ec2.autoscale.group.AutoScalingGroup` object
        :param autoscale_group: The Auto Scaling group to put notification
            configuration on.
:type topic: str
:param topic: The Amazon Resource Name (ARN) of the Amazon Simple
Notification Service (SNS) topic.
:type notification_types: list
:param notification_types: The type of events that will trigger
the notification.
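
        A usage sketch (``conn`` is an assumed AutoScaleConnection and the
        ARN is made up; the notification type strings below are standard
        AWS values, but check the current documentation)::

            conn.put_notification_configuration(
                'my-asg',
                'arn:aws:sns:us-east-1:123456789012:my-topic',
                ['autoscaling:EC2_INSTANCE_LAUNCH',
                 'autoscaling:EC2_INSTANCE_TERMINATE'])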
"""
name = autoscale_group
if isinstance(autoscale_group, AutoScalingGroup):
name = autoscale_group.name
params = {'AutoScalingGroupName': name,
'TopicARN': topic}
self.build_list_params(params, notification_types, 'NotificationTypes')
return self.get_status('PutNotificationConfiguration', params)
def set_instance_health(self, instance_id, health_status,
should_respect_grace_period=True):
"""
Explicitly set the health status of an instance.
:type instance_id: str
:param instance_id: The identifier of the EC2 instance.
:type health_status: str
:param health_status: The health status of the instance.
"Healthy" means that the instance is healthy and should remain
in service. "Unhealthy" means that the instance is unhealthy.
Auto Scaling should terminate and replace it.
:type should_respect_grace_period: bool
:param should_respect_grace_period: If True, this call should
respect the grace period associated with the group.
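
        A usage sketch (``conn`` is an assumed AutoScaleConnection; the
        instance ID is hypothetical)::

            conn.set_instance_health('i-12345678', 'Unhealthy')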
"""
params = {'InstanceId': instance_id,
'HealthStatus': health_status}
if should_respect_grace_period:
params['ShouldRespectGracePeriod'] = 'true'
else:
params['ShouldRespectGracePeriod'] = 'false'
return self.get_status('SetInstanceHealth', params)
def set_desired_capacity(self, group_name, desired_capacity, honor_cooldown=False):
"""
Adjusts the desired size of the AutoScalingGroup by initiating scaling
activities. When reducing the size of the group, it is not possible to define
which Amazon EC2 instances will be terminated. This applies to any Auto Scaling
decisions that might result in terminating instances.
:type group_name: string
:param group_name: name of the auto scaling group
:type desired_capacity: integer
:param desired_capacity: new capacity setting for auto scaling group
        :type honor_cooldown: boolean
        :param honor_cooldown: If False (the default), any cooldown period
            associated with the group is overridden; if True, the cooldown
            period is respected.
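
        A usage sketch (``conn`` is an assumed AutoScaleConnection; the
        group name is hypothetical)::

            conn.set_desired_capacity('my-asg', 4)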
"""
params = {'AutoScalingGroupName': group_name,
'DesiredCapacity': desired_capacity}
if honor_cooldown:
            # use the same 'true' string convention as the other boolean
            # params in this module (json.dumps('True') would send '"True"')
            params['HonorCooldown'] = 'true'
return self.get_status('SetDesiredCapacity', params)
# Tag methods
def get_all_tags(self, filters=None, max_records=None, next_token=None):
"""
Lists the Auto Scaling group tags.
This action supports pagination by returning a token if there
are more pages to retrieve. To get the next page, call this
action again with the returned token as the NextToken
parameter.
:type filters: dict
:param filters: The value of the filter type used to identify
the tags to be returned. NOT IMPLEMENTED YET.
:type max_records: int
:param max_records: Maximum number of tags to return.
:rtype: list
:returns: List of :class:`boto.ec2.autoscale.tag.Tag`
instances.
"""
params = {}
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeTags', params,
[('member', Tag)])
def create_or_update_tags(self, tags):
"""
Creates new tags or updates existing tags for an Auto Scaling group.
:type tags: List of :class:`boto.ec2.autoscale.tag.Tag`
:param tags: The new or updated tags.
"""
params = {}
for i, tag in enumerate(tags):
tag.build_params(params, i + 1)
return self.get_status('CreateOrUpdateTags', params, verb='POST')
def delete_tags(self, tags):
"""
Deletes existing tags for an Auto Scaling group.
:type tags: List of :class:`boto.ec2.autoscale.tag.Tag`
        :param tags: The tags to delete.
"""
params = {}
for i, tag in enumerate(tags):
tag.build_params(params, i + 1)
return self.get_status('DeleteTags', params, verb='POST')
| mit | 3,487,480,716,292,861,000 | 40.418431 | 194 | 0.615984 | false |
18padx08/PPTex | PPTexEnv_x86_64/lib/python2.7/site-packages/sympy/plotting/pygletplot/plot_curve.py | 25 | 2853 | from __future__ import print_function, division
from pyglet.gl import *
from plot_mode_base import PlotModeBase
from sympy.core import S
from sympy.core.compatibility import xrange
class PlotCurve(PlotModeBase):
style_override = 'wireframe'
def _on_calculate_verts(self):
self.t_interval = self.intervals[0]
self.t_set = list(self.t_interval.frange())
self.bounds = [[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0]]
evaluate = self._get_evaluator()
self._calculating_verts_pos = 0.0
self._calculating_verts_len = float(self.t_interval.v_len)
self.verts = list()
b = self.bounds
for t in self.t_set:
try:
_e = evaluate(t) # calculate vertex
except (NameError, ZeroDivisionError):
_e = None
if _e is not None: # update bounding box
for axis in range(3):
b[axis][0] = min([b[axis][0], _e[axis]])
b[axis][1] = max([b[axis][1], _e[axis]])
self.verts.append(_e)
self._calculating_verts_pos += 1.0
for axis in range(3):
b[axis][2] = b[axis][1] - b[axis][0]
if b[axis][2] == 0.0:
b[axis][2] = 1.0
self.push_wireframe(self.draw_verts(False))
def _on_calculate_cverts(self):
if not self.verts or not self.color:
return
def set_work_len(n):
self._calculating_cverts_len = float(n)
def inc_work_pos():
self._calculating_cverts_pos += 1.0
set_work_len(1)
self._calculating_cverts_pos = 0
self.cverts = self.color.apply_to_curve(self.verts,
self.t_set,
set_len=set_work_len,
inc_pos=inc_work_pos)
self.push_wireframe(self.draw_verts(True))
def calculate_one_cvert(self, t):
vert = self.verts[t]
return self.color(vert[0], vert[1], vert[2],
self.t_set[t], None)
def draw_verts(self, use_cverts):
def f():
glBegin(GL_LINE_STRIP)
for t in xrange(len(self.t_set)):
p = self.verts[t]
if p is None:
glEnd()
glBegin(GL_LINE_STRIP)
continue
if use_cverts:
c = self.cverts[t]
if c is None:
c = (0, 0, 0)
glColor3f(*c)
else:
glColor3f(*self.default_wireframe_color)
glVertex3f(*p)
glEnd()
return f
| mit | 4,147,604,870,148,563,500 | 32.564706 | 69 | 0.473887 | false |
R3v1L/evogtk | evogtk/gui/accessclasslib/spinbutton.py | 2 | 1871 | # -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2008 EVO Sistemas Libres <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
###############################################################################
# spinbutton
# EVOGTK Access class for gtk.Adjustment, gtk.SpinButton and DBSpinButton widgets
###############################################################################
# GTK Imports
import gtk
# DBWidgets imports
from evogtk.widgets import DBSpinButton
class AccessClass:
"""
Class for gtk.SpinButton
"""
def supported_widgets(self):
"""
Supported widgets for this access class
"""
return [gtk.Adjustment,gtk.SpinButton,DBSpinButton]
def supported_types(self):
"""
Supported types for this access class
"""
return [int,float]
def set_content(self,widget,content):
"""
Method for setting the widget content
"""
widget.set_value(content)
def get_content(self,widget):
"""
        Method for getting the widget content
"""
return widget.get_value()
| mit | -4,306,299,812,427,367,400 | 33.648148 | 85 | 0.59861 | false |
2mny/mylar | lib/comictaggerlib/matchselectionwindow.py | 1 | 4989 | """
A PyQT4 dialog to select from automated issue matches
"""
"""
Copyright 2012-2014 Anthony Beville
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtCore import QUrl, pyqtSignal, QByteArray
from imagefetcher import ImageFetcher
from settings import ComicTaggerSettings
from comicarchive import MetaDataStyle
from coverimagewidget import CoverImageWidget
from comicvinetalker import ComicVineTalker
import utils
class MatchSelectionWindow(QtGui.QDialog):
volume_id = 0
def __init__(self, parent, matches, comic_archive):
super(MatchSelectionWindow, self).__init__(parent)
uic.loadUi(ComicTaggerSettings.getUIFile('matchselectionwindow.ui' ), self)
self.altCoverWidget = CoverImageWidget( self.altCoverContainer, CoverImageWidget.AltCoverMode )
gridlayout = QtGui.QGridLayout( self.altCoverContainer )
gridlayout.addWidget( self.altCoverWidget )
gridlayout.setContentsMargins(0,0,0,0)
self.archiveCoverWidget = CoverImageWidget( self.archiveCoverContainer, CoverImageWidget.ArchiveMode )
gridlayout = QtGui.QGridLayout( self.archiveCoverContainer )
gridlayout.addWidget( self.archiveCoverWidget )
gridlayout.setContentsMargins(0,0,0,0)
utils.reduceWidgetFontSize( self.twList )
utils.reduceWidgetFontSize( self.teDescription, 1 )
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowSystemMenuHint |
QtCore.Qt.WindowMaximizeButtonHint)
self.matches = matches
self.comic_archive = comic_archive
self.twList.currentItemChanged.connect(self.currentItemChanged)
self.twList.cellDoubleClicked.connect(self.cellDoubleClicked)
self.updateData()
def updateData( self):
self.setCoverImage()
self.populateTable()
self.twList.resizeColumnsToContents()
self.twList.selectRow( 0 )
path = self.comic_archive.path
self.setWindowTitle( u"Select correct match: {0}".format(
os.path.split(path)[1] ))
def populateTable( self ):
while self.twList.rowCount() > 0:
self.twList.removeRow(0)
self.twList.setSortingEnabled(False)
row = 0
for match in self.matches:
self.twList.insertRow(row)
item_text = match['series']
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setData( QtCore.Qt.UserRole, (match,))
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 0, item)
if match['publisher'] is not None:
item_text = u"{0}".format(match['publisher'])
else:
item_text = u"Unknown"
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 1, item)
month_str = u""
year_str = u"????"
if match['month'] is not None:
month_str = u"-{0:02d}".format(int(match['month']))
if match['year'] is not None:
year_str = u"{0}".format(match['year'])
item_text = year_str + month_str
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 2, item)
item_text = match['issue_title']
if item_text is None:
item_text = ""
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 3, item)
row += 1
self.twList.resizeColumnsToContents()
self.twList.setSortingEnabled(True)
self.twList.sortItems( 2 , QtCore.Qt.AscendingOrder )
self.twList.selectRow(0)
self.twList.resizeColumnsToContents()
self.twList.horizontalHeader().setStretchLastSection(True)
def cellDoubleClicked( self, r, c ):
self.accept()
def currentItemChanged( self, curr, prev ):
if curr is None:
return
if prev is not None and prev.row() == curr.row():
return
self.altCoverWidget.setIssueID( self.currentMatch()['issue_id'] )
if self.currentMatch()['description'] is None:
self.teDescription.setText ( "" )
else:
self.teDescription.setText ( self.currentMatch()['description'] )
def setCoverImage( self ):
self.archiveCoverWidget.setArchive( self.comic_archive)
def currentMatch( self ):
row = self.twList.currentRow()
match = self.twList.item(row, 0).data( QtCore.Qt.UserRole ).toPyObject()[0]
return match
| gpl-3.0 | 6,659,778,074,985,122,000 | 30.18125 | 104 | 0.732411 | false |
p0psicles/SickGear | sickbeard/clients/deluge.py | 3 | 8261 | # Author: Mr_Orange <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import json
from base64 import b64encode
import sickbeard
from sickbeard import logger
from sickbeard.clients.generic import GenericClient
from lib.requests.exceptions import RequestException
class DelugeAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
super(DelugeAPI, self).__init__('Deluge', host, username, password)
self.url = self.host + 'json'
def _get_auth(self):
post_data = json.dumps({'method': 'auth.login',
'params': [self.password],
'id': 1})
try:
self.auth = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
post_data = json.dumps({'method': 'web.connected',
'params': [],
'id': 10})
connected = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
if not connected:
post_data = json.dumps({'method': 'web.get_hosts',
'params': [],
'id': 11})
hosts = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
if len(hosts) == 0:
logger.log(self.name + u': WebUI does not contain daemons',
logger.ERROR)
return None
post_data = json.dumps({'method': 'web.connect',
'params': [hosts[0][0]],
'id': 11})
self.session.post(self.url, data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT)
post_data = json.dumps({'method': 'web.connected',
'params': [],
'id': 10})
connected = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
if not connected:
logger.log(self.name + u': WebUI could not connect to daemon',
logger.ERROR)
return None
except RequestException:
return None
return self.auth
def _add_torrent_uri(self, result):
post_data = json.dumps({
'method': 'core.add_torrent_magnet',
'params': [result.url, {
'move_completed': 'true',
'move_completed_path': sickbeard.TV_DOWNLOAD_DIR
}],
'id': 2
})
result.hash = self._request(method='post',
data=post_data).json()['result']
return result.hash
def _add_torrent_file(self, result):
post_data = json.dumps({'method':
'core.add_torrent_file',
'params': [result.name + '.torrent',
b64encode(result.content),
{'move_completed': 'true',
'move_completed_path':
sickbeard.TV_DOWNLOAD_DIR}],
'id': 2})
result.hash = self._request(method='post',
data=post_data).json()['result']
return result.hash
def _set_torrent_label(self, result):
label = sickbeard.TORRENT_LABEL
if ' ' in label:
logger.log(self.name +
u': Invalid label. Label must not contain a space',
logger.ERROR)
return False
if label:
# check if label already exists and create it if not
post_data = json.dumps({
'method': 'label.get_labels',
'params': [],
'id': 3
})
labels = self._request(method='post',
data=post_data).json()['result']
if labels is not None:
if label not in labels:
logger.log(self.name + ': ' + label +
u' label does not exist in ' +
u'Deluge we must add it',
logger.DEBUG)
post_data = json.dumps({
'method': 'label.add',
'params': [label],
'id': 4
})
self._request(method='post', data=post_data)
logger.log(self.name + ': ' + label +
u' label added to Deluge', logger.DEBUG)
# add label to torrent
post_data = json.dumps({
'method': 'label.set_torrent',
'params': [result.hash, label],
'id': 5
})
self._request(method='post', data=post_data)
logger.log(self.name + ': ' + label +
u' label added to torrent',
logger.DEBUG)
else:
logger.log(self.name + ': ' +
u'label plugin not detected',
logger.DEBUG)
return False
return True
def _set_torrent_ratio(self, result):
ratio = None
if result.ratio:
ratio = result.ratio
if ratio:
post_data = json.dumps({'method': 'core.set_torrent_stop_at_ratio',
'params': [result.hash, True],
'id': 5})
self._request(method='post', data=post_data)
post_data = json.dumps({'method': 'core.set_torrent_stop_ratio',
'params': [result.hash, float(ratio)],
'id': 6})
self._request(method='post', data=post_data)
return True
def _set_torrent_path(self, result):
if sickbeard.TORRENT_PATH:
post_data = json.dumps({
'method': 'core.set_torrent_move_completed',
'params': [result.hash, True],
'id': 7
})
self._request(method='post', data=post_data)
post_data = json.dumps({
'method': 'core.set_torrent_move_completed_path',
'params': [result.hash, sickbeard.TORRENT_PATH],
'id': 8
})
self._request(method='post', data=post_data)
return True
def _set_torrent_pause(self, result):
if sickbeard.TORRENT_PAUSED:
post_data = json.dumps({'method': 'core.pause_torrent',
'params': [[result.hash]],
'id': 9})
self._request(method='post', data=post_data)
return True
api = DelugeAPI()
| gpl-3.0 | 5,227,302,978,426,104,000 | 35.553097 | 79 | 0.454303 | false |
helenst/django | django/conf/locale/zh_Hant/formats.py | 634 | 1810 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
TIME_FORMAT = 'H:i' # 20:45
DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
YEAR_MONTH_FORMAT = 'Y年n月' # 2016年9月
MONTH_DAY_FORMAT = 'm月j日' # 9月5日
SHORT_DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
SHORT_DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
FIRST_DAY_OF_WEEK = 1 # 星期一 (Monday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%Y/%m/%d', # '2016/09/05'
'%Y-%m-%d', # '2016-09-05'
    '%Y年%m月%d日', # '2016年9月5日'
)
TIME_INPUT_FORMATS = (
'%H:%M', # '20:45'
'%H:%M:%S', # '20:45:29'
'%H:%M:%S.%f', # '20:45:29.000200'
)
DATETIME_INPUT_FORMATS = (
'%Y/%m/%d %H:%M', # '2016/09/05 20:45'
'%Y-%m-%d %H:%M', # '2016-09-05 20:45'
    '%Y年%m月%d日 %H:%M', # '2016年9月5日 20:45'
'%Y/%m/%d %H:%M:%S', # '2016/09/05 20:45:29'
'%Y-%m-%d %H:%M:%S', # '2016-09-05 20:45:29'
    '%Y年%m月%d日 %H:%M:%S', # '2016年9月5日 20:45:29'
'%Y/%m/%d %H:%M:%S.%f', # '2016/09/05 20:45:29.000200'
'%Y-%m-%d %H:%M:%S.%f', # '2016-09-05 20:45:29.000200'
    '%Y年%m月%d日 %H:%M:%S.%f', # '2016年9月5日 20:45:29.000200'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ''
NUMBER_GROUPING = 4
| bsd-3-clause | 1,843,172,978,198,359,300 | 36.6 | 77 | 0.536643 | false |
omni5cience/django-inlineformfield | .tox/py27/lib/python2.7/site-packages/IPython/core/displayhook.py | 8 | 11365 | # -*- coding: utf-8 -*-
"""Displayhook for IPython.
This defines a callable class that IPython uses for `sys.displayhook`.
Authors:
* Fernando Perez
* Brian Granger
* Robert Kern
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
# Copyright (C) 2001-2007 Fernando Perez <[email protected]>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
import sys
from IPython.core.formatters import _safe_get_formatter_method
from IPython.config.configurable import Configurable
from IPython.utils import io
from IPython.utils.py3compat import builtin_mod
from IPython.utils.traitlets import Instance
from IPython.utils.warn import warn
#-----------------------------------------------------------------------------
# Main displayhook class
#-----------------------------------------------------------------------------
# TODO: Move the various attributes (cache_size, [others now moved]). Some
# of these are also attributes of InteractiveShell. They should be on ONE object
# only and the other objects should ask that one object for their values.
class DisplayHook(Configurable):
"""The custom IPython displayhook to replace sys.displayhook.
This class does many things, but the basic idea is that it is a callable
that gets called anytime user code returns a value.
"""
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
def __init__(self, shell=None, cache_size=1000, **kwargs):
super(DisplayHook, self).__init__(shell=shell, **kwargs)
cache_size_min = 3
if cache_size <= 0:
self.do_full_cache = 0
cache_size = 0
elif cache_size < cache_size_min:
self.do_full_cache = 0
cache_size = 0
warn('caching was disabled (min value for cache size is %s).' %
cache_size_min,level=3)
else:
self.do_full_cache = 1
self.cache_size = cache_size
# we need a reference to the user-level namespace
self.shell = shell
self._,self.__,self.___ = '','',''
# these are deliberately global:
to_user_ns = {'_':self._,'__':self.__,'___':self.___}
self.shell.user_ns.update(to_user_ns)
@property
def prompt_count(self):
return self.shell.execution_count
#-------------------------------------------------------------------------
# Methods used in __call__. Override these methods to modify the behavior
# of the displayhook.
#-------------------------------------------------------------------------
def check_for_underscore(self):
"""Check if the user has set the '_' variable by hand."""
# If something injected a '_' variable in __builtin__, delete
# ipython's automatic one so we don't clobber that. gettext() in
# particular uses _, so we need to stay away from it.
if '_' in builtin_mod.__dict__:
try:
del self.shell.user_ns['_']
except KeyError:
pass
def quiet(self):
"""Should we silence the display hook because of ';'?"""
# do not print output if input ends in ';'
try:
cell = self.shell.history_manager.input_hist_parsed[self.prompt_count]
return cell.rstrip().endswith(';')
except IndexError:
# some uses of ipshellembed may fail here
return False
def start_displayhook(self):
"""Start the displayhook, initializing resources."""
pass
def write_output_prompt(self):
"""Write the output prompt.
The default implementation simply writes the prompt to
``io.stdout``.
"""
# Use write, not print which adds an extra space.
io.stdout.write(self.shell.separate_out)
outprompt = self.shell.prompt_manager.render('out')
if self.do_full_cache:
io.stdout.write(outprompt)
def compute_format_data(self, result):
"""Compute format data of the object to be displayed.
The format data is a generalization of the :func:`repr` of an object.
In the default implementation the format data is a :class:`dict` of
        key/value pairs where the keys are valid MIME types and the values
        are JSON'able data structures containing the raw data for that MIME
        type. It is up to frontends to pick a MIME type to use and
        display that data in an appropriate manner.
This method only computes the format data for the object and should
NOT actually print or write that to a stream.
Parameters
----------
result : object
The Python object passed to the display hook, whose format will be
computed.
Returns
-------
(format_dict, md_dict) : dict
format_dict is a :class:`dict` whose keys are valid MIME types and values are
JSON'able raw data for that MIME type. It is recommended that
all return values of this should always include the "text/plain"
MIME type representation of the object.
md_dict is a :class:`dict` with the same MIME type keys
of metadata associated with each output.
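
        A minimal illustration (typical values; exact reprs may vary)::

            format_dict, md_dict = self.compute_format_data(3)
            # format_dict -> {'text/plain': '3'}
            # md_dict    -> {}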
"""
return self.shell.display_formatter.format(result)
def write_format_data(self, format_dict, md_dict=None):
"""Write the format data dict to the frontend.
This default version of this method simply writes the plain text
representation of the object to ``io.stdout``. Subclasses should
override this method to send the entire `format_dict` to the
frontends.
Parameters
----------
format_dict : dict
The format dict for the object passed to `sys.displayhook`.
md_dict : dict (optional)
The metadata dict to be associated with the display data.
"""
# We want to print because we want to always make sure we have a
# newline, even if all the prompt separators are ''. This is the
# standard IPython behavior.
result_repr = format_dict['text/plain']
if '\n' in result_repr:
# So that multi-line strings line up with the left column of
# the screen, instead of having the output prompt mess up
# their first line.
# We use the prompt template instead of the expanded prompt
# because the expansion may add ANSI escapes that will interfere
# with our ability to determine whether or not we should add
# a newline.
prompt_template = self.shell.prompt_manager.out_template
if prompt_template and not prompt_template.endswith('\n'):
# But avoid extraneous empty lines.
result_repr = '\n' + result_repr
print(result_repr, file=io.stdout)
def update_user_ns(self, result):
"""Update user_ns with various things like _, __, _1, etc."""
# Avoid recursive reference when displaying _oh/Out
if result is not self.shell.user_ns['_oh']:
if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache:
warn('Output cache limit (currently '+
repr(self.cache_size)+' entries) hit.\n'
'Flushing cache and resetting history counter...\n'
'The only history variables available will be _,__,___ and _1\n'
'with the current result.')
self.flush()
# Don't overwrite '_' and friends if '_' is in __builtin__ (otherwise
# we cause buggy behavior for things like gettext).
if '_' not in builtin_mod.__dict__:
self.___ = self.__
self.__ = self._
self._ = result
self.shell.push({'_':self._,
'__':self.__,
'___':self.___}, interactive=False)
# hackish access to top-level namespace to create _1,_2... dynamically
to_main = {}
if self.do_full_cache:
new_result = '_'+repr(self.prompt_count)
to_main[new_result] = result
self.shell.push(to_main, interactive=False)
self.shell.user_ns['_oh'][self.prompt_count] = result
def log_output(self, format_dict):
"""Log the output."""
if self.shell.logger.log_output:
self.shell.logger.log_write(format_dict['text/plain'], 'output')
self.shell.history_manager.output_hist_reprs[self.prompt_count] = \
format_dict['text/plain']
def finish_displayhook(self):
"""Finish up all displayhook activities."""
io.stdout.write(self.shell.separate_out2)
io.stdout.flush()
def __call__(self, result=None):
"""Printing with history cache management.
        This is invoked every time the interpreter needs to print, and is
activated by setting the variable sys.displayhook to it.
"""
self.check_for_underscore()
if result is not None and not self.quiet():
# If _ipython_display_ is defined, use that to display this object.
display_method = _safe_get_formatter_method(result, '_ipython_display_')
if display_method is not None:
try:
return display_method()
except NotImplementedError:
pass
self.start_displayhook()
self.write_output_prompt()
format_dict, md_dict = self.compute_format_data(result)
self.write_format_data(format_dict, md_dict)
self.update_user_ns(result)
self.log_output(format_dict)
self.finish_displayhook()
def flush(self):
if not self.do_full_cache:
raise ValueError("You shouldn't have reached the cache flush "
"if full caching is not enabled!")
# delete auto-generated vars from global namespace
for n in range(1,self.prompt_count + 1):
key = '_'+repr(n)
try:
del self.shell.user_ns[key]
except: pass
# In some embedded circumstances, the user_ns doesn't have the
# '_oh' key set up.
oh = self.shell.user_ns.get('_oh', None)
if oh is not None:
oh.clear()
# Release our own references to objects:
self._, self.__, self.___ = '', '', ''
if '_' not in builtin_mod.__dict__:
self.shell.user_ns.update({'_':None,'__':None, '___':None})
import gc
# TODO: Is this really needed?
# IronPython blocks here forever
if sys.platform != "cli":
gc.collect()
| mit | 550,763,466,342,112,600 | 38.877193 | 89 | 0.556445 | false |
ntrrgc/snorky | snorky/tests/utils/rpc.py | 1 | 2428 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from snorky.services.base import format_call
from snorky.hashable import make_hashable
class TestRequest(object):
"""Mocked Request class for use with RPC services"""
def __init__(self, service, client, command, params):
self.service = service
self.client = client
self.command = command
self.params = make_hashable(params)
self.debug = True # propagate internal errors
self.call_id = None
self.resolved = False
self.response = None
self.response_type = None
def reply(self, data):
if self.resolved:
raise RuntimeError("This request has already been resolved")
self.response_type = "response"
self.response = data
self.resolved = True
def error(self, msg):
if self.resolved:
raise RuntimeError("This request has already been resolved")
self.response_type = "error"
self.response = msg
self.resolved = True
def format_call(self):
return format_call(self.command, self.params)
class RPCTestMixin(object):
"""Useful methods for testing RPC services"""
def _rpcCallNoAsserts(self, service, client, command, request_debug=True,
**params):
# request_debug=True tells RPCService to propagate internal errors
# instead of replying with an RPC error.
request = TestRequest(service, client, command, params)
request.debug = request_debug
service.process_request(request)
return request
def rpcCall(self, service, client, command, **params):
request = self._rpcCallNoAsserts(service, client, command, **params)
self.assertTrue(request.resolved)
if request.response_type == "error":
raise AssertionError("Error in RPC call: %s" % request.response)
self.assertEqual(request.response_type, "response")
return request.response
def rpcExpectError(self, service, client, command, **params):
request = self._rpcCallNoAsserts(service, client, command, **params)
self.assertTrue(request.resolved)
self.assertEqual(request.response_type, "error")
return request.response
| mpl-2.0 | 5,013,836,287,573,457,000 | 33.685714 | 77 | 0.654036 | false |
mccheung/kbengine | kbe/src/lib/python/Lib/asyncio/events.py | 61 | 18822 | """Event loop and event loop policy."""
__all__ = ['AbstractEventLoopPolicy',
'AbstractEventLoop', 'AbstractServer',
'Handle', 'TimerHandle',
'get_event_loop_policy', 'set_event_loop_policy',
'get_event_loop', 'set_event_loop', 'new_event_loop',
'get_child_watcher', 'set_child_watcher',
]
import functools
import inspect
import reprlib
import socket
import subprocess
import sys
import threading
import traceback
_PY34 = sys.version_info >= (3, 4)
def _get_function_source(func):
if _PY34:
func = inspect.unwrap(func)
elif hasattr(func, '__wrapped__'):
func = func.__wrapped__
if inspect.isfunction(func):
code = func.__code__
return (code.co_filename, code.co_firstlineno)
if isinstance(func, functools.partial):
return _get_function_source(func.func)
if _PY34 and isinstance(func, functools.partialmethod):
return _get_function_source(func.func)
return None
def _format_args(args):
"""Format function arguments.
Special case for a single parameter: ('hello',) is formatted as ('hello').
"""
# use reprlib to limit the length of the output
args_repr = reprlib.repr(args)
if len(args) == 1 and args_repr.endswith(',)'):
args_repr = args_repr[:-2] + ')'
return args_repr
def _format_callback(func, args, suffix=''):
if isinstance(func, functools.partial):
if args is not None:
suffix = _format_args(args) + suffix
return _format_callback(func.func, func.args, suffix)
func_repr = getattr(func, '__qualname__', None)
if not func_repr:
func_repr = repr(func)
if args is not None:
func_repr += _format_args(args)
if suffix:
func_repr += suffix
source = _get_function_source(func)
if source:
func_repr += ' at %s:%s' % source
return func_repr
class Handle:
"""Object returned by callback registration methods."""
__slots__ = ('_callback', '_args', '_cancelled', '_loop',
'_source_traceback', '_repr', '__weakref__')
def __init__(self, callback, args, loop):
assert not isinstance(callback, Handle), 'A Handle is not a callback'
self._loop = loop
self._callback = callback
self._args = args
self._cancelled = False
self._repr = None
if self._loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
else:
self._source_traceback = None
def _repr_info(self):
info = [self.__class__.__name__]
if self._cancelled:
info.append('cancelled')
if self._callback is not None:
info.append(_format_callback(self._callback, self._args))
if self._source_traceback:
frame = self._source_traceback[-1]
info.append('created at %s:%s' % (frame[0], frame[1]))
return info
def __repr__(self):
if self._repr is not None:
return self._repr
info = self._repr_info()
return '<%s>' % ' '.join(info)
def cancel(self):
self._cancelled = True
if self._loop.get_debug():
# Keep a representation in debug mode to keep callback and
# parameters. For example, to log the warning "Executing <Handle
# ...> took 2.5 second"
self._repr = repr(self)
self._callback = None
self._args = None
def _run(self):
try:
self._callback(*self._args)
except Exception as exc:
cb = _format_callback(self._callback, self._args)
msg = 'Exception in callback {}'.format(cb)
context = {
'message': msg,
'exception': exc,
'handle': self,
}
if self._source_traceback:
context['source_traceback'] = self._source_traceback
self._loop.call_exception_handler(context)
self = None # Needed to break cycles when an exception occurs.
class TimerHandle(Handle):
"""Object returned by timed callback registration methods."""
__slots__ = ['_when']
def __init__(self, when, callback, args, loop):
assert when is not None
super().__init__(callback, args, loop)
if self._source_traceback:
del self._source_traceback[-1]
self._when = when
def _repr_info(self):
info = super()._repr_info()
pos = 2 if self._cancelled else 1
info.insert(pos, 'when=%s' % self._when)
return info
def __hash__(self):
return hash(self._when)
def __lt__(self, other):
return self._when < other._when
def __le__(self, other):
if self._when < other._when:
return True
return self.__eq__(other)
def __gt__(self, other):
return self._when > other._when
def __ge__(self, other):
if self._when > other._when:
return True
return self.__eq__(other)
def __eq__(self, other):
if isinstance(other, TimerHandle):
return (self._when == other._when and
self._callback == other._callback and
self._args == other._args and
self._cancelled == other._cancelled)
return NotImplemented
def __ne__(self, other):
equal = self.__eq__(other)
return NotImplemented if equal is NotImplemented else not equal
class AbstractServer:
"""Abstract server returned by create_server()."""
def close(self):
"""Stop serving. This leaves existing connections open."""
return NotImplemented
def wait_closed(self):
"""Coroutine to wait until service is closed."""
return NotImplemented
class AbstractEventLoop:
"""Abstract event loop."""
# Running and stopping the event loop.
def run_forever(self):
"""Run the event loop until stop() is called."""
raise NotImplementedError
def run_until_complete(self, future):
"""Run the event loop until a Future is done.
Return the Future's result, or raise its exception.
"""
raise NotImplementedError
def stop(self):
"""Stop the event loop as soon as reasonable.
Exactly how soon that is may depend on the implementation, but
no more I/O callbacks should be scheduled.
"""
raise NotImplementedError
def is_running(self):
"""Return whether the event loop is currently running."""
raise NotImplementedError
def is_closed(self):
"""Returns True if the event loop was closed."""
raise NotImplementedError
def close(self):
"""Close the loop.
The loop should not be running.
This is idempotent and irreversible.
No other methods should be called after this one.
"""
raise NotImplementedError
# Methods scheduling callbacks. All these return Handles.
def call_soon(self, callback, *args):
return self.call_later(0, callback, *args)
def call_later(self, delay, callback, *args):
raise NotImplementedError
def call_at(self, when, callback, *args):
raise NotImplementedError
def time(self):
raise NotImplementedError
# Method scheduling a coroutine object: create a task.
def create_task(self, coro):
raise NotImplementedError
# Methods for interacting with threads.
def call_soon_threadsafe(self, callback, *args):
raise NotImplementedError
def run_in_executor(self, executor, callback, *args):
raise NotImplementedError
def set_default_executor(self, executor):
raise NotImplementedError
# Network I/O methods returning Futures.
def getaddrinfo(self, host, port, *, family=0, type=0, proto=0, flags=0):
raise NotImplementedError
def getnameinfo(self, sockaddr, flags=0):
raise NotImplementedError
def create_connection(self, protocol_factory, host=None, port=None, *,
ssl=None, family=0, proto=0, flags=0, sock=None,
local_addr=None, server_hostname=None):
raise NotImplementedError
def create_server(self, protocol_factory, host=None, port=None, *,
family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE,
sock=None, backlog=100, ssl=None, reuse_address=None):
"""A coroutine which creates a TCP server bound to host and port.
The return value is a Server object which can be used to stop
the service.
If host is an empty string or None all interfaces are assumed
and a list of multiple sockets will be returned (most likely
one for IPv4 and another one for IPv6).
family can be set to either AF_INET or AF_INET6 to force the
socket to use IPv4 or IPv6. If not set it will be determined
from host (defaults to AF_UNSPEC).
flags is a bitmask for getaddrinfo().
sock can optionally be specified in order to use a preexisting
socket object.
backlog is the maximum number of queued connections passed to
listen() (defaults to 100).
ssl can be set to an SSLContext to enable SSL over the
accepted connections.
reuse_address tells the kernel to reuse a local socket in
TIME_WAIT state, without waiting for its natural timeout to
expire. If not specified will automatically be set to True on
UNIX.
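
        A minimal usage sketch (assumes a user-defined ``MyProtocol``
        factory)::

            loop = asyncio.get_event_loop()
            coro = loop.create_server(MyProtocol, '127.0.0.1', 8888)
            server = loop.run_until_complete(coro)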
"""
raise NotImplementedError
def create_unix_connection(self, protocol_factory, path, *,
ssl=None, sock=None,
server_hostname=None):
raise NotImplementedError
def create_unix_server(self, protocol_factory, path, *,
sock=None, backlog=100, ssl=None):
"""A coroutine which creates a UNIX Domain Socket server.
The return value is a Server object, which can be used to stop
the service.
        path is a str, representing a file system path to bind the
server socket to.
sock can optionally be specified in order to use a preexisting
socket object.
backlog is the maximum number of queued connections passed to
listen() (defaults to 100).
ssl can be set to an SSLContext to enable SSL over the
accepted connections.
"""
raise NotImplementedError
def create_datagram_endpoint(self, protocol_factory,
local_addr=None, remote_addr=None, *,
family=0, proto=0, flags=0):
raise NotImplementedError
# Pipes and subprocesses.
def connect_read_pipe(self, protocol_factory, pipe):
"""Register read pipe in event loop. Set the pipe to non-blocking mode.
        protocol_factory should instantiate an object with the Protocol
        interface.
pipe is a file-like object.
Return pair (transport, protocol), where transport supports the
ReadTransport interface."""
        # The reason to accept a file-like object instead of just a file
        # descriptor is that we need to own the pipe and close it when the
        # transport finishes. We could get complicated errors if we passed
        # f.fileno(), closed the fd in the pipe transport, and then closed
        # f as well, or vice versa.
raise NotImplementedError
def connect_write_pipe(self, protocol_factory, pipe):
"""Register write pipe in event loop.
        protocol_factory should instantiate an object with the BaseProtocol
        interface. pipe is a file-like object already switched to
        non-blocking mode.
        Return pair (transport, protocol), where transport supports the
        WriteTransport interface."""
        # The reason to accept a file-like object instead of just a file
        # descriptor is that we need to own the pipe and close it when the
        # transport finishes. We could get complicated errors if we passed
        # f.fileno(), closed the fd in the pipe transport, and then closed
        # f as well, or vice versa.
raise NotImplementedError
def subprocess_shell(self, protocol_factory, cmd, *, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
**kwargs):
raise NotImplementedError
def subprocess_exec(self, protocol_factory, *args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
**kwargs):
raise NotImplementedError
# Ready-based callback registration methods.
# The add_*() methods return None.
# The remove_*() methods return True if something was removed,
# False if there was nothing to delete.
def add_reader(self, fd, callback, *args):
raise NotImplementedError
def remove_reader(self, fd):
raise NotImplementedError
def add_writer(self, fd, callback, *args):
raise NotImplementedError
def remove_writer(self, fd):
raise NotImplementedError
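    # Usage sketch for the readiness API (sock and on_readable are assumed
    # to exist in the caller's scope):
    #
    #   loop.add_reader(sock.fileno(), on_readable, sock)
    #   ...
    #   removed = loop.remove_reader(sock.fileno())  # True if it was set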
# Completion based I/O methods returning Futures.
def sock_recv(self, sock, nbytes):
raise NotImplementedError
def sock_sendall(self, sock, data):
raise NotImplementedError
def sock_connect(self, sock, address):
raise NotImplementedError
def sock_accept(self, sock):
raise NotImplementedError
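    # Usage sketch on a concrete loop: the sock_*() methods take a
    # non-blocking socket and return Futures, e.g.
    #
    #   sock.setblocking(False)
    #   loop.run_until_complete(loop.sock_connect(sock, ('127.0.0.1', 8888)))
    #   data = loop.run_until_complete(loop.sock_recv(sock, 1024))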
# Signal handling.
def add_signal_handler(self, sig, callback, *args):
raise NotImplementedError
def remove_signal_handler(self, sig):
raise NotImplementedError
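    # Usage sketch (UNIX only; shutdown is a hypothetical callback):
    #
    #   import signal
    #   loop.add_signal_handler(signal.SIGTERM, shutdown)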
# Error handlers.
def set_exception_handler(self, handler):
raise NotImplementedError
def default_exception_handler(self, context):
raise NotImplementedError
def call_exception_handler(self, context):
raise NotImplementedError
# Debug flag management.
def get_debug(self):
raise NotImplementedError
def set_debug(self, enabled):
raise NotImplementedError
class AbstractEventLoopPolicy:
"""Abstract policy for accessing the event loop."""
def get_event_loop(self):
"""Get the event loop for the current context.
Returns an event loop object implementing the BaseEventLoop interface,
or raises an exception in case no event loop has been set for the
current context and the current policy does not specify to create one.
It should never return None."""
raise NotImplementedError
def set_event_loop(self, loop):
"""Set the event loop for the current context to loop."""
raise NotImplementedError
def new_event_loop(self):
"""Create and return a new event loop object according to this
policy's rules. If there's need to set this loop as the event loop for
the current context, set_event_loop must be called explicitly."""
raise NotImplementedError
# Child processes handling (Unix only).
def get_child_watcher(self):
"Get the watcher for child processes."
raise NotImplementedError
def set_child_watcher(self, watcher):
"""Set the watcher for child processes."""
raise NotImplementedError
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
"""Default policy implementation for accessing the event loop.
In this policy, each thread has its own event loop. However, we
only automatically create an event loop by default for the main
thread; other threads by default have no event loop.
Other policies may have different rules (e.g. a single global
event loop, or automatically creating an event loop per thread, or
using some other notion of context to which an event loop is
associated).
"""
_loop_factory = None
class _Local(threading.local):
_loop = None
_set_called = False
def __init__(self):
self._local = self._Local()
def get_event_loop(self):
"""Get the event loop.
This may be None or an instance of EventLoop.
"""
if (self._local._loop is None and
not self._local._set_called and
isinstance(threading.current_thread(), threading._MainThread)):
self.set_event_loop(self.new_event_loop())
assert self._local._loop is not None, \
('There is no current event loop in thread %r.' %
threading.current_thread().name)
return self._local._loop
def set_event_loop(self, loop):
"""Set the event loop."""
self._local._set_called = True
assert loop is None or isinstance(loop, AbstractEventLoop)
self._local._loop = loop
def new_event_loop(self):
"""Create a new event loop.
You must call set_event_loop() to make this the current event
loop.
"""
return self._loop_factory()
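    # A minimal sketch of a concrete policy: a subclass only needs to
    # supply a loop factory (MyEventLoop is hypothetical):
    #
    #   class MyEventLoopPolicy(BaseDefaultEventLoopPolicy):
    #       _loop_factory = MyEventLoop
    #
    #   set_event_loop_policy(MyEventLoopPolicy())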
# Event loop policy. The policy itself is always global, even if the
# policy's rules say that there is an event loop per thread (or other
# notion of context). The default policy is installed by the first
# call to get_event_loop_policy().
_event_loop_policy = None
# Lock for protecting the on-the-fly creation of the event loop policy.
_lock = threading.Lock()
def _init_event_loop_policy():
global _event_loop_policy
with _lock:
if _event_loop_policy is None: # pragma: no branch
from . import DefaultEventLoopPolicy
_event_loop_policy = DefaultEventLoopPolicy()
def get_event_loop_policy():
"""Get the current event loop policy."""
if _event_loop_policy is None:
_init_event_loop_policy()
return _event_loop_policy
def set_event_loop_policy(policy):
"""Set the current event loop policy.
If policy is None, the default policy is restored."""
global _event_loop_policy
assert policy is None or isinstance(policy, AbstractEventLoopPolicy)
_event_loop_policy = policy
def get_event_loop():
"""Equivalent to calling get_event_loop_policy().get_event_loop()."""
return get_event_loop_policy().get_event_loop()
def set_event_loop(loop):
"""Equivalent to calling get_event_loop_policy().set_event_loop(loop)."""
get_event_loop_policy().set_event_loop(loop)
def new_event_loop():
"""Equivalent to calling get_event_loop_policy().new_event_loop()."""
return get_event_loop_policy().new_event_loop()
def get_child_watcher():
"""Equivalent to calling get_event_loop_policy().get_child_watcher()."""
return get_event_loop_policy().get_child_watcher()
def set_child_watcher(watcher):
"""Equivalent to calling
get_event_loop_policy().set_child_watcher(watcher)."""
return get_event_loop_policy().set_child_watcher(watcher)
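# Typical use of the convenience functions above (sketch; run_until_complete
# exists on concrete loops, and main() is a hypothetical coroutine):
#
#   loop = new_event_loop()
#   set_event_loop(loop)
#   try:
#       loop.run_until_complete(main())
#   finally:
#       loop.close()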
| lgpl-3.0 | -7,761,992,944,354,123,000 | 31.119454 | 79 | 0.626767 | false |
skatsuta/aerospike-training | book/answers/Complete/Python/TweetService.py | 2 | 9958 | #!/usr/bin/env python
#
# * Copyright 2012-2014 by Aerospike.
# *
# * Permission is hereby granted, free of charge, to any person obtaining a copy
# * of this software and associated documentation files (the "Software"), to
# * deal in the Software without restriction, including without limitation the
# * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# * sell copies of the Software, and to permit persons to whom the Software is
# * furnished to do so, subject to the following conditions:
# *
# * The above copyright notice and this permission notice shall be included in
# * all copies or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# * IN THE SOFTWARE.
#
from __future__ import print_function
import aerospike
import sys
import time
import random
from aerospike import predicates as p
class TweetService(object):
def __init__(self, client):
self.client = client
def createTweet(self):
print("\n********** Create Tweet **********\n")
        # **********************
        # ***** Data Model *****
# Namespace: test
# Set: tweets
# Key: <username:<counter>>
# Bins:
# tweet - string
# ts - int (Stores epoch timestamp of the tweet)
# username - string
# Sample Key: dash:1
# Sample Record:
# { tweet: 'Put. A. Bird. On. It.',
# ts: 1408574221,
# username: 'dash'
# }
        # **********************
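        # A hedged sketch of writing one such record directly with the
        # aerospike Python client (the key is a (namespace, set, key) tuple):
        #
        #   key = ("test", "tweets", "dash:1")
        #   bins = {"tweet": "Put. A. Bird. On. It.",
        #           "ts": 1408574221, "username": "dash"}
        #   self.client.put(key, bins)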
userRecord = None
userKey = None
tweetKey = None
# Get username
username = str()
username = raw_input("Enter username: ")
if len(username) > 0:
# Check if username exists
meta = None
policy = None
userKey = ("test", "users", username)
(key, metadata,userRecord) = self.client.get(userKey,policy)
record = {}
if userRecord:
# Set Tweet Count
if 'tweetcount' in userRecord:
nextTweetCount = int(userRecord['tweetcount']) + 1
else:
nextTweetCount = 1
# Get tweet
record['tweet'] = raw_input("Enter tweet for " + username + ":")
# Write record
#wPolicy.recordExistsAction = RecordExistsAction.UPDATE
# Create timestamp to store along with the tweet so we can
# query, index and report on it
ts= self.getTimeStamp()
tweetKey = ("test", "tweets", username + ":" + str(nextTweetCount))
record["ts"] = ts
record["username"]= username
self.client.put(tweetKey,record, meta, policy)
print("\nINFO: Tweet record created!\n",record,tweetKey)
# Update tweet count and last tweet'd timestamp in the user
# record
self.updateUser(self.client, userKey, policy, ts, nextTweetCount)
else:
print("ERROR: User record not found!\n")
def scanAllTweetsForAllUsers(self):
try:
# Python Scan
tweetScan = self.client.scan("test", "tweets")
tweetScan.select('tweet')
# callback for each record read
def tweetCallback((key, meta, record)):
print(record)
# invoke the operations, and for each record invoke the callback
tweetScan.foreach(tweetCallback)
except Exception as e :
print("error: {0}".format(e), file=sys.stderr)
def updateUser(self, client, userKey, policy, ts, tweetCount):
userTweet = {}
userTweet["tweetcount"] = tweetCount
userTweet["lasttweeted"] = ts
meta = None
self.client.put(userKey,userTweet, meta, policy)
print("\nINFO: The tweet count now is: " , tweetCount)
    def updateUserUsingOperate(self, client, userKey, policy, ts):
        # Hedged rewrite: the Java-style Operation/Bin API in the original
        # does not exist in the Python client; operate() takes a list of
        # operation dicts instead.
        ops = [{"op": aerospike.OPERATOR_INCR, "bin": "tweetcount", "val": 1},
               {"op": aerospike.OPERATOR_WRITE, "bin": "lasttweeted", "val": ts},
               {"op": aerospike.OPERATOR_READ, "bin": "tweetcount"}]
        (key, meta, record) = self.client.operate(userKey, ops, None, policy)
        print("\nINFO: The tweet count now is: ", record["tweetcount"])
def queryTweetsByUsername(self):
print("\n********** Query Tweets By Username **********\n")
# Get username
username = str()
username = raw_input("Enter username: ")
if len(username) > 0:
try:
self.client.index_string_create(None, "test", "tweets", "username", "username_index")
time.sleep(5)
print("\nINFO: String Secondary Index Created ")
tweetQuery = self.client.query("test", "tweets")
#tweetQuery.select('username')
# callback for each record read
def tweetQueryCallback((key, meta, record)):
print(record["tweet"])
# invoke the operations, and for each record invoke the callback
tweetQuery.where(p.equals('username',username))
tweetQuery.foreach(tweetQueryCallback)
except Exception as e :
print("error: {0}".format(e), file=sys.stderr)
def queryUsersByTweetCount(self):
print("\n********** Query Users By Tweet Count Range **********\n")
# Get username
try:
self.client.index_integer_create(None, "test", "users", "tweetcount", "tweetcount_index")
time.sleep(5)
print("\nINFO: Integer Secondary Index Created ")
min = int(raw_input("Enter Min Tweet Count: "))
max = int(raw_input("Enter Max Tweet Count: "))
print("\nList of users with " , min , "-" , max , " tweets:\n")
tweetQuery = self.client.query("test", "users")
#tweetQuery.select('username')
# callback for each record read
def tweetQueryCountCallback((key, meta, record)):
print(record["username"] , " has " , record["tweetcount"] , " tweets\n")
# invoke the operations, and for each record invoke the callback
tweetQuery.where(p.between('tweetcount',min,max))
tweetQuery.foreach(tweetQueryCountCallback)
except Exception as e :
print("error: {0}".format(e), file=sys.stderr)
def getTimeStamp(self):
""" generated source for method getTimeStamp """
return int(round(time.time() * 1000))
def createTweets(self):
randomTweets = ["For just $1 you get a half price download of half of the song and listen to it just once.", "People tell me my body looks like a melted candle", "Come on movie! Make it start!", "Byaaaayy", "Please, please, win! Meow, meow, meow!", "Put. A. Bird. On. It.", "A weekend wasted is a weekend well spent", "Would you like to super spike your meal?", "We have a mean no-no-bring-bag up here on aisle two.", "SEEK: See, Every, EVERY, Kind... of spot", "We can order that for you. It will take a year to get there.", "If you are pregnant, have a soda.", "Hear that snap? Hear that clap?", "Follow me and I may follow you", "Which is the best cafe in Portland? Discuss...", "Portland Coffee is for closers!", "Lets get this party started!", "How about them portland blazers!", "You got school'd, yo", "I love animals", "I love my dog", "What's up Portland", "Which is the best cafe in Portland? Discuss...", "I dont always tweet, but when I do it is on Tweetaspike"]
totalUsers = 10000
maxTweets = 20
username = str()
ts = 0
        wr_policy = {
            'exists': aerospike.POLICY_EXISTS_IGNORE
        }
print("\nCreate up to " , maxTweets , " tweets each for " , totalUsers , " users. Press any key to continue...\n")
raw_input("..")
j = 0
while j < totalUsers:
username = "user" + str(random.randint(1,totalUsers))
userKey = ("test", "users", username)
meta = None
policy = None
ts = None
k = 0
(key, metadata,userRecord) = self.client.get(userKey,policy)
if userRecord:
totalTweets = random.randint(1,maxTweets)
while k <= totalTweets:
record = {}
ts = self.getTimeStamp()
tweetKey = ("test", "tweets", username + ":" + str(k))
record["tweet"] = random.choice(randomTweets)
record["ts"] = ts
record["username"]= username
self.client.put(tweetKey,record, meta, wr_policy)
k += 1
print("\nWrote " , totalTweets , " tweets for " , username , "!")
if totalTweets > 0:
# Update tweet count and last tweet'd timestamp in the user
# record
self.updateUser(self.client, userKey, wr_policy, ts, totalTweets)
j += 1
print("\n\nDone creating up to " , maxTweets , " tweets each for " , totalUsers , " users!\n")
| mit | -2,930,180,760,949,846,500 | 46.194313 | 982 | 0.574011 | false |
wangli1426/heron | heron/examples/src/python/misc/test_task_hook.py | 8 | 2350 | # Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''module for example task hook'''
from collections import Counter
from heron.common.src.python.utils.log import Log
from heron.common.src.python.utils.topology import ITaskHook
# pylint: disable=unused-argument
class TestTaskHook(ITaskHook):
"""TestTaskHook logs event information every 10000 times"""
CONST = 10000
def prepare(self, conf, context):
Log.info("In prepare of TestTaskHook")
self.counter = Counter()
# pylint: disable=no-self-use
def clean_up(self):
Log.info("In clean_up of TestTaskHook")
def emit(self, emit_info):
self.counter['emit'] += 1
if self.counter['emit'] % self.CONST == 0:
Log.info("TestTaskHook: emitted %s tuples" % str(self.counter['emit']))
def spout_ack(self, spout_ack_info):
self.counter['sp_ack'] += 1
if self.counter['sp_ack'] % self.CONST == 0:
Log.info("TestTaskHook: spout acked %s tuples" % str(self.counter['sp_ack']))
def spout_fail(self, spout_fail_info):
self.counter['sp_fail'] += 1
if self.counter['sp_fail'] % self.CONST == 0:
Log.info("TestTaskHook: spout failed %s tuples" % str(self.counter['sp_fail']))
def bolt_execute(self, bolt_execute_info):
self.counter['bl_exec'] += 1
if self.counter['bl_exec'] % self.CONST == 0:
Log.info("TestTaskHook: bolt executed %s tuples" % str(self.counter['bl_exec']))
def bolt_ack(self, bolt_ack_info):
self.counter['bl_ack'] += 1
if self.counter['bl_ack'] % self.CONST == 0:
Log.info("TestTaskHook: bolt acked %s tuples" % str(self.counter['bl_ack']))
def bolt_fail(self, bolt_fail_info):
self.counter['bl_fail'] += 1
if self.counter['bl_fail'] % self.CONST == 0:
Log.info("TestTaskHook: bolt failed %s tuples" % str(self.counter['bl_fail']))
| apache-2.0 | -4,632,834,672,013,429,000 | 37.52459 | 86 | 0.685957 | false |
guoxiaolongzte/spark | examples/src/main/python/avro_inputformat.py | 51 | 3170 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Read data file users.avro in local Spark distro:
$ cd $SPARK_HOME
$ ./bin/spark-submit --driver-class-path /path/to/example/jar \
> ./examples/src/main/python/avro_inputformat.py \
> examples/src/main/resources/users.avro
{u'favorite_color': None, u'name': u'Alyssa', u'favorite_numbers': [3, 9, 15, 20]}
{u'favorite_color': u'red', u'name': u'Ben', u'favorite_numbers': []}
To read name and favorite_color fields only, specify the following reader schema:
$ cat examples/src/main/resources/user.avsc
{"namespace": "example.avro",
"type": "record",
"name": "User",
"fields": [
{"name": "name", "type": "string"},
{"name": "favorite_color", "type": ["string", "null"]}
]
}
$ ./bin/spark-submit --driver-class-path /path/to/example/jar \
> ./examples/src/main/python/avro_inputformat.py \
> examples/src/main/resources/users.avro examples/src/main/resources/user.avsc
{u'favorite_color': None, u'name': u'Alyssa'}
{u'favorite_color': u'red', u'name': u'Ben'}
"""
from __future__ import print_function
import sys
from functools import reduce
from pyspark.sql import SparkSession
if __name__ == "__main__":
if len(sys.argv) != 2 and len(sys.argv) != 3:
print("""
Usage: avro_inputformat <data_file> [reader_schema_file]
Run with example jar:
./bin/spark-submit --driver-class-path /path/to/example/jar \
/path/to/examples/avro_inputformat.py <data_file> [reader_schema_file]
Assumes you have Avro data stored in <data_file>. Reader schema can be optionally specified
in [reader_schema_file].
""", file=sys.stderr)
sys.exit(-1)
path = sys.argv[1]
spark = SparkSession\
.builder\
.appName("AvroKeyInputFormat")\
.getOrCreate()
sc = spark.sparkContext
conf = None
if len(sys.argv) == 3:
schema_rdd = sc.textFile(sys.argv[2], 1).collect()
conf = {"avro.schema.input.key": reduce(lambda x, y: x + y, schema_rdd)}
avro_rdd = sc.newAPIHadoopFile(
path,
"org.apache.avro.mapreduce.AvroKeyInputFormat",
"org.apache.avro.mapred.AvroKey",
"org.apache.hadoop.io.NullWritable",
keyConverter="org.apache.spark.examples.pythonconverters.AvroWrapperToJavaConverter",
conf=conf)
output = avro_rdd.map(lambda x: x[0]).collect()
for k in output:
print(k)
spark.stop()
| apache-2.0 | -3,921,060,171,460,118,500 | 33.835165 | 99 | 0.675079 | false |
dpendl00/headphones | lib/html5lib/constants.py | 963 | 87346 | from __future__ import absolute_import, division, unicode_literals
import string
import gettext
_ = gettext.gettext
EOF = None
E = {
"null-character":
_("Null character in input stream, replaced with U+FFFD."),
"invalid-codepoint":
_("Invalid codepoint in stream."),
"incorrectly-placed-solidus":
_("Solidus (/) incorrectly placed in tag."),
"incorrect-cr-newline-entity":
_("Incorrect CR newline entity, replaced with LF."),
"illegal-windows-1252-entity":
_("Entity used with illegal number (windows-1252 reference)."),
"cant-convert-numeric-entity":
_("Numeric entity couldn't be converted to character "
"(codepoint U+%(charAsInt)08x)."),
"illegal-codepoint-for-numeric-entity":
_("Numeric entity represents an illegal codepoint: "
"U+%(charAsInt)08x."),
"numeric-entity-without-semicolon":
_("Numeric entity didn't end with ';'."),
"expected-numeric-entity-but-got-eof":
_("Numeric entity expected. Got end of file instead."),
"expected-numeric-entity":
_("Numeric entity expected but none found."),
"named-entity-without-semicolon":
_("Named entity didn't end with ';'."),
"expected-named-entity":
_("Named entity expected. Got none."),
"attributes-in-end-tag":
_("End tag contains unexpected attributes."),
'self-closing-flag-on-end-tag':
_("End tag contains unexpected self-closing flag."),
"expected-tag-name-but-got-right-bracket":
_("Expected tag name. Got '>' instead."),
"expected-tag-name-but-got-question-mark":
_("Expected tag name. Got '?' instead. (HTML doesn't "
"support processing instructions.)"),
"expected-tag-name":
_("Expected tag name. Got something else instead"),
"expected-closing-tag-but-got-right-bracket":
_("Expected closing tag. Got '>' instead. Ignoring '</>'."),
"expected-closing-tag-but-got-eof":
_("Expected closing tag. Unexpected end of file."),
"expected-closing-tag-but-got-char":
_("Expected closing tag. Unexpected character '%(data)s' found."),
"eof-in-tag-name":
_("Unexpected end of file in the tag name."),
"expected-attribute-name-but-got-eof":
_("Unexpected end of file. Expected attribute name instead."),
"eof-in-attribute-name":
_("Unexpected end of file in attribute name."),
"invalid-character-in-attribute-name":
_("Invalid character in attribute name"),
"duplicate-attribute":
_("Dropped duplicate attribute on tag."),
"expected-end-of-tag-name-but-got-eof":
_("Unexpected end of file. Expected = or end of tag."),
"expected-attribute-value-but-got-eof":
_("Unexpected end of file. Expected attribute value."),
"expected-attribute-value-but-got-right-bracket":
_("Expected attribute value. Got '>' instead."),
'equals-in-unquoted-attribute-value':
_("Unexpected = in unquoted attribute"),
'unexpected-character-in-unquoted-attribute-value':
_("Unexpected character in unquoted attribute"),
"invalid-character-after-attribute-name":
_("Unexpected character after attribute name."),
"unexpected-character-after-attribute-value":
_("Unexpected character after attribute value."),
"eof-in-attribute-value-double-quote":
_("Unexpected end of file in attribute value (\")."),
"eof-in-attribute-value-single-quote":
_("Unexpected end of file in attribute value (')."),
"eof-in-attribute-value-no-quotes":
_("Unexpected end of file in attribute value."),
"unexpected-EOF-after-solidus-in-tag":
_("Unexpected end of file in tag. Expected >"),
"unexpected-character-after-solidus-in-tag":
_("Unexpected character after / in tag. Expected >"),
"expected-dashes-or-doctype":
_("Expected '--' or 'DOCTYPE'. Not found."),
"unexpected-bang-after-double-dash-in-comment":
_("Unexpected ! after -- in comment"),
"unexpected-space-after-double-dash-in-comment":
_("Unexpected space after -- in comment"),
"incorrect-comment":
_("Incorrect comment."),
"eof-in-comment":
_("Unexpected end of file in comment."),
"eof-in-comment-end-dash":
_("Unexpected end of file in comment (-)"),
"unexpected-dash-after-double-dash-in-comment":
_("Unexpected '-' after '--' found in comment."),
"eof-in-comment-double-dash":
_("Unexpected end of file in comment (--)."),
"eof-in-comment-end-space-state":
_("Unexpected end of file in comment."),
"eof-in-comment-end-bang-state":
_("Unexpected end of file in comment."),
"unexpected-char-in-comment":
_("Unexpected character in comment found."),
"need-space-after-doctype":
_("No space after literal string 'DOCTYPE'."),
"expected-doctype-name-but-got-right-bracket":
_("Unexpected > character. Expected DOCTYPE name."),
"expected-doctype-name-but-got-eof":
_("Unexpected end of file. Expected DOCTYPE name."),
"eof-in-doctype-name":
_("Unexpected end of file in DOCTYPE name."),
"eof-in-doctype":
_("Unexpected end of file in DOCTYPE."),
"expected-space-or-right-bracket-in-doctype":
_("Expected space or '>'. Got '%(data)s'"),
"unexpected-end-of-doctype":
_("Unexpected end of DOCTYPE."),
"unexpected-char-in-doctype":
_("Unexpected character in DOCTYPE."),
"eof-in-innerhtml":
_("XXX innerHTML EOF"),
"unexpected-doctype":
_("Unexpected DOCTYPE. Ignored."),
"non-html-root":
_("html needs to be the first start tag."),
"expected-doctype-but-got-eof":
_("Unexpected End of file. Expected DOCTYPE."),
"unknown-doctype":
_("Erroneous DOCTYPE."),
"expected-doctype-but-got-chars":
_("Unexpected non-space characters. Expected DOCTYPE."),
"expected-doctype-but-got-start-tag":
_("Unexpected start tag (%(name)s). Expected DOCTYPE."),
"expected-doctype-but-got-end-tag":
_("Unexpected end tag (%(name)s). Expected DOCTYPE."),
"end-tag-after-implied-root":
_("Unexpected end tag (%(name)s) after the (implied) root element."),
"expected-named-closing-tag-but-got-eof":
_("Unexpected end of file. Expected end tag (%(name)s)."),
"two-heads-are-not-better-than-one":
_("Unexpected start tag head in existing head. Ignored."),
"unexpected-end-tag":
_("Unexpected end tag (%(name)s). Ignored."),
"unexpected-start-tag-out-of-my-head":
_("Unexpected start tag (%(name)s) that can be in head. Moved."),
"unexpected-start-tag":
_("Unexpected start tag (%(name)s)."),
"missing-end-tag":
_("Missing end tag (%(name)s)."),
"missing-end-tags":
_("Missing end tags (%(name)s)."),
"unexpected-start-tag-implies-end-tag":
_("Unexpected start tag (%(startName)s) "
"implies end tag (%(endName)s)."),
"unexpected-start-tag-treated-as":
_("Unexpected start tag (%(originalName)s). Treated as %(newName)s."),
"deprecated-tag":
_("Unexpected start tag %(name)s. Don't use it!"),
"unexpected-start-tag-ignored":
_("Unexpected start tag %(name)s. Ignored."),
"expected-one-end-tag-but-got-another":
_("Unexpected end tag (%(gotName)s). "
"Missing end tag (%(expectedName)s)."),
"end-tag-too-early":
_("End tag (%(name)s) seen too early. Expected other end tag."),
"end-tag-too-early-named":
_("Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s)."),
"end-tag-too-early-ignored":
_("End tag (%(name)s) seen too early. Ignored."),
"adoption-agency-1.1":
_("End tag (%(name)s) violates step 1, "
"paragraph 1 of the adoption agency algorithm."),
"adoption-agency-1.2":
_("End tag (%(name)s) violates step 1, "
"paragraph 2 of the adoption agency algorithm."),
"adoption-agency-1.3":
_("End tag (%(name)s) violates step 1, "
"paragraph 3 of the adoption agency algorithm."),
"adoption-agency-4.4":
_("End tag (%(name)s) violates step 4, "
"paragraph 4 of the adoption agency algorithm."),
"unexpected-end-tag-treated-as":
_("Unexpected end tag (%(originalName)s). Treated as %(newName)s."),
"no-end-tag":
_("This element (%(name)s) has no end tag."),
"unexpected-implied-end-tag-in-table":
_("Unexpected implied end tag (%(name)s) in the table phase."),
"unexpected-implied-end-tag-in-table-body":
_("Unexpected implied end tag (%(name)s) in the table body phase."),
"unexpected-char-implies-table-voodoo":
_("Unexpected non-space characters in "
"table context caused voodoo mode."),
"unexpected-hidden-input-in-table":
_("Unexpected input with type hidden in table context."),
"unexpected-form-in-table":
_("Unexpected form in table context."),
"unexpected-start-tag-implies-table-voodoo":
_("Unexpected start tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-end-tag-implies-table-voodoo":
_("Unexpected end tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-cell-in-table-body":
_("Unexpected table cell start tag (%(name)s) "
"in the table body phase."),
"unexpected-cell-end-tag":
_("Got table cell end tag (%(name)s) "
"while required end tags are missing."),
"unexpected-end-tag-in-table-body":
_("Unexpected end tag (%(name)s) in the table body phase. Ignored."),
"unexpected-implied-end-tag-in-table-row":
_("Unexpected implied end tag (%(name)s) in the table row phase."),
"unexpected-end-tag-in-table-row":
_("Unexpected end tag (%(name)s) in the table row phase. Ignored."),
"unexpected-select-in-select":
_("Unexpected select start tag in the select phase "
"treated as select end tag."),
"unexpected-input-in-select":
_("Unexpected input start tag in the select phase."),
"unexpected-start-tag-in-select":
_("Unexpected start tag token (%(name)s in the select phase. "
"Ignored."),
"unexpected-end-tag-in-select":
_("Unexpected end tag (%(name)s) in the select phase. Ignored."),
"unexpected-table-element-start-tag-in-select-in-table":
_("Unexpected table element start tag (%(name)s) in the select in table phase."),
"unexpected-table-element-end-tag-in-select-in-table":
_("Unexpected table element end tag (%(name)s) in the select in table phase."),
"unexpected-char-after-body":
_("Unexpected non-space characters in the after body phase."),
"unexpected-start-tag-after-body":
_("Unexpected start tag token (%(name)s)"
" in the after body phase."),
"unexpected-end-tag-after-body":
_("Unexpected end tag token (%(name)s)"
" in the after body phase."),
"unexpected-char-in-frameset":
_("Unexpected characters in the frameset phase. Characters ignored."),
"unexpected-start-tag-in-frameset":
_("Unexpected start tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-frameset-in-frameset-innerhtml":
_("Unexpected end tag token (frameset) "
"in the frameset phase (innerHTML)."),
"unexpected-end-tag-in-frameset":
_("Unexpected end tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-char-after-frameset":
_("Unexpected non-space characters in the "
"after frameset phase. Ignored."),
"unexpected-start-tag-after-frameset":
_("Unexpected start tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-frameset":
_("Unexpected end tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-body-innerhtml":
_("Unexpected end tag after body(innerHtml)"),
"expected-eof-but-got-char":
_("Unexpected non-space characters. Expected end of file."),
"expected-eof-but-got-start-tag":
_("Unexpected start tag (%(name)s)"
". Expected end of file."),
"expected-eof-but-got-end-tag":
_("Unexpected end tag (%(name)s)"
". Expected end of file."),
"eof-in-table":
_("Unexpected end of file. Expected table content."),
"eof-in-select":
_("Unexpected end of file. Expected select content."),
"eof-in-frameset":
_("Unexpected end of file. Expected frameset content."),
"eof-in-script-in-script":
_("Unexpected end of file. Expected script content."),
"eof-in-foreign-lands":
_("Unexpected end of file. Expected foreign content"),
"non-void-element-with-trailing-solidus":
_("Trailing solidus not allowed on element %(name)s"),
"unexpected-html-element-in-foreign-content":
_("Element %(name)s not allowed in a non-html context"),
"unexpected-end-tag-before-html":
_("Unexpected end tag (%(name)s) before html."),
"XXX-undefined-error":
_("Undefined error (this sucks and should be fixed)"),
}
namespaces = {
"html": "http://www.w3.org/1999/xhtml",
"mathml": "http://www.w3.org/1998/Math/MathML",
"svg": "http://www.w3.org/2000/svg",
"xlink": "http://www.w3.org/1999/xlink",
"xml": "http://www.w3.org/XML/1998/namespace",
"xmlns": "http://www.w3.org/2000/xmlns/"
}
scopingElements = frozenset((
(namespaces["html"], "applet"),
(namespaces["html"], "caption"),
(namespaces["html"], "html"),
(namespaces["html"], "marquee"),
(namespaces["html"], "object"),
(namespaces["html"], "table"),
(namespaces["html"], "td"),
(namespaces["html"], "th"),
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext"),
(namespaces["mathml"], "annotation-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title"),
))
formattingElements = frozenset((
(namespaces["html"], "a"),
(namespaces["html"], "b"),
(namespaces["html"], "big"),
(namespaces["html"], "code"),
(namespaces["html"], "em"),
(namespaces["html"], "font"),
(namespaces["html"], "i"),
(namespaces["html"], "nobr"),
(namespaces["html"], "s"),
(namespaces["html"], "small"),
(namespaces["html"], "strike"),
(namespaces["html"], "strong"),
(namespaces["html"], "tt"),
(namespaces["html"], "u")
))
specialElements = frozenset((
(namespaces["html"], "address"),
(namespaces["html"], "applet"),
(namespaces["html"], "area"),
(namespaces["html"], "article"),
(namespaces["html"], "aside"),
(namespaces["html"], "base"),
(namespaces["html"], "basefont"),
(namespaces["html"], "bgsound"),
(namespaces["html"], "blockquote"),
(namespaces["html"], "body"),
(namespaces["html"], "br"),
(namespaces["html"], "button"),
(namespaces["html"], "caption"),
(namespaces["html"], "center"),
(namespaces["html"], "col"),
(namespaces["html"], "colgroup"),
(namespaces["html"], "command"),
(namespaces["html"], "dd"),
(namespaces["html"], "details"),
(namespaces["html"], "dir"),
(namespaces["html"], "div"),
(namespaces["html"], "dl"),
(namespaces["html"], "dt"),
(namespaces["html"], "embed"),
(namespaces["html"], "fieldset"),
(namespaces["html"], "figure"),
(namespaces["html"], "footer"),
(namespaces["html"], "form"),
(namespaces["html"], "frame"),
(namespaces["html"], "frameset"),
(namespaces["html"], "h1"),
(namespaces["html"], "h2"),
(namespaces["html"], "h3"),
(namespaces["html"], "h4"),
(namespaces["html"], "h5"),
(namespaces["html"], "h6"),
(namespaces["html"], "head"),
(namespaces["html"], "header"),
(namespaces["html"], "hr"),
(namespaces["html"], "html"),
(namespaces["html"], "iframe"),
# Note that image is commented out in the spec as "this isn't an
# element that can end up on the stack, so it doesn't matter,"
(namespaces["html"], "image"),
(namespaces["html"], "img"),
(namespaces["html"], "input"),
(namespaces["html"], "isindex"),
(namespaces["html"], "li"),
(namespaces["html"], "link"),
(namespaces["html"], "listing"),
(namespaces["html"], "marquee"),
(namespaces["html"], "menu"),
(namespaces["html"], "meta"),
(namespaces["html"], "nav"),
(namespaces["html"], "noembed"),
(namespaces["html"], "noframes"),
(namespaces["html"], "noscript"),
(namespaces["html"], "object"),
(namespaces["html"], "ol"),
(namespaces["html"], "p"),
(namespaces["html"], "param"),
(namespaces["html"], "plaintext"),
(namespaces["html"], "pre"),
(namespaces["html"], "script"),
(namespaces["html"], "section"),
(namespaces["html"], "select"),
(namespaces["html"], "style"),
(namespaces["html"], "table"),
(namespaces["html"], "tbody"),
(namespaces["html"], "td"),
(namespaces["html"], "textarea"),
(namespaces["html"], "tfoot"),
(namespaces["html"], "th"),
(namespaces["html"], "thead"),
(namespaces["html"], "title"),
(namespaces["html"], "tr"),
(namespaces["html"], "ul"),
(namespaces["html"], "wbr"),
(namespaces["html"], "xmp"),
(namespaces["svg"], "foreignObject")
))
htmlIntegrationPointElements = frozenset((
(namespaces["mathml"], "annotaion-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title")
))
mathmlTextIntegrationPointElements = frozenset((
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext")
))
adjustForeignAttributes = {
"xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
"xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
"xlink:href": ("xlink", "href", namespaces["xlink"]),
"xlink:role": ("xlink", "role", namespaces["xlink"]),
"xlink:show": ("xlink", "show", namespaces["xlink"]),
"xlink:title": ("xlink", "title", namespaces["xlink"]),
"xlink:type": ("xlink", "type", namespaces["xlink"]),
"xml:base": ("xml", "base", namespaces["xml"]),
"xml:lang": ("xml", "lang", namespaces["xml"]),
"xml:space": ("xml", "space", namespaces["xml"]),
"xmlns": (None, "xmlns", namespaces["xmlns"]),
"xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
}
unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in
adjustForeignAttributes.items()])
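# Example of the inverted mapping: a (namespace, local name) pair recovers
# the serialized qualified name, e.g.
#   unadjustForeignAttributes[(namespaces["xlink"], "href")] == "xlink:href"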
spaceCharacters = frozenset((
"\t",
"\n",
"\u000C",
" ",
"\r"
))
tableInsertModeElements = frozenset((
"table",
"tbody",
"tfoot",
"thead",
"tr"
))
asciiLowercase = frozenset(string.ascii_lowercase)
asciiUppercase = frozenset(string.ascii_uppercase)
asciiLetters = frozenset(string.ascii_letters)
digits = frozenset(string.digits)
hexDigits = frozenset(string.hexdigits)
asciiUpper2Lower = dict([(ord(c), ord(c.lower()))
for c in string.ascii_uppercase])
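# asciiUpper2Lower is a translation table for str.translate() that
# lowercases ASCII letters only, e.g.
#   "HTML".translate(asciiUpper2Lower) == "html"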
# Heading elements need to be ordered
headingElements = (
"h1",
"h2",
"h3",
"h4",
"h5",
"h6"
)
voidElements = frozenset((
"base",
"command",
"event-source",
"link",
"meta",
"hr",
"br",
"img",
"embed",
"param",
"area",
"col",
"input",
"source",
"track"
))
cdataElements = frozenset(('title', 'textarea'))
rcdataElements = frozenset((
'style',
'script',
'xmp',
'iframe',
'noembed',
'noframes',
'noscript'
))
booleanAttributes = {
"": frozenset(("irrelevant",)),
"style": frozenset(("scoped",)),
"img": frozenset(("ismap",)),
"audio": frozenset(("autoplay", "controls")),
"video": frozenset(("autoplay", "controls")),
"script": frozenset(("defer", "async")),
"details": frozenset(("open",)),
"datagrid": frozenset(("multiple", "disabled")),
"command": frozenset(("hidden", "disabled", "checked", "default")),
"hr": frozenset(("noshade")),
"menu": frozenset(("autosubmit",)),
"fieldset": frozenset(("disabled", "readonly")),
"option": frozenset(("disabled", "readonly", "selected")),
"optgroup": frozenset(("disabled", "readonly")),
"button": frozenset(("disabled", "autofocus")),
"input": frozenset(("disabled", "readonly", "required", "autofocus", "checked", "ismap")),
"select": frozenset(("disabled", "readonly", "autofocus", "multiple")),
"output": frozenset(("disabled", "readonly")),
}
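# Lookup example: an attribute can be minimized when it appears in the set
# for its tag (or in the wildcard "" entry), e.g.
#   "checked" in booleanAttributes["input"]  # True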
# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
# therefore can't be a frozenset.
entitiesWindows1252 = (
8364, # 0x80 0x20AC EURO SIGN
65533, # 0x81 UNDEFINED
8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK
402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK
8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK
8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS
8224, # 0x86 0x2020 DAGGER
8225, # 0x87 0x2021 DOUBLE DAGGER
710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT
8240, # 0x89 0x2030 PER MILLE SIGN
352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON
8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK
338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE
65533, # 0x8D UNDEFINED
381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON
65533, # 0x8F UNDEFINED
65533, # 0x90 UNDEFINED
8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK
8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK
8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK
8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK
8226, # 0x95 0x2022 BULLET
8211, # 0x96 0x2013 EN DASH
8212, # 0x97 0x2014 EM DASH
732, # 0x98 0x02DC SMALL TILDE
8482, # 0x99 0x2122 TRADE MARK SIGN
353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON
8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE
65533, # 0x9D UNDEFINED
382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON
376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS
)
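# The table is indexed by (codepoint - 0x80); e.g. a stray Windows-1252
# byte 0x93 maps to entitiesWindows1252[0x93 - 0x80] == 8220 (U+201C,
# LEFT DOUBLE QUOTATION MARK).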
xmlEntities = frozenset(('lt;', 'gt;', 'amp;', 'apos;', 'quot;'))
entities = {
"AElig": "\xc6",
"AElig;": "\xc6",
"AMP": "&",
"AMP;": "&",
"Aacute": "\xc1",
"Aacute;": "\xc1",
"Abreve;": "\u0102",
"Acirc": "\xc2",
"Acirc;": "\xc2",
"Acy;": "\u0410",
"Afr;": "\U0001d504",
"Agrave": "\xc0",
"Agrave;": "\xc0",
"Alpha;": "\u0391",
"Amacr;": "\u0100",
"And;": "\u2a53",
"Aogon;": "\u0104",
"Aopf;": "\U0001d538",
"ApplyFunction;": "\u2061",
"Aring": "\xc5",
"Aring;": "\xc5",
"Ascr;": "\U0001d49c",
"Assign;": "\u2254",
"Atilde": "\xc3",
"Atilde;": "\xc3",
"Auml": "\xc4",
"Auml;": "\xc4",
"Backslash;": "\u2216",
"Barv;": "\u2ae7",
"Barwed;": "\u2306",
"Bcy;": "\u0411",
"Because;": "\u2235",
"Bernoullis;": "\u212c",
"Beta;": "\u0392",
"Bfr;": "\U0001d505",
"Bopf;": "\U0001d539",
"Breve;": "\u02d8",
"Bscr;": "\u212c",
"Bumpeq;": "\u224e",
"CHcy;": "\u0427",
"COPY": "\xa9",
"COPY;": "\xa9",
"Cacute;": "\u0106",
"Cap;": "\u22d2",
"CapitalDifferentialD;": "\u2145",
"Cayleys;": "\u212d",
"Ccaron;": "\u010c",
"Ccedil": "\xc7",
"Ccedil;": "\xc7",
"Ccirc;": "\u0108",
"Cconint;": "\u2230",
"Cdot;": "\u010a",
"Cedilla;": "\xb8",
"CenterDot;": "\xb7",
"Cfr;": "\u212d",
"Chi;": "\u03a7",
"CircleDot;": "\u2299",
"CircleMinus;": "\u2296",
"CirclePlus;": "\u2295",
"CircleTimes;": "\u2297",
"ClockwiseContourIntegral;": "\u2232",
"CloseCurlyDoubleQuote;": "\u201d",
"CloseCurlyQuote;": "\u2019",
"Colon;": "\u2237",
"Colone;": "\u2a74",
"Congruent;": "\u2261",
"Conint;": "\u222f",
"ContourIntegral;": "\u222e",
"Copf;": "\u2102",
"Coproduct;": "\u2210",
"CounterClockwiseContourIntegral;": "\u2233",
"Cross;": "\u2a2f",
"Cscr;": "\U0001d49e",
"Cup;": "\u22d3",
"CupCap;": "\u224d",
"DD;": "\u2145",
"DDotrahd;": "\u2911",
"DJcy;": "\u0402",
"DScy;": "\u0405",
"DZcy;": "\u040f",
"Dagger;": "\u2021",
"Darr;": "\u21a1",
"Dashv;": "\u2ae4",
"Dcaron;": "\u010e",
"Dcy;": "\u0414",
"Del;": "\u2207",
"Delta;": "\u0394",
"Dfr;": "\U0001d507",
"DiacriticalAcute;": "\xb4",
"DiacriticalDot;": "\u02d9",
"DiacriticalDoubleAcute;": "\u02dd",
"DiacriticalGrave;": "`",
"DiacriticalTilde;": "\u02dc",
"Diamond;": "\u22c4",
"DifferentialD;": "\u2146",
"Dopf;": "\U0001d53b",
"Dot;": "\xa8",
"DotDot;": "\u20dc",
"DotEqual;": "\u2250",
"DoubleContourIntegral;": "\u222f",
"DoubleDot;": "\xa8",
"DoubleDownArrow;": "\u21d3",
"DoubleLeftArrow;": "\u21d0",
"DoubleLeftRightArrow;": "\u21d4",
"DoubleLeftTee;": "\u2ae4",
"DoubleLongLeftArrow;": "\u27f8",
"DoubleLongLeftRightArrow;": "\u27fa",
"DoubleLongRightArrow;": "\u27f9",
"DoubleRightArrow;": "\u21d2",
"DoubleRightTee;": "\u22a8",
"DoubleUpArrow;": "\u21d1",
"DoubleUpDownArrow;": "\u21d5",
"DoubleVerticalBar;": "\u2225",
"DownArrow;": "\u2193",
"DownArrowBar;": "\u2913",
"DownArrowUpArrow;": "\u21f5",
"DownBreve;": "\u0311",
"DownLeftRightVector;": "\u2950",
"DownLeftTeeVector;": "\u295e",
"DownLeftVector;": "\u21bd",
"DownLeftVectorBar;": "\u2956",
"DownRightTeeVector;": "\u295f",
"DownRightVector;": "\u21c1",
"DownRightVectorBar;": "\u2957",
"DownTee;": "\u22a4",
"DownTeeArrow;": "\u21a7",
"Downarrow;": "\u21d3",
"Dscr;": "\U0001d49f",
"Dstrok;": "\u0110",
"ENG;": "\u014a",
"ETH": "\xd0",
"ETH;": "\xd0",
"Eacute": "\xc9",
"Eacute;": "\xc9",
"Ecaron;": "\u011a",
"Ecirc": "\xca",
"Ecirc;": "\xca",
"Ecy;": "\u042d",
"Edot;": "\u0116",
"Efr;": "\U0001d508",
"Egrave": "\xc8",
"Egrave;": "\xc8",
"Element;": "\u2208",
"Emacr;": "\u0112",
"EmptySmallSquare;": "\u25fb",
"EmptyVerySmallSquare;": "\u25ab",
"Eogon;": "\u0118",
"Eopf;": "\U0001d53c",
"Epsilon;": "\u0395",
"Equal;": "\u2a75",
"EqualTilde;": "\u2242",
"Equilibrium;": "\u21cc",
"Escr;": "\u2130",
"Esim;": "\u2a73",
"Eta;": "\u0397",
"Euml": "\xcb",
"Euml;": "\xcb",
"Exists;": "\u2203",
"ExponentialE;": "\u2147",
"Fcy;": "\u0424",
"Ffr;": "\U0001d509",
"FilledSmallSquare;": "\u25fc",
"FilledVerySmallSquare;": "\u25aa",
"Fopf;": "\U0001d53d",
"ForAll;": "\u2200",
"Fouriertrf;": "\u2131",
"Fscr;": "\u2131",
"GJcy;": "\u0403",
"GT": ">",
"GT;": ">",
"Gamma;": "\u0393",
"Gammad;": "\u03dc",
"Gbreve;": "\u011e",
"Gcedil;": "\u0122",
"Gcirc;": "\u011c",
"Gcy;": "\u0413",
"Gdot;": "\u0120",
"Gfr;": "\U0001d50a",
"Gg;": "\u22d9",
"Gopf;": "\U0001d53e",
"GreaterEqual;": "\u2265",
"GreaterEqualLess;": "\u22db",
"GreaterFullEqual;": "\u2267",
"GreaterGreater;": "\u2aa2",
"GreaterLess;": "\u2277",
"GreaterSlantEqual;": "\u2a7e",
"GreaterTilde;": "\u2273",
"Gscr;": "\U0001d4a2",
"Gt;": "\u226b",
"HARDcy;": "\u042a",
"Hacek;": "\u02c7",
"Hat;": "^",
"Hcirc;": "\u0124",
"Hfr;": "\u210c",
"HilbertSpace;": "\u210b",
"Hopf;": "\u210d",
"HorizontalLine;": "\u2500",
"Hscr;": "\u210b",
"Hstrok;": "\u0126",
"HumpDownHump;": "\u224e",
"HumpEqual;": "\u224f",
"IEcy;": "\u0415",
"IJlig;": "\u0132",
"IOcy;": "\u0401",
"Iacute": "\xcd",
"Iacute;": "\xcd",
"Icirc": "\xce",
"Icirc;": "\xce",
"Icy;": "\u0418",
"Idot;": "\u0130",
"Ifr;": "\u2111",
"Igrave": "\xcc",
"Igrave;": "\xcc",
"Im;": "\u2111",
"Imacr;": "\u012a",
"ImaginaryI;": "\u2148",
"Implies;": "\u21d2",
"Int;": "\u222c",
"Integral;": "\u222b",
"Intersection;": "\u22c2",
"InvisibleComma;": "\u2063",
"InvisibleTimes;": "\u2062",
"Iogon;": "\u012e",
"Iopf;": "\U0001d540",
"Iota;": "\u0399",
"Iscr;": "\u2110",
"Itilde;": "\u0128",
"Iukcy;": "\u0406",
"Iuml": "\xcf",
"Iuml;": "\xcf",
"Jcirc;": "\u0134",
"Jcy;": "\u0419",
"Jfr;": "\U0001d50d",
"Jopf;": "\U0001d541",
"Jscr;": "\U0001d4a5",
"Jsercy;": "\u0408",
"Jukcy;": "\u0404",
"KHcy;": "\u0425",
"KJcy;": "\u040c",
"Kappa;": "\u039a",
"Kcedil;": "\u0136",
"Kcy;": "\u041a",
"Kfr;": "\U0001d50e",
"Kopf;": "\U0001d542",
"Kscr;": "\U0001d4a6",
"LJcy;": "\u0409",
"LT": "<",
"LT;": "<",
"Lacute;": "\u0139",
"Lambda;": "\u039b",
"Lang;": "\u27ea",
"Laplacetrf;": "\u2112",
"Larr;": "\u219e",
"Lcaron;": "\u013d",
"Lcedil;": "\u013b",
"Lcy;": "\u041b",
"LeftAngleBracket;": "\u27e8",
"LeftArrow;": "\u2190",
"LeftArrowBar;": "\u21e4",
"LeftArrowRightArrow;": "\u21c6",
"LeftCeiling;": "\u2308",
"LeftDoubleBracket;": "\u27e6",
"LeftDownTeeVector;": "\u2961",
"LeftDownVector;": "\u21c3",
"LeftDownVectorBar;": "\u2959",
"LeftFloor;": "\u230a",
"LeftRightArrow;": "\u2194",
"LeftRightVector;": "\u294e",
"LeftTee;": "\u22a3",
"LeftTeeArrow;": "\u21a4",
"LeftTeeVector;": "\u295a",
"LeftTriangle;": "\u22b2",
"LeftTriangleBar;": "\u29cf",
"LeftTriangleEqual;": "\u22b4",
"LeftUpDownVector;": "\u2951",
"LeftUpTeeVector;": "\u2960",
"LeftUpVector;": "\u21bf",
"LeftUpVectorBar;": "\u2958",
"LeftVector;": "\u21bc",
"LeftVectorBar;": "\u2952",
"Leftarrow;": "\u21d0",
"Leftrightarrow;": "\u21d4",
"LessEqualGreater;": "\u22da",
"LessFullEqual;": "\u2266",
"LessGreater;": "\u2276",
"LessLess;": "\u2aa1",
"LessSlantEqual;": "\u2a7d",
"LessTilde;": "\u2272",
"Lfr;": "\U0001d50f",
"Ll;": "\u22d8",
"Lleftarrow;": "\u21da",
"Lmidot;": "\u013f",
"LongLeftArrow;": "\u27f5",
"LongLeftRightArrow;": "\u27f7",
"LongRightArrow;": "\u27f6",
"Longleftarrow;": "\u27f8",
"Longleftrightarrow;": "\u27fa",
"Longrightarrow;": "\u27f9",
"Lopf;": "\U0001d543",
"LowerLeftArrow;": "\u2199",
"LowerRightArrow;": "\u2198",
"Lscr;": "\u2112",
"Lsh;": "\u21b0",
"Lstrok;": "\u0141",
"Lt;": "\u226a",
"Map;": "\u2905",
"Mcy;": "\u041c",
"MediumSpace;": "\u205f",
"Mellintrf;": "\u2133",
"Mfr;": "\U0001d510",
"MinusPlus;": "\u2213",
"Mopf;": "\U0001d544",
"Mscr;": "\u2133",
"Mu;": "\u039c",
"NJcy;": "\u040a",
"Nacute;": "\u0143",
"Ncaron;": "\u0147",
"Ncedil;": "\u0145",
"Ncy;": "\u041d",
"NegativeMediumSpace;": "\u200b",
"NegativeThickSpace;": "\u200b",
"NegativeThinSpace;": "\u200b",
"NegativeVeryThinSpace;": "\u200b",
"NestedGreaterGreater;": "\u226b",
"NestedLessLess;": "\u226a",
"NewLine;": "\n",
"Nfr;": "\U0001d511",
"NoBreak;": "\u2060",
"NonBreakingSpace;": "\xa0",
"Nopf;": "\u2115",
"Not;": "\u2aec",
"NotCongruent;": "\u2262",
"NotCupCap;": "\u226d",
"NotDoubleVerticalBar;": "\u2226",
"NotElement;": "\u2209",
"NotEqual;": "\u2260",
"NotEqualTilde;": "\u2242\u0338",
"NotExists;": "\u2204",
"NotGreater;": "\u226f",
"NotGreaterEqual;": "\u2271",
"NotGreaterFullEqual;": "\u2267\u0338",
"NotGreaterGreater;": "\u226b\u0338",
"NotGreaterLess;": "\u2279",
"NotGreaterSlantEqual;": "\u2a7e\u0338",
"NotGreaterTilde;": "\u2275",
"NotHumpDownHump;": "\u224e\u0338",
"NotHumpEqual;": "\u224f\u0338",
"NotLeftTriangle;": "\u22ea",
"NotLeftTriangleBar;": "\u29cf\u0338",
"NotLeftTriangleEqual;": "\u22ec",
"NotLess;": "\u226e",
"NotLessEqual;": "\u2270",
"NotLessGreater;": "\u2278",
"NotLessLess;": "\u226a\u0338",
"NotLessSlantEqual;": "\u2a7d\u0338",
"NotLessTilde;": "\u2274",
"NotNestedGreaterGreater;": "\u2aa2\u0338",
"NotNestedLessLess;": "\u2aa1\u0338",
"NotPrecedes;": "\u2280",
"NotPrecedesEqual;": "\u2aaf\u0338",
"NotPrecedesSlantEqual;": "\u22e0",
"NotReverseElement;": "\u220c",
"NotRightTriangle;": "\u22eb",
"NotRightTriangleBar;": "\u29d0\u0338",
"NotRightTriangleEqual;": "\u22ed",
"NotSquareSubset;": "\u228f\u0338",
"NotSquareSubsetEqual;": "\u22e2",
"NotSquareSuperset;": "\u2290\u0338",
"NotSquareSupersetEqual;": "\u22e3",
"NotSubset;": "\u2282\u20d2",
"NotSubsetEqual;": "\u2288",
"NotSucceeds;": "\u2281",
"NotSucceedsEqual;": "\u2ab0\u0338",
"NotSucceedsSlantEqual;": "\u22e1",
"NotSucceedsTilde;": "\u227f\u0338",
"NotSuperset;": "\u2283\u20d2",
"NotSupersetEqual;": "\u2289",
"NotTilde;": "\u2241",
"NotTildeEqual;": "\u2244",
"NotTildeFullEqual;": "\u2247",
"NotTildeTilde;": "\u2249",
"NotVerticalBar;": "\u2224",
"Nscr;": "\U0001d4a9",
"Ntilde": "\xd1",
"Ntilde;": "\xd1",
"Nu;": "\u039d",
"OElig;": "\u0152",
"Oacute": "\xd3",
"Oacute;": "\xd3",
"Ocirc": "\xd4",
"Ocirc;": "\xd4",
"Ocy;": "\u041e",
"Odblac;": "\u0150",
"Ofr;": "\U0001d512",
"Ograve": "\xd2",
"Ograve;": "\xd2",
"Omacr;": "\u014c",
"Omega;": "\u03a9",
"Omicron;": "\u039f",
"Oopf;": "\U0001d546",
"OpenCurlyDoubleQuote;": "\u201c",
"OpenCurlyQuote;": "\u2018",
"Or;": "\u2a54",
"Oscr;": "\U0001d4aa",
"Oslash": "\xd8",
"Oslash;": "\xd8",
"Otilde": "\xd5",
"Otilde;": "\xd5",
"Otimes;": "\u2a37",
"Ouml": "\xd6",
"Ouml;": "\xd6",
"OverBar;": "\u203e",
"OverBrace;": "\u23de",
"OverBracket;": "\u23b4",
"OverParenthesis;": "\u23dc",
"PartialD;": "\u2202",
"Pcy;": "\u041f",
"Pfr;": "\U0001d513",
"Phi;": "\u03a6",
"Pi;": "\u03a0",
"PlusMinus;": "\xb1",
"Poincareplane;": "\u210c",
"Popf;": "\u2119",
"Pr;": "\u2abb",
"Precedes;": "\u227a",
"PrecedesEqual;": "\u2aaf",
"PrecedesSlantEqual;": "\u227c",
"PrecedesTilde;": "\u227e",
"Prime;": "\u2033",
"Product;": "\u220f",
"Proportion;": "\u2237",
"Proportional;": "\u221d",
"Pscr;": "\U0001d4ab",
"Psi;": "\u03a8",
"QUOT": "\"",
"QUOT;": "\"",
"Qfr;": "\U0001d514",
"Qopf;": "\u211a",
"Qscr;": "\U0001d4ac",
"RBarr;": "\u2910",
"REG": "\xae",
"REG;": "\xae",
"Racute;": "\u0154",
"Rang;": "\u27eb",
"Rarr;": "\u21a0",
"Rarrtl;": "\u2916",
"Rcaron;": "\u0158",
"Rcedil;": "\u0156",
"Rcy;": "\u0420",
"Re;": "\u211c",
"ReverseElement;": "\u220b",
"ReverseEquilibrium;": "\u21cb",
"ReverseUpEquilibrium;": "\u296f",
"Rfr;": "\u211c",
"Rho;": "\u03a1",
"RightAngleBracket;": "\u27e9",
"RightArrow;": "\u2192",
"RightArrowBar;": "\u21e5",
"RightArrowLeftArrow;": "\u21c4",
"RightCeiling;": "\u2309",
"RightDoubleBracket;": "\u27e7",
"RightDownTeeVector;": "\u295d",
"RightDownVector;": "\u21c2",
"RightDownVectorBar;": "\u2955",
"RightFloor;": "\u230b",
"RightTee;": "\u22a2",
"RightTeeArrow;": "\u21a6",
"RightTeeVector;": "\u295b",
"RightTriangle;": "\u22b3",
"RightTriangleBar;": "\u29d0",
"RightTriangleEqual;": "\u22b5",
"RightUpDownVector;": "\u294f",
"RightUpTeeVector;": "\u295c",
"RightUpVector;": "\u21be",
"RightUpVectorBar;": "\u2954",
"RightVector;": "\u21c0",
"RightVectorBar;": "\u2953",
"Rightarrow;": "\u21d2",
"Ropf;": "\u211d",
"RoundImplies;": "\u2970",
"Rrightarrow;": "\u21db",
"Rscr;": "\u211b",
"Rsh;": "\u21b1",
"RuleDelayed;": "\u29f4",
"SHCHcy;": "\u0429",
"SHcy;": "\u0428",
"SOFTcy;": "\u042c",
"Sacute;": "\u015a",
"Sc;": "\u2abc",
"Scaron;": "\u0160",
"Scedil;": "\u015e",
"Scirc;": "\u015c",
"Scy;": "\u0421",
"Sfr;": "\U0001d516",
"ShortDownArrow;": "\u2193",
"ShortLeftArrow;": "\u2190",
"ShortRightArrow;": "\u2192",
"ShortUpArrow;": "\u2191",
"Sigma;": "\u03a3",
"SmallCircle;": "\u2218",
"Sopf;": "\U0001d54a",
"Sqrt;": "\u221a",
"Square;": "\u25a1",
"SquareIntersection;": "\u2293",
"SquareSubset;": "\u228f",
"SquareSubsetEqual;": "\u2291",
"SquareSuperset;": "\u2290",
"SquareSupersetEqual;": "\u2292",
"SquareUnion;": "\u2294",
"Sscr;": "\U0001d4ae",
"Star;": "\u22c6",
"Sub;": "\u22d0",
"Subset;": "\u22d0",
"SubsetEqual;": "\u2286",
"Succeeds;": "\u227b",
"SucceedsEqual;": "\u2ab0",
"SucceedsSlantEqual;": "\u227d",
"SucceedsTilde;": "\u227f",
"SuchThat;": "\u220b",
"Sum;": "\u2211",
"Sup;": "\u22d1",
"Superset;": "\u2283",
"SupersetEqual;": "\u2287",
"Supset;": "\u22d1",
"THORN": "\xde",
"THORN;": "\xde",
"TRADE;": "\u2122",
"TSHcy;": "\u040b",
"TScy;": "\u0426",
"Tab;": "\t",
"Tau;": "\u03a4",
"Tcaron;": "\u0164",
"Tcedil;": "\u0162",
"Tcy;": "\u0422",
"Tfr;": "\U0001d517",
"Therefore;": "\u2234",
"Theta;": "\u0398",
"ThickSpace;": "\u205f\u200a",
"ThinSpace;": "\u2009",
"Tilde;": "\u223c",
"TildeEqual;": "\u2243",
"TildeFullEqual;": "\u2245",
"TildeTilde;": "\u2248",
"Topf;": "\U0001d54b",
"TripleDot;": "\u20db",
"Tscr;": "\U0001d4af",
"Tstrok;": "\u0166",
"Uacute": "\xda",
"Uacute;": "\xda",
"Uarr;": "\u219f",
"Uarrocir;": "\u2949",
"Ubrcy;": "\u040e",
"Ubreve;": "\u016c",
"Ucirc": "\xdb",
"Ucirc;": "\xdb",
"Ucy;": "\u0423",
"Udblac;": "\u0170",
"Ufr;": "\U0001d518",
"Ugrave": "\xd9",
"Ugrave;": "\xd9",
"Umacr;": "\u016a",
"UnderBar;": "_",
"UnderBrace;": "\u23df",
"UnderBracket;": "\u23b5",
"UnderParenthesis;": "\u23dd",
"Union;": "\u22c3",
"UnionPlus;": "\u228e",
"Uogon;": "\u0172",
"Uopf;": "\U0001d54c",
"UpArrow;": "\u2191",
"UpArrowBar;": "\u2912",
"UpArrowDownArrow;": "\u21c5",
"UpDownArrow;": "\u2195",
"UpEquilibrium;": "\u296e",
"UpTee;": "\u22a5",
"UpTeeArrow;": "\u21a5",
"Uparrow;": "\u21d1",
"Updownarrow;": "\u21d5",
"UpperLeftArrow;": "\u2196",
"UpperRightArrow;": "\u2197",
"Upsi;": "\u03d2",
"Upsilon;": "\u03a5",
"Uring;": "\u016e",
"Uscr;": "\U0001d4b0",
"Utilde;": "\u0168",
"Uuml": "\xdc",
"Uuml;": "\xdc",
"VDash;": "\u22ab",
"Vbar;": "\u2aeb",
"Vcy;": "\u0412",
"Vdash;": "\u22a9",
"Vdashl;": "\u2ae6",
"Vee;": "\u22c1",
"Verbar;": "\u2016",
"Vert;": "\u2016",
"VerticalBar;": "\u2223",
"VerticalLine;": "|",
"VerticalSeparator;": "\u2758",
"VerticalTilde;": "\u2240",
"VeryThinSpace;": "\u200a",
"Vfr;": "\U0001d519",
"Vopf;": "\U0001d54d",
"Vscr;": "\U0001d4b1",
"Vvdash;": "\u22aa",
"Wcirc;": "\u0174",
"Wedge;": "\u22c0",
"Wfr;": "\U0001d51a",
"Wopf;": "\U0001d54e",
"Wscr;": "\U0001d4b2",
"Xfr;": "\U0001d51b",
"Xi;": "\u039e",
"Xopf;": "\U0001d54f",
"Xscr;": "\U0001d4b3",
"YAcy;": "\u042f",
"YIcy;": "\u0407",
"YUcy;": "\u042e",
"Yacute": "\xdd",
"Yacute;": "\xdd",
"Ycirc;": "\u0176",
"Ycy;": "\u042b",
"Yfr;": "\U0001d51c",
"Yopf;": "\U0001d550",
"Yscr;": "\U0001d4b4",
"Yuml;": "\u0178",
"ZHcy;": "\u0416",
"Zacute;": "\u0179",
"Zcaron;": "\u017d",
"Zcy;": "\u0417",
"Zdot;": "\u017b",
"ZeroWidthSpace;": "\u200b",
"Zeta;": "\u0396",
"Zfr;": "\u2128",
"Zopf;": "\u2124",
"Zscr;": "\U0001d4b5",
"aacute": "\xe1",
"aacute;": "\xe1",
"abreve;": "\u0103",
"ac;": "\u223e",
"acE;": "\u223e\u0333",
"acd;": "\u223f",
"acirc": "\xe2",
"acirc;": "\xe2",
"acute": "\xb4",
"acute;": "\xb4",
"acy;": "\u0430",
"aelig": "\xe6",
"aelig;": "\xe6",
"af;": "\u2061",
"afr;": "\U0001d51e",
"agrave": "\xe0",
"agrave;": "\xe0",
"alefsym;": "\u2135",
"aleph;": "\u2135",
"alpha;": "\u03b1",
"amacr;": "\u0101",
"amalg;": "\u2a3f",
"amp": "&",
"amp;": "&",
"and;": "\u2227",
"andand;": "\u2a55",
"andd;": "\u2a5c",
"andslope;": "\u2a58",
"andv;": "\u2a5a",
"ang;": "\u2220",
"ange;": "\u29a4",
"angle;": "\u2220",
"angmsd;": "\u2221",
"angmsdaa;": "\u29a8",
"angmsdab;": "\u29a9",
"angmsdac;": "\u29aa",
"angmsdad;": "\u29ab",
"angmsdae;": "\u29ac",
"angmsdaf;": "\u29ad",
"angmsdag;": "\u29ae",
"angmsdah;": "\u29af",
"angrt;": "\u221f",
"angrtvb;": "\u22be",
"angrtvbd;": "\u299d",
"angsph;": "\u2222",
"angst;": "\xc5",
"angzarr;": "\u237c",
"aogon;": "\u0105",
"aopf;": "\U0001d552",
"ap;": "\u2248",
"apE;": "\u2a70",
"apacir;": "\u2a6f",
"ape;": "\u224a",
"apid;": "\u224b",
"apos;": "'",
"approx;": "\u2248",
"approxeq;": "\u224a",
"aring": "\xe5",
"aring;": "\xe5",
"ascr;": "\U0001d4b6",
"ast;": "*",
"asymp;": "\u2248",
"asympeq;": "\u224d",
"atilde": "\xe3",
"atilde;": "\xe3",
"auml": "\xe4",
"auml;": "\xe4",
"awconint;": "\u2233",
"awint;": "\u2a11",
"bNot;": "\u2aed",
"backcong;": "\u224c",
"backepsilon;": "\u03f6",
"backprime;": "\u2035",
"backsim;": "\u223d",
"backsimeq;": "\u22cd",
"barvee;": "\u22bd",
"barwed;": "\u2305",
"barwedge;": "\u2305",
"bbrk;": "\u23b5",
"bbrktbrk;": "\u23b6",
"bcong;": "\u224c",
"bcy;": "\u0431",
"bdquo;": "\u201e",
"becaus;": "\u2235",
"because;": "\u2235",
"bemptyv;": "\u29b0",
"bepsi;": "\u03f6",
"bernou;": "\u212c",
"beta;": "\u03b2",
"beth;": "\u2136",
"between;": "\u226c",
"bfr;": "\U0001d51f",
"bigcap;": "\u22c2",
"bigcirc;": "\u25ef",
"bigcup;": "\u22c3",
"bigodot;": "\u2a00",
"bigoplus;": "\u2a01",
"bigotimes;": "\u2a02",
"bigsqcup;": "\u2a06",
"bigstar;": "\u2605",
"bigtriangledown;": "\u25bd",
"bigtriangleup;": "\u25b3",
"biguplus;": "\u2a04",
"bigvee;": "\u22c1",
"bigwedge;": "\u22c0",
"bkarow;": "\u290d",
"blacklozenge;": "\u29eb",
"blacksquare;": "\u25aa",
"blacktriangle;": "\u25b4",
"blacktriangledown;": "\u25be",
"blacktriangleleft;": "\u25c2",
"blacktriangleright;": "\u25b8",
"blank;": "\u2423",
"blk12;": "\u2592",
"blk14;": "\u2591",
"blk34;": "\u2593",
"block;": "\u2588",
"bne;": "=\u20e5",
"bnequiv;": "\u2261\u20e5",
"bnot;": "\u2310",
"bopf;": "\U0001d553",
"bot;": "\u22a5",
"bottom;": "\u22a5",
"bowtie;": "\u22c8",
"boxDL;": "\u2557",
"boxDR;": "\u2554",
"boxDl;": "\u2556",
"boxDr;": "\u2553",
"boxH;": "\u2550",
"boxHD;": "\u2566",
"boxHU;": "\u2569",
"boxHd;": "\u2564",
"boxHu;": "\u2567",
"boxUL;": "\u255d",
"boxUR;": "\u255a",
"boxUl;": "\u255c",
"boxUr;": "\u2559",
"boxV;": "\u2551",
"boxVH;": "\u256c",
"boxVL;": "\u2563",
"boxVR;": "\u2560",
"boxVh;": "\u256b",
"boxVl;": "\u2562",
"boxVr;": "\u255f",
"boxbox;": "\u29c9",
"boxdL;": "\u2555",
"boxdR;": "\u2552",
"boxdl;": "\u2510",
"boxdr;": "\u250c",
"boxh;": "\u2500",
"boxhD;": "\u2565",
"boxhU;": "\u2568",
"boxhd;": "\u252c",
"boxhu;": "\u2534",
"boxminus;": "\u229f",
"boxplus;": "\u229e",
"boxtimes;": "\u22a0",
"boxuL;": "\u255b",
"boxuR;": "\u2558",
"boxul;": "\u2518",
"boxur;": "\u2514",
"boxv;": "\u2502",
"boxvH;": "\u256a",
"boxvL;": "\u2561",
"boxvR;": "\u255e",
"boxvh;": "\u253c",
"boxvl;": "\u2524",
"boxvr;": "\u251c",
"bprime;": "\u2035",
"breve;": "\u02d8",
"brvbar": "\xa6",
"brvbar;": "\xa6",
"bscr;": "\U0001d4b7",
"bsemi;": "\u204f",
"bsim;": "\u223d",
"bsime;": "\u22cd",
"bsol;": "\\",
"bsolb;": "\u29c5",
"bsolhsub;": "\u27c8",
"bull;": "\u2022",
"bullet;": "\u2022",
"bump;": "\u224e",
"bumpE;": "\u2aae",
"bumpe;": "\u224f",
"bumpeq;": "\u224f",
"cacute;": "\u0107",
"cap;": "\u2229",
"capand;": "\u2a44",
"capbrcup;": "\u2a49",
"capcap;": "\u2a4b",
"capcup;": "\u2a47",
"capdot;": "\u2a40",
"caps;": "\u2229\ufe00",
"caret;": "\u2041",
"caron;": "\u02c7",
"ccaps;": "\u2a4d",
"ccaron;": "\u010d",
"ccedil": "\xe7",
"ccedil;": "\xe7",
"ccirc;": "\u0109",
"ccups;": "\u2a4c",
"ccupssm;": "\u2a50",
"cdot;": "\u010b",
"cedil": "\xb8",
"cedil;": "\xb8",
"cemptyv;": "\u29b2",
"cent": "\xa2",
"cent;": "\xa2",
"centerdot;": "\xb7",
"cfr;": "\U0001d520",
"chcy;": "\u0447",
"check;": "\u2713",
"checkmark;": "\u2713",
"chi;": "\u03c7",
"cir;": "\u25cb",
"cirE;": "\u29c3",
"circ;": "\u02c6",
"circeq;": "\u2257",
"circlearrowleft;": "\u21ba",
"circlearrowright;": "\u21bb",
"circledR;": "\xae",
"circledS;": "\u24c8",
"circledast;": "\u229b",
"circledcirc;": "\u229a",
"circleddash;": "\u229d",
"cire;": "\u2257",
"cirfnint;": "\u2a10",
"cirmid;": "\u2aef",
"cirscir;": "\u29c2",
"clubs;": "\u2663",
"clubsuit;": "\u2663",
"colon;": ":",
"colone;": "\u2254",
"coloneq;": "\u2254",
"comma;": ",",
"commat;": "@",
"comp;": "\u2201",
"compfn;": "\u2218",
"complement;": "\u2201",
"complexes;": "\u2102",
"cong;": "\u2245",
"congdot;": "\u2a6d",
"conint;": "\u222e",
"copf;": "\U0001d554",
"coprod;": "\u2210",
"copy": "\xa9",
"copy;": "\xa9",
"copysr;": "\u2117",
"crarr;": "\u21b5",
"cross;": "\u2717",
"cscr;": "\U0001d4b8",
"csub;": "\u2acf",
"csube;": "\u2ad1",
"csup;": "\u2ad0",
"csupe;": "\u2ad2",
"ctdot;": "\u22ef",
"cudarrl;": "\u2938",
"cudarrr;": "\u2935",
"cuepr;": "\u22de",
"cuesc;": "\u22df",
"cularr;": "\u21b6",
"cularrp;": "\u293d",
"cup;": "\u222a",
"cupbrcap;": "\u2a48",
"cupcap;": "\u2a46",
"cupcup;": "\u2a4a",
"cupdot;": "\u228d",
"cupor;": "\u2a45",
"cups;": "\u222a\ufe00",
"curarr;": "\u21b7",
"curarrm;": "\u293c",
"curlyeqprec;": "\u22de",
"curlyeqsucc;": "\u22df",
"curlyvee;": "\u22ce",
"curlywedge;": "\u22cf",
"curren": "\xa4",
"curren;": "\xa4",
"curvearrowleft;": "\u21b6",
"curvearrowright;": "\u21b7",
"cuvee;": "\u22ce",
"cuwed;": "\u22cf",
"cwconint;": "\u2232",
"cwint;": "\u2231",
"cylcty;": "\u232d",
"dArr;": "\u21d3",
"dHar;": "\u2965",
"dagger;": "\u2020",
"daleth;": "\u2138",
"darr;": "\u2193",
"dash;": "\u2010",
"dashv;": "\u22a3",
"dbkarow;": "\u290f",
"dblac;": "\u02dd",
"dcaron;": "\u010f",
"dcy;": "\u0434",
"dd;": "\u2146",
"ddagger;": "\u2021",
"ddarr;": "\u21ca",
"ddotseq;": "\u2a77",
"deg": "\xb0",
"deg;": "\xb0",
"delta;": "\u03b4",
"demptyv;": "\u29b1",
"dfisht;": "\u297f",
"dfr;": "\U0001d521",
"dharl;": "\u21c3",
"dharr;": "\u21c2",
"diam;": "\u22c4",
"diamond;": "\u22c4",
"diamondsuit;": "\u2666",
"diams;": "\u2666",
"die;": "\xa8",
"digamma;": "\u03dd",
"disin;": "\u22f2",
"div;": "\xf7",
"divide": "\xf7",
"divide;": "\xf7",
"divideontimes;": "\u22c7",
"divonx;": "\u22c7",
"djcy;": "\u0452",
"dlcorn;": "\u231e",
"dlcrop;": "\u230d",
"dollar;": "$",
"dopf;": "\U0001d555",
"dot;": "\u02d9",
"doteq;": "\u2250",
"doteqdot;": "\u2251",
"dotminus;": "\u2238",
"dotplus;": "\u2214",
"dotsquare;": "\u22a1",
"doublebarwedge;": "\u2306",
"downarrow;": "\u2193",
"downdownarrows;": "\u21ca",
"downharpoonleft;": "\u21c3",
"downharpoonright;": "\u21c2",
"drbkarow;": "\u2910",
"drcorn;": "\u231f",
"drcrop;": "\u230c",
"dscr;": "\U0001d4b9",
"dscy;": "\u0455",
"dsol;": "\u29f6",
"dstrok;": "\u0111",
"dtdot;": "\u22f1",
"dtri;": "\u25bf",
"dtrif;": "\u25be",
"duarr;": "\u21f5",
"duhar;": "\u296f",
"dwangle;": "\u29a6",
"dzcy;": "\u045f",
"dzigrarr;": "\u27ff",
"eDDot;": "\u2a77",
"eDot;": "\u2251",
"eacute": "\xe9",
"eacute;": "\xe9",
"easter;": "\u2a6e",
"ecaron;": "\u011b",
"ecir;": "\u2256",
"ecirc": "\xea",
"ecirc;": "\xea",
"ecolon;": "\u2255",
"ecy;": "\u044d",
"edot;": "\u0117",
"ee;": "\u2147",
"efDot;": "\u2252",
"efr;": "\U0001d522",
"eg;": "\u2a9a",
"egrave": "\xe8",
"egrave;": "\xe8",
"egs;": "\u2a96",
"egsdot;": "\u2a98",
"el;": "\u2a99",
"elinters;": "\u23e7",
"ell;": "\u2113",
"els;": "\u2a95",
"elsdot;": "\u2a97",
"emacr;": "\u0113",
"empty;": "\u2205",
"emptyset;": "\u2205",
"emptyv;": "\u2205",
"emsp13;": "\u2004",
"emsp14;": "\u2005",
"emsp;": "\u2003",
"eng;": "\u014b",
"ensp;": "\u2002",
"eogon;": "\u0119",
"eopf;": "\U0001d556",
"epar;": "\u22d5",
"eparsl;": "\u29e3",
"eplus;": "\u2a71",
"epsi;": "\u03b5",
"epsilon;": "\u03b5",
"epsiv;": "\u03f5",
"eqcirc;": "\u2256",
"eqcolon;": "\u2255",
"eqsim;": "\u2242",
"eqslantgtr;": "\u2a96",
"eqslantless;": "\u2a95",
"equals;": "=",
"equest;": "\u225f",
"equiv;": "\u2261",
"equivDD;": "\u2a78",
"eqvparsl;": "\u29e5",
"erDot;": "\u2253",
"erarr;": "\u2971",
"escr;": "\u212f",
"esdot;": "\u2250",
"esim;": "\u2242",
"eta;": "\u03b7",
"eth": "\xf0",
"eth;": "\xf0",
"euml": "\xeb",
"euml;": "\xeb",
"euro;": "\u20ac",
"excl;": "!",
"exist;": "\u2203",
"expectation;": "\u2130",
"exponentiale;": "\u2147",
"fallingdotseq;": "\u2252",
"fcy;": "\u0444",
"female;": "\u2640",
"ffilig;": "\ufb03",
"fflig;": "\ufb00",
"ffllig;": "\ufb04",
"ffr;": "\U0001d523",
"filig;": "\ufb01",
"fjlig;": "fj",
"flat;": "\u266d",
"fllig;": "\ufb02",
"fltns;": "\u25b1",
"fnof;": "\u0192",
"fopf;": "\U0001d557",
"forall;": "\u2200",
"fork;": "\u22d4",
"forkv;": "\u2ad9",
"fpartint;": "\u2a0d",
"frac12": "\xbd",
"frac12;": "\xbd",
"frac13;": "\u2153",
"frac14": "\xbc",
"frac14;": "\xbc",
"frac15;": "\u2155",
"frac16;": "\u2159",
"frac18;": "\u215b",
"frac23;": "\u2154",
"frac25;": "\u2156",
"frac34": "\xbe",
"frac34;": "\xbe",
"frac35;": "\u2157",
"frac38;": "\u215c",
"frac45;": "\u2158",
"frac56;": "\u215a",
"frac58;": "\u215d",
"frac78;": "\u215e",
"frasl;": "\u2044",
"frown;": "\u2322",
"fscr;": "\U0001d4bb",
"gE;": "\u2267",
"gEl;": "\u2a8c",
"gacute;": "\u01f5",
"gamma;": "\u03b3",
"gammad;": "\u03dd",
"gap;": "\u2a86",
"gbreve;": "\u011f",
"gcirc;": "\u011d",
"gcy;": "\u0433",
"gdot;": "\u0121",
"ge;": "\u2265",
"gel;": "\u22db",
"geq;": "\u2265",
"geqq;": "\u2267",
"geqslant;": "\u2a7e",
"ges;": "\u2a7e",
"gescc;": "\u2aa9",
"gesdot;": "\u2a80",
"gesdoto;": "\u2a82",
"gesdotol;": "\u2a84",
"gesl;": "\u22db\ufe00",
"gesles;": "\u2a94",
"gfr;": "\U0001d524",
"gg;": "\u226b",
"ggg;": "\u22d9",
"gimel;": "\u2137",
"gjcy;": "\u0453",
"gl;": "\u2277",
"glE;": "\u2a92",
"gla;": "\u2aa5",
"glj;": "\u2aa4",
"gnE;": "\u2269",
"gnap;": "\u2a8a",
"gnapprox;": "\u2a8a",
"gne;": "\u2a88",
"gneq;": "\u2a88",
"gneqq;": "\u2269",
"gnsim;": "\u22e7",
"gopf;": "\U0001d558",
"grave;": "`",
"gscr;": "\u210a",
"gsim;": "\u2273",
"gsime;": "\u2a8e",
"gsiml;": "\u2a90",
"gt": ">",
"gt;": ">",
"gtcc;": "\u2aa7",
"gtcir;": "\u2a7a",
"gtdot;": "\u22d7",
"gtlPar;": "\u2995",
"gtquest;": "\u2a7c",
"gtrapprox;": "\u2a86",
"gtrarr;": "\u2978",
"gtrdot;": "\u22d7",
"gtreqless;": "\u22db",
"gtreqqless;": "\u2a8c",
"gtrless;": "\u2277",
"gtrsim;": "\u2273",
"gvertneqq;": "\u2269\ufe00",
"gvnE;": "\u2269\ufe00",
"hArr;": "\u21d4",
"hairsp;": "\u200a",
"half;": "\xbd",
"hamilt;": "\u210b",
"hardcy;": "\u044a",
"harr;": "\u2194",
"harrcir;": "\u2948",
"harrw;": "\u21ad",
"hbar;": "\u210f",
"hcirc;": "\u0125",
"hearts;": "\u2665",
"heartsuit;": "\u2665",
"hellip;": "\u2026",
"hercon;": "\u22b9",
"hfr;": "\U0001d525",
"hksearow;": "\u2925",
"hkswarow;": "\u2926",
"hoarr;": "\u21ff",
"homtht;": "\u223b",
"hookleftarrow;": "\u21a9",
"hookrightarrow;": "\u21aa",
"hopf;": "\U0001d559",
"horbar;": "\u2015",
"hscr;": "\U0001d4bd",
"hslash;": "\u210f",
"hstrok;": "\u0127",
"hybull;": "\u2043",
"hyphen;": "\u2010",
"iacute": "\xed",
"iacute;": "\xed",
"ic;": "\u2063",
"icirc": "\xee",
"icirc;": "\xee",
"icy;": "\u0438",
"iecy;": "\u0435",
"iexcl": "\xa1",
"iexcl;": "\xa1",
"iff;": "\u21d4",
"ifr;": "\U0001d526",
"igrave": "\xec",
"igrave;": "\xec",
"ii;": "\u2148",
"iiiint;": "\u2a0c",
"iiint;": "\u222d",
"iinfin;": "\u29dc",
"iiota;": "\u2129",
"ijlig;": "\u0133",
"imacr;": "\u012b",
"image;": "\u2111",
"imagline;": "\u2110",
"imagpart;": "\u2111",
"imath;": "\u0131",
"imof;": "\u22b7",
"imped;": "\u01b5",
"in;": "\u2208",
"incare;": "\u2105",
"infin;": "\u221e",
"infintie;": "\u29dd",
"inodot;": "\u0131",
"int;": "\u222b",
"intcal;": "\u22ba",
"integers;": "\u2124",
"intercal;": "\u22ba",
"intlarhk;": "\u2a17",
"intprod;": "\u2a3c",
"iocy;": "\u0451",
"iogon;": "\u012f",
"iopf;": "\U0001d55a",
"iota;": "\u03b9",
"iprod;": "\u2a3c",
"iquest": "\xbf",
"iquest;": "\xbf",
"iscr;": "\U0001d4be",
"isin;": "\u2208",
"isinE;": "\u22f9",
"isindot;": "\u22f5",
"isins;": "\u22f4",
"isinsv;": "\u22f3",
"isinv;": "\u2208",
"it;": "\u2062",
"itilde;": "\u0129",
"iukcy;": "\u0456",
"iuml": "\xef",
"iuml;": "\xef",
"jcirc;": "\u0135",
"jcy;": "\u0439",
"jfr;": "\U0001d527",
"jmath;": "\u0237",
"jopf;": "\U0001d55b",
"jscr;": "\U0001d4bf",
"jsercy;": "\u0458",
"jukcy;": "\u0454",
"kappa;": "\u03ba",
"kappav;": "\u03f0",
"kcedil;": "\u0137",
"kcy;": "\u043a",
"kfr;": "\U0001d528",
"kgreen;": "\u0138",
"khcy;": "\u0445",
"kjcy;": "\u045c",
"kopf;": "\U0001d55c",
"kscr;": "\U0001d4c0",
"lAarr;": "\u21da",
"lArr;": "\u21d0",
"lAtail;": "\u291b",
"lBarr;": "\u290e",
"lE;": "\u2266",
"lEg;": "\u2a8b",
"lHar;": "\u2962",
"lacute;": "\u013a",
"laemptyv;": "\u29b4",
"lagran;": "\u2112",
"lambda;": "\u03bb",
"lang;": "\u27e8",
"langd;": "\u2991",
"langle;": "\u27e8",
"lap;": "\u2a85",
"laquo": "\xab",
"laquo;": "\xab",
"larr;": "\u2190",
"larrb;": "\u21e4",
"larrbfs;": "\u291f",
"larrfs;": "\u291d",
"larrhk;": "\u21a9",
"larrlp;": "\u21ab",
"larrpl;": "\u2939",
"larrsim;": "\u2973",
"larrtl;": "\u21a2",
"lat;": "\u2aab",
"latail;": "\u2919",
"late;": "\u2aad",
"lates;": "\u2aad\ufe00",
"lbarr;": "\u290c",
"lbbrk;": "\u2772",
"lbrace;": "{",
"lbrack;": "[",
"lbrke;": "\u298b",
"lbrksld;": "\u298f",
"lbrkslu;": "\u298d",
"lcaron;": "\u013e",
"lcedil;": "\u013c",
"lceil;": "\u2308",
"lcub;": "{",
"lcy;": "\u043b",
"ldca;": "\u2936",
"ldquo;": "\u201c",
"ldquor;": "\u201e",
"ldrdhar;": "\u2967",
"ldrushar;": "\u294b",
"ldsh;": "\u21b2",
"le;": "\u2264",
"leftarrow;": "\u2190",
"leftarrowtail;": "\u21a2",
"leftharpoondown;": "\u21bd",
"leftharpoonup;": "\u21bc",
"leftleftarrows;": "\u21c7",
"leftrightarrow;": "\u2194",
"leftrightarrows;": "\u21c6",
"leftrightharpoons;": "\u21cb",
"leftrightsquigarrow;": "\u21ad",
"leftthreetimes;": "\u22cb",
"leg;": "\u22da",
"leq;": "\u2264",
"leqq;": "\u2266",
"leqslant;": "\u2a7d",
"les;": "\u2a7d",
"lescc;": "\u2aa8",
"lesdot;": "\u2a7f",
"lesdoto;": "\u2a81",
"lesdotor;": "\u2a83",
"lesg;": "\u22da\ufe00",
"lesges;": "\u2a93",
"lessapprox;": "\u2a85",
"lessdot;": "\u22d6",
"lesseqgtr;": "\u22da",
"lesseqqgtr;": "\u2a8b",
"lessgtr;": "\u2276",
"lesssim;": "\u2272",
"lfisht;": "\u297c",
"lfloor;": "\u230a",
"lfr;": "\U0001d529",
"lg;": "\u2276",
"lgE;": "\u2a91",
"lhard;": "\u21bd",
"lharu;": "\u21bc",
"lharul;": "\u296a",
"lhblk;": "\u2584",
"ljcy;": "\u0459",
"ll;": "\u226a",
"llarr;": "\u21c7",
"llcorner;": "\u231e",
"llhard;": "\u296b",
"lltri;": "\u25fa",
"lmidot;": "\u0140",
"lmoust;": "\u23b0",
"lmoustache;": "\u23b0",
"lnE;": "\u2268",
"lnap;": "\u2a89",
"lnapprox;": "\u2a89",
"lne;": "\u2a87",
"lneq;": "\u2a87",
"lneqq;": "\u2268",
"lnsim;": "\u22e6",
"loang;": "\u27ec",
"loarr;": "\u21fd",
"lobrk;": "\u27e6",
"longleftarrow;": "\u27f5",
"longleftrightarrow;": "\u27f7",
"longmapsto;": "\u27fc",
"longrightarrow;": "\u27f6",
"looparrowleft;": "\u21ab",
"looparrowright;": "\u21ac",
"lopar;": "\u2985",
"lopf;": "\U0001d55d",
"loplus;": "\u2a2d",
"lotimes;": "\u2a34",
"lowast;": "\u2217",
"lowbar;": "_",
"loz;": "\u25ca",
"lozenge;": "\u25ca",
"lozf;": "\u29eb",
"lpar;": "(",
"lparlt;": "\u2993",
"lrarr;": "\u21c6",
"lrcorner;": "\u231f",
"lrhar;": "\u21cb",
"lrhard;": "\u296d",
"lrm;": "\u200e",
"lrtri;": "\u22bf",
"lsaquo;": "\u2039",
"lscr;": "\U0001d4c1",
"lsh;": "\u21b0",
"lsim;": "\u2272",
"lsime;": "\u2a8d",
"lsimg;": "\u2a8f",
"lsqb;": "[",
"lsquo;": "\u2018",
"lsquor;": "\u201a",
"lstrok;": "\u0142",
"lt": "<",
"lt;": "<",
"ltcc;": "\u2aa6",
"ltcir;": "\u2a79",
"ltdot;": "\u22d6",
"lthree;": "\u22cb",
"ltimes;": "\u22c9",
"ltlarr;": "\u2976",
"ltquest;": "\u2a7b",
"ltrPar;": "\u2996",
"ltri;": "\u25c3",
"ltrie;": "\u22b4",
"ltrif;": "\u25c2",
"lurdshar;": "\u294a",
"luruhar;": "\u2966",
"lvertneqq;": "\u2268\ufe00",
"lvnE;": "\u2268\ufe00",
"mDDot;": "\u223a",
"macr": "\xaf",
"macr;": "\xaf",
"male;": "\u2642",
"malt;": "\u2720",
"maltese;": "\u2720",
"map;": "\u21a6",
"mapsto;": "\u21a6",
"mapstodown;": "\u21a7",
"mapstoleft;": "\u21a4",
"mapstoup;": "\u21a5",
"marker;": "\u25ae",
"mcomma;": "\u2a29",
"mcy;": "\u043c",
"mdash;": "\u2014",
"measuredangle;": "\u2221",
"mfr;": "\U0001d52a",
"mho;": "\u2127",
"micro": "\xb5",
"micro;": "\xb5",
"mid;": "\u2223",
"midast;": "*",
"midcir;": "\u2af0",
"middot": "\xb7",
"middot;": "\xb7",
"minus;": "\u2212",
"minusb;": "\u229f",
"minusd;": "\u2238",
"minusdu;": "\u2a2a",
"mlcp;": "\u2adb",
"mldr;": "\u2026",
"mnplus;": "\u2213",
"models;": "\u22a7",
"mopf;": "\U0001d55e",
"mp;": "\u2213",
"mscr;": "\U0001d4c2",
"mstpos;": "\u223e",
"mu;": "\u03bc",
"multimap;": "\u22b8",
"mumap;": "\u22b8",
"nGg;": "\u22d9\u0338",
"nGt;": "\u226b\u20d2",
"nGtv;": "\u226b\u0338",
"nLeftarrow;": "\u21cd",
"nLeftrightarrow;": "\u21ce",
"nLl;": "\u22d8\u0338",
"nLt;": "\u226a\u20d2",
"nLtv;": "\u226a\u0338",
"nRightarrow;": "\u21cf",
"nVDash;": "\u22af",
"nVdash;": "\u22ae",
"nabla;": "\u2207",
"nacute;": "\u0144",
"nang;": "\u2220\u20d2",
"nap;": "\u2249",
"napE;": "\u2a70\u0338",
"napid;": "\u224b\u0338",
"napos;": "\u0149",
"napprox;": "\u2249",
"natur;": "\u266e",
"natural;": "\u266e",
"naturals;": "\u2115",
"nbsp": "\xa0",
"nbsp;": "\xa0",
"nbump;": "\u224e\u0338",
"nbumpe;": "\u224f\u0338",
"ncap;": "\u2a43",
"ncaron;": "\u0148",
"ncedil;": "\u0146",
"ncong;": "\u2247",
"ncongdot;": "\u2a6d\u0338",
"ncup;": "\u2a42",
"ncy;": "\u043d",
"ndash;": "\u2013",
"ne;": "\u2260",
"neArr;": "\u21d7",
"nearhk;": "\u2924",
"nearr;": "\u2197",
"nearrow;": "\u2197",
"nedot;": "\u2250\u0338",
"nequiv;": "\u2262",
"nesear;": "\u2928",
"nesim;": "\u2242\u0338",
"nexist;": "\u2204",
"nexists;": "\u2204",
"nfr;": "\U0001d52b",
"ngE;": "\u2267\u0338",
"nge;": "\u2271",
"ngeq;": "\u2271",
"ngeqq;": "\u2267\u0338",
"ngeqslant;": "\u2a7e\u0338",
"nges;": "\u2a7e\u0338",
"ngsim;": "\u2275",
"ngt;": "\u226f",
"ngtr;": "\u226f",
"nhArr;": "\u21ce",
"nharr;": "\u21ae",
"nhpar;": "\u2af2",
"ni;": "\u220b",
"nis;": "\u22fc",
"nisd;": "\u22fa",
"niv;": "\u220b",
"njcy;": "\u045a",
"nlArr;": "\u21cd",
"nlE;": "\u2266\u0338",
"nlarr;": "\u219a",
"nldr;": "\u2025",
"nle;": "\u2270",
"nleftarrow;": "\u219a",
"nleftrightarrow;": "\u21ae",
"nleq;": "\u2270",
"nleqq;": "\u2266\u0338",
"nleqslant;": "\u2a7d\u0338",
"nles;": "\u2a7d\u0338",
"nless;": "\u226e",
"nlsim;": "\u2274",
"nlt;": "\u226e",
"nltri;": "\u22ea",
"nltrie;": "\u22ec",
"nmid;": "\u2224",
"nopf;": "\U0001d55f",
"not": "\xac",
"not;": "\xac",
"notin;": "\u2209",
"notinE;": "\u22f9\u0338",
"notindot;": "\u22f5\u0338",
"notinva;": "\u2209",
"notinvb;": "\u22f7",
"notinvc;": "\u22f6",
"notni;": "\u220c",
"notniva;": "\u220c",
"notnivb;": "\u22fe",
"notnivc;": "\u22fd",
"npar;": "\u2226",
"nparallel;": "\u2226",
"nparsl;": "\u2afd\u20e5",
"npart;": "\u2202\u0338",
"npolint;": "\u2a14",
"npr;": "\u2280",
"nprcue;": "\u22e0",
"npre;": "\u2aaf\u0338",
"nprec;": "\u2280",
"npreceq;": "\u2aaf\u0338",
"nrArr;": "\u21cf",
"nrarr;": "\u219b",
"nrarrc;": "\u2933\u0338",
"nrarrw;": "\u219d\u0338",
"nrightarrow;": "\u219b",
"nrtri;": "\u22eb",
"nrtrie;": "\u22ed",
"nsc;": "\u2281",
"nsccue;": "\u22e1",
"nsce;": "\u2ab0\u0338",
"nscr;": "\U0001d4c3",
"nshortmid;": "\u2224",
"nshortparallel;": "\u2226",
"nsim;": "\u2241",
"nsime;": "\u2244",
"nsimeq;": "\u2244",
"nsmid;": "\u2224",
"nspar;": "\u2226",
"nsqsube;": "\u22e2",
"nsqsupe;": "\u22e3",
"nsub;": "\u2284",
"nsubE;": "\u2ac5\u0338",
"nsube;": "\u2288",
"nsubset;": "\u2282\u20d2",
"nsubseteq;": "\u2288",
"nsubseteqq;": "\u2ac5\u0338",
"nsucc;": "\u2281",
"nsucceq;": "\u2ab0\u0338",
"nsup;": "\u2285",
"nsupE;": "\u2ac6\u0338",
"nsupe;": "\u2289",
"nsupset;": "\u2283\u20d2",
"nsupseteq;": "\u2289",
"nsupseteqq;": "\u2ac6\u0338",
"ntgl;": "\u2279",
"ntilde": "\xf1",
"ntilde;": "\xf1",
"ntlg;": "\u2278",
"ntriangleleft;": "\u22ea",
"ntrianglelefteq;": "\u22ec",
"ntriangleright;": "\u22eb",
"ntrianglerighteq;": "\u22ed",
"nu;": "\u03bd",
"num;": "#",
"numero;": "\u2116",
"numsp;": "\u2007",
"nvDash;": "\u22ad",
"nvHarr;": "\u2904",
"nvap;": "\u224d\u20d2",
"nvdash;": "\u22ac",
"nvge;": "\u2265\u20d2",
"nvgt;": ">\u20d2",
"nvinfin;": "\u29de",
"nvlArr;": "\u2902",
"nvle;": "\u2264\u20d2",
"nvlt;": "<\u20d2",
"nvltrie;": "\u22b4\u20d2",
"nvrArr;": "\u2903",
"nvrtrie;": "\u22b5\u20d2",
"nvsim;": "\u223c\u20d2",
"nwArr;": "\u21d6",
"nwarhk;": "\u2923",
"nwarr;": "\u2196",
"nwarrow;": "\u2196",
"nwnear;": "\u2927",
"oS;": "\u24c8",
"oacute": "\xf3",
"oacute;": "\xf3",
"oast;": "\u229b",
"ocir;": "\u229a",
"ocirc": "\xf4",
"ocirc;": "\xf4",
"ocy;": "\u043e",
"odash;": "\u229d",
"odblac;": "\u0151",
"odiv;": "\u2a38",
"odot;": "\u2299",
"odsold;": "\u29bc",
"oelig;": "\u0153",
"ofcir;": "\u29bf",
"ofr;": "\U0001d52c",
"ogon;": "\u02db",
"ograve": "\xf2",
"ograve;": "\xf2",
"ogt;": "\u29c1",
"ohbar;": "\u29b5",
"ohm;": "\u03a9",
"oint;": "\u222e",
"olarr;": "\u21ba",
"olcir;": "\u29be",
"olcross;": "\u29bb",
"oline;": "\u203e",
"olt;": "\u29c0",
"omacr;": "\u014d",
"omega;": "\u03c9",
"omicron;": "\u03bf",
"omid;": "\u29b6",
"ominus;": "\u2296",
"oopf;": "\U0001d560",
"opar;": "\u29b7",
"operp;": "\u29b9",
"oplus;": "\u2295",
"or;": "\u2228",
"orarr;": "\u21bb",
"ord;": "\u2a5d",
"order;": "\u2134",
"orderof;": "\u2134",
"ordf": "\xaa",
"ordf;": "\xaa",
"ordm": "\xba",
"ordm;": "\xba",
"origof;": "\u22b6",
"oror;": "\u2a56",
"orslope;": "\u2a57",
"orv;": "\u2a5b",
"oscr;": "\u2134",
"oslash": "\xf8",
"oslash;": "\xf8",
"osol;": "\u2298",
"otilde": "\xf5",
"otilde;": "\xf5",
"otimes;": "\u2297",
"otimesas;": "\u2a36",
"ouml": "\xf6",
"ouml;": "\xf6",
"ovbar;": "\u233d",
"par;": "\u2225",
"para": "\xb6",
"para;": "\xb6",
"parallel;": "\u2225",
"parsim;": "\u2af3",
"parsl;": "\u2afd",
"part;": "\u2202",
"pcy;": "\u043f",
"percnt;": "%",
"period;": ".",
"permil;": "\u2030",
"perp;": "\u22a5",
"pertenk;": "\u2031",
"pfr;": "\U0001d52d",
"phi;": "\u03c6",
"phiv;": "\u03d5",
"phmmat;": "\u2133",
"phone;": "\u260e",
"pi;": "\u03c0",
"pitchfork;": "\u22d4",
"piv;": "\u03d6",
"planck;": "\u210f",
"planckh;": "\u210e",
"plankv;": "\u210f",
"plus;": "+",
"plusacir;": "\u2a23",
"plusb;": "\u229e",
"pluscir;": "\u2a22",
"plusdo;": "\u2214",
"plusdu;": "\u2a25",
"pluse;": "\u2a72",
"plusmn": "\xb1",
"plusmn;": "\xb1",
"plussim;": "\u2a26",
"plustwo;": "\u2a27",
"pm;": "\xb1",
"pointint;": "\u2a15",
"popf;": "\U0001d561",
"pound": "\xa3",
"pound;": "\xa3",
"pr;": "\u227a",
"prE;": "\u2ab3",
"prap;": "\u2ab7",
"prcue;": "\u227c",
"pre;": "\u2aaf",
"prec;": "\u227a",
"precapprox;": "\u2ab7",
"preccurlyeq;": "\u227c",
"preceq;": "\u2aaf",
"precnapprox;": "\u2ab9",
"precneqq;": "\u2ab5",
"precnsim;": "\u22e8",
"precsim;": "\u227e",
"prime;": "\u2032",
"primes;": "\u2119",
"prnE;": "\u2ab5",
"prnap;": "\u2ab9",
"prnsim;": "\u22e8",
"prod;": "\u220f",
"profalar;": "\u232e",
"profline;": "\u2312",
"profsurf;": "\u2313",
"prop;": "\u221d",
"propto;": "\u221d",
"prsim;": "\u227e",
"prurel;": "\u22b0",
"pscr;": "\U0001d4c5",
"psi;": "\u03c8",
"puncsp;": "\u2008",
"qfr;": "\U0001d52e",
"qint;": "\u2a0c",
"qopf;": "\U0001d562",
"qprime;": "\u2057",
"qscr;": "\U0001d4c6",
"quaternions;": "\u210d",
"quatint;": "\u2a16",
"quest;": "?",
"questeq;": "\u225f",
"quot": "\"",
"quot;": "\"",
"rAarr;": "\u21db",
"rArr;": "\u21d2",
"rAtail;": "\u291c",
"rBarr;": "\u290f",
"rHar;": "\u2964",
"race;": "\u223d\u0331",
"racute;": "\u0155",
"radic;": "\u221a",
"raemptyv;": "\u29b3",
"rang;": "\u27e9",
"rangd;": "\u2992",
"range;": "\u29a5",
"rangle;": "\u27e9",
"raquo": "\xbb",
"raquo;": "\xbb",
"rarr;": "\u2192",
"rarrap;": "\u2975",
"rarrb;": "\u21e5",
"rarrbfs;": "\u2920",
"rarrc;": "\u2933",
"rarrfs;": "\u291e",
"rarrhk;": "\u21aa",
"rarrlp;": "\u21ac",
"rarrpl;": "\u2945",
"rarrsim;": "\u2974",
"rarrtl;": "\u21a3",
"rarrw;": "\u219d",
"ratail;": "\u291a",
"ratio;": "\u2236",
"rationals;": "\u211a",
"rbarr;": "\u290d",
"rbbrk;": "\u2773",
"rbrace;": "}",
"rbrack;": "]",
"rbrke;": "\u298c",
"rbrksld;": "\u298e",
"rbrkslu;": "\u2990",
"rcaron;": "\u0159",
"rcedil;": "\u0157",
"rceil;": "\u2309",
"rcub;": "}",
"rcy;": "\u0440",
"rdca;": "\u2937",
"rdldhar;": "\u2969",
"rdquo;": "\u201d",
"rdquor;": "\u201d",
"rdsh;": "\u21b3",
"real;": "\u211c",
"realine;": "\u211b",
"realpart;": "\u211c",
"reals;": "\u211d",
"rect;": "\u25ad",
"reg": "\xae",
"reg;": "\xae",
"rfisht;": "\u297d",
"rfloor;": "\u230b",
"rfr;": "\U0001d52f",
"rhard;": "\u21c1",
"rharu;": "\u21c0",
"rharul;": "\u296c",
"rho;": "\u03c1",
"rhov;": "\u03f1",
"rightarrow;": "\u2192",
"rightarrowtail;": "\u21a3",
"rightharpoondown;": "\u21c1",
"rightharpoonup;": "\u21c0",
"rightleftarrows;": "\u21c4",
"rightleftharpoons;": "\u21cc",
"rightrightarrows;": "\u21c9",
"rightsquigarrow;": "\u219d",
"rightthreetimes;": "\u22cc",
"ring;": "\u02da",
"risingdotseq;": "\u2253",
"rlarr;": "\u21c4",
"rlhar;": "\u21cc",
"rlm;": "\u200f",
"rmoust;": "\u23b1",
"rmoustache;": "\u23b1",
"rnmid;": "\u2aee",
"roang;": "\u27ed",
"roarr;": "\u21fe",
"robrk;": "\u27e7",
"ropar;": "\u2986",
"ropf;": "\U0001d563",
"roplus;": "\u2a2e",
"rotimes;": "\u2a35",
"rpar;": ")",
"rpargt;": "\u2994",
"rppolint;": "\u2a12",
"rrarr;": "\u21c9",
"rsaquo;": "\u203a",
"rscr;": "\U0001d4c7",
"rsh;": "\u21b1",
"rsqb;": "]",
"rsquo;": "\u2019",
"rsquor;": "\u2019",
"rthree;": "\u22cc",
"rtimes;": "\u22ca",
"rtri;": "\u25b9",
"rtrie;": "\u22b5",
"rtrif;": "\u25b8",
"rtriltri;": "\u29ce",
"ruluhar;": "\u2968",
"rx;": "\u211e",
"sacute;": "\u015b",
"sbquo;": "\u201a",
"sc;": "\u227b",
"scE;": "\u2ab4",
"scap;": "\u2ab8",
"scaron;": "\u0161",
"sccue;": "\u227d",
"sce;": "\u2ab0",
"scedil;": "\u015f",
"scirc;": "\u015d",
"scnE;": "\u2ab6",
"scnap;": "\u2aba",
"scnsim;": "\u22e9",
"scpolint;": "\u2a13",
"scsim;": "\u227f",
"scy;": "\u0441",
"sdot;": "\u22c5",
"sdotb;": "\u22a1",
"sdote;": "\u2a66",
"seArr;": "\u21d8",
"searhk;": "\u2925",
"searr;": "\u2198",
"searrow;": "\u2198",
"sect": "\xa7",
"sect;": "\xa7",
"semi;": ";",
"seswar;": "\u2929",
"setminus;": "\u2216",
"setmn;": "\u2216",
"sext;": "\u2736",
"sfr;": "\U0001d530",
"sfrown;": "\u2322",
"sharp;": "\u266f",
"shchcy;": "\u0449",
"shcy;": "\u0448",
"shortmid;": "\u2223",
"shortparallel;": "\u2225",
"shy": "\xad",
"shy;": "\xad",
"sigma;": "\u03c3",
"sigmaf;": "\u03c2",
"sigmav;": "\u03c2",
"sim;": "\u223c",
"simdot;": "\u2a6a",
"sime;": "\u2243",
"simeq;": "\u2243",
"simg;": "\u2a9e",
"simgE;": "\u2aa0",
"siml;": "\u2a9d",
"simlE;": "\u2a9f",
"simne;": "\u2246",
"simplus;": "\u2a24",
"simrarr;": "\u2972",
"slarr;": "\u2190",
"smallsetminus;": "\u2216",
"smashp;": "\u2a33",
"smeparsl;": "\u29e4",
"smid;": "\u2223",
"smile;": "\u2323",
"smt;": "\u2aaa",
"smte;": "\u2aac",
"smtes;": "\u2aac\ufe00",
"softcy;": "\u044c",
"sol;": "/",
"solb;": "\u29c4",
"solbar;": "\u233f",
"sopf;": "\U0001d564",
"spades;": "\u2660",
"spadesuit;": "\u2660",
"spar;": "\u2225",
"sqcap;": "\u2293",
"sqcaps;": "\u2293\ufe00",
"sqcup;": "\u2294",
"sqcups;": "\u2294\ufe00",
"sqsub;": "\u228f",
"sqsube;": "\u2291",
"sqsubset;": "\u228f",
"sqsubseteq;": "\u2291",
"sqsup;": "\u2290",
"sqsupe;": "\u2292",
"sqsupset;": "\u2290",
"sqsupseteq;": "\u2292",
"squ;": "\u25a1",
"square;": "\u25a1",
"squarf;": "\u25aa",
"squf;": "\u25aa",
"srarr;": "\u2192",
"sscr;": "\U0001d4c8",
"ssetmn;": "\u2216",
"ssmile;": "\u2323",
"sstarf;": "\u22c6",
"star;": "\u2606",
"starf;": "\u2605",
"straightepsilon;": "\u03f5",
"straightphi;": "\u03d5",
"strns;": "\xaf",
"sub;": "\u2282",
"subE;": "\u2ac5",
"subdot;": "\u2abd",
"sube;": "\u2286",
"subedot;": "\u2ac3",
"submult;": "\u2ac1",
"subnE;": "\u2acb",
"subne;": "\u228a",
"subplus;": "\u2abf",
"subrarr;": "\u2979",
"subset;": "\u2282",
"subseteq;": "\u2286",
"subseteqq;": "\u2ac5",
"subsetneq;": "\u228a",
"subsetneqq;": "\u2acb",
"subsim;": "\u2ac7",
"subsub;": "\u2ad5",
"subsup;": "\u2ad3",
"succ;": "\u227b",
"succapprox;": "\u2ab8",
"succcurlyeq;": "\u227d",
"succeq;": "\u2ab0",
"succnapprox;": "\u2aba",
"succneqq;": "\u2ab6",
"succnsim;": "\u22e9",
"succsim;": "\u227f",
"sum;": "\u2211",
"sung;": "\u266a",
"sup1": "\xb9",
"sup1;": "\xb9",
"sup2": "\xb2",
"sup2;": "\xb2",
"sup3": "\xb3",
"sup3;": "\xb3",
"sup;": "\u2283",
"supE;": "\u2ac6",
"supdot;": "\u2abe",
"supdsub;": "\u2ad8",
"supe;": "\u2287",
"supedot;": "\u2ac4",
"suphsol;": "\u27c9",
"suphsub;": "\u2ad7",
"suplarr;": "\u297b",
"supmult;": "\u2ac2",
"supnE;": "\u2acc",
"supne;": "\u228b",
"supplus;": "\u2ac0",
"supset;": "\u2283",
"supseteq;": "\u2287",
"supseteqq;": "\u2ac6",
"supsetneq;": "\u228b",
"supsetneqq;": "\u2acc",
"supsim;": "\u2ac8",
"supsub;": "\u2ad4",
"supsup;": "\u2ad6",
"swArr;": "\u21d9",
"swarhk;": "\u2926",
"swarr;": "\u2199",
"swarrow;": "\u2199",
"swnwar;": "\u292a",
"szlig": "\xdf",
"szlig;": "\xdf",
"target;": "\u2316",
"tau;": "\u03c4",
"tbrk;": "\u23b4",
"tcaron;": "\u0165",
"tcedil;": "\u0163",
"tcy;": "\u0442",
"tdot;": "\u20db",
"telrec;": "\u2315",
"tfr;": "\U0001d531",
"there4;": "\u2234",
"therefore;": "\u2234",
"theta;": "\u03b8",
"thetasym;": "\u03d1",
"thetav;": "\u03d1",
"thickapprox;": "\u2248",
"thicksim;": "\u223c",
"thinsp;": "\u2009",
"thkap;": "\u2248",
"thksim;": "\u223c",
"thorn": "\xfe",
"thorn;": "\xfe",
"tilde;": "\u02dc",
"times": "\xd7",
"times;": "\xd7",
"timesb;": "\u22a0",
"timesbar;": "\u2a31",
"timesd;": "\u2a30",
"tint;": "\u222d",
"toea;": "\u2928",
"top;": "\u22a4",
"topbot;": "\u2336",
"topcir;": "\u2af1",
"topf;": "\U0001d565",
"topfork;": "\u2ada",
"tosa;": "\u2929",
"tprime;": "\u2034",
"trade;": "\u2122",
"triangle;": "\u25b5",
"triangledown;": "\u25bf",
"triangleleft;": "\u25c3",
"trianglelefteq;": "\u22b4",
"triangleq;": "\u225c",
"triangleright;": "\u25b9",
"trianglerighteq;": "\u22b5",
"tridot;": "\u25ec",
"trie;": "\u225c",
"triminus;": "\u2a3a",
"triplus;": "\u2a39",
"trisb;": "\u29cd",
"tritime;": "\u2a3b",
"trpezium;": "\u23e2",
"tscr;": "\U0001d4c9",
"tscy;": "\u0446",
"tshcy;": "\u045b",
"tstrok;": "\u0167",
"twixt;": "\u226c",
"twoheadleftarrow;": "\u219e",
"twoheadrightarrow;": "\u21a0",
"uArr;": "\u21d1",
"uHar;": "\u2963",
"uacute": "\xfa",
"uacute;": "\xfa",
"uarr;": "\u2191",
"ubrcy;": "\u045e",
"ubreve;": "\u016d",
"ucirc": "\xfb",
"ucirc;": "\xfb",
"ucy;": "\u0443",
"udarr;": "\u21c5",
"udblac;": "\u0171",
"udhar;": "\u296e",
"ufisht;": "\u297e",
"ufr;": "\U0001d532",
"ugrave": "\xf9",
"ugrave;": "\xf9",
"uharl;": "\u21bf",
"uharr;": "\u21be",
"uhblk;": "\u2580",
"ulcorn;": "\u231c",
"ulcorner;": "\u231c",
"ulcrop;": "\u230f",
"ultri;": "\u25f8",
"umacr;": "\u016b",
"uml": "\xa8",
"uml;": "\xa8",
"uogon;": "\u0173",
"uopf;": "\U0001d566",
"uparrow;": "\u2191",
"updownarrow;": "\u2195",
"upharpoonleft;": "\u21bf",
"upharpoonright;": "\u21be",
"uplus;": "\u228e",
"upsi;": "\u03c5",
"upsih;": "\u03d2",
"upsilon;": "\u03c5",
"upuparrows;": "\u21c8",
"urcorn;": "\u231d",
"urcorner;": "\u231d",
"urcrop;": "\u230e",
"uring;": "\u016f",
"urtri;": "\u25f9",
"uscr;": "\U0001d4ca",
"utdot;": "\u22f0",
"utilde;": "\u0169",
"utri;": "\u25b5",
"utrif;": "\u25b4",
"uuarr;": "\u21c8",
"uuml": "\xfc",
"uuml;": "\xfc",
"uwangle;": "\u29a7",
"vArr;": "\u21d5",
"vBar;": "\u2ae8",
"vBarv;": "\u2ae9",
"vDash;": "\u22a8",
"vangrt;": "\u299c",
"varepsilon;": "\u03f5",
"varkappa;": "\u03f0",
"varnothing;": "\u2205",
"varphi;": "\u03d5",
"varpi;": "\u03d6",
"varpropto;": "\u221d",
"varr;": "\u2195",
"varrho;": "\u03f1",
"varsigma;": "\u03c2",
"varsubsetneq;": "\u228a\ufe00",
"varsubsetneqq;": "\u2acb\ufe00",
"varsupsetneq;": "\u228b\ufe00",
"varsupsetneqq;": "\u2acc\ufe00",
"vartheta;": "\u03d1",
"vartriangleleft;": "\u22b2",
"vartriangleright;": "\u22b3",
"vcy;": "\u0432",
"vdash;": "\u22a2",
"vee;": "\u2228",
"veebar;": "\u22bb",
"veeeq;": "\u225a",
"vellip;": "\u22ee",
"verbar;": "|",
"vert;": "|",
"vfr;": "\U0001d533",
"vltri;": "\u22b2",
"vnsub;": "\u2282\u20d2",
"vnsup;": "\u2283\u20d2",
"vopf;": "\U0001d567",
"vprop;": "\u221d",
"vrtri;": "\u22b3",
"vscr;": "\U0001d4cb",
"vsubnE;": "\u2acb\ufe00",
"vsubne;": "\u228a\ufe00",
"vsupnE;": "\u2acc\ufe00",
"vsupne;": "\u228b\ufe00",
"vzigzag;": "\u299a",
"wcirc;": "\u0175",
"wedbar;": "\u2a5f",
"wedge;": "\u2227",
"wedgeq;": "\u2259",
"weierp;": "\u2118",
"wfr;": "\U0001d534",
"wopf;": "\U0001d568",
"wp;": "\u2118",
"wr;": "\u2240",
"wreath;": "\u2240",
"wscr;": "\U0001d4cc",
"xcap;": "\u22c2",
"xcirc;": "\u25ef",
"xcup;": "\u22c3",
"xdtri;": "\u25bd",
"xfr;": "\U0001d535",
"xhArr;": "\u27fa",
"xharr;": "\u27f7",
"xi;": "\u03be",
"xlArr;": "\u27f8",
"xlarr;": "\u27f5",
"xmap;": "\u27fc",
"xnis;": "\u22fb",
"xodot;": "\u2a00",
"xopf;": "\U0001d569",
"xoplus;": "\u2a01",
"xotime;": "\u2a02",
"xrArr;": "\u27f9",
"xrarr;": "\u27f6",
"xscr;": "\U0001d4cd",
"xsqcup;": "\u2a06",
"xuplus;": "\u2a04",
"xutri;": "\u25b3",
"xvee;": "\u22c1",
"xwedge;": "\u22c0",
"yacute": "\xfd",
"yacute;": "\xfd",
"yacy;": "\u044f",
"ycirc;": "\u0177",
"ycy;": "\u044b",
"yen": "\xa5",
"yen;": "\xa5",
"yfr;": "\U0001d536",
"yicy;": "\u0457",
"yopf;": "\U0001d56a",
"yscr;": "\U0001d4ce",
"yucy;": "\u044e",
"yuml": "\xff",
"yuml;": "\xff",
"zacute;": "\u017a",
"zcaron;": "\u017e",
"zcy;": "\u0437",
"zdot;": "\u017c",
"zeetrf;": "\u2128",
"zeta;": "\u03b6",
"zfr;": "\U0001d537",
"zhcy;": "\u0436",
"zigrarr;": "\u21dd",
"zopf;": "\U0001d56b",
"zscr;": "\U0001d4cf",
"zwj;": "\u200d",
"zwnj;": "\u200c",
}
replacementCharacters = {
0x0: "\uFFFD",
0x0d: "\u000D",
0x80: "\u20AC",
0x81: "\u0081",
0x82: "\u201A",
0x83: "\u0192",
0x84: "\u201E",
0x85: "\u2026",
0x86: "\u2020",
0x87: "\u2021",
0x88: "\u02C6",
0x89: "\u2030",
0x8A: "\u0160",
0x8B: "\u2039",
0x8C: "\u0152",
0x8D: "\u008D",
0x8E: "\u017D",
0x8F: "\u008F",
0x90: "\u0090",
0x91: "\u2018",
0x92: "\u2019",
0x93: "\u201C",
0x94: "\u201D",
0x95: "\u2022",
0x96: "\u2013",
0x97: "\u2014",
0x98: "\u02DC",
0x99: "\u2122",
0x9A: "\u0161",
0x9B: "\u203A",
0x9C: "\u0153",
0x9D: "\u009D",
0x9E: "\u017E",
0x9F: "\u0178",
}
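# Illustrative helper (hypothetical, not part of the original module): the
# table above remaps the C1-control range of numeric character references;
# any code point not listed passes through unchanged.
def _example_replace_codepoint(codepoint):
    # unichr() assumes Python 2, matching this module; use chr() on Python 3.
    return replacementCharacters.get(codepoint, unichr(codepoint))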
encodings = {
'437': 'cp437',
'850': 'cp850',
'852': 'cp852',
'855': 'cp855',
'857': 'cp857',
'860': 'cp860',
'861': 'cp861',
'862': 'cp862',
'863': 'cp863',
'865': 'cp865',
'866': 'cp866',
'869': 'cp869',
'ansix341968': 'ascii',
'ansix341986': 'ascii',
'arabic': 'iso8859-6',
'ascii': 'ascii',
'asmo708': 'iso8859-6',
'big5': 'big5',
'big5hkscs': 'big5hkscs',
'chinese': 'gbk',
'cp037': 'cp037',
'cp1026': 'cp1026',
'cp154': 'ptcp154',
'cp367': 'ascii',
'cp424': 'cp424',
'cp437': 'cp437',
'cp500': 'cp500',
'cp775': 'cp775',
'cp819': 'windows-1252',
'cp850': 'cp850',
'cp852': 'cp852',
'cp855': 'cp855',
'cp857': 'cp857',
'cp860': 'cp860',
'cp861': 'cp861',
'cp862': 'cp862',
'cp863': 'cp863',
'cp864': 'cp864',
'cp865': 'cp865',
'cp866': 'cp866',
'cp869': 'cp869',
'cp936': 'gbk',
'cpgr': 'cp869',
'cpis': 'cp861',
'csascii': 'ascii',
'csbig5': 'big5',
'cseuckr': 'cp949',
'cseucpkdfmtjapanese': 'euc_jp',
'csgb2312': 'gbk',
'cshproman8': 'hp-roman8',
'csibm037': 'cp037',
'csibm1026': 'cp1026',
'csibm424': 'cp424',
'csibm500': 'cp500',
'csibm855': 'cp855',
'csibm857': 'cp857',
'csibm860': 'cp860',
'csibm861': 'cp861',
'csibm863': 'cp863',
'csibm864': 'cp864',
'csibm865': 'cp865',
'csibm866': 'cp866',
'csibm869': 'cp869',
'csiso2022jp': 'iso2022_jp',
'csiso2022jp2': 'iso2022_jp_2',
'csiso2022kr': 'iso2022_kr',
'csiso58gb231280': 'gbk',
'csisolatin1': 'windows-1252',
'csisolatin2': 'iso8859-2',
'csisolatin3': 'iso8859-3',
'csisolatin4': 'iso8859-4',
'csisolatin5': 'windows-1254',
'csisolatin6': 'iso8859-10',
'csisolatinarabic': 'iso8859-6',
'csisolatincyrillic': 'iso8859-5',
'csisolatingreek': 'iso8859-7',
'csisolatinhebrew': 'iso8859-8',
'cskoi8r': 'koi8-r',
'csksc56011987': 'cp949',
'cspc775baltic': 'cp775',
'cspc850multilingual': 'cp850',
'cspc862latinhebrew': 'cp862',
'cspc8codepage437': 'cp437',
'cspcp852': 'cp852',
'csptcp154': 'ptcp154',
'csshiftjis': 'shift_jis',
'csunicode11utf7': 'utf-7',
'cyrillic': 'iso8859-5',
'cyrillicasian': 'ptcp154',
'ebcdiccpbe': 'cp500',
'ebcdiccpca': 'cp037',
'ebcdiccpch': 'cp500',
'ebcdiccphe': 'cp424',
'ebcdiccpnl': 'cp037',
'ebcdiccpus': 'cp037',
'ebcdiccpwt': 'cp037',
'ecma114': 'iso8859-6',
'ecma118': 'iso8859-7',
'elot928': 'iso8859-7',
'eucjp': 'euc_jp',
'euckr': 'cp949',
'extendedunixcodepackedformatforjapanese': 'euc_jp',
'gb18030': 'gb18030',
'gb2312': 'gbk',
'gb231280': 'gbk',
'gbk': 'gbk',
'greek': 'iso8859-7',
'greek8': 'iso8859-7',
'hebrew': 'iso8859-8',
'hproman8': 'hp-roman8',
'hzgb2312': 'hz',
'ibm037': 'cp037',
'ibm1026': 'cp1026',
'ibm367': 'ascii',
'ibm424': 'cp424',
'ibm437': 'cp437',
'ibm500': 'cp500',
'ibm775': 'cp775',
'ibm819': 'windows-1252',
'ibm850': 'cp850',
'ibm852': 'cp852',
'ibm855': 'cp855',
'ibm857': 'cp857',
'ibm860': 'cp860',
'ibm861': 'cp861',
'ibm862': 'cp862',
'ibm863': 'cp863',
'ibm864': 'cp864',
'ibm865': 'cp865',
'ibm866': 'cp866',
'ibm869': 'cp869',
'iso2022jp': 'iso2022_jp',
'iso2022jp2': 'iso2022_jp_2',
'iso2022kr': 'iso2022_kr',
'iso646irv1991': 'ascii',
'iso646us': 'ascii',
'iso88591': 'windows-1252',
'iso885910': 'iso8859-10',
'iso8859101992': 'iso8859-10',
'iso885911987': 'windows-1252',
'iso885913': 'iso8859-13',
'iso885914': 'iso8859-14',
'iso8859141998': 'iso8859-14',
'iso885915': 'iso8859-15',
'iso885916': 'iso8859-16',
'iso8859162001': 'iso8859-16',
'iso88592': 'iso8859-2',
'iso885921987': 'iso8859-2',
'iso88593': 'iso8859-3',
'iso885931988': 'iso8859-3',
'iso88594': 'iso8859-4',
'iso885941988': 'iso8859-4',
'iso88595': 'iso8859-5',
'iso885951988': 'iso8859-5',
'iso88596': 'iso8859-6',
'iso885961987': 'iso8859-6',
'iso88597': 'iso8859-7',
'iso885971987': 'iso8859-7',
'iso88598': 'iso8859-8',
'iso885981988': 'iso8859-8',
'iso88599': 'windows-1254',
'iso885991989': 'windows-1254',
'isoceltic': 'iso8859-14',
'isoir100': 'windows-1252',
'isoir101': 'iso8859-2',
'isoir109': 'iso8859-3',
'isoir110': 'iso8859-4',
'isoir126': 'iso8859-7',
'isoir127': 'iso8859-6',
'isoir138': 'iso8859-8',
'isoir144': 'iso8859-5',
'isoir148': 'windows-1254',
'isoir149': 'cp949',
'isoir157': 'iso8859-10',
'isoir199': 'iso8859-14',
'isoir226': 'iso8859-16',
'isoir58': 'gbk',
'isoir6': 'ascii',
'koi8r': 'koi8-r',
'koi8u': 'koi8-u',
'korean': 'cp949',
'ksc5601': 'cp949',
'ksc56011987': 'cp949',
'ksc56011989': 'cp949',
'l1': 'windows-1252',
'l10': 'iso8859-16',
'l2': 'iso8859-2',
'l3': 'iso8859-3',
'l4': 'iso8859-4',
'l5': 'windows-1254',
'l6': 'iso8859-10',
'l8': 'iso8859-14',
'latin1': 'windows-1252',
'latin10': 'iso8859-16',
'latin2': 'iso8859-2',
'latin3': 'iso8859-3',
'latin4': 'iso8859-4',
'latin5': 'windows-1254',
'latin6': 'iso8859-10',
'latin8': 'iso8859-14',
'latin9': 'iso8859-15',
'ms936': 'gbk',
'mskanji': 'shift_jis',
'pt154': 'ptcp154',
'ptcp154': 'ptcp154',
'r8': 'hp-roman8',
'roman8': 'hp-roman8',
'shiftjis': 'shift_jis',
'tis620': 'cp874',
'unicode11utf7': 'utf-7',
'us': 'ascii',
'usascii': 'ascii',
'utf16': 'utf-16',
'utf16be': 'utf-16-be',
'utf16le': 'utf-16-le',
'utf8': 'utf-8',
'windows1250': 'cp1250',
'windows1251': 'cp1251',
'windows1252': 'cp1252',
'windows1253': 'cp1253',
'windows1254': 'cp1254',
'windows1255': 'cp1255',
'windows1256': 'cp1256',
'windows1257': 'cp1257',
'windows1258': 'cp1258',
'windows936': 'gbk',
'x-x-big5': 'big5'}
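# Illustrative lookup sketch (hypothetical helper, added for clarity):
# callers are assumed to normalise an encoding label before consulting the
# table above -- roughly, lowercase it and strip every non-alphanumeric
# character, so a label such as "ISO_8859-1:1987" becomes 'iso885911987'.
def _example_codec_for_label(label):
    key = ''.join(ch for ch in label.lower() if ch.isalnum())
    return encodings.get(key)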
tokenTypes = {
"Doctype": 0,
"Characters": 1,
"SpaceCharacters": 2,
"StartTag": 3,
"EndTag": 4,
"EmptyTag": 5,
"Comment": 6,
"ParseError": 7
}
tagTokenTypes = frozenset((tokenTypes["StartTag"], tokenTypes["EndTag"],
tokenTypes["EmptyTag"]))
prefixes = dict([(v, k) for k, v in namespaces.items()])
prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
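# Note (annotation added for clarity): ``namespaces`` maps prefix -> URI, so
# the reversal above yields URI -> prefix; the MathML entry is then pinned
# to "math" because the reversed dict would otherwise carry whatever prefix
# ``namespaces`` uses internally for it. Hypothetical example:
#
#   prefixes["http://www.w3.org/1998/Math/MathML"]  # -> "math"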
class DataLossWarning(UserWarning):
pass
class ReparseException(Exception):
pass
| gpl-3.0 | -8,795,280,964,289,582,000 | 27.13982 | 94 | 0.498603 | false |
cselis86/edx-platform | lms/djangoapps/notes/api.py | 165 | 7855 | from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, Http404
from django.core.exceptions import ValidationError
from notes.models import Note
from notes.utils import notes_enabled_for_course
from courseware.courses import get_course_with_access
import json
import logging
import collections
log = logging.getLogger(__name__)
API_SETTINGS = {
'META': {'name': 'Notes API', 'version': 1},
# Maps resources to HTTP methods and actions
'RESOURCE_MAP': {
'root': {'GET': 'root'},
'notes': {'GET': 'index', 'POST': 'create'},
'note': {'GET': 'read', 'PUT': 'update', 'DELETE': 'delete'},
'search': {'GET': 'search'},
},
# Cap the number of notes that can be returned in one request
'MAX_NOTE_LIMIT': 1000,
}
# Wrapper class for HTTP response and data. All API actions are expected to return this.
ApiResponse = collections.namedtuple('ApiResponse', ['http_response', 'data'])
#----------------------------------------------------------------------#
# API requests are routed through api_request() using the resource map.
def api_enabled(request, course_key):
'''
Returns True if the api is enabled for the course, otherwise False.
'''
course = _get_course(request, course_key)
return notes_enabled_for_course(course)
@login_required
def api_request(request, course_id, **kwargs):
'''
Routes API requests to the appropriate action method and returns JSON.
Raises a 404 if the requested resource does not exist or notes are
disabled for the course.
'''
assert isinstance(course_id, basestring)
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
# Verify that the api should be accessible to this course
if not api_enabled(request, course_key):
log.debug('Notes are disabled for course: {0}'.format(course_id))
raise Http404
# Locate the requested resource
resource_map = API_SETTINGS.get('RESOURCE_MAP', {})
resource_name = kwargs.pop('resource')
resource_method = request.method
resource = resource_map.get(resource_name)
if resource is None:
log.debug('Resource "{0}" does not exist'.format(resource_name))
raise Http404
if resource_method not in resource.keys():
log.debug('Resource "{0}" does not support method "{1}"'.format(resource_name, resource_method))
raise Http404
# Execute the action associated with the resource
func = resource.get(resource_method)
module = globals()
if func not in module:
log.debug('Function "{0}" does not exist for request {1} {2}'.format(func, resource_method, resource_name))
raise Http404
log.debug('API request: {0} {1}'.format(resource_method, resource_name))
api_response = module[func](request, course_key, **kwargs)
http_response = api_format(api_response)
return http_response
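# Illustrative dispatch walk-through (hypothetical request, added for
# clarity): a GET routed with resource='notes' resolves through
# RESOURCE_MAP['notes']['GET'] to the action name 'index', so api_request
# looks that name up in globals() and calls index(request, course_key);
# the resulting ApiResponse is JSON-encoded by api_format() below.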
def api_format(api_response):
'''
Takes an ApiResponse and returns an HttpResponse.
'''
http_response = api_response.http_response
content_type = 'application/json'
content = ''
    # not doing a strict boolean check on data because it could be an empty list
if api_response.data is not None and api_response.data != '':
content = json.dumps(api_response.data)
http_response['Content-type'] = content_type
http_response.content = content
log.debug('API response type: {0} content: {1}'.format(content_type, content))
return http_response
def _get_course(request, course_key):
'''
Helper function to load and return a user's course.
'''
return get_course_with_access(request.user, 'load', course_key)
#----------------------------------------------------------------------#
# API actions exposed via the resource map.
def index(request, course_key):
'''
Returns a list of annotation objects.
'''
MAX_LIMIT = API_SETTINGS.get('MAX_NOTE_LIMIT')
notes = Note.objects.order_by('id').filter(course_id=course_key,
user=request.user)[:MAX_LIMIT]
return ApiResponse(http_response=HttpResponse(), data=[note.as_dict() for note in notes])
def create(request, course_key):
'''
Receives an annotation object to create and returns a 303 with the read location.
'''
note = Note(course_id=course_key, user=request.user)
try:
note.clean(request.body)
except ValidationError as e:
log.debug(e)
return ApiResponse(http_response=HttpResponse('', status=400), data=None)
note.save()
response = HttpResponse('', status=303)
response['Location'] = note.get_absolute_url()
return ApiResponse(http_response=response, data=None)
def read(request, _course_key, note_id):
'''
Returns a single annotation object.
'''
try:
note = Note.objects.get(id=note_id)
except Note.DoesNotExist:
return ApiResponse(http_response=HttpResponse('', status=404), data=None)
if note.user.id != request.user.id:
return ApiResponse(http_response=HttpResponse('', status=403), data=None)
return ApiResponse(http_response=HttpResponse(), data=note.as_dict())
def update(request, course_key, note_id): # pylint: disable=unused-argument
'''
Updates an annotation object and returns a 303 with the read location.
'''
try:
note = Note.objects.get(id=note_id)
except Note.DoesNotExist:
return ApiResponse(http_response=HttpResponse('', status=404), data=None)
if note.user.id != request.user.id:
return ApiResponse(http_response=HttpResponse('', status=403), data=None)
try:
note.clean(request.body)
except ValidationError as e:
log.debug(e)
return ApiResponse(http_response=HttpResponse('', status=400), data=None)
note.save()
response = HttpResponse('', status=303)
response['Location'] = note.get_absolute_url()
return ApiResponse(http_response=response, data=None)
def delete(request, course_key, note_id):  # pylint: disable=unused-argument
'''
Deletes the annotation object and returns a 204 with no content.
'''
try:
note = Note.objects.get(id=note_id)
except Note.DoesNotExist:
return ApiResponse(http_response=HttpResponse('', status=404), data=None)
if note.user.id != request.user.id:
return ApiResponse(http_response=HttpResponse('', status=403), data=None)
note.delete()
return ApiResponse(http_response=HttpResponse('', status=204), data=None)
def search(request, course_key):
'''
Returns a subset of annotation objects based on a search query.
'''
MAX_LIMIT = API_SETTINGS.get('MAX_NOTE_LIMIT')
# search parameters
offset = request.GET.get('offset', '')
limit = request.GET.get('limit', '')
uri = request.GET.get('uri', '')
# validate search parameters
if offset.isdigit():
offset = int(offset)
else:
offset = 0
if limit.isdigit():
limit = int(limit)
if limit == 0 or limit > MAX_LIMIT:
limit = MAX_LIMIT
else:
limit = MAX_LIMIT
# set filters
filters = {'course_id': course_key, 'user': request.user}
if uri != '':
filters['uri'] = uri
# retrieve notes
notes = Note.objects.order_by('id').filter(**filters)
total = notes.count()
rows = notes[offset:offset + limit]
result = {
'total': total,
'rows': [note.as_dict() for note in rows]
}
return ApiResponse(http_response=HttpResponse(), data=result)
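# Example (illustrative values): GET .../search?offset=10&limit=5000&uri=/foo
# clamps limit down to MAX_NOTE_LIMIT, filters the requesting user's notes
# for this course by uri, and returns {'total': <match count>, 'rows': [...]}
# for the window starting at offset 10.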
def root(request, course_key): # pylint: disable=unused-argument
'''
Returns version information about the API.
'''
return ApiResponse(http_response=HttpResponse(), data=API_SETTINGS.get('META'))
| agpl-3.0 | -1,831,694,184,086,946,300 | 29.925197 | 115 | 0.647104 | false |
danielpalomino/gem5 | src/arch/x86/isa/insts/general_purpose/control_transfer/jump.py | 40 | 5389 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop JMP_I
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
rdip t1
limm t2, imm
wrip t1, t2
};
def macroop JMP_R
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
wripi reg, 0
};
def macroop JMP_M
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
ld t1, seg, sib, disp
wripi t1, 0
};
def macroop JMP_P
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
rdip t7
ld t1, seg, riprel, disp
wripi t1, 0
};
def macroop JMP_FAR_M
{
limm t1, 0, dataSize=8
limm t2, 0, dataSize=8
lea t1, seg, sib, disp, dataSize=asz
ld t2, seg, [1, t0, t1], dsz
ld t1, seg, [1, t0, t1]
br rom_label("jmpFarWork")
};
def macroop JMP_FAR_P
{
limm t1, 0, dataSize=8
limm t2, 0, dataSize=8
rdip t7, dataSize=asz
lea t1, seg, riprel, disp, dataSize=asz
ld t2, seg, [1, t0, t1], dsz
ld t1, seg, [1, t0, t1]
br rom_label("jmpFarWork")
};
def macroop JMP_FAR_I
{
# Put the whole far pointer into a register.
limm t2, imm, dataSize=8
# Figure out the width of the offset.
limm t3, dsz, dataSize=8
slli t3, t3, 3, dataSize=8
# Get the offset into t1.
mov t1, t0, t2
# Get the selector into t2.
srl t2, t2, t3, dataSize=8
mov t2, t0, t2, dataSize=2
br rom_label("jmpFarWork")
};
def rom
{
extern jmpFarWork:
# t1 has the offset and t2 has the new selector.
# This is intended to run in protected mode.
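    # Annotation (added for clarity): the first check below faults on a
    # null selector; t3 then holds the selector with its low flag bits
    # masked off, used as the byte offset into a descriptor table; and
    # bit 2 of the selector -- the table-indicator bit -- chooses between
    # the global (tsg) and local (tsl) descriptor tables.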
andi t0, t2, 0xFC, flags=(EZF,), dataSize=2
fault "new GeneralProtection(0)", flags=(CEZF,)
andi t3, t2, 0xF8, dataSize=8
andi t0, t2, 0x4, flags=(EZF,), dataSize=2
br rom_local_label("farJmpGlobalDescriptor"), flags=(CEZF,)
ld t4, tsl, [1, t0, t3], dataSize=8, addressSize=8, atCPL0=True
br rom_local_label("farJmpProcessDescriptor")
farJmpGlobalDescriptor:
ld t4, tsg, [1, t0, t3], dataSize=8, addressSize=8, atCPL0=True
farJmpProcessDescriptor:
rcri t0, t4, 13, flags=(ECF,), dataSize=2
br rom_local_label("farJmpSystemDescriptor"), flags=(nCECF,)
chks t2, t4, CSCheck, dataSize=8
wrdl cs, t4, t2
wrsel cs, t2
wrip t0, t1
eret
farJmpSystemDescriptor:
panic "Far jumps to system descriptors aren't implemented"
eret
};
def macroop JMP_FAR_REAL_M
{
lea t1, seg, sib, disp, dataSize=asz
ld t2, seg, [1, t0, t1], dsz
ld t1, seg, [1, t0, t1]
zexti t3, t1, 15, dataSize=8
slli t3, t3, 4, dataSize=8
wrsel cs, t1, dataSize=2
wrbase cs, t3
wrip t0, t2, dataSize=asz
};
def macroop JMP_FAR_REAL_P
{
panic "Real mode far jump executed in 64 bit mode!"
};
def macroop JMP_FAR_REAL_I
{
# Put the whole far pointer into a register.
limm t2, imm, dataSize=8
# Figure out the width of the offset.
limm t3, dsz, dataSize=8
slli t3, t3, 3, dataSize=8
# Get the selector into t1.
sll t1, t2, t3, dataSize=8
mov t1, t0, t1, dataSize=2
# And get the offset into t2
mov t2, t0, t2
slli t3, t3, 4, dataSize=8
wrsel cs, t1, dataSize=2
wrbase cs, t3
wrip t0, t2, dataSize=asz
};
'''
| bsd-3-clause | 6,019,071,972,304,923,000 | 29.971264 | 72 | 0.696419 | false |
gmacchi93/serverInfoParaguay | apps/venv/lib/python2.7/site-packages/django/middleware/locale.py | 85 | 3156 | "This is the locale selecting middleware that will look at accept headers"
from django.conf import settings
from django.core.urlresolvers import (
LocaleRegexURLResolver, get_resolver, get_script_prefix, is_valid_path,
)
from django.http import HttpResponseRedirect
from django.utils import translation
from django.utils.cache import patch_vary_headers
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context. This allows pages to be dynamically
translated to the language the user desires (if the language
is available, of course).
"""
response_redirect_class = HttpResponseRedirect
def __init__(self):
self._is_language_prefix_patterns_used = False
for url_pattern in get_resolver(None).url_patterns:
if isinstance(url_pattern, LocaleRegexURLResolver):
self._is_language_prefix_patterns_used = True
break
def process_request(self, request):
check_path = self.is_language_prefix_patterns_used()
language = translation.get_language_from_request(
request, check_path=check_path)
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
language = translation.get_language()
language_from_path = translation.get_language_from_path(request.path_info)
if (response.status_code == 404 and not language_from_path
and self.is_language_prefix_patterns_used()):
urlconf = getattr(request, 'urlconf', None)
language_path = '/%s%s' % (language, request.path_info)
path_valid = is_valid_path(language_path, urlconf)
if (not path_valid and settings.APPEND_SLASH
and not language_path.endswith('/')):
path_valid = is_valid_path("%s/" % language_path, urlconf)
if path_valid:
script_prefix = get_script_prefix()
language_url = "%s://%s%s" % (
request.scheme,
request.get_host(),
# insert language after the script prefix and before the
# rest of the URL
request.get_full_path().replace(
script_prefix,
'%s%s/' % (script_prefix, language),
1
)
)
return self.response_redirect_class(language_url)
if not (self.is_language_prefix_patterns_used()
and language_from_path):
patch_vary_headers(response, ('Accept-Language',))
if 'Content-Language' not in response:
response['Content-Language'] = language
return response
def is_language_prefix_patterns_used(self):
"""
Returns `True` if the `LocaleRegexURLResolver` is used
at root level of the urlpatterns, else it returns `False`.
"""
return self._is_language_prefix_patterns_used
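# Usage sketch (assumptions: a standard settings module and i18n URL
# patterns; none of this is part of the middleware itself):
#
#   MIDDLEWARE_CLASSES += ('django.middleware.locale.LocaleMiddleware',)
#   urlpatterns = i18n_patterns(url(r'^about/$', about))
#
# With that in place, a request for /about/ 404s without a language
# prefix, and process_response above redirects it to e.g. /en/about/
# when the resolved language is "en".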
| apache-2.0 | -3,226,576,463,676,864,500 | 41.08 | 82 | 0.613435 | false |
bwhmather/wmii | alternative_wmiircs/python/pyxp/client.py | 9 | 9838 | # Copyright (C) 2009 Kris Maglione
import operator
import os
import re
import sys
from threading import *
import traceback
import pyxp
from pyxp import fcall, fields
from pyxp.mux import Mux
from pyxp.types import *
if os.environ.get('NAMESPACE', None):
namespace = os.environ['NAMESPACE']
else:
try:
namespace = '/tmp/ns.%s.%s' % (
os.environ['USER'],
re.sub(r'\.0$', '', os.environ['DISPLAY']))
except Exception:
pass
NAMESPACE = namespace
OREAD = 0x00
OWRITE = 0x01
ORDWR = 0x02
OEXEC = 0x03
OEXCL = 0x04
OTRUNC = 0x10
OREXEC = 0x20
ORCLOSE = 0x40
OAPPEND = 0x80
ROOT_FID = 0
class ProtocolException(Exception):
pass
class RPCError(Exception):
pass
class Client(object):
ROOT_FID = 0
@staticmethod
def respond(callback, data, exc=None, tb=None):
if hasattr(callback, 'func_code'):
callback(*(data, exc, tb)[0:callback.func_code.co_argcount])
elif callable(callback):
callback(data)
def __enter__(self):
return self
def __exit__(self, *args):
self._cleanup()
def __init__(self, conn=None, namespace=None, root=None):
if not conn and namespace:
conn = 'unix!%s/%s' % (NAMESPACE, namespace)
try:
self.lastfid = ROOT_FID
self.fids = set()
self.lock = RLock()
def process(data):
return fcall.Fcall.unmarshall(data)[1]
self.mux = Mux(conn, process, maxtag=256)
resp = self._dorpc(fcall.Tversion(version=pyxp.VERSION, msize=65535))
if resp.version != pyxp.VERSION:
raise ProtocolException, "Can't speak 9P version '%s'" % resp.version
self.msize = resp.msize
self._dorpc(fcall.Tattach(fid=ROOT_FID, afid=fcall.NO_FID,
uname=os.environ['USER'], aname=''))
if root:
path = self._splitpath(root)
resp = self._dorpc(fcall.Twalk(fid=ROOT_FID,
newfid=ROOT_FID,
wname=path))
except Exception:
            traceback.print_exc(file=sys.stdout)
if getattr(self, 'mux', None):
self.mux.fd.close()
raise
    def _cleanup(self):
        try:
            # No registry of open files is kept on the instance, so fall
            # back to an empty tuple instead of raising AttributeError.
            for f in getattr(self, 'files', ()):
                f.close()
        finally:
            self.mux.fd.close()
            self.mux = None
self.mux = None
def _dorpc(self, req, callback=None, error=None):
def doresp(resp):
if isinstance(resp, fcall.Rerror):
raise RPCError, "%s[%d] RPC returned error: %s" % (
req.__class__.__name__, resp.tag, resp.ename)
if req.type != resp.type ^ 1:
raise ProtocolException, "Missmatched RPC message types: %s => %s" % (
req.__class__.__name__, resp.__class__.__name__)
return resp
def next(mux, resp):
try:
res = doresp(resp)
except Exception, e:
self.respond(error or callback, None, e, None)
else:
self.respond(callback, res)
if not callback:
return doresp(self.mux.rpc(req))
self.mux.rpc(req, next)
def _splitpath(self, path):
if isinstance(path, list):
return path
return [v for v in path.split('/') if v != '']
def _getfid(self):
with self.lock:
if self.fids:
return self.fids.pop()
self.lastfid += 1
return self.lastfid
def _putfid(self, fid):
with self.lock:
self.fids.add(fid)
def _aclunk(self, fid, callback=None):
def next(resp, exc, tb):
if resp:
self._putfid(fid)
self.respond(callback, resp, exc, tb)
self._dorpc(fcall.Tclunk(fid=fid), next)
def _clunk(self, fid):
try:
self._dorpc(fcall.Tclunk(fid=fid))
finally:
self._putfid(fid)
def _walk(self, path):
fid = self._getfid()
ofid = ROOT_FID
while True:
self._dorpc(fcall.Twalk(fid=ofid, newfid=fid,
wname=path[0:fcall.MAX_WELEM]))
path = path[fcall.MAX_WELEM:]
ofid = fid
if len(path) == 0:
break
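        # Python 2 idiom (annotation added for clarity): the @apply
        # decorator instantiates Res immediately, producing a one-off
        # context manager that clunks the walked fid if the body raises.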
@apply
class Res:
def __enter__(res):
return fid
def __exit__(res, exc_type, exc_value, traceback):
if exc_type:
self._clunk(fid)
return Res
_file = property(lambda self: File)
def _open(self, path, mode, fcall, origpath=None):
resp = None
with self._walk(path) as nfid:
fid = nfid
fcall.fid = fid
resp = self._dorpc(fcall)
def cleanup():
self._aclunk(fid)
file = self._file(self, origpath or '/'.join(path), resp, fid, mode, cleanup)
return file
def open(self, path, mode=OREAD):
path = self._splitpath(path)
return self._open(path, mode, fcall.Topen(mode=mode))
def create(self, path, mode=OREAD, perm=0):
path = self._splitpath(path)
name = path.pop()
return self._open(path, mode, fcall.Tcreate(mode=mode, name=name, perm=perm),
origpath='/'.join(path + [name]))
def remove(self, path):
path = self._splitpath(path)
with self._walk(path) as fid:
self._dorpc(fcall.Tremove(fid=fid))
def stat(self, path):
path = self._splitpath(path)
try:
with self._walk(path) as fid:
resp = self._dorpc(fcall.Tstat(fid= fid))
st = resp.stat
self._clunk(fid)
return st
except RPCError:
return None
def read(self, path, *args, **kwargs):
with self.open(path) as f:
return f.read(*args, **kwargs)
def readlines(self, path, *args, **kwargs):
with self.open(path) as f:
for l in f.readlines(*args, **kwargs):
yield l
def readdir(self, path, *args, **kwargs):
with self.open(path) as f:
for s in f.readdir(*args, **kwargs):
yield s
def write(self, path, *args, **kwargs):
with self.open(path, OWRITE) as f:
return f.write(*args, **kwargs)
class File(object):
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def __init__(self, client, path, fcall, fid, mode, cleanup):
self.lock = RLock()
self.client = client
self.path = path
self.fid = fid
self._cleanup = cleanup
self.mode = mode
self.iounit = fcall.iounit
self.qid = fcall.qid
self.closed = False
self.offset = 0
def __del__(self):
if not self.closed:
self._cleanup()
    def _dorpc(self, fcall, callback=None, error=None):
        if hasattr(fcall, 'fid'):
            fcall.fid = self.fid
        return self.client._dorpc(fcall, callback, error)
def stat(self):
resp = self._dorpc(fcall.Tstat())
return resp.stat
def read(self, count=None, offset=None, buf=''):
if count is None:
count = self.iounit
res = []
with self.lock:
offs = self.offset
if offset is not None:
offs = offset
while count > 0:
n = min(count, self.iounit)
count -= n
resp = self._dorpc(fcall.Tread(offset=offs, count=n))
data = resp.data
offs += len(data)
res.append(data)
if len(data) < n:
break
if offset is None:
self.offset = offs
return ''.join(res)
def readlines(self):
last = None
while True:
data = self.read()
if not data:
break
lines = data.split('\n')
if last:
lines[0] = last + lines[0]
last = None
for i in range(0, len(lines) - 1):
yield lines[i]
last = lines[-1]
if last:
yield last
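    # Note on readlines above: a read() may end mid-line, so the trailing
    # fragment is carried in `last` and prepended to the first line of the
    # next chunk; a final unterminated line is still yielded.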
def write(self, data, offset=None):
off = 0
with self.lock:
offs = self.offset
if offset is not None:
offs = offset
while off < len(data):
n = min(len(data), self.iounit)
resp = self._dorpc(fcall.Twrite(offset=offs,
data=data[off:off+n]))
off += resp.count
offs += resp.count
if resp.count < n:
break
if offset is None:
self.offset = offs
return off
def readdir(self):
if not self.qid.type & Qid.QTDIR:
raise Exception, "Can only call readdir on a directory"
off = 0
while True:
data = self.read(self.iounit, off)
if not data:
break
off += len(data)
for s in Stat.unmarshall_list(data):
yield s
def close(self):
assert not self.closed
self.closed = True
try:
self._cleanup()
except:
pass
self.tg = None
self.fid = None
self.client = None
self.qid = None
def remove(self):
try:
self._dorpc(fcall.Tremove())
finally:
self.close()
# vim:se sts=4 sw=4 et:
| mit | 1,797,695,052,249,184,500 | 27.270115 | 86 | 0.494511 | false |
furkantokac/Faunus | src/utilinux.py | 1 | 3351 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import subprocess, os, socket
def validate_ip(addr):
try:
socket.inet_aton(addr)
return True # legal
except socket.error:
return False # Not legal
def get_stdout(pi):
result = pi.communicate()
if len(result[0])>0:
return result[0]
else:
        return result[1] # some error has occurred
def killall(process_name):
counter = 0
pid = is_process_running(process_name)
while pid!=0:
execute_shell('kill '+str(pid))
pid = is_process_running(process_name)
counter += 1
return counter
def execute_shell(command, error=''):
return execute(command, wait=True, shellexec=True, errorstring=error)
def execute_shell_root(command, sudo_pwd='', error=''):
command = "echo "+str(sudo_pwd)+" | sudo -S "+command
return execute(command, wait=True, shellexec=True, errorstring=error)
def execute(command='', wait=True, shellexec=False, errorstring='', ags=None):
try:
if (shellexec):
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
p = subprocess.Popen(args=ags)
if wait:
p.wait()
result = get_stdout(p)
return result
else:
return p
except subprocess.CalledProcessError as e:
        print('Error occurred: ' + errorstring)
return errorstring
except Exception as ea:
        print('Exception occurred: ' + str(ea))  # Python 3 exceptions have no .message
return errorstring
def is_process_running(name):
cmd = 'ps aux |grep '+name+' |grep -v grep'
s = execute_shell(cmd)
if len(s)==0:
return 0
else:
t = s.split()
return int(t[1])
def check_dependency(packet_name):
if len(check_sysfile(packet_name))==0:
print(packet_name+" executable not found. Make sure you have installed "+packet_name)
return False
return True
def check_sysfile(filename):
if os.path.exists('/usr/sbin/'+filename):
return '/usr/sbin/'+filename
elif os.path.exists('/sbin/'+filename):
return '/sbin/'+filename
else:
return ''
def get_sysctl(setting, sudo_pwd):
result = execute_shell_root('sysctl '+setting, sudo_pwd).decode("utf-8")
if '=' in result:
return result.split('=')[1].lstrip()
else:
return result
def set_sysctl(setting, value, sudo_pwd):
return execute_shell_root('sysctl -w '+setting+'='+value, sudo_pwd).decode("utf-8")
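# Example usage (a sketch; it assumes a sudo password is available and that
# the sysctl binary is present on the system):
#
#     if get_sysctl('net.ipv4.ip_forward', pwd).strip() != '1':
#         set_sysctl('net.ipv4.ip_forward', '1', pwd)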
def interface_iw(name):
response = execute_shell('iwconfig').decode("utf-8")
lines = response.splitlines()
for line in lines:
if not line.startswith(' ') and len(line)>0:
text = line.split(' ')[0]
if line.startswith(name):
return text
return False
def interface_if(name):
response = execute_shell('ifconfig').decode("utf-8")
lines = response.splitlines()
for line in lines:
if not line.startswith(' ') and len(line)>0:
text = line.split(' ')[0]
if text.startswith(name):
return text
return False
def check_eth_connected(sudo_pwd):
response = execute_shell_root("ethtool eth0 | grep detected:", sudo_pwd).decode("utf-8")
if "yes" in response:
return True
return False | mit | 3,148,119,722,267,052,000 | 25.1875 | 101 | 0.604297 | false |
zycdragonball/tensorflow | tensorflow/contrib/framework/python/framework/tensor_util.py | 48 | 10771 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
__all__ = [
'assert_same_float_dtype',
'assert_scalar',
'assert_scalar_int',
'convert_to_tensor_or_sparse_tensor',
'is_tensor',
'reduce_sum_n',
'remove_squeezable_dimensions',
'with_shape',
'with_same_shape']
# Temporary for backwards compatibility
is_tensor = tensor_util.is_tensor
assert_same_float_dtype = check_ops.assert_same_float_dtype
assert_scalar = check_ops.assert_scalar
convert_to_tensor_or_sparse_tensor = (
sparse_tensor.convert_to_tensor_or_sparse_tensor)
def reduce_sum_n(tensors, name=None):
"""Reduce tensors to a scalar sum.
This reduces each tensor in `tensors` to a scalar via `tf.reduce_sum`, then
adds them via `tf.add_n`.
Args:
tensors: List of tensors, all of the same numeric type.
name: Tensor name, and scope for all other ops.
Returns:
    Scalar tensor with the total sum of `tensors`.
  Raises:
    ValueError: if `tensors` is missing or empty.
"""
if not tensors:
raise ValueError('No tensors provided.')
with ops.name_scope(name, 'reduce_sum_n', tensors) as name_scope:
tensors = [
math_ops.reduce_sum(t, name='%s/sum' % t.op.name) for t in tensors]
if len(tensors) == 1:
return tensors[0]
return math_ops.add_n(tensors, name=name_scope)
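# Example (illustrative only; `tf.constant` is the public API and is not
# imported by this module):
#
#   a = tf.constant([1.0, 2.0])
#   b = tf.constant([3.0])
#   total = reduce_sum_n([a, b])  # scalar tensor evaluating to 6.0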
def remove_squeezable_dimensions(predictions, labels, name=None):
"""Squeeze last dim if ranks of `predictions` and `labels` differ by 1.
This will use static shape if available. Otherwise, it will add graph
operations, which could result in a performance hit.
Args:
predictions: Predicted values, a `Tensor` of arbitrary dimensions.
labels: Label values, a `Tensor` whose dimensions match `predictions`.
name: Name of the op.
Returns:
Tuple of `predictions` and `labels`, possibly with last dim squeezed.
"""
with ops.name_scope(name, 'remove_squeezable_dimensions',
[predictions, labels]):
predictions = ops.convert_to_tensor(predictions)
labels = ops.convert_to_tensor(labels)
predictions_shape = predictions.get_shape()
predictions_rank = predictions_shape.ndims
labels_shape = labels.get_shape()
labels_rank = labels_shape.ndims
if (labels_rank is not None) and (predictions_rank is not None):
# Use static rank.
rank_diff = predictions_rank - labels_rank
if rank_diff == -1:
labels = array_ops.squeeze(labels, [-1])
elif rank_diff == 1:
predictions = array_ops.squeeze(predictions, [-1])
return predictions, labels
# Use dynamic rank.
rank_diff = array_ops.rank(predictions) - array_ops.rank(labels)
if (predictions_rank is None) or (
predictions_shape.dims[-1].is_compatible_with(1)):
predictions = control_flow_ops.cond(
math_ops.equal(1, rank_diff),
lambda: array_ops.squeeze(predictions, [-1]),
lambda: predictions)
if (labels_rank is None) or (
labels_shape.dims[-1].is_compatible_with(1)):
labels = control_flow_ops.cond(
math_ops.equal(-1, rank_diff),
lambda: array_ops.squeeze(labels, [-1]),
lambda: labels)
return predictions, labels
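# Example (illustrative only; the shapes below are assumptions):
#
#   predictions = tf.ones([32, 1])  # rank 2, trailing dim of size 1
#   labels = tf.zeros([32])         # rank 1
#   predictions, labels = remove_squeezable_dimensions(predictions, labels)
#   # predictions is squeezed to shape [32] to match labels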
def _all_equal(tensor0, tensor1):
with ops.name_scope('all_equal', values=[tensor0, tensor1]) as scope:
return math_ops.reduce_all(
math_ops.equal(tensor0, tensor1, name='equal'), name=scope)
def _is_rank(expected_rank, actual_tensor):
"""Returns whether actual_tensor's rank is expected_rank.
Args:
expected_rank: Integer defining the expected rank, or tensor of same.
actual_tensor: Tensor to test.
Returns:
New tensor.
"""
with ops.name_scope('is_rank', values=[actual_tensor]) as scope:
expected = ops.convert_to_tensor(expected_rank, name='expected')
actual = array_ops.rank(actual_tensor, name='actual')
return math_ops.equal(expected, actual, name=scope)
def _is_shape(expected_shape, actual_tensor, actual_shape=None):
"""Returns whether actual_tensor's shape is expected_shape.
Args:
expected_shape: Integer list defining the expected shape, or tensor of same.
actual_tensor: Tensor to test.
actual_shape: Shape of actual_tensor, if we already have it.
Returns:
New tensor.
"""
with ops.name_scope('is_shape', values=[actual_tensor]) as scope:
is_rank = _is_rank(array_ops.size(expected_shape), actual_tensor)
if actual_shape is None:
actual_shape = array_ops.shape(actual_tensor, name='actual')
shape_equal = _all_equal(
ops.convert_to_tensor(expected_shape, name='expected'),
actual_shape)
return math_ops.logical_and(is_rank, shape_equal, name=scope)
def _assert_shape_op(expected_shape, actual_tensor):
"""Asserts actual_tensor's shape is expected_shape.
Args:
expected_shape: List of integers defining the expected shape, or tensor of
same.
actual_tensor: Tensor to test.
Returns:
New assert tensor.
"""
with ops.name_scope('assert_shape', values=[actual_tensor]) as scope:
actual_shape = array_ops.shape(actual_tensor, name='actual')
is_shape = _is_shape(expected_shape, actual_tensor, actual_shape)
return control_flow_ops.Assert(
is_shape, [
'Wrong shape for %s [expected] [actual].' % actual_tensor.name,
expected_shape,
actual_shape
], name=scope)
def with_same_shape(expected_tensor, tensor):
"""Assert tensors are the same shape, from the same graph.
Args:
expected_tensor: Tensor with expected shape.
tensor: Tensor of actual values.
Returns:
Tuple of (actual_tensor, label_tensor), possibly with assert ops added.
"""
with ops.name_scope('%s/' % tensor.op.name, values=[expected_tensor, tensor]):
tensor_shape = expected_tensor.get_shape()
expected_shape = (
tensor_shape.as_list() if tensor_shape.is_fully_defined()
else array_ops.shape(expected_tensor, name='expected_shape'))
return with_shape(expected_shape, tensor)
def with_shape(expected_shape, tensor):
"""Asserts tensor has expected shape.
  If tensor shape and expected_shape are fully defined, assert they match.
Otherwise, add assert op that will validate the shape when tensor is
evaluated, and set shape on tensor.
Args:
expected_shape: Expected shape to assert, as a 1D array of ints, or tensor
of same.
tensor: Tensor whose shape we're validating.
Returns:
tensor, perhaps with a dependent assert operation.
Raises:
ValueError: if tensor has an invalid shape.
"""
if isinstance(tensor, sparse_tensor.SparseTensor):
raise ValueError('SparseTensor not supported.')
# Shape type must be 1D int32.
if tensor_util.is_tensor(expected_shape):
if expected_shape.dtype.base_dtype != dtypes.int32:
raise ValueError(
'Invalid dtype %s for shape %s expected of tensor %s.' % (
expected_shape.dtype, expected_shape, tensor.name))
if isinstance(expected_shape, (list, tuple)):
if not expected_shape:
expected_shape = np.asarray([], dtype=np.int32)
else:
np_expected_shape = np.asarray(expected_shape)
expected_shape = (
np.asarray(expected_shape, dtype=np.int32)
if np_expected_shape.dtype == np.int64 else np_expected_shape)
if isinstance(expected_shape, np.ndarray):
if expected_shape.ndim > 1:
raise ValueError(
'Invalid rank %s for shape %s expected of tensor %s.' % (
expected_shape.ndim, expected_shape, tensor.name))
if expected_shape.dtype != np.int32:
raise ValueError(
'Invalid dtype %s for shape %s expected of tensor %s.' % (
expected_shape.dtype, expected_shape, tensor.name))
actual_shape = tensor.get_shape()
if (not actual_shape.is_fully_defined()
or tensor_util.is_tensor(expected_shape)):
with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
if (not tensor_util.is_tensor(expected_shape)
and (len(expected_shape) < 1)):
# TODO(irving): Remove scalar special case
return array_ops.reshape(tensor, [])
with ops.control_dependencies([_assert_shape_op(expected_shape, tensor)]):
result = array_ops.identity(tensor)
if not tensor_util.is_tensor(expected_shape):
result.set_shape(expected_shape)
return result
if (not tensor_util.is_tensor(expected_shape) and
not actual_shape.is_compatible_with(expected_shape)):
if (len(expected_shape) < 1) and actual_shape.is_compatible_with([1]):
# TODO(irving): Remove scalar special case.
with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
return array_ops.reshape(tensor, [])
raise ValueError('Invalid shape for tensor %s, expected %s, got %s.' % (
tensor.name, expected_shape, actual_shape))
return tensor
def assert_scalar_int(tensor, name=None):
"""Assert `tensor` is 0-D, of type `tf.int32` or `tf.int64`.
Args:
tensor: `Tensor` to test.
name: Name of the op and of the new `Tensor` if one is created.
Returns:
`tensor`, for chaining.
Raises:
ValueError: if `tensor` is not 0-D, of integer type.
"""
with ops.name_scope(name, 'assert_scalar_int', [tensor]) as name_scope:
tensor = ops.convert_to_tensor(tensor)
data_type = tensor.dtype
if not data_type.base_dtype.is_integer:
raise ValueError('Expected integer type for %s, received type: %s.'
% (tensor.name, data_type))
return check_ops.assert_scalar(tensor, name=name_scope)
| apache-2.0 | 5,745,323,022,520,242,000 | 35.886986 | 80 | 0.676725 | false |
kmolab/kmolab.github.io | data/Brython-3.3.4/Lib/test/test_urllib2.py | 24 | 61773 | import unittest
from test import support
import os
import io
import socket
import array
import sys
import urllib.request
# The proxy bypass method imported below has logic specific to the OSX
# proxy config data structure but is testable on all platforms.
from urllib.request import Request, OpenerDirector, _proxy_bypass_macosx_sysconf
import urllib.error
# XXX
# Request
# CacheFTPHandler (hard to write)
# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler
class TrivialTests(unittest.TestCase):
def test___all__(self):
# Verify which names are exposed
for module in 'request', 'response', 'parse', 'error', 'robotparser':
context = {}
exec('from urllib.%s import *' % module, context)
del context['__builtins__']
if module == 'request' and os.name == 'nt':
u, p = context.pop('url2pathname'), context.pop('pathname2url')
self.assertEqual(u.__module__, 'nturl2path')
self.assertEqual(p.__module__, 'nturl2path')
for k, v in context.items():
self.assertEqual(v.__module__, 'urllib.%s' % module,
"%r is exposed in 'urllib.%s' but defined in %r" %
(k, module, v.__module__))
def test_trivial(self):
# A couple trivial tests
self.assertRaises(ValueError, urllib.request.urlopen, 'bogus url')
# XXX Name hacking to get this to work on Windows.
fname = os.path.abspath(urllib.request.__file__).replace('\\', '/')
if os.name == 'nt':
file_url = "file:///%s" % fname
else:
file_url = "file://%s" % fname
f = urllib.request.urlopen(file_url)
f.read()
f.close()
def test_parse_http_list(self):
tests = [
('a,b,c', ['a', 'b', 'c']),
('path"o,l"og"i"cal, example', ['path"o,l"og"i"cal', 'example']),
('a, b, "c", "d", "e,f", g, h',
['a', 'b', '"c"', '"d"', '"e,f"', 'g', 'h']),
('a="b\\"c", d="e\\,f", g="h\\\\i"',
['a="b"c"', 'd="e,f"', 'g="h\\i"'])]
for string, list in tests:
self.assertEqual(urllib.request.parse_http_list(string), list)
def test_URLError_reasonstr(self):
err = urllib.error.URLError('reason')
self.assertIn(err.reason, str(err))
class RequestHdrsTests(unittest.TestCase):
def test_request_headers_dict(self):
"""
The Request.headers dictionary is not a documented interface. It
should stay that way, because the complete set of headers are only
accessible through the .get_header(), .has_header(), .header_items()
interface. However, .headers pre-dates those methods, and so real code
will be using the dictionary.
The introduction in 2.4 of those methods was a mistake for the same
reason: code that previously saw all (urllib2 user)-provided headers in
.headers now sees only a subset.
"""
url = "http://example.com"
self.assertEqual(Request(url,
headers={"Spam-eggs": "blah"}
).headers["Spam-eggs"], "blah")
self.assertEqual(Request(url,
headers={"spam-EggS": "blah"}
).headers["Spam-eggs"], "blah")
def test_request_headers_methods(self):
"""
Note the case normalization of header names here, to
.capitalize()-case. This should be preserved for
backwards-compatibility. (In the HTTP case, normalization to
.title()-case is done by urllib2 before sending headers to
http.client).
Note that e.g. r.has_header("spam-EggS") is currently False, and
r.get_header("spam-EggS") returns None, but that could be changed in
future.
Method r.remove_header should remove items both from r.headers and
r.unredirected_hdrs dictionaries
"""
url = "http://example.com"
req = Request(url, headers={"Spam-eggs": "blah"})
self.assertTrue(req.has_header("Spam-eggs"))
self.assertEqual(req.header_items(), [('Spam-eggs', 'blah')])
req.add_header("Foo-Bar", "baz")
self.assertEqual(sorted(req.header_items()),
[('Foo-bar', 'baz'), ('Spam-eggs', 'blah')])
self.assertFalse(req.has_header("Not-there"))
self.assertIsNone(req.get_header("Not-there"))
self.assertEqual(req.get_header("Not-there", "default"), "default")
def test_password_manager(self):
mgr = urllib.request.HTTPPasswordMgr()
add = mgr.add_password
find_user_pass = mgr.find_user_password
add("Some Realm", "http://example.com/", "joe", "password")
add("Some Realm", "http://example.com/ni", "ni", "ni")
add("c", "http://example.com/foo", "foo", "ni")
add("c", "http://example.com/bar", "bar", "nini")
add("b", "http://example.com/", "first", "blah")
add("b", "http://example.com/", "second", "spam")
add("a", "http://example.com", "1", "a")
add("Some Realm", "http://c.example.com:3128", "3", "c")
add("Some Realm", "d.example.com", "4", "d")
add("Some Realm", "e.example.com:3128", "5", "e")
self.assertEqual(find_user_pass("Some Realm", "example.com"),
('joe', 'password'))
#self.assertEqual(find_user_pass("Some Realm", "http://example.com/ni"),
# ('ni', 'ni'))
self.assertEqual(find_user_pass("Some Realm", "http://example.com"),
('joe', 'password'))
self.assertEqual(find_user_pass("Some Realm", "http://example.com/"),
('joe', 'password'))
self.assertEqual(
find_user_pass("Some Realm", "http://example.com/spam"),
('joe', 'password'))
self.assertEqual(
find_user_pass("Some Realm", "http://example.com/spam/spam"),
('joe', 'password'))
self.assertEqual(find_user_pass("c", "http://example.com/foo"),
('foo', 'ni'))
self.assertEqual(find_user_pass("c", "http://example.com/bar"),
('bar', 'nini'))
self.assertEqual(find_user_pass("b", "http://example.com/"),
('second', 'spam'))
# No special relationship between a.example.com and example.com:
self.assertEqual(find_user_pass("a", "http://example.com/"),
('1', 'a'))
self.assertEqual(find_user_pass("a", "http://a.example.com/"),
(None, None))
# Ports:
self.assertEqual(find_user_pass("Some Realm", "c.example.com"),
(None, None))
self.assertEqual(find_user_pass("Some Realm", "c.example.com:3128"),
('3', 'c'))
self.assertEqual(
find_user_pass("Some Realm", "http://c.example.com:3128"),
('3', 'c'))
self.assertEqual(find_user_pass("Some Realm", "d.example.com"),
('4', 'd'))
self.assertEqual(find_user_pass("Some Realm", "e.example.com:3128"),
('5', 'e'))
def test_password_manager_default_port(self):
"""
The point to note here is that we can't guess the default port if
there's no scheme. This applies to both add_password and
find_user_password.
"""
mgr = urllib.request.HTTPPasswordMgr()
add = mgr.add_password
find_user_pass = mgr.find_user_password
add("f", "http://g.example.com:80", "10", "j")
add("g", "http://h.example.com", "11", "k")
add("h", "i.example.com:80", "12", "l")
add("i", "j.example.com", "13", "m")
self.assertEqual(find_user_pass("f", "g.example.com:100"),
(None, None))
self.assertEqual(find_user_pass("f", "g.example.com:80"),
('10', 'j'))
self.assertEqual(find_user_pass("f", "g.example.com"),
(None, None))
self.assertEqual(find_user_pass("f", "http://g.example.com:100"),
(None, None))
self.assertEqual(find_user_pass("f", "http://g.example.com:80"),
('10', 'j'))
self.assertEqual(find_user_pass("f", "http://g.example.com"),
('10', 'j'))
self.assertEqual(find_user_pass("g", "h.example.com"), ('11', 'k'))
self.assertEqual(find_user_pass("g", "h.example.com:80"), ('11', 'k'))
self.assertEqual(find_user_pass("g", "http://h.example.com:80"),
('11', 'k'))
self.assertEqual(find_user_pass("h", "i.example.com"), (None, None))
self.assertEqual(find_user_pass("h", "i.example.com:80"), ('12', 'l'))
self.assertEqual(find_user_pass("h", "http://i.example.com:80"),
('12', 'l'))
self.assertEqual(find_user_pass("i", "j.example.com"), ('13', 'm'))
self.assertEqual(find_user_pass("i", "j.example.com:80"),
(None, None))
self.assertEqual(find_user_pass("i", "http://j.example.com"),
('13', 'm'))
self.assertEqual(find_user_pass("i", "http://j.example.com:80"),
(None, None))
class MockOpener:
addheaders = []
def open(self, req, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.req, self.data, self.timeout = req, data, timeout
def error(self, proto, *args):
self.proto, self.args = proto, args
class MockFile:
def read(self, count=None): pass
def readline(self, count=None): pass
def close(self): pass
class MockHeaders(dict):
def getheaders(self, name):
return list(self.values())
class MockResponse(io.StringIO):
def __init__(self, code, msg, headers, data, url=None):
io.StringIO.__init__(self, data)
self.code, self.msg, self.headers, self.url = code, msg, headers, url
def info(self):
return self.headers
def geturl(self):
return self.url
class MockCookieJar:
def add_cookie_header(self, request):
self.ach_req = request
def extract_cookies(self, response, request):
self.ec_req, self.ec_r = request, response
class FakeMethod:
def __init__(self, meth_name, action, handle):
self.meth_name = meth_name
self.handle = handle
self.action = action
def __call__(self, *args):
return self.handle(self.meth_name, self.action, *args)
class MockHTTPResponse(io.IOBase):
def __init__(self, fp, msg, status, reason):
self.fp = fp
self.msg = msg
self.status = status
self.reason = reason
self.code = 200
def read(self):
return ''
def info(self):
return {}
def geturl(self):
return self.url
class MockHTTPClass:
def __init__(self):
self.level = 0
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
self._tunnel_headers = {}
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.timeout = timeout
return self
def set_debuglevel(self, level):
self.level = level
def set_tunnel(self, host, port=None, headers=None):
self._tunnel_host = host
self._tunnel_port = port
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def request(self, method, url, body=None, headers=None):
self.method = method
self.selector = url
if headers is not None:
self.req_headers += headers.items()
self.req_headers.sort()
if body:
self.data = body
if self.raise_on_endheaders:
import socket
raise socket.error()
def getresponse(self):
return MockHTTPResponse(MockFile(), {}, 200, "OK")
def close(self):
pass
class MockHandler:
# useful for testing handler machinery
# see add_ordered_mock_handlers() docstring
handler_order = 500
def __init__(self, methods):
self._define_methods(methods)
def _define_methods(self, methods):
for spec in methods:
if len(spec) == 2: name, action = spec
else: name, action = spec, None
meth = FakeMethod(name, action, self.handle)
setattr(self.__class__, name, meth)
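        # Each spec entry becomes a real method on the handler subclass:
        # FakeMethod closes over the method name and action and delegates to
        # handle(), which records the call on the parent OpenerDirector.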
def handle(self, fn_name, action, *args, **kwds):
self.parent.calls.append((self, fn_name, args, kwds))
if action is None:
return None
elif action == "return self":
return self
elif action == "return response":
res = MockResponse(200, "OK", {}, "")
return res
elif action == "return request":
return Request("http://blah/")
elif action.startswith("error"):
code = action[action.rfind(" ")+1:]
try:
code = int(code)
except ValueError:
pass
res = MockResponse(200, "OK", {}, "")
return self.parent.error("http", args[0], res, code, "", {})
elif action == "raise":
raise urllib.error.URLError("blah")
assert False
def close(self): pass
def add_parent(self, parent):
self.parent = parent
self.parent.calls = []
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# No handler_order, leave in original order. Yuck.
return True
return self.handler_order < other.handler_order
def add_ordered_mock_handlers(opener, meth_spec):
"""Create MockHandlers and add them to an OpenerDirector.
meth_spec: list of lists of tuples and strings defining methods to define
on handlers. eg:
[["http_error", "ftp_open"], ["http_open"]]
defines methods .http_error() and .ftp_open() on one handler, and
.http_open() on another. These methods just record their arguments and
return None. Using a tuple instead of a string causes the method to
perform some action (see MockHandler.handle()), eg:
[["http_error"], [("http_open", "return request")]]
defines .http_error() on one handler (which simply returns None), and
.http_open() on another handler, which returns a Request object.
"""
handlers = []
count = 0
for meths in meth_spec:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order += count
h.add_parent(opener)
count = count + 1
handlers.append(h)
opener.add_handler(h)
return handlers
def build_test_opener(*handler_instances):
opener = OpenerDirector()
for h in handler_instances:
opener.add_handler(h)
return opener
class MockHTTPHandler(urllib.request.BaseHandler):
# useful for testing redirections and auth
# sends supplied headers and code as first response
# sends 200 OK as second response
def __init__(self, code, headers):
self.code = code
self.headers = headers
self.reset()
def reset(self):
self._count = 0
self.requests = []
def http_open(self, req):
import email, http.client, copy
self.requests.append(copy.deepcopy(req))
if self._count == 0:
self._count = self._count + 1
name = http.client.responses[self.code]
msg = email.message_from_string(self.headers)
return self.parent.error(
"http", req, MockFile(), self.code, name, msg)
else:
self.req = req
msg = email.message_from_string("\r\n\r\n")
return MockResponse(200, "OK", msg, "", req.get_full_url())
class MockHTTPSHandler(urllib.request.AbstractHTTPHandler):
# Useful for testing the Proxy-Authorization request by verifying the
# properties of httpcon
def __init__(self):
urllib.request.AbstractHTTPHandler.__init__(self)
self.httpconn = MockHTTPClass()
def https_open(self, req):
return self.do_open(self.httpconn, req)
class MockPasswordManager:
def add_password(self, realm, uri, user, password):
self.realm = realm
self.url = uri
self.user = user
self.password = password
def find_user_password(self, realm, authuri):
self.target_realm = realm
self.target_url = authuri
return self.user, self.password
class OpenerDirectorTests(unittest.TestCase):
def test_add_non_handler(self):
class NonHandler(object):
pass
self.assertRaises(TypeError,
OpenerDirector().add_handler, NonHandler())
def test_badly_named_methods(self):
# test work-around for three methods that accidentally follow the
# naming conventions for handler methods
# (*_open() / *_request() / *_response())
# These used to call the accidentally-named methods, causing a
# TypeError in real code; here, returning self from these mock
# methods would either cause no exception, or AttributeError.
from urllib.error import URLError
o = OpenerDirector()
meth_spec = [
[("do_open", "return self"), ("proxy_open", "return self")],
[("redirect_request", "return self")],
]
add_ordered_mock_handlers(o, meth_spec)
o.add_handler(urllib.request.UnknownHandler())
for scheme in "do", "proxy", "redirect":
self.assertRaises(URLError, o.open, scheme+"://example.com/")
def test_handled(self):
# handler returning non-None means no more handlers will be called
o = OpenerDirector()
meth_spec = [
["http_open", "ftp_open", "http_error_302"],
["ftp_open"],
[("http_open", "return self")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# Second .http_open() gets called, third doesn't, since second returned
# non-None. Handlers without .http_open() never get any methods called
# on them.
# In fact, second mock handler defining .http_open() returns self
# (instead of response), which becomes the OpenerDirector's return
# value.
self.assertEqual(r, handlers[2])
calls = [(handlers[0], "http_open"), (handlers[2], "http_open")]
for expected, got in zip(calls, o.calls):
handler, name, args, kwds = got
self.assertEqual((handler, name), expected)
self.assertEqual(args, (req,))
def test_handler_order(self):
o = OpenerDirector()
handlers = []
for meths, handler_order in [
([("http_open", "return self")], 500),
(["http_open"], 0),
]:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order = handler_order
handlers.append(h)
o.add_handler(h)
o.open("http://example.com/")
# handlers called in reverse order, thanks to their sort order
self.assertEqual(o.calls[0][0], handlers[1])
self.assertEqual(o.calls[1][0], handlers[0])
def test_raise(self):
# raising URLError stops processing of request
o = OpenerDirector()
meth_spec = [
[("http_open", "raise")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
self.assertRaises(urllib.error.URLError, o.open, req)
self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})])
def test_http_error(self):
# XXX http_error_default
# http errors are a special case
o = OpenerDirector()
meth_spec = [
[("http_open", "error 302")],
[("http_error_400", "raise"), "http_open"],
[("http_error_302", "return response"), "http_error_303",
"http_error"],
[("http_error_302")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
class Unknown:
def __eq__(self, other): return True
req = Request("http://example.com/")
o.open(req)
assert len(o.calls) == 2
calls = [(handlers[0], "http_open", (req,)),
(handlers[2], "http_error_302",
(req, Unknown(), 302, "", {}))]
for expected, got in zip(calls, o.calls):
handler, method_name, args = expected
self.assertEqual((handler, method_name), got[:2])
self.assertEqual(args, got[2])
def test_processors(self):
# *_request / *_response methods get called appropriately
o = OpenerDirector()
meth_spec = [
[("http_request", "return request"),
("http_response", "return response")],
[("http_request", "return request"),
("http_response", "return response")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
o.open(req)
# processor methods are called on *all* handlers that define them,
# not just the first handler that handles the request
calls = [
(handlers[0], "http_request"), (handlers[1], "http_request"),
(handlers[0], "http_response"), (handlers[1], "http_response")]
for i, (handler, name, args, kwds) in enumerate(o.calls):
if i < 2:
# *_request
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 1)
self.assertIsInstance(args[0], Request)
else:
# *_response
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 2)
self.assertIsInstance(args[0], Request)
# response from opener.open is None, because there's no
# handler that defines http_open to handle it
self.assertTrue(args[1] is None or
isinstance(args[1], MockResponse))
def test_method_deprecations(self):
req = Request("http://www.example.com")
with self.assertWarns(DeprecationWarning):
req.add_data("data")
with self.assertWarns(DeprecationWarning):
req.get_data()
with self.assertWarns(DeprecationWarning):
req.has_data()
with self.assertWarns(DeprecationWarning):
req.get_host()
with self.assertWarns(DeprecationWarning):
req.get_selector()
with self.assertWarns(DeprecationWarning):
req.is_unverifiable()
with self.assertWarns(DeprecationWarning):
req.get_origin_req_host()
with self.assertWarns(DeprecationWarning):
req.get_type()
def sanepathname2url(path):
try:
path.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("path is not encodable to utf8")
urlpath = urllib.request.pathname2url(path)
if os.name == "nt" and urlpath.startswith("///"):
urlpath = urlpath[2:]
# XXX don't ask me about the mac...
return urlpath
class HandlerTests(unittest.TestCase):
def test_ftp(self):
class MockFTPWrapper:
def __init__(self, data): self.data = data
def retrfile(self, filename, filetype):
self.filename, self.filetype = filename, filetype
return io.StringIO(self.data), len(self.data)
def close(self): pass
class NullFTPHandler(urllib.request.FTPHandler):
def __init__(self, data): self.data = data
def connect_ftp(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.user, self.passwd = user, passwd
self.host, self.port = host, port
self.dirs = dirs
self.ftpwrapper = MockFTPWrapper(self.data)
return self.ftpwrapper
import ftplib
data = "rheum rhaponicum"
h = NullFTPHandler(data)
h.parent = MockOpener()
for url, host, port, user, passwd, type_, dirs, filename, mimetype in [
("ftp://localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%25parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%2542parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%42parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://localhost:80/foo/bar/",
"localhost", 80, "", "", "D",
["foo", "bar"], "", None),
("ftp://localhost/baz.gif;type=a",
"localhost", ftplib.FTP_PORT, "", "", "A",
[], "baz.gif", None), # XXX really this should guess image/gif
]:
req = Request(url)
req.timeout = None
r = h.ftp_open(req)
# ftp authentication not yet implemented by FTPHandler
self.assertEqual(h.user, user)
self.assertEqual(h.passwd, passwd)
self.assertEqual(h.host, socket.gethostbyname(host))
self.assertEqual(h.port, port)
self.assertEqual(h.dirs, dirs)
self.assertEqual(h.ftpwrapper.filename, filename)
self.assertEqual(h.ftpwrapper.filetype, type_)
headers = r.info()
self.assertEqual(headers.get("Content-type"), mimetype)
self.assertEqual(int(headers["Content-length"]), len(data))
def test_file(self):
import email.utils, socket
h = urllib.request.FileHandler()
o = h.parent = MockOpener()
TESTFN = support.TESTFN
urlpath = sanepathname2url(os.path.abspath(TESTFN))
towrite = b"hello, world\n"
urls = [
"file://localhost%s" % urlpath,
"file://%s" % urlpath,
"file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
]
try:
localaddr = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
localaddr = ''
if localaddr:
urls.append("file://%s%s" % (localaddr, urlpath))
for url in urls:
f = open(TESTFN, "wb")
try:
try:
f.write(towrite)
finally:
f.close()
r = h.file_open(Request(url))
try:
data = r.read()
headers = r.info()
respurl = r.geturl()
finally:
r.close()
stats = os.stat(TESTFN)
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
finally:
os.remove(TESTFN)
self.assertEqual(data, towrite)
self.assertEqual(headers["Content-type"], "text/plain")
self.assertEqual(headers["Content-length"], "13")
self.assertEqual(headers["Last-modified"], modified)
self.assertEqual(respurl, url)
for url in [
"file://localhost:80%s" % urlpath,
"file:///file_does_not_exist.txt",
"file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
os.getcwd(), TESTFN),
"file://somerandomhost.ontheinternet.com%s/%s" %
(os.getcwd(), TESTFN),
]:
try:
f = open(TESTFN, "wb")
try:
f.write(towrite)
finally:
f.close()
self.assertRaises(urllib.error.URLError,
h.file_open, Request(url))
finally:
os.remove(TESTFN)
h = urllib.request.FileHandler()
o = h.parent = MockOpener()
# XXXX why does // mean ftp (and /// mean not ftp!), and where
# is file: scheme specified? I think this is really a bug, and
# what was intended was to distinguish between URLs like:
# file:/blah.txt (a file)
# file://localhost/blah.txt (a file)
# file:///blah.txt (a file)
# file://ftp.example.com/blah.txt (an ftp URL)
for url, ftp in [
("file://ftp.example.com//foo.txt", False),
("file://ftp.example.com///foo.txt", False),
# XXXX bug: fails with OSError, should be URLError
("file://ftp.example.com/foo.txt", False),
("file://somehost//foo/something.txt", False),
("file://localhost//foo/something.txt", False),
]:
req = Request(url)
try:
h.file_open(req)
# XXXX remove OSError when bug fixed
except (urllib.error.URLError, OSError):
self.assertFalse(ftp)
else:
self.assertIs(o.req, req)
self.assertEqual(req.type, "ftp")
self.assertEqual(req.type == "ftp", ftp)
def test_http(self):
h = urllib.request.AbstractHTTPHandler()
o = h.parent = MockOpener()
url = "http://example.com/"
for method, data in [("GET", None), ("POST", b"blah")]:
req = Request(url, data, {"Foo": "bar"})
req.timeout = None
req.add_unredirected_header("Spam", "eggs")
http = MockHTTPClass()
r = h.do_open(http, req)
# result attributes
r.read; r.readline # wrapped MockFile methods
r.info; r.geturl # addinfourl methods
r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply()
hdrs = r.info()
hdrs.get; hdrs.__contains__ # r.info() gives dict from .getreply()
self.assertEqual(r.geturl(), url)
self.assertEqual(http.host, "example.com")
self.assertEqual(http.level, 0)
self.assertEqual(http.method, method)
self.assertEqual(http.selector, "/")
self.assertEqual(http.req_headers,
[("Connection", "close"),
("Foo", "bar"), ("Spam", "eggs")])
self.assertEqual(http.data, data)
# check socket.error converted to URLError
http.raise_on_endheaders = True
self.assertRaises(urllib.error.URLError, h.do_open, http, req)
# Check for TypeError on POST data which is str.
req = Request("http://example.com/","badpost")
self.assertRaises(TypeError, h.do_request_, req)
# check adding of standard headers
o.addheaders = [("Spam", "eggs")]
for data in b"", None: # POST, GET
req = Request("http://example.com/", data)
r = MockResponse(200, "OK", {}, "")
newreq = h.do_request_(req)
if data is None: # GET
self.assertNotIn("Content-length", req.unredirected_hdrs)
self.assertNotIn("Content-type", req.unredirected_hdrs)
else: # POST
self.assertEqual(req.unredirected_hdrs["Content-length"], "0")
self.assertEqual(req.unredirected_hdrs["Content-type"],
"application/x-www-form-urlencoded")
# XXX the details of Host could be better tested
self.assertEqual(req.unredirected_hdrs["Host"], "example.com")
self.assertEqual(req.unredirected_hdrs["Spam"], "eggs")
# don't clobber existing headers
req.add_unredirected_header("Content-length", "foo")
req.add_unredirected_header("Content-type", "bar")
req.add_unredirected_header("Host", "baz")
req.add_unredirected_header("Spam", "foo")
newreq = h.do_request_(req)
self.assertEqual(req.unredirected_hdrs["Content-length"], "foo")
self.assertEqual(req.unredirected_hdrs["Content-type"], "bar")
self.assertEqual(req.unredirected_hdrs["Host"], "baz")
self.assertEqual(req.unredirected_hdrs["Spam"], "foo")
# Check iterable body support
def iterable_body():
yield b"one"
yield b"two"
yield b"three"
for headers in {}, {"Content-Length": 11}:
req = Request("http://example.com/", iterable_body(), headers)
if not headers:
# Having an iterable body without a Content-Length should
# raise an exception
self.assertRaises(ValueError, h.do_request_, req)
else:
newreq = h.do_request_(req)
# A file object.
# Test only Content-Length attribute of request.
file_obj = io.BytesIO()
file_obj.write(b"Something\nSomething\nSomething\n")
for headers in {}, {"Content-Length": 30}:
req = Request("http://example.com/", file_obj, headers)
if not headers:
# Having an iterable body without a Content-Length should
# raise an exception
self.assertRaises(ValueError, h.do_request_, req)
else:
newreq = h.do_request_(req)
self.assertEqual(int(newreq.get_header('Content-length')),30)
file_obj.close()
# array.array Iterable - Content Length is calculated
iterable_array = array.array("I",[1,2,3,4])
for headers in {}, {"Content-Length": 16}:
req = Request("http://example.com/", iterable_array, headers)
newreq = h.do_request_(req)
self.assertEqual(int(newreq.get_header('Content-length')),16)
def test_http_doubleslash(self):
# Checks the presence of any unnecessary double slash in url does not
# break anything. Previously, a double slash directly after the host
# could cause incorrect parsing.
h = urllib.request.AbstractHTTPHandler()
h.parent = MockOpener()
data = b""
ds_urls = [
"http://example.com/foo/bar/baz.html",
"http://example.com//foo/bar/baz.html",
"http://example.com/foo//bar/baz.html",
"http://example.com/foo/bar//baz.html"
]
for ds_url in ds_urls:
ds_req = Request(ds_url, data)
# Check whether host is determined correctly if there is no proxy
np_ds_req = h.do_request_(ds_req)
self.assertEqual(np_ds_req.unredirected_hdrs["Host"],"example.com")
# Check whether host is determined correctly if there is a proxy
ds_req.set_proxy("someproxy:3128",None)
p_ds_req = h.do_request_(ds_req)
self.assertEqual(p_ds_req.unredirected_hdrs["Host"],"example.com")
def test_fixpath_in_weirdurls(self):
        # Issue 4493: urllib2 should supply '/' for URLs whose path does not
        # start with '/'
h = urllib.request.AbstractHTTPHandler()
h.parent = MockOpener()
weird_url = 'http://www.python.org?getspam'
req = Request(weird_url)
newreq = h.do_request_(req)
self.assertEqual(newreq.host,'www.python.org')
self.assertEqual(newreq.selector,'/?getspam')
url_without_path = 'http://www.python.org'
req = Request(url_without_path)
newreq = h.do_request_(req)
self.assertEqual(newreq.host,'www.python.org')
self.assertEqual(newreq.selector,'')
def test_errors(self):
h = urllib.request.HTTPErrorProcessor()
o = h.parent = MockOpener()
url = "http://example.com/"
req = Request(url)
# all 2xx are passed through
r = MockResponse(200, "OK", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
r = MockResponse(202, "Accepted", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
r = MockResponse(206, "Partial content", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
# anything else calls o.error (and MockOpener returns None, here)
r = MockResponse(502, "Bad gateway", {}, "", url)
self.assertIsNone(h.http_response(req, r))
self.assertEqual(o.proto, "http") # o.error called
self.assertEqual(o.args, (req, r, 502, "Bad gateway", {}))
def test_cookies(self):
cj = MockCookieJar()
h = urllib.request.HTTPCookieProcessor(cj)
h.parent = MockOpener()
req = Request("http://example.com/")
r = MockResponse(200, "OK", {}, "")
newreq = h.http_request(req)
self.assertIs(cj.ach_req, req)
self.assertIs(cj.ach_req, newreq)
self.assertEqual(req.origin_req_host, "example.com")
self.assertFalse(req.unverifiable)
newr = h.http_response(req, r)
self.assertIs(cj.ec_req, req)
self.assertIs(cj.ec_r, r)
self.assertIs(r, newr)
def test_redirect(self):
from_url = "http://example.com/a.html"
to_url = "http://example.com/b.html"
h = urllib.request.HTTPRedirectHandler()
o = h.parent = MockOpener()
# ordinary redirect behaviour
for code in 301, 302, 303, 307:
for data in None, "blah\nblah\n":
method = getattr(h, "http_error_%s" % code)
req = Request(from_url, data)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
req.add_header("Nonsense", "viking=withhold")
if data is not None:
req.add_header("Content-Length", str(len(data)))
req.add_unredirected_header("Spam", "spam")
try:
method(req, MockFile(), code, "Blah",
MockHeaders({"location": to_url}))
except urllib.error.HTTPError:
# 307 in response to POST requires user OK
self.assertTrue(code == 307 and data is not None)
self.assertEqual(o.req.get_full_url(), to_url)
try:
self.assertEqual(o.req.get_method(), "GET")
except AttributeError:
self.assertFalse(o.req.data)
# now it's a GET, there should not be headers regarding content
# (possibly dragged from before being a POST)
headers = [x.lower() for x in o.req.headers]
self.assertNotIn("content-length", headers)
self.assertNotIn("content-type", headers)
self.assertEqual(o.req.headers["Nonsense"],
"viking=withhold")
self.assertNotIn("Spam", o.req.headers)
self.assertNotIn("Spam", o.req.unredirected_hdrs)
# loop detection
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
def redirect(h, req, url=to_url):
h.http_error_302(req, MockFile(), 302, "Blah",
MockHeaders({"location": url}))
# Note that the *original* request shares the same record of
# redirections with the sub-requests caused by the redirections.
# detect infinite loop redirect of a URL to itself
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/")
count = count + 1
except urllib.error.HTTPError:
# don't stop until max_repeats, because cookies may introduce state
self.assertEqual(count, urllib.request.HTTPRedirectHandler.max_repeats)
# detect endless non-repeating chain of redirects
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/%d" % count)
count = count + 1
except urllib.error.HTTPError:
self.assertEqual(count,
urllib.request.HTTPRedirectHandler.max_redirections)
def test_invalid_redirect(self):
from_url = "http://example.com/a.html"
valid_schemes = ['http','https','ftp']
invalid_schemes = ['file','imap','ldap']
schemeless_url = "example.com/b.html"
h = urllib.request.HTTPRedirectHandler()
o = h.parent = MockOpener()
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
for scheme in invalid_schemes:
invalid_url = scheme + '://' + schemeless_url
self.assertRaises(urllib.error.HTTPError, h.http_error_302,
req, MockFile(), 302, "Security Loophole",
MockHeaders({"location": invalid_url}))
for scheme in valid_schemes:
valid_url = scheme + '://' + schemeless_url
h.http_error_302(req, MockFile(), 302, "That's fine",
MockHeaders({"location": valid_url}))
self.assertEqual(o.req.get_full_url(), valid_url)
def test_relative_redirect(self):
from_url = "http://example.com/a.html"
relative_url = "/b.html"
h = urllib.request.HTTPRedirectHandler()
o = h.parent = MockOpener()
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
valid_url = urllib.parse.urljoin(from_url,relative_url)
h.http_error_302(req, MockFile(), 302, "That's fine",
MockHeaders({"location": valid_url}))
self.assertEqual(o.req.get_full_url(), valid_url)
def test_cookie_redirect(self):
# cookies shouldn't leak into redirected requests
from http.cookiejar import CookieJar
from test.test_http_cookiejar import interact_netscape
cj = CookieJar()
interact_netscape(cj, "http://www.example.com/", "spam=eggs")
hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
cp = urllib.request.HTTPCookieProcessor(cj)
o = build_test_opener(hh, hdeh, hrh, cp)
o.open("http://www.example.com/")
self.assertFalse(hh.req.has_header("Cookie"))
def test_redirect_fragment(self):
redirected_url = 'http://www.example.com/index.html#OK\r\n\r\n'
hh = MockHTTPHandler(302, 'Location: ' + redirected_url)
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
o = build_test_opener(hh, hdeh, hrh)
fp = o.open('http://www.example.com')
self.assertEqual(fp.geturl(), redirected_url.strip())
def test_proxy(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("http_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://acme.example.com/")
self.assertEqual(req.host, "acme.example.com")
o.open(req)
self.assertEqual(req.host, "proxy.example.com:3128")
self.assertEqual([(handlers[0], "http_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_no_proxy(self):
os.environ['no_proxy'] = 'python.org'
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com"))
o.add_handler(ph)
req = Request("http://www.perl.org/")
self.assertEqual(req.host, "www.perl.org")
o.open(req)
self.assertEqual(req.host, "proxy.example.com")
req = Request("http://www.python.org")
self.assertEqual(req.host, "www.python.org")
o.open(req)
self.assertEqual(req.host, "www.python.org")
del os.environ['no_proxy']
def test_proxy_no_proxy_all(self):
os.environ['no_proxy'] = '*'
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com"))
o.add_handler(ph)
req = Request("http://www.python.org")
self.assertEqual(req.host, "www.python.org")
o.open(req)
self.assertEqual(req.host, "www.python.org")
del os.environ['no_proxy']
def test_proxy_https(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(https="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("https_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("https://www.example.com/")
self.assertEqual(req.host, "www.example.com")
o.open(req)
self.assertEqual(req.host, "proxy.example.com:3128")
self.assertEqual([(handlers[0], "https_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_https_proxy_authorization(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(https='proxy.example.com:3128'))
o.add_handler(ph)
https_handler = MockHTTPSHandler()
o.add_handler(https_handler)
req = Request("https://www.example.com/")
req.add_header("Proxy-Authorization","FooBar")
req.add_header("User-Agent","Grail")
self.assertEqual(req.host, "www.example.com")
self.assertIsNone(req._tunnel_host)
o.open(req)
# Verify Proxy-Authorization gets tunneled to request.
# httpsconn req_headers do not have the Proxy-Authorization header but
# the req will have.
self.assertNotIn(("Proxy-Authorization","FooBar"),
https_handler.httpconn.req_headers)
self.assertIn(("User-Agent","Grail"),
https_handler.httpconn.req_headers)
self.assertIsNotNone(req._tunnel_host)
self.assertEqual(req.host, "proxy.example.com:3128")
self.assertEqual(req.get_header("Proxy-authorization"),"FooBar")
# TODO: This should be only for OSX
@unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX")
def test_osx_proxy_bypass(self):
bypass = {
'exclude_simple': False,
'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.10',
'10.0/16']
}
# Check hosts that should trigger the proxy bypass
for host in ('foo.bar', 'www.bar.com', '127.0.0.1', '10.10.0.1',
'10.0.0.1'):
self.assertTrue(_proxy_bypass_macosx_sysconf(host, bypass),
'expected bypass of %s to be True' % host)
# Check hosts that should not trigger the proxy bypass
for host in ('abc.foo.bar', 'bar.com', '127.0.0.2', '10.11.0.1', 'test'):
self.assertFalse(_proxy_bypass_macosx_sysconf(host, bypass),
'expected bypass of %s to be False' % host)
# Check the exclude_simple flag
bypass = {'exclude_simple': True, 'exceptions': []}
self.assertTrue(_proxy_bypass_macosx_sysconf('test', bypass))
def test_basic_auth(self, quote_char='"'):
opener = OpenerDirector()
password_manager = MockPasswordManager()
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
realm = "ACME Widget Store"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
(quote_char, realm, quote_char) )
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
def test_basic_auth_with_single_quoted_realm(self):
self.test_basic_auth(quote_char="'")
def test_basic_auth_with_unquoted_realm(self):
opener = OpenerDirector()
password_manager = MockPasswordManager()
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
realm = "ACME Widget Store"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm=%s\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
with self.assertWarns(UserWarning):
self._test_basic_auth(opener, auth_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
def test_proxy_basic_auth(self):
opener = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com:3128"))
opener.add_handler(ph)
password_manager = MockPasswordManager()
auth_handler = urllib.request.ProxyBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
realm, http_handler, password_manager,
"http://acme.example.com:3128/protected",
"proxy.example.com:3128",
)
def test_basic_and_digest_auth_handlers(self):
# HTTPDigestAuthHandler raised an exception if it couldn't handle a 40*
# response (http://python.org/sf/1479302), where it should instead
# return None to allow another handler (especially
# HTTPBasicAuthHandler) to handle the response.
# Also (http://python.org/sf/14797027, RFC 2617 section 1.2), we must
# try digest first (since it's the strongest auth scheme), so we record
# order of calls here to check digest comes first:
class RecordingOpenerDirector(OpenerDirector):
def __init__(self):
OpenerDirector.__init__(self)
self.recorded = []
def record(self, info):
self.recorded.append(info)
class TestDigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("digest")
urllib.request.HTTPDigestAuthHandler.http_error_401(self,
*args, **kwds)
class TestBasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("basic")
urllib.request.HTTPBasicAuthHandler.http_error_401(self,
*args, **kwds)
opener = RecordingOpenerDirector()
password_manager = MockPasswordManager()
digest_handler = TestDigestAuthHandler(password_manager)
basic_handler = TestBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(basic_handler)
opener.add_handler(digest_handler)
opener.add_handler(http_handler)
# check basic auth isn't blocked by digest handler failing
self._test_basic_auth(opener, basic_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
# check digest was tried before basic (twice, because
# _test_basic_auth called .open() twice)
self.assertEqual(opener.recorded, ["digest", "basic"]*2)
def test_unsupported_auth_digest_handler(self):
opener = OpenerDirector()
# While using DigestAuthHandler
digest_auth_handler = urllib.request.HTTPDigestAuthHandler(None)
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Kerberos\r\n\r\n')
opener.add_handler(digest_auth_handler)
opener.add_handler(http_handler)
self.assertRaises(ValueError,opener.open,"http://www.example.com")
def test_unsupported_auth_basic_handler(self):
# While using BasicAuthHandler
opener = OpenerDirector()
basic_auth_handler = urllib.request.HTTPBasicAuthHandler(None)
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: NTLM\r\n\r\n')
opener.add_handler(basic_auth_handler)
opener.add_handler(http_handler)
self.assertRaises(ValueError,opener.open,"http://www.example.com")
def _test_basic_auth(self, opener, auth_handler, auth_header,
realm, http_handler, password_manager,
request_url, protected_url):
import base64
user, password = "wile", "coyote"
# .add_password() fed through to password manager
auth_handler.add_password(realm, request_url, user, password)
self.assertEqual(realm, password_manager.realm)
self.assertEqual(request_url, password_manager.url)
self.assertEqual(user, password_manager.user)
self.assertEqual(password, password_manager.password)
opener.open(request_url)
# should have asked the password manager for the username/password
self.assertEqual(password_manager.target_realm, realm)
self.assertEqual(password_manager.target_url, protected_url)
# expect one request without authorization, then one with
self.assertEqual(len(http_handler.requests), 2)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
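# The Basic scheme sends base64("user:password"); build the header value
# we expect the handler to have attached to the retried request.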
userpass = bytes('%s:%s' % (user, password), "ascii")
auth_hdr_value = ('Basic ' +
base64.encodebytes(userpass).strip().decode())
self.assertEqual(http_handler.requests[1].get_header(auth_header),
auth_hdr_value)
self.assertEqual(http_handler.requests[1].unredirected_hdrs[auth_header],
auth_hdr_value)
# if the password manager can't find a password, the handler won't
# handle the HTTP auth error
password_manager.user = password_manager.password = None
http_handler.reset()
opener.open(request_url)
self.assertEqual(len(http_handler.requests), 1)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
class MiscTests(unittest.TestCase):
def opener_has_handler(self, opener, handler_class):
self.assertTrue(any(h.__class__ == handler_class
for h in opener.handlers))
def test_build_opener(self):
class MyHTTPHandler(urllib.request.HTTPHandler): pass
class FooHandler(urllib.request.BaseHandler):
def foo_open(self): pass
class BarHandler(urllib.request.BaseHandler):
def bar_open(self): pass
build_opener = urllib.request.build_opener
o = build_opener(FooHandler, BarHandler)
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# can take a mix of classes and instances
o = build_opener(FooHandler, BarHandler())
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# subclasses of default handlers override default handlers
o = build_opener(MyHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
# a particular case of overriding: default handlers can be passed
# in explicitly
o = build_opener()
self.opener_has_handler(o, urllib.request.HTTPHandler)
o = build_opener(urllib.request.HTTPHandler)
self.opener_has_handler(o, urllib.request.HTTPHandler)
o = build_opener(urllib.request.HTTPHandler())
self.opener_has_handler(o, urllib.request.HTTPHandler)
# Issue2670: multiple handlers sharing the same base class
class MyOtherHTTPHandler(urllib.request.HTTPHandler): pass
o = build_opener(MyHTTPHandler, MyOtherHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
self.opener_has_handler(o, MyOtherHTTPHandler)
def test_HTTPError_interface(self):
"""
Issue 13211 reveals that HTTPError didn't implement the URLError
interface even though HTTPError is a subclass of URLError.
"""
msg = 'something bad happened'
url = code = fp = None
hdrs = 'Content-Length: 42'
err = urllib.error.HTTPError(url, code, msg, hdrs, fp)
self.assertTrue(hasattr(err, 'reason'))
self.assertEqual(err.reason, 'something bad happened')
self.assertTrue(hasattr(err, 'hdrs'))
self.assertEqual(err.hdrs, 'Content-Length: 42')
expected_errmsg = 'HTTP Error %s: %s' % (err.code, err.msg)
self.assertEqual(str(err), expected_errmsg)
class RequestTests(unittest.TestCase):
def setUp(self):
self.get = Request("http://www.python.org/~jeremy/")
self.post = Request("http://www.python.org/~jeremy/",
"data",
headers={"X-Test": "test"})
def test_method(self):
self.assertEqual("POST", self.post.get_method())
self.assertEqual("GET", self.get.get_method())
def test_data(self):
self.assertFalse(self.get.data)
self.assertEqual("GET", self.get.get_method())
self.get.data = "spam"
self.assertTrue(self.get.data)
self.assertEqual("POST", self.get.get_method())
def test_get_full_url(self):
self.assertEqual("http://www.python.org/~jeremy/",
self.get.get_full_url())
def test_selector(self):
self.assertEqual("/~jeremy/", self.get.selector)
req = Request("http://www.python.org/")
self.assertEqual("/", req.selector)
def test_get_type(self):
self.assertEqual("http", self.get.type)
def test_get_host(self):
self.assertEqual("www.python.org", self.get.host)
def test_get_host_unquote(self):
req = Request("http://www.%70ython.org/")
self.assertEqual("www.python.org", req.host)
def test_proxy(self):
self.assertFalse(self.get.has_proxy())
self.get.set_proxy("www.perl.org", "http")
self.assertTrue(self.get.has_proxy())
self.assertEqual("www.python.org", self.get.origin_req_host)
self.assertEqual("www.perl.org", self.get.host)
def test_wrapped_url(self):
req = Request("<URL:http://www.python.org>")
self.assertEqual("www.python.org", req.host)
def test_url_fragment(self):
req = Request("http://www.python.org/?qs=query#fragment=true")
self.assertEqual("/?qs=query", req.selector)
req = Request("http://www.python.org/#fun=true")
self.assertEqual("/", req.selector)
# Issue 11703: geturl() omits fragment in the original URL.
url = 'http://docs.python.org/library/urllib2.html#OK'
req = Request(url)
self.assertEqual(req.get_full_url(), url)
def test_HTTPError_interface_call(self):
"""
Issue 15701 - HTTPError interface has info method available from URLError
"""
err = urllib.request.HTTPError(msg="something bad happened", url=None,
code=None, hdrs='Content-Length:42', fp=None)
self.assertTrue(hasattr(err, 'reason'))
self.assertTrue(hasattr(err, 'info'))
self.assertTrue(callable(err.info))
try:
err.info()
except AttributeError:
self.fail('err.info call failed.')
self.assertEqual(err.info(), "Content-Length:42")
def test_main(verbose=None):
from test import test_urllib2
support.run_doctest(test_urllib2, verbose)
support.run_doctest(urllib.request, verbose)
tests = (TrivialTests,
OpenerDirectorTests,
HandlerTests,
MiscTests,
RequestTests,
RequestHdrsTests)
support.run_unittest(*tests)
if __name__ == "__main__":
test_main(verbose=True)
| agpl-3.0 | -5,734,097,133,676,702,000 | 39.533465 | 83 | 0.567513 | false |
ywcui1990/nupic.research | projects/vehicle-control/agent/run_sm.py | 6 | 7819 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from collections import defaultdict
import operator
import time
import numpy
from unity_client.server import Server
from nupic.encoders.coordinate import CoordinateEncoder
from nupic.encoders.scalar import ScalarEncoder
from nupic.algorithms.monitor_mixin.trace import CountsTrace
from sensorimotor.extended_temporal_memory import ApicalTiebreakPairMemory
from htmresearch.support.apical_tm_pair_monitor_mixin import (
ApicalTMPairMonitorMixin)
class MonitoredApicalTiebreakPairMemory(
ApicalTMPairMonitorMixin, ApicalTiebreakPairMemory): pass
SCALE = 5
RADIUS = 10
class Agent(object):
def __init__(self):
self.encoder = CoordinateEncoder(n=1024,
w=21)
self.motorEncoder = ScalarEncoder(21, -1, 1,
n=1024)
self.tm = MonitoredApicalTiebreakPairMemory(
columnDimensions=[2048],
basalInputDimensions=(999999,), # Dodge input checking.
cellsPerColumn=1,
initialPermanence=0.5,
connectedPermanence=0.6,
permanenceIncrement=0.1,
permanenceDecrement=0.02,
minThreshold=35,
activationThreshold=35,
maxNewSynapseCount=40)
self.plotter = Plotter(self.tm, showOverlaps=False, showOverlapsValues=False)
self.lastState = None
self.lastAction = None
self.prevMotorPattern = ()
def sync(self, outputData):
if not ("location" in outputData and
"steer" in outputData):
print "Warning: Missing data:", outputData
return
reset = outputData.get("reset") or False
if reset:
print "Reset."
self.tm.reset()
location = outputData["location"]
steer = outputData["steer"]
x = int(location["x"] * SCALE)
z = int(location["z"] * SCALE)
coordinate = numpy.array([x, z])
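# CoordinateEncoder expects integer coordinates; SCALE sets the spatial
# resolution and RADIUS how far apart two positions can be while still
# producing overlapping encodings.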
encoding = self.encoder.encode((coordinate, RADIUS))
motorEncoding = self.motorEncoder.encode(steer)
sensorPattern = set(encoding.nonzero()[0])
motorPattern = set(motorEncoding.nonzero()[0])
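# Pair the sensory SDR with the motor command as basal (distal) input;
# the previous motor pattern is passed back in so synapses learned on
# the just-completed transition are reinforced and grown.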
self.tm.compute(sensorPattern,
activeCellsExternalBasal=motorPattern,
reinforceCandidatesExternalBasal=self.prevMotorPattern,
growthCandidatesExternalBasal=self.prevMotorPattern)
print self.tm.mmPrettyPrintMetrics(self.tm.mmGetDefaultMetrics())
self.plotter.update(encoding, reset)
if reset:
self.plotter.render()
self.lastState = encoding
self.lastAction = steer
self.prevMotorPattern = motorPattern
class Plotter(object):
def __init__(self, tm, showOverlaps=False, showOverlapsValues=False):
self.tm = tm
self.showOverlaps = showOverlaps
self.showOverlapsValues = showOverlapsValues
self.encodings = []
self.resets = []
self.numSegmentsPerCell = []
self.numSynapsesPerSegment = []
import matplotlib.pyplot as plt
self.plt = plt
import matplotlib.cm as cm
self.cm = cm
from pylab import rcParams
if self.showOverlaps and self.showOverlapsValues:
rcParams.update({'figure.figsize': (20, 20)})
else:
rcParams.update({'figure.figsize': (6, 12)})
rcParams.update({'figure.autolayout': True})
rcParams.update({'figure.facecolor': 'white'})
rcParams.update({'ytick.labelsize': 8})
def update(self, encoding, reset):
self.encodings.append(encoding)
self.resets.append(reset)
# TODO: Deal with empty segments / unconnected synapses
numSegmentsPerCell = [len(segments) for segments in
self.tm.connections._segmentsForCell.values()]
self.numSegmentsPerCell.append(numpy.array(numSegmentsPerCell))
numSynapsesPerSegment = [len(synapses) for synapses in
self.tm.connections._synapsesForSegment.values()]
self.numSynapsesPerSegment.append(numpy.array(numSynapsesPerSegment))
def render(self):
timestamp = int(time.time())
self.plt.figure(1)
self.plt.clf()
self._renderMetrics(timestamp)
if self.showOverlaps:
self.plt.figure(2)
self.plt.clf()
self._renderOverlaps(timestamp)
def _renderMetrics(self, timestamp):
traces = self.tm.mmGetDefaultTraces()
traces = [trace for trace in traces if type(trace) is CountsTrace]
t = len(traces)
n = t + 2
for i in xrange(t):
trace = traces[i]
self.plt.subplot(n, 1, i+1)
self._plot(trace.data, trace.title)
self.plt.subplot(n, 1, t+1)
self._plotDistributions(self.numSegmentsPerCell, "# segments per cell")
self.plt.subplot(n, 1, t+2)
self._plotDistributions(self.numSynapsesPerSegment, "# synapses per segment")
self.plt.draw()
self.plt.savefig("sm-{0}_A.png".format(timestamp))
def _renderOverlaps(self, timestamp):
self.plt.subplot(1, 1, 1)
overlaps = self._computeOverlaps()
self._imshow(overlaps, "Overlaps", aspect=None)
for i in self._computeResetIndices():
self.plt.axvline(i, color='black', alpha=0.5)
self.plt.axhline(i, color='black', alpha=0.5)
if self.showOverlapsValues:
for i in range(len(overlaps)):
for j in range(len(overlaps[i])):
overlap = "%.1f" % overlaps[i][j]
self.plt.annotate(overlap, xy=(i, j), fontsize=6, color='red', verticalalignment='center', horizontalalignment='center')
self.plt.draw()
self.plt.savefig("sm-{0}_B.png".format(timestamp))
def _computeOverlaps(self):
overlaps = []
encodings = self.encodings
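# Pairwise overlap between encodings, normalized by the larger of the
# two active-bit counts, so identical SDRs score 1.0.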
for i in range(len(encodings)):
row = []
for j in range(len(encodings)):
n = max(encodings[i].sum(), encodings[j].sum())
overlap = (encodings[i] & encodings[j]).sum() / float(n)
row.append(overlap)
overlaps.append(row)
return overlaps
def _computeResetIndices(self):
return numpy.array(self.resets).nonzero()[0]
def _plot(self, data, title):
self.plt.plot(range(len(data)), data)
self._finishPlot(data, title)
def _finishPlot(self, data, title):
self.plt.title(title)
self.plt.xlim(0, len(data))
for i in self._computeResetIndices():
self.plt.axvline(i, color='black', alpha=0.5)
def _imshow(self, data, title, aspect='auto'):
self.plt.title(title)
self.plt.imshow(data,
cmap=self.cm.Greys,
interpolation="nearest",
aspect=aspect,
vmin=0,
vmax=1)
def _plotDistributions(self, data, title):
means = [numpy.mean(x) if len(x) else 0 for x in data]
maxs = [numpy.max(x) if len(x) else 0 for x in data]
self.plt.plot(range(len(data)), means, label='mean')
self.plt.plot(range(len(data)), maxs, label='max')
self.plt.legend(loc='lower right')
self._finishPlot(data, title)
if __name__ == "__main__":
agent = Agent()
Server(agent)
| agpl-3.0 | 2,449,206,957,233,335,300 | 28.066914 | 130 | 0.654687 | false |
morelab/weblabdeusto | server/launch/sample_balanced2_concurrent_experiments/main_machine/lab_and_experiment1/experiment48/server_config.py | 968 | 1526 | #!/usr/bin/env python
#-*-*- encoding: utf-8 -*-*-
weblab_xilinx_experiment_xilinx_device = 'FPGA'
weblab_xilinx_experiment_port_number = 1
# This should be something like this:
# import os as _os
# xilinx_home = _os.getenv('XILINX_HOME')
# if xilinx_home == None:
# if _os.name == 'nt':
# xilinx_home = r'C:\Program Files\Xilinx'
# elif _os.name == 'posix':
# xilinx_home = r"/home/nctrun/Xilinx"
#
# if _os.name == 'nt':
# xilinx_impact_full_path = [xilinx_home + r'\bin\nt\impact']
# elif _os.name == 'posix':
# xilinx_impact_full_path = [xilinx_home + r'/bin/lin/impact']
# But for testing we are going to fake it:
xilinx_home = "."
xilinx_impact_full_path = ["python","./tests/unit/weblab/experiment/devices/xilinx_impact/fake_impact.py" ]
xilinx_device_to_program = 'XilinxImpact' # 'JTagBlazer', 'DigilentAdept'
xilinx_device_to_send_commands = 'SerialPort' # 'HttpDevice'
digilent_adept_full_path = ["python","./test/unit/weblab/experiment/devices/digilent_adept/fake_digilent_adept.py" ]
digilent_adept_batch_content = """something with the variable $FILE"""
xilinx_http_device_ip_FPGA = "192.168.50.138"
xilinx_http_device_port_FPGA = 80
xilinx_http_device_app_FPGA = ""
xilinx_batch_content_FPGA = """setMode -bs
setCable -port auto
addDevice -position 1 -file $FILE
Program -p 1
exit
"""
# Though it is not really a FPGA, the webcam url var name depends on the device,
# specified above.
fpga_webcam_url = '''https://www.weblab.deusto.es/webcam/fpga0/image.jpg'''
| bsd-2-clause | 6,883,212,198,247,922,000 | 32.911111 | 116 | 0.68152 | false |
ahujamoh/seldon-server | scripts/zookeeper/set-client-config.py | 7 | 1898 | #!/usr/bin/env python
import sys, getopt, argparse
from kazoo.client import KazooClient
import json
def loadZookeeperOptions(opts,zk):
node = "/all_clients/"+opts['client']+"/offline/semvec"
if zk.exists(node):
data, stat = zk.get(node)
jStr = data.decode("utf-8")
print "Found zookeeper configuration:",jStr
j = json.loads(jStr)
for key in j:
opts[key] = j[key]
def activateModel(args,folder,zk):
node = "/all_clients/"+args.client+"/svtext"
print "Activating model in zookeper at node ",node," with data ",folder
if zk.exists(node):
zk.set(node,folder)
else:
zk.create(node,folder,makepath=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog='set-client-config')
parser.add_argument('-z', '--zookeeper', help='zookeeper hosts', required=True)
parser.add_argument('--clientVariable', help='client variable name', default="$CLIENT")
args = parser.parse_args()
opts = vars(args)
zk = KazooClient(hosts=args.zookeeper)
zk.start()
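# Each non-comment stdin line is expected to hold three whitespace-separated
# fields: a comma-separated client list, a zookeeper node path and a value.
# Occurrences of the client variable (default "$CLIENT") in the node path
# and value are substituted per client.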
for line in sys.stdin:
line = line.rstrip()
parts = line.split()
if len(parts) == 3 and not line.startswith("#"):
clients = parts[0].split(',')
node = parts[1]
value = parts[2]
print "--------------------------"
print parts[0],node,"->",value
for client in clients:
nodeClient = node.replace(args.clientVariable,client)
valueClient = value.replace(args.clientVariable,client)
print "----"
print nodeClient
print valueClient
if zk.exists(nodeClient):
zk.set(nodeClient,valueClient)
else:
zk.create(nodeClient,valueClient,makepath=True)
zk.stop()
| apache-2.0 | -7,577,533,050,142,838,000 | 31.724138 | 91 | 0.564805 | false |
mhabrnal/abrt | tests/runtests/abrt-cli-report-mantisbt/fakefaf.py | 6 | 1502 | #!/usr/bin/env python
# Single purpose HTTP server
# - accepts POST of ureport JSON and dumps it to a file
import sys
import json
import BaseHTTPServer, cgi
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
# parse form data
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': self.headers['Content-Type'],
}
)
self.send_response(202)
self.send_header('Content-Type', 'application/json')
self.send_header('Connection', 'close')
self.end_headers()
if self.path == '/reports/new/':
ureport = json.load(form['file'].file)
with open(self.save_ureport, 'w') as fh:
json.dump(ureport, fh, indent=2)
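# Minimal FAF-like acknowledgement: a fixed fake bthash and an empty
# reported_to entry are enough for the reporting client under test.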
response = {
'bthash': '691cf824e3e07457156125636e86c50279e29496',
'reported_to': [
{}
],
'result': False
}
else:
with open(self.save_ureport, 'w') as fh:
fh.write('{"invalid_request_path": "%s"}' % self.path)
return
json.dump(response, self.wfile, indent=2)
PORT = 12345
print "Serving at port", PORT
Handler.save_ureport = sys.argv[1] if len(sys.argv) > 1 else 'ureport.json'
httpd = BaseHTTPServer.HTTPServer(("127.0.0.1", PORT), Handler)
httpd.serve_forever()
| gpl-2.0 | -9,040,532,042,226,652,000 | 29.04 | 75 | 0.551265 | false |
h3biomed/ansible | lib/ansible/plugins/doc_fragments/default_callback.py | 3 | 1911 | # -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
DOCUMENTATION = r'''
options:
display_skipped_hosts:
name: Show skipped hosts
description: "Toggle to control displaying skipped task/host results in a task"
type: bool
default: yes
env:
- name: DISPLAY_SKIPPED_HOSTS
deprecated:
why: environment variables without "ANSIBLE_" prefix are deprecated
version: "2.12"
alternatives: the "ANSIBLE_DISPLAY_SKIPPED_HOSTS" environment variable
- name: ANSIBLE_DISPLAY_SKIPPED_HOSTS
ini:
- key: display_skipped_hosts
section: defaults
display_ok_hosts:
name: Show 'ok' hosts
description: "Toggle to control displaying 'ok' task/host results in a task"
type: bool
default: yes
env:
- name: ANSIBLE_DISPLAY_OK_HOSTS
ini:
- key: display_ok_hosts
section: defaults
version_added: '2.7'
display_failed_stderr:
name: Use STDERR for failed and unreachable tasks
description: "Toggle to control whether failed and unreachable tasks are displayed to STDERR (vs. STDOUT)"
type: bool
default: no
env:
- name: ANSIBLE_DISPLAY_FAILED_STDERR
ini:
- key: display_failed_stderr
section: defaults
version_added: '2.7'
show_custom_stats:
name: Show custom stats
description: 'This adds the custom stats set via the set_stats plugin to the play recap'
type: bool
default: no
env:
- name: ANSIBLE_SHOW_CUSTOM_STATS
ini:
- key: show_custom_stats
section: defaults
'''
| gpl-3.0 | 5,353,748,675,572,584,000 | 31.948276 | 114 | 0.59079 | false |
pyfa-org/Pyfa | eos/saveddata/fighterAbility.py | 2 | 5835 | # ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
from logbook import Logger
from sqlalchemy.orm import reconstructor
from eos.utils.stats import DmgTypes
pyfalog = Logger(__name__)
class FighterAbility:
# We aren't able to get data on the charges that can be stored with fighters. So we hardcode that data here, keyed
# with the fighter squadron role
NUM_SHOTS_MAPPING = {
1: 0, # Superiority fighter / Attack
2: 12, # Light fighter / Attack
4: 6, # Heavy fighter / Heavy attack
5: 3, # Heavy fighter / Long range attack
}
# Same as above
REARM_TIME_MAPPING = {
1: 0, # Superiority fighter / Attack
2: 4000, # Light fighter / Attack
4: 6000, # Heavy fighter / Heavy attack
5: 20000, # Heavy fighter / Long range attack
}
def __init__(self, effect):
"""Initialize from the program"""
self.__effect = effect
self.effectID = effect.ID if effect is not None else None
self.active = False
self.build()
@reconstructor
def init(self):
"""Initialize from the database"""
self.__effect = None
if self.effectID:
self.__effect = next((x for x in self.fighter.item.effects.values() if x.ID == self.effectID), None)
if self.__effect is None:
pyfalog.error("Effect (id: {0}) does not exist", self.effectID)
return
self.build()
def build(self):
pass
@property
def effect(self):
return self.__effect
@property
def name(self):
return self.__effect.getattr('displayName') or self.__effect.name
@property
def attrPrefix(self):
return self.__effect.getattr('prefix')
@property
def dealsDamage(self):
attr = "{}DamageMultiplier".format(self.attrPrefix)
return attr in self.fighter.itemModifiedAttributes or self.fighter.charge is not None
@property
def grouped(self):
# is the ability applied per fighter (webs, returns False), or as a group (MWD, returned True)
return self.__effect.getattr('grouped')
@property
def hasCharges(self):
return self.__effect.getattr('hasCharges')
@property
def reloadTime(self):
return self.getReloadTime()
def getReloadTime(self, spentShots=None):
if spentShots is not None:
spentShots = max(self.numShots, spentShots)
else:
spentShots = self.numShots
rearm_time = ((self.REARM_TIME_MAPPING[self.fighter.getModifiedItemAttr("fighterSquadronRole")] or 0) if self.hasCharges else 0)
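# Total reload time is the squadron refueling time plus the per-charge
# rearm time for every shot that has to be replaced.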
return self.fighter.getModifiedItemAttr("fighterRefuelingTime") + rearm_time * spentShots
@property
def numShots(self):
return (self.NUM_SHOTS_MAPPING[self.fighter.getModifiedItemAttr("fighterSquadronRole")] or 0) if self.hasCharges else 0
@property
def cycleTime(self):
speed = self.fighter.getModifiedItemAttr("{}Duration".format(self.attrPrefix))
return speed
def getVolley(self, targetProfile=None):
if not self.dealsDamage or not self.active:
return DmgTypes(0, 0, 0, 0)
if self.attrPrefix == "fighterAbilityLaunchBomb":
em = self.fighter.getModifiedChargeAttr("emDamage", 0)
therm = self.fighter.getModifiedChargeAttr("thermalDamage", 0)
kin = self.fighter.getModifiedChargeAttr("kineticDamage", 0)
exp = self.fighter.getModifiedChargeAttr("explosiveDamage", 0)
else:
em = self.fighter.getModifiedItemAttr("{}DamageEM".format(self.attrPrefix), 0)
therm = self.fighter.getModifiedItemAttr("{}DamageTherm".format(self.attrPrefix), 0)
kin = self.fighter.getModifiedItemAttr("{}DamageKin".format(self.attrPrefix), 0)
exp = self.fighter.getModifiedItemAttr("{}DamageExp".format(self.attrPrefix), 0)
dmgMult = self.fighter.amount * self.fighter.getModifiedItemAttr("{}DamageMultiplier".format(self.attrPrefix), 1)
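# dmgMult scales per-fighter damage by squadron size; each damage type
# is then reduced by the target profile's resist fraction (the getattr
# default of 0 means no resists when no profile is supplied).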
volley = DmgTypes(
em=em * dmgMult * (1 - getattr(targetProfile, "emAmount", 0)),
thermal=therm * dmgMult * (1 - getattr(targetProfile, "thermalAmount", 0)),
kinetic=kin * dmgMult * (1 - getattr(targetProfile, "kineticAmount", 0)),
explosive=exp * dmgMult * (1 - getattr(targetProfile, "explosiveAmount", 0)))
return volley
def getDps(self, targetProfile=None, cycleTimeOverride=None):
volley = self.getVolley(targetProfile=targetProfile)
if not volley:
return DmgTypes(0, 0, 0, 0)
cycleTime = cycleTimeOverride if cycleTimeOverride is not None else self.cycleTime
dpsFactor = 1 / (cycleTime / 1000)
dps = DmgTypes(
em=volley.em * dpsFactor,
thermal=volley.thermal * dpsFactor,
kinetic=volley.kinetic * dpsFactor,
explosive=volley.explosive * dpsFactor)
return dps
def clear(self):
pass
| gpl-3.0 | 1,730,855,668,616,154,600 | 37.388158 | 134 | 0.631705 | false |
jallohm/django | tests/template_tests/syntax_tests/test_extends.py | 83 | 15481 | from django.test import SimpleTestCase
from ..utils import setup
inheritance_templates = {
'inheritance01': "1{% block first %}&{% endblock %}3{% block second %}_{% endblock %}",
'inheritance02': "{% extends 'inheritance01' %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance03': "{% extends 'inheritance02' %}",
'inheritance04': "{% extends 'inheritance01' %}",
'inheritance05': "{% extends 'inheritance02' %}",
'inheritance06': "{% extends foo %}",
'inheritance07': "{% extends 'inheritance01' %}{% block second %}5{% endblock %}",
'inheritance08': "{% extends 'inheritance02' %}{% block second %}5{% endblock %}",
'inheritance09': "{% extends 'inheritance04' %}",
'inheritance10': "{% extends 'inheritance04' %} ",
'inheritance11': "{% extends 'inheritance04' %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance12': "{% extends 'inheritance07' %}{% block first %}2{% endblock %}",
'inheritance13': "{% extends 'inheritance02' %}"
"{% block first %}a{% endblock %}{% block second %}b{% endblock %}",
'inheritance14': "{% extends 'inheritance01' %}{% block newblock %}NO DISPLAY{% endblock %}",
'inheritance15': "{% extends 'inheritance01' %}"
"{% block first %}2{% block inner %}inner{% endblock %}{% endblock %}",
'inheritance16': "{% extends 'inheritance15' %}{% block inner %}out{% endblock %}",
'inheritance17': "{% load testtags %}{% block first %}1234{% endblock %}",
'inheritance18': "{% load testtags %}{% echo this that theother %}5678",
'inheritance19': "{% extends 'inheritance01' %}"
"{% block first %}{% load testtags %}{% echo 400 %}5678{% endblock %}",
'inheritance20': "{% extends 'inheritance01' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance21': "{% extends 'inheritance02' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance22': "{% extends 'inheritance04' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance23': "{% extends 'inheritance20' %}{% block first %}{{ block.super }}b{% endblock %}",
'inheritance24': "{% extends context_template %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance25': "{% extends context_template.1 %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance26': "no tags",
'inheritance27': "{% extends 'inheritance26' %}",
'inheritance 28': "{% block first %}!{% endblock %}",
'inheritance29': "{% extends 'inheritance 28' %}",
'inheritance30': "1{% if optional %}{% block opt %}2{% endblock %}{% endif %}3",
'inheritance31': "{% extends 'inheritance30' %}{% block opt %}two{% endblock %}",
'inheritance32': "{% extends 'inheritance30' %}{% block opt %}two{% endblock %}",
'inheritance33': "1{% if optional == 1 %}{% block opt %}2{% endblock %}{% endif %}3",
'inheritance34': "{% extends 'inheritance33' %}{% block opt %}two{% endblock %}",
'inheritance35': "{% extends 'inheritance33' %}{% block opt %}two{% endblock %}",
'inheritance36': "{% for n in numbers %}_{% block opt %}{{ n }}{% endblock %}{% endfor %}_",
'inheritance37': "{% extends 'inheritance36' %}{% block opt %}X{% endblock %}",
'inheritance38': "{% extends 'inheritance36' %}{% block opt %}X{% endblock %}",
'inheritance39': "{% extends 'inheritance30' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance40': "{% extends 'inheritance33' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance41': "{% extends 'inheritance36' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance42': "{% extends 'inheritance02'|cut:' ' %}",
}
class InheritanceTests(SimpleTestCase):
libraries = {'testtags': 'template_tests.templatetags.testtags'}
@setup(inheritance_templates)
def test_inheritance01(self):
"""
Standard template with no inheritance
"""
output = self.engine.render_to_string('inheritance01')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance02(self):
"""
Standard two-level inheritance
"""
output = self.engine.render_to_string('inheritance02')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance03(self):
"""
Three-level with no redefinitions on third level
"""
output = self.engine.render_to_string('inheritance03')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance04(self):
"""
Two-level with no redefinitions on second level
"""
output = self.engine.render_to_string('inheritance04')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance05(self):
"""
Two-level with double quotes instead of single quotes
"""
output = self.engine.render_to_string('inheritance05')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance06(self):
"""
Three-level with variable parent-template name
"""
output = self.engine.render_to_string('inheritance06', {'foo': 'inheritance02'})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance07(self):
"""
Two-level with one block defined, one block not defined
"""
output = self.engine.render_to_string('inheritance07')
self.assertEqual(output, '1&35')
@setup(inheritance_templates)
def test_inheritance08(self):
"""
Three-level with one block defined on this level, two blocks
defined next level
"""
output = self.engine.render_to_string('inheritance08')
self.assertEqual(output, '1235')
@setup(inheritance_templates)
def test_inheritance09(self):
"""
Three-level with second and third levels blank
"""
output = self.engine.render_to_string('inheritance09')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance10(self):
"""
Three-level with space NOT in a block -- should be ignored
"""
output = self.engine.render_to_string('inheritance10')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance11(self):
"""
Three-level with both blocks defined on this level, but none on
second level
"""
output = self.engine.render_to_string('inheritance11')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance12(self):
"""
Three-level with this level providing one and second level
providing the other
"""
output = self.engine.render_to_string('inheritance12')
self.assertEqual(output, '1235')
@setup(inheritance_templates)
def test_inheritance13(self):
"""
Three-level with this level overriding second level
"""
output = self.engine.render_to_string('inheritance13')
self.assertEqual(output, '1a3b')
@setup(inheritance_templates)
def test_inheritance14(self):
"""
A block defined only in a child template shouldn't be displayed
"""
output = self.engine.render_to_string('inheritance14')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance15(self):
"""
A block within another block
"""
output = self.engine.render_to_string('inheritance15')
self.assertEqual(output, '12inner3_')
@setup(inheritance_templates)
def test_inheritance16(self):
"""
A block within another block (level 2)
"""
output = self.engine.render_to_string('inheritance16')
self.assertEqual(output, '12out3_')
@setup(inheritance_templates)
def test_inheritance17(self):
"""
{% load %} tag (parent -- setup for exception04)
"""
output = self.engine.render_to_string('inheritance17')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance18(self):
"""
{% load %} tag (standard usage, without inheritance)
"""
output = self.engine.render_to_string('inheritance18')
self.assertEqual(output, 'this that theother5678')
@setup(inheritance_templates)
def test_inheritance19(self):
"""
{% load %} tag (within a child template)
"""
output = self.engine.render_to_string('inheritance19')
self.assertEqual(output, '140056783_')
@setup(inheritance_templates)
def test_inheritance20(self):
"""
Two-level inheritance with {{ block.super }}
"""
output = self.engine.render_to_string('inheritance20')
self.assertEqual(output, '1&a3_')
@setup(inheritance_templates)
def test_inheritance21(self):
"""
Three-level inheritance with {{ block.super }} from parent
"""
output = self.engine.render_to_string('inheritance21')
self.assertEqual(output, '12a34')
@setup(inheritance_templates)
def test_inheritance22(self):
"""
Three-level inheritance with {{ block.super }} from grandparent
"""
output = self.engine.render_to_string('inheritance22')
self.assertEqual(output, '1&a3_')
@setup(inheritance_templates)
def test_inheritance23(self):
"""
Three-level inheritance with {{ block.super }} from parent and
grandparent
"""
output = self.engine.render_to_string('inheritance23')
self.assertEqual(output, '1&ab3_')
@setup(inheritance_templates)
def test_inheritance24(self):
"""
Inheritance from local context without use of template loader
"""
context_template = self.engine.from_string("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}")
output = self.engine.render_to_string('inheritance24', {'context_template': context_template})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance25(self):
"""
Inheritance from local context with variable parent template
"""
context_template = [
self.engine.from_string("Wrong"),
self.engine.from_string("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}"),
]
output = self.engine.render_to_string('inheritance25', {'context_template': context_template})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance26(self):
"""
Set up a base template to extend
"""
output = self.engine.render_to_string('inheritance26')
self.assertEqual(output, 'no tags')
@setup(inheritance_templates)
def test_inheritance27(self):
"""
Inheritance from a template that doesn't have any blocks
"""
output = self.engine.render_to_string('inheritance27')
self.assertEqual(output, 'no tags')
@setup(inheritance_templates)
def test_inheritance_28(self):
"""
Set up a base template with a space in it.
"""
output = self.engine.render_to_string('inheritance 28')
self.assertEqual(output, '!')
@setup(inheritance_templates)
def test_inheritance29(self):
"""
Inheritance from a template with a space in its name should work.
"""
output = self.engine.render_to_string('inheritance29')
self.assertEqual(output, '!')
@setup(inheritance_templates)
def test_inheritance30(self):
"""
Base template, putting block in a conditional {% if %} tag
"""
output = self.engine.render_to_string('inheritance30', {'optional': True})
self.assertEqual(output, '123')
# Inherit from a template with block wrapped in an {% if %} tag
# (in parent), still gets overridden
@setup(inheritance_templates)
def test_inheritance31(self):
output = self.engine.render_to_string('inheritance31', {'optional': True})
self.assertEqual(output, '1two3')
@setup(inheritance_templates)
def test_inheritance32(self):
output = self.engine.render_to_string('inheritance32')
self.assertEqual(output, '13')
@setup(inheritance_templates)
def test_inheritance33(self):
"""
Base template, putting block in a conditional {% if %} tag
"""
output = self.engine.render_to_string('inheritance33', {'optional': 1})
self.assertEqual(output, '123')
@setup(inheritance_templates)
def test_inheritance34(self):
"""
Inherit from a template with block wrapped in an {% if %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance34', {'optional': 1})
self.assertEqual(output, '1two3')
@setup(inheritance_templates)
def test_inheritance35(self):
"""
Inherit from a template with block wrapped in an {% if %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance35', {'optional': 2})
self.assertEqual(output, '13')
@setup(inheritance_templates)
def test_inheritance36(self):
"""
Base template, putting block in a {% for %} tag
"""
output = self.engine.render_to_string('inheritance36', {'numbers': '123'})
self.assertEqual(output, '_1_2_3_')
@setup(inheritance_templates)
def test_inheritance37(self):
"""
Inherit from a template with block wrapped in an {% for %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance37', {'numbers': '123'})
self.assertEqual(output, '_X_X_X_')
@setup(inheritance_templates)
def test_inheritance38(self):
"""
Inherit from a template with block wrapped in an {% for %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance38')
self.assertEqual(output, '_')
# The super block will still be found.
@setup(inheritance_templates)
def test_inheritance39(self):
output = self.engine.render_to_string('inheritance39', {'optional': True})
self.assertEqual(output, '1new23')
@setup(inheritance_templates)
def test_inheritance40(self):
output = self.engine.render_to_string('inheritance40', {'optional': 1})
self.assertEqual(output, '1new23')
@setup(inheritance_templates)
def test_inheritance41(self):
output = self.engine.render_to_string('inheritance41', {'numbers': '123'})
self.assertEqual(output, '_new1_new2_new3_')
@setup(inheritance_templates)
def test_inheritance42(self):
"""
Expression starting and ending with a quote
"""
output = self.engine.render_to_string('inheritance42')
self.assertEqual(output, '1234')
| bsd-3-clause | 7,454,957,381,154,839,000 | 38.093434 | 121 | 0.606744 | false |
pgleeson/TestArea | lib/jython/Lib/test/test_cl.py | 31 | 3929 | #! /usr/bin/env python
"""Whimpy test script for the cl module
Roger E. Masse
"""
import cl
from test.test_support import verbose
clattrs = ['ADDED_ALGORITHM_ERROR', 'ALAW', 'ALGORITHM_ID',
'ALGORITHM_VERSION', 'AUDIO', 'AWARE_ERROR', 'AWARE_MPEG_AUDIO',
'AWARE_MULTIRATE', 'AWCMP_CONST_QUAL', 'AWCMP_FIXED_RATE',
'AWCMP_INDEPENDENT', 'AWCMP_JOINT_STEREO', 'AWCMP_LOSSLESS',
'AWCMP_MPEG_LAYER_I', 'AWCMP_MPEG_LAYER_II', 'AWCMP_STEREO',
'Algorithm', 'AlgorithmNumber', 'AlgorithmType', 'AudioFormatName',
'BAD_ALGORITHM_NAME', 'BAD_ALGORITHM_TYPE', 'BAD_BLOCK_SIZE',
'BAD_BOARD', 'BAD_BUFFERING', 'BAD_BUFFERLENGTH_NEG',
'BAD_BUFFERLENGTH_ODD', 'BAD_BUFFER_EXISTS', 'BAD_BUFFER_HANDLE',
'BAD_BUFFER_POINTER', 'BAD_BUFFER_QUERY_SIZE', 'BAD_BUFFER_SIZE',
'BAD_BUFFER_SIZE_POINTER', 'BAD_BUFFER_TYPE',
'BAD_COMPRESSION_SCHEME', 'BAD_COMPRESSOR_HANDLE',
'BAD_COMPRESSOR_HANDLE_POINTER', 'BAD_FRAME_SIZE',
'BAD_FUNCTIONALITY', 'BAD_FUNCTION_POINTER', 'BAD_HEADER_SIZE',
'BAD_INITIAL_VALUE', 'BAD_INTERNAL_FORMAT', 'BAD_LICENSE',
'BAD_MIN_GT_MAX', 'BAD_NO_BUFFERSPACE', 'BAD_NUMBER_OF_BLOCKS',
'BAD_PARAM', 'BAD_PARAM_ID_POINTER', 'BAD_PARAM_TYPE', 'BAD_POINTER',
'BAD_PVBUFFER', 'BAD_SCHEME_POINTER', 'BAD_STREAM_HEADER',
'BAD_STRING_POINTER', 'BAD_TEXT_STRING_PTR', 'BEST_FIT',
'BIDIRECTIONAL', 'BITRATE_POLICY', 'BITRATE_TARGET',
'BITS_PER_COMPONENT', 'BLENDING', 'BLOCK_SIZE', 'BOTTOM_UP',
'BUFFER_NOT_CREATED', 'BUF_DATA', 'BUF_FRAME', 'BytesPerPixel',
'BytesPerSample', 'CHANNEL_POLICY', 'CHROMA_THRESHOLD', 'CODEC',
'COMPONENTS', 'COMPRESSED_BUFFER_SIZE', 'COMPRESSION_RATIO',
'COMPRESSOR', 'CONTINUOUS_BLOCK', 'CONTINUOUS_NONBLOCK',
'CompressImage', 'DATA', 'DECOMPRESSOR', 'DecompressImage',
'EDGE_THRESHOLD', 'ENABLE_IMAGEINFO', 'END_OF_SEQUENCE', 'ENUM_VALUE',
'EXACT_COMPRESSION_RATIO', 'EXTERNAL_DEVICE', 'FLOATING_ENUM_VALUE',
'FLOATING_RANGE_VALUE', 'FRAME', 'FRAME_BUFFER_SIZE',
'FRAME_BUFFER_SIZE_ZERO', 'FRAME_RATE', 'FRAME_TYPE', 'G711_ALAW',
'G711_ULAW', 'GRAYSCALE', 'GetAlgorithmName', 'HDCC',
'HDCC_SAMPLES_PER_TILE', 'HDCC_TILE_THRESHOLD', 'HEADER_START_CODE',
'IMAGE_HEIGHT', 'IMAGE_WIDTH', 'INTERNAL_FORMAT',
'INTERNAL_IMAGE_HEIGHT', 'INTERNAL_IMAGE_WIDTH', 'INTRA', 'JPEG',
'JPEG_ERROR', 'JPEG_NUM_PARAMS', 'JPEG_QUALITY_FACTOR',
'JPEG_QUANTIZATION_TABLES', 'JPEG_SOFTWARE', 'JPEG_STREAM_HEADERS',
'KEYFRAME', 'LAST_FRAME_INDEX', 'LAYER', 'LUMA_THRESHOLD',
'MAX_NUMBER_OF_AUDIO_ALGORITHMS', 'MAX_NUMBER_OF_ORIGINAL_FORMATS',
'MAX_NUMBER_OF_PARAMS', 'MAX_NUMBER_OF_VIDEO_ALGORITHMS', 'MONO',
'MPEG_VIDEO', 'MVC1', 'MVC2', 'MVC2_BLENDING', 'MVC2_BLENDING_OFF',
'MVC2_BLENDING_ON', 'MVC2_CHROMA_THRESHOLD', 'MVC2_EDGE_THRESHOLD',
'MVC2_ERROR', 'MVC2_LUMA_THRESHOLD', 'NEXT_NOT_AVAILABLE',
'NOISE_MARGIN', 'NONE', 'NUMBER_OF_FRAMES', 'NUMBER_OF_PARAMS',
'ORIENTATION', 'ORIGINAL_FORMAT', 'OpenCompressor',
'OpenDecompressor', 'PARAM_OUT_OF_RANGE', 'PREDICTED', 'PREROLL',
'ParamID', 'ParamNumber', 'ParamType', 'QUALITY_FACTOR',
'QUALITY_LEVEL', 'QueryAlgorithms', 'QueryMaxHeaderSize',
'QueryScheme', 'QuerySchemeFromName', 'RANGE_VALUE', 'RGB', 'RGB332',
'RGB8', 'RGBA', 'RGBX', 'RLE', 'RLE24', 'RTR', 'RTR1',
'RTR_QUALITY_LEVEL', 'SAMPLES_PER_TILE', 'SCHEME_BUSY',
'SCHEME_NOT_AVAILABLE', 'SPEED', 'STEREO_INTERLEAVED',
'STREAM_HEADERS', 'SetDefault', 'SetMax', 'SetMin', 'TILE_THRESHOLD',
'TOP_DOWN', 'ULAW', 'UNCOMPRESSED', 'UNCOMPRESSED_AUDIO',
'UNCOMPRESSED_VIDEO', 'UNKNOWN_SCHEME', 'VIDEO', 'VideoFormatName',
'Y', 'YCbCr', 'YCbCr422', 'YCbCr422DC', 'YCbCr422HC', 'YUV', 'YUV422',
'YUV422DC', 'YUV422HC', '__doc__', '__name__', 'cvt_type', 'error']
# This is a very unobtrusive test for the existence of the cl
# module and all its attributes.
def main():
# touch all the attributes of cl without doing anything
if verbose:
print 'Touching cl module attributes...'
for attr in clattrs:
if verbose:
print 'touching: ', attr
getattr(cl, attr)
main()
| gpl-2.0 | 6,156,309,849,971,909,000 | 49.371795 | 70 | 0.697888 | false |
vrenaville/ngo-addons-backport | openerp/addons/base/ir/ir_exports.py | 72 | 1721 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class ir_exports(osv.osv):
_name = "ir.exports"
_order = 'name'
_columns = {
'name': fields.char('Export Name', size=128),
'resource': fields.char('Resource', size=128, select=True),
'export_fields': fields.one2many('ir.exports.line', 'export_id',
'Export ID'),
}
ir_exports()
class ir_exports_line(osv.osv):
_name = 'ir.exports.line'
_order = 'id'
_columns = {
'name': fields.char('Field Name', size=64),
'export_id': fields.many2one('ir.exports', 'Export', select=True, ondelete='cascade'),
}
ir_exports_line()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 8,530,300,790,177,927,000 | 35.617021 | 94 | 0.589192 | false |
josyb/myhdl | cosimulation/test/test_bin2gray.py | 4 | 2691 | import unittest
from unittest import TestCase
from myhdl import Simulation, Signal, delay, intbv, bin
from bin2gray import bin2gray
MAX_WIDTH = 10
def nextLn(Ln):
""" Return Gray code Ln+1, given Ln. """
Ln0 = ['0' + codeword for codeword in Ln]
Ln1 = ['1' + codeword for codeword in Ln]
Ln1.reverse()
return Ln0 + Ln1
class TestOriginalGrayCode(TestCase):
def testOriginalGrayCode(self):
""" Check that the code is an original Gray code """
Rn = []
def stimulus(B, G, n):
for i in range(2**n):
B.next = intbv(i)
yield delay(10)
Rn.append(bin(G, width=n))
Ln = ['0', '1'] # n == 1
for n in range(2, MAX_WIDTH):
Ln = nextLn(Ln)
del Rn[:]
B = Signal(intbv(1))
G = Signal(intbv(0))
dut = bin2gray(B, G, n)
stim = stimulus(B, G, n)
sim = Simulation(dut, stim)
sim.run(quiet=1)
self.assertEqual(Ln, Rn)
class TestGrayCodeProperties(TestCase):
def testSingleBitChange(self):
""" Check that only one bit changes in successive codewords """
def test(B, G, G_Z, width):
B.next = intbv(0)
yield delay(10)
for i in range(1, 2**width):
G_Z.next = G
B.next = intbv(i)
yield delay(10)
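# Consecutive Gray codewords must differ in exactly one bit, so the
# XOR of the current and previous code has a population count of 1.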
diffcode = bin(G ^ G_Z)
self.assertEqual(diffcode.count('1'), 1)
for width in range(2, MAX_WIDTH):
B = Signal(intbv(1))
G = Signal(intbv(0))
G_Z = Signal(intbv(0))
dut = bin2gray(B, G, width)
check = test(B, G, G_Z, width)
sim = Simulation(dut, check)
sim.run(quiet=1)
def testUniqueCodeWords(self):
""" Check that all codewords occur exactly once """
def test(B, G, width):
actual = []
for i in range(2**width):
B.next = intbv(i)
yield delay(10)
actual.append(int(G))
actual.sort()
expected = list(range(2**width))
self.assertEqual(actual, expected)
for width in range(1, MAX_WIDTH):
B = Signal(intbv(1))
G = Signal(intbv(0))
dut = bin2gray(B, G, width)
check = test(B, G, width)
sim = Simulation(dut, check)
sim.run(quiet=1)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | -7,735,305,907,664,894,000 | 23.463636 | 71 | 0.466741 | false |
openstack/futurist | releasenotes/source/conf.py | 1 | 8555 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'openstackdocstheme',
'reno.sphinxext',
]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/futurist'
openstackdocs_bug_project = 'futurist'
openstackdocs_bug_tag = ''
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = '2016, futurist Developers'
# Release notes do not need a version number in the title, they
# cover multiple releases.
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'futuristReleaseNotesDoc'
# -- Options for LaTeX output ---------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'futuristReleaseNotes.tex',
'futurist Release Notes Documentation',
'futurist Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'futuristReleaseNotes',
'futurist Release Notes Documentation',
['futurist Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'futuristReleaseNotes',
'futurist Release Notes Documentation',
'futurist Developers', 'futuristReleaseNotes',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
| apache-2.0 | -3,698,890,901,426,041,000 | 31.777778 | 79 | 0.708124 | false |
anchore/anchore-engine | tests/integration/services/policy_engine/engine/policy/gates/test_vulnerabilities.py | 1 | 5979 | import pytest
from anchore_engine.services.policy_engine.engine.policy.gates.vulnerabilities import (
VulnerabilitiesGate,
UnsupportedDistroTrigger,
FeedOutOfDateTrigger,
VulnerabilityMatchTrigger,
)
from anchore_engine.db import Image, get_thread_scoped_session
from tests.integration.services.policy_engine.engine.policy.gates import GateUnitTest
from anchore_engine.subsys import logger
from tests.integration.services.policy_engine.engine.policy.gates import (
cls_fully_loaded_test_env,
cls_no_feeds_test_env,
)
logger.enable_test_logging()
@pytest.mark.usefixtures("cls_fully_loaded_test_env")
class AnchoreSecGateTest(GateUnitTest):
"""
Test against the debian 8 based "node" image in the test env.
It contains the package:
mercurial 3.1.2-2+deb8u3
Vuln data for testing:
[
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "None",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST0",
"Name": "CVE-TEST-TEST0",
"Severity": "Low"
}
},
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "3.1.2-2+deb8u3",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST1",
"Name": "CVE-TEST-TEST1",
"Severity": "Medium"
}
},
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "3.1.1-2+deb8u3",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST2",
"Name": "CVE-TEST-TEST2",
"Severity": "High"
}
},
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "3.1.3-2+deb8u3",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST3",
"Name": "CVE-TEST-TEST3",
"Severity": "Critical"
}
}
]
The debian:8 feed vuln data is purely fake and for testing against this package specifically
"""
gate_clazz = VulnerabilitiesGate
__default_image__ = "node"
def test_unsupported_distro(self):
t, gate, test_context = self.get_initialized_trigger(
UnsupportedDistroTrigger.__trigger_name__
)
db = get_thread_scoped_session()
image = db.query(Image).get(
(self.test_env.get_images_named("busybox")[0][0], "0")
)
test_context = gate.prepare_context(image, test_context)
t.evaluate(image, test_context)
logger.info(("Fired: {}".format(t.fired)))
self.assertEqual(len(t.fired), 1)
def test_feedoutofdate(self):
t, gate, test_context = self.get_initialized_trigger(
FeedOutOfDateTrigger.__trigger_name__, max_days_since_sync="0"
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info(("Fired: {}".format(t.fired)))
self.assertEqual(len(t.fired), 1)
t, gate, test_context = self.get_initialized_trigger(
FeedOutOfDateTrigger.__trigger_name__, max_days_since_sync="1000000"
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info(("Fired: {}".format(t.fired)))
self.assertEqual(len(t.fired), 0)
def test_all_severity(self):
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
package_type="all",
severity="unknown",
severity_comparison=">=",
)
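        # With severity "unknown" and ">=", every vulnerability in the fixture
        # data matches, so the trigger is expected to fire at least once.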
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
self.assertGreaterEqual(len(t.fired), 1)
def test_packages_severity(self):
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
package_type="all",
severity="medium",
severity_comparison=">=",
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
# CVE-TEST-TEST3, all others are either already fixed or < medium
self.assertGreaterEqual(len(t.fired), 1)
def test_fixavailableparam(self):
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
package_type="all",
severity="medium",
severity_comparison=">=",
fix_available="True",
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
# CVE-TEST-TEST3
self.assertGreaterEqual(len(t.fired), 1)
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
fix_available="False",
severity="unknown",
severity_comparison=">=",
package_type="all",
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
# CVE-TEST-TEST0
self.assertGreaterEqual(len(t.fired), 1)
| apache-2.0 | 2,048,998,247,285,682,700 | 31.494565 | 96 | 0.588225 | false |
codilime/cloudify-agent | cloudify_agent/api/pm/nssm.py | 1 | 7393 | #########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
from cloudify.exceptions import CommandExecutionException
from cloudify_agent import VIRTUALENV
from cloudify_agent.api import defaults
from cloudify_agent.api import exceptions
from cloudify_agent.api import utils
from cloudify_agent.api.pm.base import Daemon
class NonSuckingServiceManagerDaemon(Daemon):
"""
    Implementation of Windows service management based on nssm,
    the Non-Sucking Service Manager; see https://nssm.cc/
Following are all possible custom key-word arguments
(in addition to the ones available in the base daemon)
``startup_policy``
Specifies the start type for the service.
possible values are:
boot - A device driver that is loaded by the boot loader.
system - A device driver that is started during kernel
initialization
auto - A service that automatically starts each time the
computer is restarted and runs even if no one logs on to
the computer.
demand - A service that must be manually started. This is the
default value if start= is not specified.
disabled - A service that cannot be started. To start a disabled
service, change the start type to some other value.
``failure_reset_timeout``
Specifies the length of the period (in seconds) with no failures
after which the failure count should be reset to 0.
``failure_restart_delay``
Specifies delay time (in milliseconds) for the restart action.
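
    Example (an illustrative sketch; the keyword values below are
    assumptions, not recommended defaults)::

        daemon = NonSuckingServiceManagerDaemon(
            logger=logger,
            name='my-agent',
            startup_policy='demand',
            failure_reset_timeout=120,
            failure_restart_delay=10000,
        )
        daemon.create_config()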
"""
PROCESS_MANAGEMENT = 'nssm'
RUNNING_STATES = ['SERVICE_RUNNING', 'SERVICE_STOP_PENDING']
def __init__(self, logger=None, **params):
super(NonSuckingServiceManagerDaemon, self).__init__(
logger=logger, **params)
self.config_path = os.path.join(
self.workdir,
'{0}.conf.bat'.format(self.name))
self.nssm_path = utils.get_absolute_resource_path(
os.path.join('pm', 'nssm', 'nssm.exe'))
self.startup_policy = params.get('startup_policy', 'auto')
self.failure_reset_timeout = params.get('failure_reset_timeout', 60)
self.failure_restart_delay = params.get('failure_restart_delay', 5000)
def create_script(self):
pass
def create_config(self):
env_string = self._create_env_string()
# creating the installation script
self._logger.debug('Rendering configuration script "{0}" from template'
.format(self.config_path))
utils.render_template_to_file(
template_path='pm/nssm/nssm.conf.template',
file_path=self.config_path,
queue=self.queue,
nssm_path=self.nssm_path,
log_level=self.log_level,
log_file=self.get_logfile(),
workdir=self.workdir,
user=self.user,
rest_host=self.rest_host,
rest_port=self.rest_port,
rest_protocol=self.rest_protocol,
file_server_host=self.file_server_host,
file_server_port=self.file_server_port,
file_server_protocol=self.file_server_protocol,
security_enabled=self.security_enabled,
verify_rest_certificate=self.verify_rest_certificate,
local_rest_cert_file=self.local_rest_cert_file,
rest_cert_content=self.rest_cert_content,
broker_url=self.broker_url,
min_workers=self.min_workers,
max_workers=self.max_workers,
virtualenv_path=VIRTUALENV,
name=self.name,
storage_dir=utils.internal.get_storage_directory(self.user),
custom_environment=env_string,
startup_policy=self.startup_policy,
failure_reset_timeout=self.failure_reset_timeout,
failure_restart_delay=self.failure_restart_delay
)
self._logger.debug('Rendered configuration script: {0}'.format(
self.config_path))
# run the configuration script
self._logger.info('Running configuration script')
self._runner.run(self.config_path)
self._logger.debug('Successfully executed configuration script')
def before_self_stop(self):
if self.startup_policy in ['boot', 'system', 'auto']:
self._logger.debug('Disabling service: {0}'.format(self.name))
self._runner.run('sc config {0} start= disabled'.format(self.name))
def delete(self, force=defaults.DAEMON_FORCE_DELETE):
if self._is_agent_registered():
if not force:
raise exceptions.DaemonStillRunningException(self.name)
self.stop()
self._logger.info('Removing {0} service'.format(
self.name))
self._runner.run('{0} remove {1} confirm'.format(
self.nssm_path,
self.name))
self._logger.debug('Deleting {0}'.format(self.config_path))
if os.path.exists(self.config_path):
os.remove(self.config_path)
def start_command(self):
if not os.path.isfile(self.config_path):
raise exceptions.DaemonNotConfiguredError(self.name)
return 'sc start {0}'.format(self.name)
def stop_command(self):
return 'sc stop {0}'.format(self.name)
def status(self):
try:
command = '{0} status {1}'.format(self.nssm_path, self.name)
response = self._runner.run(command)
            # apparently nssm output is encoded in utf16;
            # re-encode it as utf-8 so the state string can be parsed
state = response.std_out.decode('utf16').encode(
'utf-8').rstrip()
self._logger.info(state)
if state in self.RUNNING_STATES:
return True
else:
return False
except CommandExecutionException as e:
self._logger.debug(str(e))
return False
def get_worker_id_placeholder(self):
"""
        Return a placeholder suitable for Windows systems.
        Due to a bug in Celery, the %I placeholder does not work
        properly on NT systems.
"""
return '{0}'
def _create_env_string(self):
env_string = ''
if self.extra_env_path and os.path.exists(self.extra_env_path):
with open(self.extra_env_path) as f:
content = f.read()
for line in content.splitlines():
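                # Each relevant line is expected to use cmd.exe syntax,
                # "set KEY=VALUE"; the naive split below assumes the value
                # contains no spaces and no further "=" characters.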
if line.startswith('set'):
parts = line.split(' ')[1].split('=')
key = parts[0]
value = parts[1]
env_string = '{0} {1}={2}'.format(env_string, key, value)
return env_string.rstrip()
| apache-2.0 | 4,193,093,223,739,896,300 | 37.108247 | 79 | 0.6145 | false |
ghickman/django | tests/admin_widgets/tests.py | 2 | 58590 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gettext
import os
from datetime import datetime, timedelta
from importlib import import_module
from unittest import skipIf
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db.models import CharField, DateField
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils import six, translation
from . import models
from .widgetadmin import site as widget_admin_site
try:
import pytz
except ImportError:
pytz = None
class TestDataMixin(object):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
cls.u2 = User.objects.create(
pk=101, username='testser', first_name='Add', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=False,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagen', model='Passat')
models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3')
class SeleniumDataMixin(object):
def setUp(self):
self.u1 = User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
class AdminFormfieldForDBFieldTests(SimpleTestCase):
"""
Tests for correct behavior of ModelAdmin.formfield_for_dbfield
"""
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
"""
Helper to call formfield_for_dbfield for a given model and field name
and verify that the returned formfield is appropriate.
"""
# Override any settings on the model admin
class MyModelAdmin(admin.ModelAdmin):
pass
for k in admin_overrides:
setattr(MyModelAdmin, k, admin_overrides[k])
# Construct the admin, and ask it for a formfield
ma = MyModelAdmin(model, admin.site)
ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
# "unwrap" the widget wrapper, if needed
if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
widget = ff.widget.widget
else:
widget = ff.widget
# Check that we got a field of the right type
self.assertTrue(
isinstance(widget, widgetclass),
"Wrong widget for %s.%s: expected %s, got %s" % (
model.__class__.__name__,
fieldname,
widgetclass,
type(widget),
)
)
# Return the formfield so that other tests can continue
return ff
def test_DateField(self):
self.assertFormfield(models.Event, 'start_date', widgets.AdminDateWidget)
def test_DateTimeField(self):
self.assertFormfield(models.Member, 'birthdate', widgets.AdminSplitDateTime)
def test_TimeField(self):
self.assertFormfield(models.Event, 'start_time', widgets.AdminTimeWidget)
def test_TextField(self):
self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget)
def test_URLField(self):
self.assertFormfield(models.Event, 'link', widgets.AdminURLFieldWidget)
def test_IntegerField(self):
self.assertFormfield(models.Event, 'min_age', widgets.AdminIntegerFieldWidget)
def test_CharField(self):
self.assertFormfield(models.Member, 'name', widgets.AdminTextInputWidget)
def test_EmailField(self):
self.assertFormfield(models.Member, 'email', widgets.AdminEmailInputWidget)
def test_FileField(self):
self.assertFormfield(models.Album, 'cover_art', widgets.AdminFileWidget)
def test_ForeignKey(self):
self.assertFormfield(models.Event, 'main_band', forms.Select)
def test_raw_id_ForeignKey(self):
self.assertFormfield(models.Event, 'main_band', widgets.ForeignKeyRawIdWidget,
raw_id_fields=['main_band'])
def test_radio_fields_ForeignKey(self):
ff = self.assertFormfield(models.Event, 'main_band', widgets.AdminRadioSelect,
radio_fields={'main_band': admin.VERTICAL})
self.assertEqual(ff.empty_label, None)
def test_many_to_many(self):
self.assertFormfield(models.Band, 'members', forms.SelectMultiple)
def test_raw_id_many_to_many(self):
self.assertFormfield(models.Band, 'members', widgets.ManyToManyRawIdWidget,
raw_id_fields=['members'])
def test_filtered_many_to_many(self):
self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple,
filter_vertical=['members'])
def test_formfield_overrides(self):
self.assertFormfield(models.Event, 'start_date', forms.TextInput,
formfield_overrides={DateField: {'widget': forms.TextInput}})
def test_formfield_overrides_widget_instances(self):
"""
Test that widget instances in formfield_overrides are not shared between
different fields. (#19423)
"""
class BandAdmin(admin.ModelAdmin):
formfield_overrides = {
CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
}
ma = BandAdmin(models.Band, admin.site)
f1 = ma.formfield_for_dbfield(models.Band._meta.get_field('name'), request=None)
f2 = ma.formfield_for_dbfield(models.Band._meta.get_field('style'), request=None)
self.assertNotEqual(f1.widget, f2.widget)
self.assertEqual(f1.widget.attrs['maxlength'], '100')
self.assertEqual(f2.widget.attrs['maxlength'], '20')
self.assertEqual(f2.widget.attrs['size'], '10')
def test_field_with_choices(self):
self.assertFormfield(models.Member, 'gender', forms.Select)
def test_choices_with_radio_fields(self):
self.assertFormfield(models.Member, 'gender', widgets.AdminRadioSelect,
radio_fields={'gender': admin.VERTICAL})
def test_inheritance(self):
self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget)
def test_m2m_widgets(self):
"""m2m fields help text as it applies to admin app (#9321)."""
class AdvisorAdmin(admin.ModelAdmin):
filter_vertical = ['companies']
self.assertFormfield(models.Advisor, 'companies', widgets.FilteredSelectMultiple,
filter_vertical=['companies'])
ma = AdvisorAdmin(models.Advisor, admin.site)
f = ma.formfield_for_dbfield(models.Advisor._meta.get_field('companies'), request=None)
self.assertEqual(
six.text_type(f.help_text),
'Hold down "Control", or "Command" on a Mac, to select more than one.'
)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
def test_filter_choices_by_request_user(self):
"""
Ensure the user can only see their own cars in the foreign key dropdown.
"""
self.client.login(username="super", password="secret")
response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
self.assertNotContains(response, "BMW M3")
self.assertContains(response, "Volkswagen Passat")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
def setUp(self):
self.client.login(username="super", password="secret")
def test_changelist_ForeignKey(self):
response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
self.assertContains(response, '/auth/user/add/')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
def setUp(self):
self.client.login(username="super", password="secret")
def test_nonexistent_target_id(self):
band = models.Band.objects.create(name='Bogey Blues')
pk = band.pk
band.delete()
post_data = {
"main_band": '%s' % pk,
}
# Try posting with a non-existent pk in a raw id field: this
# should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
self.assertContains(response,
'Select a valid choice. That choice is not one of the available choices.')
def test_invalid_target_id(self):
for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
# This should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'),
{"main_band": test_str})
self.assertContains(response,
'Select a valid choice. That choice is not one of the available choices.')
def test_url_params_from_lookup_dict_any_iterable(self):
lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
self.assertEqual(lookup1, {'color__in': 'red,blue'})
self.assertEqual(lookup1, lookup2)
def test_url_params_from_lookup_dict_callable(self):
def my_callable():
return 'works'
lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
self.assertEqual(lookup1, lookup2)
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
def test_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', False)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple="multiple" name="test" class="selectfilter" '
'data-field-name="test\\" data-is-stacked="0">\n</select>'
)
def test_stacked_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', True)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple="multiple" name="test" class="selectfilterstacked" '
'data-field-name="test\\" data-is-stacked="1">\n</select>'
)
class AdminDateWidgetTest(SimpleTestCase):
def test_attrs(self):
"""
Ensure that user-supplied attrs are used.
Refs #12073.
"""
w = widgets.AdminDateWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="vDateField" name="test" size="10" />',
)
# pass attrs to widget
w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="myDateField" name="test" size="20" />',
)
class AdminTimeWidgetTest(SimpleTestCase):
def test_attrs(self):
"""
Ensure that user-supplied attrs are used.
Refs #12073.
"""
w = widgets.AdminTimeWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="vTimeField" name="test" size="8" />',
)
# pass attrs to widget
w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="myTimeField" name="test" size="20" />',
)
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminSplitDateTime()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Date: <input value="2007-12-01" type="text" class="vDateField" '
'name="test_0" size="10" /><br />'
'Time: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8" /></p>'
)
def test_localization(self):
w = widgets.AdminSplitDateTime()
with self.settings(USE_L10N=True), translation.override('de-at'):
w.is_localized = True
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Datum: <input value="01.12.2007" type="text" '
'class="vDateField" name="test_0"size="10" /><br />'
'Zeit: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8" /></p>'
)
class AdminURLWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', ''),
'<input class="vURLField" name="test" type="url" />'
)
self.assertHTMLEqual(
w.render('test', 'http://example.com'),
'<p class="url">Currently:<a href="http://example.com">'
'http://example.com</a><br />'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example.com" /></p>'
)
def test_render_idn(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', 'http://example-äüö.com'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">'
'http://example-äüö.com</a><br />'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com" /></p>'
)
def test_render_quoting(self):
# WARNING: Don't use assertHTMLEqual in that testcase!
# assertHTMLEqual will get rid of some escapes which are tested here!
w = widgets.AdminURLFieldWidget()
self.assertEqual(
w.render('test', 'http://example.com/<sometag>some text</sometag>'),
'<p class="url">Currently: '
'<a href="http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E">'
'http://example.com/<sometag>some text</sometag></a><br />'
'Change: <input class="vURLField" name="test" type="url" '
'value="http://example.com/<sometag>some text</sometag>" /></p>'
)
self.assertEqual(
w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>'),
'<p class="url">Currently: '
'<a href="http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E">'
'http://example-äüö.com/<sometag>some text</sometag></a><br />'
'Change: <input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com/<sometag>some text</sometag>" /></p>'
)
self.assertEqual(
w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"'),
'<p class="url">Currently: '
'<a href="http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22">'
'http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"</a><br />'
'Change: <input class="vURLField" name="test" type="url" '
'value="http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"" /></p>'
)
@override_settings(
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls',
)
class AdminFileWidgetTests(TestDataMixin, TestCase):
@classmethod
def setUpTestData(cls):
super(AdminFileWidgetTests, cls).setUpTestData()
band = models.Band.objects.create(name='Linkin Park')
cls.album = band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
def test_render(self):
w = widgets.AdminFileWidget()
self.assertHTMLEqual(
w.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id" /> '
'<label for="test-clear_id">Clear</label></span><br />'
'Change: <input type="file" name="test" /></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
self.assertHTMLEqual(
w.render('test', SimpleUploadedFile('test', b'content')),
'<input type="file" name="test" />',
)
def test_readonly_fields(self):
"""
File widgets should render as a link when they're marked "read only."
"""
self.client.login(username="super", password="secret")
response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
self.assertContains(
response,
'<p><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
'albums\hybrid_theory.jpg</a></p>' % {'STORAGE_URL': default_storage.url('')},
html=True,
)
self.assertNotContains(
response,
'<input type="file" name="cover_art" id="id_cover_art" />',
html=True,
)
response = self.client.get(reverse('admin:admin_widgets_album_add'))
self.assertContains(
response,
'<p></p>',
html=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
def test_render(self):
band = models.Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = models.Album._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.pk, attrs={}),
'<input type="text" name="test" value="%(bandpk)s" '
'class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" '
'id="lookup_id_test" title="Lookup"></a> <strong>Linkin Park</strong>'
% {'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# Check that ForeignKeyRawIdWidget works with fields which aren't
# related to the model's primary key.
apple = models.Inventory.objects.create(barcode=86, name='Apple')
models.Inventory.objects.create(barcode=22, name='Pear')
core = models.Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = models.Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}),
'<input type="text" name="test" value="86" '
'class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong>Apple</strong>'
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = models.Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = models.Bee._meta.get_field('honeycomb').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s" />'
' <strong>Honeycomb object</strong>'
% {'hcombpk': big_honeycomb.pk}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = models.Individual.objects.create(name='Subject #1')
models.Individual.objects.create(name='Child', parent=subject1)
rel = models.Individual._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s" />'
' <strong>Individual object</strong>'
% {'subj1pk': subject1.pk}
)
def test_proper_manager_for_label_lookup(self):
# see #9258
rel = models.Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
hidden = models.Inventory.objects.create(
barcode=93, name='Hidden', hidden=True
)
child_of_hidden = models.Inventory.objects.create(
barcode=94, name='Child of hidden', parent=hidden
)
self.assertHTMLEqual(
w.render('test', child_of_hidden.parent_id, attrs={}),
'<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong>Hidden</strong>'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
def test_render(self):
band = models.Band.objects.create(name='Linkin Park')
m1 = models.Member.objects.create(name='Chester')
m2 = models.Member.objects.create(name='Mike')
band.members.add(m1, m2)
rel = models.Band._meta.get_field('members').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', [m1.pk, m2.pk], attrs={}), (
'<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField" />'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % dict(m1pk=m1.pk, m2pk=m2.pk)
)
self.assertHTMLEqual(
w.render('test', [m1.pk]), (
'<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % dict(m1pk=m1.pk)
)
def test_m2m_related_model_not_in_admin(self):
# M2M relationship with model not registered with admin site. Raw ID
# widget should have no magnifying glass link. See #16542
consultor1 = models.Advisor.objects.create(name='Rockstar Techie')
c1 = models.Company.objects.create(name='Doodle')
c2 = models.Company.objects.create(name='Pear')
consultor1.companies.add(c1, c2)
rel = models.Advisor._meta.get_field('companies').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
'<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s" />' % {'c1pk': c1.pk, 'c2pk': c2.pk}
)
self.assertHTMLEqual(
w.render('company_widget2', [c1.pk]),
'<input type="text" name="company_widget2" value="%(c1pk)s" />' % {'c1pk': c1.pk}
)
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
def test_no_can_add_related(self):
rel = models.Individual._meta.get_field('parent').remote_field
w = widgets.AdminRadioSelect()
# Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site)
self.assertFalse(w.can_add_related)
def test_select_multiple_widget_cant_change_delete_related(self):
rel = models.Individual._meta.get_field('parent').remote_field
widget = forms.SelectMultiple()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertFalse(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_on_delete_cascade_rel_cant_delete_related(self):
rel = models.Individual._meta.get_field('soulmate').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertTrue(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_show_hide_date_time_picker_widgets(self):
"""
Ensure that pressing the ESC key closes the date and time picker
widgets.
Refs #17064.
"""
from selenium.webdriver.common.keys import Keys
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# First, with the date picker widget ---------------------------------
# Check that the date picker is hidden
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'none')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Check that the date picker is visible
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'block')
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# Check that the date picker is hidden again
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'none')
# Then, with the time picker widget ----------------------------------
# Check that the time picker is hidden
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'none')
# Click the time icon
self.selenium.find_element_by_id('clocklink0').click()
# Check that the time picker is visible
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'block')
self.assertEqual(
[
x.text for x in
self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
],
['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
)
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# Check that the time picker is hidden again
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'none')
def test_calendar_nonday_class(self):
"""
Ensure cells that are not days of the month have the `nonday` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# make sure the first and last 6 cells have class nonday
for td in tds[:6] + tds[-6:]:
self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_selected_class(self):
"""
Ensure cell for the day in the input has the `selected` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify the selected cell
selected = tds[6]
self.assertEqual(selected.get_attribute('class'), 'selected')
self.assertEqual(selected.text, '1')
def test_calendar_no_selected_class(self):
"""
Ensure no cells are given the selected class when the field is empty.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify there are no cells with the selected class
selected = [td for td in tds if td.get_attribute('class') == 'selected']
self.assertEqual(len(selected), 0)
def test_calendar_show_date_from_input(self):
"""
        Ensure that the calendar shows the date from the input field for every
        locale supported by Django.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Enter test data
member = models.Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
# Get month name translations for every locale
month_string = 'May'
path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
for language_code, language_name in settings.LANGUAGES:
try:
catalog = gettext.translation('djangojs', path, [language_code])
except IOError:
continue
if month_string in catalog._catalog:
month_name = catalog._catalog[month_string]
else:
month_name = month_string
# Get the expected caption
may_translation = month_name
expected_caption = '{0:s} {1:d}'.format(may_translation.upper(), 1984)
# Test with every locale
with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
# Open a page that has a date picker widget
self.selenium.get('{}{}'.format(self.live_server_url,
reverse('admin:admin_widgets_member_change', args=(member.pk,))))
# Click on the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Make sure that the right month and year are displayed
self.wait_for_text('#calendarin0 caption', expected_caption)
class DateTimePickerSeleniumChromeTests(DateTimePickerSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerSeleniumIETests(DateTimePickerSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@skipIf(pytz is None, "this test requires pytz")
@override_settings(TIME_ZONE='Asia/Singapore')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerShortcutsSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_date_time_picker_shortcuts(self):
"""
Ensure that date/time/datetime picker shortcuts work in the current time zone.
Refs #20663.
        This test case is fairly tricky: it relies on selenium still running the browser
in the default time zone "America/Chicago" despite `override_settings` changing
the time zone to "Asia/Singapore".
"""
self.admin_login(username='super', password='secret', login_url='/')
error_margin = timedelta(seconds=10)
        # If we are near a DST transition, add an hour of error margin.
tz = pytz.timezone('America/Chicago')
utc_now = datetime.now(pytz.utc)
tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
if tz_yesterday != tz_tomorrow:
error_margin += timedelta(hours=1)
now = datetime.now()
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
self.selenium.find_element_by_id('id_name').send_keys('test')
# Click on the "today" and "now" shortcuts.
shortcuts = self.selenium.find_elements_by_css_selector(
'.field-birthdate .datetimeshortcuts')
for shortcut in shortcuts:
shortcut.find_element_by_tag_name('a').click()
# Check that there is a time zone mismatch warning.
# Warning: This would effectively fail if the TIME_ZONE defined in the
# settings has the same UTC offset as "Asia/Singapore" because the
# mismatch warning would be rightfully missing from the page.
self.selenium.find_elements_by_css_selector(
'.field-birthdate .timezonewarning')
# Submit the form.
self.selenium.find_element_by_tag_name('form').submit()
self.wait_page_loaded()
# Make sure that "now" in javascript is within 10 seconds
# from "now" on the server side.
member = models.Member.objects.get(name='test')
self.assertGreater(member.birthdate, now - error_margin)
self.assertLess(member.birthdate, now + error_margin)
class DateTimePickerShortcutsSeleniumChromeTests(DateTimePickerShortcutsSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerShortcutsSeleniumIETests(DateTimePickerShortcutsSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
# The above tests run with Asia/Singapore, which is on the positive side of
# UTC. Here we test with a timezone on the negative side.
@override_settings(TIME_ZONE='US/Eastern')
class DateTimePickerAltTimezoneSeleniumFirefoxTests(DateTimePickerShortcutsSeleniumFirefoxTests):
pass
class DateTimePickerAltTimezoneSeleniumChromeTests(DateTimePickerAltTimezoneSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerAltTimezoneSeleniumIETests(DateTimePickerAltTimezoneSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class HorizontalVerticalFilterSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
super(HorizontalVerticalFilterSeleniumFirefoxTests, self).setUp()
self.lisa = models.Student.objects.create(name='Lisa')
self.john = models.Student.objects.create(name='John')
self.bob = models.Student.objects.create(name='Bob')
self.peter = models.Student.objects.create(name='Peter')
self.jenny = models.Student.objects.create(name='Jenny')
self.jason = models.Student.objects.create(name='Jason')
self.cliff = models.Student.objects.create(name='Cliff')
self.arthur = models.Student.objects.create(name='Arthur')
self.school = models.School.objects.create(name='School of Awesome')
def assertActiveButtons(self, mode, field_name, choose, remove,
choose_all=None, remove_all=None):
choose_link = '#id_%s_add_link' % field_name
choose_all_link = '#id_%s_add_all_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
remove_all_link = '#id_%s_remove_all_link' % field_name
self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
if mode == 'horizontal':
self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)
def execute_basic_operations(self, mode, field_name):
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
choose_all_link = 'id_%s_add_all_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
remove_all_link = 'id_%s_remove_all_link' % field_name
# Initial positions ---------------------------------------------------
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id)])
self.assertActiveButtons(mode, field_name, False, False, True, True)
# Click 'Choose all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(choose_all_link).click()
elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
# select all options and click 'Choose'.
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
option.click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertActiveButtons(mode, field_name, False, False, False, True)
# Click 'Remove all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(remove_all_link).click()
elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
# select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
option.click()
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box,
[str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertSelectOptions(to_box, [])
self.assertActiveButtons(mode, field_name, False, False, True, False)
# Choose some options ------------------------------------------------
from_lisa_select_option = self.get_select_option(from_box, str(self.lisa.id))
# Check the title attribute is there for tool tips: ticket #20821
self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
from_lisa_select_option.click()
self.get_select_option(from_box, str(self.jason.id)).click()
self.get_select_option(from_box, str(self.bob.id)).click()
self.get_select_option(from_box, str(self.john.id)).click()
self.assertActiveButtons(mode, field_name, True, False, True, False)
self.selenium.find_element_by_id(choose_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.bob.id),
str(self.jason.id), str(self.john.id)])
# Check the tooltip is still there after moving: ticket #20821
to_lisa_select_option = self.get_select_option(to_box, str(self.lisa.id))
self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
# Remove some options -------------------------------------------------
self.get_select_option(to_box, str(self.lisa.id)).click()
self.get_select_option(to_box, str(self.bob.id)).click()
self.assertActiveButtons(mode, field_name, False, True, True, True)
self.selenium.find_element_by_id(remove_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)])
self.assertSelectOptions(to_box,
[str(self.jason.id), str(self.john.id)])
# Choose some more options --------------------------------------------
self.get_select_option(from_box, str(self.arthur.id)).click()
self.get_select_option(from_box, str(self.cliff.id)).click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)])
self.assertSelectOptions(to_box,
[str(self.jason.id), str(self.john.id),
str(self.arthur.id), str(self.cliff.id)])
def test_basic(self):
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get('%s%s' % (
self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
self.wait_page_loaded()
self.execute_basic_operations('vertical', 'students')
self.execute_basic_operations('horizontal', 'alumni')
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.school = models.School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()),
[self.arthur, self.cliff, self.jason, self.john])
self.assertEqual(list(self.school.alumni.all()),
[self.arthur, self.cliff, self.jason, self.john])
def test_filter(self):
"""
Ensure that typing in the search box filters out options displayed in
the 'from' box.
"""
from selenium.webdriver.common.keys import Keys
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
for field_name in ['students', 'alumni']:
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = '#id_%s_add_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
input = self.selenium.find_element_by_css_selector('#id_%s_input' % field_name)
# Initial values
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
# Typing in some characters filters out non-matching options
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys('R')
self.assertSelectOptions(from_box, [str(self.arthur.id)])
# Clearing the text box makes the other options reappear
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
# -----------------------------------------------------------------
# Check that choosing a filtered option sends it properly to the
# 'to' box.
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
self.get_select_option(from_box, str(self.jason.id)).click()
self.selenium.find_element_by_css_selector(choose_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id),
str(self.jason.id)])
self.get_select_option(to_box, str(self.lisa.id)).click()
self.selenium.find_element_by_css_selector(remove_link).click()
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.lisa.id)])
self.assertSelectOptions(to_box,
[str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE]) # Clear text box
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jenny.id),
str(self.john.id), str(self.lisa.id)])
self.assertSelectOptions(to_box,
[str(self.peter.id), str(self.jason.id)])
# -----------------------------------------------------------------
# Check that pressing enter on a filtered option sends it properly
# to the 'to' box.
self.get_select_option(to_box, str(self.jason.id)).click()
self.selenium.find_element_by_css_selector(remove_link).click()
input.send_keys('ja')
self.assertSelectOptions(from_box, [str(self.jason.id)])
input.send_keys([Keys.ENTER])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.school = models.School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()),
[self.jason, self.peter])
self.assertEqual(list(self.school.alumni.all()),
[self.jason, self.peter])
class HorizontalVerticalFilterSeleniumChromeTests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class HorizontalVerticalFilterSeleniumIETests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminRawIdWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
super(AdminRawIdWidgetSeleniumFirefoxTests, self).setUp()
models.Band.objects.create(id=42, name='Bogey Blues')
models.Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_main_band').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_main_band')
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_main_band')
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_supporting_bands')
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_supporting_bands')
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class AdminRawIdWidgetSeleniumChromeTests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class AdminRawIdWidgetSeleniumIETests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get('%s%s' % (
self.live_server_url,
reverse('admin:admin_widgets_profile_add')))
main_window = self.selenium.current_window_handle
# Click the Add User button to add new
self.selenium.find_element_by_id('add_id_user').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_user')
self.wait_for('#id_password')
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# The field now contains the new user
self.wait_for('#id_user option[value="newuser"]')
# Click the Change User button to change it
self.selenium.find_element_by_id('change_id_user').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_user')
self.wait_for('#id_username')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# Wait up to 2 seconds for the new option to show up after clicking save in the popup.
self.selenium.implicitly_wait(2)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
self.selenium.implicitly_wait(0)
# Go ahead and submit the form to make sure it works
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile "changednewuser" was added successfully.')
profiles = models.Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
class RelatedFieldWidgetSeleniumChromeTests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class RelatedFieldWidgetSeleniumIETests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| bsd-3-clause | 5,103,426,652,341,202,000 | 43.877395 | 119 | 0.620524 | false |
mediatum/mediatum | metadata/list.py | 1 | 7868 | """
mediatum - a multimedia content repository
Copyright (C) 2007 Arne Seifert <[email protected]>
Copyright (C) 2007 Matthias Kramm <[email protected]>
Copyright (C) 2012 Iryna Feuerstein <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
import os
import codecs
from mediatumtal import tal
from utils.utils import esc
from core.metatype import Metatype, Context
from metadata.ilist import count_list_values_for_all_content_children
from core import Node
from core import db
q = db.query
logg = logging.getLogger(__name__)
class m_list(Metatype):
def formatValues(self, context):
valuelist = []
items = {}
try:
n = context.collection
if not isinstance(n, Node):
raise KeyError
field_name = context.field.getName()
id_attr_val = count_list_values_for_all_content_children(n.id, field_name)
items = {pair[0]: pair[1] for pair in id_attr_val}
        except KeyError:
            pass
tempvalues = context.field.getValueList()
valuesfiles = context.field.files.all()
if len(valuesfiles): # a text file with list values was uploaded
if os.path.isfile(valuesfiles[0].abspath):
with codecs.open(valuesfiles[0].abspath, 'r', encoding='utf8') as valuesfile:
tempvalues = valuesfile.readlines()
if len(tempvalues): # Has the user entered any values?
            if tempvalues[0].find('|') > 0: # values are provided in multiple languages
languages = [x.strip() for x in tempvalues[0].split('|')] # find out the languages
valuesdict = dict((lang, []) for lang in languages) # create a dictionary with languages as keys,
# and list of respective values as dictionary values
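                # Illustrative file contents (an assumption for clarity, not
                # shipped with the module):
                #     de|en
                #     Rot|Red
                #     Blau|Blue
                # -> valuesdict == {'de': ['Rot', 'Blau'], 'en': ['Red', 'Blue']}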
for i in range(len(tempvalues)):
                    if i: # skip i == 0 - the first line holds the language names, not values
tmp = tempvalues[i].split('|')
for j in range(len(tmp)):
valuesdict[languages[j]].append(tmp[j])
lang = context.language
# if there is no default language, the first language-value will be used
if (not lang) or (lang not in valuesdict.keys()):
lang = languages[0]
tempvalues = valuesdict[lang]
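        # Value syntax (illustrative): leading '*'s encode nesting depth; a
        # line like '*Group' (no space after the stars) is rendered as a
        # non-selectable <optgroup>, while '* item' remains selectable.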
for val in tempvalues:
indent = 0
canbeselected = 0
while val.startswith("*"):
val = val[1:]
indent = indent + 1
if val.startswith(" "):
canbeselected = 1
val = val.strip()
if not indent:
canbeselected = 1
if indent > 0:
indent = indent - 1
indentstr = " " * (2 * indent)
num = 0
if val in items.keys():
num = int(items[val])
try:
if int(num) < 0:
raise Exception()
elif int(num) == 0:
num = ""
else:
num = " (" + ustr(num) + ")"
            except Exception:
                logg.exception("exception in formatValues, using empty string")
num = ""
val = esc(val)
if not canbeselected:
valuelist.append(("optgroup", "<optgroup label=\"" + indentstr + val + "\">", "", ""))
elif (val in context.value.split(";")):
valuelist.append(("optionselected", indentstr, val, num))
else:
valuelist.append(("option", indentstr, val, num))
return valuelist
def getEditorHTML(self, field, value="", width=400, lock=0, language=None, required=None):
context = Context(field, value=value, width=width, name=field.getName(), lock=lock, language=language)
return tal.getTAL("metadata/list.html", {"context": context,
"valuelist": filter(lambda x: x != "", self.formatValues(context)),
"required": self.is_required(required)},
macro="editorfield",
language=language)
def getSearchHTML(self, context):
return tal.getTAL("metadata/list.html",
{"context": context,
"valuelist": filter(lambda x: x != "",
self.formatValues(context))},
macro="searchfield",
language=context.language)
def getFormattedValue(self, metafield, maskitem, mask, node, language, html=True):
value = node.get(metafield.getName()).replace(";", "; ")
if html:
value = esc(value)
return (metafield.getLabel(), value)
def format_request_value_for_db(self, field, params, item, language=None):
if field.get('multiple'):
valuelist = params.getlist(item)
value = ";".join(valuelist)
else:
value = params.get(item)
return value.replace("; ", ";")
def getMaskEditorHTML(self, field, metadatatype=None, language=None):
value = u""
filename = u""
multiple_list = u""
try:
if field:
value = field.getValues()
if field.id and len(field.files) > 0:
filename = os.path.basename(field.files[0].abspath)
multiple_list = field.get('multiple')
except AttributeError:
value = field
return tal.getTAL("metadata/list.html",
{"value": value,
"filename": filename,
"multiple_list": multiple_list},
macro="maskeditor",
language=language)
def getName(self):
return "fieldtype_list"
def getInformation(self):
return {"moduleversion": "1.1", "softwareversion": "1.1"}
# method for additional keys of type list
def getLabels(self):
return m_list.labels
labels = {"de":
[
("list_multiple", "Mehrfachauswahl:"),
("current_file", "Aktuelle Textdatei: "),
("delete_valuesfile", "Vorhandene Datei löschen"),
("list_list_values_file", "Datei mit Listenwerten:"),
("list_list_values", "Listenwerte:"),
("fieldtype_list", "Werteliste"),
("fieldtype_list_desc", "Werte-Auswahlfeld als Drop-Down Liste")
],
"en":
[
("list_multiple", "Multiple choice:"),
("current_file", "Current values file: "),
("delete_valuesfile", "Delete the current text file"),
("list_list_values_file", "Textfile with list values:"),
("list_list_values", "List values:"),
("fieldtype_list", "valuelist"),
("fieldtype_list_desc", "drop down valuelist")
]
}
| gpl-3.0 | 5,651,938,077,670,280,000 | 39.142857 | 120 | 0.534443 | false |
varunkumta/azure-linux-extensions | CustomScript/azure/servicemanagement/websitemanagementservice.py | 46 | 10015 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
MANAGEMENT_HOST,
_str,
)
from azure.servicemanagement import (
WebSpaces,
WebSpace,
Sites,
Site,
MetricResponses,
MetricDefinitions,
PublishData,
_XmlSerializer,
)
from azure.servicemanagement.servicemanagementclient import (
_ServiceManagementClient,
)
class WebsiteManagementService(_ServiceManagementClient):
    ''' Note that this class is preliminary work on WebSite
        management. Since it lacks a lot of features, the final
        version can differ slightly from the current one.
'''
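    # Minimal usage sketch (an assumption for illustration; the subscription
    # id and certificate path are placeholders, not values from this module):
    #
    #   wms = WebsiteManagementService('<subscription-id>', '/path/to/cert.pem')
    #   webspaces = wms.list_webspaces()   # -> WebSpaces result object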
def __init__(self, subscription_id=None, cert_file=None,
host=MANAGEMENT_HOST):
super(WebsiteManagementService, self).__init__(
subscription_id, cert_file, host)
#--Operations for web sites ----------------------------------------
def list_webspaces(self):
'''
List the webspaces defined on the account.
'''
return self._perform_get(self._get_list_webspaces_path(),
WebSpaces)
def get_webspace(self, webspace_name):
'''
Get details of a specific webspace.
webspace_name: The name of the webspace.
'''
return self._perform_get(self._get_webspace_details_path(webspace_name),
WebSpace)
def list_sites(self, webspace_name):
'''
List the web sites defined on this webspace.
webspace_name: The name of the webspace.
'''
return self._perform_get(self._get_sites_path(webspace_name),
Sites)
def get_site(self, webspace_name, website_name):
'''
        Get details of a specific website.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_sites_details_path(webspace_name,
website_name),
Site)
def create_site(self, webspace_name, website_name, geo_region, host_names,
plan='VirtualDedicatedPlan', compute_mode='Shared',
server_farm=None, site_mode=None):
'''
Create a website.
webspace_name: The name of the webspace.
website_name: The name of the website.
geo_region:
The geographical region of the webspace that will be created.
host_names:
An array of fully qualified domain names for website. Only one
hostname can be specified in the azurewebsites.net domain.
The hostname should match the name of the website. Custom domains
can only be specified for Shared or Standard websites.
plan:
This value must be 'VirtualDedicatedPlan'.
compute_mode:
This value should be 'Shared' for the Free or Paid Shared
offerings, or 'Dedicated' for the Standard offering. The default
value is 'Shared'. If you set it to 'Dedicated', you must specify
a value for the server_farm parameter.
server_farm:
The name of the Server Farm associated with this website. This is
a required value for Standard mode.
site_mode:
Can be None, 'Limited' or 'Basic'. This value is 'Limited' for the
Free offering, and 'Basic' for the Paid Shared offering. Standard
mode does not use the site_mode parameter; it uses the compute_mode
parameter.
'''
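        # Illustrative call (placeholder values, not from this module):
        #   svc.create_site('mywebspace', 'mysite', 'West US',
        #                   ['mysite.azurewebsites.net'])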
xml = _XmlSerializer.create_website_to_xml(webspace_name, website_name, geo_region, plan, host_names, compute_mode, server_farm, site_mode)
return self._perform_post(
self._get_sites_path(webspace_name),
xml,
Site)
def delete_site(self, webspace_name, website_name,
delete_empty_server_farm=False, delete_metrics=False):
'''
Delete a website.
webspace_name: The name of the webspace.
website_name: The name of the website.
delete_empty_server_farm:
If the site being deleted is the last web site in a server farm,
you can delete the server farm by setting this to True.
delete_metrics:
To also delete the metrics for the site that you are deleting, you
can set this to True.
'''
path = self._get_sites_details_path(webspace_name, website_name)
query = ''
if delete_empty_server_farm:
query += '&deleteEmptyServerFarm=true'
if delete_metrics:
query += '&deleteMetrics=true'
if query:
path = path + '?' + query.lstrip('&')
return self._perform_delete(path)
def restart_site(self, webspace_name, website_name):
'''
Restart a web site.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_post(
self._get_restart_path(webspace_name, website_name),
'')
def get_historical_usage_metrics(self, webspace_name, website_name,
                                     metrics=None, start_time=None, end_time=None, time_grain=None):
'''
Get historical usage metrics.
webspace_name: The name of the webspace.
website_name: The name of the website.
        metrics: Optional. List of metric names. Otherwise, all metrics are returned.
        start_time: Optional. An ISO8601 date. Otherwise, the current hour is used.
        end_time: Optional. An ISO8601 date. Otherwise, the current time is used.
        time_grain: Optional. A rollup name, such as P1D. Otherwise, the default rollup for the metrics is used.
More information and metrics name at:
http://msdn.microsoft.com/en-us/library/azure/dn166964.aspx
'''
metrics = ('names='+','.join(metrics)) if metrics else ''
start_time = ('StartTime='+start_time) if start_time else ''
end_time = ('EndTime='+end_time) if end_time else ''
time_grain = ('TimeGrain='+time_grain) if time_grain else ''
parameters = ('&'.join(v for v in (metrics, start_time, end_time, time_grain) if v))
parameters = '?'+parameters if parameters else ''
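        # Resulting query suffix, e.g. (illustrative):
        #   '?names=CpuTime,Requests&StartTime=2014-01-01&TimeGrain=PT1H'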
return self._perform_get(self._get_historical_usage_metrics_path(webspace_name, website_name) + parameters,
MetricResponses)
def get_metric_definitions(self, webspace_name, website_name):
'''
Get metric definitions of metrics available of this web site.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_metric_definitions_path(webspace_name, website_name),
MetricDefinitions)
def get_publish_profile_xml(self, webspace_name, website_name):
'''
Get a site's publish profile as a string
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_publishxml_path(webspace_name, website_name),
None).body.decode("utf-8")
def get_publish_profile(self, webspace_name, website_name):
'''
Get a site's publish profile as an object
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_publishxml_path(webspace_name, website_name),
PublishData)
#--Helper functions --------------------------------------------------
def _get_list_webspaces_path(self):
return self._get_path('services/webspaces', None)
def _get_webspace_details_path(self, webspace_name):
return self._get_path('services/webspaces/', webspace_name)
def _get_sites_path(self, webspace_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites'
def _get_sites_details_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name)
def _get_restart_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/restart/'
def _get_historical_usage_metrics_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/metrics/'
def _get_metric_definitions_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/metricdefinitions/'
def _get_publishxml_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/publishxml/'
| apache-2.0 | 2,105,203,108,201,576,400 | 41.617021 | 147 | 0.588717 | false |
codekaki/odoo | addons/hr_timesheet/hr_timesheet.py | 39 | 9938 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields
from openerp.osv import osv
from openerp.tools.translate import _
class hr_employee(osv.osv):
_name = "hr.employee"
_inherit = "hr.employee"
_columns = {
'product_id': fields.many2one('product.product', 'Product', help="Specifies employee's designation as a product with type 'service'."),
'journal_id': fields.many2one('account.analytic.journal', 'Analytic Journal'),
'uom_id': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='Unit of Measure', store=True, readonly=True)
}
def _getAnalyticJournal(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
dummy, res_id = md.get_object_reference(cr, uid, 'hr_timesheet', 'analytic_journal')
#search on id found in result to check if current user has read access right
check_right = self.pool.get('account.analytic.journal').search(cr, uid, [('id', '=', res_id)], context=context)
if check_right:
return res_id
except ValueError:
pass
return False
def _getEmployeeProduct(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
dummy, res_id = md.get_object_reference(cr, uid, 'product', 'product_product_consultant')
#search on id found in result to check if current user has read access right
check_right = self.pool.get('product.template').search(cr, uid, [('id', '=', res_id)], context=context)
if check_right:
return res_id
except ValueError:
pass
return False
_defaults = {
'journal_id': _getAnalyticJournal,
'product_id': _getEmployeeProduct
}
hr_employee()
class hr_analytic_timesheet(osv.osv):
_name = "hr.analytic.timesheet"
_table = 'hr_analytic_timesheet'
_description = "Timesheet Line"
_inherits = {'account.analytic.line': 'line_id'}
_order = "id desc"
_columns = {
'line_id': fields.many2one('account.analytic.line', 'Analytic Line', ondelete='cascade', required=True),
'partner_id': fields.related('account_id', 'partner_id', type='many2one', string='Partner', relation='res.partner', store=True),
}
def unlink(self, cr, uid, ids, context=None):
toremove = {}
for obj in self.browse(cr, uid, ids, context=context):
toremove[obj.line_id.id] = True
self.pool.get('account.analytic.line').unlink(cr, uid, toremove.keys(), context=context)
return super(hr_analytic_timesheet, self).unlink(cr, uid, ids, context=context)
def on_change_unit_amount(self, cr, uid, id, prod_id, unit_amount, company_id, unit=False, journal_id=False, context=None):
res = {'value':{}}
if prod_id and unit_amount:
# find company
company_id = self.pool.get('res.company')._company_default_get(cr, uid, 'account.analytic.line', context=context)
r = self.pool.get('account.analytic.line').on_change_unit_amount(cr, uid, id, prod_id, unit_amount, company_id, unit, journal_id, context=context)
if r:
res.update(r)
# update unit of measurement
if prod_id:
uom = self.pool.get('product.product').browse(cr, uid, prod_id, context=context)
if uom.uom_id:
res['value'].update({'product_uom_id': uom.uom_id.id})
else:
res['value'].update({'product_uom_id': False})
return res
def _getEmployeeProduct(self, cr, uid, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.id
return False
def _getEmployeeUnit(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.uom_id.id
return False
def _getGeneralAccount(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
            if emp.product_id:
a = emp.product_id.property_account_expense.id
if not a:
a = emp.product_id.categ_id.property_account_expense_categ.id
if a:
return a
return False
def _getAnalyticJournal(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
if context.get('employee_id'):
emp_id = [context.get('employee_id')]
else:
emp_id = emp_obj.search(cr, uid, [('user_id','=',context.get('user_id') or uid)], limit=1, context=context)
if not emp_id:
raise osv.except_osv(_('Warning!'), _('Please create an employee for this user, using the menu: Human Resources > Employees.'))
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.journal_id:
return emp.journal_id.id
        else:
raise osv.except_osv(_('Warning!'), _('No analytic journal defined for \'%s\'.\nYou should assign an analytic journal on the employee form.')%(emp.name))
_defaults = {
'product_uom_id': _getEmployeeUnit,
'product_id': _getEmployeeProduct,
'general_account_id': _getGeneralAccount,
'journal_id': _getAnalyticJournal,
'date': lambda self, cr, uid, ctx: ctx.get('date', fields.date.context_today(self,cr,uid,context=ctx)),
'user_id': lambda obj, cr, uid, ctx: ctx.get('user_id') or uid,
}
def on_change_account_id(self, cr, uid, ids, account_id, context=None):
return {'value':{}}
def on_change_date(self, cr, uid, ids, date):
if ids:
new_date = self.read(cr, uid, ids[0], ['date'])['date']
if date != new_date:
warning = {'title':'User Alert!','message':'Changing the date will let this entry appear in the timesheet of the new date.'}
return {'value':{},'warning':warning}
return {'value':{}}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
ename = ''
if emp_id:
ename = emp_obj.browse(cr, uid, emp_id[0], context=context).name
if not vals.get('journal_id',False):
raise osv.except_osv(_('Warning!'), _('No \'Analytic Journal\' is defined for employee %s \nDefine an employee for the selected user and assign an \'Analytic Journal\'!')%(ename,))
if not vals.get('account_id',False):
raise osv.except_osv(_('Warning!'), _('No analytic account is defined on the project.\nPlease set one or we cannot automatically fill the timesheet.'))
return super(hr_analytic_timesheet, self).create(cr, uid, vals, context=context)
def on_change_user_id(self, cr, uid, ids, user_id):
if not user_id:
return {}
context = {'user_id': user_id}
return {'value': {
'product_id': self. _getEmployeeProduct(cr, uid, context),
'product_uom_id': self._getEmployeeUnit(cr, uid, context),
'general_account_id': self._getGeneralAccount(cr, uid, context),
'journal_id': self._getAnalyticJournal(cr, uid, context),
}}
class account_analytic_account(osv.osv):
_inherit = 'account.analytic.account'
_description = 'Analytic Account'
_columns = {
'use_timesheets': fields.boolean('Timesheets', help="Check this field if this project manages timesheets"),
}
def on_change_template(self, cr, uid, ids, template_id, context=None):
res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, context=context)
if template_id and 'value' in res:
template = self.browse(cr, uid, template_id, context=context)
res['value']['use_timesheets'] = template.use_timesheets
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -7,368,773,884,944,683,000 | 44.797235 | 191 | 0.59509 | false |
matbu/ansible-modules-extras | cloud/centurylink/clc_modify_server.py | 48 | 35314 | #!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
module: clc_modify_server
short_description: modify servers in CenturyLink Cloud.
description:
- An Ansible module to modify servers in CenturyLink Cloud.
version_added: "2.0"
options:
server_ids:
description:
- A list of server Ids to modify.
required: True
cpu:
description:
- How many CPUs to update on the server
required: False
default: None
memory:
description:
- Memory (in GB) to set to the server.
required: False
default: None
anti_affinity_policy_id:
description:
- The anti affinity policy id to be set for a hyper scale server.
This is mutually exclusive with 'anti_affinity_policy_name'
required: False
default: None
anti_affinity_policy_name:
description:
- The anti affinity policy name to be set for a hyper scale server.
This is mutually exclusive with 'anti_affinity_policy_id'
required: False
default: None
alert_policy_id:
description:
- The alert policy id to be associated to the server.
This is mutually exclusive with 'alert_policy_name'
required: False
default: None
alert_policy_name:
description:
- The alert policy name to be associated to the server.
This is mutually exclusive with 'alert_policy_id'
required: False
default: None
state:
description:
      - The state to ensure that the provided resources are in.
default: 'present'
required: False
choices: ['present', 'absent']
wait:
description:
- Whether to wait for the provisioning tasks to finish before returning.
default: True
required: False
choices: [ True, False]
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
    - To use this module, you must set the environment variables below, which enable access to the
      CenturyLink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
# Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples
- name: set the cpu count to 4 on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
cpu: 4
state: present
- name: set the memory to 8GB on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
memory: 8
state: present
- name: set the anti affinity policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
anti_affinity_policy_name: 'aa_policy'
state: present
- name: remove the anti affinity policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
anti_affinity_policy_name: 'aa_policy'
state: absent
- name: add the alert policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
alert_policy_name: 'alert_policy'
state: present
- name: remove the alert policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
alert_policy_name: 'alert_policy'
state: absent
- name: set the memory to 16GB and cpu to 8 cores on a list of servers
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
cpu: 8
memory: 16
state: present
'''
RETURN = '''
server_ids:
description: The list of server ids that are changed
returned: success
type: list
sample:
[
"UC1TEST-SVR01",
"UC1TEST-SVR02"
]
servers:
description: The list of server objects that are changed
returned: success
type: list
sample:
[
{
"changeInfo":{
"createdBy":"service.wfad",
"createdDate":1438196820,
"modifiedBy":"service.wfad",
"modifiedDate":1438196820
},
"description":"test-server",
"details":{
"alertPolicies":[
],
"cpu":1,
"customFields":[
],
"diskCount":3,
"disks":[
{
"id":"0:0",
"partitionPaths":[
],
"sizeGB":1
},
{
"id":"0:1",
"partitionPaths":[
],
"sizeGB":2
},
{
"id":"0:2",
"partitionPaths":[
],
"sizeGB":14
}
],
"hostName":"",
"inMaintenanceMode":false,
"ipAddresses":[
{
"internal":"10.1.1.1"
}
],
"memoryGB":1,
"memoryMB":1024,
"partitions":[
],
"powerState":"started",
"snapshots":[
],
"storageGB":17
},
"groupId":"086ac1dfe0b6411989e8d1b77c4065f0",
"id":"test-server",
"ipaddress":"10.120.45.23",
"isTemplate":false,
"links":[
{
"href":"/v2/servers/wfad/test-server",
"id":"test-server",
"rel":"self",
"verbs":[
"GET",
"PATCH",
"DELETE"
]
},
{
"href":"/v2/groups/wfad/086ac1dfe0b6411989e8d1b77c4065f0",
"id":"086ac1dfe0b6411989e8d1b77c4065f0",
"rel":"group"
},
{
"href":"/v2/accounts/wfad",
"id":"wfad",
"rel":"account"
},
{
"href":"/v2/billing/wfad/serverPricing/test-server",
"rel":"billing"
},
{
"href":"/v2/servers/wfad/test-server/publicIPAddresses",
"rel":"publicIPAddresses",
"verbs":[
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/credentials",
"rel":"credentials"
},
{
"href":"/v2/servers/wfad/test-server/statistics",
"rel":"statistics"
},
{
"href":"/v2/servers/wfad/510ec21ae82d4dc89d28479753bf736a/upcomingScheduledActivities",
"rel":"upcomingScheduledActivities"
},
{
"href":"/v2/servers/wfad/510ec21ae82d4dc89d28479753bf736a/scheduledActivities",
"rel":"scheduledActivities",
"verbs":[
"GET",
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/capabilities",
"rel":"capabilities"
},
{
"href":"/v2/servers/wfad/test-server/alertPolicies",
"rel":"alertPolicyMappings",
"verbs":[
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/antiAffinityPolicy",
"rel":"antiAffinityPolicyMapping",
"verbs":[
"PUT",
"DELETE"
]
},
{
"href":"/v2/servers/wfad/test-server/cpuAutoscalePolicy",
"rel":"cpuAutoscalePolicyMapping",
"verbs":[
"PUT",
"DELETE"
]
}
],
"locationId":"UC1",
"name":"test-server",
"os":"ubuntu14_64Bit",
"osType":"Ubuntu 14 64-bit",
"status":"active",
"storageType":"standard",
"type":"standard"
}
]
'''
__version__ = '${version}'
from distutils.version import LooseVersion
try:
import requests
except ImportError:
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
try:
import clc as clc_sdk
from clc import CLCException
from clc import APIFailedResponse
except ImportError:
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
class ClcModifyServer:
clc = clc_sdk
def __init__(self, module):
"""
Construct module
"""
self.clc = clc_sdk
self.module = module
if not CLC_FOUND:
self.module.fail_json(
msg='clc-python-sdk required for this module')
if not REQUESTS_FOUND:
self.module.fail_json(
msg='requests library is required for this module')
if requests.__version__ and LooseVersion(
requests.__version__) < LooseVersion('2.5.0'):
self.module.fail_json(
msg='requests library version should be >= 2.5.0')
self._set_user_agent(self.clc)
def process_request(self):
"""
Process the request - Main Code Path
:return: Returns with either an exit_json or fail_json
"""
self._set_clc_credentials_from_env()
p = self.module.params
cpu = p.get('cpu')
memory = p.get('memory')
state = p.get('state')
if state == 'absent' and (cpu or memory):
return self.module.fail_json(
msg='\'absent\' state is not supported for \'cpu\' and \'memory\' arguments')
server_ids = p['server_ids']
if not isinstance(server_ids, list):
return self.module.fail_json(
msg='server_ids needs to be a list of instances to modify: %s' %
server_ids)
(changed, server_dict_array, changed_server_ids) = self._modify_servers(
server_ids=server_ids)
self.module.exit_json(
changed=changed,
server_ids=changed_server_ids,
servers=server_dict_array)
@staticmethod
def _define_module_argument_spec():
"""
Define the argument spec for the ansible module
:return: argument spec dictionary
"""
argument_spec = dict(
server_ids=dict(type='list', required=True),
state=dict(default='present', choices=['present', 'absent']),
cpu=dict(),
memory=dict(),
anti_affinity_policy_id=dict(),
anti_affinity_policy_name=dict(),
alert_policy_id=dict(),
alert_policy_name=dict(),
wait=dict(type='bool', default=True)
)
mutually_exclusive = [
['anti_affinity_policy_id', 'anti_affinity_policy_name'],
['alert_policy_id', 'alert_policy_name']
]
return {"argument_spec": argument_spec,
"mutually_exclusive": mutually_exclusive}
def _set_clc_credentials_from_env(self):
"""
Set the CLC Credentials on the sdk by reading environment variables
:return: none
"""
env = os.environ
v2_api_token = env.get('CLC_V2_API_TOKEN', False)
v2_api_username = env.get('CLC_V2_API_USERNAME', False)
v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
clc_alias = env.get('CLC_ACCT_ALIAS', False)
api_url = env.get('CLC_V2_API_URL', False)
if api_url:
self.clc.defaults.ENDPOINT_URL_V2 = api_url
if v2_api_token and clc_alias:
self.clc._LOGIN_TOKEN_V2 = v2_api_token
self.clc._V2_ENABLED = True
self.clc.ALIAS = clc_alias
elif v2_api_username and v2_api_passwd:
self.clc.v2.SetCredentials(
api_username=v2_api_username,
api_passwd=v2_api_passwd)
else:
return self.module.fail_json(
msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
"environment variables")
def _get_servers_from_clc(self, server_list, message):
"""
Internal function to fetch list of CLC server objects from a list of server ids
:param server_list: The list of server ids
:param message: the error message to throw in case of any error
:return the list of CLC server objects
"""
try:
return self.clc.v2.Servers(server_list).servers
except CLCException as ex:
return self.module.fail_json(msg=message + ': %s' % ex.message)
def _modify_servers(self, server_ids):
"""
        modify the configuration of the servers in the provided list
:param server_ids: list of servers to modify
:return: a list of dictionaries with server information about the servers that were modified
"""
p = self.module.params
state = p.get('state')
server_params = {
'cpu': p.get('cpu'),
'memory': p.get('memory'),
'anti_affinity_policy_id': p.get('anti_affinity_policy_id'),
'anti_affinity_policy_name': p.get('anti_affinity_policy_name'),
'alert_policy_id': p.get('alert_policy_id'),
'alert_policy_name': p.get('alert_policy_name'),
}
changed = False
server_changed = False
aa_changed = False
ap_changed = False
server_dict_array = []
result_server_ids = []
request_list = []
changed_servers = []
if not isinstance(server_ids, list) or len(server_ids) < 1:
return self.module.fail_json(
msg='server_ids should be a list of servers, aborting')
servers = self._get_servers_from_clc(
server_ids,
'Failed to obtain server list from the CLC API')
for server in servers:
if state == 'present':
server_changed, server_result = self._ensure_server_config(
server, server_params)
if server_result:
request_list.append(server_result)
aa_changed = self._ensure_aa_policy_present(
server,
server_params)
ap_changed = self._ensure_alert_policy_present(
server,
server_params)
elif state == 'absent':
aa_changed = self._ensure_aa_policy_absent(
server,
server_params)
ap_changed = self._ensure_alert_policy_absent(
server,
server_params)
if server_changed or aa_changed or ap_changed:
changed_servers.append(server)
changed = True
self._wait_for_requests(self.module, request_list)
self._refresh_servers(self.module, changed_servers)
for server in changed_servers:
server_dict_array.append(server.data)
result_server_ids.append(server.id)
return changed, server_dict_array, result_server_ids
def _ensure_server_config(
self, server, server_params):
"""
ensures the server is updated with the provided cpu and memory
:param server: the CLC server object
:param server_params: the dictionary of server parameters
        :return: (changed, result) -
changed: Boolean whether a change was made
result: The result from the CLC API call
"""
cpu = server_params.get('cpu')
memory = server_params.get('memory')
changed = False
result = None
if not cpu:
cpu = server.cpu
if not memory:
memory = server.memory
if memory != server.memory or cpu != server.cpu:
if not self.module.check_mode:
result = self._modify_clc_server(
self.clc,
self.module,
server.id,
cpu,
memory)
changed = True
return changed, result
@staticmethod
def _modify_clc_server(clc, module, server_id, cpu, memory):
"""
Modify the memory or CPU of a clc server.
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param server_id: id of the server to modify
:param cpu: the new cpu value
:param memory: the new memory value
:return: the result of CLC API call
"""
result = None
acct_alias = clc.v2.Account.GetAlias()
try:
# Update the server configuration
job_obj = clc.v2.API.Call('PATCH',
'servers/%s/%s' % (acct_alias,
server_id),
json.dumps([{"op": "set",
"member": "memory",
"value": memory},
{"op": "set",
"member": "cpu",
"value": cpu}]))
result = clc.v2.Requests(job_obj)
except APIFailedResponse as ex:
module.fail_json(
msg='Unable to update the server configuration for server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _wait_for_requests(module, request_list):
"""
Block until server provisioning requests are completed.
:param module: the AnsibleModule object
:param request_list: a list of clc-sdk.Request instances
:return: none
"""
wait = module.params.get('wait')
if wait:
# Requests.WaitUntilComplete() returns the count of failed requests
failed_requests_count = sum(
[request.WaitUntilComplete() for request in request_list])
if failed_requests_count > 0:
module.fail_json(
msg='Unable to process modify server request')
@staticmethod
def _refresh_servers(module, servers):
"""
Loop through a list of servers and refresh them.
:param module: the AnsibleModule object
:param servers: list of clc-sdk.Server instances to refresh
:return: none
"""
for server in servers:
try:
server.Refresh()
except CLCException as ex:
module.fail_json(msg='Unable to refresh the server {0}. {1}'.format(
server.id, ex.message
))
def _ensure_aa_policy_present(
self, server, server_params):
"""
ensures the server is updated with the provided anti affinity policy
:param server: the CLC server object
:param server_params: the dictionary of server parameters
        :return: changed - Boolean whether a change was made
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
aa_policy_id = server_params.get('anti_affinity_policy_id')
aa_policy_name = server_params.get('anti_affinity_policy_name')
if not aa_policy_id and aa_policy_name:
aa_policy_id = self._get_aa_policy_id_by_name(
self.clc,
self.module,
acct_alias,
aa_policy_name)
current_aa_policy_id = self._get_aa_policy_id_of_server(
self.clc,
self.module,
acct_alias,
server.id)
if aa_policy_id and aa_policy_id != current_aa_policy_id:
self._modify_aa_policy(
self.clc,
self.module,
acct_alias,
server.id,
aa_policy_id)
changed = True
return changed
def _ensure_aa_policy_absent(
self, server, server_params):
"""
        ensures the provided anti affinity policy is removed from the server
:param server: the CLC server object
:param server_params: the dictionary of server parameters
        :return: changed - Boolean whether a change was made
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
aa_policy_id = server_params.get('anti_affinity_policy_id')
aa_policy_name = server_params.get('anti_affinity_policy_name')
if not aa_policy_id and aa_policy_name:
aa_policy_id = self._get_aa_policy_id_by_name(
self.clc,
self.module,
acct_alias,
aa_policy_name)
current_aa_policy_id = self._get_aa_policy_id_of_server(
self.clc,
self.module,
acct_alias,
server.id)
if aa_policy_id and aa_policy_id == current_aa_policy_id:
self._delete_aa_policy(
self.clc,
self.module,
acct_alias,
server.id)
changed = True
return changed
@staticmethod
def _modify_aa_policy(clc, module, acct_alias, server_id, aa_policy_id):
"""
modifies the anti affinity policy of the CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:param aa_policy_id: the anti affinity policy id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('PUT',
'servers/%s/%s/antiAffinityPolicy' % (
acct_alias,
server_id),
json.dumps({"id": aa_policy_id}))
except APIFailedResponse as ex:
module.fail_json(
msg='Unable to modify anti affinity policy to server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _delete_aa_policy(clc, module, acct_alias, server_id):
"""
Delete the anti affinity policy of the CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('DELETE',
'servers/%s/%s/antiAffinityPolicy' % (
acct_alias,
server_id),
json.dumps({}))
except APIFailedResponse as ex:
module.fail_json(
msg='Unable to delete anti affinity policy to server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _get_aa_policy_id_by_name(clc, module, alias, aa_policy_name):
"""
retrieves the anti affinity policy id of the server based on the name of the policy
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the CLC account alias
:param aa_policy_name: the anti affinity policy name
:return: aa_policy_id: The anti affinity policy id
"""
aa_policy_id = None
try:
aa_policies = clc.v2.API.Call(method='GET',
url='antiAffinityPolicies/%s' % alias)
except APIFailedResponse as ex:
return module.fail_json(
msg='Unable to fetch anti affinity policies from account alias : "{0}". {1}'.format(
alias, str(ex.response_text)))
for aa_policy in aa_policies.get('items'):
if aa_policy.get('name') == aa_policy_name:
if not aa_policy_id:
aa_policy_id = aa_policy.get('id')
else:
return module.fail_json(
msg='multiple anti affinity policies were found with policy name : %s' % aa_policy_name)
if not aa_policy_id:
module.fail_json(
msg='No anti affinity policy was found with policy name : %s' % aa_policy_name)
return aa_policy_id
@staticmethod
def _get_aa_policy_id_of_server(clc, module, alias, server_id):
"""
retrieves the anti affinity policy id of the server based on the CLC server id
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the CLC account alias
:param server_id: the CLC server id
:return: aa_policy_id: The anti affinity policy id
"""
aa_policy_id = None
try:
result = clc.v2.API.Call(
method='GET', url='servers/%s/%s/antiAffinityPolicy' %
(alias, server_id))
aa_policy_id = result.get('id')
except APIFailedResponse as ex:
if ex.response_status_code != 404:
module.fail_json(msg='Unable to fetch anti affinity policy for server "{0}". {1}'.format(
server_id, str(ex.response_text)))
return aa_policy_id
def _ensure_alert_policy_present(
self, server, server_params):
"""
ensures the server is updated with the provided alert policy
:param server: the CLC server object
:param server_params: the dictionary of server parameters
        :return: changed - Boolean whether a change was made
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
alert_policy_id = server_params.get('alert_policy_id')
alert_policy_name = server_params.get('alert_policy_name')
if not alert_policy_id and alert_policy_name:
alert_policy_id = self._get_alert_policy_id_by_name(
self.clc,
self.module,
acct_alias,
alert_policy_name)
if alert_policy_id and not self._alert_policy_exists(
server, alert_policy_id):
self._add_alert_policy_to_server(
self.clc,
self.module,
acct_alias,
server.id,
alert_policy_id)
changed = True
return changed
def _ensure_alert_policy_absent(
self, server, server_params):
"""
ensures the alert policy is removed from the server
:param server: the CLC server object
:param server_params: the dictionary of server parameters
        :return: changed - Boolean whether a change was made
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
alert_policy_id = server_params.get('alert_policy_id')
alert_policy_name = server_params.get('alert_policy_name')
if not alert_policy_id and alert_policy_name:
alert_policy_id = self._get_alert_policy_id_by_name(
self.clc,
self.module,
acct_alias,
alert_policy_name)
if alert_policy_id and self._alert_policy_exists(
server, alert_policy_id):
self._remove_alert_policy_to_server(
self.clc,
self.module,
acct_alias,
server.id,
alert_policy_id)
changed = True
return changed
@staticmethod
def _add_alert_policy_to_server(
clc, module, acct_alias, server_id, alert_policy_id):
"""
add the alert policy to CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:param alert_policy_id: the alert policy id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('POST',
'servers/%s/%s/alertPolicies' % (
acct_alias,
server_id),
json.dumps({"id": alert_policy_id}))
except APIFailedResponse as ex:
module.fail_json(msg='Unable to set alert policy to the server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _remove_alert_policy_to_server(
clc, module, acct_alias, server_id, alert_policy_id):
"""
remove the alert policy to the CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:param alert_policy_id: the alert policy id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('DELETE',
'servers/%s/%s/alertPolicies/%s'
% (acct_alias, server_id, alert_policy_id))
except APIFailedResponse as ex:
module.fail_json(msg='Unable to remove alert policy from the server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _get_alert_policy_id_by_name(clc, module, alias, alert_policy_name):
"""
retrieves the alert policy id of the server based on the name of the policy
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the CLC account alias
:param alert_policy_name: the alert policy name
:return: alert_policy_id: The alert policy id
"""
alert_policy_id = None
try:
alert_policies = clc.v2.API.Call(method='GET',
url='alertPolicies/%s' % alias)
except APIFailedResponse as ex:
return module.fail_json(msg='Unable to fetch alert policies for account : "{0}". {1}'.format(
alias, str(ex.response_text)))
for alert_policy in alert_policies.get('items'):
if alert_policy.get('name') == alert_policy_name:
if not alert_policy_id:
alert_policy_id = alert_policy.get('id')
else:
return module.fail_json(
msg='multiple alert policies were found with policy name : %s' % alert_policy_name)
return alert_policy_id
@staticmethod
def _alert_policy_exists(server, alert_policy_id):
"""
Checks if the alert policy exists for the server
:param server: the clc server object
:param alert_policy_id: the alert policy
:return: True: if the given alert policy id associated to the server, False otherwise
"""
result = False
alert_policies = server.alertPolicies
if alert_policies:
for alert_policy in alert_policies:
if alert_policy.get('id') == alert_policy_id:
result = True
return result
@staticmethod
def _set_user_agent(clc):
if hasattr(clc, 'SetRequestsSession'):
agent_string = "ClcAnsibleModule/" + __version__
ses = requests.Session()
ses.headers.update({"Api-Client": agent_string})
ses.headers['User-Agent'] += " " + agent_string
clc.SetRequestsSession(ses)
def main():
"""
The main function. Instantiates the module and calls process_request.
:return: none
"""
argument_dict = ClcModifyServer._define_module_argument_spec()
module = AnsibleModule(supports_check_mode=True, **argument_dict)
clc_modify_server = ClcModifyServer(module)
clc_modify_server.process_request()
from ansible.module_utils.basic import * # pylint: disable=W0614
if __name__ == '__main__':
main()
| gpl-3.0 | -9,147,589,440,165,450,000 | 34.997961 | 119 | 0.52642 | false |
OndrejIT/pyload | module/gui/ConnectionManager.py | 41 | 11002 | # -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: mkaay
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from os.path import join
from uuid import uuid4 as uuid
class ConnectionManager(QWidget):
warningShown = False
def __init__(self):
QWidget.__init__(self)
if not self.warningShown:
            QMessageBox.warning(self, 'Warning',
                "We are sorry, but the GUI is not stable yet. Please use the web interface for a much better experience.\n", QMessageBox.Ok)
ConnectionManager.warningShown = True
mainLayout = QHBoxLayout()
buttonLayout = QVBoxLayout()
self.setWindowTitle(_("pyLoad ConnectionManager"))
self.setWindowIcon(QIcon(join(pypath, "icons","logo.png")))
connList = QListWidget()
new = QPushButton(_("New"))
edit = QPushButton(_("Edit"))
remove = QPushButton(_("Remove"))
connect = QPushButton(_("Connect"))
#box = QFrame()
boxLayout = QVBoxLayout()
#box.setLayout(boxLayout)
boxLayout.addWidget(QLabel(_("Connect:")))
boxLayout.addWidget(connList)
line = QFrame()
#line.setFixedWidth(100)
line.setFrameShape(line.HLine)
line.setFrameShadow(line.Sunken)
line.setFixedHeight(10)
boxLayout.addWidget(line)
form = QFormLayout()
form.setMargin(5)
form.setSpacing(20)
form.setAlignment(Qt.AlignRight)
checkbox = QCheckBox()
form.addRow(_("Use internal Core:"), checkbox)
boxLayout.addLayout(form)
mainLayout.addLayout(boxLayout)
mainLayout.addLayout(buttonLayout)
buttonLayout.addWidget(new)
buttonLayout.addWidget(edit)
buttonLayout.addWidget(remove)
buttonLayout.addStretch()
buttonLayout.addWidget(connect)
self.setLayout(mainLayout)
self.internal = checkbox
self.new = new
self.connectb = connect
self.remove = remove
self.editb = edit
self.connList = connList
self.edit = self.EditWindow()
self.connectSignals()
self.defaultStates = {}
def connectSignals(self):
self.connect(self, SIGNAL("setConnections"), self.setConnections)
self.connect(self.new, SIGNAL("clicked()"), self.slotNew)
self.connect(self.editb, SIGNAL("clicked()"), self.slotEdit)
self.connect(self.remove, SIGNAL("clicked()"), self.slotRemove)
self.connect(self.connectb, SIGNAL("clicked()"), self.slotConnect)
self.connect(self.edit, SIGNAL("save"), self.slotSave)
self.connect(self.connList, SIGNAL("itemDoubleClicked(QListWidgetItem *)"), self.slotItemDoubleClicked)
self.connect(self.internal, SIGNAL("clicked()"), self.slotInternal)
def setConnections(self, connections):
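        # Each entry is expected to look like (illustrative values, mirroring
        # the dict built in slotNew below):
        #   {"id": "<hex uuid>", "type": "remote", "default": False,
        #    "name": "home", "host": "127.0.0.1", "port": "7228",
        #    "user": "admin", "password": "secret"}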
self.connList.clear()
for conn in connections:
item = QListWidgetItem()
item.setData(Qt.DisplayRole, QVariant(conn["name"]))
item.setData(Qt.UserRole, QVariant(conn))
self.connList.addItem(item)
if conn["default"]:
item.setData(Qt.DisplayRole, QVariant(_("%s (Default)") % conn["name"]))
self.connList.setCurrentItem(item)
def slotNew(self):
data = {"id":uuid().hex, "type":"remote", "default":False, "name":"", "host":"", "port":"7228", "user":"admin", "password":""}
self.edit.setData(data)
self.edit.show()
def slotEdit(self):
item = self.connList.currentItem()
data = item.data(Qt.UserRole).toPyObject()
data = self.cleanDict(data)
self.edit.setData(data)
self.edit.show()
def slotRemove(self):
item = self.connList.currentItem()
data = item.data(Qt.UserRole).toPyObject()
data = self.cleanDict(data)
self.emit(SIGNAL("removeConnection"), data)
def slotConnect(self):
if self.internal.checkState() == 2:
data = {"type": "internal"}
self.emit(SIGNAL("connect"), data)
else:
item = self.connList.currentItem()
data = item.data(Qt.UserRole).toPyObject()
data = self.cleanDict(data)
self.emit(SIGNAL("connect"), data)
def cleanDict(self, data):
tmp = {}
for k, d in data.items():
tmp[str(k)] = d
return tmp
def slotSave(self, data):
self.emit(SIGNAL("saveConnection"), data)
def slotItemDoubleClicked(self, defaultItem):
data = defaultItem.data(Qt.UserRole).toPyObject()
self.setDefault(data, True)
did = self.cleanDict(data)["id"]
#allItems = self.connList.findItems("*", Qt.MatchWildcard)
count = self.connList.count()
for i in range(count):
item = self.connList.item(i)
data = item.data(Qt.UserRole).toPyObject()
if self.cleanDict(data)["id"] == did:
continue
self.setDefault(data, False)
def slotInternal(self):
if self.internal.checkState() == 2:
self.connList.clearSelection()
def setDefault(self, data, state):
data = self.cleanDict(data)
self.edit.setData(data)
data = self.edit.getData()
data["default"] = state
self.edit.emit(SIGNAL("save"), data)
class EditWindow(QWidget):
def __init__(self):
QWidget.__init__(self)
self.setWindowTitle(_("pyLoad ConnectionManager"))
self.setWindowIcon(QIcon(join(pypath, "icons","logo.png")))
grid = QGridLayout()
nameLabel = QLabel(_("Name:"))
hostLabel = QLabel(_("Host:"))
localLabel = QLabel(_("Local:"))
userLabel = QLabel(_("User:"))
pwLabel = QLabel(_("Password:"))
portLabel = QLabel(_("Port:"))
name = QLineEdit()
host = QLineEdit()
local = QCheckBox()
user = QLineEdit()
password = QLineEdit()
password.setEchoMode(QLineEdit.Password)
port = QSpinBox()
port.setRange(1,10000)
save = QPushButton(_("Save"))
cancel = QPushButton(_("Cancel"))
grid.addWidget(nameLabel, 0, 0)
grid.addWidget(name, 0, 1)
grid.addWidget(localLabel, 1, 0)
grid.addWidget(local, 1, 1)
grid.addWidget(hostLabel, 2, 0)
grid.addWidget(host, 2, 1)
grid.addWidget(portLabel, 3, 0)
grid.addWidget(port, 3, 1)
grid.addWidget(userLabel, 4, 0)
grid.addWidget(user, 4, 1)
grid.addWidget(pwLabel, 5, 0)
grid.addWidget(password, 5, 1)
grid.addWidget(cancel, 6, 0)
grid.addWidget(save, 6, 1)
self.setLayout(grid)
self.controls = {"name": name,
"host": host,
"local": local,
"user": user,
"password": password,
"port": port,
"save": save,
"cancel": cancel}
self.connect(cancel, SIGNAL("clicked()"), self.hide)
self.connect(save, SIGNAL("clicked()"), self.slotDone)
self.connect(local, SIGNAL("stateChanged(int)"), self.slotLocalChanged)
self.id = None
self.default = None
def setData(self, data):
if not data: return
self.id = data["id"]
self.default = data["default"]
self.controls["name"].setText(data["name"])
if data["type"] == "local":
data["local"] = True
else:
data["local"] = False
self.controls["local"].setChecked(data["local"])
if not data["local"]:
self.controls["user"].setText(data["user"])
self.controls["password"].setText(data["password"])
self.controls["port"].setValue(int(data["port"]))
self.controls["host"].setText(data["host"])
self.controls["user"].setDisabled(False)
self.controls["password"].setDisabled(False)
self.controls["port"].setDisabled(False)
self.controls["host"].setDisabled(False)
else:
self.controls["user"].setText("")
self.controls["port"].setValue(1)
self.controls["host"].setText("")
self.controls["user"].setDisabled(True)
self.controls["password"].setDisabled(True)
self.controls["port"].setDisabled(True)
self.controls["host"].setDisabled(True)
def slotLocalChanged(self, val):
if val == 2:
self.controls["user"].setDisabled(True)
self.controls["password"].setDisabled(True)
self.controls["port"].setDisabled(True)
self.controls["host"].setDisabled(True)
elif val == 0:
self.controls["user"].setDisabled(False)
self.controls["password"].setDisabled(False)
self.controls["port"].setDisabled(False)
self.controls["host"].setDisabled(False)
def getData(self):
d = {}
d["id"] = self.id
d["default"] = self.default
d["name"] = self.controls["name"].text()
d["local"] = self.controls["local"].isChecked()
d["user"] = self.controls["user"].text()
d["password"] = self.controls["password"].text()
d["host"] = self.controls["host"].text()
d["port"] = self.controls["port"].value()
if d["local"]:
d["type"] = "local"
else:
d["type"] = "remote"
return d
def slotDone(self):
data = self.getData()
self.hide()
self.emit(SIGNAL("save"), data)
| gpl-3.0 | -338,773,994,099,214,100 | 35.430464 | 134 | 0.544901 | false |
nakagami/reportlab | src/tools/utils/dumpttf.py | 10 | 1887 | __all__=('dumpttf',)
def dumpttf(fn,fontName=None, verbose=0):
'''dump out known glyphs from a ttf file'''
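    # Typical call (the font file name is illustrative):
    #   dumpttf('DejaVuSans.ttf', verbose=1)
    # writes 'DejaVuSans-ttf-dump.pdf' into the current working directory.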
import os
if not os.path.isfile(fn):
raise IOError('No such file "%s"' % fn)
from reportlab.pdfbase.pdfmetrics import registerFont, stringWidth
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.pdfgen.canvas import Canvas
if fontName is None:
fontName = os.path.splitext(os.path.basename(fn))[0]
dmpfn = '%s-ttf-dump.pdf' % fontName
ttf = TTFont(fontName, fn)
K = ttf.face.charToGlyph.keys()
registerFont(ttf)
c = Canvas(dmpfn)
W,H = c._pagesize
titleFontSize = 30 # title font size
titleFontName = 'Helvetica'
labelFontName = 'Courier'
fontSize = 10
border = 36
dx0 = stringWidth('12345: ', fontName, fontSize)
dx = dx0+20
dy = 20
K.sort()
y = 0
page = 0
for i, k in enumerate(K):
if y<border:
if page: c.showPage()
page += 1
y = H - border - titleFontSize
c.setFont(titleFontName, titleFontSize)
c.drawCentredString(W/2.0,y, 'TrueType Font %s Page %d' %(fontName,page))
y -= 0.2*titleFontSize + dy
x = border
c.setFont(labelFontName, 10)
c.drawString(x,y,'%5.5x:' % k )
c.setFont(fontName, 10)
c.drawString(x+dx0,y,unichr(k).encode('utf8'))
x += dx
if x+dx>W-border:
x = border
y -= dy
c.showPage()
c.save()
if verbose:
print 'Font %s("%s") has %d glyphs\ndumped to "%s"' % (fontName,fn,len(K),dmpfn)
if __name__=='__main__':
import sys, glob
if '--verbose' in sys.argv:
sys.argv.remove('--verbose')
verbose = 1
else:
verbose = 0
for a in sys.argv[1:]:
for fn in glob.glob(a):
dumpttf(fn, verbose=verbose)
| bsd-3-clause | -8,049,532,288,049,378,000 | 30.45 | 88 | 0.564388 | false |